Remove some warnings

This commit is contained in:
Joost VandeVondele
2020-09-06 22:13:42 +02:00
committed by nodchip
parent 3a06de298b
commit edbbc1a4df
6 changed files with 27 additions and 27 deletions

View File

@@ -25,9 +25,9 @@ class Trainer<Layers::AffineTransform<PreviousLayer, OutputDimensions>> {
public:
// factory function
static std::shared_ptr<Trainer> Create(
-LayerType* target_layer, FeatureTransformer* feature_transformer) {
+LayerType* target_layer, FeatureTransformer* ft) {
return std::shared_ptr<Trainer>(
-new Trainer(target_layer, feature_transformer));
+new Trainer(target_layer, ft));
}
// Set options such as hyperparameters
@@ -186,11 +186,11 @@ class Trainer<Layers::AffineTransform<PreviousLayer, OutputDimensions>> {
private:
// constructor
-Trainer(LayerType* target_layer, FeatureTransformer* feature_transformer) :
+Trainer(LayerType* target_layer, FeatureTransformer* ft) :
batch_size_(0),
batch_input_(nullptr),
previous_layer_trainer_(Trainer<PreviousLayer>::Create(
-&target_layer->previous_layer_, feature_transformer)),
+&target_layer->previous_layer_, ft)),
target_layer_(target_layer),
biases_(),
weights_(),

View File

@@ -23,9 +23,9 @@ class Trainer<Layers::ClippedReLU<PreviousLayer>> {
public:
// factory function
static std::shared_ptr<Trainer> Create(
-LayerType* target_layer, FeatureTransformer* feature_transformer) {
+LayerType* target_layer, FeatureTransformer* ft) {
return std::shared_ptr<Trainer>(
-new Trainer(target_layer, feature_transformer));
+new Trainer(target_layer, ft));
}
// Set options such as hyperparameters
@@ -78,10 +78,10 @@ class Trainer<Layers::ClippedReLU<PreviousLayer>> {
private:
// constructor
-Trainer(LayerType* target_layer, FeatureTransformer* feature_transformer) :
+Trainer(LayerType* target_layer, FeatureTransformer* ft) :
batch_size_(0),
previous_layer_trainer_(Trainer<PreviousLayer>::Create(
-&target_layer->previous_layer_, feature_transformer)),
+&target_layer->previous_layer_, ft)),
target_layer_(target_layer) {
std::fill(std::begin(min_activations_), std::end(min_activations_),
std::numeric_limits<LearnFloatType>::max());

View File

@@ -18,10 +18,10 @@ class SharedInputTrainer {
public:
// factory function
static std::shared_ptr<SharedInputTrainer> Create(
-FeatureTransformer* feature_transformer) {
+FeatureTransformer* ft) {
static std::shared_ptr<SharedInputTrainer> instance;
if (!instance) {
-instance.reset(new SharedInputTrainer(feature_transformer));
+instance.reset(new SharedInputTrainer(ft));
}
++instance->num_referrers_;
return instance;
@@ -105,13 +105,13 @@ class SharedInputTrainer {
private:
// constructor
-SharedInputTrainer(FeatureTransformer* feature_transformer) :
+SharedInputTrainer(FeatureTransformer* ft) :
batch_size_(0),
num_referrers_(0),
num_calls_(0),
current_operation_(Operation::kNone),
feature_transformer_trainer_(Trainer<FeatureTransformer>::Create(
-feature_transformer)),
+ft)),
output_(nullptr) {
}
@@ -161,8 +161,8 @@ class Trainer<Layers::InputSlice<OutputDimensions, Offset>> {
public:
// factory function
static std::shared_ptr<Trainer> Create(
-LayerType* /*target_layer*/, FeatureTransformer* feature_transformer) {
-return std::shared_ptr<Trainer>(new Trainer(feature_transformer));
+LayerType* /*target_layer*/, FeatureTransformer* ft) {
+return std::shared_ptr<Trainer>(new Trainer(ft));
}
// Set options such as hyperparameters
@@ -218,9 +218,9 @@ class Trainer<Layers::InputSlice<OutputDimensions, Offset>> {
private:
// constructor
-Trainer(FeatureTransformer* feature_transformer):
+Trainer(FeatureTransformer* ft):
batch_size_(0),
-shared_input_trainer_(SharedInputTrainer::Create(feature_transformer)) {
+shared_input_trainer_(SharedInputTrainer::Create(ft)) {
}
// number of input/output dimensions

View File

@@ -25,9 +25,9 @@ class Trainer<Layers::Sum<FirstPreviousLayer, RemainingPreviousLayers...>> :
public:
// factory function
static std::shared_ptr<Trainer> Create(
-LayerType* target_layer, FeatureTransformer* feature_transformer) {
+LayerType* target_layer, FeatureTransformer* ft) {
return std::shared_ptr<Trainer>(
-new Trainer(target_layer, feature_transformer));
+new Trainer(target_layer, ft));
}
// Set options such as hyperparameters
@@ -74,11 +74,11 @@ class Trainer<Layers::Sum<FirstPreviousLayer, RemainingPreviousLayers...>> :
private:
// constructor
-Trainer(LayerType* target_layer, FeatureTransformer* feature_transformer):
-Tail(target_layer, feature_transformer),
+Trainer(LayerType* target_layer, FeatureTransformer* ft):
+Tail(target_layer, ft),
batch_size_(0),
previous_layer_trainer_(Trainer<FirstPreviousLayer>::Create(
-&target_layer->previous_layer_, feature_transformer)),
+&target_layer->previous_layer_, ft)),
target_layer_(target_layer) {
}
@@ -110,9 +110,9 @@ class Trainer<Layers::Sum<PreviousLayer>> {
public:
// factory function
static std::shared_ptr<Trainer> Create(
-LayerType* target_layer, FeatureTransformer* feature_transformer) {
+LayerType* target_layer, FeatureTransformer* ft) {
return std::shared_ptr<Trainer>(
-new Trainer(target_layer, feature_transformer));
+new Trainer(target_layer, ft));
}
// Set options such as hyperparameters
@@ -154,10 +154,10 @@ class Trainer<Layers::Sum<PreviousLayer>> {
private:
// constructor
-Trainer(LayerType* target_layer, FeatureTransformer* feature_transformer) :
+Trainer(LayerType* target_layer, FeatureTransformer* ft) :
batch_size_(0),
previous_layer_trainer_(Trainer<PreviousLayer>::Create(
-&target_layer->previous_layer_, feature_transformer)),
+&target_layer->previous_layer_, ft)),
target_layer_(target_layer) {
}