From edbbc1a4df941b7e41bb0b4b34adfe7db90f3ec7 Mon Sep 17 00:00:00 2001
From: Joost VandeVondele <Joost.VandeVondele@gmail.com>
Date: Sun, 6 Sep 2020 22:13:42 +0200
Subject: [PATCH] Remove some warnings

---
 src/learn/gensfen.cpp                       |  2 +-
 src/misc.h                                  |  2 +-
 src/nnue/trainer/trainer_affine_transform.h |  8 ++++----
 src/nnue/trainer/trainer_clipped_relu.h     |  8 ++++----
 src/nnue/trainer/trainer_input_slice.h      | 16 ++++++++--------
 src/nnue/trainer/trainer_sum.h              | 18 +++++++++---------
 6 files changed, 27 insertions(+), 27 deletions(-)

diff --git a/src/learn/gensfen.cpp b/src/learn/gensfen.cpp
index eeeb7b2e..6c8c455e 100644
--- a/src/learn/gensfen.cpp
+++ b/src/learn/gensfen.cpp
@@ -92,7 +92,7 @@ namespace Learner {
 
         // All buffers should be empty since file_worker_thread
         // should have written everything before exiting.
-        for (const auto& p : sfen_buffers) { assert(p == nullptr); }
+        for (const auto& p : sfen_buffers) { assert(p == nullptr); (void)p ; }
         assert(sfen_buffers_pool.empty());
     }
 #endif
diff --git a/src/misc.h b/src/misc.h
index 19bb008c..d73d0633 100644
--- a/src/misc.h
+++ b/src/misc.h
@@ -259,7 +259,7 @@ public:
   template <typename U> AlignedAllocator(const AlignedAllocator<U>&) {}
 
   T* allocate(std::size_t n) { return (T*)std_aligned_alloc(alignof(T), n * sizeof(T)); }
-  void deallocate(T* p, std::size_t n) { std_aligned_free(p); }
+  void deallocate(T* p, std::size_t ) { std_aligned_free(p); }
 };
 
 // --------------------
diff --git a/src/nnue/trainer/trainer_affine_transform.h b/src/nnue/trainer/trainer_affine_transform.h
index db56c1c0..da11ca29 100644
--- a/src/nnue/trainer/trainer_affine_transform.h
+++ b/src/nnue/trainer/trainer_affine_transform.h
@@ -25,9 +25,9 @@ class Trainer<Layers::AffineTransform<PreviousLayer, OutputDimensions>> {
  public:
   // factory function
   static std::shared_ptr<Trainer> Create(
-      LayerType* target_layer, FeatureTransformer* feature_transformer) {
+      LayerType* target_layer, FeatureTransformer* ft) {
     return std::shared_ptr<Trainer>(
-        new Trainer(target_layer, feature_transformer));
+        new Trainer(target_layer, ft));
   }
 
   // Set options such as hyperparameters
@@ -186,11 +186,11 @@ class Trainer<Layers::AffineTransform<PreviousLayer, OutputDimensions>> {
 
  private:
   // constructor
-  Trainer(LayerType* target_layer, FeatureTransformer* feature_transformer) :
+  Trainer(LayerType* target_layer, FeatureTransformer* ft) :
       batch_size_(0),
       batch_input_(nullptr),
       previous_layer_trainer_(Trainer<PreviousLayer>::Create(
-          &target_layer->previous_layer_, feature_transformer)),
+          &target_layer->previous_layer_, ft)),
       target_layer_(target_layer),
       biases_(),
       weights_(),
diff --git a/src/nnue/trainer/trainer_clipped_relu.h b/src/nnue/trainer/trainer_clipped_relu.h
index fd7b1a07..bd59a02d 100644
--- a/src/nnue/trainer/trainer_clipped_relu.h
+++ b/src/nnue/trainer/trainer_clipped_relu.h
@@ -23,9 +23,9 @@ class Trainer<Layers::ClippedReLU<PreviousLayer>> {
  public:
   // factory function
   static std::shared_ptr<Trainer> Create(
-      LayerType* target_layer, FeatureTransformer* feature_transformer) {
+      LayerType* target_layer, FeatureTransformer* ft) {
     return std::shared_ptr<Trainer>(
-        new Trainer(target_layer, feature_transformer));
+        new Trainer(target_layer, ft));
   }
 
   // Set options such as hyperparameters
@@ -78,10 +78,10 @@ class Trainer<Layers::ClippedReLU<PreviousLayer>> {
 
  private:
   // constructor
-  Trainer(LayerType* target_layer, FeatureTransformer* feature_transformer) :
+  Trainer(LayerType* target_layer, FeatureTransformer* ft) :
       batch_size_(0),
       previous_layer_trainer_(Trainer<PreviousLayer>::Create(
-          &target_layer->previous_layer_, feature_transformer)),
+          &target_layer->previous_layer_, ft)),
       target_layer_(target_layer) {
     std::fill(std::begin(min_activations_), std::end(min_activations_),
               std::numeric_limits<LearnFloatType>::max());
diff --git a/src/nnue/trainer/trainer_input_slice.h b/src/nnue/trainer/trainer_input_slice.h
index 33e39244..7d9e76c3 100644
--- a/src/nnue/trainer/trainer_input_slice.h
+++ b/src/nnue/trainer/trainer_input_slice.h
@@ -18,10 +18,10 @@ class SharedInputTrainer {
  public:
   // factory function
   static std::shared_ptr<SharedInputTrainer> Create(
-      FeatureTransformer* feature_transformer) {
+      FeatureTransformer* ft) {
     static std::shared_ptr<SharedInputTrainer> instance;
     if (!instance) {
-      instance.reset(new SharedInputTrainer(feature_transformer));
+      instance.reset(new SharedInputTrainer(ft));
     }
     ++instance->num_referrers_;
     return instance;
@@ -105,13 +105,13 @@ class SharedInputTrainer {
 
  private:
   // constructor
-  SharedInputTrainer(FeatureTransformer* feature_transformer) :
+  SharedInputTrainer(FeatureTransformer* ft) :
       batch_size_(0),
       num_referrers_(0),
       num_calls_(0),
       current_operation_(Operation::kNone),
       feature_transformer_trainer_(Trainer<FeatureTransformer>::Create(
-          feature_transformer)),
+          ft)),
       output_(nullptr) {
   }
 
@@ -161,8 +161,8 @@ class Trainer<Layers::InputSlice<OutputDimensions, Offset>> {
  public:
   // factory function
   static std::shared_ptr<Trainer> Create(
-      LayerType* /*target_layer*/, FeatureTransformer* feature_transformer) {
-    return std::shared_ptr<Trainer>(new Trainer(feature_transformer));
+      LayerType* /*target_layer*/, FeatureTransformer* ft) {
+    return std::shared_ptr<Trainer>(new Trainer(ft));
   }
 
   // Set options such as hyperparameters
@@ -218,9 +218,9 @@ class Trainer<Layers::InputSlice<OutputDimensions, Offset>> {
 
  private:
   // constructor
-  Trainer(FeatureTransformer* feature_transformer):
+  Trainer(FeatureTransformer* ft):
       batch_size_(0),
-      shared_input_trainer_(SharedInputTrainer::Create(feature_transformer)) {
+      shared_input_trainer_(SharedInputTrainer::Create(ft)) {
   }
 
   // number of input/output dimensions
diff --git a/src/nnue/trainer/trainer_sum.h b/src/nnue/trainer/trainer_sum.h
index fb5b1532..f7bf3b3d 100644
--- a/src/nnue/trainer/trainer_sum.h
+++ b/src/nnue/trainer/trainer_sum.h
@@ -25,9 +25,9 @@ class Trainer<Layers::Sum<FirstPreviousLayer, RemainingPreviousLayers...>> :
  public:
   // factory function
   static std::shared_ptr<Trainer> Create(
-      LayerType* target_layer, FeatureTransformer* feature_transformer) {
+      LayerType* target_layer, FeatureTransformer* ft) {
     return std::shared_ptr<Trainer>(
-        new Trainer(target_layer, feature_transformer));
+        new Trainer(target_layer, ft));
   }
 
   // Set options such as hyperparameters
@@ -74,11 +74,11 @@ class Trainer<Layers::Sum<FirstPreviousLayer, RemainingPreviousLayers...>> :
 
  private:
   // constructor
-  Trainer(LayerType* target_layer, FeatureTransformer* feature_transformer):
-      Tail(target_layer, feature_transformer),
+  Trainer(LayerType* target_layer, FeatureTransformer* ft):
+      Tail(target_layer, ft),
       batch_size_(0),
       previous_layer_trainer_(Trainer<FirstPreviousLayer>::Create(
-          &target_layer->previous_layer_, feature_transformer)),
+          &target_layer->previous_layer_, ft)),
       target_layer_(target_layer) {
   }
 
@@ -110,9 +110,9 @@ class Trainer<Layers::Sum<PreviousLayer>> {
  public:
   // factory function
   static std::shared_ptr<Trainer> Create(
-      LayerType* target_layer, FeatureTransformer* feature_transformer) {
+      LayerType* target_layer, FeatureTransformer* ft) {
     return std::shared_ptr<Trainer>(
-        new Trainer(target_layer, feature_transformer));
+        new Trainer(target_layer, ft));
   }
 
   // Set options such as hyperparameters
@@ -154,10 +154,10 @@ class Trainer<Layers::Sum<PreviousLayer>> {
 
  private:
   // constructor
-  Trainer(LayerType* target_layer, FeatureTransformer* feature_transformer) :
+  Trainer(LayerType* target_layer, FeatureTransformer* ft) :
      batch_size_(0),
       previous_layer_trainer_(Trainer<PreviousLayer>::Create(
-          &target_layer->previous_layer_, feature_transformer)),
+          &target_layer->previous_layer_, ft)),
       target_layer_(target_layer) {
   }
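
Note on the idioms used above: the (void)p cast keeps the range-for variable
"used" when NDEBUG compiles the assert away (silencing -Wunused-variable), and
dropping the parameter name in deallocate() silences -Wunused-parameter. The
feature_transformer -> ft renames presumably avoid -Wshadow, since the NNUE
code also declares a namespace-scope feature_transformer pointer of its own.
A minimal standalone sketch of the first two idioms (check_all_null and
deallocate_stub are hypothetical names, for illustration only):

    #include <cassert>
    #include <cstddef>

    // Under NDEBUG, assert(p == nullptr) expands to nothing and p becomes
    // unused; the (void) cast marks it as used in both build modes.
    void check_all_null(const int* const* buffers, std::size_t count) {
        for (std::size_t i = 0; i < count; ++i) {
            const int* p = buffers[i];
            assert(p == nullptr);
            (void)p;
        }
    }

    // An intentionally unused parameter can simply be left unnamed,
    // which silences -Wunused-parameter without any cast.
    void deallocate_stub(void* /*p*/, std::size_t /*n*/) {}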