diff --git a/.travis.yml b/.travis.yml
index 438bf4d0..503d678a 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -77,8 +77,7 @@ script:
   - if [[ "$TRAVIS_OS_NAME" != "linux" || "$COMP" == "gcc" ]]; then make clean && make -j2 ARCH=x86-64-modern profile-build && ../tests/signature.sh $benchref; fi

   # start some basic learner CI
-  #TODO enable -Werror
-  - export CXXFLAGS=""
+  - export CXXFLAGS="-Werror"
   - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then make clean && LDFLAGS="-lstdc++fs" make -j2 ARCH=x86-64-modern learn; fi
   - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then make clean && LDFLAGS="-lstdc++fs" make -j2 ARCH=x86-64-modern profile-learn; fi

diff --git a/src/learn/gensfen.cpp b/src/learn/gensfen.cpp
index 3d015acf..84feabb0 100644
--- a/src/learn/gensfen.cpp
+++ b/src/learn/gensfen.cpp
@@ -878,8 +878,7 @@ namespace Learner
                 next_move = search_pv[0];
             }

-        RANDOM_MOVE:;
-
+            // Random move.
             auto random_move = choose_random_move(pos, random_move_flag, ply, actual_random_move_count);
             if (random_move.has_value()) {
@@ -897,7 +896,7 @@ namespace Learner
                 a_psv.clear();
             }

-        DO_MOVE:;
+            // Do move.
             pos.do_move(next_move, states[ply]);

             // Call node evaluate() for each difference calculation.
diff --git a/src/nnue/evaluate_nnue_learner.cpp b/src/nnue/evaluate_nnue_learner.cpp
index 7be06832..8b0413e5 100644
--- a/src/nnue/evaluate_nnue_learner.cpp
+++ b/src/nnue/evaluate_nnue_learner.cpp
@@ -113,8 +113,13 @@ void SetOptions(const std::string& options) {
 void RestoreParameters(const std::string& dir_name) {
   const std::string file_name = Path::Combine(dir_name, NNUE::savedfileName);
   std::ifstream stream(file_name, std::ios::binary);
-  bool result = ReadParameters(stream);
+#ifndef NDEBUG
+  bool result =
+#endif
+  ReadParameters(stream);
+#ifndef NDEBUG
   assert(result);
+#endif

   SendMessages({{"reset"}});
 }
@@ -216,8 +221,13 @@ void save_eval(std::string dir_name) {

   const std::string file_name = Path::Combine(eval_dir, NNUE::savedfileName);
   std::ofstream stream(file_name, std::ios::binary);
-  const bool result = NNUE::WriteParameters(stream);
+#ifndef NDEBUG
+  const bool result =
+#endif
+  NNUE::WriteParameters(stream);
+#ifndef NDEBUG
   assert(result);
+#endif

   std::cout << "save_eval() finished. folder = " << eval_dir << std::endl;
 }
diff --git a/src/nnue/trainer/trainer.h b/src/nnue/trainer/trainer.h
index d526557a..94553c07 100644
--- a/src/nnue/trainer/trainer.h
+++ b/src/nnue/trainer/trainer.h
@@ -70,8 +70,8 @@ struct Example {

 // Message used for setting hyperparameters
 struct Message {
-  Message(const std::string& name, const std::string& value = ""):
-      name(name), value(value), num_peekers(0), num_receivers(0) {}
+  Message(const std::string& message_name, const std::string& message_value = ""):
+      name(message_name), value(message_value), num_peekers(0), num_receivers(0) {}
   const std::string name;
   const std::string value;
   std::uint32_t num_peekers;
diff --git a/src/nnue/trainer/trainer_input_slice.h b/src/nnue/trainer/trainer_input_slice.h
index b6d6635b..6b0adc9f 100644
--- a/src/nnue/trainer/trainer_input_slice.h
+++ b/src/nnue/trainer/trainer_input_slice.h
@@ -206,7 +206,7 @@ class Trainer<Layers::InputSlice<OutputDimensions, Offset>> {
       const IndexType input_offset = kInputDimensions * b;
       const IndexType output_offset = kOutputDimensions * b;
       for (IndexType i = 0; i < kInputDimensions; ++i) {
-        if (i < Offset || i >= Offset + kOutputDimensions) {
+        if ((int)i < (int)Offset || i >= Offset + kOutputDimensions) {
           gradients_[input_offset + i] = static_cast<LearnFloatType>(0.0);
         } else {
           gradients_[input_offset + i] = gradients[output_offset + i - Offset];
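
The #ifndef NDEBUG guards in evaluate_nnue_learner.cpp above exist because assert compiles to nothing once NDEBUG is defined, so a result captured only for the assertion becomes an unused variable in release builds, and the newly enabled -Werror in .travis.yml would turn that warning into a build failure. Below is a minimal standalone sketch of the same guard pattern, not taken from the patch; load_weights() and restore() are hypothetical stand-ins for NNUE::ReadParameters() / RestoreParameters().

    // Sketch only: mirrors the #ifndef NDEBUG pattern used in the patch.
    // load_weights() is a hypothetical stand-in for NNUE::ReadParameters().
    #include <cassert>
    #include <fstream>
    #include <string>

    static bool load_weights(std::ifstream& stream) {
        // Pretend-read: report success only if the stream opened.
        return static_cast<bool>(stream);
    }

    void restore(const std::string& file_name) {
        std::ifstream stream(file_name, std::ios::binary);
    #ifndef NDEBUG
        bool result =
    #endif
        load_weights(stream);
    #ifndef NDEBUG
        assert(result);  // compiled (and needed) only when NDEBUG is not defined
    #endif
    }

    int main() {
        restore("weights.bin");  // hypothetical file name
    }

An equivalent fix would be to mark the result [[maybe_unused]] or cast it to void; the sketch simply mirrors the preprocessor-guard style the patch itself uses.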