Replace global_learning_rate with a learning_rate local to the learner, passed to update_parameters as a parameter.

Tomasz Sobczyk
2020-10-24 23:35:34 +02:00
committed by nodchip
parent cde6ec2bf2
commit f81fa3d712
3 changed files with 22 additions and 19 deletions


@@ -18,9 +18,6 @@
 #include "misc.h"
 #include "thread_win32_osx.h"
 
-// Learning rate scale
-double global_learning_rate;
-
 // Code for learning NNUE evaluation function
 namespace Eval::NNUE {
 
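For context on the first hunk: the deleted file-scope double global_learning_rate; meant every caller of update_parameters shared one mutable rate. A minimal sketch of that coupling, assuming hypothetical driver functions (tune_run_a and tune_run_b are illustrations, not code from this commit):

// Hypothetical sketch of the shared-state hazard the commit removes.
double global_learning_rate;    // one mutable rate shared by every caller

void tune_run_a() { global_learning_rate = 1.0; /* ...then update_parameters()... */ }
void tune_run_b() { global_learning_rate = 0.1; /* ...then update_parameters()... */ }

// If run B's assignment lands between run A's assignment and A's update,
// run A silently trains with B's rate. Passing the rate as an argument,
// as the second hunk does, ties each update to exactly one value.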
@@ -181,11 +178,15 @@ namespace Eval::NNUE {
     }
 
     // update the evaluation function parameters
-    void update_parameters(uint64_t epoch, bool verbose, Learner::CalcGradFunc calc_grad) {
+    void update_parameters(
+        uint64_t epoch,
+        bool verbose,
+        double learning_rate,
+        Learner::CalcGradFunc calc_grad)
+    {
         assert(batch_size > 0);
 
-        const auto learning_rate = static_cast<LearnFloatType>(
-            global_learning_rate / batch_size);
+        learning_rate /= batch_size;
 
         std::lock_guard<std::mutex> lock(examples_mutex);
         std::shuffle(examples.begin(), examples.end(), rng);
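With the new signature, each learner owns its rate and hands it in on every call. A minimal, runnable sketch of the calling pattern follows; the fixed batch_size, the stubbed update_parameters body, and the halving schedule are assumptions for illustration — nothing here beyond the signature and the learning_rate /= batch_size line comes from this commit:

// Illustrative stub mirroring the new update_parameters signature.
#include <cassert>
#include <cstdint>
#include <iostream>

namespace Eval::NNUE {

constexpr uint64_t batch_size = 1000;   // assumed value; configurable in the real learner

void update_parameters(uint64_t epoch, bool verbose,
                       double learning_rate, void* /*calc_grad, stubbed out*/) {
    assert(batch_size > 0);
    learning_rate /= batch_size;        // per-example scale, as in the diff
    if (verbose)
        std::cout << "epoch " << epoch << ": effective lr " << learning_rate << '\n';
}

} // namespace Eval::NNUE

int main() {
    double learning_rate = 1.0;         // learner-local state, replacing the global
    for (uint64_t epoch = 1; epoch <= 3; ++epoch) {
        Eval::NNUE::update_parameters(epoch, /*verbose=*/true, learning_rate, nullptr);
        learning_rate *= 0.5;           // e.g. a halving schedule (assumed)
    }
}

Dividing the rate by batch_size (as the original static_cast expression also did via global_learning_rate / batch_size) makes the applied step an average over the mini-batch's per-example gradients rather than a sum, keeping the effective step size comparable across batch sizes.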