Replace the old loss/grad calculation completely.

Author: Tomasz Sobczyk
Date: 2020-11-29 12:18:02 +01:00
Committed by: nodchip
Parent: b71d1e8620
Commit: 539bd2d1c8
5 changed files with 79 additions and 230 deletions


@@ -195,7 +195,6 @@ namespace Eval::NNUE {
         uint64_t epoch,
         bool verbose,
         double learning_rate,
-        Learner::CalcGradFunc calc_grad,
         Learner::CalcLossFunc calc_loss)
     {
         using namespace Learner::Autograd::UnivariateStatic;
@@ -237,8 +236,8 @@ namespace Eval::NNUE {
                 e.sign * network_output[b] * kPonanzaConstant));
             const auto discrete = e.sign * e.discrete_nn_eval;
             const auto& psv = e.psv;
-            const double gradient =
-                e.sign * calc_grad(shallow, (Value)psv.score, psv.game_result, psv.gamePly);
+            const auto loss = calc_loss(shallow, (Value)psv.score, psv.game_result, psv.gamePly);
+            const double gradient = loss.grad * e.sign * kPonanzaConstant;
             gradients[b] = static_cast<LearnFloatType>(gradient * e.weight);
@@ -330,4 +329,4 @@ namespace Eval::NNUE {
 #endif
     out << "INFO (save_eval): Finished saving evaluation file in " << eval_dir << std::endl;
 }
-} // namespace Eval::NNUE
\ No newline at end of file
+} // namespace Eval::NNUE
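
For context, the shape of the new interface: the commit collapses the separate calc_grad and calc_loss callbacks into a single calc_loss that returns both the loss value and its gradient, which the trainer then scales by e.sign and kPonanzaConstant. The sketch below shows one way such a callback could look; LossWithGrad, sigmoid_loss, the Value alias, and the value 600 for kPonanzaConstant are assumptions for illustration, not the repository's actual definitions.

// A minimal sketch, assuming a cross-entropy loss between the shallow (NNUE)
// eval and the teacher score, both mapped to winning probabilities through a
// sigmoid with an assumed Ponanza scaling constant of 600.
#include <cmath>
#include <functional>

using Value = int;  // assumption: a centipawn-scaled engine score

struct LossWithGrad {
    double value;  // the loss itself, usable for logging
    double grad;   // d(loss)/d(shallow), consumed as loss.grad in the diff
};

// Mirrors the call shape seen in the diff:
// calc_loss(shallow, (Value)psv.score, psv.game_result, psv.gamePly)
using CalcLossFunc = std::function<LossWithGrad(
    Value shallow, Value teacher_score, int game_result, int game_ply)>;

inline LossWithGrad sigmoid_loss(Value shallow, Value teacher_score,
                                 int /*game_result*/, int /*game_ply*/) {
    constexpr double kPonanzaConstant = 600.0;  // assumed value
    const double p = 1.0 / (1.0 + std::exp(-shallow / kPonanzaConstant));
    const double q = 1.0 / (1.0 + std::exp(-teacher_score / kPonanzaConstant));
    const double eps = 1e-12;  // guards log(0)
    const double loss =
        -(q * std::log(p + eps) + (1.0 - q) * std::log(1.0 - p + eps));
    // For cross-entropy through a sigmoid, d(loss)/d(shallow) = (p - q) / K.
    return {loss, (p - q) / kPonanzaConstant};
}

Under these assumptions, the trainer's loss.grad * e.sign * kPonanzaConstant is just the chain rule: shallow is built as e.sign * network_output[b] * kPonanzaConstant, so multiplying d(loss)/d(shallow) by the same sign and constant yields the gradient with respect to the raw network output.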


@@ -38,7 +38,6 @@ namespace Eval::NNUE {
         uint64_t epoch,
         bool verbose,
         double learning_rate,
-        Learner::CalcGradFunc calc_grad,
         Learner::CalcLossFunc calc_loss);

     // Check if there are any problems with learning
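
A usage sketch of how the updated training loop consumes this interface, mirroring the second hunk above. It builds on the types from the previous sketch; Example, batch, and compute_gradients are hypothetical stand-ins for the trainer's real batch entry (e in the diff) and loop, and float stands in for LearnFloatType.

#include <cstddef>
#include <vector>

// Hypothetical batch entry carrying the fields the diff reads from e.
struct Example {
    double sign;    // +1 or -1 depending on side to move (assumption)
    double weight;  // per-example weight, as in gradient * e.weight
    struct { Value score; int game_result; int gamePly; } psv;
};

void compute_gradients(const std::vector<Example>& batch,
                       const std::vector<double>& network_output,
                       std::vector<float>& gradients,
                       const CalcLossFunc& calc_loss) {
    constexpr double kPonanzaConstant = 600.0;  // assumed value
    for (std::size_t b = 0; b < batch.size(); ++b) {
        const auto& e = batch[b];
        // Scale the raw network output to a centipawn-style eval, as the
        // diff does just before the calc_loss call.
        const auto shallow = static_cast<Value>(
            e.sign * network_output[b] * kPonanzaConstant);
        const auto loss = calc_loss(shallow, e.psv.score,
                                    e.psv.game_result, e.psv.gamePly);
        // One callback now yields both loss and gradient; the old separate
        // calc_grad call is gone.
        const double gradient = loss.grad * e.sign * kPonanzaConstant;
        gradients[b] = static_cast<float>(gradient * e.weight);
    }
}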