Cleaner and more output during training initialization.

This commit is contained in:
Tomasz Sobczyk
2020-10-24 14:21:59 +02:00
committed by nodchip
parent b882423005
commit 2c477d76ec
12 changed files with 129 additions and 94 deletions

View File

@@ -54,23 +54,28 @@ namespace Eval::NNUE {
} // namespace
// Initialize learning
void initialize_training(const std::string& seed) {
std::cout << "Initializing NN training for "
<< get_architecture_string() << std::endl;
void initialize_training(
const std::string& seed,
SynchronizedRegionLogger::Region& out) {
std::cout << std::endl;
out << "INFO (initialize_training): Initializing NN training for "
<< get_architecture_string() << std::endl;
std::cout << "Layers:\n"
<< get_layers_info() << std::endl;
out << std::endl;
std::cout << std::endl;
out << "Layers:\n"
<< get_layers_info() << std::endl;
out << std::endl;
assert(feature_transformer);
assert(network);
trainer = Trainer<Network>::create(network.get(), feature_transformer.get());
rng.seed(PRNG(seed).rand<uint64_t>());
if (Options["SkipLoadingEval"]) {
out << "INFO (initialize_training): Performing random net initialization.\n";
trainer->initialize(rng);
}
}

View File

@@ -3,11 +3,15 @@
#include "learn/learn.h"
#include "misc.h"
// Interface used for learning NNUE evaluation function
namespace Eval::NNUE {
// Initialize learning
void initialize_training(const std::string& seed);
void initialize_training(
const std::string& seed,
SynchronizedRegionLogger::Region& out);
// set the number of samples in the mini-batch
void set_batch_size(uint64_t size);

View File

@@ -82,9 +82,9 @@ namespace Eval::NNUE::Layers {
static std::string get_layers_info() {
std::string info = PreviousLayer::get_layers_info();
info += '\n';
info += "\n - ";
info += std::to_string(kLayerIndex);
info += ": ";
info += " - ";
info += get_name();
return info;
}

View File

@@ -76,9 +76,9 @@ namespace Eval::NNUE::Layers {
static std::string get_layers_info() {
std::string info = PreviousLayer::get_layers_info();
info += '\n';
info += "\n - ";
info += std::to_string(kLayerIndex);
info += ": ";
info += " - ";
info += get_name();
return info;
}

View File

@@ -65,8 +65,9 @@ namespace Eval::NNUE::Layers {
}
static std::string get_layers_info() {
std::string info = std::to_string(kLayerIndex);
info += ": ";
std::string info = " - ";
info += std::to_string(kLayerIndex);
info += " - ";
info += get_name();
return info;
}

View File

@@ -60,9 +60,9 @@ namespace Eval::NNUE::Layers {
static std::string get_layers_info() {
std::string info = Tail::get_layers_info();
info += '\n';
info += "\n - ";
info += std::to_string(kLayerIndex);
info += ": ";
info += " - ";
info += get_name();
return info;
}

View File

@@ -130,8 +130,9 @@ namespace Eval::NNUE {
}
static std::string get_layers_info() {
std::string info = std::to_string(kLayerIndex);
info += ": ";
std::string info = " - ";
info += std::to_string(kLayerIndex);
info += " - ";
info += get_name();
return info;
}

View File

@@ -107,7 +107,7 @@ namespace Eval::NNUE {
<< " - " << LayerType::get_name()
<< std::endl;
out << "==> largest min activation = " << largest_min_activation
out << " - largest min activation = " << largest_min_activation
<< " , smallest max activation = " << smallest_max_activation
<< std::endl;

View File

@@ -347,17 +347,17 @@ namespace Eval::NNUE {
<< " - " << LayerType::get_name()
<< std::endl;
out << "==> observed " << observed_features.count()
out << " - observed " << observed_features.count()
<< " (out of " << kInputDimensions << ") features"
<< std::endl;
out << "==> (min, max) of pre-activations = "
out << " - (min, max) of pre-activations = "
<< min_pre_activation_ << ", "
<< max_pre_activation_ << " (limit = "
<< kPreActivationLimit << ")"
<< std::endl;
out << "==> largest min activation = " << largest_min_activation
out << " - largest min activation = " << largest_min_activation
<< " , smallest max activation = " << smallest_max_activation
<< std::endl;