Mirror of https://github.com/HChaZZY/Stockfish.git, synced 2025-12-24 19:16:49 +08:00
PascalCase -> snake_case for consistency with the rest of the codebase.
@@ -44,9 +44,9 @@ namespace Eval::NNUE {
 std::shared_ptr<Trainer<Network>> trainer;

 // Tell the learner options such as hyperparameters
-void SendMessages(std::vector<Message> messages) {
+void send_messages(std::vector<Message> messages) {
     for (auto& message : messages) {
-        trainer->SendMessage(&message);
+        trainer->send_message(&message);
         assert(message.num_receivers > 0);
     }
 }
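For context, the renamed send_messages() fans each Message out to the trainer chain, and the assert verifies that at least one layer consumed it. A minimal sketch of the Message shape this loop assumes (field names here are assumptions, not the actual trainer-header definition):

```cpp
#include <string>

// Assumed shape only; the real struct lives in the trainer headers.
struct Message {
    std::string name;       // e.g. "reset" or "check_health"
    std::string value;      // optional payload for "key=value" options
    int num_receivers = 0;  // bumped by each trainer layer that handles it
};
```

Under that assumption, a call like send_messages({{"reset"}}) builds a single Message named "reset" and asserts that some layer received it.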
@@ -54,31 +54,31 @@ namespace Eval::NNUE {
 } // namespace

 // Initialize learning
-void InitializeTraining(const std::string& seed) {
+void initialize_training(const std::string& seed) {
     std::cout << "Initializing NN training for "
-              << GetArchitectureString() << std::endl;
+              << get_architecture_string() << std::endl;

     assert(feature_transformer);
     assert(network);
-    trainer = Trainer<Network>::Create(network.get(), feature_transformer.get());
+    trainer = Trainer<Network>::create(network.get(), feature_transformer.get());
     rng.seed(PRNG(seed).rand<uint64_t>());

     if (Options["SkipLoadingEval"]) {
-        trainer->Initialize(rng);
+        trainer->initialize(rng);
     }
 }

 // set the number of samples in the mini-batch
-void SetBatchSize(uint64_t size) {
+void set_batch_size(uint64_t size) {
     assert(size > 0);
     batch_size = size;
 }

 // Set options such as hyperparameters
-void SetOptions(const std::string& options) {
+void set_options(const std::string& options) {
     std::vector<Message> messages;
-    for (const auto& option : Split(options, ',')) {
-        const auto fields = Split(option, '=');
+    for (const auto& option : Algo::split(options, ',')) {
+        const auto fields = Algo::split(option, '=');
         assert(fields.size() == 1 || fields.size() == 2);

         if (fields.size() == 1) {
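set_options() takes a comma-separated option string in which each entry is either a bare flag or a key=value pair. A self-contained sketch of that parsing, with a simplified stand-in for Algo::split (the real helper lives elsewhere in the learner sources):

```cpp
#include <cassert>
#include <sstream>
#include <string>
#include <vector>

// Simplified stand-in for Algo::split.
std::vector<std::string> split(const std::string& s, char delim) {
    std::vector<std::string> out;
    std::istringstream stream(s);
    std::string field;
    while (std::getline(stream, field, delim))
        out.push_back(field);
    return out;
}

int main() {
    // "eta=0.1" is a key/value pair; "momentum" is a bare flag.
    for (const auto& option : split("eta=0.1,momentum", ',')) {
        const auto fields = split(option, '=');
        assert(fields.size() == 1 || fields.size() == 2);
    }
}
```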
@@ -88,30 +88,30 @@ namespace Eval::NNUE {
         }
     }

-    SendMessages(std::move(messages));
+    send_messages(std::move(messages));
 }

 // Reread the evaluation function parameters for learning from the file
-void RestoreParameters(const std::string& dir_name) {
-    const std::string file_name = Path::Combine(dir_name, NNUE::savedfileName);
+void restore_parameters(const std::string& dir_name) {
+    const std::string file_name = Path::combine(dir_name, NNUE::savedfileName);
     std::ifstream stream(file_name, std::ios::binary);
 #ifndef NDEBUG
     bool result =
 #endif
-    ReadParameters(stream);
+    read_parameters(stream);
 #ifndef NDEBUG
     assert(result);
 #endif

-    SendMessages({{"reset"}});
+    send_messages({{"reset"}});
 }

-void FinalizeNet() {
-    SendMessages({{"clear_unobserved_feature_weights"}});
+void finalize_net() {
+    send_messages({{"clear_unobserved_feature_weights"}});
 }

 // Add 1 sample of learning data
-void AddExample(Position& pos, Color rootColor,
+void add_example(Position& pos, Color rootColor,
                 const Learner::PackedSfenValue& psv, double weight) {

     Example example;
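Path::combine (formerly Path::Combine) joins a directory and a file name. A minimal sketch of the assumed behavior; the real helper may handle separators differently:

```cpp
#include <string>

// Assumed behavior of Path::combine, for illustration only.
std::string combine(const std::string& dir, const std::string& file) {
    if (dir.empty())
        return file;
    const char last = dir.back();
    return (last == '/' || last == '\\') ? dir + file : dir + "/" + file;
}
```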
@@ -126,7 +126,7 @@ namespace Eval::NNUE {

     Features::IndexList active_indices[2];
     for (const auto trigger : kRefreshTriggers) {
-        RawFeatures::AppendActiveIndices(pos, trigger, active_indices);
+        RawFeatures::append_active_indices(pos, trigger, active_indices);
     }

     if (pos.side_to_move() != WHITE) {
@@ -136,9 +136,9 @@ namespace Eval::NNUE {
     for (const auto color : Colors) {
         std::vector<TrainingFeature> training_features;
         for (const auto base_index : active_indices[color]) {
-            static_assert(Features::Factorizer<RawFeatures>::GetDimensions() <
+            static_assert(Features::Factorizer<RawFeatures>::get_dimensions() <
                           (1 << TrainingFeature::kIndexBits), "");
-            Features::Factorizer<RawFeatures>::AppendTrainingFeatures(
+            Features::Factorizer<RawFeatures>::append_training_features(
                 base_index, &training_features);
         }

@@ -147,7 +147,7 @@ namespace Eval::NNUE {
         auto& unique_features = example.training_features[color];
         for (const auto& feature : training_features) {
             if (!unique_features.empty() &&
-                feature.GetIndex() == unique_features.back().GetIndex()) {
+                feature.get_index() == unique_features.back().get_index()) {

                 unique_features.back() += feature;
             } else {
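The loop above merges duplicate features by comparing each entry's index with the previous one, which only works because the list is sorted first (the surrounding code arranges that). A self-contained sketch of the pattern, assuming a TrainingFeature whose operator+= accumulates a per-feature count:

```cpp
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <vector>

// Assumed shape of TrainingFeature for this sketch.
struct TrainingFeature {
    std::uint32_t index;
    std::uint32_t count = 1;
    std::uint32_t get_index() const { return index; }
    TrainingFeature& operator+=(const TrainingFeature& o) {
        count += o.count;
        return *this;
    }
};

std::vector<TrainingFeature> merge_features(std::vector<TrainingFeature> v) {
    // Sort so that equal indices become adjacent.
    std::sort(v.begin(), v.end(),
              [](const auto& a, const auto& b) { return a.index < b.index; });
    std::vector<TrainingFeature> unique;
    for (const auto& f : v) {
        if (!unique.empty() && f.get_index() == unique.back().get_index())
            unique.back() += f;  // same feature seen again: accumulate the count
        else
            unique.push_back(f);
    }
    return unique;
}

int main() {
    auto merged = merge_features({{5}, {3}, {5}});
    assert(merged.size() == 2 && merged[1].count == 2);  // index 5 merged once
}
```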
@@ -161,7 +161,7 @@ namespace Eval::NNUE {
 }

 // update the evaluation function parameters
-void UpdateParameters() {
+void update_parameters() {
     assert(batch_size > 0);

     const auto learning_rate = static_cast<LearnFloatType>(
@@ -173,30 +173,30 @@ namespace Eval::NNUE {
         std::vector<Example> batch(examples.end() - batch_size, examples.end());
         examples.resize(examples.size() - batch_size);

-        const auto network_output = trainer->Propagate(batch);
+        const auto network_output = trainer->propagate(batch);

         std::vector<LearnFloatType> gradients(batch.size());
         for (std::size_t b = 0; b < batch.size(); ++b) {
-            const auto shallow = static_cast<Value>(Round<std::int32_t>(
+            const auto shallow = static_cast<Value>(round<std::int32_t>(
                 batch[b].sign * network_output[b] * kPonanzaConstant));
             const auto& psv = batch[b].psv;
             const double gradient = batch[b].sign * Learner::calc_grad(shallow, psv);
             gradients[b] = static_cast<LearnFloatType>(gradient * batch[b].weight);
         }

-        trainer->Backpropagate(gradients.data(), learning_rate);
+        trainer->backpropagate(gradients.data(), learning_rate);
     }
-    SendMessages({{"quantize_parameters"}});
+    send_messages({{"quantize_parameters"}});
 }

 // Check if there are any problems with learning
-void CheckHealth() {
-    SendMessages({{"check_health"}});
+void check_health() {
+    send_messages({{"check_health"}});
 }

 // save merit function parameters to a file
 void save_eval(std::string dir_name) {
-    auto eval_dir = Path::Combine(Options["EvalSaveDir"], dir_name);
+    auto eval_dir = Path::combine(Options["EvalSaveDir"], dir_name);
     std::cout << "save_eval() start. folder = " << eval_dir << std::endl;

     // mkdir() will fail if this folder already exists, but
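In the gradient loop above, the raw network output is scaled to a centipawn-like Value by kPonanzaConstant (600 in this lineage of the learner code) and the sign flips samples stored from Black's point of view. A hedged, self-contained sketch; calc_grad here is a hypothetical stand-in for Learner::calc_grad, which in the real code derives the gradient from the teacher data in PackedSfenValue:

```cpp
#include <cmath>
#include <iostream>

constexpr double kPonanzaConstant = 600.0;  // assumed value of the scaling constant

// Hypothetical stand-in for Learner::calc_grad: derivative of a squared
// error between the shallow (NN) score and a deep (teacher) score.
double calc_grad(double shallow, double deep) {
    return 2.0 * (shallow - deep);
}

int main() {
    const double network_output = 0.5;  // raw NN output for one sample
    const int    sign           = -1;   // -1 when the example was stored from Black's view
    const double deep           = -250; // illustrative teacher score

    // Same mapping as update_parameters(): NN output -> centipawn-scale value.
    const double shallow  = std::round(sign * network_output * kPonanzaConstant);
    const double gradient = sign * calc_grad(shallow, deep);
    std::cout << "shallow = " << shallow << ", gradient = " << gradient << '\n';
}
```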
@@ -204,12 +204,12 @@ namespace Eval::NNUE {
     // Also, assume that the folders up to EvalSaveDir have been dug.
     std::filesystem::create_directories(eval_dir);

-    const std::string file_name = Path::Combine(eval_dir, NNUE::savedfileName);
+    const std::string file_name = Path::combine(eval_dir, NNUE::savedfileName);
     std::ofstream stream(file_name, std::ios::binary);
 #ifndef NDEBUG
     bool result =
 #endif
-    WriteParameters(stream);
+    write_parameters(stream);
 #ifndef NDEBUG
     assert(result);
 #endif
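Both restore_parameters() and save_eval() use the same #ifndef NDEBUG guard so the I/O result is captured only when asserts are live; in release builds the statement collapses to a bare call. A minimal sketch of the pattern, with a hypothetical write_parameters declaration standing in for the real one:

```cpp
#include <cassert>
#include <fstream>

bool write_parameters(std::ofstream& stream);  // defined elsewhere in the learner

void save_sketch(std::ofstream& stream) {
    // Debug builds: bool result = write_parameters(stream); assert(result);
    // Release builds (NDEBUG defined): just write_parameters(stream);
#ifndef NDEBUG
    bool result =
#endif
    write_parameters(stream);
#ifndef NDEBUG
    assert(result);
#endif
}
```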