From 1a9f5dd295a489e3fd47f8a1dcf285a0300211d6 Mon Sep 17 00:00:00 2001
From: Jihoon Lee
Date: Thu, 10 Mar 2022 18:40:10 +0900
Subject: [PATCH] Revert "Load Optimizer Variables"

This reverts commit c669732b1f52f4aad3114839fe1ebba0f5d95f27.

As this revert contains compatibility-breaking changes to the saved
model format, it should be merged in coordination with the projects
where nntrainer is being used.

Signed-off-by: Jihoon Lee
---
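Note: the compatibility break is in the trailing layout of the
serialized model file. The sketch below is reconstructed from the
hunks that follow, assuming an adam optimizer; the read_trailer()
helper, the field labels, and the use of strncmp instead of
nntrainer's istrequal are illustrative only, not part of nntrainer's
API.

  // Trailing layout restored by this revert (see NeuralNetwork::save):
  //   [layer weights]
  //   [optimizer type string, written by opt->save()/writeString()]
  //   [adam m/v tensors, one pair per trainable weight]
  //   [epoch_idx : unsigned int]
  //   [iter      : unsigned int]
  //
  // Trailing layout written by the reverted commit:
  //   [layer weights]
  //   [epoch_idx : unsigned int]
  //   [iter      : unsigned int]
  //   [optimizer type : unsigned int enum]
  //   [write_op_var   : unsigned int, 1 when the optimizer is adam]
  //   [adam m/v tensors, one pair per trainable weight]

  #include <cstring>
  #include <fstream>

  // Minimal reader for the restored trailer, mirroring the load() hunk
  // below; checkedRead and the per-weight tensor loop are elided.
  void read_trailer(std::ifstream &model_file) {
    char opt_type[4];
    model_file.read(opt_type, 4); // optimizer type, e.g. "adam"
    if (std::strncmp(opt_type, "adam", 4) == 0) {
      // read the adam m/v tensors for every trainable weight here
    }
    unsigned int epoch_idx = 0, iter = 0;
    model_file.read((char *)&epoch_idx, sizeof(epoch_idx));
    model_file.read((char *)&iter, sizeof(iter));
  }

A checkpoint saved in one layout cannot be read by a binary that
expects the other, which is why this revert has to land together with
matching updates wherever nntrainer checkpoints are consumed.
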
 nntrainer/layers/layer_node.cpp          |  9 ++------
 nntrainer/layers/layer_node.h            |  4 +---
 nntrainer/models/neuralnet.cpp           | 38 ++++++++++++--------------------
 nntrainer/optimizers/adam.cpp            | 12 +++++-----
 nntrainer/optimizers/adam.h              | 17 +-------------
 nntrainer/optimizers/optimizer_devel.cpp | 23 ++-----------------
 nntrainer/optimizers/optimizer_devel.h   |  5 -----
 7 files changed, 25 insertions(+), 83 deletions(-)

diff --git a/nntrainer/layers/layer_node.cpp b/nntrainer/layers/layer_node.cpp
index a53a415..f9e6d50 100644
--- a/nntrainer/layers/layer_node.cpp
+++ b/nntrainer/layers/layer_node.cpp
@@ -415,7 +415,7 @@ void LayerNode::exportTo(Exporter &exporter,
   layer->exportTo(exporter, method);
 }
 
-void LayerNode::read(std::ifstream &file, bool opt_var, bool load_opt_var) {
+void LayerNode::read(std::ifstream &file, bool opt_var) {
   NNTR_THROW_IF(!run_context, std::runtime_error)
     << __func__ << " layer needs to be finalized first!";
   if (opt_var) {
@@ -425,12 +425,7 @@ void LayerNode::read(std::ifstream &file, bool opt_var, bool load_opt_var) {
       if (run_context->weightHasGradient(i)) {
         for (unsigned int j = 0; j < run_context->getNumWeightOptVar(i);
              ++j) {
-          if (load_opt_var) {
-            run_context->getWeightOptVar(i, j).read(file);
-          } else {
-            file.seekg(run_context->getWeightOptVar(i, j).bytes(),
-                       std::ios::cur);
-          }
+          run_context->getWeightOptVar(i, j).read(file);
         }
       }
     }
diff --git a/nntrainer/layers/layer_node.h b/nntrainer/layers/layer_node.h
index cc71fee..ede2006 100644
--- a/nntrainer/layers/layer_node.h
+++ b/nntrainer/layers/layer_node.h
@@ -573,10 +573,8 @@ public:
    * @brief     read layer Weight & Bias data from file
    * @param file input file stream
    * @param bool read optimizer variables
-   * @param bool load optimizer variables
    */
-  void read(std::ifstream &file, bool opt_var = false,
-            bool load_opt_var = true);
+  void read(std::ifstream &file, bool opt_var = false);
 
   /**
    * @brief     save layer Weight & Bias data from file
diff --git a/nntrainer/models/neuralnet.cpp b/nntrainer/models/neuralnet.cpp
index 6c1845c..8eaba4d 100644
--- a/nntrainer/models/neuralnet.cpp
+++ b/nntrainer/models/neuralnet.cpp
@@ -333,16 +333,7 @@ void NeuralNetwork::save(const std::string &file_path,
       (*iter)->save(model_file);
     }
 
-    model_file.write((char *)&epoch_idx, sizeof(epoch_idx));
-    model_file.write((char *)&iter, sizeof(iter));
-
     opt->save(model_file);
-    unsigned int write_op_var = 0;
-    if (istrequal(opt->getType(), "adam")) {
-      write_op_var = 1;
-    }
-
-    model_file.write((char *)&write_op_var, sizeof(write_op_var));
 
     if (istrequal(opt->getType(), "adam")) {
       for (auto iter = model_graph.cbegin(); iter != model_graph.cend();
@@ -351,10 +342,12 @@ void NeuralNetwork::save(const std::string &file_path,
         (*iter)->save(model_file, true);
       }
     }
+    model_file.write((char *)&epoch_idx, sizeof(epoch_idx));
+    model_file.write((char *)&iter, sizeof(iter));
+
     model_file.close();
     break;
   }
-
   case ml::train::ModelFormat::MODEL_FORMAT_INI:
     saveModelIni(file_path);
     break;
@@ -403,24 +396,21 @@ void NeuralNetwork::load(const std::string &file_path,
     /// read. so, after this line, additional read shouldn't be called
     model_file.seekg(bin_file_pos);
 
+    if (istrequal(opt->getType(), "adam")) {
+      char opt_type[4];
+      model_file.read(opt_type, 4);
+      if (istrequal(opt_type, "adam")) {
+        for (auto iter = model_graph.cbegin(); iter != model_graph.cend();
+             iter++) {
+          (*iter)->read(model_file, true);
+        }
+      }
+    }
+
     checkedRead(model_file, (char *)&epoch_idx, sizeof(epoch_idx),
                 "[NeuralNetwork::readModel] failed to read epoch_idx");
     checkedRead(model_file, (char *)&iter, sizeof(iter),
                 "[NeuralNetwork::readModel] failed to read iteration");
-
-    opt->read(model_file);
-    unsigned int exist_op_var = 0;
-
-    checkedRead(model_file, (char *)&exist_op_var, sizeof(exist_op_var));
-
-    if (istrequal(opt->getType(), "adam") && exist_op_var) {
-      bool load_opt_var = opt->is_load_var();
-      for (auto iter = model_graph.cbegin(); iter != model_graph.cend();
-           iter++) {
-        (*iter)->read(model_file, true, load_opt_var);
-      }
-    }
-
   } catch (...) {
     std::cerr << "failed to read epoch idx, proceeding with default index\n";
   }
diff --git a/nntrainer/optimizers/adam.cpp b/nntrainer/optimizers/adam.cpp
index 6c9e581..bccd8fb 100644
--- a/nntrainer/optimizers/adam.cpp
+++ b/nntrainer/optimizers/adam.cpp
@@ -22,16 +22,14 @@
 
 namespace nntrainer {
 
-Adam::Adam() :
-  adam_props(PropsB1(), PropsB2(), PropsEpsilon(), TorchRef(), LoadVar()) {
+Adam::Adam() : adam_props(PropsB1(), PropsB2(), PropsEpsilon(), TorchRef()) {
   /** default properties */
   setProperty({"learning_rate=0.001"});
-  auto &[b1, b2, eps, torch_ref, load_mv] = adam_props;
+  auto &[b1, b2, eps, torch_ref] = adam_props;
   b1.set(0.9f);
   b2.set(0.999f);
   eps.set(1.0e-7f);
   torch_ref.set(false);
-  load_mv.set(true);
 }
 
 Adam::~Adam() {}
@@ -77,8 +75,8 @@ void Adam::applyGradient(RunOptimizerContext &context) {
   // This is implementation of adam from original paper.
   // This is not deleted intentionally.
   unsigned int iteration = context.getIteration();
-  float biasCorrection1 = 1.0f - pow(beta1, iteration + 1);
-  float biasCorrection2 = 1.0f - pow(beta2, iteration + 1);
+  float biasCorrection1 = 1 - pow(beta1, iteration + 1);
+  float biasCorrection2 = 1 - pow(beta2, iteration + 1);
 
   Tensor &wm = context.getOptimizerVariable(AdamParams::wm);
   Tensor &wv = context.getOptimizerVariable(AdamParams::wv);
@@ -90,7 +88,7 @@ void Adam::applyGradient(RunOptimizerContext &context) {
 
   if (torch_ref) {
     Tensor denom = wv.apply(sqrtFloat);
-    denom.divide_i(sqrtDouble(biasCorrection2));
+    denom.divide_i(sqrtFloat(biasCorrection2));
     denom.add_i(epsilon);
 
     wm.divide(denom, x_grad);
diff --git a/nntrainer/optimizers/adam.h b/nntrainer/optimizers/adam.h
index b7d5cd5..de0eb45 100644
--- a/nntrainer/optimizers/adam.h
+++ b/nntrainer/optimizers/adam.h
@@ -63,16 +63,6 @@ public:
 };
 
 /**
- * @brief load momentum
- *
- */
-class LoadVar : public Property<bool> {
-public:
-  static constexpr const char *key = "load_var"; /**< unique key to access */
-  using prop_tag = bool_prop_tag;                /**< property type */
-};
-
-/**
  * @class   Adam optimizer class
  * @brief   Adam optimizer
  */
@@ -123,13 +113,8 @@ public:
    */
   void setProperty(const std::vector<std::string> &values) override;
 
-  /**
-   * @copydoc Optimizer::is_load_var()
-   */
-  bool is_load_var() { return std::get<LoadVar>(adam_props).get(); }
-
 private:
-  std::tuple<PropsB1, PropsB2, PropsEpsilon, TorchRef, LoadVar> adam_props;
+  std::tuple<PropsB1, PropsB2, PropsEpsilon, TorchRef> adam_props;
 };
 
 } /* namespace nntrainer */
diff --git a/nntrainer/optimizers/optimizer_devel.cpp b/nntrainer/optimizers/optimizer_devel.cpp
index d33bc44..5704fb5 100644
--- a/nntrainer/optimizers/optimizer_devel.cpp
+++ b/nntrainer/optimizers/optimizer_devel.cpp
@@ -30,19 +30,7 @@ void Optimizer::setProperty(const std::vector<std::string> &values) {
 }
 
 void Optimizer::read(std::ifstream &file) {
-  std::string loaded_type;
-  unsigned int opt_type = ml::train::OptimizerType::UNKNOWN;
-  checkedRead(file, (char *)&opt_type, sizeof(opt_type));
-  switch (opt_type) {
-  case ml::train::OptimizerType::ADAM:
-    loaded_type = "adam";
-    break;
-  case ml::train::OptimizerType::SGD:
-    loaded_type = "sgd";
-    break;
-  default:
-    break;
-  }
+  std::string loaded_type = readString(file);
 
   if (loaded_type != getType()) {
     throw std::runtime_error(
@@ -50,13 +38,6 @@ void Optimizer::read(std::ifstream &file) {
   }
 }
 
-void Optimizer::save(std::ofstream &file) {
-  unsigned int opt_type = ml::train::OptimizerType::UNKNOWN;
-  if (istrequal(getType(), "adam"))
-    opt_type = ml::train::OptimizerType::ADAM;
-  if (istrequal(getType(), "sgd"))
-    opt_type = ml::train::OptimizerType::SGD;
-  file.write((char *)&opt_type, sizeof(opt_type));
-}
+void Optimizer::save(std::ofstream &file) { writeString(file, getType()); }
 
 } // namespace nntrainer
diff --git a/nntrainer/optimizers/optimizer_devel.h b/nntrainer/optimizers/optimizer_devel.h
index 9b5f242..cc51227 100644
--- a/nntrainer/optimizers/optimizer_devel.h
+++ b/nntrainer/optimizers/optimizer_devel.h
@@ -89,11 +89,6 @@ public:
   virtual void save(std::ofstream &file);
 
   /**
-   * @brief     get the option of loading optimizer variables
-   */
-  virtual bool is_load_var() { return false; };
-
-  /**
    * @brief     Get dimension of extra variables if the optimizer needs any.
    * @param dim Dimension of tensor to be added as a optimizer variable
    * @return    Vector of dimensions
-- 
2.7.4