From a80ab2b0b04e923d9f375420aa7eb3f8dad21535 Mon Sep 17 00:00:00 2001
From: Parichay Kapoor
Date: Fri, 21 Aug 2020 15:45:13 +0900
Subject: [PATCH] [INI] Refactor the INI naming for saving the model file

This patch updates the INI key used for saving the model file from
'Model' to 'Save_Path', and renames the corresponding NeuralNetwork
member from 'model' to 'save_path'.

**Self evaluation:**
1. Build test: [x]Passed [ ]Failed [ ]Skipped
2. Run test: [x]Passed [ ]Failed [ ]Skipped

Signed-off-by: Parichay Kapoor
---
 Applications/Classification/README.md | 2 +-
 Applications/Classification/res/Classification.ini | 2 +-
 Applications/Classification/res/Classification_func.ini | 2 +-
 Applications/Classification/res/Classification_new.ini | 2 +-
 Applications/LogisticRegression/res/LogisticRegression.ini | 2 +-
 Applications/ReinforcementLearning/DeepQ/jni/DeepQ.ini | 2 +-
 Applications/Tizen_CAPI/Tizen_CAPI_config.ini | 2 +-
 Applications/Training/res/Training.ini | 2 +-
 Applications/mnist/res/mnist.ini | 2 +-
 Applications/mnist/res/mnist_valid.ini | 2 +-
 docs/configuration-ini.md | 8 ++++----
 nntrainer/include/neuralnet.h | 4 ++--
 nntrainer/src/model_loader.cpp | 2 +-
 nntrainer/src/neuralnet.cpp | 14 +++++++-------
 test/include/nntrainer_test_util.h | 2 +-
 test/tizen_capi/test_conf.ini | 2 +-
 16 files changed, 26 insertions(+), 26 deletions(-)

diff --git a/Applications/Classification/README.md b/Applications/Classification/README.md
index 338e4c2..b8470bc 100644
--- a/Applications/Classification/README.md
+++ b/Applications/Classification/README.md
@@ -54,7 +54,7 @@ Decay_steps = 1000
 Epoch = 30000
 Optimizer = adam
 Cost = cross
-Model = "model.bin"
+Save_Path = "model.bin"
 minibatch = 32
 beta1 = 0.9
 beta2 = 0.9999
diff --git a/Applications/Classification/res/Classification.ini b/Applications/Classification/res/Classification.ini
index 13a5bf8..48bd139 100644
--- a/Applications/Classification/res/Classification.ini
+++ b/Applications/Classification/res/Classification.ini
@@ -9,7 +9,7 @@ Optimizer = adam # Optimizer : sgd (stochastic gradien decent),
 #             adam (Adamtive Moment Estimation)
 Cost = cross # Cost(loss) function : mse (mean squared error)
 #            cross (cross entropy)
-Model = "model.bin" # model path to save / read
+Save_Path = "model.bin" # model path to save / read
 minibatch = 32 # mini batch size
 beta1 = 0.9 # beta 1 for adam
 beta2 = 0.9999 # beta 2 for adam
diff --git a/Applications/Classification/res/Classification_func.ini b/Applications/Classification/res/Classification_func.ini
index d968dcb..9fc201d 100644
--- a/Applications/Classification/res/Classification_func.ini
+++ b/Applications/Classification/res/Classification_func.ini
@@ -9,7 +9,7 @@ Optimizer = adam # Optimizer : sgd (stochastic gradien decent),
 #             adam (Adamtive Moment Estimation)
 Cost = cross # Cost(loss) function : mse (mean squared error)
 #            cross (cross entropy)
-Model = "model.bin" # model path to save / read
+Save_Path = "model.bin" # model path to save / read
 minibatch = 32 # mini batch size
 beta1 = 0.9 # beta 1 for adam
 beta2 = 0.9999 # beta 2 for adam
diff --git a/Applications/Classification/res/Classification_new.ini b/Applications/Classification/res/Classification_new.ini
index 33afd77..c2234fc 100644
--- a/Applications/Classification/res/Classification_new.ini
+++ b/Applications/Classification/res/Classification_new.ini
@@ -10,7 +10,7 @@ Optimizer = sgd # Optimizer : sgd (stochastic gradien decent),
 Activation = sigmoid # activation : sigmoid, tanh
 Cost = cross # Cost(loss) function : mse (mean squared error)
 #            cross (cross entropy)
-Model = "model.bin" # model path to save / read
+Save_Path = "model.bin" # model path to save / read
"model.bin" # model path to save / read minibatch = 32 # mini batch size beta1 = 0.9 # beta 1 for adam beta2 = 0.9999 # beta 2 for adam diff --git a/Applications/LogisticRegression/res/LogisticRegression.ini b/Applications/LogisticRegression/res/LogisticRegression.ini index ed12fe7..2f227f6 100644 --- a/Applications/LogisticRegression/res/LogisticRegression.ini +++ b/Applications/LogisticRegression/res/LogisticRegression.ini @@ -7,7 +7,7 @@ Optimizer = sgd # Optimizer : sgd (stochastic gradien decent), # adam (Adamtive Moment Estimation) Cost = cross # Cost(loss) function : mse (mean squared error) # cross ( cross entropy ) -Model = "model.bin" # model path to save / read +Save_Path = "model.bin" # model path to save / read minibatch = 1 # mini batch size epsilon = 1e-5 diff --git a/Applications/ReinforcementLearning/DeepQ/jni/DeepQ.ini b/Applications/ReinforcementLearning/DeepQ/jni/DeepQ.ini index fc4dc54..1b249e4 100644 --- a/Applications/ReinforcementLearning/DeepQ/jni/DeepQ.ini +++ b/Applications/ReinforcementLearning/DeepQ/jni/DeepQ.ini @@ -8,7 +8,7 @@ Optimizer = adam # Optimizer : sgd (stochastic gradien decent), Cost = mse # Cost(loss) function : mse (mean squared error) # cross (cross entropy) -Model = "model.bin" # model path to save / read +Save_Path = "model.bin" # model path to save / read minibatch = 32 # mini batch size beta1 = 0.9 # beta 1 for adam beta2 = 0.9999 # beta 2 for adam diff --git a/Applications/Tizen_CAPI/Tizen_CAPI_config.ini b/Applications/Tizen_CAPI/Tizen_CAPI_config.ini index bb95b8a..072e421 100644 --- a/Applications/Tizen_CAPI/Tizen_CAPI_config.ini +++ b/Applications/Tizen_CAPI/Tizen_CAPI_config.ini @@ -9,7 +9,7 @@ Optimizer = adam # Optimizer : sgd (stochastic gradien decent), # adam (Adamtive Moment Estimation) Cost = cross # Cost(loss) function : mse (mean squared error) # cross ( for cross entropy ) -Model = "model.bin" # model path to save / read +Save_Path = "model.bin" # model path to save / read minibatch = 32 # mini batch size beta1 = 0.9 # beta 1 for adam beta2 = 0.9999 # beta 2 for adam diff --git a/Applications/Training/res/Training.ini b/Applications/Training/res/Training.ini index 324b446..52eac60 100644 --- a/Applications/Training/res/Training.ini +++ b/Applications/Training/res/Training.ini @@ -7,7 +7,7 @@ Optimizer = sgd # Optimizer : sgd (stochastic gradien decent), # adam (Adamtive Moment Estimation) Cost = cross # Cost(loss) function : mse (mean squared error) # cross (cross entropy) -Model = "model.bin" # model path to save / read +Save_Path = "model.bin" # model path to save / read minibatch = 1 # mini batch size # beta1 = 0.9 # beta 1 for adam # beta2 = 0.9999 # beta 2 for adam diff --git a/Applications/mnist/res/mnist.ini b/Applications/mnist/res/mnist.ini index 8521409..cb10de1 100644 --- a/Applications/mnist/res/mnist.ini +++ b/Applications/mnist/res/mnist.ini @@ -7,7 +7,7 @@ Optimizer = adam # Optimizer : sgd (stochastic gradien decent), # adam (Adamtive Moment Estimation) Cost = cross # Cost(loss) function : mse (mean squared error) # cross ( for cross entropy ) -Model = "model.bin" # model path to save / read +Save_Path = "model.bin" # model path to save / read minibatch = 32 # mini batch size beta1 = 0.9 # beta 1 for adam beta2 = 0.999 # beta 2 for adam diff --git a/Applications/mnist/res/mnist_valid.ini b/Applications/mnist/res/mnist_valid.ini index 32dde54..958f5a8 100644 --- a/Applications/mnist/res/mnist_valid.ini +++ b/Applications/mnist/res/mnist_valid.ini @@ -7,7 +7,7 @@ Optimizer = adam # Optimizer : sgd (stochastic 
 #             adam (Adamtive Moment Estimation)
 Cost = cross # Cost(loss) function : mse (mean squared error)
 #            cross ( for cross entropy )
-Model = "model.bin" # model path to save / read
+Save_Path = "model.bin" # model path to save / read
 minibatch = 32 # mini batch size
 beta1 = 0.9 # beta 1 for adam
 beta2 = 0.999 # beta 2 for adam
diff --git a/docs/configuration-ini.md b/docs/configuration-ini.md
index 486b633..af35be1 100644
--- a/docs/configuration-ini.md
+++ b/docs/configuration-ini.md
@@ -15,7 +15,7 @@ Network section includes the hyper-parameters about Network such as mini batch s
 
 Start with "[Model]"
 
-1. ```type = ```
+1. ```type (mandatory) = ```
 
    Type of Network
      * regression : network for linear regression
@@ -43,7 +43,7 @@ Start with "[Model]"
      * cross : cross entropy
       Only allowed with sigmoid and softmax activation function
 
-6. ```model = ```
+6. ```save_path = ```
 
    Model file path to save updated weights
 
@@ -73,7 +73,7 @@ Learning_rate = 1e-4
 Epoch = 1500
 Optimizer = adam
 Cost = cross
-Model = "model.bin"
+Save_Path = "model.bin"
 minibatch = 32
 beta1 = 0.9
 beta2 = 0.999
@@ -268,7 +268,7 @@ learning_rate = 1e-4
 epoch = 1500
 optimizer = adam
 cost = cross
-model = "model.bin"
+Save_Path = "model.bin"
 minibatch = 32
 beta1 = 0.9
 beta2 = 0.999
diff --git a/nntrainer/include/neuralnet.h b/nntrainer/include/neuralnet.h
index 11ea4f2..083c5f5 100644
--- a/nntrainer/include/neuralnet.h
+++ b/nntrainer/include/neuralnet.h
@@ -78,7 +78,7 @@ public:
     swap(lhs.loss, rhs.loss);
     swap(lhs.cost, rhs.cost);
     swap(lhs.weight_ini, rhs.weight_ini);
-    swap(lhs.model, rhs.model);
+    swap(lhs.save_path, rhs.save_path);
     swap(lhs.opt, rhs.opt);
     swap(lhs.net_type, rhs.net_type);
     swap(lhs.layers, rhs.layers);
@@ -306,7 +306,7 @@ private:
   WeightIniType weight_ini; /**< Weight Initialization type */
 
-  std::string model; /**< Model path to save / read */
+  std::string save_path; /**< Model path to save / read */
 
   Optimizer opt; /**< Optimizer, This gets copied into each layer, do not use this directly */
diff --git a/nntrainer/src/model_loader.cpp b/nntrainer/src/model_loader.cpp
index 383fae4..7f83520 100644
--- a/nntrainer/src/model_loader.cpp
+++ b/nntrainer/src/model_loader.cpp
@@ -44,7 +44,7 @@ int ModelLoader::loadModelConfigIni(dictionary *ini, NeuralNetwork &model) {
   model.epoch = iniparser_getint(ini, "Model:Epoch", model.epoch);
   model.cost = (CostType)parseType(
     iniparser_getstring(ini, "Model:Cost", unknown), TOKEN_COST);
-  model.model = iniparser_getstring(ini, "Model:Model", "model.bin");
+  model.save_path = iniparser_getstring(ini, "Model:Save_path", "./model.bin");
   model.batch_size = iniparser_getint(ini, "Model:Minibatch", model.batch_size);
 
   /** Default to adam optimizer */
diff --git a/nntrainer/src/neuralnet.cpp b/nntrainer/src/neuralnet.cpp
index fc64394..7f640a0 100644
--- a/nntrainer/src/neuralnet.cpp
+++ b/nntrainer/src/neuralnet.cpp
@@ -162,7 +162,7 @@ int NeuralNetwork::setTrainConfig(std::vector<std::string> values) {
       NN_RETURN_STATUS();
     } break;
     case PropertyType::model_file: {
-      model = value;
+      save_path = value;
     } break;
    case PropertyType::continue_train: {
      bool cont_train;
@@ -335,12 +335,12 @@ NeuralNetwork &NeuralNetwork::copy(NeuralNetwork &from) {
 }
 
 /**
- * @brief save model
+ * @brief save model to file
  * save Weight & Bias Data into file by calling save from layer
  * save training parameters from the optimizer
  */
 void NeuralNetwork::saveModel() {
-  std::ofstream model_file(model, std::ios::out | std::ios::binary);
+  std::ofstream model_file(save_path, std::ios::out | std::ios::binary);
   for (unsigned int i = 0; i < layers.size(); i++)
     layers[i]->save(model_file);
   model_file.write((char *)&iter, sizeof(iter));
@@ -348,21 +348,21 @@ void NeuralNetwork::saveModel() {
 }
 
 /**
- * @brief read model
+ * @brief read model from file
  * read Weight & Bias Data into file by calling save from layer
  * read training parameters from the optimizer if continuing train
  */
 void NeuralNetwork::readModel() {
-  if (!isFileExist(model))
+  if (!isFileExist(save_path))
     return;
-  std::ifstream model_file(model, std::ios::in | std::ios::binary);
+  std::ifstream model_file(save_path, std::ios::in | std::ios::binary);
   for (unsigned int i = 0; i < layers.size(); i++)
     layers[i]->read(model_file);
   if (continue_train) {
     model_file.read((char *)&iter, sizeof(iter));
   }
   model_file.close();
-  ml_logi("read modelfile: %s", model.c_str());
+  ml_logi("read modelfile: %s", save_path.c_str());
 }
 
 int NeuralNetwork::train() {
diff --git a/test/include/nntrainer_test_util.h b/test/include/nntrainer_test_util.h
index 4ad5b7b..135c7c1 100644
--- a/test/include/nntrainer_test_util.h
+++ b/test/include/nntrainer_test_util.h
@@ -190,7 +190,7 @@ const std::string config_str = "[Model]"
   "\n"
   "weight_Decay_Lambda = 0.005"
   "\n"
-  "Model = 'model.bin'"
+  "Save_Path = 'model.bin'"
   "\n"
   "minibatch = 32"
   "\n"
diff --git a/test/tizen_capi/test_conf.ini b/test/tizen_capi/test_conf.ini
index 3cdabb6..cf9f6a7 100644
--- a/test/tizen_capi/test_conf.ini
+++ b/test/tizen_capi/test_conf.ini
@@ -7,7 +7,7 @@ Optimizer = sgd # Optimizer : sgd (stochastic gradien decent),
 #             adam (Adamtive Moment Estimation)
 Cost = cross # Cost(loss) function : mse (mean squared error)
 #            cross (Cross Entropy)
-Model = "model.bin" # model path to save / read
+Save_Path = "model.bin" # model path to save / read
 Minibatch = 1 # mini batch size
 # beta1 = 0.9 # beta 1 for adam
 # beta2 = 0.9999 # beta 2 for adam
--
2.7.4
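
For reference, a minimal [Model] section as it might look after this rename is sketched
below. The keys and values mirror the docs example touched by this patch; grouping them
into one snippet here is illustrative only, not a config file shipped by this change.

[Model]                    # illustrative sketch, values taken from the docs example above
Learning_rate = 1e-4
Epoch = 1500
Optimizer = adam
Cost = cross
Save_Path = "model.bin"    # was: Model = "model.bin"
minibatch = 32
beta1 = 0.9
beta2 = 0.999

As far as this diff shows, the loader now reads only Model:Save_path (matching Save_Path
in the INI files, since iniparser key lookup is case-insensitive) and falls back to
"./model.bin" when the key is absent, so a config that still uses the old Model key would
silently get the default path and should be updated along with this change.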