From d374b56fdadb00770a3b8c26b98ab3480ed4230c Mon Sep 17 00:00:00 2001 From: Jihoon Lee Date: Wed, 28 Oct 2020 13:56:11 +0900 Subject: [PATCH] [Fix/Optimizer] Fix decay_rate application There was a bug where decay_rate was always applied even when decay_steps was set to its default value, because `decay_steps != -1` always evaluated to true. This patch fixes the issue. **Self evaluation:** 1. Build test: [X]Passed [ ]Failed [ ]Skipped 2. Run test: [X]Passed [ ]Failed [ ]Skipped Signed-off-by: Jihoon Lee --- nntrainer/include/optimizer_internal.h | 8 ++++---- nntrainer/src/optimizer.cpp | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/nntrainer/include/optimizer_internal.h b/nntrainer/include/optimizer_internal.h index aa5ac36..4bf3777 100644 --- a/nntrainer/include/optimizer_internal.h +++ b/nntrainer/include/optimizer_internal.h @@ -52,7 +52,7 @@ public: * @brief Default Constructor of Optimizer Class */ Optimizer(const OptType t, float lr, float decay_rate = 1.0f, - float decay_steps = -1.0f, float continue_train = false) : + unsigned int decay_steps = 0, float continue_train = false) : type(t), learning_rate(lr), decay_rate(decay_rate), @@ -175,9 +175,9 @@ protected: */ virtual double getLearningRate(int iteration); - float learning_rate; /** learning rate */ - float decay_rate; /** decay rate for learning rate */ - float decay_steps; /** decay steps for learning rate */ + float learning_rate; /** learning rate */ + float decay_rate; /** decay rate for learning rate */ + unsigned int decay_steps; /** decay steps for learning rate */ bool continue_train; /** Continue training with previous tensors for adam */ private: diff --git a/nntrainer/src/optimizer.cpp b/nntrainer/src/optimizer.cpp index e4bcc3e..604e575 100644 --- a/nntrainer/src/optimizer.cpp +++ b/nntrainer/src/optimizer.cpp @@ -42,8 +42,8 @@ int Optimizer::initialize(std::shared_ptr weight_list, double Optimizer::getLearningRate(int iteration) { double ll = learning_rate; - if (decay_steps != -1) { - 
ll = ll * pow(decay_rate, (iteration / decay_steps)); + if (decay_steps != 0) { + ll = ll * pow(decay_rate, (iteration / (float)decay_steps)); } return ll; @@ -111,7 +111,7 @@ void Optimizer::setProperty(const PropertyType type, const std::string &value) { status = setFloat(learning_rate, value); break; case PropertyType::decay_steps: - status = setFloat(decay_steps, value); + status = setUint(decay_steps, value); break; case PropertyType::decay_rate: status = setFloat(decay_rate, value); -- 2.7.4