From 072d698032f5966be17ce3aa1d5d108a33bdc1f4 Mon Sep 17 00:00:00 2001 From: Parichay Kapoor Date: Wed, 24 Jun 2020 11:57:49 +0900 Subject: [PATCH] [bias] Bias update missing for sgd Bias update fixed for sgd where it only happened when bias was initialized with 0 For adam, bias update was happening twice Signed-off-by: Parichay Kapoor --- nntrainer/include/optimizer.h | 3 +-- nntrainer/src/optimizer.cpp | 8 ++------ 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/nntrainer/include/optimizer.h b/nntrainer/include/optimizer.h index 76fb1f0..2b470d9 100644 --- a/nntrainer/include/optimizer.h +++ b/nntrainer/include/optimizer.h @@ -160,10 +160,9 @@ public: * @param[in/out] Weight Weight Tensor * @param[in/out] Bias Bias Tensor * @param[in] iteration nth epoch number - * @param[in] init_zero bool it is true if bias sets zero. */ void calculate(const Tensor &djdw, const Tensor &djdb, Tensor &weight, - Tensor &bias, int iteration, bool init_zero); + Tensor &bias, int iteration); /** * @brief Property Enumeration diff --git a/nntrainer/src/optimizer.cpp b/nntrainer/src/optimizer.cpp index dbb355f..34e8c26 100644 --- a/nntrainer/src/optimizer.cpp +++ b/nntrainer/src/optimizer.cpp @@ -78,8 +78,7 @@ int Optimizer::initialize(TensorDim d, bool set_tensor) { } void Optimizer::calculate(const Tensor &djdw, const Tensor &djdb, - Tensor &weight, Tensor &bias, int iteration, - bool init_zero) { + Tensor &weight, Tensor &bias, int iteration) { Tensor djdwAvg, djdbAvg; float ll = popt.learning_rate; if (popt.decay_steps != -1) { @@ -92,6 +91,7 @@ void Optimizer::calculate(const Tensor &djdw, const Tensor &djdb, switch (type) { case OptType::sgd: weight.add_i(djdwAvg, -ll); + bias.add_i(djdbAvg, -ll); break; case OptType::adam: { std::function sqrtEps = [&](float f) { @@ -124,10 +124,6 @@ void Optimizer::calculate(const Tensor &djdw, const Tensor &djdb, default: break; } - - if (init_zero) { - bias.add_i(djdbAvg, -ll); - } } int 
Optimizer::setProperty(std::vector values) { -- 2.7.4