[optimizer] add log when lr_scheduler property is also set in optimizer
author: hyeonseok lee <hs89.lee@samsung.com>
Fri, 14 Apr 2023 07:35:34 +0000 (16:35 +0900)
committer: Jijoong Moon <jijoong.moon@samsung.com>
Tue, 18 Apr 2023 04:49:44 +0000 (13:49 +0900)
 - Added a warning log when Exponential learning rate scheduler properties (decay_rate, decay_steps) are set in both the optimizer and the lr_scheduler

Signed-off-by: hyeonseok lee <hs89.lee@samsung.com>
api/capi/include/nntrainer.h
nntrainer/optimizers/optimizer_wrapped.cpp
nntrainer/optimizers/optimizer_wrapped.h

index b69c72d..8bafaab 100644 (file)
@@ -393,6 +393,12 @@ int ml_train_optimizer_destroy(ml_train_optimizer_h optimizer);
  * @retval #ML_ERROR_NONE Successful.
  * @retval #ML_ERROR_NOT_SUPPORTED Not supported.
  * @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter.
+ * @note For now the properties for Exponential learning rate
+ * scheduler(decay_rate, decay_steps) can be set using
+ * ml_train_optimizer_set_property for backward compatibility. But
+ * ml_train_optimizer_set_property will not support to set decay_rate,
+ * decay_steps properties from tizen 8.0. Use ml_train_lr_scheduler_set_property
+ * instead.
  */
 int ml_train_optimizer_set_property(ml_train_optimizer_h optimizer, ...);
 
index 69b0338..c8eec44 100644 (file)
@@ -16,6 +16,7 @@
 #include <common_properties.h>
 #include <lr_scheduler_constant.h>
 #include <lr_scheduler_exponential.h>
+#include <nntrainer_log.h>
 #include <node_exporter.h>
 #include <optimizer_wrapped.h>
 
@@ -99,6 +100,9 @@ void OptimizerWrapped::finalize() {
   /** if lr_sched not set, make lr_sched from properties */
   if (!lr_sched) {
     if (!props_empty) {
+      ml_logw(
+        "Either decay_rate or decay_steps properties are set in optimizer. "
+        "Please set these properties in learning rate scheduler");
       lr_sched = std::make_unique<ExponentialLearningRateScheduler>();
       if (!props_dr.empty())
         lr_sched->setProperty({"decay_rate=" + std::to_string(props_dr.get())});
@@ -109,6 +113,9 @@ void OptimizerWrapped::finalize() {
       lr_sched = std::make_unique<ConstantLearningRateScheduler>();
     }
     lr_sched->setProperty({"learning_rate=" + std::to_string(props_lr.get())});
+  } else if (lr_sched && !props_lr.empty()) {
+    ml_logw("Learning rate property is set in both optimizer and learning rate "
+            "scheduler. The value which is set in Optimizer will be ignored.");
   }
 
   lr_sched->finalize();
index c0667f8..4158f6b 100644 (file)
@@ -153,6 +153,7 @@ private:
   std::shared_ptr<nntrainer::LearningRateScheduler>
     lr_sched; /**< the underlying learning rate scheduler */
 
+  /** @todo remove DecayRate, DecaySteps*/
   std::tuple<props::LearningRate, props::DecayRate, props::DecaySteps>
     props; /**< lr scheduler props for backward compatibility */
 };