* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_NOT_SUPPORTED Not supported.
* @retval #ML_ERROR_INVALID_PARAMETER Invalid parameter.
+ * @note For now, the properties of the exponential learning rate
+ * scheduler (decay_rate, decay_steps) can still be set through
+ * ml_train_optimizer_set_property() for backward compatibility. However,
+ * starting with Tizen 8.0, ml_train_optimizer_set_property() will no longer
+ * accept the decay_rate and decay_steps properties; use
+ * ml_train_lr_scheduler_set_property() instead.
*/
int ml_train_optimizer_set_property(ml_train_optimizer_h optimizer, ...);
#include <common_properties.h>
#include <lr_scheduler_constant.h>
#include <lr_scheduler_exponential.h>
+#include <nntrainer_log.h>
#include <node_exporter.h>
#include <optimizer_wrapped.h>
/** if lr_sched not set, make lr_sched from properties */
if (!lr_sched) {
  if (!props_empty) {
    /**
     * Decay properties were given on the optimizer: keep the legacy
     * behavior by building an exponential scheduler from them, but warn
     * that this path is deprecated (see @note on
     * ml_train_optimizer_set_property).
     */
    ml_logw(
      "Either decay_rate or decay_steps properties are set in optimizer. "
      "Please set these properties in learning rate scheduler");
    lr_sched = std::make_unique<ExponentialLearningRateScheduler>();
    if (!props_dr.empty())
      lr_sched->setProperty({"decay_rate=" + std::to_string(props_dr.get())});
  } else {
    /** no decay properties: a plain constant learning rate suffices */
    lr_sched = std::make_unique<ConstantLearningRateScheduler>();
  }
  /** propagate the optimizer-level learning rate to the new scheduler */
  lr_sched->setProperty({"learning_rate=" + std::to_string(props_lr.get())});
} else if (!props_lr.empty()) {
  /** scheduler exists already; the optimizer-level learning rate loses */
  ml_logw("Learning rate property is set in both optimizer and learning rate "
          "scheduler. The value which is set in Optimizer will be ignored.");
}
lr_sched->finalize();
std::shared_ptr<nntrainer::LearningRateScheduler>
lr_sched; /**< the underlying learning rate scheduler */
+ /** @todo remove DecayRate, DecaySteps*/
std::tuple<props::LearningRate, props::DecayRate, props::DecaySteps>
props; /**< lr scheduler props for backward compatibility */
};