- Change return type from void to int.
The C API will call this function, so it should return a status code.
- Change learning rate scheduler pointer from unique_ptr to shared_ptr
Signed-off-by: hyeonseok lee <hs89.lee@samsung.com>
*
* @param lrs the learning rate scheduler object
*/
- virtual void setLearningRateScheduler(
- std::unique_ptr<ml::train::LearningRateScheduler> &&lrs) = 0;
+ virtual int setLearningRateScheduler(
+ std::shared_ptr<ml::train::LearningRateScheduler> lrs) = 0;
};
/**
return optimizer->getOptimizerVariableDim(dim);
}
-void OptimizerWrapped::setLearningRateScheduler(
- std::unique_ptr<ml::train::LearningRateScheduler> &&lrs) {
- nntrainer::LearningRateScheduler *ptr =
- static_cast<nntrainer::LearningRateScheduler *>(lrs.release());
- lr_sched = std::unique_ptr<nntrainer::LearningRateScheduler>(ptr);
+int OptimizerWrapped::setLearningRateScheduler(
+ std::shared_ptr<ml::train::LearningRateScheduler> lrs) {
+ lr_sched = std::static_pointer_cast<nntrainer::LearningRateScheduler>(lrs);
+
+ return ML_ERROR_NONE;
}
nntrainer::LearningRateScheduler *OptimizerWrapped::getLearningRateScheduler() {
*
* @param lrs the learning rate scheduler object
*/
- void setLearningRateScheduler(
- std::unique_ptr<ml::train::LearningRateScheduler> &&lrs) override;
+ int setLearningRateScheduler(
+ std::shared_ptr<ml::train::LearningRateScheduler> lrs) override;
/**
* Support all the interface requirements by nntrainer::Optimizer
private:
std::unique_ptr<OptimizerCore> optimizer; /**< the underlying optimizer */
- std::unique_ptr<nntrainer::LearningRateScheduler>
+ std::shared_ptr<nntrainer::LearningRateScheduler>
lr_sched; /**< the underlying learning rate scheduler */
std::tuple<props::LearningRate, props::DecayRate, props::DecaySteps>