// SPDX-License-Identifier: Apache-2.0
/**
 * Copyright (C) 2021 Parichay Kapoor <pk.kapoor@samsung.com>
 *
 * @file   optimizer_wrapped.cpp
 * @date   10 December 2021
 * @brief  This is Optimizer Wrapped interface class
 * @see    https://github.com/nnstreamer/nntrainer
 * @author Parichay Kapoor <pk.kapoor@samsung.com>
 * @bug    No known bugs except for NYI items
 * @details wraps the optimizer and learning rate scheduler together
 */
15 #include <app_context.h>
16 #include <common_properties.h>
17 #include <lr_scheduler_constant.h>
18 #include <lr_scheduler_exponential.h>
19 #include <node_exporter.h>
20 #include <optimizer_wrapped.h>
25 * @brief Optimizer wrapped creator with constructor for optimizer
27 std::unique_ptr<OptimizerWrapped>
28 createOptimizerWrapped(const ml::train::OptimizerType &type,
29 const std::vector<std::string> &properties) {
30 auto &ac = nntrainer::AppContext::Global();
31 return createOptimizerWrapped(ac.createObject<OptimizerCore>(type),
36 * @brief Optimizer wrapped creator with constructor for optimizer
38 std::unique_ptr<OptimizerWrapped>
39 createOptimizerWrapped(const std::string &type,
40 const std::vector<std::string> &properties) {
41 auto &ac = nntrainer::AppContext::Global();
42 return createOptimizerWrapped(ac.createObject<OptimizerCore>(type),
47 * @brief Optimizer wrapped creator with constructor for optimizer
49 std::unique_ptr<OptimizerWrapped>
50 createOptimizerWrapped(std::unique_ptr<OptimizerCore> &&opt,
51 const std::vector<std::string> &properties) {
52 auto opt_wrapped = std::make_unique<OptimizerWrapped>(std::move(opt));
54 opt_wrapped->setProperty(properties);
58 OptimizerWrapped::OptimizerWrapped(std::unique_ptr<OptimizerCore> &&opt) :
59 optimizer(std::move(opt)),
61 props(props::LearningRate(), props::DecayRate(), props::DecaySteps()) {
62 std::get<props::LearningRate>(props).set(optimizer->getDefaultLearningRate());
65 const std::string OptimizerWrapped::getType() const {
66 return optimizer->getType();
69 void OptimizerWrapped::setProperty(const std::vector<std::string> &values) {
70 auto remain_props = loadProperties(values, props);
71 optimizer->setProperty(remain_props);
74 double OptimizerWrapped::getLearningRate(size_t iteration) {
75 return lr_sched->getLearningRate(iteration);
78 void OptimizerWrapped::applyGradient(RunOptimizerContext &context) {
79 optimizer->applyGradient(context);
82 void OptimizerWrapped::exportTo(Exporter &exporter,
83 const ExportMethods &method) const {
84 optimizer->exportTo(exporter, method);
85 lr_sched->exportTo(exporter, method);
88 void OptimizerWrapped::finalize() {
89 auto const &props_lr = std::get<props::LearningRate>(props);
90 auto const &props_dr = std::get<props::DecayRate>(props);
91 auto const &props_ds = std::get<props::DecaySteps>(props);
93 /** if lr_sched already set and property not empty, error */
94 bool props_empty = props_lr.empty() & props_dr.empty() & props_ds.empty();
96 NNTR_THROW_IF(props_empty && !lr_sched, std::invalid_argument)
97 << "Learning rate scheduler not set for the optimizer " << getType();
98 NNTR_THROW_IF(!props_empty && lr_sched, std::invalid_argument)
99 << "Multiple learning rate schedulers set for the optimizer " << getType();
101 /** if lr_sched not set, make lr_sched from properties */
103 if (!props_dr.empty() || !props_ds.empty()) {
104 lr_sched = std::make_unique<ExponentialLearningRateScheduler>();
105 if (!props_dr.empty())
106 lr_sched->setProperty({"decay_rate=" + std::to_string(props_dr.get())});
107 if (!props_ds.empty())
108 lr_sched->setProperty(
109 {"decay_steps=" + std::to_string(props_ds.get())});
111 lr_sched = std::make_unique<ConstantLearningRateScheduler>();
114 if (!props_lr.empty())
115 lr_sched->setProperty(
116 {"learning_rate=" + std::to_string(props_lr.get())});
119 lr_sched->finalize();
120 optimizer->finalize();
123 void OptimizerWrapped::read(std::ifstream &file) { optimizer->read(file); }
125 void OptimizerWrapped::save(std::ofstream &file) { optimizer->save(file); }
127 std::vector<TensorDim>
128 OptimizerWrapped::getOptimizerVariableDim(const TensorDim &dim) {
129 return optimizer->getOptimizerVariableDim(dim);
132 void OptimizerWrapped::setLearningRateScheduler(
133 std::unique_ptr<nntrainer::LearningRateScheduler> &&lrs) {
134 lr_sched = std::move(lrs);
137 nntrainer::LearningRateScheduler *OptimizerWrapped::setLearningRateScheduler() {
138 return lr_sched.get();
141 } // namespace nntrainer