cleanup the optimizer interface and existing implementations.
Signed-off-by: Parichay Kapoor <pk.kapoor@samsung.com>
# optimizer headers
/usr/include/nntrainer/optimizer_context.h
/usr/include/nntrainer/optimizer_devel.h
-/usr/include/nntrainer/optimizer_impl.h
/usr/include/nntrainer/lr_scheduler.h
# pkg config and static binary
/usr/lib/*/pkgconfig/nntrainer.pc
$(NNTRAINER_ROOT)/nntrainer/graph/connection.cpp \
$(NNTRAINER_ROOT)/nntrainer/optimizers/optimizer_context.cpp \
$(NNTRAINER_ROOT)/nntrainer/optimizers/optimizer_devel.cpp \
- $(NNTRAINER_ROOT)/nntrainer/optimizers/optimizer_impl.cpp \
+ $(NNTRAINER_ROOT)/nntrainer/optimizers/optimizer_wrapped.cpp \
$(NNTRAINER_ROOT)/nntrainer/optimizers/adam.cpp \
$(NNTRAINER_ROOT)/nntrainer/optimizers/sgd.cpp \
$(NNTRAINER_ROOT)/nntrainer/optimizers/lr_scheduler_constant.cpp \
#include <vector>
#include <dynamic_training_optimization.h>
+#include <optimizer_wrapped.h>
#include <tensor.h>
#include <util_func.h>
#include <weight.h>
bool DynamicTrainingOptimization::checkIfApply(
const std::vector<Weight> &weights, const std::shared_ptr<Var_Grad> &input,
const std::shared_ptr<Var_Grad> &output,
- const std::shared_ptr<Optimizer> &opt, int iteration) {
+ const std::shared_ptr<OptimizerWrapped> &opt, int iteration) {
if (!enabled || iteration < skip_n_iterations)
return true;
bool DynamicTrainingOptimization::checkIfApply(
const Weight &weight, const std::shared_ptr<Var_Grad> &input,
const std::shared_ptr<Var_Grad> &output,
- const std::shared_ptr<Optimizer> &opt, int iteration) {
+ const std::shared_ptr<OptimizerWrapped> &opt, int iteration) {
if (iteration < skip_n_iterations)
return true;
#include <vector>
#include <layer_devel.h>
-#include <optimizer_devel.h>
#include <tensor.h>
namespace nntrainer {
class Weight;
class Var_Grad;
+class OptimizerWrapped;
/**
* @class DynamicTraining Optimization
bool checkIfApply(const std::vector<Weight> &weights,
const std::shared_ptr<Var_Grad> &input,
const std::shared_ptr<Var_Grad> &output,
- const std::shared_ptr<Optimizer> &opt, int iteration);
+ const std::shared_ptr<OptimizerWrapped> &opt,
+ int iteration);
/**
* @brief Check if the given weight can skip updating
bool checkIfApply(const Weight &weight,
const std::shared_ptr<Var_Grad> &input,
const std::shared_ptr<Var_Grad> &output,
- const std::shared_ptr<Optimizer> &opt, int iteration);
+ const std::shared_ptr<OptimizerWrapped> &opt,
+ int iteration);
/**< Different types of reduce operations */
static const std::string dft_opt_max;
Adam::Adam() : adam_props(PropsB1(), PropsB2(), PropsEpsilon(), TorchRef()) {
/** default properties */
- setProperty({"learning_rate=0.001"});
- auto &[b1, b2, eps, torch_ref] = adam_props;
+ auto &[b1, b2, eps, torch_ref] = adam_props;
b1.set(0.9f);
b2.set(0.999f);
eps.set(1.0e-7f);
void Adam::exportTo(Exporter &exporter, const ExportMethods &method) const {
exporter.saveResult(adam_props, method, this);
- OptimizerImpl::exportTo(exporter, method);
+ Optimizer::exportTo(exporter, method);
}
void Adam::setProperty(const std::vector<std::string> &values) {
auto left = loadProperties(values, adam_props);
- OptimizerImpl::setProperty(left);
+ Optimizer::setProperty(left);
}
double Adam::getUpdatedLearningRate(unsigned int iteration, double ll) const {
#include <tuple>
#include <base_properties.h>
-#include <optimizer_impl.h>
+#include <optimizer_devel.h>
namespace nntrainer {
* @class Adam optimizer class
* @brief Adam optimizer
*/
-class Adam : public OptimizerImpl {
+class Adam : public Optimizer {
public:
/**
* @brief Construct a new Adam object
~Adam();
/**
+ * @copydoc Optimizer::getDefaultLearningRate()
+ *
+ */
+ double getDefaultLearningRate() const override { return 0.001; }
+
+ /**
* @copydoc applyGradient(RunOptimizerContext &context)
*/
void applyGradient(RunOptimizerContext &context) override;
optimizer_sources = [
'adam.cpp',
'optimizer_devel.cpp',
- 'optimizer_impl.cpp',
'sgd.cpp',
'optimizer_context.cpp',
'lr_scheduler_constant.cpp',
optimizer_headers = [
'optimizer_devel.h',
- 'optimizer_impl.h',
'optimizer_context.h',
'lr_scheduler.h'
]
* @brief get Learning Rate
* @retval Learning rate in float
*/
- virtual float getLearningRate() const { return getLearningRate(0); }
-
- /**
- * @brief get Learning Rate for the given iteration
- * @param[in] iteration Iteration for the learning rate
- * @retval Learning rate in double
- * @detail the return value of this function and getLearningRate() must
- * match for iteration == 0.
- */
- virtual double getLearningRate(size_t iteration) const = 0;
-
+ virtual double getDefaultLearningRate() const = 0;
/**
* @brief apply gradient to weight
* @param[in] context Optimizer context
+++ /dev/null
-// SPDX-License-Identifier: Apache-2.0
-/**
- * Copyright (C) 2020 Parichay Kapoor <pk.kapoor@samsung.com>
- *
- * @file optimizer_impl.cpp
- * @date 18 March 2021
- * @brief This is base Optimizer implementation class
- * @see https://github.com/nnstreamer/nntrainer
- * @author Jijoong Moon <jijoong.moon@samsung.com>
- * @author Parichay Kapoor <pk.kapoor@samsung.com>
- * @bug No known bugs except for NYI items
- *
- */
-
-#include <cmath>
-#include <fstream>
-#include <iostream>
-
-#include <common_properties.h>
-#include <nntrainer_error.h>
-#include <nntrainer_log.h>
-#include <node_exporter.h>
-#include <optimizer_impl.h>
-#include <util_func.h>
-
-namespace nntrainer {
-
-OptimizerImpl::OptimizerImpl() :
- optimizer_impl_props(props::LearningRate(), props::DecayRate(),
- props::DecaySteps()) {}
-
-void OptimizerImpl::setProperty(const std::vector<std::string> &values) {
- auto left = loadProperties(values, optimizer_impl_props);
- NNTR_THROW_IF(left.size(), std::invalid_argument)
- << "[OptimizerImpl] There are unparsed properties";
-}
-
-void OptimizerImpl::exportTo(Exporter &exporter,
- const ExportMethods &method) const {
- exporter.saveResult(optimizer_impl_props, method, this);
-}
-
-double OptimizerImpl::getLearningRate(size_t iteration) const {
-
- auto &[float_lr, decay_rate, decay_steps] = optimizer_impl_props;
- double ll = float_lr;
-
- if (!decay_steps.empty() && !decay_rate.empty()) {
- ll = ll * pow(decay_rate, (iteration / (float)decay_steps));
- }
-
- return ll;
-}
-
-} // namespace nntrainer
+++ /dev/null
-// SPDX-License-Identifier: Apache-2.0
-/**
- * Copyright (C) 2020 Parichay Kapoor <pk.kapoor@samsung.com>
- *
- * @file optimizer_impl.h
- * @date 18 March 2021
- * @brief This is base Optimizer implementation class
- * @see https://github.com/nnstreamer/nntrainer
- * @author Jijoong Moon <jijoong.moon@samsung.com>
- * @author Parichay Kapoor <pk.kapoor@samsung.com>
- * @bug No known bugs except for NYI items
- *
- */
-
-#ifndef __OPTIMIZER_IMPL_H__
-#define __OPTIMIZER_IMPL_H__
-#ifdef __cplusplus
-
-#include <tuple>
-
-#include <common_properties.h>
-#include <optimizer_devel.h>
-
-namespace nntrainer {
-
-/**
- * @class Optimizer Base class for optimizers
- * @brief Basic implementation class for nntrainer supported optimizers
- */
-class OptimizerImpl : public Optimizer {
-
-public:
- /**
- * @brief Construct a new Optimizer Impl object
- *
- */
- OptimizerImpl();
-
- /**
- * @brief copy constructor
- * @param[in] rhs OptimizerImpl to be copied
- */
- OptimizerImpl(const OptimizerImpl &rhs) = default;
-
- /**
- * @brief copy assignment operator
- * @param[in] rhs OptimizerImpl to be copied
- */
- OptimizerImpl &operator=(const OptimizerImpl &rhs) = default;
-
- /**
- * @brief Move constructor operator.
- * @param[in] rhs OptimizerImpl to be moved
- */
- OptimizerImpl(OptimizerImpl &&rhs) noexcept = default;
-
- /**
- * @brief Move assignment operator.
- * @parma[in] rhs OptimizerImpl to be moved.
- */
- OptimizerImpl &operator=(OptimizerImpl &&rhs) noexcept = default;
-
- /**
- * @brief get Learning Rate for the given iteration
- * @param[in] iteration Iteration for the learning rate
- * @retval Learning rate
- */
- double getLearningRate(size_t iteration) const override;
-
- /**
- * @copydoc Optimizer::setProperty(const std::vector<std::string> &values)
- */
- void setProperty(const std::vector<std::string> &values) override;
-
- /**
- * @copydoc Optimizer::exportTo(Exporter &exporter, const ExportMethods&
- * method)
- */
- void exportTo(Exporter &exporter, const ExportMethods &method) const override;
-
- /**
- * @brief Get dimension of extra variables if the optimizer needs any.
- * @param dim Dimension of tensor to be added as a optimizer variable
- * @return Vector of dimensions
- */
- virtual std::vector<TensorDim>
- getOptimizerVariableDim(const TensorDim &dim) override {
- return {};
- }
-
-protected:
- std::tuple<props::LearningRate, props::DecayRate, props::DecaySteps>
- optimizer_impl_props;
-};
-
-} /* namespace nntrainer */
-
-#endif /* __cplusplus */
-#endif /* __OPTIMIZER_IMPL_H__ */
OptimizerWrapped::OptimizerWrapped(std::unique_ptr<OptimizerCore> &&opt) :
optimizer(std::move(opt)),
lr_sched(),
- props(props::LearningRate(), props::DecayRate(), props::DecaySteps()) {}
+ props(props::LearningRate(), props::DecayRate(), props::DecaySteps()) {
+ std::get<props::LearningRate>(props).set(optimizer->getDefaultLearningRate());
+}
const std::string OptimizerWrapped::getType() const {
return optimizer->getType();
void OptimizerWrapped::setProperty(const std::vector<std::string> &values) {
auto remain_props = loadProperties(values, props);
- // TODO: update to remain_props
- optimizer->setProperty(values);
- // optimizer->setProperty(remain_props);
+ optimizer->setProperty(remain_props);
}
double OptimizerWrapped::getLearningRate(size_t iteration) {
#include <string>
#include <vector>
+#include <common_properties.h>
#include <lr_scheduler.h>
#include <optimizer.h>
#include <optimizer_devel.h>
namespace nntrainer {
-namespace props {
-class LearningRate;
-class DecaySteps;
-class DecayRate;
-} // namespace props
-
-/** TODO: change to nntrainer::Optimizer */
using OptimizerCore = nntrainer::Optimizer;
/**
PluggedOptimizer &operator=(PluggedOptimizer &&rhs) = default;
/**
- * @copydoc OptimizerDevel::getLearningRate()
+ * @copydoc Optimizer::getDefaultLearningRate()
*
*/
- float getLearningRate() const override {
- return optimizer_devel->getLearningRate();
+ double getDefaultLearningRate() const override {
+ return optimizer_devel->getDefaultLearningRate();
}
-
- /**
- * @brief get Learning Rate for the given iteration
- * @param[in] iteration Iteration for the learning rate
- * @retval Learning rate in double
- * @detail the return value of this function and getLearningRate() must
- * match for iteration == 0.
- */
- double getLearningRate(size_t iteration) const override {
- return optimizer_devel->getLearningRate(iteration);
- }
-
/**
* @brief apply gradient to weight
* @param[in] context Optimizer context
namespace nntrainer {
-SGD::SGD() { setProperty({"learning_rate=0.0001"}); }
-
void SGD::applyGradient(RunOptimizerContext &context) {
context.applyGradient(context.getLearningRate());
}
#define __SGD_H__
#ifdef __cplusplus
-#include <optimizer_impl.h>
+#include <optimizer_devel.h>
namespace nntrainer {
* @class SGD optimizer class
* @brief Stochastic Gradient Descent optimizer class
*/
-class SGD : public OptimizerImpl {
+class SGD : public Optimizer {
public:
/**
* @brief Construct a new SGD object
*
*/
- SGD();
+ SGD() {}
+
+ /**
+ * @copydoc Optimizer::getDefaultLearningRate()
+ *
+ */
+ double getDefaultLearningRate() const override { return 0.0001; }
/**
* @copydoc applyGradient(RunOptimizerContext &context)
*/
const std::string getType() const { return SGD::type; }
+ /**
+ * @copydoc Optimizer::getOptimizerVariableDim(const TensorDim &dim)
+ */
+ std::vector<TensorDim>
+ getOptimizerVariableDim(const TensorDim &dim) override {
+ return {};
+ }
+
inline static const std::string type = "sgd";
};
} /* namespace nntrainer */
# optimizer headers
%{_includedir}/nntrainer/optimizer_context.h
%{_includedir}/nntrainer/optimizer_devel.h
-%{_includedir}/nntrainer/optimizer_impl.h
%{_includedir}/nntrainer/lr_scheduler.h
%{_libdir}/pkgconfig/nntrainer.pc
# update this to enable external applications
/** Full custom optimizer example which overrides all functions */
const std::string getType() const override { return "identity_optimizer"; }
- float getLearningRate() const override { return 1.0f; }
-
- double getLearningRate(size_t iteration) const override { return 1.0f; }
+ double getDefaultLearningRate() const override { return 1.0; }
void setProperty(const std::vector<std::string> &values) override {}
/** Minimal custom optimizer example which define only necessary functions */
const std::string getType() const override { return "identity_optimizer"; }
- double getLearningRate(size_t iteration) const override { return 1.0f; }
+ double getDefaultLearningRate() const override { return 1.0; }
std::vector<nntrainer::TensorDim>
getOptimizerVariableDim(const nntrainer::TensorDim &dim) override {