#include <neuralnet.h>
#include <nntrainer.h>
#include <nntrainer_log.h>
-#include <optimizer_internal.h>
#include <string>
#include <unordered_map>
virtual const std::string getType() const = 0;
/**
- * @brief get Learning Rate
- * @retval Learning rate
+ * @brief Default allowed properties
+ * Available for all optimizers
+ * - learning_rate : float
+ *
+ * Available for SGD and Adam optimizers
+ * - decay_rate : float,
+ * - decay_steps : float,
+ *
+ * Available for Adam optimizer
+ * - beta1 : float,
+ * - beta2 : float,
+ * - epsilon : float,
*/
- virtual float getLearningRate() = 0;
-
- /**
- * @brief get Decay Rate for learning rate decay
- * @retval decay rate
- */
- virtual float getDecayRate() = 0;
-
- /**
- * @brief get Decay Steps for learning rate decay
- * @retval decay steps
- */
- virtual float getDecaySteps() = 0;
/**
* @brief set Optimizer Parameters
* @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
*/
virtual int setProperty(std::vector<std::string> values) = 0;
-
- /**
- * @brief Property Enumeration
- * learning_rate : float ,
- * decay_rate : float,
- * decay_steps : float,
- * beta1 : float,
- * beta2 : float,
- * epsilon : float,
- */
- enum class PropertyType {
- learning_rate = 0,
- decay_rate = 1,
- decay_steps = 2,
- beta1 = 3,
- beta2 = 4,
- epsilon = 5,
- continue_train = 6,
- unknown = 7,
- };
-
- /**
- * @brief setProperty by PropertyType
- * @note By passing empty string, this can validate if @a type is valid
- * @param[in] type property type to be passed
- * @param[in] value value to be passed, if empty string is passed, do nothing
- * but throws error when @a type is invalid
- * @exception exception::not_supported when property type is not valid for
- * the particular layer
- * @exception std::invalid_argument invalid argument
- */
- virtual void setProperty(const PropertyType type,
- const std::string &value = "") = 0;
-
- /**
- * @brief validate the optimizer
- */
- virtual void checkValidation() = 0;
};
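For context, a minimal sketch of how the properties documented above are consumed through the string-based setProperty() interface. This is an illustration, not part of the patch; it assumes <adam.h> from this tree is on the include path and that the key names are exactly those listed in the doc comment.

#include <adam.h>
#include <string>
#include <vector>

int main() {
  nntrainer::Adam adam; // defaults: lr=0.001, beta1=0.9, beta2=0.999, ep=1e-7
  // keys are the ones documented above; values are parsed as floats
  int status = adam.setProperty({"learning_rate=0.01", "decay_rate=0.96",
                                 "decay_steps=1000", "epsilon=1e-7"});
  return status; // 0 (#ML_ERROR_NONE) on success
}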
/**
/usr/include/nntrainer/neuralnet.h
/usr/include/nntrainer/tensor.h
/usr/include/nntrainer/tensor_dim.h
-/usr/include/nntrainer/optimizer_internal.h
+/usr/include/nntrainer/optimizer_devel.h
+/usr/include/nntrainer/optimizer_impl.h
/usr/include/nntrainer/optimizer_factory.h
/usr/include/nntrainer/nntrainer-api-common.h
/usr/include/nntrainer/nntrainer.h
$(NNTRAINER_ROOT)/nntrainer/layers/rnn.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/acti_func.cpp \
$(NNTRAINER_ROOT)/nntrainer/graph/network_graph.cpp \
- $(NNTRAINER_ROOT)/nntrainer/optimizers/optimizer.cpp \
+ $(NNTRAINER_ROOT)/nntrainer/optimizers/optimizer_devel.cpp \
+ $(NNTRAINER_ROOT)/nntrainer/optimizers/optimizer_impl.cpp \
$(NNTRAINER_ROOT)/nntrainer/optimizers/adam.cpp \
$(NNTRAINER_ROOT)/nntrainer/optimizers/sgd.cpp \
$(NNTRAINER_ROOT)/nntrainer/optimizers/optimizer_factory.cpp \
/**
* Copyright (C) 2020 Parichay Kapoor <pk.kapoor@samsung.com>
*
- * @file optimizer_factory.cpp
+ * @file databuffer_factory.cpp
* @date 11 October 2020
* @see https://github.com/nnstreamer/nntrainer
* @author Parichay Kapoor <pk.kapoor@samsung.com>
#include <lazy_tensor.h>
#include <nntrainer_error.h>
#include <nntrainer_log.h>
-#include <optimizer_internal.h>
#include <parse_util.h>
#include <tensor.h>
#include <util_func.h>
#include <layer_internal.h>
#include <nntrainer_error.h>
#include <nntrainer_log.h>
-#include <optimizer_internal.h>
#include <parse_util.h>
#include <util_func.h>
#include <acti_func.h>
#include <layer.h>
#include <manager.h>
-#include <optimizer_internal.h>
+#include <optimizer_devel.h>
#include <tensor.h>
#include <tensor_dim.h>
#include <weight.h>
#include <nntrainer_log.h>
#include <optimizer_factory.h>
#include <parse_util.h>
+#include <sgd.h>
#include <util_func.h>
#if defined(ENABLE_NNSTREAMER_BACKBONE)
ini, "Model:Learning_rate",
std::to_string(model.opt->getLearningRate()).c_str()))});
- optimizer_prop.push_back(
- {"decay_steps=" + std::string(iniparser_getstring(
- ini, "Model:Decay_steps",
- std::to_string(model.opt->getDecaySteps()).c_str()))});
- optimizer_prop.push_back(
- {"decay_rate=" + std::string(iniparser_getstring(
- ini, "Model:Decay_rate",
- std::to_string(model.opt->getDecayRate()).c_str()))});
-
- if (model.opt->getType() == "adam") {
- std::shared_ptr<Adam> opt_adam = std::static_pointer_cast<Adam>(model.opt);
+  // TODO: create an optimizer section in the INI
+ if (model.opt->getType() == SGD::type || model.opt->getType() == Adam::type) {
+ std::shared_ptr<OptimizerImpl> opt_impl =
+ std::static_pointer_cast<OptimizerImpl>(model.opt);
optimizer_prop.push_back(
- {"beta1=" +
- std::string(iniparser_getstring(
- ini, "Model:Beta1", std::to_string(opt_adam->getBeta1()).c_str()))});
+ {"decay_steps=" + std::string(iniparser_getstring(
+ ini, "Model:Decay_steps",
+ std::to_string(opt_impl->getDecaySteps()).c_str()))});
optimizer_prop.push_back(
- {"beta2=" +
- std::string(iniparser_getstring(
- ini, "Model:Beta2", std::to_string(opt_adam->getBeta2()).c_str()))});
- optimizer_prop.push_back(
- {"epsilon=" + std::string(iniparser_getstring(
- ini, "Model:Epsilon",
- std::to_string(opt_adam->getEpsilon()).c_str()))});
+ {"decay_rate=" + std::string(iniparser_getstring(
+ ini, "Model:Decay_rate",
+ std::to_string(opt_impl->getDecayRate()).c_str()))});
+
+ if (opt_impl->getType() == "adam") {
+ std::shared_ptr<Adam> opt_adam = std::static_pointer_cast<Adam>(opt_impl);
+
+ optimizer_prop.push_back(
+ {"beta1=" +
+ std::string(iniparser_getstring(
+ ini, "Model:Beta1", std::to_string(opt_adam->getBeta1()).c_str()))});
+ optimizer_prop.push_back(
+ {"beta2=" +
+ std::string(iniparser_getstring(
+ ini, "Model:Beta2", std::to_string(opt_adam->getBeta2()).c_str()))});
+ optimizer_prop.push_back(
+ {"epsilon=" + std::string(iniparser_getstring(
+ ini, "Model:Epsilon",
+ std::to_string(opt_adam->getEpsilon()).c_str()))});
+ }
}
status = model.opt->setProperty(optimizer_prop);
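The net effect of the hunk above is a plain key=value vector; each INI key falls back to the optimizer's current value, so absent keys leave settings unchanged. With illustrative values, the vector handed to setProperty() looks like:

std::vector<std::string> optimizer_prop = {
    "learning_rate=0.001",                       // all optimizers
    "decay_steps=1000", "decay_rate=0.96",       // SGD and Adam only
    "beta1=0.9", "beta2=0.999", "epsilon=1e-07", // Adam only
};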
#include <manager.h>
#include <ml-api-common.h>
#include <network_graph.h>
-#include <optimizer_internal.h>
+#include <optimizer_devel.h>
#include <pooling2d_layer.h>
#include <tensor.h>
}
}
-double Adam::getLearningRate(int iteration) {
- double ll = Optimizer::getLearningRate(iteration);
+double Adam::getLearningRate(size_t iteration) const {
+ double ll = OptimizerImpl::getLearningRate(iteration);
std::function<float(double)> biasCorrection = [&](float f) {
return 1.0f - pow(f, iteration + 1);
status = setDouble(epsilon, value);
break;
default:
- Optimizer::setProperty(type, value);
+ OptimizerImpl::setProperty(type, value);
status = ML_ERROR_NONE;
break;
}
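For reference, the bias correction computed by the getLearningRate() hunk above is the standard Adam step size lr_t = lr * sqrt(1 - beta2^(t+1)) / (1 - beta1^(t+1)). A standalone sketch follows; the constants are illustrative, and the exact combination of the two correction terms is assumed to match Adam's update rule.

#include <cmath>
#include <cstdio>

int main() {
  const double lr = 0.001, beta1 = 0.9, beta2 = 0.999;
  for (size_t iteration : {0, 9, 99}) {
    // mirrors the biasCorrection lambda above: 1 - beta^(iteration + 1)
    double c1 = 1.0 - std::pow(beta1, iteration + 1);
    double c2 = 1.0 - std::pow(beta2, iteration + 1);
    std::printf("iteration %zu: lr_t = %.6f\n", iteration,
                lr * std::sqrt(c2) / c1);
  }
  return 0;
}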
#define __ADAM_H__
#ifdef __cplusplus
-#include <optimizer_internal.h>
+#include <optimizer_impl.h>
namespace nntrainer {
* @class Adam optimizer class
* @brief Adam optimizer
*/
-class Adam : public Optimizer {
+class Adam : public OptimizerImpl {
public:
/**
* @brief Constructor of Optimizer Class
template <typename... Args>
Adam(float lr = 0.001f, double b1 = 0.9f, double b2 = 0.999f,
double ep = 1.0e-7f, Args... args) :
- Optimizer(lr, args...),
+ OptimizerImpl(lr, args...),
beta1(b1),
beta2(b2),
epsilon(ep) {}
/**
   * @copydoc getLearningRate(size_t iteration)
*/
- double getLearningRate(int iteration);
+ double getLearningRate(size_t iteration) const;
/**
* @copydoc setProperty(const PropertyType type,
optimizer_sources = [
'adam.cpp',
- 'optimizer.cpp',
+ 'optimizer_devel.cpp',
+ 'optimizer_impl.cpp',
'optimizer_factory.cpp',
'sgd.cpp'
]
optimizer_headers = [
'optimizer_factory.h',
- 'optimizer_internal.h'
+ 'optimizer_devel.h',
+ 'optimizer_impl.h'
]
foreach s : optimizer_sources
+++ /dev/null
-/**
- * Copyright (C) 2020 Samsung Electronics Co., Ltd. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- *
- * @file optimizer.cpp
- * @date 08 April 2020
- * @brief This is Implementation of Optimizer class
- * @see https://github.com/nnstreamer/nntrainer
- * @author Jijoong Moon <jijoong.moon@samsung.com>
- * @bug No known bugs except for NYI items
- *
- */
-
-#include <cmath>
-#include <fstream>
-#include <iostream>
-
-#include <lazy_tensor.h>
-#include <nntrainer_error.h>
-#include <nntrainer_log.h>
-#include <optimizer_internal.h>
-#include <parse_util.h>
-#include <util_func.h>
-
-namespace nntrainer {
-
-int Optimizer::initialize() { return ML_ERROR_NONE; }
-
-double Optimizer::getLearningRate(int iteration) {
- double ll = learning_rate;
-
- if (decay_steps != 0) {
- ll = ll * pow(decay_rate, (iteration / (float)decay_steps));
- }
-
- return ll;
-}
-
-void Optimizer::applyGradients(std::vector<Weight> &weight_list,
- int iteration) {
-
- if (weight_list.empty())
- return;
-
- double ll = getLearningRate(iteration);
-
- for (auto &weight : weight_list) {
- if (!weight.getTrainable())
- continue;
-
- /** calculate regularization gradient before applying the gradient */
- weight.calcRegularizationGradient();
-
- applyGradient(weight, ll, iteration);
- }
-}
-
-int Optimizer::setProperty(std::vector<std::string> values) {
- int status = ML_ERROR_NONE;
-
- for (unsigned int i = 0; i < values.size(); ++i) {
- std::string key;
- std::string value;
-
- status = getKeyValue(values[i], key, value);
- NN_RETURN_STATUS();
-
- unsigned int type = parseOptProperty(key);
-
- if (value.empty()) {
- return ML_ERROR_INVALID_PARAMETER;
- }
-
- try {
- /// @note this calls derived setProperty if available
- setProperty(static_cast<PropertyType>(type), value);
- } catch (...) {
- return ML_ERROR_INVALID_PARAMETER;
- }
- }
-
- try {
- checkValidation();
- } catch (...) {
- return ML_ERROR_INVALID_PARAMETER;
- }
- return status;
-}
-
-void Optimizer::checkValidation() {
- if (learning_rate <= 0.0f)
- throw std::invalid_argument("Learning rate must be positive");
-}
-
-void Optimizer::setProperty(const PropertyType type, const std::string &value) {
- int status = ML_ERROR_NONE;
-
- switch (type) {
- case PropertyType::learning_rate:
- status = setFloat(learning_rate, value);
- break;
- case PropertyType::decay_steps:
- status = setUint(decay_steps, value);
- break;
- case PropertyType::decay_rate:
- status = setFloat(decay_rate, value);
- break;
- case PropertyType::continue_train:
- status = setBoolean(continue_train, value);
- break;
- default:
- ml_loge("Error: Unknown Optimizer Property Key");
- status = ML_ERROR_INVALID_PARAMETER;
- break;
- }
-
- throw_status(status);
-}
-
-void Optimizer::read(std::ifstream &file) {
- std::string loaded_type = readString(file);
-
- if (loaded_type != getType()) {
- throw std::runtime_error(
- "[Optimizer::read] written type unmatches with realized type");
- }
-}
-
-void Optimizer::save(std::ofstream &file) { writeString(file, getType()); }
-} // namespace nntrainer
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Parichay Kapoor <pk.kapoor@samsung.com>
+ *
+ * @file optimizer_devel.cpp
+ * @date 08 April 2020
+ * @brief This is Optimizer internal interface class
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ */
+
+#include <fstream>
+#include <iostream>
+
+#include <nntrainer_log.h>
+#include <optimizer_devel.h>
+#include <parse_util.h>
+#include <util_func.h>
+
+namespace nntrainer {
+
+void Optimizer::applyGradients(std::vector<Weight> &weight_list,
+ int iteration) {
+
+ if (weight_list.empty())
+ return;
+
+ double ll = getLearningRate(iteration);
+
+ for (auto &weight : weight_list) {
+ if (!weight.getTrainable())
+ continue;
+
+ /** calculate regularization gradient before applying the gradient */
+ weight.calcRegularizationGradient();
+
+ applyGradient(weight, ll, iteration);
+ }
+}
+
+int Optimizer::setProperty(std::vector<std::string> values) {
+ int status = ML_ERROR_NONE;
+
+ for (unsigned int i = 0; i < values.size(); ++i) {
+ std::string key;
+ std::string value;
+
+ status = getKeyValue(values[i], key, value);
+ NN_RETURN_STATUS();
+
+ unsigned int type = parseOptProperty(key);
+
+ if (value.empty()) {
+ return ML_ERROR_INVALID_PARAMETER;
+ }
+
+ try {
+ /// @note this calls derived setProperty if available
+ setProperty(static_cast<PropertyType>(type), value);
+ } catch (...) {
+ return ML_ERROR_INVALID_PARAMETER;
+ }
+ }
+
+ try {
+ checkValidation();
+ } catch (...) {
+ return ML_ERROR_INVALID_PARAMETER;
+ }
+ return status;
+}
+
+void Optimizer::checkValidation() const {
+ if (getLearningRate() <= 0.0f)
+ throw std::invalid_argument("Learning rate must be positive");
+}
+
+void Optimizer::setProperty(const PropertyType type, const std::string &value) {
+ int status = ML_ERROR_NONE;
+
+ switch (type) {
+ default:
+ ml_loge("Error: Unknown Optimizer Property Key");
+ status = ML_ERROR_INVALID_PARAMETER;
+ break;
+ }
+
+ throw_status(status);
+}
+
+void Optimizer::read(std::ifstream &file) {
+ std::string loaded_type = readString(file);
+
+ if (loaded_type != getType()) {
+ throw std::runtime_error(
+ "[Optimizer::read] written type unmatches with set type");
+ }
+}
+
+void Optimizer::save(std::ofstream &file) { writeString(file, getType()); }
+
+} // namespace nntrainer
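A hedged sketch of the read()/save() round trip defined above. It assumes Adam relies on the base-class behavior shown here, i.e. only the type tag is serialized at this level; only then does the final read() succeed.

#include <adam.h>
#include <fstream>

int main() {
  nntrainer::Adam adam;
  {
    std::ofstream out("opt.bin", std::ios::binary);
    adam.save(out); // writes the type tag ("adam") via writeString()
  }
  std::ifstream in("opt.bin", std::ios::binary);
  adam.read(in); // ok: stored tag matches getType(); a mismatch throws
  return 0;
}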
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Parichay Kapoor <pk.kapoor@samsung.com>
+ *
+ * @file optimizer_devel.h
+ * @date 08 April 2020
+ * @brief This is Optimizer internal interface class
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ */
+
+#ifndef __OPTIMIZER_DEVEL_H__
+#define __OPTIMIZER_DEVEL_H__
+#ifdef __cplusplus
+
+#include <memory>
+
+#include <optimizer.h>
+#include <tensor.h>
+#include <weight.h>
+
+namespace nntrainer {
+
+/**
+ * @class Optimizer Base class for optimizers
+ * @brief Base class for all optimizers
+ */
+class Optimizer : public ml::train::Optimizer {
+
+public:
+ /**
+ * @brief Default Constructor of Optimizer Class
+ */
+ // Optimizer() = default
+
+ /**
+ * @brief get Learning Rate
+ * @retval Learning rate in float
+ */
+ virtual float getLearningRate() const { return getLearningRate(0); };
+
+ /**
+ * @brief get Learning Rate for the given iteration
+ * @param[in] iteration Iteration for the learning rate
+ * @retval Learning rate in double
+   * @details the return value of this function and getLearningRate() must
+ * match for iteration == 0.
+ */
+ virtual double getLearningRate(size_t iteration) const = 0;
+
+ /**
+ * @brief apply gradient to weight_list
+ * @param[in] params Weight list
+ * @param[in] iteration nth epoch number
+ */
+  virtual void applyGradients(std::vector<Weight> &params, int iteration);
+
+ /**
+ * @brief set Optimizer Parameters
+ * @param[in] values Optimizer Parameter list
+ * @retval #ML_ERROR_NONE Successful.
+ * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
+ */
+ virtual int setProperty(std::vector<std::string> values);
+
+ /**
+ * @brief Default allowed properties
+ * Available for all optimizers
+ * - learning_rate : float
+ *
+ * Available for SGD and Adam optimizers
+ * - decay_rate : float,
+ * - decay_steps : float,
+ *
+ * Available for Adam optimizer
+ * - beta1 : float,
+ * - beta2 : float,
+ * - epsilon : float,
+ *
+ * @todo: convert to string
+ */
+ enum class PropertyType {
+ learning_rate = 0,
+ decay_rate = 1,
+ decay_steps = 2,
+ beta1 = 3,
+ beta2 = 4,
+ epsilon = 5,
+ continue_train = 6,
+ unknown = 7,
+ };
+
+ /**
+ * @brief setProperty by PropertyType
+ * @note By passing empty string, this can validate if @a type is valid
+ * @param[in] type property type to be passed
+ * @param[in] value value to be passed, if empty string is passed, do nothing
+ * but throws error when @a type is invalid
+ * @exception exception::not_supported when property type is not valid for
+ * the particular layer
+ * @exception std::invalid_argument invalid argument
+ */
+ virtual void setProperty(const PropertyType type,
+ const std::string &value = "") = 0;
+
+ /**
+ * @brief initialize optimizer.
+ * @retval #ML_ERROR_NONE Successful.
+ * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
+ */
+ virtual int initialize() = 0;
+
+ /**
+   * @brief Read Training optimizer parameters from file
+ * @param[in] file input stream file
+ */
+ virtual void read(std::ifstream &file);
+
+ /**
+   * @brief Save Training optimizer parameters to file
+ * @param[in] file output stream file
+ */
+ virtual void save(std::ofstream &file);
+
+ /**
+ * @brief validate the optimizer
+ */
+ virtual void checkValidation() const;
+
+ /**
+   * @brief Add extra variables per weight if the optimizer needs any.
+   * @param[in] params Weight list
+   */
+  virtual void addOptimizerVariable(std::vector<Weight> &params) = 0;
+
+ /**
+ * @brief get Optimizer Type
+ * @retval Optimizer type
+ */
+ virtual const std::string getType() const = 0;
+
+private:
+ /**
+ * @brief apply gradient to the given weight
+ * @param[in] weight Weight and gradient set to be updated
+   * @param[in] updated_lr learning rate evaluated for the current iteration
+ * @param[in] iteration nth epoch number
+ * @note weight which is called upon can be assumed to be trainable
+ */
+ virtual void applyGradient(Weight &weight, double updated_lr,
+ int iteration) = 0;
+};
+
+} /* namespace nntrainer */
+
+#endif /* __cplusplus */
+#endif /* __OPTIMIZER_DEVEL_H__ */
#define __OPTIMIZER_FACTORY_H__
#ifdef __cplusplus
-#include <optimizer.h>
-#include <optimizer_internal.h>
+#include <optimizer_devel.h>
namespace nntrainer {
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Parichay Kapoor <pk.kapoor@samsung.com>
+ *
+ * @file optimizer_impl.cpp
+ * @date 18 March 2021
+ * @brief This is base Optimizer implementation class
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ */
+
+#include <fstream>
+#include <iostream>
+
+#include <cmath>
+#include <nntrainer_error.h>
+#include <nntrainer_log.h>
+#include <optimizer_impl.h>
+#include <parse_util.h>
+#include <util_func.h>
+
+namespace nntrainer {
+
+int OptimizerImpl::initialize() { return ML_ERROR_NONE; }
+
+void OptimizerImpl::setProperty(const PropertyType type,
+ const std::string &value) {
+ int status = ML_ERROR_NONE;
+
+ switch (type) {
+ case PropertyType::learning_rate:
+ status = setFloat(learning_rate, value);
+ break;
+ case PropertyType::decay_steps:
+ status = setUint(decay_steps, value);
+ break;
+ case PropertyType::decay_rate:
+ status = setFloat(decay_rate, value);
+ break;
+ case PropertyType::continue_train:
+ status = setBoolean(continue_train, value);
+ break;
+ default:
+ Optimizer::setProperty(type, value);
+ status = ML_ERROR_NONE;
+ break;
+ }
+
+ throw_status(status);
+}
+
+double OptimizerImpl::getLearningRate(size_t iteration) const {
+ double ll = learning_rate;
+
+ if (decay_steps != 0) {
+ ll = ll * pow(decay_rate, (iteration / (float)decay_steps));
+ }
+
+ return ll;
+}
+} // namespace nntrainer
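A standalone sketch of the exponential decay implemented above; with the illustrative values below, iteration 2000 gives 0.1 * 0.96^(2000/1000) ≈ 0.0922.

#include <cmath>
#include <cstdio>

// mirrors OptimizerImpl::getLearningRate() above (values illustrative)
double decayed_lr(double lr, float decay_rate, unsigned int decay_steps,
                  size_t iteration) {
  if (decay_steps != 0)
    lr *= std::pow(decay_rate, iteration / (float)decay_steps);
  return lr;
}

int main() {
  std::printf("%.4f\n", decayed_lr(0.1, 0.96f, 1000, 2000)); // prints 0.0922
  return 0;
}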
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Parichay Kapoor <pk.kapoor@samsung.com>
+ *
+ * @file optimizer_impl.h
+ * @date 18 March 2021
+ * @brief This is base Optimizer implementation class
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ */
+
+#ifndef __OPTIMIZER_IMPL_H__
+#define __OPTIMIZER_IMPL_H__
+#ifdef __cplusplus
+
+#include <optimizer_devel.h>
+
+namespace nntrainer {
+
+/**
+ * @class Optimizer Base class for optimizers
+ * @brief Basic implementation class for nntrainer supported optimizers
+ */
+class OptimizerImpl : public Optimizer {
+
+public:
+ /**
+ * @brief Default Constructor of Optimizer Class
+ */
+ OptimizerImpl(float lr, float decay_rate = 1.0f, unsigned int decay_steps = 0,
+                bool continue_train = false) :
+ Optimizer(),
+ learning_rate(lr),
+ decay_rate(decay_rate),
+ decay_steps(decay_steps),
+ continue_train(continue_train) {}
+
+ /**
+ * @brief copy constructor
+ * @param[in] rhs OptimizerImpl to be copied
+ */
+ OptimizerImpl(const OptimizerImpl &rhs) = default;
+
+ /**
+ * @brief copy assignment operator
+ * @param[in] rhs OptimizerImpl to be copied
+ */
+ OptimizerImpl &operator=(const OptimizerImpl &rhs) = default;
+
+ /**
+ * @brief Move constructor operator.
+ * @param[in] rhs OptimizerImpl to be moved
+ */
+ OptimizerImpl(OptimizerImpl &&rhs) noexcept = default;
+
+ /**
+ * @brief Move assignment operator.
+   * @param[in] rhs OptimizerImpl to be moved.
+ */
+ OptimizerImpl &operator=(OptimizerImpl &&rhs) = default;
+
+ /**
+ * @brief get Learning Rate
+ * @retval Learning rate in float
+ */
+ float getLearningRate() const { return learning_rate; };
+
+ /**
+ * @brief get Decay Rate for learning rate decay
+ * @retval decay rate
+ */
+ float getDecayRate() const { return decay_rate; };
+
+ /**
+ * @brief get Decay Steps for learning rate decay
+ * @retval decay steps
+ */
+ float getDecaySteps() const { return decay_steps; };
+
+ /**
+ * @brief get Learning Rate for the given iteration
+ * @param[in] iteration Iteration for the learning rate
+ * @retval Learning rate
+ */
+ double getLearningRate(size_t iteration) const;
+
+ /**
+ * @brief setProperty by PropertyType
+ * @note By passing empty string, this can validate if @a type is valid
+ * @param[in] type property type to be passed
+ * @param[in] value value to be passed, if empty string is passed, do nothing
+ * but throws error when @a type is invalid
+ * @exception exception::not_supported when property type is not valid for
+ * the particular layer
+ * @exception std::invalid_argument invalid argument
+ */
+ virtual void setProperty(const PropertyType type,
+ const std::string &value = "");
+
+ /**
+ * @brief initialize optimizer.
+ * @retval #ML_ERROR_NONE Successful.
+ * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
+ */
+ virtual int initialize();
+
+ /**
+ * @brief Add extra variables per weight if the optimizer needs any.
+ * @param[in] params Weight list
+   */
+  virtual void addOptimizerVariable(std::vector<Weight> &params) {}
+
+protected:
+ float learning_rate; /**< learning rate */
+  float decay_rate;         /**< decay rate for learning rate */
+  unsigned int decay_steps; /**< decay steps for learning rate */
+  bool continue_train; /**< Continue training with previous tensors for adam */
+
+private:
+ /**
+ * @brief apply gradient to the given weight
+ * @param[in] weight Weight and gradient set to be updated
+   * @param[in] updated_lr learning rate evaluated for the current iteration
+ * @param[in] iteration nth epoch number
+ * @note weight which is called upon can be assumed to be trainable
+ */
+ virtual void applyGradient(Weight &weight, double updated_lr,
+ int iteration) = 0;
+};
+
+} /* namespace nntrainer */
+
+#endif /* __cplusplus */
+#endif /* __OPTIMIZER_IMPL_H__ */
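What a concrete OptimizerImpl subclass still has to supply is applyGradient(); the SGD class later in this patch reduces it to w -= lr * g per weight. A standalone analogue on plain floats (illustrative only; this is not the tree's Weight API):

#include <cstdio>
#include <vector>

// stand-in for applyGradient(): one SGD step per parameter
void apply_gradient(std::vector<float> &weight, const std::vector<float> &grad,
                    double updated_lr) {
  for (size_t i = 0; i < weight.size(); ++i)
    weight[i] -= static_cast<float>(updated_lr) * grad[i];
}

int main() {
  std::vector<float> w = {1.0f, -2.0f};
  apply_gradient(w, {0.5f, 0.5f}, 0.1);
  std::printf("%f %f\n", w[0], w[1]); // 0.950000 -2.050000
  return 0;
}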
+++ /dev/null
-/**
- * Copyright (C) 2020 Samsung Electronics Co., Ltd. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * http://www.apache.org/licenses/LICENSE-2.0
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * @file optimizer_internal.h
- * @date 08 April 2020
- * @brief This is Optimizer classes of Neural Network
- * @see https://github.com/nnstreamer/nntrainer
- * @author Jijoong Moon <jijoong.moon@samsung.com>
- * @bug No known bugs except for NYI items
- *
- */
-#ifndef __OPTIMIZER_H__
-#define __OPTIMIZER_H__
-#ifdef __cplusplus
-
-#include <memory>
-#include <optimizer.h>
-#include <tensor.h>
-#include <weight.h>
-
-namespace nntrainer {
-
-/**
- * @class Optimizer Base class for optimizers
- * @brief Base class for all optimizers
- */
-class Optimizer : public ml::train::Optimizer {
-
-public:
- /**
- * @brief Default Constructor of Optimizer Class
- */
- Optimizer(float lr, float decay_rate = 1.0f, unsigned int decay_steps = 0,
- float continue_train = false) :
- learning_rate(lr),
- decay_rate(decay_rate),
- decay_steps(decay_steps),
- continue_train(continue_train) {
- checkValidation();
- }
-
- /**
- * @brief copy constructor
- * @param[in] rhs Optimizer to be copied
- */
- Optimizer(const Optimizer &rhs) = default;
-
- /**
- * @brief copy assignment operator
- * @param[in] rhs Optimizer to be copied
- */
- Optimizer &operator=(const Optimizer &rhs) = default;
-
- /**
- * @brief Move constructor of Conv 2D Layer.
- * @param[in] Conv2dLayer &&
- */
- Optimizer(Optimizer &&rhs) noexcept = default;
-
- /**
- * @brief Move assignment operator.
- * @parma[in] rhs Optimizer to be moved.
- */
- Optimizer &operator=(Optimizer &&rhs) = default;
-
- /**
- * @brief get Learning Rate
- * @retval Learning rate
- */
- float getLearningRate() { return learning_rate; };
-
- /**
- * @brief get Decay Rate for learning rate decay
- * @retval decay rate
- */
- float getDecayRate() { return decay_rate; };
-
- /**
- * @brief get Decay Steps for learning rate decay
- * @retval decay steps
- */
- float getDecaySteps() { return decay_steps; };
-
- /**
- * @brief set Optimizer Parameters
- * @param[in] values Optimizer Parameter list
- * @retval #ML_ERROR_NONE Successful.
- * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
- */
- int setProperty(std::vector<std::string> values);
-
- /**
- * @brief apply gradient to weight_list
- * @param[in] params Weight list
- * @param[in] iteration nth epoch number
- */
-  void applyGradients(std::vector<Weight> &params, int iteration);
-
- /**
- * @brief Read Training optimizer paramters from file
- * @param[in] file input stream file
- */
- virtual void read(std::ifstream &file);
-
- /**
- * @brief Save Training optimizer paramters from file
- * @param[in] file output stream file
- */
- virtual void save(std::ofstream &file);
-
- /**
- * @brief setProperty by PropertyType
- * @note By passing empty string, this can validate if @a type is valid
- * @param[in] type property type to be passed
- * @param[in] value value to be passed, if empty string is passed, do nothing
- * but throws error when @a type is invalid
- * @exception exception::not_supported when property type is not valid for
- * the particular layer
- * @exception std::invalid_argument invalid argument
- */
- virtual void setProperty(const PropertyType type,
- const std::string &value = "");
-
- /**
- * @brief validate the optimizer
- */
- virtual void checkValidation();
-
- /**
- * @brief initialize optimizer.
- * @retval #ML_ERROR_NONE Successful.
- * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
- */
- virtual int initialize();
-
- /**
- * @brief Add extra variables per weight if the optimizer needs any.
- * @param[in] params Weight list
- * @retval #ML_ERROR_NONE Successful.
- * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
- */
-  virtual void addOptimizerVariable(std::vector<Weight> &params) {}
-
- /**
- * @brief get Learning Rate for the given iteration
- * @param[in] iteration Iteration for the learning rate
- * @retval Learning rate
- */
- virtual double getLearningRate(int iteration);
-
-protected:
- float learning_rate; /** learning rate */
- float decay_rate; /** decay rate for learning rate */
- unsigned int decay_steps; /** decay steps for learning rate */
- bool continue_train; /** Continue training with previous tensors for adam */
-
-private:
- /**
- * @brief apply gradient to the given weight
- * @param[in] weight Weight and gradient set to be updated
- * @param[in] num_weights size of the array
- * @param[in] iteration nth epoch number
- * @note weight which is called upon can be assumed to be trainable
- */
- virtual void applyGradient(Weight &weight, double updated_lr,
- int iteration) = 0;
-};
-
-} /* namespace nntrainer */
-
-#endif /* __cplusplus */
-#endif /* __OPTIMIZER_H__ */
#define __SGD_H__
#ifdef __cplusplus
-#include <optimizer_internal.h>
+#include <optimizer_impl.h>
namespace nntrainer {
* @class SGD optimizer class
* @brief Stochastic Gradient Descent optimizer class
*/
-class SGD : public Optimizer {
+class SGD : public OptimizerImpl {
public:
/**
* @brief Constructor of Optimizer Class
*/
template <typename... Args>
- SGD(float lr = 0.0001f, Args... args) : Optimizer(lr, args...) {}
+ SGD(float lr = 0.0001f, Args... args) : OptimizerImpl(lr, args...) {}
/**
* @copydoc applyGradient(Weight &weight, double updated_lr,
#include <neuralnet.h>
#include <nntrainer_error.h>
#include <nntrainer_log.h>
-#include <optimizer_internal.h>
+#include <optimizer_devel.h>
#include <parse_util.h>
#include <pooling2d_layer.h>
#include <sstream>
%{_includedir}/nntrainer/neuralnet.h
%{_includedir}/nntrainer/tensor.h
%{_includedir}/nntrainer/tensor_dim.h
-%{_includedir}/nntrainer/optimizer_internal.h
+%{_includedir}/nntrainer/optimizer_devel.h
+%{_includedir}/nntrainer/optimizer_impl.h
%{_includedir}/nntrainer/optimizer_factory.h
%{_includedir}/nntrainer/nntrainer-api-common.h
%{_includedir}/nntrainer/var_grad.h
#include <typeinfo>
#include <unistd.h>
-#include <optimizer.h>
+#include <optimizer_devel.h>
+#include <weight.h>
#include <app_context.h>
#include <nntrainer_error.h>
std::invalid_argument);
}
-class CustomOptimizer : public ml::train::Optimizer {
+class CustomOptimizer : public nntrainer::Optimizer {
public:
+ /** Full custom optimizer example which overrides all functions */
const std::string getType() const { return "identity_optimizer"; }
  float getLearningRate() const { return 1.0f; }
- float getDecayRate() { return 1.0f; }
-
- float getDecaySteps() { return 1.0f; }
+ double getLearningRate(size_t iteration) const { return 1.0f; }
int setProperty(std::vector<std::string> values) { return 1; }
+ int initialize() { return 0; }
+
  void addOptimizerVariable(std::vector<nntrainer::Weight> &params) {}
+
void setProperty(const PropertyType type, const std::string &value = "") {}
  void checkValidation() const {}
+
+ void applyGradient(nntrainer::Weight &weight, double updated_lr,
+ int iteration) {}
};
-class CustomOptimizer2 : public ml::train::Optimizer {
+class CustomOptimizer2 : public nntrainer::Optimizer {
public:
+  /** Minimal custom optimizer example which defines only the necessary functions */
const std::string getType() const { return "identity_optimizer"; }
- float getLearningRate() { return 1.0f; }
-
- float getDecayRate() { return 1.0f; }
+ int initialize() { return 0; }
- float getDecaySteps() { return 1.0f; }
+ double getLearningRate(size_t iteration) const { return 1.0f; }
- int setProperty(std::vector<std::string> values) { return 1; }
+  void addOptimizerVariable(std::vector<nntrainer::Weight> &params) {}
- void setProperty(const PropertyType type, const std::string &value = "") {}
-
- void checkValidation() {}
+ void applyGradient(nntrainer::Weight &weight, double updated_lr,
+ int iteration) {}
};
/// @todo solidify the api signature