/*
 * Copyright (c) 2023 Samsung Electronics Co., Ltd. All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17 #ifndef __ONERT_EXEC_TRAIN_OPTIMIZER_SGD_H__
18 #define __ONERT_EXEC_TRAIN_OPTIMIZER_SGD_H__
20 #include "exec/train/optimizer/Optimizer.h"
32 * @class SGD optimizer class
33 * @brief SGD optimizer
35 class SGD : public Optimizer
45 explicit SGD() : _props{}, _learning_rate{0.01} {}
46 explicit SGD(const Property &props) : _props{props}, _learning_rate{0.01} {}
47 explicit SGD(double lr) : _props{}, _learning_rate{lr} {}
48 explicit SGD(const Property &props, double lr) : _props{props}, _learning_rate{lr} {}
52 * @brief Get the name of optimizer
54 * @return The name of optimizer
56 std::string name() const override { return std::string{"SGD"}; }
59 * @brief Get the Learning Rate
61 * @param iteration The number of training steps
62 * @return Learning rate
64 double getLearningRate(uint32_t iteration = 0) const override;
67 * @brief Apply gradient to a trainable tensor
69 * @param factors UpdateFactors to be used for applying gradient to a trainable tensor
71 void applyGradient(const UpdateFactors &factors) const override;
75 double _learning_rate;
78 } // namespace optimizer
83 #endif // __ONERT_EXEC_TRAIN_OPTIMIZER_SGD_H__