*
* @file centroid_knn.cpp
* @date 09 Jan 2021
- * @details This file contains the simple nearest neighbor layer, this layer
- * takes centroid and calculate l2 distance
+ * @brief This file contains the simple nearest neighbor layer
* @see https://github.com/nnstreamer/nntrainer
* @author Jihoon Lee <jhoon.it.lee@samsung.com>
* @bug No known bugs except for NYI items
*
+ * @details This layer takes centroids and calculates the L2 distance
*/
#include <iostream>
if (weights.empty()) {
weights.reserve(2);
weights.emplace_back(map_dim, nntrainer::WeightInitializer::WEIGHT_ZEROS,
- false, "centroidNN:map");
+ nntrainer::WeightRegularizer::NONE, 1.0f, false,
+ "centroidNN:map");
weights.emplace_back(samples_seen,
- nntrainer::WeightInitializer::WEIGHT_ZEROS, false,
+ nntrainer::WeightInitializer::WEIGHT_ZEROS,
+ nntrainer::WeightRegularizer::NONE, 1.0f, false,
"centroidNN:num_samples");
manager.trackWeights(weights);
} else {
weights[0].reset(map_dim, nntrainer::WeightInitializer::WEIGHT_ZEROS,
- false);
+ nntrainer::WeightRegularizer::NONE, 1.0f, false);
weights[1].reset(samples_seen, nntrainer::WeightInitializer::WEIGHT_ZEROS,
- false);
+ nntrainer::WeightRegularizer::NONE, 1.0f, false);
}
return ML_ERROR_NONE;
virtual void applyGradient(unsigned int iteration,
std::shared_ptr<Optimizer> optimizer) {
if (optimizer)
- optimizer->apply_gradients(weights, iteration);
+ optimizer->applyGradients(weights, iteration);
}
/**
layer->calcDerivative();
if (apply_gradient)
- opt->apply_gradients(layer->getWeightsRef(), iteration);
+ opt->applyGradients(layer->getWeightsRef(), iteration);
}
/**
return ll;
}
-void Adam::apply_gradient(Weight &weight, double updated_lr, int iteration) {
+void Adam::applyGradient(Weight &weight, double updated_lr, int iteration) {
- Tensor &x = weight.getVariableRef();
Tensor &x_grad = weight.getGradientRef();
// This is implementation of adam from original paper.
x_grad = wv.apply(sqrtEps, x_grad);
x_grad.multiply_i(wm);
- x.add_i(x_grad, -updated_lr);
+ weight.applyGradient(updated_lr);
}
void Adam::setProperty(const PropertyType type, const std::string &value) {
epsilon(ep) {}
/**
- * @copydoc apply_gradient(Weight &weight, int tensor_idx, double updated_lr,
+ * @copydoc applyGradient(Weight &weight, int tensor_idx, double updated_lr,
* int iteration)
*/
- void apply_gradient(Weight &weight, double updated_lr, int iteration);
+ void applyGradient(Weight &weight, double updated_lr, int iteration);
/**
* @copydoc Optimizer::getType()
return ll;
}
-void Optimizer::apply_gradients(std::vector<Weight> &weight_list,
- int iteration) {
+void Optimizer::applyGradients(std::vector<Weight> &weight_list,
+ int iteration) {
if (weight_list.empty())
return;
/** calculate regularization gradient before applying the gradient */
weight.calcRegularizationGradient();
- apply_gradient(weight, ll, iteration);
+ applyGradient(weight, ll, iteration);
}
}
* @param[in] params Weight list
* @param[in] iteration nth epoch number
*/
- void apply_gradients(std::vector<Weight> ¶ms, int iteration);
+ void applyGradients(std::vector<Weight> ¶ms, int iteration);
/**
* @brief Read Training optimizer paramters from file
* @param[in] iteration nth epoch number
* @note weight which is called upon can be assumed to be trainable
*/
- virtual void apply_gradient(Weight &weight, double updated_lr,
- int iteration) = 0;
+ virtual void applyGradient(Weight &weight, double updated_lr,
+ int iteration) = 0;
};
} /* namespace nntrainer */
const std::string SGD::type = "sgd";
-void SGD::apply_gradient(Weight &weight, double updated_lr, int iteration) {
+void SGD::applyGradient(Weight &weight, double updated_lr, int iteration) {
weight.applyGradient(updated_lr);
}
SGD(float lr = 0.0001f, Args... args) : Optimizer(lr, args...) {}
/**
- * @copydoc apply_gradient(Weight &weight, double updated_lr,
+ * @copydoc applyGradient(Weight &weight, double updated_lr,
* int iteration)
*/
- void apply_gradient(Weight &weight, double updated_lr, int iteration);
+ void applyGradient(Weight &weight, double updated_lr, int iteration);
/**
* @copydoc Optimizer::getType()
const TensorDim &dim,
const WeightInitializer init = WeightInitializer::WEIGHT_XAVIER_UNIFORM,
const WeightRegularizer reg = WeightRegularizer::NONE,
- const float reg_const = 1.0f, bool train = true, bool alloc_now = true, std::string name = "");
+ const float reg_const = 1.0f, bool train = true, bool alloc_now = true,
+ std::string name = "");
/**
* @copydoc var_grad::initializeVariable(const Tensor &)
/**
* @brief Apply the gradient to the weight
*/
- void applyGradient(double lr) {
- var->add_i(*grad.get(), -lr);
- }
+ void applyGradient(double lr) { var->add_i(*grad.get(), -lr); }
private:
WeightInitializer initializer; /**< initializer for this variable */