simpleshot_sources = [
'simpleshot_utils.cpp',
'layers/centering.cpp',
- 'layers/l2norm.cpp',
]
simpleshot_inc = include_directories([
#include <nntrainer-api-common.h>
#include "layers/centering.h"
-#include "layers/l2norm.h"
namespace simpleshot {
if (variant_ == "UN") {
/// left empty intended
} else if (variant_ == "L2N") {
- LayerHandle l2 =
- ml::train::createLayer("l2norm", {"name=l2norm", "trainable=false"});
+ LayerHandle l2 = ml::train::createLayer(
+ "preprocess_l2norm", {"name=l2norm", "trainable=false"});
v.push_back(l2);
} else if (variant_ == "CL2N") {
LayerHandle centering = ml::train::createLayer(
try {
app_context.registerFactory(
nntrainer::createLayer<simpleshot::layers::CenteringLayer>);
- app_context.registerFactory(
- nntrainer::createLayer<simpleshot::layers::L2NormLayer>);
} catch (std::exception &e) {
std::cerr << "registering factory failed: " << e.what();
return 1;
#include <centering.h>
#include <centroid_knn.h>
-#include <l2norm.h>
#include <layers_common_tests.h>
+#include <preprocess_l2norm_layer.h>
+/// @todo move below test to the main repo
auto semantic_activation_l2norm = LayerSemanticsParamType(
- nntrainer::createLayer<simpleshot::layers::L2NormLayer>,
- simpleshot::layers::L2NormLayer::type, {}, 0, false);
+ nntrainer::createLayer<nntrainer::PreprocessL2NormLayer>,
+ nntrainer::PreprocessL2NormLayer::type, {}, 0, false);
auto semantic_activation_centroid_knn = LayerSemanticsParamType(
nntrainer::createLayer<nntrainer::CentroidKNN>, nntrainer::CentroidKNN::type,
LAYER_PREPROCESS_TRANSLATE =
ML_TRAIN_LAYER_TYPE_PREPROCESS_TRANSLATE, /**< Preprocess translate Layer
type */
- LAYER_BACKBONE_TFLITE, /**< Backbone using TFLite */
+ LAYER_PREPROCESS_L2NORM =
+ ML_TRAIN_LAYER_TYPE_PREPROCESS_L2NORM, /**< Preprocess l2norm Layer
+ type */
+ LAYER_BACKBONE_TFLITE, /**< Backbone using TFLite */
LAYER_LOSS_MSE = 500, /**< Mean Squared Error Loss Layer type */
LAYER_LOSS_CROSS_ENTROPY_SIGMOID, /**< Cross Entropy with Sigmoid Loss Layer
type */
$(NNTRAINER_ROOT)/nntrainer/layers/concat_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/preprocess_flip_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/preprocess_translate_layer.cpp \
+ $(NNTRAINER_ROOT)/nntrainer/layers/preprocess_l2norm_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/embedding.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/rnn.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/lstm.cpp \
#include <plugged_optimizer.h>
#include <pooling2d_layer.h>
#include <preprocess_flip_layer.h>
+#include <preprocess_l2norm_layer.h>
#include <preprocess_translate_layer.h>
#include <rnn.h>
#include <split_layer.h>
ac.registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
PreprocessTranslateLayer::type,
LayerType::LAYER_PREPROCESS_TRANSLATE);
+ ac.registerFactory(nntrainer::createLayer<PreprocessL2NormLayer>,
+ PreprocessL2NormLayer::type,
+ LayerType::LAYER_PREPROCESS_L2NORM);
/** register losses */
ac.registerFactory(nntrainer::createLayer<MSELossLayer>, MSELossLayer::type,
'pooling2d_layer.cpp',
'preprocess_flip_layer.cpp',
'preprocess_translate_layer.cpp',
+ 'preprocess_l2norm_layer.cpp',
'embedding.cpp',
'rnn.cpp',
'acti_func.cpp',
/**
* Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
*
- * @file l2norm.cpp
+ * @file preprocess_l2norm_layer.cpp
* @date 09 Jan 2021
* @brief This file contains the simple l2norm layer which normalizes
* the given feature
#include <nntrainer_error.h>
#include <nntrainer_log.h>
-#include <l2norm.h>
-
-namespace simpleshot {
-namespace layers {
+#include <preprocess_l2norm_layer.h>
+namespace nntrainer {
static constexpr size_t SINGLE_INOUT_IDX = 0;
-void L2NormLayer::finalize(nntrainer::InitLayerContext &context) {
+void PreprocessL2NormLayer::finalize(InitLayerContext &context) {
const auto &input_dim = context.getInputDimensions()[0];
if (context.getNumInputs() != 1)
throw std::invalid_argument(
context.setOutputDimensions(context.getInputDimensions());
}
-void L2NormLayer::forwarding(nntrainer::RunLayerContext &context,
- bool training) {
+void PreprocessL2NormLayer::forwarding(RunLayerContext &context,
+ bool training) {
auto &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
auto &input_ = context.getInput(SINGLE_INOUT_IDX);
input_.multiply(1 / input_.l2norm(), hidden_);
}
-void L2NormLayer::calcDerivative(nntrainer::RunLayerContext &context) {
+void PreprocessL2NormLayer::calcDerivative(RunLayerContext &context) {
-  throw std::invalid_argument("[L2Norm::calcDerivative] This Layer "
-                              "does not support backward propagation");
+  throw std::invalid_argument("[PreprocessL2NormLayer::calcDerivative] This "
+                              "Layer does not support backward propagation");
}
-void L2NormLayer::setProperty(const std::vector<std::string> &values) {
+void PreprocessL2NormLayer::setProperty(
+ const std::vector<std::string> &values) {
if (!values.empty()) {
-    std::string msg = "[FlattenLayer] Unknown Layer Properties count " +
+    std::string msg = "[PreprocessL2NormLayer] Unknown Layer Properties count " +
std::to_string(values.size());
- throw nntrainer::exception::not_supported(msg);
+ throw exception::not_supported(msg);
}
}
-} // namespace layers
-} // namespace simpleshot
+} // namespace nntrainer
+
// SPDX-License-Identifier: Apache-2.0
/**
* Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
*
- * @file l2norm.h
+ * @file preprocess_l2norm_layer.h
* @date 09 Jan 2021
* @brief This file contains the simple l2norm layer which normalizes
* the given feature
*
*/
-#ifndef __L2NORM_H__
-#define __L2NORM_H__
+#ifndef __PREPROCESS_L2NORM_LAYER_H__
+#define __PREPROCESS_L2NORM_LAYER_H__
#include <string>
#include <layer_context.h>
#include <layer_devel.h>
#include <node_exporter.h>
-namespace simpleshot {
-namespace layers {
+namespace nntrainer {
/**
* @brief Layer class that l2normalizes a feature vector
*
*/
-class L2NormLayer : public nntrainer::Layer {
+class PreprocessL2NormLayer : public Layer {
public:
/**
* @brief Construct a new L2norm Layer object
* that normlizes given feature with l2norm
*/
- L2NormLayer() : Layer() {}
+ PreprocessL2NormLayer() : Layer() {}
/**
* @brief Move constructor.
- * @param[in] L2NormLayer &&
+ * @param[in] PreprocessL2NormLayer &&
*/
- L2NormLayer(L2NormLayer &&rhs) noexcept = default;
+ PreprocessL2NormLayer(PreprocessL2NormLayer &&rhs) noexcept = default;
/**
* @brief Move assignment operator.
- * @parma[in] rhs L2NormLayer to be moved.
+   * @param[in] rhs PreprocessL2NormLayer to be moved.
*/
- L2NormLayer &operator=(L2NormLayer &&rhs) = default;
+ PreprocessL2NormLayer &operator=(PreprocessL2NormLayer &&rhs) = default;
/**
-   * @brief Destroy the Centering Layer object
+   * @brief Destroy the PreprocessL2NormLayer object
*
*/
- ~L2NormLayer() {}
+ ~PreprocessL2NormLayer() {}
/**
* @copydoc Layer::finalize(InitLayerContext &context)
*/
- void finalize(nntrainer::InitLayerContext &context) override;
+ void finalize(InitLayerContext &context) override;
/**
* @copydoc Layer::forwarding(RunLayerContext &context, bool training)
*/
- void forwarding(nntrainer::RunLayerContext &context, bool training) override;
+ void forwarding(RunLayerContext &context, bool training) override;
/**
* @copydoc Layer::calcDerivative(RunLayerContext &context)
*/
- void calcDerivative(nntrainer::RunLayerContext &context) override;
+ void calcDerivative(RunLayerContext &context) override;
/**
* @copydoc bool supportBackwarding() const
/**
* @copydoc Layer::exportTo(Exporter &exporter, ExportMethods method)
*/
- void exportTo(nntrainer::Exporter &exporter,
- const nntrainer::ExportMethods &method) const override {}
+ void exportTo(Exporter &exporter,
+ const ExportMethods &method) const override {}
/**
* @copydoc Layer::getType()
*/
- const std::string getType() const override { return L2NormLayer::type; };
+ const std::string getType() const override {
+ return PreprocessL2NormLayer::type;
+ };
/**
* @copydoc Layer::setProperty(const std::vector<std::string> &values)
*/
void setProperty(const std::vector<std::string> &values) override;
- inline static const std::string type = "l2norm";
+ inline static const std::string type = "preprocess_l2norm";
};
-} // namespace layers
-} // namespace simpleshot
+} // namespace nntrainer
-#endif /* __L2NORM__H_ */
+#endif // __PREPROCESS_L2NORM_LAYER_H__