+++ /dev/null
-// SPDX-License-Identifier: Apache-2.0
-/**
- * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
- *
- * @file l2norm.cpp
- * @date 09 Jan 2021
- * @brief This file contains the simple l2norm layer which normalizes
- * the given feature
- * @see https://github.com/nnstreamer/nntrainer
- * @author Jihoon Lee <jhoon.it.lee@samsung.com>
- * @bug No known bugs except for NYI items
- *
- */
-
-#include <iostream>
-#include <regex>
-#include <sstream>
-
-#include <nntrainer_error.h>
-#include <nntrainer_log.h>
-
-#include <l2norm.h>
-
-namespace simpleshot {
-namespace layers {
-
-static constexpr size_t SINGLE_INOUT_IDX = 0;
-
-void L2NormLayer::finalize(nntrainer::InitLayerContext &context) {
- const auto &input_dim = context.getInputDimensions()[0];
- if (context.getNumInputs() != 1)
- throw std::invalid_argument(
- "l2norm layer is designed for a single input only");
- if (input_dim.channel() != 1 || input_dim.height() != 1) {
- throw std::invalid_argument(
- "l2norm layer is designed for channel and height is 1 for now, "
- "please check");
- }
-
- context.setOutputDimensions(context.getInputDimensions());
-}
-
-void L2NormLayer::forwarding(nntrainer::RunLayerContext &context,
- bool training) {
- auto &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
- auto &input_ = context.getInput(SINGLE_INOUT_IDX);
-
- input_.multiply(1 / input_.l2norm(), hidden_);
-}
-
-void L2NormLayer::calcDerivative(nntrainer::RunLayerContext &context) {
- throw std::invalid_argument("[L2Norm::calcDerivative] This Layer "
- "does not support backward propagation");
-}
-
-void L2NormLayer::setProperty(const std::vector<std::string> &values) {
- if (!values.empty()) {
- std::string msg = "[FlattenLayer] Unknown Layer Properties count " +
- std::to_string(values.size());
- throw nntrainer::exception::not_supported(msg);
- }
-}
-
-} // namespace layers
-} // namespace simpleshot
+++ /dev/null
-// SPDX-License-Identifier: Apache-2.0
-/**
- * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
- *
- * @file l2norm.h
- * @date 09 Jan 2021
- * @brief This file contains the simple l2norm layer which normalizes
- * the given feature
- * @see https://github.com/nnstreamer/nntrainer
- * @author Jihoon Lee <jhoon.it.lee@samsung.com>
- * @bug No known bugs except for NYI items
- *
- */
-
-#ifndef __L2NORM_H__
-#define __L2NORM_H__
-#include <string>
-
-#include <layer_context.h>
-#include <layer_devel.h>
-#include <node_exporter.h>
-
-namespace simpleshot {
-namespace layers {
-
-/**
- * @brief Layer class that l2normalizes a feature vector
- *
- */
-class L2NormLayer : public nntrainer::Layer {
-public:
- /**
- * @brief Construct a new L2norm Layer object
- * that normlizes given feature with l2norm
- */
- L2NormLayer() : Layer() {}
-
- /**
- * @brief Move constructor.
- * @param[in] L2NormLayer &&
- */
- L2NormLayer(L2NormLayer &&rhs) noexcept = default;
-
- /**
- * @brief Move assignment operator.
- * @parma[in] rhs L2NormLayer to be moved.
- */
- L2NormLayer &operator=(L2NormLayer &&rhs) = default;
-
- /**
- * @brief Destroy the Centering Layer object
- *
- */
- ~L2NormLayer() {}
-
- /**
- * @copydoc Layer::finalize(InitLayerContext &context)
- */
- void finalize(nntrainer::InitLayerContext &context) override;
-
- /**
- * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
- */
- void forwarding(nntrainer::RunLayerContext &context, bool training) override;
-
- /**
- * @copydoc Layer::calcDerivative(RunLayerContext &context)
- */
- void calcDerivative(nntrainer::RunLayerContext &context) override;
-
- /**
- * @copydoc bool supportBackwarding() const
- */
- bool supportBackwarding() const override { return false; };
-
- /**
- * @copydoc Layer::exportTo(Exporter &exporter, ExportMethods method)
- */
- void exportTo(nntrainer::Exporter &exporter,
- const nntrainer::ExportMethods &method) const override {}
-
- /**
- * @copydoc Layer::getType()
- */
- const std::string getType() const override { return L2NormLayer::type; };
-
- /**
- * @copydoc Layer::setProperty(const std::vector<std::string> &values)
- */
- void setProperty(const std::vector<std::string> &values) override;
-
- inline static const std::string type = "l2norm";
-};
-} // namespace layers
-} // namespace simpleshot
-
-#endif /* __L2NORM__H_ */
simpleshot_sources = [
'simpleshot_utils.cpp',
'layers/centering.cpp',
- 'layers/l2norm.cpp',
]
simpleshot_inc = include_directories([
#include <nntrainer-api-common.h>
#include "layers/centering.h"
-#include "layers/l2norm.h"
namespace simpleshot {
if (variant_ == "UN") {
/// left empty intended
} else if (variant_ == "L2N") {
- LayerHandle l2 =
- ml::train::createLayer("l2norm", {"name=l2norm", "trainable=false"});
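+    // "preprocess_l2norm" is now provided by nntrainer core, replacing the local simpleshot l2norm layer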
+ LayerHandle l2 = ml::train::createLayer(
+ "preprocess_l2norm", {"name=l2norm", "trainable=false"});
v.push_back(l2);
} else if (variant_ == "CL2N") {
LayerHandle centering = ml::train::createLayer(
try {
app_context.registerFactory(
nntrainer::createLayer<simpleshot::layers::CenteringLayer>);
- app_context.registerFactory(
- nntrainer::createLayer<simpleshot::layers::L2NormLayer>);
} catch (std::exception &e) {
std::cerr << "registering factory failed: " << e.what();
return 1;
#include <centering.h>
#include <centroid_knn.h>
-#include <l2norm.h>
#include <layers_common_tests.h>
+#include <preprocess_l2norm_layer.h>
+/// @todo move below test to the main repo
auto semantic_activation_l2norm = LayerSemanticsParamType(
- nntrainer::createLayer<simpleshot::layers::L2NormLayer>,
- simpleshot::layers::L2NormLayer::type, {}, 0, false);
+ nntrainer::createLayer<nntrainer::PreprocessL2NormLayer>,
+ nntrainer::PreprocessL2NormLayer::type, {}, 0, false);
auto semantic_activation_centroid_knn = LayerSemanticsParamType(
nntrainer::createLayer<nntrainer::CentroidKNN>, nntrainer::CentroidKNN::type,
LAYER_PREPROCESS_TRANSLATE =
ML_TRAIN_LAYER_TYPE_PREPROCESS_TRANSLATE, /**< Preprocess translate Layer
type */
- LAYER_BACKBONE_TFLITE, /**< Backbone using TFLite */
+ LAYER_PREPROCESS_L2NORM =
+ ML_TRAIN_LAYER_TYPE_PREPROCESS_L2NORM, /**< Preprocess l2norm Layer
+ type */
+ LAYER_BACKBONE_TFLITE, /**< Backbone using TFLite */
LAYER_LOSS_MSE = 500, /**< Mean Squared Error Loss Layer type */
LAYER_LOSS_CROSS_ENTROPY_SIGMOID, /**< Cross Entropy with Sigmoid Loss Layer
type */
$(NNTRAINER_ROOT)/nntrainer/layers/concat_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/preprocess_flip_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/preprocess_translate_layer.cpp \
+ $(NNTRAINER_ROOT)/nntrainer/layers/preprocess_l2norm_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/embedding.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/rnn.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/lstm.cpp \
#include <plugged_optimizer.h>
#include <pooling2d_layer.h>
#include <preprocess_flip_layer.h>
+#include <preprocess_l2norm_layer.h>
#include <preprocess_translate_layer.h>
#include <rnn.h>
#include <split_layer.h>
ac.registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
PreprocessTranslateLayer::type,
LayerType::LAYER_PREPROCESS_TRANSLATE);
+ ac.registerFactory(nntrainer::createLayer<PreprocessL2NormLayer>,
+ PreprocessL2NormLayer::type,
+ LayerType::LAYER_PREPROCESS_L2NORM);
/** register losses */
ac.registerFactory(nntrainer::createLayer<MSELossLayer>, MSELossLayer::type,
'pooling2d_layer.cpp',
'preprocess_flip_layer.cpp',
'preprocess_translate_layer.cpp',
+ 'preprocess_l2norm_layer.cpp',
'embedding.cpp',
'rnn.cpp',
'acti_func.cpp',
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file preprocess_l2norm_layer.cpp
+ * @date 09 Jan 2021
+ * @brief This file contains the simple l2norm layer which normalizes
+ * the given feature
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ */
+
+#include <iostream>
+#include <regex>
+#include <sstream>
+
+#include <nntrainer_error.h>
+#include <nntrainer_log.h>
+
+#include <preprocess_l2norm_layer.h>
+
+namespace nntrainer {
+static constexpr size_t SINGLE_INOUT_IDX = 0;
+
+void PreprocessL2NormLayer::finalize(InitLayerContext &context) {
+ const auto &input_dim = context.getInputDimensions()[0];
+ if (context.getNumInputs() != 1)
+ throw std::invalid_argument(
+ "l2norm layer is designed for a single input only");
+ if (input_dim.channel() != 1 || input_dim.height() != 1) {
+ throw std::invalid_argument(
+ "l2norm layer is designed for channel and height is 1 for now, "
+ "please check");
+ }
+
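+  // l2 normalization only rescales the feature, so the output shape matches the input shape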
+ context.setOutputDimensions(context.getInputDimensions());
+}
+
+void PreprocessL2NormLayer::forwarding(RunLayerContext &context,
+ bool training) {
+ auto &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
+ auto &input_ = context.getInput(SINGLE_INOUT_IDX);
+
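+  // scale the input by the reciprocal of its l2 norm so the output feature has unit length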
+ input_.multiply(1 / input_.l2norm(), hidden_);
+}
+
+void PreprocessL2NormLayer::calcDerivative(RunLayerContext &context) {
+ throw std::invalid_argument("[L2Norm::calcDerivative] This Layer "
+ "does not support backward propagation");
+}
+
+void PreprocessL2NormLayer::setProperty(
+ const std::vector<std::string> &values) {
+ if (!values.empty()) {
+ std::string msg = "[FlattenLayer] Unknown Layer Properties count " +
+ std::to_string(values.size());
+ throw exception::not_supported(msg);
+ }
+}
+
+} // namespace nntrainer
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file preprocess_l2norm_layer.h
+ * @date 09 Jan 2021
+ * @brief This file contains the simple l2norm layer which normalizes
+ * the given feature
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug No known bugs except for NYI items
+ *
+ */
+
+#ifndef __PREPROCESS_L2NORM_LAYER_H__
+#define __PREPROCESS_L2NORM_LAYER_H__
+#include <string>
+
+#include <layer_context.h>
+#include <layer_devel.h>
+#include <node_exporter.h>
+
+namespace nntrainer {
+
+/**
+ * @brief Layer class that l2normalizes a feature vector
+ *
+ */
+class PreprocessL2NormLayer : public Layer {
+public:
+ /**
+ * @brief Construct a new L2norm Layer object
+   *        that normalizes the given feature with l2norm
+ */
+ PreprocessL2NormLayer() : Layer() {}
+
+ /**
+ * @brief Move constructor.
+ * @param[in] PreprocessL2NormLayer &&
+ */
+ PreprocessL2NormLayer(PreprocessL2NormLayer &&rhs) noexcept = default;
+
+ /**
+ * @brief Move assignment operator.
+   * @param[in] rhs PreprocessL2NormLayer to be moved.
+ */
+ PreprocessL2NormLayer &operator=(PreprocessL2NormLayer &&rhs) = default;
+
+ /**
+   * @brief Destroy the Preprocess L2Norm Layer object
+ *
+ */
+ ~PreprocessL2NormLayer() {}
+
+ /**
+ * @copydoc Layer::finalize(InitLayerContext &context)
+ */
+ void finalize(InitLayerContext &context) override;
+
+ /**
+ * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
+ */
+ void forwarding(RunLayerContext &context, bool training) override;
+
+ /**
+ * @copydoc Layer::calcDerivative(RunLayerContext &context)
+ */
+ void calcDerivative(RunLayerContext &context) override;
+
+ /**
+ * @copydoc bool supportBackwarding() const
+ */
+ bool supportBackwarding() const override { return false; };
+
+ /**
+ * @copydoc Layer::exportTo(Exporter &exporter, ExportMethods method)
+ */
+ void exportTo(Exporter &exporter,
+ const ExportMethods &method) const override {}
+
+ /**
+ * @copydoc Layer::getType()
+ */
+ const std::string getType() const override {
+ return PreprocessL2NormLayer::type;
+ };
+
+ /**
+ * @copydoc Layer::setProperty(const std::vector<std::string> &values)
+ */
+ void setProperty(const std::vector<std::string> &values) override;
+
+ inline static const std::string type = "preprocess_l2norm";
+};
+} // namespace nntrainer
+
+#endif // __PREPROCESS_L2NORM_LAYER_H__