[CAPI] Expose l2norm layer
author Jihoon Lee <jhoon.it.lee@samsung.com>
Wed, 18 Aug 2021 09:18:22 +0000 (18:18 +0900)
committer Jijoong Moon <jijoong.moon@samsung.com>
Thu, 26 Aug 2021 03:58:51 +0000 (12:58 +0900)
**Changes proposed in this PR:**
- Rename the l2norm layer to the PreprocessL2Norm layer
- Move the l2norm layer into the main code base

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
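
With the layer now registered in the default app context, applications can create it through the ccapi by its new type name instead of registering a custom factory. A minimal sketch of the new usage, mirroring the change in Applications/SimpleShot/task_runner.cpp (model assembly around it is omitted; `LayerHandle` is the `shared_ptr` alias used there):

```cpp
#include <memory>

#include <layer.h> // ccapi: ml::train::createLayer

using LayerHandle = std::shared_ptr<ml::train::Layer>;

// create the now-builtin preprocess_l2norm layer by its registered name;
// it accepts no layer-specific properties, only common ones such as
// name= and trainable=
LayerHandle l2 = ml::train::createLayer(
  "preprocess_l2norm", {"name=l2norm", "trainable=false"});
```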
Applications/SimpleShot/layers/l2norm.cpp [deleted file]
Applications/SimpleShot/layers/l2norm.h [deleted file]
Applications/SimpleShot/meson.build
Applications/SimpleShot/task_runner.cpp
Applications/SimpleShot/test/simpleshot_layer_common_tests.cpp
api/ccapi/include/layer.h
jni/Android.mk
nntrainer/app_context.cpp
nntrainer/layers/meson.build
nntrainer/layers/preprocess_l2norm_layer.cpp [new file with mode: 0644]
nntrainer/layers/preprocess_l2norm_layer.h [new file with mode: 0644]

diff --git a/Applications/SimpleShot/layers/l2norm.cpp b/Applications/SimpleShot/layers/l2norm.cpp
deleted file mode 100644 (file)
index 1d6b7f1..0000000
+++ /dev/null
@@ -1,65 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0
-/**
- * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
- *
- * @file   l2norm.cpp
- * @date   09 Jan 2021
- * @brief  This file contains the simple l2norm layer which normalizes
- * the given feature
- * @see    https://github.com/nnstreamer/nntrainer
- * @author Jihoon Lee <jhoon.it.lee@samsung.com>
- * @bug    No known bugs except for NYI items
- *
- */
-
-#include <iostream>
-#include <regex>
-#include <sstream>
-
-#include <nntrainer_error.h>
-#include <nntrainer_log.h>
-
-#include <l2norm.h>
-
-namespace simpleshot {
-namespace layers {
-
-static constexpr size_t SINGLE_INOUT_IDX = 0;
-
-void L2NormLayer::finalize(nntrainer::InitLayerContext &context) {
-  const auto &input_dim = context.getInputDimensions()[0];
-  if (context.getNumInputs() != 1)
-    throw std::invalid_argument(
-      "l2norm layer is designed for a single input only");
-  if (input_dim.channel() != 1 || input_dim.height() != 1) {
-    throw std::invalid_argument(
-      "l2norm layer is designed for channel and height is 1 for now, "
-      "please check");
-  }
-
-  context.setOutputDimensions(context.getInputDimensions());
-}
-
-void L2NormLayer::forwarding(nntrainer::RunLayerContext &context,
-                             bool training) {
-  auto &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
-  auto &input_ = context.getInput(SINGLE_INOUT_IDX);
-
-  input_.multiply(1 / input_.l2norm(), hidden_);
-}
-
-void L2NormLayer::calcDerivative(nntrainer::RunLayerContext &context) {
-  throw std::invalid_argument("[L2Norm::calcDerivative] This Layer "
-                              "does not support backward propagation");
-}
-
-void L2NormLayer::setProperty(const std::vector<std::string> &values) {
-  if (!values.empty()) {
-    std::string msg = "[FlattenLayer] Unknown Layer Properties count " +
-                      std::to_string(values.size());
-    throw nntrainer::exception::not_supported(msg);
-  }
-}
-
-} // namespace layers
-} // namespace simpleshot
diff --git a/Applications/SimpleShot/layers/l2norm.h b/Applications/SimpleShot/layers/l2norm.h
deleted file mode 100644 (file)
index da06d92..0000000
+++ /dev/null
@@ -1,97 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0
-/**
- * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
- *
- * @file   l2norm.h
- * @date   09 Jan 2021
- * @brief  This file contains the simple l2norm layer which normalizes
- * the given feature
- * @see    https://github.com/nnstreamer/nntrainer
- * @author Jihoon Lee <jhoon.it.lee@samsung.com>
- * @bug    No known bugs except for NYI items
- *
- */
-
-#ifndef __L2NORM_H__
-#define __L2NORM_H__
-#include <string>
-
-#include <layer_context.h>
-#include <layer_devel.h>
-#include <node_exporter.h>
-
-namespace simpleshot {
-namespace layers {
-
-/**
- * @brief Layer class that l2normalizes a feature vector
- *
- */
-class L2NormLayer : public nntrainer::Layer {
-public:
-  /**
-   * @brief Construct a new L2norm Layer object
-   * that normlizes given feature with l2norm
-   */
-  L2NormLayer() : Layer() {}
-
-  /**
-   *  @brief  Move constructor.
-   *  @param[in] L2NormLayer &&
-   */
-  L2NormLayer(L2NormLayer &&rhs) noexcept = default;
-
-  /**
-   * @brief  Move assignment operator.
-   * @parma[in] rhs L2NormLayer to be moved.
-   */
-  L2NormLayer &operator=(L2NormLayer &&rhs) = default;
-
-  /**
-   * @brief Destroy the Centering Layer object
-   *
-   */
-  ~L2NormLayer() {}
-
-  /**
-   * @copydoc Layer::finalize(InitLayerContext &context)
-   */
-  void finalize(nntrainer::InitLayerContext &context) override;
-
-  /**
-   * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
-   */
-  void forwarding(nntrainer::RunLayerContext &context, bool training) override;
-
-  /**
-   * @copydoc Layer::calcDerivative(RunLayerContext &context)
-   */
-  void calcDerivative(nntrainer::RunLayerContext &context) override;
-
-  /**
-   * @copydoc bool supportBackwarding() const
-   */
-  bool supportBackwarding() const override { return false; };
-
-  /**
-   * @copydoc Layer::exportTo(Exporter &exporter, ExportMethods method)
-   */
-  void exportTo(nntrainer::Exporter &exporter,
-                const nntrainer::ExportMethods &method) const override {}
-
-  /**
-   * @copydoc Layer::getType()
-   */
-  const std::string getType() const override { return L2NormLayer::type; };
-
-  /**
-   * @copydoc Layer::setProperty(const std::vector<std::string> &values)
-   */
-  void setProperty(const std::vector<std::string> &values) override;
-
-  inline static const std::string type = "l2norm";
-};
-} // namespace layers
-} // namespace simpleshot
-
-#endif /* __L2NORM__H_ */
diff --git a/Applications/SimpleShot/meson.build b/Applications/SimpleShot/meson.build
index bd8d70be3e8a0973d264fb25288680d883ad57d2..02af380e37a81ca8645e6439b8b1c5eaa116c7c8 100644 (file)
@@ -1,7 +1,6 @@
 simpleshot_sources = [
   'simpleshot_utils.cpp',
   'layers/centering.cpp',
-  'layers/l2norm.cpp',
 ]
 
 simpleshot_inc = include_directories([
diff --git a/Applications/SimpleShot/task_runner.cpp b/Applications/SimpleShot/task_runner.cpp
index 804d198d859ea8663f14d5e115c09d010b82d9bd..c2a1400b4a93fcf32c31dc2dc54c280a371b5ffe 100644 (file)
@@ -22,7 +22,6 @@
 #include <nntrainer-api-common.h>
 
 #include "layers/centering.h"
-#include "layers/l2norm.h"
 
 namespace simpleshot {
 
@@ -142,8 +141,8 @@ std::unique_ptr<ml::train::Model> createModel(const std::string &backbone,
     if (variant_ == "UN") {
       /// left empty intended
     } else if (variant_ == "L2N") {
-      LayerHandle l2 =
-        ml::train::createLayer("l2norm", {"name=l2norm", "trainable=false"});
+      LayerHandle l2 = ml::train::createLayer(
+        "preprocess_l2norm", {"name=l2norm", "trainable=false"});
       v.push_back(l2);
     } else if (variant_ == "CL2N") {
       LayerHandle centering = ml::train::createLayer(
@@ -222,8 +221,6 @@ int main(int argc, char **argv) {
   try {
     app_context.registerFactory(
       nntrainer::createLayer<simpleshot::layers::CenteringLayer>);
-    app_context.registerFactory(
-      nntrainer::createLayer<simpleshot::layers::L2NormLayer>);
   } catch (std::exception &e) {
     std::cerr << "registering factory failed: " << e.what();
     return 1;
diff --git a/Applications/SimpleShot/test/simpleshot_layer_common_tests.cpp b/Applications/SimpleShot/test/simpleshot_layer_common_tests.cpp
index fd35457b099dfa20380c4bb9afc2d8d30f8a95e5..7cdb2e1ad007069b58d7f036f5b69329c6895b24 100644 (file)
 
 #include <centering.h>
 #include <centroid_knn.h>
-#include <l2norm.h>
 #include <layers_common_tests.h>
+#include <preprocess_l2norm_layer.h>
 
+/// @todo move the test below to the main repo
 auto semantic_activation_l2norm = LayerSemanticsParamType(
-  nntrainer::createLayer<simpleshot::layers::L2NormLayer>,
-  simpleshot::layers::L2NormLayer::type, {}, 0, false);
+  nntrainer::createLayer<nntrainer::PreprocessL2NormLayer>,
+  nntrainer::PreprocessL2NormLayer::type, {}, 0, false);
 
 auto semantic_activation_centroid_knn = LayerSemanticsParamType(
   nntrainer::createLayer<nntrainer::CentroidKNN>, nntrainer::CentroidKNN::type,
diff --git a/api/ccapi/include/layer.h b/api/ccapi/include/layer.h
index 1e547097d2e25745dc53554653cb92f342b72cf5..c301971b03bcd4ab2dbe081f67348f9d2bd6b2da 100644 (file)
@@ -61,7 +61,10 @@ enum LayerType {
   LAYER_PREPROCESS_TRANSLATE =
     ML_TRAIN_LAYER_TYPE_PREPROCESS_TRANSLATE, /**< Preprocess translate Layer
                                                  type */
-  LAYER_BACKBONE_TFLITE,                      /**< Backbone using TFLite */
+  LAYER_PREPROCESS_L2NORM =
+    ML_TRAIN_LAYER_TYPE_PREPROCESS_L2NORM, /**< Preprocess l2norm Layer
+                                                 type */
+  LAYER_BACKBONE_TFLITE,                   /**< Backbone using TFLite */
   LAYER_LOSS_MSE = 500,             /**< Mean Squared Error Loss Layer type */
   LAYER_LOSS_CROSS_ENTROPY_SIGMOID, /**< Cross Entropy with Sigmoid Loss Layer
                                        type */
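
The new `LAYER_PREPROCESS_L2NORM` value maps to the C API constant `ML_TRAIN_LAYER_TYPE_PREPROCESS_L2NORM`, so the layer can also be created by enum rather than by string. A sketch, assuming the enum-taking `createLayer` overload that layer.h provides alongside the string-based one:

```cpp
#include <layer.h>

// enum-based creation; equivalent to createLayer("preprocess_l2norm", ...)
auto l2 = ml::train::createLayer(
  ml::train::LayerType::LAYER_PREPROCESS_L2NORM,
  {"name=l2norm", "trainable=false"});
```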
diff --git a/jni/Android.mk b/jni/Android.mk
index afafc39b5e08773f62a7370e57e4990305b4675d..0e15d486346b64c204530771e1b2624699d3af8e 100644 (file)
@@ -156,6 +156,7 @@ NNTRAINER_SRCS := $(NNTRAINER_ROOT)/nntrainer/models/neuralnet.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/concat_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/preprocess_flip_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/preprocess_translate_layer.cpp \
+                  $(NNTRAINER_ROOT)/nntrainer/layers/preprocess_l2norm_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/embedding.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/rnn.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/lstm.cpp \
diff --git a/nntrainer/app_context.cpp b/nntrainer/app_context.cpp
index e2c2fdb39cf0caf7ebad661931df0771de2a41ca..2d240286dc748d33c7389fb86c92f1bf35459092 100644 (file)
@@ -52,6 +52,7 @@
 #include <plugged_optimizer.h>
 #include <pooling2d_layer.h>
 #include <preprocess_flip_layer.h>
+#include <preprocess_l2norm_layer.h>
 #include <preprocess_translate_layer.h>
 #include <rnn.h>
 #include <split_layer.h>
@@ -261,6 +262,9 @@ static void add_default_object(AppContext &ac) {
   ac.registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
                      PreprocessTranslateLayer::type,
                      LayerType::LAYER_PREPROCESS_TRANSLATE);
+  ac.registerFactory(nntrainer::createLayer<PreprocessL2NormLayer>,
+                     PreprocessL2NormLayer::type,
+                     LayerType::LAYER_PREPROCESS_L2NORM);
 
   /** register losses */
   ac.registerFactory(nntrainer::createLayer<MSELossLayer>, MSELossLayer::type,
diff --git a/nntrainer/layers/meson.build b/nntrainer/layers/meson.build
index 2c1b41f08e110c2141bb535d3f37155bea45979a..8682ac2ca6ff1a05d75b4738684922ffc7b02102 100644 (file)
@@ -15,6 +15,7 @@ layer_sources = [
   'pooling2d_layer.cpp',
   'preprocess_flip_layer.cpp',
   'preprocess_translate_layer.cpp',
+  'preprocess_l2norm_layer.cpp',
   'embedding.cpp',
   'rnn.cpp',
   'acti_func.cpp',
diff --git a/nntrainer/layers/preprocess_l2norm_layer.cpp b/nntrainer/layers/preprocess_l2norm_layer.cpp
new file mode 100644 (file)
index 0000000..141376d
--- /dev/null
@@ -0,0 +1,63 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file   preprocess_l2norm_layer.cpp
+ * @date   09 Jan 2021
+ * @brief  This file contains the simple l2norm layer which normalizes
+ * the given feature
+ * @see    https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug    No known bugs except for NYI items
+ *
+ */
+
+#include <iostream>
+#include <regex>
+#include <sstream>
+
+#include <nntrainer_error.h>
+#include <nntrainer_log.h>
+
+#include <preprocess_l2norm_layer.h>
+
+namespace nntrainer {
+static constexpr size_t SINGLE_INOUT_IDX = 0;
+
+void PreprocessL2NormLayer::finalize(InitLayerContext &context) {
+  if (context.getNumInputs() != 1)
+    throw std::invalid_argument(
+      "l2norm layer is designed for a single input only");
+  const auto &input_dim = context.getInputDimensions()[0];
+  if (input_dim.channel() != 1 || input_dim.height() != 1) {
+    throw std::invalid_argument(
+      "l2norm layer only supports channel and height of 1 for now, "
+      "please check");
+  }
+
+  context.setOutputDimensions(context.getInputDimensions());
+}
+
+void PreprocessL2NormLayer::forwarding(RunLayerContext &context,
+                                       bool training) {
+  auto &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
+  auto &input_ = context.getInput(SINGLE_INOUT_IDX);
+
+  input_.multiply(1 / input_.l2norm(), hidden_);
+}
+
+void PreprocessL2NormLayer::calcDerivative(RunLayerContext &context) {
+  throw std::invalid_argument("[PreprocessL2NormLayer::calcDerivative] This "
+                              "layer does not support backward propagation");
+}
+
+void PreprocessL2NormLayer::setProperty(
+  const std::vector<std::string> &values) {
+  if (!values.empty()) {
+    std::string msg = "[PreprocessL2NormLayer] Unknown Layer Properties "
+                      "count " + std::to_string(values.size());
+    throw exception::not_supported(msg);
+  }
+}
+
+} // namespace nntrainer
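
For reference, the forwarding above amounts to scaling the input by the reciprocal of its L2 norm. A standalone illustration of the same computation on a plain `std::vector` (a stand-in for the `nntrainer::Tensor` call `input_.multiply(1 / input_.l2norm(), hidden_)`, not the actual API):

```cpp
#include <cassert>
#include <cmath>
#include <cstddef>
#include <vector>

// out[i] = in[i] / ||in||_2; mirrors what PreprocessL2NormLayer::forwarding
// computes on the feature vector (a zero vector would divide by zero,
// matching the layer's behavior)
std::vector<float> l2_normalize(const std::vector<float> &in) {
  float sq = 0.0f;
  for (float v : in)
    sq += v * v;
  const float norm = std::sqrt(sq);
  std::vector<float> out(in.size());
  for (std::size_t i = 0; i < in.size(); ++i)
    out[i] = in[i] / norm;
  return out;
}

int main() {
  auto out = l2_normalize({3.0f, 4.0f}); // ||(3, 4)||_2 = 5
  assert(std::fabs(out[0] - 0.6f) < 1e-6f);
  assert(std::fabs(out[1] - 0.8f) < 1e-6f);
  return 0;
}
```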
diff --git a/nntrainer/layers/preprocess_l2norm_layer.h b/nntrainer/layers/preprocess_l2norm_layer.h
new file mode 100644 (file)
index 0000000..62fdff7
--- /dev/null
@@ -0,0 +1,97 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file   preprocess_l2norm_layer.h
+ * @date   09 Jan 2021
+ * @brief  This file contains the simple l2norm layer which normalizes
+ * the given feature
+ * @see    https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug    No known bugs except for NYI items
+ *
+ */
+
+#ifndef __PREPROCESS_L2NORM_LAYER_H__
+#define __PREPROCESS_L2NORM_LAYER_H__
+#include <string>
+
+#include <layer_context.h>
+#include <layer_devel.h>
+#include <node_exporter.h>
+
+namespace nntrainer {
+
+/**
+ * @brief Layer class that l2normalizes a feature vector
+ *
+ */
+class PreprocessL2NormLayer : public Layer {
+public:
+  /**
+   * @brief Construct a new L2norm Layer object
+   * that normlizes given feature with l2norm
+   */
+  PreprocessL2NormLayer() : Layer() {}
+
+  /**
+   *  @brief  Move constructor.
+   *  @param[in] PreprocessL2NormLayer &&
+   */
+  PreprocessL2NormLayer(PreprocessL2NormLayer &&rhs) noexcept = default;
+
+  /**
+   * @brief  Move assignment operator.
+   * @param[in] rhs PreprocessL2NormLayer to be moved.
+   */
+  PreprocessL2NormLayer &operator=(PreprocessL2NormLayer &&rhs) = default;
+
+  /**
+   * @brief Destroy the PreprocessL2Norm Layer object
+   *
+   */
+  ~PreprocessL2NormLayer() {}
+
+  /**
+   * @copydoc Layer::finalize(InitLayerContext &context)
+   */
+  void finalize(InitLayerContext &context) override;
+
+  /**
+   * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
+   */
+  void forwarding(RunLayerContext &context, bool training) override;
+
+  /**
+   * @copydoc Layer::calcDerivative(RunLayerContext &context)
+   */
+  void calcDerivative(RunLayerContext &context) override;
+
+  /**
+   * @copydoc bool supportBackwarding() const
+   */
+  bool supportBackwarding() const override { return false; };
+
+  /**
+   * @copydoc Layer::exportTo(Exporter &exporter, ExportMethods method)
+   */
+  void exportTo(Exporter &exporter,
+                const ExportMethods &method) const override {}
+
+  /**
+   * @copydoc Layer::getType()
+   */
+  const std::string getType() const override {
+    return PreprocessL2NormLayer::type;
+  };
+
+  /**
+   * @copydoc Layer::setProperty(const std::vector<std::string> &values)
+   */
+  void setProperty(const std::vector<std::string> &values) override;
+
+  inline static const std::string type = "preprocess_l2norm";
+};
+} // namespace nntrainer
+
+#endif // __PREPROCESS_L2NORM_LAYER_H__