[CAPI] Expose l2norm layer
author    Jihoon Lee <jhoon.it.lee@samsung.com>
Wed, 18 Aug 2021 09:18:22 +0000 (18:18 +0900)
committer Jijoong Moon <jijoong.moon@samsung.com>
Thu, 26 Aug 2021 03:58:51 +0000 (12:58 +0900)
**Changes proposed in this PR:**
- Rename the l2norm layer to the PreprocessL2Norm layer
- Move the l2norm layer into the main code base (see the usage sketch below)
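
A minimal usage sketch of the rename through the ccapi. The `LayerHandle` alias for `std::shared_ptr<ml::train::Layer>` and the property strings follow SimpleShot's task_runner.cpp in the diff below; the helper function name here is illustrative only:

```cpp
#include <memory>

#include <layer.h> // ccapi header (api/ccapi/include/layer.h)

using LayerHandle = std::shared_ptr<ml::train::Layer>;

LayerHandle makePreprocessL2Norm() {
  // Before this patch (layer local to the SimpleShot application):
  //   return ml::train::createLayer("l2norm", {"name=l2norm", "trainable=false"});

  // After this patch (layer registered in the core layer set under a new key):
  return ml::train::createLayer("preprocess_l2norm",
                                {"name=l2norm", "trainable=false"});
}
```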

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
Applications/SimpleShot/meson.build
Applications/SimpleShot/task_runner.cpp
Applications/SimpleShot/test/simpleshot_layer_common_tests.cpp
api/ccapi/include/layer.h
jni/Android.mk
nntrainer/app_context.cpp
nntrainer/layers/meson.build
nntrainer/layers/preprocess_l2norm_layer.cpp [moved from Applications/SimpleShot/layers/l2norm.cpp with 74% similarity]
nntrainer/layers/preprocess_l2norm_layer.h [moved from Applications/SimpleShot/layers/l2norm.h with 59% similarity]

index bd8d70b..02af380 100644 (file)
@@ -1,7 +1,6 @@
 simpleshot_sources = [
   'simpleshot_utils.cpp',
   'layers/centering.cpp',
-  'layers/l2norm.cpp',
 ]
 
 simpleshot_inc = include_directories([
index 804d198..c2a1400 100644 (file)
@@ -22,7 +22,6 @@
 #include <nntrainer-api-common.h>
 
 #include "layers/centering.h"
-#include "layers/l2norm.h"
 
 namespace simpleshot {
 
@@ -142,8 +141,8 @@ std::unique_ptr<ml::train::Model> createModel(const std::string &backbone,
     if (variant_ == "UN") {
       /// left empty intended
     } else if (variant_ == "L2N") {
-      LayerHandle l2 =
-        ml::train::createLayer("l2norm", {"name=l2norm", "trainable=false"});
+      LayerHandle l2 = ml::train::createLayer(
+        "preprocess_l2norm", {"name=l2norm", "trainable=false"});
       v.push_back(l2);
     } else if (variant_ == "CL2N") {
       LayerHandle centering = ml::train::createLayer(
@@ -222,8 +221,6 @@ int main(int argc, char **argv) {
   try {
     app_context.registerFactory(
       nntrainer::createLayer<simpleshot::layers::CenteringLayer>);
-    app_context.registerFactory(
-      nntrainer::createLayer<simpleshot::layers::L2NormLayer>);
   } catch (std::exception &e) {
     std::cerr << "registering factory failed: " << e.what();
     return 1;
index fd35457..7cdb2e1 100644 (file)
 
 #include <centering.h>
 #include <centroid_knn.h>
-#include <l2norm.h>
 #include <layers_common_tests.h>
+#include <preprocess_l2norm_layer.h>
 
+/// @todo move below test to the main repo
 auto semantic_activation_l2norm = LayerSemanticsParamType(
-  nntrainer::createLayer<simpleshot::layers::L2NormLayer>,
-  simpleshot::layers::L2NormLayer::type, {}, 0, false);
+  nntrainer::createLayer<nntrainer::PreprocessL2NormLayer>,
+  nntrainer::PreprocessL2NormLayer::type, {}, 0, false);
 
 auto semantic_activation_centroid_knn = LayerSemanticsParamType(
   nntrainer::createLayer<nntrainer::CentroidKNN>, nntrainer::CentroidKNN::type,
index 1e54709..c301971 100644 (file)
@@ -61,7 +61,10 @@ enum LayerType {
   LAYER_PREPROCESS_TRANSLATE =
     ML_TRAIN_LAYER_TYPE_PREPROCESS_TRANSLATE, /**< Preprocess translate Layer
                                                  type */
-  LAYER_BACKBONE_TFLITE,                      /**< Backbone using TFLite */
+  LAYER_PREPROCESS_L2NORM =
+    ML_TRAIN_LAYER_TYPE_PREPROCESS_L2NORM, /**< Preprocess l2norm Layer
+                                                 type */
+  LAYER_BACKBONE_TFLITE,                   /**< Backbone using TFLite */
   LAYER_LOSS_MSE = 500,             /**< Mean Squared Error Loss Layer type */
   LAYER_LOSS_CROSS_ENTROPY_SIGMOID, /**< Cross Entropy with Sigmoid Loss Layer
                                        type */
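
With the enum value added above, applications can request the layer by type id as well as by key string. A minimal sketch, assuming the `ml::train::createLayer` overloads declared in this header (one taking `LayerType`, one taking the registered key string); the helper names and property strings are illustrative:

```cpp
#include <memory>
#include <string>
#include <vector>

#include <layer.h> // api/ccapi/include/layer.h

// Create the layer via the enum value introduced in this patch ...
std::unique_ptr<ml::train::Layer> l2normByEnum() {
  return ml::train::createLayer(ml::train::LayerType::LAYER_PREPROCESS_L2NORM,
                                {"name=l2norm", "trainable=false"});
}

// ... or via the key string it is registered under (see app_context.cpp below).
std::unique_ptr<ml::train::Layer> l2normByKey() {
  return ml::train::createLayer("preprocess_l2norm", {"name=l2norm"});
}
```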
index afafc39..0e15d48 100644 (file)
@@ -156,6 +156,7 @@ NNTRAINER_SRCS := $(NNTRAINER_ROOT)/nntrainer/models/neuralnet.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/concat_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/preprocess_flip_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/preprocess_translate_layer.cpp \
+                  $(NNTRAINER_ROOT)/nntrainer/layers/preprocess_l2norm_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/embedding.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/rnn.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/lstm.cpp \
index e2c2fdb..2d24028 100644 (file)
@@ -52,6 +52,7 @@
 #include <plugged_optimizer.h>
 #include <pooling2d_layer.h>
 #include <preprocess_flip_layer.h>
+#include <preprocess_l2norm_layer.h>
 #include <preprocess_translate_layer.h>
 #include <rnn.h>
 #include <split_layer.h>
@@ -261,6 +262,9 @@ static void add_default_object(AppContext &ac) {
   ac.registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
                      PreprocessTranslateLayer::type,
                      LayerType::LAYER_PREPROCESS_TRANSLATE);
+  ac.registerFactory(nntrainer::createLayer<PreprocessL2NormLayer>,
+                     PreprocessL2NormLayer::type,
+                     LayerType::LAYER_PREPROCESS_L2NORM);
 
   /** register losses */
   ac.registerFactory(nntrainer::createLayer<MSELossLayer>, MSELossLayer::type,
index 2c1b41f..8682ac2 100644 (file)
@@ -15,6 +15,7 @@ layer_sources = [
   'pooling2d_layer.cpp',
   'preprocess_flip_layer.cpp',
   'preprocess_translate_layer.cpp',
+  'preprocess_l2norm_layer.cpp',
   'embedding.cpp',
   'rnn.cpp',
   'acti_func.cpp',
similarity index 74%
rename from Applications/SimpleShot/layers/l2norm.cpp
rename to nntrainer/layers/preprocess_l2norm_layer.cpp
index 1d6b7f1..141376d 100644 (file)
@@ -2,7 +2,7 @@
 /**
  * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
  *
- * @file   l2norm.cpp
+ * @file   preprocess_l2norm_layer.cpp
  * @date   09 Jan 2021
  * @brief  This file contains the simple l2norm layer which normalizes
  * the given feature
 #include <nntrainer_error.h>
 #include <nntrainer_log.h>
 
-#include <l2norm.h>
-
-namespace simpleshot {
-namespace layers {
+#include <preprocess_l2norm_layer.h>
 
+namespace nntrainer {
 static constexpr size_t SINGLE_INOUT_IDX = 0;
 
-void L2NormLayer::finalize(nntrainer::InitLayerContext &context) {
+void PreprocessL2NormLayer::finalize(InitLayerContext &context) {
   const auto &input_dim = context.getInputDimensions()[0];
   if (context.getNumInputs() != 1)
     throw std::invalid_argument(
@@ -40,26 +38,26 @@ void L2NormLayer::finalize(nntrainer::InitLayerContext &context) {
   context.setOutputDimensions(context.getInputDimensions());
 }
 
-void L2NormLayer::forwarding(nntrainer::RunLayerContext &context,
-                             bool training) {
+void PreprocessL2NormLayer::forwarding(RunLayerContext &context,
+                                       bool training) {
   auto &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
   auto &input_ = context.getInput(SINGLE_INOUT_IDX);
 
   input_.multiply(1 / input_.l2norm(), hidden_);
 }
 
-void L2NormLayer::calcDerivative(nntrainer::RunLayerContext &context) {
+void PreprocessL2NormLayer::calcDerivative(RunLayerContext &context) {
   throw std::invalid_argument("[L2Norm::calcDerivative] This Layer "
                               "does not support backward propagation");
 }
 
-void L2NormLayer::setProperty(const std::vector<std::string> &values) {
+void PreprocessL2NormLayer::setProperty(
+  const std::vector<std::string> &values) {
   if (!values.empty()) {
     std::string msg = "[FlattenLayer] Unknown Layer Properties count " +
                       std::to_string(values.size());
-    throw nntrainer::exception::not_supported(msg);
+    throw exception::not_supported(msg);
   }
 }
 
-} // namespace layers
-} // namespace simpleshot
+} // namespace nntrainer
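
For reference, the forwarding above scales the input tensor by the reciprocal of its L2 norm, i.e. out = in / ||in||_2, so the output has unit L2 norm and the layer learns no parameters (which is why calcDerivative is unsupported). A standalone sketch of the same computation on a plain buffer, for illustration only; the layer itself operates on nntrainer Tensors as shown above:

```cpp
#include <cmath>
#include <iostream>
#include <vector>

int main() {
  std::vector<float> feature = {3.0f, 4.0f}; // ||feature||_2 == 5

  float sq_sum = 0.0f;
  for (float v : feature)
    sq_sum += v * v;
  const float l2 = std::sqrt(sq_sum);

  // Same effect as input_.multiply(1 / input_.l2norm(), hidden_)
  for (float &v : feature)
    v /= l2; // -> {0.6, 0.8}

  for (float v : feature)
    std::cout << v << ' ';
  std::cout << '\n';
  return 0;
}
```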
similarity index 59%
rename from Applications/SimpleShot/layers/l2norm.h
rename to nntrainer/layers/preprocess_l2norm_layer.h
index da06d92..62fdff7 100644 (file)
@@ -1,8 +1,9 @@
+
 // SPDX-License-Identifier: Apache-2.0
 /**
  * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
  *
- * @file   l2norm.h
+ * @file   preprocess_l2norm_layer.h
  * @date   09 Jan 2021
  * @brief  This file contains the simple l2norm layer which normalizes
  * the given feature
  *
  */
 
-#ifndef __L2NORM_H__
-#define __L2NORM_H__
+#ifndef __PREPROCESS_L2NORM_LAYER_H__
+#define __PREPROCESS_L2NORM_LAYER_H__
 #include <string>
 
 #include <layer_context.h>
 #include <layer_devel.h>
 #include <node_exporter.h>
 
-namespace simpleshot {
-namespace layers {
+namespace nntrainer {
 
 /**
  * @brief Layer class that l2normalizes a feature vector
  *
  */
-class L2NormLayer : public nntrainer::Layer {
+class PreprocessL2NormLayer : public Layer {
 public:
   /**
    * @brief Construct a new L2norm Layer object
   * that normalizes the given feature with l2norm
    */
-  L2NormLayer() : Layer() {}
+  PreprocessL2NormLayer() : Layer() {}
 
   /**
    *  @brief  Move constructor.
-   *  @param[in] L2NormLayer &&
+   *  @param[in] PreprocessL2NormLayer &&
    */
-  L2NormLayer(L2NormLayer &&rhs) noexcept = default;
+  PreprocessL2NormLayer(PreprocessL2NormLayer &&rhs) noexcept = default;
 
   /**
    * @brief  Move assignment operator.
-   * @parma[in] rhs L2NormLayer to be moved.
+   * @param[in] rhs PreprocessL2NormLayer to be moved.
    */
-  L2NormLayer &operator=(L2NormLayer &&rhs) = default;
+  PreprocessL2NormLayer &operator=(PreprocessL2NormLayer &&rhs) = default;
 
   /**
    * @brief Destroy the Centering Layer object
    *
    */
-  ~L2NormLayer() {}
+  ~PreprocessL2NormLayer() {}
 
   /**
    * @copydoc Layer::finalize(InitLayerContext &context)
    */
-  void finalize(nntrainer::InitLayerContext &context) override;
+  void finalize(InitLayerContext &context) override;
 
   /**
    * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
    */
-  void forwarding(nntrainer::RunLayerContext &context, bool training) override;
+  void forwarding(RunLayerContext &context, bool training) override;
 
   /**
    * @copydoc Layer::calcDerivative(RunLayerContext &context)
    */
-  void calcDerivative(nntrainer::RunLayerContext &context) override;
+  void calcDerivative(RunLayerContext &context) override;
 
   /**
    * @copydoc bool supportBackwarding() const
@@ -76,22 +76,23 @@ public:
   /**
    * @copydoc Layer::exportTo(Exporter &exporter, ExportMethods method)
    */
-  void exportTo(nntrainer::Exporter &exporter,
-                const nntrainer::ExportMethods &method) const override {}
+  void exportTo(Exporter &exporter,
+                const ExportMethods &method) const override {}
 
   /**
    * @copydoc Layer::getType()
    */
-  const std::string getType() const override { return L2NormLayer::type; };
+  const std::string getType() const override {
+    return PreprocessL2NormLayer::type;
+  };
 
   /**
    * @copydoc Layer::setProperty(const std::vector<std::string> &values)
    */
   void setProperty(const std::vector<std::string> &values) override;
 
-  inline static const std::string type = "l2norm";
+  inline static const std::string type = "preprocess_l2norm";
 };
-} // namespace layers
-} // namespace simpleshot
+} // namespace nntrainer
 
-#endif /* __L2NORM__H_ */
+#endif // __PREPROCESS_L2NORM_LAYER_H__