[SimpleShot] Add centering layer
author Jihoon Lee <jhoon.it.lee@samsung.com>
Sat, 9 Jan 2021 04:23:58 +0000 (13:23 +0900)
committer Jijoong Moon <jijoong.moon@samsung.com>
Tue, 2 Mar 2021 01:29:05 +0000 (10:29 +0900)
Add centering layer with tests

**minor changes**
- rename simpleshot centering test
- add test_util lib to simpleshot test

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
12 files changed:
Applications/SimpleShot/layers/centering.cpp [new file with mode: 0644]
Applications/SimpleShot/layers/centering.h [new file with mode: 0644]
Applications/SimpleShot/layers/l2norm.cpp [new file with mode: 0644]
Applications/SimpleShot/layers/l2norm.h [new file with mode: 0644]
Applications/SimpleShot/meson.build
Applications/SimpleShot/test/meson.build
Applications/SimpleShot/test/simpleshot_centering_test.cpp [new file with mode: 0644]
Applications/SimpleShot/test/simpleshot_l2norm_test.cpp [new file with mode: 0644]
Applications/SimpleShot/test/simpleshot_utils_test.cpp [moved from Applications/SimpleShot/test/simpleshot_utils.cpp with 97% similarity]
meson.build
nntrainer/layers/layer.cpp
nntrainer/layers/layer_internal.h

diff --git a/Applications/SimpleShot/layers/centering.cpp b/Applications/SimpleShot/layers/centering.cpp
new file mode 100644 (file)
index 0000000..1882310
--- /dev/null
@@ -0,0 +1,88 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file   centering.cpp
+ * @date   08 Jan 2021
+ * @brief  This file contains the simple centering layer
+ * @see    https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug    No known bugs except for NYI items
+ *
+ */
+#include <fstream>
+#include <iostream>
+#include <regex>
+#include <sstream>
+
+#include <nntrainer_error.h>
+#include <tensor.h>
+#include <tensor_dim.h>
+
+#include <centering.h>
+#include <simpleshot_utils.h>
+
+namespace simpleshot {
+namespace layers {
+
+const std::string CenteringLayer::type = "centering";
+
+CenteringLayer::CenteringLayer(const std::string &feature_path_) :
+  Layer(),
+  feature_path(feature_path_) {}
+
+int CenteringLayer::setProperty(std::vector<std::string> values) {
+  const std::string FEATURE_PATH("feature_path");
+  util::Entry e;
+
+  std::vector<std::string> unhandled_values;
+
+  for (auto &val : values) {
+    try {
+      e = util::getKeyValue(val);
+    } catch (std::invalid_argument &e) {
+      std::cerr << e.what() << std::endl;
+      return ML_ERROR_INVALID_PARAMETER;
+    }
+
+    if (e.key == FEATURE_PATH) {
+      feature_path = e.value;
+    } else {
+      unhandled_values.push_back(val);
+    }
+  }
+
+  return nntrainer::Layer::setProperty(unhandled_values);
+}
+
+int CenteringLayer::initialize(nntrainer::Manager &manager) {
+  output_dim[0] = input_dim[0];
+
+  return ML_ERROR_NONE;
+}
+
+void CenteringLayer::read(std::ifstream &file) {
+  mean_feature_vector = nntrainer::Tensor(input_dim[0]);
+  std::ifstream f(feature_path, std::ios::in | std::ios::binary);
+  if (!f.good()) {
+    throw std::invalid_argument(
+      "[CenteringLayer::read] cannot read feature vector");
+  }
+  mean_feature_vector.read(f);
+}
+
+void CenteringLayer::forwarding(bool training) {
+  std::cout << net_input[0]->getVariableRef().getDim();
+  std::cout << net_hidden[0]->getVariableRef().getDim();
+  std::cout << mean_feature_vector.getDim();
+  net_input[0]->getVariableRef().add(mean_feature_vector,
+                                     net_hidden[0]->getVariableRef(), -1);
+}
+
+void CenteringLayer::calcDerivative() {
+  throw std::invalid_argument("[CenteringLayer::calcDerivative] This Layer "
+                              "does not support backward propagation");
+}
+
+} // namespace layers
+} // namespace simpleshot
diff --git a/Applications/SimpleShot/layers/centering.h b/Applications/SimpleShot/layers/centering.h
new file mode 100644 (file)
index 0000000..404f272
--- /dev/null
@@ -0,0 +1,122 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file   centering.h
+ * @date   08 Jan 2021
+ * @brief  This file contains the simple centering layer which has hardcoded
+ * mean feature vectors from given combinations
+ * @see    https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug    No known bugs except for NYI items
+ *
+ */
+
+#ifndef __CENTERING_H__
+#define __CENTERING_H__
+#include <string>
+
+/// @todo migrate these to API
+#include <layer_internal.h>
+#include <manager.h>
+
+#include <tensor.h>
+
+namespace simpleshot {
+namespace layers {
+
+/// @todo inherit this to API
+// class CenteringLayer : public ml::train::Layer {
+class CenteringLayer : public nntrainer::Layer {
+public:
+  /**
+   * @brief Construct a new Centering Layer object that does elementwise
+   * subtraction from mean feature vector
+   */
+  CenteringLayer() : Layer() {}
+
+  /**
+   * @brief Construct a new Centering Layer object
+   *
+   * @param feature_path feature path to read the variable
+   */
+  CenteringLayer(const std::string &feature_path);
+
+  /**
+   *  @brief  Move constructor.
+   *  @param[in] CenteringLayer &&
+   */
+  CenteringLayer(CenteringLayer &&rhs) noexcept = default;
+
+  /**
+   * @brief  Move assignment operator.
+   * @param[in] rhs CenteringLayer to be moved.
+   */
+  CenteringLayer &operator=(CenteringLayer &&rhs) = default;
+
+  /**
+   * @brief Destroy the Centering Layer object
+   *
+   */
+  ~CenteringLayer() {}
+
+  using nntrainer::Layer::setProperty;
+
+  /**
+   * @brief     set Property of layer,
+   * feature_path: feature *.bin that contains mean feature vector that will be
+   * used for the model.
+   * @param[in] values values of property
+   * @retval #ML_ERROR_NONE Successful.
+   * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
+   */
+  int setProperty(std::vector<std::string> values) override;
+
+  /**
+   * @brief initializing nntrainer
+   *
+   * @return int ML_ERROR_NONE if success
+   */
+  int initialize(nntrainer::Manager &manager) override;
+
+  /**
+   * @brief nntrainer forwarding function
+   */
+  void forwarding(bool training = true) override;
+
+  /**
+   * @brief     calc the derivative to be passed to the previous layer
+   */
+  void calcDerivative() override;
+
+  /**
+   * @brief     read layer Weight & Bias data from file
+   * @param[in] file input file stream
+   */
+  void read(std::ifstream &file) override;
+
+  /**
+   * @brief get boolean if the function is trainable
+   *
+   * @return true trainable
+   * @return false not trainable
+   */
+  bool getTrainable() noexcept override { return false; }
+
+  /**
+   * @brief Get the Type object
+   *
+   * @return const std::string
+   */
+  const std::string getType() const override { return CenteringLayer::type; }
+
+  static const std::string type;
+
+private:
+  std::string feature_path;
+  nntrainer::Tensor mean_feature_vector;
+};
+} // namespace layers
+} // namespace simpleshot
+
+#endif /* __CENTERING_H__ */
diff --git a/Applications/SimpleShot/layers/l2norm.cpp b/Applications/SimpleShot/layers/l2norm.cpp
new file mode 100644 (file)
index 0000000..05052ec
--- /dev/null
@@ -0,0 +1,53 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file   l2norm.cpp
+ * @date   09 Jan 2021
+ * @brief  This file contains the simple l2norm layer which normalizes
+ * the given feature
+ * @see    https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug    No known bugs except for NYI items
+ *
+ */
+
+#include <iostream>
+#include <regex>
+#include <sstream>
+
+#include <nntrainer_error.h>
+#include <nntrainer_log.h>
+#include <tensor.h>
+
+#include <l2norm.h>
+
+namespace simpleshot {
+namespace layers {
+
+const std::string L2NormLayer::type = "l2norm";
+
+int L2NormLayer::initialize(nntrainer::Manager &manager) {
+  if (input_dim[0].channel() != 1 || input_dim[0].height() != 1) {
+    ml_logw("l2norm layer is designed for channel and height is 1 for now, "
+            "please check");
+  }
+  output_dim[0] = input_dim[0];
+
+  return ML_ERROR_NONE;
+}
+
+void L2NormLayer::forwarding(bool training) {
+  auto &hidden_ = net_hidden[0]->getVariableRef();
+  auto &input_ = net_input[0]->getVariableRef();
+
+  input_.multiply(1 / input_.l2norm(), hidden_);
+}
+
+void L2NormLayer::calcDerivative() {
+  throw std::invalid_argument("[L2Norm::calcDerivative] This Layer "
+                              "does not support backward propagation");
+}
+
+} // namespace layers
+} // namespace simpleshot
diff --git a/Applications/SimpleShot/layers/l2norm.h b/Applications/SimpleShot/layers/l2norm.h
new file mode 100644 (file)
index 0000000..1a19d2c
--- /dev/null
@@ -0,0 +1,95 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file   l2norm.h
+ * @date   09 Jan 2021
+ * @brief  This file contains the simple l2norm layer which normalizes
+ * the given feature
+ * @see    https://github.com/nnstreamer/nntrainer
+ * @author Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug    No known bugs except for NYI items
+ *
+ */
+
+#ifndef __L2NORM__H_
+#define __L2NORM__H_
+#include <string>
+
+/// @todo migrate these to API
+#include <layer_internal.h>
+#include <manager.h>
+
+#include <tensor.h>
+
+namespace simpleshot {
+namespace layers {
+
+/// @todo inherit this to API
+// class L2NormLayer : public ml::train::Layer {
+class L2NormLayer : public nntrainer::Layer {
+public:
+  /**
+   * @brief Construct a new L2norm Layer object
+   * that normalizes given feature with l2norm
+   */
+  L2NormLayer() : Layer() {}
+
+  /**
+   *  @brief  Move constructor.
+   *  @param[in] L2NormLayer &&
+   */
+  L2NormLayer(L2NormLayer &&rhs) noexcept = default;
+
+  /**
+   * @brief  Move assignment operator.
+   * @param[in] rhs L2NormLayer to be moved.
+   */
+  L2NormLayer &operator=(L2NormLayer &&rhs) = default;
+
+  /**
+   * @brief Destroy the L2Norm Layer object
+   *
+   */
+  ~L2NormLayer() {}
+
+  using nntrainer::Layer::setProperty;
+
+  /**
+   * @brief initializing nntrainer
+   *
+   * @return int ML_ERROR_NONE if success
+   */
+  int initialize(nntrainer::Manager &manager) override;
+
+  /**
+   * @brief nntrainer forwarding function
+   */
+  void forwarding(bool training = true) override;
+
+  /**
+   * @brief     calc the derivative to be passed to the previous layer
+   */
+  void calcDerivative() override;
+
+  /**
+   * @brief Get the Type object
+   *
+   * @return const std::string
+   */
+  const std::string getType() const override { return L2NormLayer::type; }
+
+  /**
+   * @brief get boolean if the function is trainable
+   *
+   * @return true trainable
+   * @return false not trainable
+   */
+  bool getTrainable() noexcept override { return false; }
+
+  static const std::string type;
+};
+} // namespace layers
+} // namespace simpleshot
+
+#endif /* __L2NORM__H_ */
index 5b16b7d..9d490d3 100644 (file)
@@ -1,12 +1,17 @@
 simpleshot_sources = [
-  'simpleshot_utils.cpp'
+  'simpleshot_utils.cpp',
+  'layers/centering.cpp',
+  'layers/l2norm.cpp',
 ]
 
-simpleshot_inc = include_directories('.')
-
+simpleshot_inc = include_directories([
+  '.',
+  'layers',
+])
 
 e = executable('simpleshot_runner',
   ['task_runner.cpp'] + simpleshot_sources,
+  include_directories: simpleshot_inc,
   dependencies: [app_utils_dep, nntrainer_dep, tflite_dep],
   install: get_option('install-app'),
   install_dir: application_install_dir
index 07f408e..2849c79 100644 (file)
@@ -2,16 +2,20 @@ gtest_dep_with_main = dependency('gtest', main : true, required : false)
 
 
 test_target = [
-  'simpleshot_utils'
+  'simpleshot_utils_test.cpp',
+  'simpleshot_centering_test.cpp',
+  'simpleshot_l2norm_test.cpp',
 ]
 
-foreach target: test_target
-  exe = executable(
-    target,
-    target + '.cpp',
-    dependencies: [gtest_dep_with_main, simpleshot_test_dep],
-    install: get_option('enable-test'),
-    install_dir: application_install_dir
-  )
-  test(target, exe)
-endforeach
+exe = executable(
+  'simpleshot_tests', test_target,
+  dependencies: [gtest_dep_with_main,
+      simpleshot_test_dep,
+      nntrainer_dep,
+      nntrainer_ccapi_dep,
+      nntrainer_testutil_dep],
+  install: get_option('enable-test'),
+  install_dir: application_install_dir
+)
+test('simpleshot_tests', exe)
+
diff --git a/Applications/SimpleShot/test/simpleshot_centering_test.cpp b/Applications/SimpleShot/test/simpleshot_centering_test.cpp
new file mode 100644 (file)
index 0000000..ebcdc2c
--- /dev/null
@@ -0,0 +1,70 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file       simpleshot_centering_test.cpp
+ * @date       08 Jan 2021
+ * @brief      test for simpleshot centering layer
+ * @see                https://github.com/nnstreamer/nntrainer
+ * @author     Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug                No known bugs except for NYI items
+ */
+
+#include <gtest/gtest.h>
+
+#include <fstream>
+#include <memory>
+
+#include <app_context.h>
+#include <manager.h>
+#include <nntrainer_test_util.h>
+
+#include <layers/centering.h>
+
+namespace simpleshot {
+namespace layers {
+
+TEST(centering, simple_functions) {
+  std::ofstream file("feature.bin", std::ios::out | std::ios::binary);
+  ASSERT_TRUE(file.good());
+
+  nntrainer::Tensor feature(4);
+  feature.setRandNormal();
+  feature.save(file);
+  file.close();
+
+  auto &app_context = nntrainer::AppContext::Global();
+  app_context.registerFactory(ml::train::createLayer<CenteringLayer>);
+
+  auto c = app_context.createObject<ml::train::Layer>(
+    "centering", {"feature_path=feature.bin", "input_shape=1:1:4"});
+
+  std::unique_ptr<CenteringLayer> layer(
+    static_cast<CenteringLayer *>(c.release()));
+
+  nntrainer::Manager manager;
+
+  manager.setInferenceInOutMemoryOptimization(false);
+  std::ifstream stub;
+  layer->initialize(manager);
+  layer->read(stub);
+  layer->setInputBuffers(manager.trackLayerInputs(
+    layer->getType(), layer->getName(), layer->getInputDimension()));
+  layer->setOutputBuffers(manager.trackLayerOutputs(
+    layer->getType(), layer->getName(), layer->getOutputDimension()));
+
+  manager.initializeTensors(true);
+  manager.allocateTensors();
+  auto t = MAKE_SHARED_TENSOR(randUniform(1, 1, 1, 4));
+
+  {
+    auto actual = layer->forwarding_with_val({t});
+    EXPECT_EQ(*actual[0], t->subtract(feature));
+  }
+
+  int status = remove("feature.bin");
+  ASSERT_EQ(status, 0);
+}
+
+} // namespace layers
+} // namespace simpleshot
diff --git a/Applications/SimpleShot/test/simpleshot_l2norm_test.cpp b/Applications/SimpleShot/test/simpleshot_l2norm_test.cpp
new file mode 100644 (file)
index 0000000..894e838
--- /dev/null
@@ -0,0 +1,54 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
+ *
+ * @file       simpleshot_l2norm_test.cpp
+ * @date       08 Jan 2021
+ * @brief      test for simpleshot l2norm layer
+ * @see                https://github.com/nnstreamer/nntrainer
+ * @author     Jihoon Lee <jhoon.it.lee@samsung.com>
+ * @bug                No known bugs except for NYI items
+ */
+
+#include <gtest/gtest.h>
+
+#include <memory>
+
+#include <app_context.h>
+#include <manager.h>
+#include <nntrainer_test_util.h>
+
+#include <layers/l2norm.h>
+
+namespace simpleshot {
+namespace layers {
+
+TEST(l2norm, simple_functions) {
+  auto &app_context = nntrainer::AppContext::Global();
+  app_context.registerFactory(ml::train::createLayer<L2NormLayer>);
+
+  auto c =
+    app_context.createObject<ml::train::Layer>("l2norm", {"input_shape=1:1:4"});
+
+  std::unique_ptr<nntrainer::Layer> layer(
+    static_cast<nntrainer::Layer *>(c.release()));
+
+  nntrainer::Manager manager;
+  manager.setInferenceInOutMemoryOptimization(false);
+  layer->setInputBuffers(manager.trackLayerInputs(
+    layer->getType(), layer->getName(), layer->getInputDimension()));
+  layer->setOutputBuffers(manager.trackLayerOutputs(
+    layer->getType(), layer->getName(), layer->getOutputDimension()));
+
+  manager.initializeTensors(true);
+  manager.allocateTensors();
+  auto t = MAKE_SHARED_TENSOR(randUniform(1, 1, 1, 4));
+
+  {
+    auto actual = layer->forwarding_with_val({t});
+    EXPECT_EQ(*actual[0], t->divide(t->l2norm()));
+  }
+}
+
+} // namespace layers
+} // namespace simpleshot
@@ -2,7 +2,7 @@
 /**
  * Copyright (C) 2020 Jihoon Lee <jhoon.it.lee@samsung.com>
  *
- * @file       utils_test.cpp
+ * @file       simpleshot_utils_test.cpp
  * @date       08 Jan 2021
  * @brief      test for simpleshot utils
  * @see                https://github.com/nnstreamer/nntrainer
index 8c2e3c9..382b83e 100644 (file)
@@ -182,6 +182,14 @@ subdir('nntrainer')
 # Build api
 subdir('api')
 
+if get_option('enable-test')
+  if gtest_dep.found()
+    subdir('test')
+  else
+    error('test enabled but gtest not found')
+  endif
+endif
+
 if get_option('enable-app')
   jsoncpp_dep = dependency('jsoncpp') # jsoncpp
   libcurl_dep = dependency('libcurl')
@@ -191,14 +199,6 @@ if get_option('enable-app')
   subdir('Applications')
 endif
 
-if get_option('enable-test')
-  if gtest_dep.found()
-    subdir('test')
-  else
-    error('test enabled but gtest not found')
-  endif
-endif
-
 if get_option('enable-nnstreamer-tensor-filter')
   nnstreamer_dep = dependency('nnstreamer', required: true)
   subdir('nnstreamer/tensor_filter')
index 0bfbad5..dc27b84 100644 (file)
@@ -20,6 +20,8 @@
  * @bug                No known bugs except for NYI items
  *
  */
+#include <ostream>
+#include <sstream>
 
 #include <layer_internal.h>
 #include <nntrainer_error.h>
@@ -94,8 +96,12 @@ void Layer::copy(std::shared_ptr<Layer> l) {
 sharedConstTensors Layer::forwarding_with_val(sharedConstTensors input,
                                               sharedConstTensors label) {
 
-  if (net_input.size() != input.size())
-    throw std::invalid_argument("Number of inputs mismatched");
+  if (net_input.size() != input.size()) {
+    std::stringstream ss;
+    ss << "Number of inputs mismatched, given: " << input.size()
+       << " expected: " << net_input.size();
+    throw std::invalid_argument(ss.str().c_str());
+  }
 
   for (unsigned int i = 0; i < num_inputs; ++i) {
     net_input[i]->getVariableRef() = input[i]->clone();
index 9438d29..9bac471 100644 (file)
@@ -250,7 +250,7 @@ public:
    * @brief     get trainable for this layer
    * @retval train to enable/disable train
    */
-  bool getTrainable() noexcept { return trainable; }
+  virtual bool getTrainable() noexcept { return trainable; }
 
   /**
    * @brief     get all weights of the layer