[CAPI] Open centroid KNN
author Jihoon Lee <jhoon.it.lee@samsung.com>
Wed, 18 Aug 2021 08:35:34 +0000 (17:35 +0900)
committer Jijoong Moon <jijoong.moon@samsung.com>
Mon, 23 Aug 2021 12:44:25 +0000 (21:44 +0900)
**Changes proposed in this PR:**
- Move the centroid KNN layer into the core nntrainer layers and delete it from the SimpleShot application (a usage sketch follows below)
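
With the layer registered in `app_context.cpp` and exposed through the new `CentroidKNN()` helper in `api/ccapi/include/layer.h`, applications no longer need to register `simpleshot::layers::CentroidKNN` themselves. A minimal sketch of the intended usage, assuming the helper sits in the `ml::train::layer` namespace like the other helpers in that header and using `num_class=10` purely as an illustrative value:

```cpp
#include <layer.h>  // ccapi header that now declares the CentroidKNN() helper

int main() {
  // Create the layer through the ccapi helper; "num_class" is parsed by the
  // new props::NumClass property and must be a positive integer.
  auto knn = ml::train::layer::CentroidKNN({"num_class=10"});
  return knn == nullptr;  // 0 on success
}
```

Invalid property values now surface through `NNTR_THROW_IF` in `setProperty` instead of the hand-rolled key/value parsing removed from the application copy.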

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Cc: Inki Dae <inki.dae@samsung.com>
Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
Applications/SimpleShot/meson.build
Applications/SimpleShot/task_runner.cpp
Applications/SimpleShot/test/simpleshot_layer_common_tests.cpp
api/ccapi/include/layer.h
jni/Android.mk
nntrainer/app_context.cpp
nntrainer/layers/centroid_knn.cpp [moved from Applications/SimpleShot/layers/centroid_knn.cpp with 85% similarity]
nntrainer/layers/centroid_knn.h [moved from Applications/SimpleShot/layers/centroid_knn.h with 84% similarity]
nntrainer/layers/common_properties.cpp
nntrainer/layers/common_properties.h
nntrainer/layers/meson.build

index 5838cf3..bd8d70b 100644 (file)
@@ -1,6 +1,5 @@
 simpleshot_sources = [
   'simpleshot_utils.cpp',
-  'layers/centroid_knn.cpp',
   'layers/centering.cpp',
   'layers/l2norm.cpp',
 ]
index fb579ab..804d198 100644 (file)
@@ -22,7 +22,6 @@
 #include <nntrainer-api-common.h>
 
 #include "layers/centering.h"
-#include "layers/centroid_knn.h"
 #include "layers/l2norm.h"
 
 namespace simpleshot {
@@ -225,8 +224,6 @@ int main(int argc, char **argv) {
       nntrainer::createLayer<simpleshot::layers::CenteringLayer>);
     app_context.registerFactory(
       nntrainer::createLayer<simpleshot::layers::L2NormLayer>);
-    app_context.registerFactory(
-      nntrainer::createLayer<simpleshot::layers::CentroidKNN>);
   } catch (std::exception &e) {
     std::cerr << "registering factory failed: " << e.what();
     return 1;
index 98bb552..fd35457 100644 (file)
@@ -23,8 +23,8 @@ auto semantic_activation_l2norm = LayerSemanticsParamType(
   simpleshot::layers::L2NormLayer::type, {}, 0, false);
 
 auto semantic_activation_centroid_knn = LayerSemanticsParamType(
-  nntrainer::createLayer<simpleshot::layers::CentroidKNN>,
-  simpleshot::layers::CentroidKNN::type, {"num_class=1"}, 0, false);
+  nntrainer::createLayer<nntrainer::CentroidKNN>, nntrainer::CentroidKNN::type,
+  {"num_class=1"}, 0, false);
 
 auto semantic_activation_centering = LayerSemanticsParamType(
   nntrainer::createLayer<simpleshot::layers::CenteringLayer>,
index e17bb7a..1e54709 100644 (file)
@@ -318,6 +318,14 @@ TimeDistLayer(const std::vector<std::string> &properties = {}) {
 }
 
 /**
+ * @brief Helper function to create Centroid KNN Layer
+ */
+inline std::unique_ptr<Layer>
+CentroidKNN(const std::vector<std::string> &properties = {}) {
+  return createLayer(LayerType::LAYER_CENTROID_KNN, properties);
+}
+
+/**
  * @brief Helper function to create activation layer
  */
 inline std::unique_ptr<Layer>
index 43e7ac8..afafc39 100644 (file)
@@ -163,6 +163,7 @@ NNTRAINER_SRCS := $(NNTRAINER_ROOT)/nntrainer/models/neuralnet.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/time_dist.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/dropout.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/permute_layer.cpp \
+                  $(NNTRAINER_ROOT)/nntrainer/layers/centroid_knn.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/acti_func.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/split_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/common_properties.cpp \
index 2c48cfc..79f6977 100644 (file)
@@ -31,6 +31,7 @@
 #include <activation_layer.h>
 #include <addition_layer.h>
 #include <bn_layer.h>
+#include <centroid_knn.h>
 #include <concat_layer.h>
 #include <conv2d_layer.h>
 #include <cross_entropy_sigmoid_loss_layer.h>
@@ -226,12 +227,7 @@ static void add_default_object(AppContext &ac) {
                      LayerType::LAYER_MULTIOUT);
   ac.registerFactory(nntrainer::createLayer<ConcatLayer>, ConcatLayer::type,
                      LayerType::LAYER_CONCAT);
-  ac.registerFactory(nntrainer::createLayer<PreprocessFlipLayer>,
-                     PreprocessFlipLayer::type,
-                     LayerType::LAYER_PREPROCESS_FLIP);
-  ac.registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
-                     PreprocessTranslateLayer::type,
-                     LayerType::LAYER_PREPROCESS_TRANSLATE);
+
 #ifdef ENABLE_NNSTREAMER_BACKBONE
   ac.registerFactory(nntrainer::createLayer<NNStreamerLayer>,
                      NNStreamerLayer::type,
@@ -257,6 +253,16 @@ static void add_default_object(AppContext &ac) {
                      LayerType::LAYER_SPLIT);
   ac.registerFactory(nntrainer::createLayer<PermuteLayer>, PermuteLayer::type,
                      LayerType::LAYER_PERMUTE);
+  ac.registerFactory(nntrainer::createLayer<CentroidKNN>, CentroidKNN::type,
+                     LayerType::LAYER_CENTROID_KNN);
+
+  /** preprocess layers */
+  ac.registerFactory(nntrainer::createLayer<PreprocessFlipLayer>,
+                     PreprocessFlipLayer::type,
+                     LayerType::LAYER_PREPROCESS_FLIP);
+  ac.registerFactory(nntrainer::createLayer<PreprocessTranslateLayer>,
+                     PreprocessTranslateLayer::type,
+                     LayerType::LAYER_PREPROCESS_TRANSLATE);
 
   /** register losses */
   ac.registerFactory(nntrainer::createLayer<MSELossLayer>, MSELossLayer::type,
similarity index 85%
rename from Applications/SimpleShot/layers/centroid_knn.cpp
rename to nntrainer/layers/centroid_knn.cpp
index fa6db7a..3552098 100644 (file)
 #include <regex>
 #include <sstream>
 
+#include <centroid_knn.h>
 #include <nntrainer_error.h>
 #include <nntrainer_log.h>
+#include <node_exporter.h>
 #include <tensor.h>
 #include <weight.h>
 
-#include <centroid_knn.h>
-#include <simpleshot_utils.h>
-
-namespace simpleshot {
-namespace layers {
+namespace nntrainer {
 
 static constexpr size_t SINGLE_INOUT_IDX = 0;
 
 enum KNNParams { map, num_samples };
 
-void CentroidKNN::setProperty(const std::vector<std::string> &values) {
-  util::Entry e;
+CentroidKNN::CentroidKNN() :
+  Layer(),
+  centroid_knn_props(props::NumClass()),
+  weight_idx({0}) {}
 
-  for (auto &val : values) {
-    e = util::getKeyValue(val);
+CentroidKNN::~CentroidKNN() {}
 
-    if (e.key == "num_class") {
-      num_class = std::stoul(e.value);
-      if (num_class == 0) {
-        throw std::invalid_argument("[CentroidKNN] num_class cannot be zero");
-      }
-    } else {
-      std::string msg = "[CentroidKNN] Unknown Layer Properties count " + val;
-      throw nntrainer::exception::not_supported(msg);
-    }
-  }
+void CentroidKNN::setProperty(const std::vector<std::string> &values) {
+  auto left = loadProperties(values, centroid_knn_props);
+  NNTR_THROW_IF(!left.empty(), std::invalid_argument)
+    << "[Centroid KNN] there are unparsed properties " << left.front();
 }
 
 void CentroidKNN::finalize(nntrainer::InitLayerContext &context) {
@@ -57,10 +50,7 @@ void CentroidKNN::finalize(nntrainer::InitLayerContext &context) {
             "please check");
   }
 
-  if (num_class == 0) {
-    throw std::invalid_argument(
-      "Error: num_class must be a positive non-zero integer");
-  }
+  auto num_class = std::get<props::NumClass>(centroid_knn_props);
 
   auto output_dim = nntrainer::TensorDim({num_class});
   context.setOutputDimensions({output_dim});
@@ -121,7 +111,8 @@ void CentroidKNN::forwarding(nntrainer::RunLayerContext &context,
     }
   }
 
-  for (unsigned int i = 0; i < num_class; ++i) {
+  for (unsigned int i = 0; i < std::get<props::NumClass>(centroid_knn_props);
+       ++i) {
     auto saved_feature =
       map.getSharedDataTensor({feature_len}, i * feature_len);
     // nntrainer::Tensor::Map(map.getData(), {feature_len}, i * feature_len);
@@ -145,6 +136,4 @@ void CentroidKNN::calcDerivative(nntrainer::RunLayerContext &context) {
   throw std::invalid_argument("[CentroidKNN::calcDerivative] This Layer "
                               "does not support backward propagation");
 }
-
-} // namespace layers
-} // namespace simpleshot
+} // namespace nntrainer
similarity index 84%
rename from Applications/SimpleShot/layers/centroid_knn.h
rename to nntrainer/layers/centroid_knn.h
index 9e67aea..83208d6 100644 (file)
  *
  */
 
-#ifndef __NEAREST_NEIGHBORS_H__
-#define __NEAREST_NEIGHBORS_H__
+#ifndef __CENTROID_KNN_H__
+#define __CENTROID_KNN_H__
 #include <string>
 
+#include <common_properties.h>
 #include <layer_context.h>
 #include <layer_devel.h>
-#include <node_exporter.h>
 
-namespace simpleshot {
-namespace layers {
+namespace nntrainer {
 
 /**
  * @brief Centroid KNN layer which takes centroids and does k-nearest neighbor
  * classification
  */
-class CentroidKNN : public nntrainer::Layer {
+class CentroidKNN : public Layer {
 public:
   /**
    * @brief Construct a new NearestNeighbors Layer object that does elementwise
    * subtraction from mean feature vector
    */
-  CentroidKNN() : Layer(), num_class(0), weight_idx({0}) {}
+  CentroidKNN();
 
   /**
    *  @brief  Move constructor.
@@ -45,13 +44,13 @@ public:
    * @brief  Move assignment operator.
   * @param[in] rhs CentroidKNN to be moved.
    */
-  CentroidKNN &operator=(CentroidKNN &&rhs) = default;
+  CentroidKNN &operator=(CentroidKNN &&rhs) noexcept = default;
 
   /**
    * @brief Destroy the NearestNeighbors Layer object
    *
    */
-  ~CentroidKNN() = default;
+  ~CentroidKNN();
 
   /**
    * @copydoc Layer::requireLabel()
@@ -97,10 +96,9 @@ public:
   inline static const std::string type = "centroid_knn";
 
 private:
-  unsigned int num_class;
+  std::tuple<props::NumClass> centroid_knn_props;
   std::array<unsigned int, 2> weight_idx; /**< indices of the weights */
 };
-} // namespace layers
-} // namespace simpleshot
+} // namespace nntrainer
 
-#endif /** __NEAREST_NEIGHBORS_H__ */
+#endif /** __CENTROID_KNN_H__ */
index 61b198e..9b8d945 100644 (file)
@@ -48,6 +48,8 @@ void FilePath::set(const std::string &v) {
 
 std::ifstream::pos_type FilePath::file_size() { return cached_pos_size; }
 
+bool NumClass::isValid(const unsigned int &v) const { return v > 0; }
+
 ConnectionSpec::ConnectionSpec(const std::vector<props::Name> &layer_ids_,
                                const std::string &op_type_) :
   op_type(op_type_),
index f170683..72aeec6 100644 (file)
@@ -297,6 +297,21 @@ public:
 private:
   std::ifstream::pos_type cached_pos_size;
 };
+
+/**
+ * @brief Number of class
+ * @todo deprecate this
+ */
+class NumClass final : public nntrainer::Property<unsigned int> {
+public:
+  using prop_tag = uint_prop_tag;                 /**< property type */
+  static constexpr const char *key = "num_class"; /**< unique key to access */
+
+  /**
+   * @copydoc nntrainer::Property<unsigned int>::isValid(const unsigned int &v);
+   */
+  bool isValid(const unsigned int &v) const override;
+};
 } // namespace props
 } // namespace nntrainer
 
index 23b783d..2c1b41f 100644 (file)
@@ -26,6 +26,7 @@ layer_sources = [
   'layer_impl.cpp',
   'gru.cpp',
   'dropout.cpp',
+  'centroid_knn.cpp',
   'layer_context.cpp'
 ]