[LayerImpl] Maintain LayerImpl properties with props
author    hyeonseok lee <hs89.lee@samsung.com>
Mon, 13 Sep 2021 06:52:27 +0000 (15:52 +0900)
committer Jijoong Moon <jijoong.moon@samsung.com>
Mon, 13 Sep 2021 11:59:57 +0000 (20:59 +0900)
 - All the LayerImpl properties will be maintained with props

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: hyeonseok lee <hs89.lee@samsung.com>
nntrainer/layers/common_properties.cpp
nntrainer/layers/common_properties.h
nntrainer/layers/conv2d_layer.cpp
nntrainer/layers/embedding.cpp
nntrainer/layers/fc_layer.cpp
nntrainer/layers/gru.cpp
nntrainer/layers/layer_impl.cpp
nntrainer/layers/layer_impl.h
nntrainer/layers/lstm.cpp
nntrainer/layers/rnn.cpp
test/unittest/unittest_base_properties.cpp

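In short: the four ad-hoc LayerImpl members (weight regularizer, its constant, and the weight/bias initializers) become typed entries in a props tuple, so string parsing, validation, and export all go through the generic property machinery. A minimal caller sketch, using the setProperty() signature and the key/enum strings declared in the diffs below (the 0.001 constant is an arbitrary example value):

    // Hypothetical caller; keys and enum names are taken from the
    // common_properties.h hunks below. Invalid values now throw from the
    // property setters instead of being checked ad hoc in LayerImpl.
    layer->setProperty({"weight_regularizer=l2norm",
                        "weight_regularizer_constant=0.001",
                        "weight_initializer=he_uniform"});
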
index b237032f53fecbbf1eb501bd0de1ca87499917d1..aaefc76e34f03866dc3bc9c7efa8669f34fe8ee1 100644 (file)
@@ -206,6 +206,14 @@ std::array<unsigned int, 4> Padding2D::compute(const TensorDim &input,
 
 std::string ConnectionSpec::NoneType = "";
 
+WeightRegularizerConstant::WeightRegularizerConstant(float value) {
+  set(value);
+}
+
+bool WeightRegularizerConstant::isValid(const float &value) const {
+  return value >= 0.0f;
+}
+
 HiddenStateActivation::HiddenStateActivation(ActivationTypeInfo::Enum value) {
   set(value);
 };
@@ -214,6 +222,10 @@ RecurrentActivation::RecurrentActivation(ActivationTypeInfo::Enum value) {
   set(value);
 };
 
+WeightInitializer::WeightInitializer(Tensor::Initializer value) { set(value); }
+
+BiasInitializer::BiasInitializer(Tensor::Initializer value) { set(value); }
+
 BNPARAMS_MU_INIT::BNPARAMS_MU_INIT(Tensor::Initializer value) { set(value); }
 
 BNPARAMS_VAR_INIT::BNPARAMS_VAR_INIT(Tensor::Initializer value) { set(value); }
@@ -226,6 +238,14 @@ BNPARAMS_BETA_INIT::BNPARAMS_BETA_INIT(Tensor::Initializer value) {
   set(value);
 }
 
+WeightRegularizer::WeightRegularizer(nntrainer::WeightRegularizer value) {
+  set(value);
+}
+
+bool WeightRegularizer::isValid(
+  const nntrainer::WeightRegularizer &value) const {
+  return value != nntrainer::WeightRegularizer::UNKNOWN;
+}
 } // namespace props
 
 static const std::vector<std::pair<char, std::string>>
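The constructors added above all funnel their default through set(), which is what makes the new isValid() overrides bite. The Property base class itself is not part of this diff, so the following is a self-contained sketch of the assumed pattern, not the real base_properties.h: set() consults the virtual isValid() hook and rejects bad values, so subclasses only override validation.

    #include <stdexcept>

    // Illustrative stand-in for nntrainer::Property<T> (assumed behavior,
    // not the real base class): every assignment runs the isValid() hook.
    template <typename T> class PropertySketch {
    public:
      virtual ~PropertySketch() = default;
      virtual bool isValid(const T &v) const { return true; }
      void set(const T &v) {
        if (!isValid(v))
          throw std::invalid_argument("property value is not valid");
        value = v;
      }
      const T &get() const { return value; }

    private:
      T value{};
    };

    // Mirrors WeightRegularizerConstant above: constants must be >= 0.
    class WeightRegularizerConstantSketch : public PropertySketch<float> {
    public:
      WeightRegularizerConstantSketch(float v = 1.0f) { set(v); }
      bool isValid(const float &v) const override { return v >= 0.0f; }
    };
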
index 1305c8e00788d0c85c02b7c063b369849cab1f03..968757338313cf599e12cbae86f0b98a5b5ff7c2 100644 (file)
@@ -19,6 +19,7 @@
 
 #include <base_properties.h>
 #include <tensor.h>
+#include <tensor_wrap_specs.h>
 
 namespace nntrainer {
 
@@ -552,6 +553,32 @@ public:
   bool isValid(const unsigned int &v) const override;
 };
 
+/**
+ * @brief WeightRegularizerConstant property, this defines how much to
+ * regularize the weight
+ *
+ */
+class WeightRegularizerConstant : public nntrainer::Property<float> {
+
+public:
+  /**
+   * @brief Construct a new WeightRegularizerConstant object
+   *
+   */
+  WeightRegularizerConstant(float value = 1.0f);
+  static constexpr const char *key =
+    "weight_regularizer_constant"; /**< unique key to access */
+  using prop_tag = float_prop_tag; /**< property type */
+
+  /**
+   * @brief check if given value is valid
+   *
+   * @param value value to check
+   * @return bool true if valid
+   */
+  bool isValid(const float &value) const override;
+};
+
 /******** below section is for enumerations ***************/
 /**
  * @brief     Enumeration of activation function type
@@ -626,6 +653,35 @@ struct InitializerInfo {
     "he_normal",     "he_uniform",    "none"};
 };
 
+/**
+ * @brief WeightInitializer Initialization Enumeration Information
+ *
+ */
+class WeightInitializer final : public EnumProperty<InitializerInfo> {
+public:
+  /**
+   * @brief Construct a WeightInitializer object
+   */
+  WeightInitializer(
+    Tensor::Initializer value = Tensor::Initializer::XAVIER_UNIFORM);
+  using prop_tag = enum_class_prop_tag;
+  static constexpr const char *key = "weight_initializer";
+};
+
+/**
+ * @brief BiasInitializer Initialization Enumeration Information
+ *
+ */
+class BiasInitializer final : public EnumProperty<InitializerInfo> {
+public:
+  /**
+   * @brief Construct a BiasInitializer object
+   */
+  BiasInitializer(Tensor::Initializer value = Tensor::Initializer::ZEROS);
+  using prop_tag = enum_class_prop_tag;
+  static constexpr const char *key = "bias_initializer";
+};
+
 /**
  * @brief BNPARAMS_MU_INIT Initialization Enumeration Information
  *
@@ -682,6 +738,41 @@ public:
   static constexpr const char *key = "beta_initializer";
 };
 
+/**
+ * @brief     Enumeration of tensor regularization type
+ */
+struct RegularizerInfo {
+  using Enum = nntrainer::WeightRegularizer;
+  static constexpr std::initializer_list<Enum> EnumList = {
+    Enum::L2NORM, Enum::NONE, Enum::UNKNOWN};
+
+  static constexpr const char *EnumStr[] = {"l2norm", "none", "unknown"};
+};
+
+/**
+ * @brief WeightRegularizer Regularization Enumeration Information
+ *
+ */
+class WeightRegularizer final : public EnumProperty<RegularizerInfo> {
+public:
+  /**
+   * @brief Construct a WeightRegularizer object
+   */
+  WeightRegularizer(
+    nntrainer::WeightRegularizer value = nntrainer::WeightRegularizer::NONE);
+  using prop_tag = enum_class_prop_tag;
+  static constexpr const char *key = "weight_regularizer";
+
+  /**
+   * @brief WeightRegularizer validator
+   *
+   * @param value nntrainer::WeightRegularizer to validate
+   * @retval true if value is not nntrainer::WeightRegularizer::UNKNOWN
+   * @retval false if value is nntrainer::WeightRegularizer::UNKNOWN
+   */
+  bool isValid(const nntrainer::WeightRegularizer &value) const override;
+};
+
 /**
  * @brief     Enumeration of pooling type
  */
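Each *Info struct pairs an EnumList with an index-aligned EnumStr array; the EnumProperty machinery (not shown in this diff) converts between the enum and its serialized name through that pairing. A rough model of the lookup, assuming the two arrays stay index-aligned as declared above:

    #include <cstddef>
    #include <cstring>
    #include <stdexcept>

    // Hypothetical helper showing how an Info struct's EnumStr/EnumList
    // pairing supports string -> enum conversion (e.g. RegularizerInfo).
    template <typename Info>
    typename Info::Enum enumFromString(const char *name) {
      std::size_t i = 0;
      for (auto e : Info::EnumList) {
        if (std::strcmp(Info::EnumStr[i], name) == 0)
          return e;
        ++i;
      }
      throw std::invalid_argument("unknown enum name");
    }

    // enumFromString<RegularizerInfo>("l2norm")
    //   == nntrainer::WeightRegularizer::L2NORM
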
index d2066b0fcd34c3ff14f8604cc90bd24e7589332f..8f8e79a70146eb6b12a8c8a8c8d09ad15f6329e1 100644 (file)
@@ -291,6 +291,14 @@ void Conv2DLayer::finalize(InitLayerContext &context) {
 
   const TensorDim &in_dim = context.getInputDimensions()[0];
 
+  auto &weight_regularizer =
+    std::get<props::WeightRegularizer>(*layer_impl_props);
+  auto &weight_regularizer_constant =
+    std::get<props::WeightRegularizerConstant>(*layer_impl_props);
+  auto &weight_initializer =
+    std::get<props::WeightInitializer>(*layer_impl_props);
+  auto &bias_initializer = std::get<props::BiasInitializer>(*layer_impl_props);
+
   unsigned int filter_size = std::get<props::FilterSize>(conv_props);
   auto &kernel_size =
     std::get<std::array<props::KernelSize, CONV2D_DIM>>(conv_props);
index f8847f511ad50ff5994959d40cce18978b53709b..3c9e38022038b1c6f20bc674558f6e8939d06470 100644 (file)
@@ -40,6 +40,13 @@ void EmbeddingLayer::finalize(InitLayerContext &context) {
       "Embedding layer takes only one for channel size");
   }
 
+  auto &weight_regularizer =
+    std::get<props::WeightRegularizer>(*layer_impl_props);
+  auto &weight_regularizer_constant =
+    std::get<props::WeightRegularizerConstant>(*layer_impl_props);
+  auto &weight_initializer =
+    std::get<props::WeightInitializer>(*layer_impl_props);
+
   unsigned int in_dim = std::get<props::InDim>(embedding_props);
   unsigned int out_dim = std::get<props::OutDim>(embedding_props);
 
index a216ca670691e4ce41a71df33eb2a89f1f1c8b2e..add44d00e2c6cf3cfe56e0de230fe58ff77a04f1 100644 (file)
@@ -35,6 +35,14 @@ static constexpr size_t SINGLE_INOUT_IDX = 0;
 enum FCParams { weight, bias };
 
 void FullyConnectedLayer::finalize(InitLayerContext &context) {
+  auto &weight_regularizer =
+    std::get<props::WeightRegularizer>(*layer_impl_props);
+  auto &weight_regularizer_constant =
+    std::get<props::WeightRegularizerConstant>(*layer_impl_props);
+  auto &weight_initializer =
+    std::get<props::WeightInitializer>(*layer_impl_props);
+  auto &bias_initializer = std::get<props::BiasInitializer>(*layer_impl_props);
+
   auto unit = std::get<props::Unit>(fc_props).get();
 
   if (context.getNumInputs() != 1) {
index e741b55f9512d649750b3c4d54ba3be49f2ad602..19793eb7482fb87670ed8a5143503f14aa5e694d 100644 (file)
@@ -67,6 +67,14 @@ GRULayer::GRULayer() :
 // - bias_h ( hidden bias )
 //  : [1, 1, 1, unit (hidden_size) x NUM_GATE] -> f, g, i, o
 void GRULayer::finalize(InitLayerContext &context) {
+  auto &weight_regularizer =
+    std::get<props::WeightRegularizer>(*layer_impl_props);
+  auto &weight_regularizer_constant =
+    std::get<props::WeightRegularizerConstant>(*layer_impl_props);
+  auto &weight_initializer =
+    std::get<props::WeightInitializer>(*layer_impl_props);
+  auto &bias_initializer = std::get<props::BiasInitializer>(*layer_impl_props);
+
   auto unit = std::get<props::Unit>(gru_props).get();
   auto &hidden_state_activation_type =
     std::get<props::HiddenStateActivation>(gru_props);
index ef9af6eeee676488793dfc4920a156794e98957b..b24de566afcfc366eb7713867b44c2238311331e 100644 (file)
 namespace nntrainer {
 
 LayerImpl::LayerImpl() :
-  layer_impl_props(std::make_unique<std::tuple<>>()),
-  weight_regularizer(WeightRegularizer::NONE),
-  weight_regularizer_constant(1.0f),
-  weight_initializer(Tensor::Initializer::XAVIER_UNIFORM),
-  bias_initializer(Tensor::Initializer::ZEROS) {}
+  layer_impl_props(
+    std::make_unique<
+      std::tuple<props::WeightRegularizer, props::WeightRegularizerConstant,
+                 props::WeightInitializer, props::BiasInitializer>>()) {}
 
 void LayerImpl::setProperty(const std::vector<std::string> &values) {
-  loadProperties(values, *layer_impl_props);
-
-  /// @todo: deprecate this in favor of loadProperties
-  for (unsigned int i = 0; i < values.size(); ++i) {
-    std::string key;
-    std::string value;
-    std::stringstream ss;
-
-    if (getKeyValue(values[i], key, value) != ML_ERROR_NONE) {
-      throw std::invalid_argument("Error parsing the property: " + values[i]);
-    }
-
-    if (value.empty()) {
-      ss << "value is empty: key: " << key << ", value: " << value;
-      throw std::invalid_argument(ss.str());
-    }
-
-    /// @note this calls derived setProperty if available
-    setProperty(key, value);
-  }
-}
-
-void LayerImpl::setProperty(const std::string &type_str,
-                            const std::string &value) {
-  using PropertyType = nntrainer::Layer::PropertyType;
-
-  int status = ML_ERROR_NONE;
-  nntrainer::Layer::PropertyType type =
-    static_cast<nntrainer::Layer::PropertyType>(parseLayerProperty(type_str));
-
-  switch (type) {
-  case PropertyType::weight_regularizer:
-    if (!value.empty()) {
-      weight_regularizer =
-        (WeightRegularizer)parseType(value, TOKEN_WEIGHT_REGULARIZER);
-      if (weight_regularizer == WeightRegularizer::UNKNOWN) {
-        throw std::invalid_argument("[Layer] Unknown Weight decay");
-      }
-    }
-    break;
-  case PropertyType::weight_regularizer_constant:
-    if (!value.empty()) {
-      status = setFloat(weight_regularizer_constant, value);
-      throw_status(status);
-    }
-    break;
-  case PropertyType::weight_initializer:
-    if (!value.empty()) {
-      weight_initializer =
-        (Tensor::Initializer)parseType(value, TOKEN_WEIGHT_INIT);
-    }
-    break;
-  case PropertyType::bias_initializer:
-    if (!value.empty()) {
-      bias_initializer =
-        (Tensor::Initializer)parseType(value, TOKEN_WEIGHT_INIT);
-    }
-    break;
-  default:
-    std::string msg =
-      "[Layer] Unknown Layer Property Key for value, key: " + type_str +
-      " value: " + value;
-    throw exception::not_supported(msg);
-  }
+  auto remain_props = loadProperties(values, *layer_impl_props);
+  NNTR_THROW_IF(!remain_props.empty(), std::invalid_argument)
+    << "[LayerImpl] Unknown Layer Properties count " +
+         std::to_string(values.size());
 }
 
 void LayerImpl::exportTo(Exporter &exporter,
-                         const ExportMethods &method) const {}
+                         const ExportMethods &method) const {
+  exporter.saveResult(*layer_impl_props, method, this);
+}
 
 } // namespace nntrainer
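setProperty() now delegates everything to loadProperties(), which applies every recognized key=value entry to the tuple and hands back the leftovers so the caller can reject them. The real implementation lives in base_properties.h and dispatches through the tuple; the sketch below only models that contract against a fixed key list:

    #include <string>
    #include <vector>

    // Contract model only: recognized keys are consumed (the real code
    // parses the value into the matching tuple element), unknown entries
    // are returned so LayerImpl::setProperty() can throw on them.
    std::vector<std::string>
    loadPropertiesModel(const std::vector<std::string> &values) {
      static const std::vector<std::string> known = {
        "weight_regularizer", "weight_regularizer_constant",
        "weight_initializer", "bias_initializer"};
      std::vector<std::string> remainder;
      for (const auto &entry : values) {
        const std::string key = entry.substr(0, entry.find('='));
        bool consumed = false;
        for (const auto &k : known)
          consumed |= (key == k);
        if (!consumed)
          remainder.push_back(entry);
      }
      return remainder;
    }
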
index 05e871166bfd24d42e89d64971e56104054a1b68..e44c89e3ecb4ecf29e344dd3aec301c11eeb9d24 100644 (file)
@@ -63,22 +63,10 @@ public:
                         const ExportMethods &method) const override;
 
 protected:
-  std::unique_ptr<std::tuple<>> layer_impl_props; /**< layer_impl_props */
-
-  WeightRegularizer weight_regularizer;   /**< weight regularizer */
-  float weight_regularizer_constant;      /**< weight regularizer constant */
-  Tensor::Initializer weight_initializer; /**< initializer for the weights */
-  Tensor::Initializer bias_initializer;   /**< initializer for the bias */
-
-  /**
-   * @brief setProperty by type and value separated
-   * @param[in] type property type to be passed
-   * @param[in] value value to be passed
-   * @exception exception::not_supported     when property type is not valid for
-   * the particular layer
-   * @exception std::invalid_argument invalid argument
-   */
-  virtual void setProperty(const std::string &type, const std::string &value);
+  std::unique_ptr<
+    std::tuple<props::WeightRegularizer, props::WeightRegularizerConstant,
+               props::WeightInitializer, props::BiasInitializer>>
+    layer_impl_props; /**< layer_impl_props */
 };
 
 } // namespace nntrainer
index b6a22c3338c19fb181f54b555ea0da319dcbe3c0..79c8d5d3a7e26a58e3b0717b9211aed68ff958b3 100644 (file)
@@ -52,6 +52,14 @@ LSTMLayer::LSTMLayer() :
 // - bias_h ( hidden bias )
 //  : [1, 1, 1, unit (hidden_size) x NUM_GATE] -> f, g, i, o
 void LSTMLayer::finalize(InitLayerContext &context) {
+  auto &weight_regularizer =
+    std::get<props::WeightRegularizer>(*layer_impl_props);
+  auto &weight_regularizer_constant =
+    std::get<props::WeightRegularizerConstant>(*layer_impl_props);
+  auto &weight_initializer =
+    std::get<props::WeightInitializer>(*layer_impl_props);
+  auto &bias_initializer = std::get<props::BiasInitializer>(*layer_impl_props);
+
   auto unit = std::get<props::Unit>(lstm_props).get();
   auto &hidden_state_activation_type =
     std::get<props::HiddenStateActivation>(lstm_props);
index 82f467c709037c79f2b5f06d4d7cfe9bf3361b24..272bad8886f28bc78f0e404e0015e7138bdd3433 100644 (file)
@@ -40,6 +40,14 @@ RNNLayer::RNNLayer() :
   epsilon(1e-3) {}
 
 void RNNLayer::finalize(InitLayerContext &context) {
+  auto &weight_regularizer =
+    std::get<props::WeightRegularizer>(*layer_impl_props);
+  auto &weight_regularizer_constant =
+    std::get<props::WeightRegularizerConstant>(*layer_impl_props);
+  auto &weight_initializer =
+    std::get<props::WeightInitializer>(*layer_impl_props);
+  auto &bias_initializer = std::get<props::BiasInitializer>(*layer_impl_props);
+
   auto unit = std::get<props::Unit>(rnn_props).get();
   auto &hidden_state_activation_type =
     std::get<props::HiddenStateActivation>(rnn_props);
index 1e56bb93646ed6ab1e81f6275d6cb545d796e888..6d7c46da5fac47f76db42137ebfa5bd05a3996f1 100644 (file)
@@ -337,7 +337,11 @@ TEST(BasicProperty, valid_p) {
 
     auto result = e.getResult<nntrainer::ExportMethods::METHOD_STRINGVECTOR>();
     auto pair1 = std::pair<std::string, std::string>("unit", "1");
-    EXPECT_EQ(result->at(1), pair1);
+    for (unsigned int i = 0; i < result->size(); ++i) {
+      if (result->at(i).first == "unit") {
+        EXPECT_EQ(result->at(i), pair1);
+      }
+    }
   }
 
   { /**< load from layer */