[ Layer ] Implementation of DropOut Layer
author jijoong.moon <jijoong.moon@samsung.com>
Mon, 5 Jul 2021 12:50:58 +0000 (21:50 +0900)
committer jijoong.moon <jijoong.moon@samsung.com>
Wed, 21 Jul 2021 09:35:46 +0000 (18:35 +0900)
In this commit:
  . Implementation of DropOutLayer (layer class, its "dropout" property, and factory registration)

Resolves:

**Self evaluation:**
1. Build test:  [X]Passed [ ]Failed [ ]Skipped
2. Run test:  [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: jijoong.moon <jijoong.moon@samsung.com>
api/ccapi/include/layer.h
jni/Android.mk
nntrainer/app_context.cpp
nntrainer/layers/common_properties.cpp
nntrainer/layers/common_properties.h
nntrainer/layers/dropout.cpp [new file with mode: 0644]
nntrainer/layers/dropout.h [new file with mode: 0644]
nntrainer/layers/meson.build

index 794269d..8d7f19a 100644 (file)
@@ -56,6 +56,7 @@ enum LayerType {
   LAYER_GRU,                  /** GRU Layer type */
   LAYER_TIME_DIST,            /** Time Distributed Layer type */
   LAYER_PERMUTE,              /** Permute layer */
+  LAYER_DROPOUT,              /** DropOut Layer type */
   LAYER_UNKNOWN = ML_TRAIN_LAYER_TYPE_UNKNOWN /** Unknown */
 };
 
@@ -296,6 +297,14 @@ GRU(const std::vector<std::string> &properties = {}) {
 }
 
 /**
+ * @brief Helper function to create DropOut layer
+ */
+inline std::unique_ptr<Layer>
+DropOut(const std::vector<std::string> &properties = {}) {
+  return createLayer(LayerType::LAYER_DROPOUT, properties);
+}
+
+/**
  * @brief Helper function to create Time Distributed layer
  */
 inline std::unique_ptr<Layer>
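For context, the new helper plugs into the usual ccapi model-building flow. A minimal usage sketch (the surrounding model calls follow the existing ccapi; the rate, layer name, and unit count are illustrative):

```cpp
#include <layer.h>
#include <model.h>

// Build a model and attach a dropout layer via the new helper.
// The "dropout" property key is defined by props::DropOutSpec below.
auto model = ml::train::createModel(ml::train::ModelType::NEURAL_NET);
model->addLayer(ml::train::layer::FullyConnected({"unit=10"}));
model->addLayer(ml::train::layer::DropOut({"name=do1", "dropout=0.5"}));
```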
index 4b6c065..4b7aed5 100644 (file)
@@ -156,6 +156,7 @@ NNTRAINER_SRCS := $(NNTRAINER_ROOT)/nntrainer/models/neuralnet.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/lstm.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/gru.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/time_dist.cpp \
+                  $(NNTRAINER_ROOT)/nntrainer/layers/dropout.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/permute_layer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/acti_func.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/layers/split_layer.cpp \
index 591aa93..27a3c3c 100644 (file)
@@ -33,6 +33,7 @@
 #include <bn_layer.h>
 #include <concat_layer.h>
 #include <conv2d_layer.h>
+#include <dropout.h>
 #include <embedding.h>
 #include <fc_layer.h>
 #include <flatten_layer.h>
@@ -248,6 +249,8 @@ static void add_default_object(AppContext &ac) {
                      LayerType::LAYER_LSTM);
   ac.registerFactory(nntrainer::createLayer<GRULayer>, GRULayer::type,
                      LayerType::LAYER_GRU);
+  ac.registerFactory(nntrainer::createLayer<DropOutLayer>, DropOutLayer::type,
+                     LayerType::LAYER_DROPOUT);
   ac.registerFactory(nntrainer::createLayer<TimeDistLayer>, TimeDistLayer::type,
                      LayerType::LAYER_TIME_DIST);
   ac.registerFactory(nntrainer::createLayer<SplitLayer>, SplitLayer::type,
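Once the factory is registered, the type resolves through the same ccapi entry point that the DropOut() helper in layer.h uses internally; a short sketch (the 0.3 rate is illustrative):

```cpp
// Resolve the newly registered layer by its enum; equivalent to the
// ml::train::layer::DropOut() helper shown earlier.
auto drop = ml::train::createLayer(ml::train::LayerType::LAYER_DROPOUT,
                                   {"dropout=0.3"});
```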
index a8347de..54abb22 100644 (file)
@@ -28,6 +28,13 @@ bool Name::isValid(const std::string &v) const {
   return !v.empty() && std::regex_match(v, allowed);
 }
 
+bool DropOutSpec::isValid(const float &v) const {
+  // A rate of 0.0 (the default) disables dropout and is valid; only
+  // negative rates are rejected, matching the documented contract.
+  return v >= 0.0f;
+}
+
 ConnectionSpec::ConnectionSpec(const std::vector<props::Name> &layer_ids_,
                                const std::string &op_type_) :
   op_type(op_type_),
index 59eae5b..f1f69aa 100644 (file)
@@ -228,6 +228,31 @@ public:
                                       const TensorDim &kernel);
 };
 
+/**
+ * @brief DropOutSpec property; defines the dropout rate of a layer
+ *
+ */
+class DropOutSpec : public nntrainer::Property<float> {
+
+public:
+  /**
+   * @brief Construct a new DropOutSpec object with a default value of 0.0
+   *
+   */
+  DropOutSpec(float value = 0.0) : nntrainer::Property<float>(value) {}
+  static constexpr const char *key = "dropout"; /**< unique key to access */
+  using prop_tag = float_prop_tag;              /**< property type */
+
+  /**
+   * @brief DropOutSpec validator
+   *
+   * @param v float to validate
+   * @retval true if it is greater than or equal to 0.0
+   * @retval false if it is smaller than 0.0
+   */
+  bool isValid(const float &v) const override;
+};
+
 } // namespace props
 } // namespace nntrainer
 
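Behaviorally, the property accepts any non-negative rate and rejects negative ones; a minimal sketch of the intended contract (the set/get interface is assumed from nntrainer::Property<float>; only the key and isValid are defined above):

```cpp
// Hypothetical usage of the new property.
nntrainer::props::DropOutSpec spec; // default-constructed with 0.0
spec.set(0.5f);                     // store a 50% dropout rate
float rate = spec.get();            // rate == 0.5f

bool ok = spec.isValid(0.0f);       // true: 0.0 means dropout disabled
bool bad = spec.isValid(-0.1f);     // false: negative rates are rejected
```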
diff --git a/nntrainer/layers/dropout.cpp b/nntrainer/layers/dropout.cpp
new file mode 100644 (file)
index 0000000..2496a3e
--- /dev/null
@@ -0,0 +1,71 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jijoong Moon <jijoong.moon@samsung.com>
+ *
+ * @file   dropout.cpp
+ * @date   05 July 2021
+ * @see    https://github.com/nnstreamer/nntrainer
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @bug    No known bugs except for NYI items
+ * @brief  This is Dropout Layer Class for Neural Network
+ *
+ */
+
+#include <dropout.h>
+#include <layer_internal.h>
+#include <nntrainer_error.h>
+#include <nntrainer_log.h>
+#include <parse_util.h>
+#include <util_func.h>
+
+namespace nntrainer {
+
+int DropOutLayer::initialize(Manager &manager) {
+  output_dim = input_dim;
+  // TODO: request the mask Tensors from the Manager instead of allocating here
+  for (auto &t : input_dim) {
+    mask.push_back(std::make_shared<Tensor>(t, true));
+  }
+
+  return ML_ERROR_NONE;
+}
+
+void DropOutLayer::forwarding(bool training) {
+  auto &rate_ = std::get<props::DropOutSpec>(dropout_rate).get();
+  // Assume an in-place calculation: input and output share the same memory
+  // buffer, so when training is false the output is identical to the input.
+  // In other words, nothing happens during inference.
+
+  if (training && rate_ > 0.0) {
+    for (unsigned int i = 0; i < input_dim.size(); ++i) {
+      Tensor &input = net_input[i]->getVariableRef();
+      Tensor &mask_ = *mask[i].get();
+
+      mask_ = input.dropout_mask(rate_);
+      input.multiply_i(mask_);
+    }
+  }
+}
+
+void DropOutLayer::calcDerivative() {
+  // Assume an in-place calculation (see forwarding above)
+  auto &rate_ = std::get<props::DropOutSpec>(dropout_rate).get();
+  if (rate_ > 0.0) {
+    for (unsigned int i = 0; i < input_dim.size(); ++i) {
+      Tensor &deriv = net_hidden[i]->getGradientRef();
+      deriv.multiply_i(*mask[i].get());
+    }
+  }
+}
+
+int DropOutLayer::setProperty(std::vector<std::string> values) {
+  try {
+    values = loadProperties(values, dropout_rate);
+  } catch (std::invalid_argument &e) {
+    ml_loge("parsing property failed, reason: %s", e.what());
+    return ML_ERROR_INVALID_PARAMETER;
+  }
+
+  return LayerV1::setProperty(values);
+}
+} /* namespace nntrainer */
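For intuition, forwarding draws a fresh mask per input and zeroes (or scales) activations, while calcDerivative reuses the same mask on the incoming gradient. A standalone sketch of the masking idea (whether Tensor::dropout_mask applies inverted scaling, i.e. 1/(1 - rate) for kept units, is an assumption of this sketch):

```cpp
#include <cstddef>
#include <random>
#include <vector>

// Build a dropout mask: each unit is kept with probability (1 - rate).
// Kept units are scaled by 1/(1 - rate) so the expected activation is
// unchanged (inverted dropout); dropped units become 0.
std::vector<float> make_dropout_mask(std::size_t n, float rate,
                                     std::mt19937 &rng) {
  std::bernoulli_distribution keep(1.0f - rate);
  std::vector<float> mask(n);
  for (auto &m : mask)
    m = keep(rng) ? 1.0f / (1.0f - rate) : 0.0f;
  return mask;
}

// forward:  y[i]  = x[i]  * mask[i]
// backward: dx[i] = dy[i] * mask[i]   (the same mask is reused)
```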
diff --git a/nntrainer/layers/dropout.h b/nntrainer/layers/dropout.h
new file mode 100644 (file)
index 0000000..27aacce
--- /dev/null
@@ -0,0 +1,119 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jijoong Moon <jijoong.moon@samsung.com>
+ *
+ * @file   dropout.h
+ * @date   05 July 2021
+ * @see    https://github.com/nnstreamer/nntrainer
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @bug    No known bugs except for NYI items
+ * @brief  This is DropOut Layer Class for Neural Network
+ *
+ */
+
+#ifndef __DROPOUT_H__
+#define __DROPOUT_H__
+#ifdef __cplusplus
+
+#include <common_properties.h>
+#include <layer_internal.h>
+#include <node_exporter.h>
+#include <tensor.h>
+
+namespace nntrainer {
+
+/**
+ * @class   DropOut Layer
+ * @brief   DropOut Layer
+ */
+class DropOutLayer : public LayerV1 {
+public:
+  /**
+   * @brief     Constructor of DropOut Layer
+   */
+  template <typename... Args>
+  DropOutLayer(float dropout = 0.0, Args... args) :
+    LayerV1(args...),
+    dropout_rate(props::DropOutSpec(dropout)) {
+    setTrainable(false);
+  }
+
+  /**
+   * @brief     Destructor of DropOut Layer
+   */
+  ~DropOutLayer() = default;
+
+  /**
+   *  @brief  Move constructor of DropOutLayer.
+   *  @param[in] rhs DropOutLayer to be moved.
+   */
+  DropOutLayer(DropOutLayer &&rhs) noexcept = default;
+
+  /**
+   * @brief  Move assignment operator.
+   * @param[in] rhs DropOutLayer to be moved.
+   */
+  DropOutLayer &operator=(DropOutLayer &&rhs) = default;
+
+  /**
+   * @brief     initialize layer
+   * @retval #ML_ERROR_NONE Successful.
+   * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
+   */
+  int initialize(Manager &manager) override;
+
+  /**
+   * @brief     Read Weight & Bias Data from file (no-op: dropout has no
+   *            weights)
+   * @param[in] file input stream file
+   */
+  void read(std::ifstream &file) override{};
+
+  /**
+   * @brief     Save Weight & Bias Data to file (no-op: dropout has no
+   *            weights)
+   * @param[in] file output stream file
+   */
+  void save(std::ofstream &file) override{};
+
+  /**
+   * @copydoc Layer::forwarding(bool training)
+   */
+  void forwarding(bool training = true) override;
+
+  /**
+   * @copydoc Layer::calcDerivative()
+   */
+  void calcDerivative() override;
+
+  /**
+   * @copydoc Layer::supportInPlace()
+   */
+  bool supportInPlace() const override { return true; }
+
+  /**
+   * @copydoc Layer::setProperty(std::vector<std::string> values)
+   */
+  int setProperty(std::vector<std::string> values) override;
+
+  /**
+   * @copydoc Layer::export_to(Exporter &exporter, ExportMethods method)
+   */
+  void export_to(
+    Exporter &exporter,
+    ExportMethods method = ExportMethods::METHOD_STRINGVECTOR) const override{};
+
+  /**
+   * @copydoc Layer::getType()
+   */
+  const std::string getType() const override { return DropOutLayer::type; };
+
+  inline static const std::string type = "dropout";
+
+private:
+  std::tuple<props::DropOutSpec> dropout_rate; /**< dropout rate property */
+  std::vector<std::shared_ptr<Tensor>> mask;   /**< dropout mask per input */
+};
+
+} // namespace nntrainer
+
+#endif /* __cplusplus */
+#endif /* __DROPOUT_H__ */
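The class can also be constructed directly from internal code; a brief sketch (the factory path above is the normal route; the 0.5 rate is illustrative):

```cpp
#include <dropout.h>

// Direct construction with a 50% dropout rate; the string form below is
// equivalent and goes through DropOutSpec validation.
nntrainer::DropOutLayer drop(0.5f);
int status = drop.setProperty({"dropout=0.5"});
// status == ML_ERROR_NONE on success
```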
index 5377bc7..00acd50 100644 (file)
@@ -25,7 +25,8 @@ layer_sources = [
   'split_layer.cpp',
   'permute_layer.cpp',
   'layer_impl.cpp',
-  'gru.cpp'
+  'gru.cpp',
+  'dropout.cpp'
 ]
 
 layer_headers = [