[layer] Add support for reduce mean layer
authorParichay Kapoor <pk.kapoor@samsung.com>
Thu, 25 Nov 2021 06:31:08 +0000 (15:31 +0900)
committerJijoong Moon <jijoong.moon@samsung.com>
Thu, 2 Dec 2021 06:54:18 +0000 (15:54 +0900)
This patch adds support for reduce_mean layer with forwarding and
backwarding implementation.
Basic unittests are added.

Golden unittests will come in the next patch.

Signed-off-by: Parichay Kapoor <pk.kapoor@samsung.com>
nntrainer/layers/meson.build
nntrainer/layers/reduce_mean_layer.cpp [new file with mode: 0644]
nntrainer/layers/reduce_mean_layer.h [new file with mode: 0644]
test/unittest/layers/unittest_layers_reduce_mean.cpp [new file with mode: 0644]
test/unittest/layers/unittest_layers_reshape.cpp

index 64653d0..3d3f2c8 100644 (file)
@@ -37,6 +37,7 @@ layer_sources = [
   'centroid_knn.cpp',
   'layer_context.cpp',
   'reshape_layer.cpp',
+  'reduce_mean_layer.cpp',
 ]
 
 layer_headers = [
diff --git a/nntrainer/layers/reduce_mean_layer.cpp b/nntrainer/layers/reduce_mean_layer.cpp
new file mode 100644 (file)
index 0000000..c73f6fb
--- /dev/null
@@ -0,0 +1,84 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Parichay Kapoor <pk.kapoor@samsung.com>
+ *
+ * @file   reduce_mean_layer.cpp
+ * @date   25 Nov 2021
+ * @see    https://github.com/nnstreamer/nntrainer
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @bug    No known bugs except for NYI items
+ * @brief  This is Reduce Mean Layer Class for Neural Network
+ */
+
+#include <nntrainer_error.h>
+#include <nntrainer_log.h>
+#include <node_exporter.h>
+#include <reduce_mean_layer.h>
+
+namespace nntrainer {
+
+static constexpr size_t SINGLE_INOUT_IDX = 0;
+
+void ReduceMeanLayer::finalize(InitLayerContext &context) {
+  if (context.getNumInputs() != 1) {
+    throw std::invalid_argument("Reduce mean only supports 1 input for now");
+  }
+
+  const TensorDim &in_dim = context.getInputDimensions()[0];
+  TensorDim out_dim = in_dim;
+
+  /** if reduce axis is not provided, reduction is performed across all the
+   * dimensions */
+  auto &reduce_axis = std::get<props::Axis>(reduce_mean_props);
+  if (reduce_axis.empty()) {
+    out_dim = TensorDim({1, 1, 1, 1});
+  }
+
+  out_dim.setTensorDim(reduce_axis.get(), 1);
+  context.setOutputDimensions({out_dim});
+}
+
+void ReduceMeanLayer::forwarding(RunLayerContext &context, bool training) {
+  auto &reduce_axis = std::get<props::Axis>(reduce_mean_props);
+  if (reduce_axis.empty()) {
+    context.getInput(SINGLE_INOUT_IDX)
+      .average(context.getOutput(SINGLE_INOUT_IDX));
+  } else {
+    context.getInput(SINGLE_INOUT_IDX)
+      .average(reduce_axis, context.getOutput(SINGLE_INOUT_IDX));
+  }
+}
+
+void ReduceMeanLayer::calcDerivative(RunLayerContext &context) {
+  auto &deriv = context.getOutgoingDerivative(SINGLE_INOUT_IDX);
+  auto &ret_deriv = context.getIncomingDerivative(SINGLE_INOUT_IDX);
+
+  unsigned int div = ret_deriv.size() / deriv.size();
+  auto &reduce_axis = std::get<props::Axis>(reduce_mean_props);
+
+  if (reduce_axis.empty()) {
+    ret_deriv.setValue(deriv.getValue(0));
+  } else {
+    /** TODO: optimize this by supporting broadcast in copy */
+    ret_deriv.setZero();
+    ret_deriv.add_i(deriv);
+  }
+
+  ret_deriv.divide_i(div);
+}
+
+void ReduceMeanLayer::setProperty(const std::vector<std::string> &values) {
+  auto remain_props = loadProperties(values, reduce_mean_props);
+  if (!remain_props.empty()) {
+    std::string msg = "[ReduceMeanLayer] Unknown Layer Properties count " +
+                      std::to_string(remain_props.size());
+    throw exception::not_supported(msg);
+  }
+}
+
/**
 * @brief Export this layer's properties (the reduce axis) via the exporter.
 *
 * @param exporter exporter collecting the serialized result
 * @param method   export method/format to serialize for
 */
void ReduceMeanLayer::exportTo(Exporter &exporter,
                               const ExportMethods &method) const {
  /** delegate property serialization to the common exporter machinery */
  exporter.saveResult(reduce_mean_props, method, this);
}
+
+} /* namespace nntrainer */
diff --git a/nntrainer/layers/reduce_mean_layer.h b/nntrainer/layers/reduce_mean_layer.h
new file mode 100644 (file)
index 0000000..75b5171
--- /dev/null
@@ -0,0 +1,96 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Parichay Kapoor <pk.kapoor@samsung.com>
+ *
+ * @file   reduce_mean_layer.h
+ * @date   25 Nov 2021
+ * @see    https://github.com/nnstreamer/nntrainer
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @bug    No known bugs except for NYI items
+ * @brief  This is Reduce Mean Layer Class for Neural Network
+ *
+ */
+
+#ifndef __REDUCE_MEAN_LAYER_H__
+#define __REDUCE_MEAN_LAYER_H__
+#ifdef __cplusplus
+
+#include <layer_devel.h>
+
+namespace nntrainer {
+
/**
 * @class   Reduce Mean Layer
 * @brief   Layer that reduces the input tensor by taking the mean, either
 *          along a configured axis or across all dimensions when no axis is
 *          set
 */
class ReduceMeanLayer : public Layer {
public:
  /**
   * @brief     Constructor of Reduce Mean Layer
   */
  ReduceMeanLayer() : Layer() {}

  /**
   * @brief     Destructor of Reduce Mean Layer
   */
  ~ReduceMeanLayer(){};

  /**
   *  @brief  Move constructor of ReduceMeanLayer.
   *  @param[in] rhs ReduceMeanLayer to be moved.
   */
  ReduceMeanLayer(ReduceMeanLayer &&rhs) noexcept = default;

  /**
   * @brief  Move assignment operator.
   * @param[in] rhs ReduceMeanLayer to be moved.
   */
  ReduceMeanLayer &operator=(ReduceMeanLayer &&rhs) = default;

  /**
   * @copydoc Layer::finalize(InitLayerContext &context)
   */
  void finalize(InitLayerContext &context) override;

  /**
   * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
   */
  void forwarding(RunLayerContext &context, bool training) override;

  /**
   * @copydoc Layer::calcDerivative(RunLayerContext &context)
   */
  void calcDerivative(RunLayerContext &context) override;

  /**
   * @copydoc bool supportBackwarding() const
   */
  bool supportBackwarding() const override { return true; };

  /**
   * @copydoc Layer::exportTo(Exporter &exporter, ExportMethods method)
   */
  void exportTo(Exporter &exporter, const ExportMethods &method) const override;

  /**
   * @copydoc Layer::setProperty(const std::vector<std::string> &values)
   */
  void setProperty(const std::vector<std::string> &values) override;

  /**
   * @copydoc Layer::getType()
   */
  const std::string getType() const override { return ReduceMeanLayer::type; };

  /** registered type string used by the layer factory */
  inline static const std::string type = "reduce_mean";

private:
  /** TODO: support scalar multiplier to simulate reduce_sum */
  std::tuple<props::Axis>
    reduce_mean_props; /**< reduce_mean properties : axis to reduce along */
};
+
+} // namespace nntrainer
+
+#endif /* __cplusplus */
+#endif /* __REDUCE_MEAN_LAYER_H__ */
diff --git a/test/unittest/layers/unittest_layers_reduce_mean.cpp b/test/unittest/layers/unittest_layers_reduce_mean.cpp
new file mode 100644 (file)
index 0000000..0022a4b
--- /dev/null
@@ -0,0 +1,29 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Parichay Kapoor <pk.kapoor@samsung.com>
+ *
+ * @file unittest_layers_reduce_mean.cpp
+ * @date 25 November 2021
+ * @brief Reduce Mean Layer Test
+ * @see        https://github.com/nnstreamer/nntrainer
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @bug No known bugs except for NYI items
+ */
+#include <tuple>
+
+#include <gtest/gtest.h>
+
+#include <layers_common_tests.h>
+#include <reduce_mean_layer.h>
+
/** semantics test with no axis property: reduction over all dimensions */
auto semantic_reduce_mean_all =
  LayerSemanticsParamType(nntrainer::createLayer<nntrainer::ReduceMeanLayer>,
                          nntrainer::ReduceMeanLayer::type, {}, 0, false, 1);

/** semantics test reducing along a single axis (axis=1) */
auto semantic_reduce_mean = LayerSemanticsParamType(
  nntrainer::createLayer<nntrainer::ReduceMeanLayer>,
  nntrainer::ReduceMeanLayer::type, {"axis=1"}, 0, false, 1);

/** run the common layer-semantics suite over both configurations */
INSTANTIATE_TEST_CASE_P(ReduceMean, LayerSemantics,
                        ::testing::Values(semantic_reduce_mean,
                                          semantic_reduce_mean_all));
index d59763b..b943f9f 100644 (file)
@@ -2,7 +2,7 @@
 /**
  * Copyright (C) 2021 Parichay Kapoor <pk.kapoor@samsung.com>
  *
- * @file unittest_layers_flatten.cpp
+ * @file unittest_layers_reshape.cpp
  * @date 19 October 2021
  * @brief Reshape Layer Test
  * @see        https://github.com/nnstreamer/nntrainer