From 899cc267ac3b635ff158128d34c429a3a3c33cef Mon Sep 17 00:00:00 2001
From: Parichay Kapoor <pk.kapoor@samsung.com>
Date: Thu, 25 Nov 2021 15:31:08 +0900
Subject: [PATCH] [layer] Add support for reduce mean layer

This patch adds support for reduce_mean layer with forwarding and
backwarding implementation. Basic unittests are added.
Golden unittests will come in the next patch.

Signed-off-by: Parichay Kapoor <pk.kapoor@samsung.com>
---
 nntrainer/layers/meson.build                     |  1 +
 nntrainer/layers/reduce_mean_layer.cpp           | 84 +++++++++++++++++++
 nntrainer/layers/reduce_mean_layer.h             | 96 ++++++++++++++++++++
 .../layers/unittest_layers_reduce_mean.cpp       | 29 +++++++
 test/unittest/layers/unittest_layers_reshape.cpp |  2 +-
 5 files changed, 211 insertions(+), 1 deletion(-)
 create mode 100644 nntrainer/layers/reduce_mean_layer.cpp
 create mode 100644 nntrainer/layers/reduce_mean_layer.h
 create mode 100644 test/unittest/layers/unittest_layers_reduce_mean.cpp

diff --git a/nntrainer/layers/meson.build b/nntrainer/layers/meson.build
index 64653d0..3d3f2c8 100644
--- a/nntrainer/layers/meson.build
+++ b/nntrainer/layers/meson.build
@@ -37,6 +37,7 @@ layer_sources = [
   'centroid_knn.cpp',
   'layer_context.cpp',
   'reshape_layer.cpp',
+  'reduce_mean_layer.cpp'
 ]
 
 layer_headers = [
diff --git a/nntrainer/layers/reduce_mean_layer.cpp b/nntrainer/layers/reduce_mean_layer.cpp
new file mode 100644
index 0000000..c73f6fb
--- /dev/null
+++ b/nntrainer/layers/reduce_mean_layer.cpp
@@ -0,0 +1,84 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Parichay Kapoor <pk.kapoor@samsung.com>
+ *
+ * @file reduce_mean_layer.cpp
+ * @date 25 Nov 2021
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @bug No known bugs except for NYI items
+ * @brief This is Reduce Mean Layer Class for Neural Network
+ */
+
+#include <layer_context.h>
+#include <nntrainer_error.h>
+#include <node_exporter.h>
+#include <reduce_mean_layer.h>
+
+namespace nntrainer {
+
+static constexpr size_t SINGLE_INOUT_IDX = 0;
+
+void ReduceMeanLayer::finalize(InitLayerContext &context) {
+  if (context.getNumInputs() != 1) {
+    throw std::invalid_argument("Reduce mean only supports 1 input for now");
+  }
+
+  const TensorDim &in_dim = context.getInputDimensions()[0];
+  TensorDim out_dim = in_dim;
+
+  /** if reduce axis is not provided, reduction is performed across all the
+   * dimensions (the axis property must not be queried in that case) */
+  auto &reduce_axis = std::get<props::ReduceDimension>(reduce_mean_props);
+  if (reduce_axis.empty()) {
+    out_dim = TensorDim({1, 1, 1, 1});
+  } else {
+    out_dim.setTensorDim(reduce_axis.get(), 1);
+  }
+  context.setOutputDimensions({out_dim});
+}
+
+void ReduceMeanLayer::forwarding(RunLayerContext &context, bool training) {
+  auto &reduce_axis = std::get<props::ReduceDimension>(reduce_mean_props);
+  if (reduce_axis.empty()) {
+    context.getInput(SINGLE_INOUT_IDX)
+      .average(context.getOutput(SINGLE_INOUT_IDX));
+  } else {
+    context.getInput(SINGLE_INOUT_IDX)
+      .average(reduce_axis, context.getOutput(SINGLE_INOUT_IDX));
+  }
+}
+
+void ReduceMeanLayer::calcDerivative(RunLayerContext &context) {
+  auto &ret_deriv = context.getOutgoingDerivative(SINGLE_INOUT_IDX);
+  auto &deriv = context.getIncomingDerivative(SINGLE_INOUT_IDX);
+
+  unsigned int div = ret_deriv.size() / deriv.size();
+  auto &reduce_axis = std::get<props::ReduceDimension>(reduce_mean_props);
+
+  if (reduce_axis.empty()) {
+    ret_deriv.setValue(deriv.getValue(0));
+  } else {
+    /** TODO: optimize this by supporting broadcast in copy */
+    ret_deriv.setZero();
+    ret_deriv.add_i(deriv);
+  }
+
+  ret_deriv.divide_i(div);
+}
+
+void ReduceMeanLayer::setProperty(const std::vector<std::string> &values) {
+  auto remain_props = loadProperties(values, reduce_mean_props);
+  if (!remain_props.empty()) {
+    std::string msg = "[ReduceMeanLayer] Unknown Layer Properties count " +
+                      std::to_string(remain_props.size());
+    throw exception::not_supported(msg);
+  }
+}
+
+void ReduceMeanLayer::exportTo(Exporter &exporter,
+                               const ExportMethods &method) const {
+  exporter.saveResult(reduce_mean_props, method, this);
+}
+
+} /* namespace nntrainer */
diff --git a/nntrainer/layers/reduce_mean_layer.h b/nntrainer/layers/reduce_mean_layer.h
new file mode 100644
index
0000000..75b5171
--- /dev/null
+++ b/nntrainer/layers/reduce_mean_layer.h
@@ -0,0 +1,96 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Parichay Kapoor <pk.kapoor@samsung.com>
+ *
+ * @file reduce_mean_layer.h
+ * @date 25 Nov 2021
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @bug No known bugs except for NYI items
+ * @brief This is Reduce Mean Layer Class for Neural Network
+ */
+
+#ifndef __REDUCE_MEAN_LAYER_H__
+#define __REDUCE_MEAN_LAYER_H__
+#ifdef __cplusplus
+
+#include <common_properties.h>
+#include <layer_devel.h>
+
+namespace nntrainer {
+
+/**
+ * @class   Reduce Mean Layer
+ * @brief   Reduce Mean Layer
+ */
+class ReduceMeanLayer : public Layer {
+public:
+  /**
+   * @brief Constructor of Reduce Mean Layer
+   */
+  ReduceMeanLayer() : Layer() {}
+
+  /**
+   * @brief Destructor of Reduce Mean Layer
+   */
+  ~ReduceMeanLayer(){};
+
+  /**
+   * @brief  Move constructor of ReduceMeanLayer.
+   * @param[in] ReduceMeanLayer &&
+   */
+  ReduceMeanLayer(ReduceMeanLayer &&rhs) noexcept = default;
+
+  /**
+   * @brief  Move assignment operator.
+   * @param[in] rhs ReduceMeanLayer to be moved.
+   */
+  ReduceMeanLayer &operator=(ReduceMeanLayer &&rhs) = default;
+
+  /**
+   * @copydoc Layer::finalize(InitLayerContext &context)
+   */
+  void finalize(InitLayerContext &context) override;
+
+  /**
+   * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
+   */
+  void forwarding(RunLayerContext &context, bool training) override;
+
+  /**
+   * @copydoc Layer::calcDerivative(RunLayerContext &context)
+   */
+  void calcDerivative(RunLayerContext &context) override;
+
+  /**
+   * @copydoc bool supportBackwarding() const
+   */
+  bool supportBackwarding() const override { return true; };
+
+  /**
+   * @copydoc Layer::exportTo(Exporter &exporter, ExportMethods method)
+   */
+  void exportTo(Exporter &exporter, const ExportMethods &method) const override;
+
+  /**
+   * @copydoc Layer::setProperty(const std::vector<std::string> &values)
+   */
+  void setProperty(const std::vector<std::string> &values) override;
+
+  /**
+   * @copydoc Layer::getType()
+   */
+  const std::string getType() const override { return ReduceMeanLayer::type; };
+
+  inline static const std::string type = "reduce_mean";
+
+private:
+  /** TODO: support scalar multiplier to simulate reduce_sum */
+  std::tuple<props::ReduceDimension>
+    reduce_mean_props; /**< reduce_mean properties : axis to reduce along */
+};
+
+} // namespace nntrainer
+
+#endif /* __cplusplus */
+#endif /* __REDUCE_MEAN_LAYER_H__ */
diff --git a/test/unittest/layers/unittest_layers_reduce_mean.cpp b/test/unittest/layers/unittest_layers_reduce_mean.cpp
new file mode 100644
index 0000000..0022a4b
--- /dev/null
+++ b/test/unittest/layers/unittest_layers_reduce_mean.cpp
@@ -0,0 +1,29 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Parichay Kapoor <pk.kapoor@samsung.com>
+ *
+ * @file unittest_layers_reduce_mean.cpp
+ * @date 25 November 2021
+ * @brief Reduce Mean Layer Test
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @bug No known bugs except for NYI items
+ */
+#include <tuple>
+
+#include <gtest/gtest.h>
+
+#include <layers_common_tests.h>
+#include <reduce_mean_layer.h>
+
+auto semantic_reduce_mean_all =
+  LayerSemanticsParamType(nntrainer::createLayer<nntrainer::ReduceMeanLayer>,
+                          nntrainer::ReduceMeanLayer::type, {}, 0, false, 1);
+
+auto semantic_reduce_mean = LayerSemanticsParamType(
+  nntrainer::createLayer<nntrainer::ReduceMeanLayer>,
+  nntrainer::ReduceMeanLayer::type, {"axis=1"}, 0, false, 1);
+
+INSTANTIATE_TEST_CASE_P(ReduceMean, LayerSemantics,
+                        ::testing::Values(semantic_reduce_mean,
+                                          semantic_reduce_mean_all));
diff --git a/test/unittest/layers/unittest_layers_reshape.cpp b/test/unittest/layers/unittest_layers_reshape.cpp
index d59763b..b943f9f 100644
--- a/test/unittest/layers/unittest_layers_reshape.cpp
+++ b/test/unittest/layers/unittest_layers_reshape.cpp
@@ -2,7 +2,7 @@
 /**
  * Copyright (C) 2021 Parichay Kapoor <pk.kapoor@samsung.com>
  *
- * @file unittest_layers_flatten.cpp
+ * @file unittest_layers_reshape.cpp
  * @date 19 October 2021
  * @brief Reshape Layer Test
  * @see https://github.com/nnstreamer/nntrainer
-- 
2.7.4