This patch provides support for reshape layer and basic unittests.
The flatten layer is also updated to use reshape layer internally.
Signed-off-by: Parichay Kapoor <pk.kapoor@samsung.com>
LAYER_BACKBONE_TFLITE, /**< Backbone using TFLite */
LAYER_ATTENTION, /**< Attention Layer type */
LAYER_CONV1D, /**< Convolution 1D Layer type */
+ LAYER_RESHAPE, /**< Reshape Layer type */
LAYER_LOSS_MSE = 500, /**< Mean Squared Error Loss Layer type */
LAYER_LOSS_CROSS_ENTROPY_SIGMOID, /**< Cross Entropy with Sigmoid Loss Layer
type */
}
/**
+ * @brief Helper function to create reshape layer
+ */
+inline std::unique_ptr<Layer>
+Reshape(const std::vector<std::string> &properties = {}) {
+  // Delegate to the layer factory keyed by LAYER_RESHAPE; `properties`
+  // (e.g. "target_shape=...") are forwarded verbatim to the created layer.
+  return createLayer(LayerType::LAYER_RESHAPE, properties);
+}
+
+/**
* @brief Helper function to create addition layer
*/
inline std::unique_ptr<Layer>
$(NNTRAINER_ROOT)/nntrainer/layers/pooling2d_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/activation_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/flatten_layer.cpp \
+ $(NNTRAINER_ROOT)/nntrainer/layers/reshape_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/addition_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/attention_layer.cpp \
$(NNTRAINER_ROOT)/nntrainer/layers/concat_layer.cpp \
Pooling2DLayer::type, LayerType::LAYER_POOLING2D);
ac.registerFactory(nntrainer::createLayer<FlattenLayer>, FlattenLayer::type,
LayerType::LAYER_FLATTEN);
+ ac.registerFactory(nntrainer::createLayer<ReshapeLayer>, ReshapeLayer::type,
+ LayerType::LAYER_RESHAPE);
ac.registerFactory(nntrainer::createLayer<ActivationLayer>,
ActivationLayer::type, LayerType::LAYER_ACTIVATION);
ac.registerFactory(nntrainer::createLayer<AdditionLayer>, AdditionLayer::type,
#include <common_properties.h>
#include <nntrainer_error.h>
+#include <nntrainer_log.h>
#include <tensor_dim.h>
#include <regex>
}
FlipDirection::FlipDirection(FlipDirectionInfo::Enum value) { set(value); }
+
+/**
+ * @brief Set the shape value while ignoring the batch axis.
+ *
+ * The batch axis is flagged dynamic (0b1000 — presumably the batch bit of
+ * the 4 dims; confirm against TensorDim::setDynDimFlag) and forced to 1:
+ * batch size is controlled by the model's batchsize property, not by a
+ * per-layer shape property.
+ *
+ * @param value dimension to set; any batch value other than 1 is dropped
+ *              with a warning
+ */
+void GenericShape::set(const TensorDim &value) {
+  TensorDim ret = value;
+  ret.setDynDimFlag(0b1000);
+  if (ret.batch() != 1) {
+    /** fixed: adjacent literals previously concatenated to "ignored.Use" */
+    ml_logw("Batch size set with dimension %u is ignored. "
+            "Use batchsize property for the model to update batchsize.",
+            ret.batch());
+    ret.batch(1);
+  }
+  Property<TensorDim>::set(ret);
+}
+
} // namespace props
static const std::vector<std::pair<char, std::string>>
using prop_tag = uint_prop_tag; /**< property type */
};
+/**
+ * @brief generic shape property which saves a single tensor shape
+ * (practically, std::array<GenericShape> is used)
+ *
+ * @note The batch axis of the stored dimension is ignored: set() forces
+ * batch to 1 and warns. Batch size must be configured on the model, not
+ * through this property.
+ *
+ */
+class GenericShape : public Property<TensorDim> {
+
+public:
+  static constexpr const char *key =
+    "generic_shape"; /**< unique key to access */
+  using prop_tag = dimension_prop_tag; /**< property type */
+
+  /**
+   * @brief Shape setter; drops the batch axis (see class note)
+   *
+   * @param value dimension value to set
+   */
+  void set(const TensorDim &value) override;
+};
+
+/**
+ * @brief target shape property which saves a single tensor shape
+ * (practically, std::array<TargetShape> is used)
+ *
+ * @note Inherits GenericShape::set, so the batch axis of the target shape
+ * is ignored; only the per-sample dimensions are meaningful.
+ *
+ */
+class TargetShape : public GenericShape {
+
+public:
+  static constexpr const char *key =
+    "target_shape"; /**< unique key to access */
+  using prop_tag = dimension_prop_tag; /**< property type */
+};
+
} // namespace props
} // namespace nntrainer
static constexpr size_t SINGLE_INOUT_IDX = 0;
void FlattenLayer::finalize(InitLayerContext &context) {
- if (context.getNumInputs() != 1) {
- throw std::invalid_argument("input_shape keyword is only for one input");
- }
+ ReshapeLayer::setProperty({"target_shape=-1"});
+ /** @note the output dimension is in invalid state till finalize of
+ * reshape_layer is finished */
+ ReshapeLayer::finalize(context);
- TensorDim out_dim;
const TensorDim &in_dim = context.getInputDimensions()[0];
if (in_dim.channel() == 1 && in_dim.height() == 1) {
ml_logw("Warning: the flatten layer is redundant");
}
-
- out_dim.batch(in_dim.batch());
- out_dim.channel(1);
- out_dim.height(1);
- out_dim.width(in_dim.getFeatureLen());
-
- context.setOutputDimensions({out_dim});
-}
-
-void FlattenLayer::forwarding(RunLayerContext &context, bool training) {
- if (!context.executeInPlace()) {
- context.getOutput(SINGLE_INOUT_IDX)
- .copyData(context.getInput(SINGLE_INOUT_IDX));
- }
-}
-
-void FlattenLayer::calcDerivative(RunLayerContext &context) {
- if (!context.executeInPlace()) {
- context.getOutgoingDerivative(SINGLE_INOUT_IDX)
- .copyData(context.getIncomingDerivative(SINGLE_INOUT_IDX));
- }
}
void FlattenLayer::setProperty(const std::vector<std::string> &values) {
#define __FLATTEN_LAYER_H__
#ifdef __cplusplus
-#include <layer_devel.h>
+#include <reshape_layer.h>
namespace nntrainer {
* @class Flatten Layer
* @brief Flatten Layer
*/
-class FlattenLayer : public Layer {
+class FlattenLayer : public ReshapeLayer {
public:
/**
* @brief Constructor of Flatten Layer
*/
- FlattenLayer() : Layer() {}
+ FlattenLayer() : ReshapeLayer() {}
/**
* @brief Destructor of Flatten Layer
void finalize(InitLayerContext &context) override;
/**
- * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
- */
- void forwarding(RunLayerContext &context, bool training) override;
-
- /**
- * @copydoc Layer::calcDerivative(RunLayerContext &context)
- */
- void calcDerivative(RunLayerContext &context) override;
-
- /**
* @copydoc Layer::setProperty(const std::vector<std::string> &values)
*/
void setProperty(const std::vector<std::string> &values) override;
/**
- * @copydoc bool supportBackwarding() const
- */
- bool supportBackwarding() const override { return true; };
-
- /**
- * @copydoc Layer::supportInPlace()
- */
- bool supportInPlace() const override { return true; }
-
- /**
- * @copydoc Layer::exportTo(Exporter &exporter, ExportMethods method)
- */
- void exportTo(Exporter &exporter,
- const ExportMethods &method) const override {}
-
- /**
* @copydoc Layer::getType()
*/
const std::string getType() const override { return FlattenLayer::type; };
* (practically, std::array<InputShape> is used)
*
*/
-class InputShape : public Property<TensorDim> {
+class InputShape : public GenericShape {
public:
static constexpr const char *key = "input_shape"; /**< unique key to access */
using prop_tag = dimension_prop_tag; /**< property type */
-
- /**
- * @brief Input shape setter
- *
- * @param value value to set
- */
- void set(const TensorDim &value) override {
- TensorDim ret = value;
- ret.setDynDimFlag(0b1000);
- if (ret.batch() != 1) {
- ml_logw("Batch size set with input dimension %u is ignored."
- "Use batchsize property for the model to update batchsize.",
- ret.batch());
- ret.batch(1);
- }
- Property<TensorDim>::set(ret);
- }
};
/**
'gru.cpp',
'dropout.cpp',
'centroid_knn.cpp',
- 'layer_context.cpp'
+ 'layer_context.cpp',
+ 'reshape_layer.cpp'
]
layer_headers = [
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jijoong Moon <jijoong.moon@samsung.com>
+ *
+ * @file reshape_layer.cpp
+ * @date 16 June 2020
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Jijoong Moon <jijoong.moon@samsung.com>
+ * @bug No known bugs except for NYI items
+ * @brief This is Reshape Layer Class for Neural Network
+ *
+ * @todo Update reshape to work in-place properly.
+ */
+
+#include <nntrainer_error.h>
+#include <nntrainer_log.h>
+#include <node_exporter.h>
+#include <reshape_layer.h>
+
+namespace nntrainer {
+
+static constexpr size_t SINGLE_INOUT_IDX = 0;
+
+/**
+ * @brief Finalize the reshape layer: derive and set the output dimension.
+ *
+ * Requires exactly one input and a target_shape property. A target shape
+ * whose data length reads as -1 is the flatten sentinel (set by
+ * FlattenLayer) and flattens everything but batch into the width axis.
+ * Otherwise the target shape must preserve the per-sample element count.
+ *
+ * @throw std::invalid_argument on multiple inputs, missing target shape,
+ *        or a target shape whose feature length mismatches the input
+ */
+void ReshapeLayer::finalize(InitLayerContext &context) {
+  if (context.getNumInputs() != 1) {
+    throw std::invalid_argument("Reshape only supports 1 input for now");
+  }
+
+  const TensorDim &in_dim = context.getInputDimensions()[0];
+
+  auto &target_shape = std::get<props::TargetShape>(reshape_props);
+  if (target_shape.empty())
+    throw std::invalid_argument(
+      "Reshape layer must be provided with target shape");
+  TensorDim out_dim = target_shape.get();
+
+  /** flatten sets the dimension to 1 to indicate to flatten the rest of the
+   * dimensions */
+  if ((int)out_dim.getDataLen() == -1) {
+    out_dim.height(1);
+    out_dim.channel(1);
+    out_dim.width(in_dim.getFeatureLen());
+  } else if (out_dim.getFeatureLen() != in_dim.getFeatureLen()) {
+    /** reshape must not change the number of elements per sample */
+    throw std::invalid_argument(
+      "Target shape does not match the feature size of the input");
+  }
+
+  out_dim.batch(in_dim.batch());
+
+  context.setOutputDimensions({out_dim});
+}
+
+/**
+ * @brief Forward pass: reshape is a no-op on data; when not executing
+ * in-place, mirror the input buffer into the output buffer.
+ */
+void ReshapeLayer::forwarding(RunLayerContext &context, bool training) {
+  if (context.executeInPlace())
+    return;
+
+  context.getOutput(SINGLE_INOUT_IDX)
+    .copyData(context.getInput(SINGLE_INOUT_IDX));
+}
+
+/**
+ * @brief Backward pass: the derivative passes through unchanged; when not
+ * executing in-place, mirror the incoming derivative to the outgoing one.
+ */
+void ReshapeLayer::calcDerivative(RunLayerContext &context) {
+  if (context.executeInPlace())
+    return;
+
+  context.getOutgoingDerivative(SINGLE_INOUT_IDX)
+    .copyData(context.getIncomingDerivative(SINGLE_INOUT_IDX));
+}
+
+/**
+ * @brief Load layer properties; anything not consumed by reshape_props
+ * (i.e. not target_shape) is rejected.
+ *
+ * @throw exception::not_supported when unknown properties remain
+ */
+void ReshapeLayer::setProperty(const std::vector<std::string> &values) {
+  auto leftover = loadProperties(values, reshape_props);
+  if (leftover.empty())
+    return;
+
+  throw exception::not_supported(
+    "[ReshapeLayer] Unknown Layer Properties count " +
+    std::to_string(leftover.size()));
+}
+} /* namespace nntrainer */
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2020 Jijoong Moon <jijoong.moon@samsung.com>
+ *
+ * @file reshape_layer.h
+ * @date 16 June 2020
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @bug No known bugs except for NYI items
+ * @brief This is Reshape Layer Class for Neural Network
+ *
+ */
+
+#ifndef __RESHAPE_LAYER_H__
+#define __RESHAPE_LAYER_H__
+#ifdef __cplusplus
+
+#include <common_properties.h>
+#include <layer_devel.h>
+
+namespace nntrainer {
+
+/**
+ * @class Reshape Layer
+ * @brief Reshape Layer
+ */
+class ReshapeLayer : public Layer {
+public:
+  /**
+   * @brief Constructor of Reshape Layer
+   */
+  ReshapeLayer() : Layer() {}
+
+  /**
+   * @brief Destructor of Reshape Layer
+   */
+  ~ReshapeLayer() = default;
+
+  /**
+   * @brief Move constructor of ReshapeLayer.
+   * @param[in] rhs ReshapeLayer to be moved.
+   */
+  ReshapeLayer(ReshapeLayer &&rhs) noexcept = default;
+
+  /**
+   * @brief Move assignment operator.
+   * @param[in] rhs ReshapeLayer to be moved.
+   */
+  ReshapeLayer &operator=(ReshapeLayer &&rhs) = default;
+
+  /**
+   * @copydoc Layer::finalize(InitLayerContext &context)
+   */
+  void finalize(InitLayerContext &context) override;
+
+  /**
+   * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
+   */
+  void forwarding(RunLayerContext &context, bool training) override;
+
+  /**
+   * @copydoc Layer::calcDerivative(RunLayerContext &context)
+   */
+  void calcDerivative(RunLayerContext &context) override;
+
+  /**
+   * @copydoc Layer::setProperty(const std::vector<std::string> &values)
+   */
+  void setProperty(const std::vector<std::string> &values) override;
+
+  /**
+   * @copydoc bool supportBackwarding() const
+   */
+  bool supportBackwarding() const override { return true; };
+
+  /**
+   * @copydoc Layer::supportInPlace()
+   *
+   * @note reshape moves no data, so input and output can share a buffer
+   */
+  bool supportInPlace() const override { return true; }
+
+  /**
+   * @copydoc Layer::exportTo(Exporter &exporter, ExportMethods method)
+   *
+   * @note no-op: reshape has no exportable parameters for now
+   */
+  void exportTo(Exporter &exporter,
+                const ExportMethods &method) const override {}
+
+  /**
+   * @copydoc Layer::getType()
+   */
+  const std::string getType() const override { return ReshapeLayer::type; };
+
+  inline static const std::string type = "reshape"; /**< registry key */
+
+private:
+  std::tuple<props::TargetShape>
+    reshape_props; /**< reshape properties : target_shape after reshape */
+};
+
+} // namespace nntrainer
+
+#endif /* __cplusplus */
+#endif /* __RESHAPE_LAYER_H__ */
'unittest_layers_permute.cpp',
'unittest_layers_attention.cpp',
'unittest_layers_dropout.cpp',
+ 'unittest_layers_reshape.cpp',
]
if get_option('enable-tflite-backbone')
--- /dev/null
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2021 Parichay Kapoor <pk.kapoor@samsung.com>
+ *
+ * @file unittest_layers_reshape.cpp
+ * @date 19 October 2021
+ * @brief Reshape Layer Test
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Parichay Kapoor <pk.kapoor@samsung.com>
+ * @bug No known bugs except for NYI items
+ */
+#include <tuple>
+
+#include <gtest/gtest.h>
+
+#include <layers_common_tests.h>
+#include <reshape_layer.h>
+
+/**
+ * @brief Common-semantics test parameters for the reshape layer, created
+ * with a flatten-style sentinel target shape ("target_shape=-1").
+ * NOTE(review): the trailing arguments (0, false, 1) follow the
+ * LayerSemanticsParamType convention used by the sibling unittests —
+ * confirm their meaning against layers_common_tests.h.
+ */
+auto semantic_reshape = LayerSemanticsParamType(
+  nntrainer::createLayer<nntrainer::ReshapeLayer>,
+  nntrainer::ReshapeLayer::type, {"target_shape=-1"}, 0, false, 1);
+
+INSTANTIATE_TEST_CASE_P(Reshape, LayerSemantics,
+                        ::testing::Values(semantic_reshape));