#include <lazy_tensor.h>
#include <nntrainer_error.h>
#include <nntrainer_log.h>
+#include <node_exporter.h>
#include <parse_util.h>
#include <pooling2d_layer.h>
#include <util_func.h>
static constexpr size_t SINGLE_INOUT_IDX = 0;
+/**
+ * @brief Pooling2DLayer constructor (added by this hunk).
+ *        All pooling properties start default-constructed in the
+ *        pooling2d_props tuple and are populated later through
+ *        setProperty()/finalize(); only the raw padding array is taken here.
+ */
+Pooling2DLayer::Pooling2DLayer(
+ const std::array<unsigned int, POOLING2D_DIM * 2> &padding_) :
+ Layer(),
+ padding(padding_),
+ pooling2d_props(props::PoolingType(), std::vector<props::PoolSize>(),
+ std::array<props::Stride, POOLING2D_DIM>(),
+ props::Padding2D()),
+ pool_helper_idx(0) {}
+
// NOTE(review): this hunk is truncated in this view (e.g. the message after
// the throw at the top, and lines between L56 and the trailing comment, are
// elided) -- comments below describe only what is visible.
void Pooling2DLayer::finalize(InitLayerContext &context) {
if (context.getNumInputs() != 1) {
throw std::invalid_argument(
const TensorDim &in_dim = context.getInputDimensions()[SINGLE_INOUT_IDX];
TensorDim out_dim;
- if (pooling_type == PoolingType::global_max ||
- pooling_type == PoolingType::global_average) {
- if (pool_size[0] != 0 || pool_size[1] != 0) {
- throw std::invalid_argument(
- "[Pooling2D] global_max, global_average does not accept pool size");
- }
- pool_size[0] = in_dim.height();
- pool_size[1] = in_dim.width();
+// properties are now read from the tuple; pool size must be unset (global
+// pooling) or exactly two entries (height, width)
+ auto &pool_size = std::get<std::vector<props::PoolSize>>(pooling2d_props);
+ NNTR_THROW_IF(!(pool_size.empty() || pool_size.size() == 2),
+ std::invalid_argument)
+ << "[Pooling2D] the number of pool size should be 0 or 2";
+ auto &stride =
+ std::get<std::array<props::Stride, POOLING2D_DIM>>(pooling2d_props);
+ auto &pooling_type = std::get<props::PoolingType>(pooling2d_props).get();
+
+// for global pooling the pool size is derived from the input spatial dims
+// instead of being user-supplied
+ if (pooling_type == props::PoolingTypeInfo::Enum::global_max ||
+ pooling_type == props::PoolingTypeInfo::Enum::global_average) {
+ NNTR_THROW_IF(!pool_size.empty(), std::invalid_argument)
+ << "[Pooling2D] global_max, global_average does not accept pool size";
+ pool_size.emplace_back(props::PoolSize(in_dim.height()));
+ pool_size.emplace_back(props::PoolSize(in_dim.width()));
}
// resolve the Padding2D property into concrete {top, bottom, left, right}
// (renamed member: pool2d_props -> pooling2d_props)
- padding = std::get<props::Padding2D>(pool2d_props)
+ padding = std::get<props::Padding2D>(pooling2d_props)
.compute(in_dim, {pool_size[0], pool_size[1]});
auto [pt, pb, pl, pr] = padding;
- if (pooling_type == PoolingType::global_max ||
- pooling_type == PoolingType::global_average) {
+ if (pooling_type == props::PoolingTypeInfo::Enum::global_max ||
+ pooling_type == props::PoolingTypeInfo::Enum::global_average) {
if (pt + pb + pl + pr != 0) {
throw std::invalid_argument(
"[Pooling2D] global_max, global_average does not accept padding");
* = 12 / 4 = 3
* // clang-format on
*/
+// global_max requests an int helper tensor (consumed by calcDerivative)
- if (pooling_type == PoolingType::global_max) {
+ if (pooling_type == props::PoolingTypeInfo::Enum::global_max) {
pool_helper_idx = context.requestTensor(
in_dim, context.getName() + ":helper_idx", Tensor::Initializer::NONE,
false, ITERATION_LIFESPAN);
}
// NOTE(review): truncated hunk -- the loop bodies inside each case are
// elided in this view; comments describe only visible code.
void Pooling2DLayer::calcDerivative(RunLayerContext &context) {
+// fetch properties from the tuple up front (same pattern as finalize())
+ auto &pool_size = std::get<std::vector<props::PoolSize>>(pooling2d_props);
+ auto &stride =
+ std::get<std::array<props::Stride, POOLING2D_DIM>>(pooling2d_props);
+ auto &pooling_type = std::get<props::PoolingType>(pooling2d_props).get();
+
Tensor &deriv = context.getIncomingDerivative(SINGLE_INOUT_IDX);
Tensor &result = context.getOutgoingDerivative(SINGLE_INOUT_IDX);
Tensor &pool_helper = context.getTensor(pool_helper_idx);
unsigned int in_map_size = height * width;
switch (pooling_type) {
// max: pool_helper supplies int data recorded during forward (body elided)
- case PoolingType::max: {
+ case props::PoolingTypeInfo::Enum::max: {
const int *iter = pool_helper.getData<int>();
const float *deriv_data = deriv.getData();
for (unsigned int b = 0; b < batch; ++b) {
}
}
} break;
- case PoolingType::global_average:
- case PoolingType::average: {
+ case props::PoolingTypeInfo::Enum::global_average:
+ case props::PoolingTypeInfo::Enum::average: {
int heigth_stride_end = height - p_height + pb;
int width_stride_end = width - p_width + pr;
const int *iter = pool_helper.getData<int>();
}
}
} break;
- case PoolingType::global_max: {
+ case props::PoolingTypeInfo::Enum::global_max: {
float *deriv_data = deriv.getData();
for (unsigned int b = 0; b < batch; b++) {
for (unsigned int c = 0; c < channel; c++) {
}
}
// NOTE(review): this hunk removes both key/value-parsing setProperty
// overloads and replaces them with the property-tuple based
// loadProperties()/exportTo() flow.
-void Pooling2DLayer::setProperty(const std::vector<std::string> &values) {
- /// @todo: deprecate this in favor of loadProperties
- for (unsigned int i = 0; i < values.size(); ++i) {
- std::string key;
- std::string value;
- std::stringstream ss;
-
- if (getKeyValue(values[i], key, value) != ML_ERROR_NONE) {
- throw std::invalid_argument("Error parsing the property: " + values[i]);
- }
-
- if (value.empty()) {
- ss << "value is empty: key: " << key << ", value: " << value;
- throw std::invalid_argument(ss.str());
- }
-
- /// @note this calls derived setProperty if available
- setProperty(key, value);
- }
+/**
+ * @brief serialize this layer's properties by saving the whole tuple at once
+ */
+void Pooling2DLayer::exportTo(Exporter &exporter,
+ const ExportMethods &method) const {
+ exporter.saveResult(pooling2d_props, method, this);
}
-void Pooling2DLayer::setProperty(const std::string &type_str,
- const std::string &value) {
- using PropertyType = nntrainer::Layer::PropertyType;
- int status = ML_ERROR_NONE;
- nntrainer::Layer::PropertyType type =
- static_cast<nntrainer::Layer::PropertyType>(parseLayerProperty(type_str));
-
- switch (type) {
- case PropertyType::pooling:
- pooling_type = (PoolingType)parseType(value, TOKEN_POOLING);
- if (pooling_type == PoolingType::unknown) {
- throw std::invalid_argument("[Pooling2d_layer]: Unknown pooling type");
- }
- break;
- case PropertyType::pool_size:
- status = getValues(POOLING2D_DIM, value, (int *)(pool_size.data()));
- throw_status(status);
- if (pool_size[0] == 0 || pool_size[1] == 0) {
- throw std::invalid_argument(
- "[Pooling2d_layer] pool_size must be greater than 0");
- }
- break;
- case PropertyType::stride:
- status = getValues(POOLING2D_DIM, value, (int *)(stride.data()));
- throw_status(status);
- if (stride[0] == 0 || stride[1] == 0) {
- throw std::invalid_argument(
- "[Pooling2d_layer] stride must be greater than 0");
- }
- break;
- case PropertyType::padding:
- from_string(value, std::get<props::Padding2D>(pool2d_props));
- break;
- default:
- std::string msg = "[Pooling2DLayer] Unknown Layer Property Key for value " +
- std::string(value);
- throw exception::not_supported(msg);
- }
+/**
+ * @brief load properties into the tuple; any entry loadProperties() cannot
+ *        consume is rejected
+ * NOTE(review): the error message reports values.size(), not
+ * remain_props.size() -- confirm that is the intended count.
+ */
+void Pooling2DLayer::setProperty(const std::vector<std::string> &values) {
+ auto remain_props = loadProperties(values, pooling2d_props);
+ NNTR_THROW_IF(!remain_props.empty(), std::invalid_argument)
+ << "[Pooling2dLayer] Unknown Layer Properties count " +
+ std::to_string(values.size());
}
// NOTE(review): truncated hunk -- the pool_fn lambda bodies and the tail of
// the switch/function are elided in this view.
void Pooling2DLayer::pooling2d(Tensor &in, bool training, Tensor &output,
Tensor &pool_helper, int batch_idx) {
+// properties fetched from the tuple (replaces the former member fields)
+ auto &pool_size = std::get<std::vector<props::PoolSize>>(pooling2d_props);
+ auto &stride =
+ std::get<std::array<props::Stride, POOLING2D_DIM>>(pooling2d_props);
+ auto &pooling_type = std::get<props::PoolingType>(pooling2d_props).get();
+
unsigned int channel = in.channel();
auto [pt, pb, pl, pr] = padding;
unsigned int max_idx_count = 0;
// each case installs a per-patch pool_fn lambda (bodies elided here)
- case PoolingType::max: {
+ case props::PoolingTypeInfo::Enum::max: {
pool_fn = [&, this](const float *in_data, int channel_idx, int start_h,
int start_w) {
int end_h = start_h + patch_height;
};
break;
}
- case PoolingType::global_max: {
+ case props::PoolingTypeInfo::Enum::global_max: {
pool_fn = [&, this](const float *in_data, int channel_idx, int start_h,
int start_w) {
int end_h = start_h + patch_height;
};
break;
}
- case PoolingType::global_average:
- case PoolingType::average: {
+ case props::PoolingTypeInfo::Enum::global_average:
+ case props::PoolingTypeInfo::Enum::average: {
pool_fn = [&, this](const float *in_data, int channel_idx, int start_h,
int start_w) {
int end_h = start_h + patch_height;
};
break;
}
- case PoolingType::unknown:
+ case props::PoolingTypeInfo::Enum::unknown:
default:
throw std::invalid_argument("unknown pooling type given");
break;
// NOTE(review): header hunk (declaration side of the refactor); several doc
// comments are truncated in this view.
class Pooling2DLayer : public Layer {
public:
// NOTE(review): the layer-local PoolingType enum is removed here; callers
// now use props::PoolingTypeInfo::Enum instead.
/**
- * @brief Pooling operation type class
- */
- enum class PoolingType {
- max = 0,
- average = 1,
- global_max = 2,
- global_average = 3,
- unknown = 4,
- };
-
- /**
* @brief PaddingType Class
* @todo support keras type of padding
*/
/**
* @brief Constructor of Pooling 2D Layer
*/
- Pooling2DLayer(
- PoolingType pooling_type_ = PoolingType::average,
- const std::array<unsigned int, POOLING2D_DIM> &pool_size_ = {0, 0},
- const std::array<unsigned int, POOLING2D_DIM> &stride_ = {1, 1},
- const std::array<unsigned int, POOLING2D_DIM * 2> &padding_ = {0, 0, 0,
- 0}) :
- Layer(),
- pool_size(pool_size_),
- stride(stride_),
- padding(padding_),
- pool2d_props(),
- pool_helper_idx(0),
- pooling_type(pooling_type_) {}
+ Pooling2DLayer(const std::array<unsigned int, POOLING2D_DIM * 2> &padding_ = {
+ 0, 0, 0, 0});
/**
* @brief Destructor of Pooling 2D Layer
/**
* @copydoc Layer::exportTo(Exporter &exporter, ExportMethods method)
*/
- void exportTo(Exporter &exporter,
- const ExportMethods &method) const override {
- Layer::exportTo(exporter, method);
- }
+ void exportTo(Exporter &exporter, const ExportMethods &method) const override;
/**
* @copydoc Layer::getType()
*/
void setBatch(InitLayerContext &context, unsigned int batch) override {
context.updateTensorSpec(pool_helper_idx, batch);
// pooling type is now read from the props tuple on demand (the
// pooling_type member is removed below)
- if (pooling_type == PoolingType::global_max)
+ props::PoolingTypeInfo::Enum pooling_type =
+ std::get<props::PoolingType>(pooling2d_props).get();
+ if (pooling_type == props::PoolingTypeInfo::Enum::global_max)
pool_helper_size.resize(batch *
context.getInputDimensions()[0].channel());
}
*/
void setBatch(RunLayerContext &context, unsigned int batch) override {
context.updateTensor(pool_helper_idx, batch);
- if (pooling_type == PoolingType::global_max)
+ props::PoolingTypeInfo::Enum pooling_type =
+ std::get<props::PoolingType>(pooling2d_props).get();
+ if (pooling_type == props::PoolingTypeInfo::Enum::global_max)
pool_helper_size.resize(batch * context.getInput(0).channel());
}
private:
- std::array<unsigned int, POOLING2D_DIM> pool_size;
- std::array<unsigned int, POOLING2D_DIM> stride;
std::array<unsigned int, POOLING2D_DIM * 2> padding;
+ // single source of truth for pooling properties; replaces the removed
+ // pool_size/stride/pooling_type members and the old pool2d_props tuple
- std::tuple<props::Padding2D> pool2d_props;
+ std::tuple<props::PoolingType, std::vector<props::PoolSize>,
+ std::array<props::Stride, POOLING2D_DIM>, props::Padding2D>
+ pooling2d_props;
unsigned int pool_helper_idx; /**< helper tensor idx */
std::vector<unsigned int>
pool_helper_size; /**< helper size for each elements in the case of
global_max pooling */
- PoolingType pooling_type;
/**
* @brief calculation convolution
*/
void pooling2d(Tensor &in, bool training, Tensor &output, Tensor &pool_helper,
int batch_idx);
-
- /**
- * @brief setProperty by type and value separated
- * @param[in] type property type to be passed
- * @param[in] value value to be passed
- * @exception exception::not_supported when property type is not valid for
- * the particular layer
- * @exception std::invalid_argument invalid argument
- */
- void setProperty(const std::string &type_str, const std::string &value);
};
} // namespace nntrainer