From e6ff96dead96596a94c170f0ec52f558d705a61c Mon Sep 17 00:00:00 2001 From: =?utf8?q?Ivan=20Vagin/AI=20Tools=20Lab=20/SRR/Engineer/=EC=82=BC?= =?utf8?q?=EC=84=B1=EC=A0=84=EC=9E=90?= Date: Thu, 18 Jul 2019 10:19:38 +0300 Subject: [PATCH] [shape_inference] Implemented shape inference for max pooling (#5654) * [neurun] Made ShapeInference to consume shapes instead of nodes Made ShapeInference to consume shapes instead of nodes Signed-off-by: Ivan Vagin * [shape_inference] Implemented shape inference for max pooling Implemented shape inference for max pooling Signed-off-by: Ivan Vagin --- .../core/include/model/operation/MaxPool2DNode.h | 2 +- runtimes/neurun/core/include/util/ShapeInference.h | 4 ++ runtimes/neurun/core/src/util/ShapeInference.cc | 13 ++++++ runtimes/neurun/test/util/ShapeInference.cc | 48 ++++++++++++++++++---- 4 files changed, 57 insertions(+), 10 deletions(-) diff --git a/runtimes/neurun/core/include/model/operation/MaxPool2DNode.h b/runtimes/neurun/core/include/model/operation/MaxPool2DNode.h index 2c6bb6e..e8afe86 100644 --- a/runtimes/neurun/core/include/model/operation/MaxPool2DNode.h +++ b/runtimes/neurun/core/include/model/operation/MaxPool2DNode.h @@ -39,8 +39,8 @@ public: struct Param { - uint32_t kw; uint32_t kh; + uint32_t kw; Stride stride; Padding padding; Activation activation; diff --git a/runtimes/neurun/core/include/util/ShapeInference.h b/runtimes/neurun/core/include/util/ShapeInference.h index e3722d2..c0b7f59 100644 --- a/runtimes/neurun/core/include/util/ShapeInference.h +++ b/runtimes/neurun/core/include/util/ShapeInference.h @@ -35,6 +35,10 @@ Shapes inferAvgPoolShape(const model::Shape &in_shape, const model::operation::AvgPool2DNode::Param &param, model::Layout layout = model::Layout::NHWC); +Shapes inferMaxPoolShape(const model::Shape &in_shape, + const model::operation::MaxPool2DNode::Param &param, + model::Layout layout = model::Layout::NHWC); + Shapes inferFCShape(const model::Shape &in_shape, const model::Shape &ker_shape); } 
// namespace shape_inference diff --git a/runtimes/neurun/core/src/util/ShapeInference.cc b/runtimes/neurun/core/src/util/ShapeInference.cc index d859afe..ad1b92f 100644 --- a/runtimes/neurun/core/src/util/ShapeInference.cc +++ b/runtimes/neurun/core/src/util/ShapeInference.cc @@ -18,6 +18,7 @@ #include "model/InternalType.h" #include "model/Shape.h" #include "model/operation/AvgPool2DNode.h" +#include "model/operation/MaxPool2DNode.h" #include "util/ShapeInference.h" namespace neurun @@ -116,6 +117,18 @@ Shapes inferAvgPoolShape(const model::Shape &in_shape, return {model::Shape{ifm_shape.N, out_h_w.first, out_h_w.second, ifm_shape.C}}; } +Shapes inferMaxPoolShape(const model::Shape &in_shape, + const model::operation::MaxPool2DNode::Param &param, + const model::Layout layout) +{ + assert(layout == model::Layout::NHWC); + auto ifm_shape = in_shape.asFeature(layout); + const auto out_h_w = calcConvLikeHeightAndWidth(ifm_shape.H, ifm_shape.W, param.kh, param.kw, + param.padding, param.stride); + // Pooling doesn't change the number of channels and batch size + return {model::Shape{ifm_shape.N, out_h_w.first, out_h_w.second, ifm_shape.C}}; +} + Shapes inferFCShape(const model::Shape &in_shape, const model::Shape &ker_shape) { assert(in_shape.rank() >= 2); diff --git a/runtimes/neurun/test/util/ShapeInference.cc b/runtimes/neurun/test/util/ShapeInference.cc index 4186af1..f35084f 100644 --- a/runtimes/neurun/test/util/ShapeInference.cc +++ b/runtimes/neurun/test/util/ShapeInference.cc @@ -43,14 +43,14 @@ TEST(ShapeInference, IncorrectAddNode) std::runtime_error); } -TEST(ShapeInference, AvgPool2DNodeSame) +TEST(ShapeInference, Pool2DNodeSame) { Shape in_shape{10, 6, 12, 20}; Stride stride{3, 7}; Padding padding{PaddingType::SAME}; - operation::AvgPool2DNode::Param param{3, 6, stride, padding, Activation::NONE}; - auto infered_shapes = neurun::shape_inference::inferAvgPoolShape(in_shape, param, Layout::NHWC); + operation::AvgPool2DNode::Param avg_pool_param{3, 6, stride, 
padding, Activation::NONE}; + auto infered_shapes = neurun::shape_inference::inferAvgPoolShape(in_shape, avg_pool_param); auto infered_out_shape = infered_shapes[0]; ASSERT_EQ(infered_out_shape.rank(), 4); @@ -58,16 +58,26 @@ TEST(ShapeInference, AvgPool2DNodeSame) ASSERT_EQ(infered_out_shape.asFeature().H, 2); ASSERT_EQ(infered_out_shape.asFeature().W, 2); ASSERT_EQ(infered_out_shape.asFeature().C, 20); + + operation::MaxPool2DNode::Param max_pool_param{3, 6, stride, padding, Activation::NONE}; + infered_shapes = neurun::shape_inference::inferMaxPoolShape(in_shape, max_pool_param); + infered_out_shape = infered_shapes[0]; + + ASSERT_EQ(infered_out_shape.rank(), 4); + ASSERT_EQ(infered_out_shape.asFeature().N, 10); + ASSERT_EQ(infered_out_shape.asFeature().H, 2); + ASSERT_EQ(infered_out_shape.asFeature().W, 2); + ASSERT_EQ(infered_out_shape.asFeature().C, 20); } -TEST(ShapeInference, AvgPool2DNodeValid) +TEST(ShapeInference, Pool2DNodeValid) { Shape in_shape{10, 6, 12, 20}; Stride stride{3, 7}; Padding padding{PaddingType::VALID}; - operation::AvgPool2DNode::Param param{3, 6, stride, padding, Activation::NONE}; - auto infered_shapes = neurun::shape_inference::inferAvgPoolShape(in_shape, param, Layout::NHWC); + operation::AvgPool2DNode::Param avg_pool_param{3, 6, stride, padding, Activation::NONE}; + auto infered_shapes = neurun::shape_inference::inferAvgPoolShape(in_shape, avg_pool_param); auto infered_out_shape = infered_shapes[0]; ASSERT_EQ(infered_out_shape.rank(), 4); @@ -75,17 +85,27 @@ TEST(ShapeInference, AvgPool2DNodeValid) ASSERT_EQ(infered_out_shape.asFeature().H, 2); ASSERT_EQ(infered_out_shape.asFeature().W, 1); ASSERT_EQ(infered_out_shape.asFeature().C, 20); + + operation::MaxPool2DNode::Param max_pool_param{3, 6, stride, padding, Activation::NONE}; + infered_shapes = neurun::shape_inference::inferMaxPoolShape(in_shape, max_pool_param); + infered_out_shape = infered_shapes[0]; + + ASSERT_EQ(infered_out_shape.rank(), 4); + 
ASSERT_EQ(infered_out_shape.asFeature().N, 10); + ASSERT_EQ(infered_out_shape.asFeature().H, 2); + ASSERT_EQ(infered_out_shape.asFeature().W, 1); + ASSERT_EQ(infered_out_shape.asFeature().C, 20); } -TEST(ShapeInference, AvgPool2DNodeExplicit) +TEST(ShapeInference, Pool2DNodeExplicit) { Shape in_shape{10, 3, 5, 20}; Stride stride{3, 7}; Padding padding{PaddingType::EXPLICIT, {4, 3, 2, 1}}; - operation::AvgPool2DNode::Param param{3, 6, stride, padding, Activation::NONE}; - auto infered_shapes = neurun::shape_inference::inferAvgPoolShape(in_shape, param, Layout::NHWC); + operation::AvgPool2DNode::Param avg_pool_param{3, 6, stride, padding, Activation::NONE}; + auto infered_shapes = neurun::shape_inference::inferAvgPoolShape(in_shape, avg_pool_param); auto infered_out_shape = infered_shapes[0]; ASSERT_EQ(infered_out_shape.rank(), 4); @@ -93,6 +113,16 @@ TEST(ShapeInference, AvgPool2DNodeExplicit) ASSERT_EQ(infered_out_shape.asFeature().H, 2); ASSERT_EQ(infered_out_shape.asFeature().W, 1); ASSERT_EQ(infered_out_shape.asFeature().C, 20); + + operation::MaxPool2DNode::Param max_pool_param{3, 6, stride, padding, Activation::NONE}; + infered_shapes = neurun::shape_inference::inferMaxPoolShape(in_shape, max_pool_param); + infered_out_shape = infered_shapes[0]; + + ASSERT_EQ(infered_out_shape.rank(), 4); + ASSERT_EQ(infered_out_shape.asFeature().N, 10); + ASSERT_EQ(infered_out_shape.asFeature().H, 2); + ASSERT_EQ(infered_out_shape.asFeature().W, 1); + ASSERT_EQ(infered_out_shape.asFeature().C, 20); } TEST(ShapeInference, FullyConnectedNode) -- 2.7.4