[neurun] Made ShapeInference consume shapes instead of nodes (#5652)
authorIvan Vagin/AI Tools Lab /SRR/Engineer/삼성전자 <ivan.vagin@samsung.com>
Tue, 16 Jul 2019 01:04:55 +0000 (04:04 +0300)
committer오형석/On-Device Lab(SR)/Staff Engineer/삼성전자 <hseok82.oh@samsung.com>
Tue, 16 Jul 2019 01:04:55 +0000 (10:04 +0900)
Made ShapeInference consume shapes instead of nodes

Signed-off-by: Ivan Vagin <ivan.vagin@samsung.com>
runtimes/neurun/core/include/util/ShapeInference.h
runtimes/neurun/core/src/util/ShapeInference.cc
runtimes/neurun/test/graph/util/ShapeInference.cc [deleted file]
runtimes/neurun/test/util/ShapeInference.cc [new file with mode: 0644]

index de9d5d4..5a900c1 100644 (file)
 #include "model/OperationVisitor.h"
 #include "model/Operands.h"
 #include "model/Index.h"
+#include "model/Layout.h"
 
 namespace neurun
 {
-namespace graph
+namespace shape_inference
 {
 
-/**
- * @brief Class for inferring tensor shapes
- */
-class ShapeInference
-{
-public:
-  using OperandsShapes = std::unordered_map<model::OperandIndex, model::Shape>;
+using Shapes = std::vector<model::Shape>;
 
-  explicit ShapeInference(const neurun::model::Operands &ctx) : _ctx(ctx) {}
-  OperandsShapes inferShapes(const model::operation::AddNode &);
-  OperandsShapes inferShapes(const model::operation::AvgPool2DNode &);
+Shapes inferEltwiseShape(const model::Shape &lhs_shape, const model::Shape &rhs_shape);
 
-private:
-  const neurun::model::Operands &_ctx;
-};
+Shapes inferAvgPoolShape(const model::Shape &in_shape,
+                         const model::operation::AvgPool2DNode::Param &param,
+                         model::Layout layout = model::Layout::NHWC);
 
-} // namespace graph
+} // namespace shape_inference
 } // namespace neurun
 
 #endif // __NEURUN_GRAPH_SHAPE_INFERENCE_H__
index 93e48db..4a0b998 100644 (file)
  * limitations under the License.
  */
 
-#include "util/ShapeInference.h"
 #include "util/Utils.h"
 #include "model/InternalType.h"
 #include "model/Shape.h"
+#include "model/operation/AvgPool2DNode.h"
+#include "util/ShapeInference.h"
 
 namespace neurun
 {
-namespace graph
+namespace shape_inference
 {
 
 //
@@ -40,8 +41,8 @@ ceil_div(T dividend, U divisor)
   return (dividend + divisor - 1) / divisor;
 }
 
-// Calculate output shape of binary elementwise operation
-model::Shape calcEltwiseOutShape(const model::Shape &lhs_shape, const model::Shape &rhs_shape)
+// Calculate the result of broadcast of two shapes
+model::Shape broadcastShapes(const model::Shape &lhs_shape, const model::Shape &rhs_shape)
 {
   model::Shape out_shape;
   auto max_rank = std::max(lhs_shape.rank(), rhs_shape.rank());
@@ -98,32 +99,22 @@ std::pair<int, int> calcConvLikeHeightAndWidth(const int in_h, const int in_w, c
 // Shape inference
 //
 
-ShapeInference::OperandsShapes ShapeInference::inferShapes(const model::operation::AddNode &node)
+Shapes inferEltwiseShape(const model::Shape &lhs_shape, const model::Shape &rhs_shape)
 {
-  const auto lhs_index{node.getInputs().at(model::operation::AddNode::Input::LHS)};
-  const auto rhs_index{node.getInputs().at(model::operation::AddNode::Input::RHS)};
-  const auto lhs_shape{_ctx.at(lhs_index).shape()};
-  const auto rhs_shape{_ctx.at(rhs_index).shape()};
-  const auto out_index{node.getOutputs().at(0)};
-  return {{out_index, calcEltwiseOutShape(lhs_shape, rhs_shape)}};
+  return {broadcastShapes(lhs_shape, rhs_shape)};
 }
 
-ShapeInference::OperandsShapes
-ShapeInference::inferShapes(const model::operation::AvgPool2DNode &node)
+Shapes inferAvgPoolShape(const model::Shape &in_shape,
+                         const model::operation::AvgPool2DNode::Param &param,
+                         const model::Layout layout)
 {
-  const auto ofm_index{node.getOutputs().at(0)};
-  const auto ifm_index{node.getInputs().at(model::operation::AvgPool2DNode::Input::INPUT)};
-
-  const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature();
-
-  const auto out_h_w =
-      calcConvLikeHeightAndWidth(ifm_shape.H, ifm_shape.W, node.param().kh, node.param().kw,
-                                 node.param().padding, node.param().stride);
-
-  assert(_ctx.at(ifm_index).layout() == model::Layout::NHWC);
-  // Average pool don't change number of channels and batch size
-  return {{ofm_index, {ifm_shape.N, out_h_w.first, out_h_w.second, ifm_shape.C}}};
+  assert(layout == model::Layout::NHWC);
+  auto ifm_shape = in_shape.asFeature(layout);
+  const auto out_h_w = calcConvLikeHeightAndWidth(ifm_shape.H, ifm_shape.W, param.kh, param.kw,
+                                                  param.padding, param.stride);
+  // Pooling don't change number of channels and batch size
+  return {model::Shape{ifm_shape.N, out_h_w.first, out_h_w.second, ifm_shape.C}};
 }
 
-} // namespace graph
+} // namespace shape_inference
 } // namespace neurun
diff --git a/runtimes/neurun/test/graph/util/ShapeInference.cc b/runtimes/neurun/test/graph/util/ShapeInference.cc
deleted file mode 100644 (file)
index 83d2788..0000000
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <gtest/gtest.h>
-
-#include "model/Model.h"
-#include "model/InternalType.h"
-#include "model/TypeInfo.h"
-#include "model/DataType.h"
-#include "model/operation/AddNode.h"
-#include "util/ShapeInference.h"
-
-using namespace neurun::model;
-
-TEST(ShapeInference, AddNode)
-{
-  Model model;
-  TypeInfo type(DataType::FLOAT32);
-  auto lhs_index = model.operands.emplace(Shape{1, 299, 299, 3}, type);
-  auto rhs_index = model.operands.emplace(Shape{3}, type);
-  auto out_index = model.operands.emplace(Shape{0}, type);
-
-  operation::AddNode::Param param{Activation::NONE};
-  operation::AddNode add_op({lhs_index.value(), rhs_index.value()}, {out_index.value()}, param);
-
-  neurun::graph::ShapeInference shape_inference(model.operands);
-  auto infered_shapes = shape_inference.inferShapes(add_op);
-  auto infered_out_shape = infered_shapes.at(out_index);
-
-  ASSERT_EQ(infered_out_shape.rank(), 4);
-  ASSERT_EQ(infered_out_shape.dim(0), 1);
-  ASSERT_EQ(infered_out_shape.dim(1), 299);
-  ASSERT_EQ(infered_out_shape.dim(2), 299);
-  ASSERT_EQ(infered_out_shape.dim(3), 3);
-}
-
-TEST(ShapeInference, IncorrectAddNode)
-{
-  Model model;
-  TypeInfo type(DataType::FLOAT32);
-  auto lhs_index = model.operands.emplace(Shape{1, 299, 299, 3}, type);
-  auto rhs_index = model.operands.emplace(Shape{5}, type);
-  auto out_index = model.operands.emplace(Shape{0}, type);
-
-  operation::AddNode::Param param{Activation::NONE};
-  operation::AddNode addOp({lhs_index.value(), rhs_index.value()}, {out_index.value()}, param);
-
-  neurun::graph::ShapeInference shape_inference(model.operands);
-  ASSERT_THROW(shape_inference.inferShapes(addOp), std::runtime_error);
-}
-
-TEST(ShapeInference, AvgPool2DNodeSame)
-{
-  Model model;
-  TypeInfo type(DataType::FLOAT32);
-  auto input_index = model.operands.emplace(Shape{10, 6, 12, 20}, type);
-  auto out_index = model.operands.emplace(Shape{0}, type);
-
-  Stride stride{3, 7};
-  Padding padding{PaddingType::SAME};
-  operation::AvgPool2DNode::Param param{3, 6, stride, padding, Activation::NONE};
-  operation::AvgPool2DNode avg_pool_op({input_index.value()}, {out_index.value()}, param);
-
-  neurun::graph::ShapeInference shape_inference(model.operands);
-  auto infered_shapes = shape_inference.inferShapes(avg_pool_op);
-  auto infered_out_shape = infered_shapes.at(out_index);
-
-  ASSERT_EQ(infered_out_shape.rank(), 4);
-  ASSERT_EQ(infered_out_shape.asFeature().N, 10);
-  ASSERT_EQ(infered_out_shape.asFeature().H, 2);
-  ASSERT_EQ(infered_out_shape.asFeature().W, 2);
-  ASSERT_EQ(infered_out_shape.asFeature().C, 20);
-}
-
-TEST(ShapeInference, AvgPool2DNodeValid)
-{
-  Model model;
-  TypeInfo type(DataType::FLOAT32);
-  auto input_index = model.operands.emplace(Shape{10, 6, 12, 20}, type);
-  auto out_index = model.operands.emplace(Shape{0}, type);
-
-  Stride stride{3, 7};
-  Padding padding{PaddingType::VALID};
-  operation::AvgPool2DNode::Param param{3, 6, stride, padding, Activation::NONE};
-  operation::AvgPool2DNode avg_pool_op({input_index.value()}, {out_index.value()}, param);
-
-  neurun::graph::ShapeInference shape_inference(model.operands);
-  auto infered_shapes = shape_inference.inferShapes(avg_pool_op);
-  auto infered_out_shape = infered_shapes.at(out_index);
-
-  ASSERT_EQ(infered_out_shape.rank(), 4);
-  ASSERT_EQ(infered_out_shape.asFeature().N, 10);
-  ASSERT_EQ(infered_out_shape.asFeature().H, 2);
-  ASSERT_EQ(infered_out_shape.asFeature().W, 1);
-  ASSERT_EQ(infered_out_shape.asFeature().C, 20);
-}
-
-TEST(ShapeInference, AvgPool2DNodeExplicit)
-{
-  Model model;
-  TypeInfo type(DataType::FLOAT32);
-  auto input_index = model.operands.emplace(Shape{10, 3, 5, 20}, type);
-  auto out_index = model.operands.emplace(Shape{0}, type);
-
-  Stride stride{3, 7};
-  Padding padding{PaddingType::EXPLICIT, {4, 3, 2, 1}};
-  operation::AvgPool2DNode::Param param{3, 6, stride, padding, Activation::NONE};
-  operation::AvgPool2DNode avg_pool_op({input_index.value()}, {out_index.value()}, param);
-
-  neurun::graph::ShapeInference shape_inference(model.operands);
-  auto infered_shapes = shape_inference.inferShapes(avg_pool_op);
-  auto infered_out_shape = infered_shapes.at(out_index);
-
-  ASSERT_EQ(infered_out_shape.rank(), 4);
-  ASSERT_EQ(infered_out_shape.asFeature().N, 10);
-  ASSERT_EQ(infered_out_shape.asFeature().H, 2);
-  ASSERT_EQ(infered_out_shape.asFeature().W, 1);
-  ASSERT_EQ(infered_out_shape.asFeature().C, 20);
-}
diff --git a/runtimes/neurun/test/util/ShapeInference.cc b/runtimes/neurun/test/util/ShapeInference.cc
new file mode 100644 (file)
index 0000000..550d02b
--- /dev/null
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "model/Layout.h"
+#include "util/ShapeInference.h"
+
+using namespace neurun::model;
+
+TEST(ShapeInference, AddNode)
+{
+  Shape lhs_shape{1, 299, 299, 3};
+  Shape rhs_shape{3};
+  auto infered_shapes = neurun::shape_inference::inferEltwiseShape(lhs_shape, rhs_shape);
+  auto infered_out_shape = infered_shapes[0];
+
+  ASSERT_EQ(infered_out_shape.rank(), 4);
+  ASSERT_EQ(infered_out_shape.dim(0), 1);
+  ASSERT_EQ(infered_out_shape.dim(1), 299);
+  ASSERT_EQ(infered_out_shape.dim(2), 299);
+  ASSERT_EQ(infered_out_shape.dim(3), 3);
+}
+
+TEST(ShapeInference, IncorrectAddNode)
+{
+  Shape lhs_shape{1, 299, 299, 3};
+  Shape rhs_shape{5, 3};
+  ASSERT_THROW(neurun::shape_inference::inferEltwiseShape(lhs_shape, rhs_shape),
+               std::runtime_error);
+}
+
+TEST(ShapeInference, AvgPool2DNodeSame)
+{
+  Shape in_shape{10, 6, 12, 20};
+  Stride stride{3, 7};
+  Padding padding{PaddingType::SAME};
+
+  operation::AvgPool2DNode::Param param{3, 6, stride, padding, Activation::NONE};
+  auto infered_shapes = neurun::shape_inference::inferAvgPoolShape(in_shape, param, Layout::NHWC);
+  auto infered_out_shape = infered_shapes[0];
+
+  ASSERT_EQ(infered_out_shape.rank(), 4);
+  ASSERT_EQ(infered_out_shape.asFeature().N, 10);
+  ASSERT_EQ(infered_out_shape.asFeature().H, 2);
+  ASSERT_EQ(infered_out_shape.asFeature().W, 2);
+  ASSERT_EQ(infered_out_shape.asFeature().C, 20);
+}
+
+TEST(ShapeInference, AvgPool2DNodeValid)
+{
+  Shape in_shape{10, 6, 12, 20};
+  Stride stride{3, 7};
+  Padding padding{PaddingType::VALID};
+
+  operation::AvgPool2DNode::Param param{3, 6, stride, padding, Activation::NONE};
+  auto infered_shapes = neurun::shape_inference::inferAvgPoolShape(in_shape, param, Layout::NHWC);
+  auto infered_out_shape = infered_shapes[0];
+
+  ASSERT_EQ(infered_out_shape.rank(), 4);
+  ASSERT_EQ(infered_out_shape.asFeature().N, 10);
+  ASSERT_EQ(infered_out_shape.asFeature().H, 2);
+  ASSERT_EQ(infered_out_shape.asFeature().W, 1);
+  ASSERT_EQ(infered_out_shape.asFeature().C, 20);
+}
+
+TEST(ShapeInference, AvgPool2DNodeExplicit)
+{
+  Shape in_shape{10, 3, 5, 20};
+
+  Stride stride{3, 7};
+  Padding padding{PaddingType::EXPLICIT, {4, 3, 2, 1}};
+
+  operation::AvgPool2DNode::Param param{3, 6, stride, padding, Activation::NONE};
+  auto infered_shapes = neurun::shape_inference::inferAvgPoolShape(in_shape, param, Layout::NHWC);
+  auto infered_out_shape = infered_shapes[0];
+
+  ASSERT_EQ(infered_out_shape.rank(), 4);
+  ASSERT_EQ(infered_out_shape.asFeature().N, 10);
+  ASSERT_EQ(infered_out_shape.asFeature().H, 2);
+  ASSERT_EQ(infered_out_shape.asFeature().W, 1);
+  ASSERT_EQ(infered_out_shape.asFeature().C, 20);
+}