Resolve stride value for convolution at frontend (#5282)
author오형석/On-Device Lab(SR)/Staff Engineer/삼성전자 <hseok82.oh@samsung.com>
Mon, 27 May 2019 23:26:57 +0000 (08:26 +0900)
committer박세희/On-Device Lab(SR)/Principal Engineer/삼성전자 <saehie.park@samsung.com>
Mon, 27 May 2019 23:26:57 +0000 (08:26 +0900)
Remove stride index param in conv2d node
Resolve stride value for conv2d at frontend
Use resolved constant stride value at each backend

Signed-off-by: Hyeongseok Oh <hseok82.oh@samsung.com>
runtimes/neurun/backend/acl_cl/StageGenerator.cc
runtimes/neurun/backend/acl_neon/StageGenerator.cc
runtimes/neurun/backend/cpu/StageGenerator.cc
runtimes/neurun/core/include/model/operation/Conv2DNode.h
runtimes/neurun/frontend/nnapi/wrapper/OperationFactory.cc
runtimes/neurun/test/graph/operation/SetIO.cc

index bbb4b1c..e3d90a5 100644 (file)
@@ -218,18 +218,10 @@ void StageGenerator::visit(const model::operation::Conv2DNode &node)
   const auto ker_index{node.getInputs().at(Conv2DNode::Input::KERNEL)};
   const auto bias_index{node.getInputs().at(Conv2DNode::Input::BIAS)};
 
-  const auto vstride_index{node.param().vstride_index};
-  const auto hstride_index{node.param().hstride_index};
-
   const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature();
   const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature();
   const auto ker_shape = _ctx.at(ker_index).shape().asKernel();
 
-  model::Stride stride;
-
-  stride.vertical = _ctx.at(vstride_index).asScalar<int32_t>();
-  stride.horizontal = _ctx.at(hstride_index).asScalar<int32_t>();
-
   // Construct operation parameters
   struct Param
   {
@@ -250,7 +242,7 @@ void StageGenerator::visit(const model::operation::Conv2DNode &node)
   param.ker_index = ker_index;
   param.bias_index = bias_index;
 
-  param.stride = stride;
+  param.stride = node.param().stride;
   param.padding = neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape,
                                                  param.stride, ker_shape.W, ker_shape.H);
   param.activation = node.param().activation;
index 2274752..fec9f9d 100644 (file)
@@ -185,18 +185,10 @@ void StageGenerator::visit(const model::operation::Conv2DNode &node)
   const auto ker_index{node.getInputs().at(Conv2DNode::Input::KERNEL)};
   const auto bias_index{node.getInputs().at(Conv2DNode::Input::BIAS)};
 
-  const auto vstride_index{node.param().vstride_index};
-  const auto hstride_index{node.param().hstride_index};
-
   const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature();
   const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature();
   const auto ker_shape = _ctx.at(ker_index).shape().asKernel();
 
-  model::Stride stride;
-
-  stride.vertical = _ctx.at(vstride_index).asScalar<int32_t>();
-  stride.horizontal = _ctx.at(hstride_index).asScalar<int32_t>();
-
   // Construct operation parameters
   struct Param
   {
@@ -217,7 +209,7 @@ void StageGenerator::visit(const model::operation::Conv2DNode &node)
   param.ker_index = ker_index;
   param.bias_index = bias_index;
 
-  param.stride = stride;
+  param.stride = node.param().stride;
   param.padding = neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape,
                                                  param.stride, ker_shape.W, ker_shape.H);
   param.activation = node.param().activation;
index c7f6ac2..dce76d8 100644 (file)
@@ -63,13 +63,9 @@ void StageGenerator::visit(const model::operation::Conv2DNode &node)
   const auto ker_index{node.getInputs().at(Conv2DNode::Input::KERNEL)};
   const auto bias_index{node.getInputs().at(Conv2DNode::Input::BIAS)};
 
-  const auto vstride_index{node.param().vstride_index};
-  const auto hstride_index{node.param().hstride_index};
-
-  model::Stride stride;
-
-  stride.vertical = _ctx.at(vstride_index).asScalar<int32_t>();
-  stride.horizontal = _ctx.at(hstride_index).asScalar<int32_t>();
+  const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature();
+  const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature();
+  const auto ker_shape = _ctx.at(ker_index).shape().asKernel();
 
   // Construct operation parameters
   struct Param
@@ -101,11 +97,7 @@ void StageGenerator::visit(const model::operation::Conv2DNode &node)
   param.ker_shape = ::neurun::backend::cpu::kernel::getShape(_ctx.at(ker_index));
   param.bias_shape = ::neurun::backend::cpu::kernel::getShape(_ctx.at(bias_index));
 
-  param.stride = stride;
-
-  const auto ifm_shape = _ctx.at(ifm_index).shape().asFeature();
-  const auto ofm_shape = _ctx.at(ofm_index).shape().asFeature();
-  const auto ker_shape = _ctx.at(ker_index).shape().asKernel();
+  param.stride = node.param().stride;
   param.padding = neurun::util::calculatePadding(node.param().padding, ifm_shape, ofm_shape,
                                                  param.stride, ker_shape.W, ker_shape.H);
   param.activation = node.param().activation;
index 9526119..4b70610 100644 (file)
@@ -42,10 +42,6 @@ public:
   struct Param
   {
     Stride stride;
-    // hstride_index and vtride_index will be deprecated
-    OperandIndex hstride_index;
-    OperandIndex vstride_index;
-
     Padding padding;
     Activation activation;
   };
index fc3da94..4172efc 100644 (file)
@@ -53,6 +53,24 @@ ExplicitPadding setExplicitPaddingParam(Operands &operands, const OperandIndex &
   return param;
 }
 
+Stride setStride(Operands &operands, const OperandIndex &horizontal_index,
+                 const OperandIndex &vertical_index)
+{
+  auto horizontal = operands.at(horizontal_index).asScalar<int32_t>();
+  auto vertical = operands.at(vertical_index).asScalar<int32_t>();
+
+  if (vertical < 0 || horizontal < 0)
+  {
+    throw std::runtime_error{"Cannot handle negative stride value"};
+  }
+
+  Stride stride;
+  stride.horizontal = horizontal;
+  stride.vertical = vertical;
+
+  return stride;
+}
+
 } // namespace
 
 OperationFactory &OperationFactory::instance()
@@ -401,12 +419,13 @@ OperationFactory::OperationFactory()
       //  6 -> Activation Index
 
       const auto padding_index = OperandIndex{init_param.inputs[3]};
+      const auto hstride_index = OperandIndex{init_param.inputs[4]};
+      const auto vstride_index = OperandIndex{init_param.inputs[5]};
       const auto activation_index = OperandIndex{init_param.inputs[6]};
 
       param.padding.type =
           NNAPIConvert::getPaddingType(operands.at(padding_index).asScalar<PaddingCode>());
-      param.hstride_index = OperandIndex{init_param.inputs[4]};
-      param.vstride_index = OperandIndex{init_param.inputs[5]};
+      param.stride = setStride(operands, hstride_index, vstride_index);
       param.activation =
           NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
     }
@@ -426,15 +445,15 @@ OperationFactory::OperationFactory()
       const auto padding_right_index = OperandIndex{init_param.inputs[4]};
       const auto padding_top_index = OperandIndex{init_param.inputs[5]};
       const auto padding_bottom_index = OperandIndex{init_param.inputs[6]};
+      const auto hstride_index = OperandIndex{init_param.inputs[7]};
+      const auto vstride_index = OperandIndex{init_param.inputs[8]};
       const auto activation_index = OperandIndex{init_param.inputs[9]};
 
       param.padding.type = PaddingType::EXPLICIT;
       param.padding.param =
           setExplicitPaddingParam(operands, padding_left_index, padding_right_index,
                                   padding_top_index, padding_bottom_index);
-      param.hstride_index = OperandIndex{init_param.inputs[7]};
-      param.vstride_index = OperandIndex{init_param.inputs[8]};
-
+      param.stride = setStride(operands, hstride_index, vstride_index);
       param.activation =
           NNAPIConvert::getFusedActivation(operands.at(activation_index).asScalar<FuseCode>());
     }
index b98b914..939b126 100644 (file)
@@ -47,8 +47,8 @@ TEST(graph_operation_setIO, operation_setIO_conv)
 
   GraphNode::Param conv_params;
   conv_params.padding.type = neurun::model::PaddingType::SAME;
-  conv_params.hstride_index = model.operands.append(shape, type);
-  conv_params.vstride_index = model.operands.append(shape, type);
+  conv_params.stride.horizontal = 1;
+  conv_params.stride.vertical = 1;
   conv_params.activation = neurun::model::Activation::NONE;
 
   auto output_operand = model.operands.append(shape, type).value();