[neurun] Replace OperandIndex in LocalResponseNormalization::Param (#8708)
author: Sergei Barannikov/AI Tools Lab /SRR/Engineer/Samsung Electronics <s.barannikov@samsung.com>
Mon, 4 Nov 2019 02:27:59 +0000 (05:27 +0300)
committer: 오형석/On-Device Lab(SR)/Staff Engineer/삼성전자 <hseok82.oh@samsung.com>
Mon, 4 Nov 2019 02:27:59 +0000 (11:27 +0900)
Replace `OperandIndex` in `LocalResponseNormalization::Param` with `int32_t` / `float`.

Signed-off-by: Sergei Barannikov <s.barannikov@samsung.com>
runtime/neurun/backend/acl_cl/KernelGenerator.cc
runtime/neurun/backend/acl_neon/KernelGenerator.cc
runtime/neurun/core/include/model/operation/LocalResponseNormalization.h
runtime/neurun/frontend/nnapi/wrapper/OperationFactory.cc

index 31f9520..397c7d5 100644 (file)
@@ -1943,15 +1943,11 @@ void KernelGenerator::visit(const model::operation::LocalResponseNormalization &
   const auto ofm_index{node.getOutputs().at(0)};
   const auto ifm_index{
       node.getInputs().at(model::operation::LocalResponseNormalization::Input::INPUT)};
-  const auto radius_index{node.param().radius_index};
-  const auto bias_index{node.param().bias_index};
-  const auto alpha_index{node.param().alpha_index};
-  const auto beta_index{node.param().beta_index};
-
-  auto radius = _ctx.at(radius_index).asScalar<int32_t>();
-  auto alpha = _ctx.at(alpha_index).asScalar<float>();
-  auto beta = _ctx.at(beta_index).asScalar<float>();
-  auto bias = _ctx.at(bias_index).asScalar<float>();
+
+  auto radius = node.param().radius;
+  auto alpha = node.param().alpha;
+  auto beta = node.param().beta;
+  auto bias = node.param().bias;
 
   auto ofm_alloc = _tensor_builder->at(ofm_index).get();
   auto ifm_alloc = _tensor_builder->at(ifm_index).get();
index d89312d..3109e90 100644 (file)
@@ -846,15 +846,11 @@ void KernelGenerator::visit(const model::operation::LocalResponseNormalization &
   const auto ofm_index{node.getOutputs().at(0)};
   const auto ifm_index{
       node.getInputs().at(model::operation::LocalResponseNormalization::Input::INPUT)};
-  const auto radius_index{node.param().radius_index};
-  const auto bias_index{node.param().bias_index};
-  const auto alpha_index{node.param().alpha_index};
-  const auto beta_index{node.param().beta_index};
-
-  auto radius = _ctx.at(radius_index).asScalar<int32_t>();
-  auto alpha = _ctx.at(alpha_index).asScalar<float>();
-  auto beta = _ctx.at(beta_index).asScalar<float>();
-  auto bias = _ctx.at(bias_index).asScalar<float>();
+
+  auto radius = node.param().radius;
+  auto alpha = node.param().alpha;
+  auto beta = node.param().beta;
+  auto bias = node.param().bias;
 
   auto ofm_alloc = _tensor_builder->at(ofm_index).get();
   auto ifm_alloc = _tensor_builder->at(ifm_index).get();
index bd7f85a..a50fcbe 100644 (file)
@@ -38,10 +38,10 @@ public:
 
   struct Param
   {
-    OperandIndex radius_index;
-    OperandIndex bias_index;
-    OperandIndex alpha_index;
-    OperandIndex beta_index;
+    int radius;
+    float bias;
+    float alpha;
+    float beta;
   };
 
 public:
index a57fafc..6e41557 100644 (file)
@@ -1454,7 +1454,7 @@ OperationFactory::OperationFactory()
   };
 
   _map[ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION] = [](const OperationFactory::Param &init_param,
-                                                          neurun::model::Operands &) {
+                                                          neurun::model::Operands &operands) {
     assert(init_param.input_count == 5 && init_param.output_count == 1);
 
     OperandIndexSequence outputs{init_param.outputs[0]};
@@ -1462,10 +1462,10 @@ OperationFactory::OperationFactory()
     OperandIndexSequence inputs{init_param.inputs[0]};
 
     operation::LocalResponseNormalization::Param param;
-    param.radius_index = OperandIndex{init_param.inputs[1]};
-    param.bias_index = OperandIndex{init_param.inputs[2]};
-    param.alpha_index = OperandIndex{init_param.inputs[3]};
-    param.beta_index = OperandIndex{init_param.inputs[4]};
+    param.radius = operands.at(OperandIndex{init_param.inputs[1]}).asScalar<std::int32_t>();
+    param.bias = operands.at(OperandIndex{init_param.inputs[2]}).asScalar<float>();
+    param.alpha = operands.at(OperandIndex{init_param.inputs[3]}).asScalar<float>();
+    param.beta = operands.at(OperandIndex{init_param.inputs[4]}).asScalar<float>();
 
     return new operation::LocalResponseNormalization{inputs, outputs, param};
   };