Frontend to support unspecified model input and output (#4495)
author Hyeongseok Oh/On-Device Lab(SR)/Staff Engineer/Samsung Electronics <hseok82.oh@samsung.com>
Tue, 26 Feb 2019 07:49:41 +0000 (16:49 +0900)
committer Chunseok Lee/On-Device Lab(SR)/Staff Engineer/Samsung Electronics <chunseok.lee@samsung.com>
Tue, 26 Feb 2019 07:49:41 +0000 (16:49 +0900)
- Some models don't have specified shapes for their inputs and outputs
- In that case, pass the input and output shapes to the executor when preparing inference
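
A minimal usage sketch of what this enables on the NNAPI client side (the operand index, tensor dimensions, and helper name below are assumed for illustration, not taken from this patch): the concrete shape is now supplied at execution time through the ANeuralNetworksOperandType argument.

// Usage sketch (hypothetical values): the model left this operand's shape
// unspecified, so the client passes the concrete dimensions at execution time.
#include <NeuralNetworks.h> // NNAPI header (shipped as <android/NeuralNetworks.h> in the NDK)

void setIOWithRuntimeShape(ANeuralNetworksExecution *execution, const float *in_buf,
                           float *out_buf, size_t byte_size)
{
  const uint32_t dims[] = {1, 224, 224, 3}; // assumed example shape
  ANeuralNetworksOperandType runtime_type;
  runtime_type.type = ANEURALNETWORKS_TENSOR_FLOAT32;
  runtime_type.dimensionCount = 4;
  runtime_type.dimensions = dims;
  runtime_type.scale = 0.0f;
  runtime_type.zeroPoint = 0;

  // Before this patch these calls returned ANEURALNETWORKS_BAD_STATE for
  // operands with unspecified dimensions; now the given shape is forwarded
  // to the executor.
  ANeuralNetworksExecution_setInput(execution, 0, &runtime_type, in_buf, byte_size);
  ANeuralNetworksExecution_setOutput(execution, 0, &runtime_type, out_buf, byte_size);
}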

Signed-off-by: Hyeongseok Oh <hseok82.oh@samsung.com>
runtimes/neurun/src/frontend/execution.cc
runtimes/neurun/src/frontend/wrapper/execution.cc

diff --git a/runtimes/neurun/src/frontend/execution.cc b/runtimes/neurun/src/frontend/execution.cc
index c9c0058..e9ee6fe 100644
--- a/runtimes/neurun/src/frontend/execution.cc
+++ b/runtimes/neurun/src/frontend/execution.cc
@@ -104,14 +104,6 @@ int ANeuralNetworksExecution_setInput(ANeuralNetworksExecution *execution, int32
       VERBOSE(NNAPI::Execution) << "setInput: Shape mismatch" << std::endl;
       return ANEURALNETWORKS_BAD_DATA;
     }
-
-    // TODO Handle specifed dimension on execution
-    if (execution->haveUnspecifiedDims(operand_index))
-    {
-      VERBOSE(NNAPI::Execution) << "setInput: Cannot handle specified dimension on execution yet"
-                                << std::endl;
-      return ANEURALNETWORKS_BAD_STATE;
-    }
   }
   else
   {
@@ -178,14 +170,6 @@ int ANeuralNetworksExecution_setOutput(ANeuralNetworksExecution *execution, int3
       VERBOSE(NNAPI::Execution) << "setOutput: Shape mismatch" << std::endl;
       return ANEURALNETWORKS_BAD_DATA;
     }
-
-    // TODO Handle specifed dimension on execution
-    if (execution->haveUnspecifiedDims(operand_index))
-    {
-      VERBOSE(NNAPI::Execution) << "setOutput: Cannot handle specified dimension on execution yet"
-                                << std::endl;
-      return ANEURALNETWORKS_BAD_STATE;
-    }
   }
   else
   {
diff --git a/runtimes/neurun/src/frontend/wrapper/execution.cc b/runtimes/neurun/src/frontend/wrapper/execution.cc
index 836e03d..5ad9343 100644
--- a/runtimes/neurun/src/frontend/wrapper/execution.cc
+++ b/runtimes/neurun/src/frontend/wrapper/execution.cc
@@ -77,15 +77,18 @@ bool ANeuralNetworksExecution::haveUnspecifiedDims(
   return ((operand_shape.element_nums() == 0) ? true : false);
 }
 
-bool ANeuralNetworksExecution::setInput(uint32_t index, const ANeuralNetworksOperandType * /*type*/,
+bool ANeuralNetworksExecution::setInput(uint32_t index, const ANeuralNetworksOperandType *type,
                                         const void *buffer, size_t length) noexcept
 {
   try
   {
     neurun::model::operand::IO::Index input_index{index};
     const auto operand_index = getInputOperandIndex(index);
+    bool unspecified = haveUnspecifiedDims(operand_index);
+
     const auto type_info = _executor->model().operands.at(operand_index).typeInfo();
-    const auto shape = _executor->model().operands.at(operand_index).shape();
+    const auto shape = (unspecified ? neurun::util::getShape(type)
+                                    : _executor->model().operands.at(operand_index).shape());
 
     _executor->setInput(input_index, type_info, shape, buffer, length);
   }
@@ -99,16 +102,18 @@ bool ANeuralNetworksExecution::setInput(uint32_t index, const ANeuralNetworksOpe
   return true;
 }
 
-bool ANeuralNetworksExecution::setOutput(uint32_t index,
-                                         const ANeuralNetworksOperandType * /*type*/, void *buffer,
-                                         size_t length) noexcept
+bool ANeuralNetworksExecution::setOutput(uint32_t index, const ANeuralNetworksOperandType *type,
+                                         void *buffer, size_t length) noexcept
 {
   try
   {
     neurun::model::operand::IO::Index output_index{index};
     const auto operand_index = getOutputOperandIndex(index);
+    bool unspecified = haveUnspecifiedDims(operand_index);
+
     const auto type_info = _executor->model().operands.at(operand_index).typeInfo();
-    const auto shape = _executor->model().operands.at(operand_index).shape();
+    const auto shape = (unspecified ? neurun::util::getShape(type)
+                                    : _executor->model().operands.at(operand_index).shape());
 
     _executor->setOutput(output_index, type_info, shape, buffer, length);
   }
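
For reference, the new path relies on converting the client-supplied ANeuralNetworksOperandType into a runtime shape. A rough sketch of that conversion, using a plain vector instead of neurun's internal shape type (the helper name and return type are hypothetical; the actual neurun::util::getShape differs):

// Hypothetical illustration only; not the actual neurun::util::getShape implementation.
#include <NeuralNetworks.h>
#include <cstdint>
#include <vector>

std::vector<uint32_t> dimsFromOperandType(const ANeuralNetworksOperandType *type)
{
  // The client fills dimensionCount and dimensions when setting the input/output,
  // which is what lets an operand with an unspecified model shape be executed.
  return std::vector<uint32_t>(type->dimensions, type->dimensions + type->dimensionCount);
}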