Revert "[neurun] Fix for const model ouput (#8799)" (#8998)
author이한종/On-Device Lab(SR)/Engineer/삼성전자 <hanjoung.lee@samsung.com>
Mon, 18 Nov 2019 05:36:31 +0000 (14:36 +0900)
committer오형석/On-Device Lab(SR)/Staff Engineer/삼성전자 <hseok82.oh@samsung.com>
Mon, 18 Nov 2019 05:36:31 +0000 (14:36 +0900)
This reverts commit 0bc930bd7d816b48c148c6043a6a8abb08badf51.

Reverting this change fixes a push builder failure.

Signed-off-by: Hanjoung Lee <hanjoung.lee@samsung.com>
runtime/neurun/core/src/compiler/Linear.cc
runtime/neurun/core/src/graph/Graph.cc
tests/tools/nnpackage_run/src/nnpackage_run.cc

index 67af4fc..db75893 100644 (file)
@@ -215,18 +215,12 @@ void Linear::planTensors()
         // NOTE This assumes an operand can have one layout, and only Permutate can have
         // different layouts for input and output
         const auto &def = *obj.getDef().list().cbegin();
-        auto frontend_layout = model::Layout::UNKNOWN;
-
-        if (_subgraphs->containsOperation(def))
+        auto frontend_layout = _subgraphs->at(_subgraphs->getOperation(def)).getLayout();
+        if (frontend_layout == model::Layout::UNKNOWN)
         {
-          frontend_layout = _subgraphs->at(_subgraphs->getOperation(def)).getLayout();
-          if (frontend_layout == model::Layout::UNKNOWN)
-          {
-            const auto &use = *obj.getUses().list().cbegin();
-            frontend_layout = _subgraphs->at(_subgraphs->getOperation(use)).getLayout();
-          }
+          const auto &use = *obj.getUses().list().cbegin();
+          frontend_layout = _subgraphs->at(_subgraphs->getOperation(use)).getLayout();
         }
-
         const auto backend_layout = lower_info->def_factors().getOnlyElement().layout();
         tensor_builder->registerTensorInfo(ind, info, frontend_layout, backend_layout, is_const);
       }
@@ -299,19 +293,15 @@ void Linear::planTensors()
   for (const auto &ind : _model->outputs)
   {
     --uses_map[ind];
-    if (uses_map[ind] == 0) // To prevent notifyLastUse from being called twice
-    {
-      tensor_builder_map[ind]->notifyLastUse(ind);
-    }
+    assert(uses_map[ind] == 0);
+    tensor_builder_map[ind]->notifyLastUse(ind);
   }
 
   for (const auto &ind : constants)
   {
     --uses_map[ind];
-    if (uses_map[ind] == 0) // To prevent notifyLastUse from being called twice
-    {
-      tensor_builder_map[ind]->notifyLastUse(ind);
-    }
+    assert(uses_map[ind] == 0);
+    tensor_builder_map[ind]->notifyLastUse(ind);
   }
 
   assert(std::all_of(
index 9b16230..b3d70b1 100644 (file)
@@ -340,23 +340,22 @@ void Graph::lower(void)
 
     _subgraphs->dump("merged and sorted operations without permutation");
 
-    const auto default_backend = backend::BackendManager::instance().getDefault();
-    for (auto index : _model->inputs)
+// NOTE This is desired way to handle model input and outputs however getDefaultBackend() is
+// cpu backend dependent for now we cannot use it.
+#if 0
+    // Add def backend to model input/output operand as default backend
+    for (auto index : getInputs())
     {
       auto &&lower_info = operands_lower_info.at(index);
-      lower_info->addDefPermuteFactor(lower_info->use_factors().getOnlyElement());
+      lower_info->addDefBackend(_backend_resolver->getDefaultBackend());
     }
-    for (auto index : _model->outputs)
+
+    for (auto index : getOutputs())
     {
       auto &&lower_info = operands_lower_info.at(index);
-      if (_model->operands.at(index).isConstant())
-      {
-        lower_info->addDefPermuteFactor(operand::PermuteFactor{
-            default_backend,
-            model::Layout::NHWC // TODO Get frontend layout of this node from IR
-        });
-      }
+      lower_info->addUseBackend(_backend_resolver->getDefaultBackend());
     }
+#endif
 
     // Add DefFactor constants same as UseFactor
     // NOTE This assumes a constant operand is used by only one operation
@@ -367,8 +366,18 @@ void Graph::lower(void)
         auto &&lower_info = operands_lower_info.at(operand);
         if (lower_info->def_factors().empty())
         {
+          // NOTE Handling model inputs here is not ideal. See above NOTE comment.
+          // If it is a model input, not a constant
+          if (_model->inputs.contains(operand))
+          {
+            // If one or more elements then any PermuteFactor is OK so pick first one
+            if (!lower_info->use_factors().empty())
+            {
+              lower_info->addDefPermuteFactor(*lower_info->use_factors().begin());
+            }
+          }
           // If it is a constant
-          if (!_model->inputs.contains(operand))
+          else
           {
             lower_info->addDefPermuteFactor(lower_info->use_factors().getOnlyElement());
           }
index c274dd9..b28d764 100644 (file)
@@ -82,6 +82,13 @@ int main(const int argc, char **argv)
 
   // verify input and output
 
+  if (num_inputs == 0)
+  {
+    std::cerr << "[ ERROR ] "
+              << "No inputs in model => execution is not possible" << std::endl;
+    exit(1);
+  }
+
   auto verifyInputTypes = [session]() {
     uint32_t sz;
     NNPR_ENSURE_STATUS(nnfw_input_size(session, &sz));