Use interpreter for model that cannot compile (#4482)
author Hyeongseok Oh / On-Device Lab (SR) / Staff Engineer / Samsung Electronics <hseok82.oh@samsung.com>
Mon, 25 Feb 2019 09:21:52 +0000 (18:21 +0900)
committer GitHub Enterprise <noreply-CODE@samsung.com>
Mon, 25 Feb 2019 09:21:52 +0000 (18:21 +0900)
Use the interpreter when compilation is attempted but cannot be performed (e.g. when compilation is disabled).

Signed-off-by: Hyeongseok Oh <hseok82.oh@samsung.com>
runtimes/neurun/src/compiler/Compiler.cc
runtimes/neurun/src/exec/interp/Interpreter.h
runtimes/neurun/src/frontend/execution.cc
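
The change hinges on the compiled executor and the interpreter sharing one executor interface, so the frontend can run whichever object the compilation publishes. Below is a minimal sketch of that relationship, using simplified stand-ins for neurun::exec::IExecutor and its implementations (only execute() is shown; the real interface also carries input/output setters):

    #include <memory>

    // Simplified stand-in for neurun::exec::IExecutor.
    struct IExecutor
    {
      virtual ~IExecutor() = default;
      virtual void execute() = 0;
    };

    // Produced when compilation succeeds: runs a pre-built linear plan.
    struct Executor : IExecutor
    {
      void execute() override { /* run the compiled plan */ }
    };

    // Fallback: evaluates the model graph directly, no compilation required.
    struct Interpreter : IExecutor
    {
      void execute() override { /* walk the graph operation by operation */ }
    };

    // The frontend only sees the interface, so either object can back an execution.
    void run(const std::shared_ptr<IExecutor> &executor) { executor->execute(); }
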

diff --git a/runtimes/neurun/src/compiler/Compiler.cc b/runtimes/neurun/src/compiler/Compiler.cc
index ae3d18a..ff5e136 100644
--- a/runtimes/neurun/src/compiler/Compiler.cc
+++ b/runtimes/neurun/src/compiler/Compiler.cc
@@ -26,6 +26,7 @@
 #include "dumper/dot/DotDumper.h"
 #include "linear/Linear.h"
 #include "exec/Executor.h"
+#include "exec/interp/Interpreter.h"
 
 namespace neurun
 {
@@ -46,7 +47,7 @@ void Compiler::compile(void)
   const auto env_disable_compile = config::ConfigManager::instance().get<bool>("DISABLE_COMPILE");
   if (env_disable_compile)
   {
-    _plan = std::make_shared<Plan>();
+    _executor = std::make_shared<exec::interp::Interpreter>(_model->shareModel());
     return;
   }
 
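
The hunk above only shows the changed branch. A compilable sketch of the whole decision is below, with hypothetical stand-ins for the config lookup and the executor types (they mirror the stand-ins in the earlier sketch, and the compiled path is elided):

    #include <cstdlib>
    #include <memory>

    struct IExecutor { virtual ~IExecutor() = default; virtual void execute() = 0; };
    struct Interpreter : IExecutor { void execute() override { /* interpret the model */ } };
    struct Executor : IExecutor { void execute() override { /* run the compiled plan */ } };

    // Stand-in for config::ConfigManager::instance().get<bool>("DISABLE_COMPILE");
    // reading the environment directly here is only for illustration.
    static bool disable_compile() { return std::getenv("DISABLE_COMPILE") != nullptr; }

    std::shared_ptr<IExecutor> compile_or_fall_back()
    {
      if (disable_compile())
      {
        // Compilation is skipped: publish an interpreter instead of an empty Plan,
        // so that execution creation still receives a usable executor.
        return std::make_shared<Interpreter>();
      }
      // Normal path (lowering, linearization, compiled Executor) elided.
      return std::make_shared<Executor>();
    }
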
diff --git a/runtimes/neurun/src/exec/interp/Interpreter.h b/runtimes/neurun/src/exec/interp/Interpreter.h
index f8f10c1..913f067 100644
--- a/runtimes/neurun/src/exec/interp/Interpreter.h
+++ b/runtimes/neurun/src/exec/interp/Interpreter.h
@@ -37,13 +37,18 @@ namespace interp
 class Interpreter final : public IExecutor
 {
 public:
-  Interpreter(const std::shared_ptr<const neurun::compiler::Plan> &plan) : _plan{plan}
+  Interpreter(const std::shared_ptr<const model::Model> &model) : _model{model}
   {
     // DO NOTHING
   }
 
 public:
   /**
+   * @brief   Return graph model
+   * @return  Graph model
+   */
+  const model::Model &model() { return *_model; }
+  /**
    * @brief     Set input data's information
    * @param[in] index   Input index
    * @param[in] type    Input data's type info
@@ -70,7 +75,7 @@ public:
   void execute(void);
 
 private:
-  std::shared_ptr<const neurun::compiler::Plan> _plan;
+  std::shared_ptr<const model::Model> _model;
 };
 
 } // namespace interp
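
With this change the interpreter is constructed from the shared model rather than a compiled Plan; the shared_ptr keeps the frontend's graph alive without copying it. A small usage sketch, with a hypothetical stub standing in for model::Model:

    #include <memory>

    struct Model { /* operands, operations, inputs/outputs ... */ };  // stand-in for model::Model

    class Interpreter
    {
    public:
      explicit Interpreter(const std::shared_ptr<const Model> &model) : _model{model} {}
      const Model &model() { return *_model; }  // mirrors the accessor added above
      void execute() { /* evaluate the graph */ }

    private:
      std::shared_ptr<const Model> _model;      // shared, not copied
    };

    int main()
    {
      auto model = std::make_shared<const Model>();
      Interpreter interp{model};  // interpreter shares ownership of the model
      (void)interp.model();
      interp.execute();
    }
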
diff --git a/runtimes/neurun/src/frontend/execution.cc b/runtimes/neurun/src/frontend/execution.cc
index a6cbedc..c9c0058 100644
--- a/runtimes/neurun/src/frontend/execution.cc
+++ b/runtimes/neurun/src/frontend/execution.cc
@@ -35,17 +35,16 @@ int ANeuralNetworksExecution_create(ANeuralNetworksCompilation *compilation,
     return ANEURALNETWORKS_UNEXPECTED_NULL;
   }
 
-  // Can handle compiled state only
-  if (compilation->state() != neurun::compiler::State::COMPILED)
-  {
-    VERBOSE(NNAPI::Execution) << "create: Not compiled yet" << std::endl;
-    return ANEURALNETWORKS_BAD_STATE;
-  }
-
   std::shared_ptr<neurun::exec::IExecutor> executor;
 
   compilation->publish(executor);
 
+  if (executor == nullptr)
+  {
+    VERBOSE(NNAPI::Execution) << "create: Never compiled yet" << std::endl;
+    return ANEURALNETWORKS_BAD_STATE;
+  }
+
   *execution = new (std::nothrow) ANeuralNetworksExecution{executor};
   if (*execution == nullptr)
   {
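
From an application's point of view, the relocated check means ANeuralNetworksExecution_create now reports ANEURALNETWORKS_BAD_STATE whenever no executor was ever published for the compilation. A hedged caller-side sketch (the compilation is assumed to be already created; the include path may differ per platform, and error handling of earlier steps is omitted):

    #include <cstdio>
    #include "NeuralNetworks.h"  // NNAPI C header; include path may differ

    // Create an execution for an existing compilation object.
    // Returns nullptr if the runtime reports that nothing was ever compiled/published.
    ANeuralNetworksExecution *create_execution(ANeuralNetworksCompilation *compilation)
    {
      ANeuralNetworksExecution *execution = nullptr;
      const int rc = ANeuralNetworksExecution_create(compilation, &execution);
      if (rc == ANEURALNETWORKS_BAD_STATE)
      {
        std::fprintf(stderr, "execution create failed: compilation was never finished\n");
        return nullptr;
      }
      if (rc != ANEURALNETWORKS_NO_ERROR)
      {
        std::fprintf(stderr, "execution create failed: error %d\n", rc);
        return nullptr;
      }
      return execution;  // caller later releases it with ANeuralNetworksExecution_free
    }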