Apply subgraph on compile time (#4426)
author김용섭/On-Device Lab(SR)/Engineer/삼성전자 <yons.kim@samsung.com>
Wed, 20 Feb 2019 00:48:01 +0000 (09:48 +0900)
committer오형석/On-Device Lab(SR)/Staff Engineer/삼성전자 <hseok82.oh@samsung.com>
Wed, 20 Feb 2019 00:48:01 +0000 (09:48 +0900)
* Apply subgraph on compile time

Apply subgraph to linearization and stage generation on compile time

Signed-off-by: Yongseop Kim <yons.kim@samsung.com>
* Change the name of Element in Linear from node to subgraph

* Add todo for moving dividing graph code into graph

* Add todos for Element in Linear

* Change index of Element from const Index * to Index

runtimes/neurun/src/backend/StageSequence.h
runtimes/neurun/src/backend/interface/IStageGenerator.h
runtimes/neurun/src/compiler/Compiler.cc
runtimes/neurun/src/linear/Linear.cc
runtimes/neurun/src/linear/Linear.h
runtimes/neurun/src/model/operation/Subgraph.h

index bfada22..db3f3b2 100644 (file)
@@ -21,6 +21,7 @@
 #include <functional>
 
 #include "backend/interface/IStage.h"
+#include "backend/AtomicStage.h"
 #include "cpp14/memory.h"
 
 namespace neurun
index 23f6f4c..fbc03a3 100644 (file)
 #include "backend/interface/ITensorBuilder.h"
 #include "backend/interface/IStage.h"
 #include "model/operation/NodeVisitor.h"
+#include "model/operation/Subgraph.h"
 #include "cpp14/memory.h"
 
-// TODO Remove dependencies for below two headers
-#include "backend/AtomicStage.h"
-// for Subgraph
-//#include "backend/StageSequence.h"
+// TODO Remove dependencies for below header. Should include only interface.
+#include "backend/StageSequence.h"
 
 struct IExecutionBuilder
 {
@@ -56,8 +55,13 @@ protected:
   virtual void visit(const model::operation::InternalName &) override {}
 #include "model/operation/Op.lst"
 #undef OP
-  // TODO: Fill this
-  virtual void visit(const model::operation::Subgraph &) override {}
+  virtual void visit(const model::operation::Subgraph &subgraph) final override
+  {
+    for (const auto &e : subgraph.operations())
+    {
+      e.node->accept(std::move(*this));
+    }
+  }
 
 protected:
   void returnStage(const StageFn fn)
@@ -69,9 +73,8 @@ protected:
 public:
   std::unique_ptr<IStage> generate(const model::operation::Node &node)
   {
-    // TODO Consider Subgraph and
-    // remove directly dependency for classes not interface
-    _return = nnfw::cpp14::make_unique<AtomicStage>();
+    // TODO Remove directly dependency for classes not interface
+    _return = nnfw::cpp14::make_unique<StageSequence>();
     node.accept(std::move(*this));
     return std::move(_return);
   }
index a65108a..e8a8469 100644 (file)
@@ -67,6 +67,7 @@ void Compiler::compile(void)
 
   dot_dumper.dumpIfNeeded("after_lower");
 
+  // linearize with subgraphs
   auto linear = _model->linearize();
   _state = State::LINEARIZED;
 
@@ -100,11 +101,13 @@ void Compiler::compile(void)
 
   // Plan building
   linear->iterate([&](const linear::Element &element) {
-    auto backend = element.lower_info->backend();
+    // Assume that all nodes in a subgraph have been assigned the same backend
+    const auto &first_ind = element.subgraph->operations()[0].index;
+    auto backend = _model->getLowerInfo(first_ind)->backend();
 
     // Generate Stage
     auto stage_gen = backend->stage_gen();
-    plan_builder.addStage(stage_gen->generate(*element.node));
+    plan_builder.addStage(stage_gen->generate(*element.subgraph));
   });
 
   auto tensor_builders = linear->planTensors();
index a672d5d..ebcef95 100644 (file)
@@ -37,27 +37,109 @@ namespace linear
 
 Linear::Linear(const graph::Graph &graph) : _graph(graph)
 {
-  // Linearize with topological sort
+  // TODO: Move this code to graph
+
+  // Linearize graph with subgraphs by topological sort while assuming that
+  // a subgraph has linear form
   //
-  // Topological sort algorithm
-  //   1. Iterate with DFS
-  //   2. Append the node to vector when DFS for the node finishes(post order)
-  //   3. Reverse the order of nodes
-
-  graph::Graph::PostDfsConstIterator().iterate(
-      graph, [&](const model::operation::Index &index, const model::operation::Node &node) {
-        const auto lower_info = graph.getLowerInfo(index);
-        _operations.emplace_back(&node, lower_info);
-      });
+  // algorithm
+  //   0. Create new subgraph
+  //   1. Add a node into current subgraph
+  //   2. Run two checks to decide whether a new subgraph is needed
+  //   - Does the current node have multiple inputs (e.g. concat)?
+  //     - i.e. does the current node have two or more preceding operations?
+  //
+  //  [CONV] [CONV] [CONV]  [MAX_POOL]
+  //    |      |      |       |
+  //   [0]    [1]    [2]     [3]
+  //    \      |      |      /
+  //     [    C O N C A T   ]  # current node
+  //
+  //   - Is the current node at the beginning of a separate branch?
+  //     - i.e. does the current node's input operand have two or more uses?
+  //
+  //     [CONV]
+  //       |
+  //      [0]----.
+  //       |     |
+  //     [CONV] [CONV]  # current node
+  //       |      |
+  //      [1]    [2]
+  //       \      /
+  //       [CONCAT]
+  //
+  //   3. If needed, push current subgraph to the set and create new subgraph
+
+  auto subgraph_set =
+      nnfw::cpp14::make_unique<std::vector<std::unique_ptr<model::operation::Subgraph>>>();
+  {
+    std::unique_ptr<model::operation::Subgraph> subgraph = nullptr;
+    graph::Graph::PostDfsConstIterator().iterate(
+        graph, [&](const model::operation::Index &index, const model::operation::Node &node) {
+
+          if (!subgraph)
+            subgraph = nnfw::cpp14::make_unique<model::operation::Subgraph>();
+
+          subgraph->appendOperation(index, node);
+
+          bool new_subgraph = false;
+          size_t prev_op_cnt = 0;
+          for (auto input : node.getInputs())
+          {
+            const auto &operand = graph.operands().at(input);
+            if (operand.getDef().list().size() > 0)
+              ++prev_op_cnt;
+
+            if (prev_op_cnt > 1 || operand.getUses().list().size() > 1)
+            {
+              new_subgraph = true;
+              break;
+            }
+          }
+
+          if (new_subgraph)
+          {
+            subgraph_set->emplace_back(std::move(subgraph));
+            subgraph = nullptr;
+          }
+        });
+
+    // If the last subgraph remains (was not yet pushed), append it to the subgraph set
+    if (subgraph && subgraph->operations().size() > 0)
+      subgraph_set->emplace_back(std::move(subgraph));
+
+    // NOTE These subgraphs are currently in reverse order
+  }
 
-  std::reverse(std::begin(_operations), std::end(_operations));
+  // Set input/output of each subgraph while reversing
+  std::reverse(subgraph_set->begin(), subgraph_set->end());
+  for (auto &subgraph : *subgraph_set)
+  {
+    // output
+    auto it = std::begin(subgraph->operations());
+    subgraph->setOutputs((*it).node->getOutputs());
+
+    std::reverse(std::begin(subgraph->operations()), std::end(subgraph->operations()));
+
+    // input
+    it = std::begin(subgraph->operations());
+    subgraph->setInputs((*it).node->getInputs());
+  }
+
+  // Now ordered subgraphs are ready
+  for (auto &subgraph : *subgraph_set)
+    _elements.emplace_back(std::move(subgraph));
+
+  VERBOSE(LINEAR) << "Subgraphs" << std::endl;
+  for (const auto &element : _elements)
+    VERBOSE(LINEAR) << element.subgraph->getStr() << std::endl;
 }
 
 void Linear::accept(model::operation::NodeVisitor &&visitor) const
 {
-  for (const auto op : _operations)
+  for (const auto &e : _elements)
   {
-    op.node->accept(std::move(visitor));
+    e.subgraph->accept(std::move(visitor));
   }
 }
 
@@ -155,29 +237,32 @@ backend::TensorBuilderSet Linear::planTensors()
   //   2. Scan DEF of outputs. If the DEF, allocate it
   VERBOSE(LINEAR) << "TENSORS" << std::endl;
   const auto &operands = _graph.operands();
-  for (const auto op : _operations)
+  for (const auto &e : _elements)
   {
-    for (const auto &ind : op.node->getOutputs())
+    for (const auto &op : e.subgraph->operations())
     {
-      const auto &obj = operands.at(ind);
-      if (obj.getDef().size())
+      for (const auto &ind : op.node->getOutputs())
       {
-        iterTensorBuilders(ind,
-                           [](const model::operand::Index &ind, ITensorBuilderPtr tensor_builder) {
-                             tensor_builder->notifyFirstUse(ind);
-                           });
+        const auto &obj = operands.at(ind);
+        if (obj.getDef().size())
+        {
+          iterTensorBuilders(
+              ind, [](const model::operand::Index &ind, ITensorBuilderPtr tensor_builder) {
+                tensor_builder->notifyFirstUse(ind);
+              });
+        }
       }
-    }
 
-    for (const auto &ind : op.node->getInputs())
-    {
-      uses_map[ind]--;
-      if (uses_map[ind] == 0)
+      for (const auto &ind : op.node->getInputs())
       {
-        iterTensorBuilders(ind,
-                           [](const model::operand::Index &ind, ITensorBuilderPtr tensor_builder) {
-                             tensor_builder->notifyLastUse(ind);
-                           });
+        uses_map[ind]--;
+        if (uses_map[ind] == 0)
+        {
+          iterTensorBuilders(
+              ind, [](const model::operand::Index &ind, ITensorBuilderPtr tensor_builder) {
+                tensor_builder->notifyLastUse(ind);
+              });
+        }
       }
     }
   }
@@ -193,9 +278,9 @@ backend::TensorBuilderSet Linear::planTensors()
 
 void Linear::iterate(const std::function<void(const Element &element)> &fn) const
 {
-  for (const auto op : _operations)
+  for (const auto &e : _elements)
   {
-    fn(op);
+    fn(e);
   }
 }
 
index fb3f539..43563fd 100644 (file)
@@ -18,8 +18,9 @@
 #define __NEURUN_LINEAR_LINEAR_H__
 
 #include <vector>
+#include <memory>
 
-#include "model/operation/Node.h"
+#include "model/operation/Subgraph.h"
 #include "backend/interface/ITensorBuilder.h"
 
 namespace neurun
@@ -48,12 +49,15 @@ namespace linear
 
 struct Element
 {
-  const model::operation::Node *node;
-  const graph::operation::LowerInfo *lower_info;
+  // TODO: Change unique_ptr to ptr after Graph has Subgraphs
+  std::unique_ptr<model::operation::Subgraph> subgraph;
 
-  Element(const model::operation::Node *node, const graph::operation::LowerInfo *lower_info)
-      : node{node}, lower_info{lower_info}
+  // TODO
+  // graph::operation::LowerInfo *lower_info;
+
+  Element(std::unique_ptr<model::operation::Subgraph> subgraph) : subgraph{std::move(subgraph)}
   {
+    // DO NOTHING
   }
 };
 
@@ -75,7 +79,7 @@ public:
 
 private:
   const graph::Graph &_graph;
-  std::vector<Element> _operations;
+  std::vector<Element> _elements;
 };
 
 } // namespace linear
index 62d91a8..a9fe6b7 100644 (file)
@@ -31,9 +31,12 @@ namespace model
 namespace operation
 {
 
+// To support ValueSwappable, Element doesn't have members which are classes
+// as value(or can have members which are classes as value and the classes
+// support Swappable)
 struct Element
 {
-  const Index index;
+  Index index;
   const Node *node;
 
   Element(const Index *i, const Node *n) : index{*i}, node{n}