[nnc] Support sigmoid operation in acl backend (#2739)
authorEfimov Alexander/AI Tools Lab/./Samsung Electronics <a.efimov@samsung.com>
Thu, 20 Dec 2018 18:31:53 +0000 (21:31 +0300)
committerРоман Михайлович Русяев/AI Tools Lab /SRR/Staff Engineer/삼성전자 <r.rusyaev@samsung.com>
Thu, 20 Dec 2018 18:31:53 +0000 (21:31 +0300)
- Generate code for sigmoid activation function
- Add unit tests for sigmoid, tanh and capped_relu activation functions

Signed-off-by: Efimov Alexander <a.efimov@samsung.com>
contrib/nnc/passes/acl_soft_backend/AclCppOpGenerator.cpp
contrib/nnc/unittests/acl_backend/MIRToDOM.cpp

index acb8cba..62a2011 100644 (file)
@@ -40,6 +40,7 @@
 #include "core/modelIR/operations/ReshapeOp.h"
 #include "core/modelIR/operations/ResizeOp.h"
 #include "core/modelIR/operations/ScaleOp.h"
+#include "core/modelIR/operations/SigmoidOp.h"
 #include "core/modelIR/operations/SoftmaxOp.h"
 #include "core/modelIR/operations/SqrtOp.h"
 #include "core/modelIR/operations/TanhOp.h"
@@ -918,8 +919,8 @@ void AclCppOpGenerator::visit(mir::ops::GatherOp& op) {
   assert(false && "Unimplemented operation: GatherOp");
 }
 
-void AclCppOpGenerator::visit(mir::ops::SigmoidOp& op) {
-  assert(false && "Unimplemented operation: SigmoidOp");
+void AclCppOpGenerator::visit(ops::SigmoidOp& op) {
+  genActivation(op, "LOGISTIC");
 }
 
 }
index bb6e018..f983d0d 100644 (file)
@@ -30,6 +30,7 @@
 #include "core/modelIR/Graph.h"
 #include "core/modelIR/operations/VariableOp.h"
 #include "core/modelIR/operations/FullyConnectedOp.h"
+#include "core/modelIR/operations/CappedReluOp.h"
 #include "core/modelIR/operations/Conv2DOp.h"
 #include "core/modelIR/operations/DepthwiseConv2DOp.h"
 #include "core/modelIR/operations/PoolOp.h"
@@ -39,6 +40,7 @@
 #include "core/modelIR/operations/ReshapeOp.h"
 #include "core/modelIR/operations/ConcatOp.h"
 #include "core/modelIR/operations/BiasAddOp.h"
+#include "core/modelIR/operations/SigmoidOp.h"
 #include "core/modelIR/operations/SoftmaxOp.h"
 #include "core/modelIR/operations/ScaleOp.h"
 #include "core/modelIR/operations/EluOp.h"
@@ -66,7 +68,7 @@ const char* artifactName = "nnmodel";
  * @param op_constr functor which creates main operations of graph
  * @param input_shapes vector of network input shapes
  * */
-void fillGraph(Graph& g, OpConstructor op_constr, const vector<Shape>& input_shapes) {
+void fillGraph(Graph& g, const OpConstructor& op_constr, const vector<Shape>& input_shapes) {
   // Create inputs
   vector<mir::IODescriptor> inputs;
   int num_inputs = input_shapes.size();
@@ -209,10 +211,6 @@ TEST(acl_backend_mir_to_dom, scale) {
   ArtifactGeneratorCppCode code_gen(code_out);
 }
 
-TEST(acl_backend_mir_to_dom, DISABLED_capped_relu) {
-  // TODO
-}
-
 TEST(acl_backend_mir_to_dom, concat) {
   Graph g;
   OpConstructor op_generator = [](Graph& g, const vector<IODescriptor>& inputs) {
@@ -367,11 +365,12 @@ TEST(acl_backend_mir_to_dom, DISABLED_avgpool) {
   // TODO
 }
 
-TEST(acl_backend_mir_to_dom, relu) {
+/**
+ * @brief Check DOM and code generation for a simple single-input activation operation
+ * @param op_generator functor that constructs the activation operation under test
+ */
+static void testActivationOp(const OpConstructor& op_generator) {
   Graph g;
-  OpConstructor op_generator = [](Graph& g, const std::vector<IODescriptor>& inputs) {
-      return g.create<mir::ops::ReluOp>("relu", inputs[0]);
-  };
   vector<Shape> input_shapes{{1, 10, 10, 3}};
 
   fillGraph(g, op_generator, input_shapes);
@@ -387,12 +386,41 @@ TEST(acl_backend_mir_to_dom, relu) {
   ArtifactGeneratorCppCode code_gen(code_out);
 }
 
+TEST(acl_backend_mir_to_dom, relu) {
+  OpConstructor op_generator = [](Graph& g, const std::vector<IODescriptor>& inputs) {
+      return g.create<mir::ops::ReluOp>("relu", inputs[0]);
+  };
+
+  testActivationOp(op_generator);
+}
+
+TEST(acl_backend_mir_to_dom, capped_relu) {
+  float cap = 6;
+  OpConstructor op_generator = [cap](Graph& g, const std::vector<IODescriptor>& inputs) {
+      return g.create<mir::ops::CappedReluOp>("capped_relu", inputs[0], cap);
+  };
+
+  testActivationOp(op_generator);
+}
+
+TEST(acl_backend_mir_to_dom, sigmoid) {
+  OpConstructor op_generator = [](Graph& g, const std::vector<IODescriptor>& inputs) {
+      return g.create<mir::ops::SigmoidOp>("sigmoid", inputs[0]);
+  };
+
+  testActivationOp(op_generator);
+}
+
 TEST(acl_backend_mir_to_dom, DISABLED_elu) {
   // TODO
 }
 
-TEST(acl_backend_mir_to_dom, DISABLED_tanh) {
-  // TODO
+TEST(acl_backend_mir_to_dom, tanh) {
+  OpConstructor op_generator = [](Graph& g, const std::vector<IODescriptor>& inputs) {
+      return g.create<mir::ops::TanhOp>("tanh", inputs[0]);
+  };
+
+  testActivationOp(op_generator);
 }
 
 TEST(acl_backend_mir_to_dom, DISABLED_reduce_mean) {