[Pure CL] Support ReLU6 activation (#1663)
author박종현/동작제어Lab(SR)/Staff Engineer/삼성전자 <jh1302.park@samsung.com>
Tue, 12 Jun 2018 07:54:57 +0000 (16:54 +0900)
committer서상민/동작제어Lab(SR)/Staff Engineer/삼성전자 <sangmin7.seo@samsung.com>
Tue, 12 Jun 2018 07:54:57 +0000 (16:54 +0900)
This commit allows the pure CL runtime to process the ReLU6 activation
(which is used in the MobileNet example).

Signed-off-by: Jonghyun Park <jh1302.park@samsung.com>
runtimes/pure_arm_compute/src/compilation.cc

index 4bf4d96..0608c23 100644 (file)
@@ -244,6 +244,7 @@ public:
 
 private:
   void appendReLU(::arm_compute::ICLTensor *tensor);
+  void appendReLU6(::arm_compute::ICLTensor *tensor);
 
 public:
   void append(FuseCode code, ::arm_compute::ICLTensor *tensor);
@@ -264,6 +265,18 @@ void ActivationBuilder::appendReLU(::arm_compute::ICLTensor *ifm_alloc)
   _builder.append(std::move(fn));
 }
 
+void ActivationBuilder::appendReLU6(::arm_compute::ICLTensor *ifm_alloc)
+{
+  const ::arm_compute::ActivationLayerInfo act_info{
+      ::arm_compute::ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 6.0f, 0.0f};
+
+  auto fn = make_layer<::arm_compute::CLActivationLayer>();
+
+  fn->configure(ifm_alloc, nullptr, act_info);
+
+  _builder.append(std::move(fn));
+}
+
 void ActivationBuilder::append(FuseCode code, ::arm_compute::ICLTensor *ifm_alloc)
 {
   switch (code)
@@ -278,6 +291,11 @@ void ActivationBuilder::append(FuseCode code, ::arm_compute::ICLTensor *ifm_allo
       appendReLU(ifm_alloc);
       break;
     }
+    case ANEURALNETWORKS_FUSED_RELU6:
+    {
+      appendReLU6(ifm_alloc);
+      break;
+    }
     default:
     {
       throw std::runtime_error("Not supported, yet");