dnn(ocl4dnn): support log softmax in ocl4dnn
author: Li Peng <peng.li@intel.com>
Wed, 11 Oct 2017 07:29:39 +0000 (15:29 +0800)
committer: Li Peng <peng.li@intel.com>
Thu, 12 Oct 2017 01:51:13 +0000 (09:51 +0800)
Signed-off-by: Li Peng <peng.li@intel.com>
modules/dnn/src/layers/softmax_layer.cpp
modules/dnn/src/ocl4dnn/include/ocl4dnn.hpp
modules/dnn/src/ocl4dnn/src/ocl4dnn_softmax.cpp
modules/dnn/src/opencl/softmax_loss.cl

index fd14e29..a6ff408 100644 (file)
@@ -100,6 +100,7 @@ public:
             config.in_shape = shape(*inputs[0]);
             config.axis = axisRaw;
             config.channels = inputs[0]->size[axisRaw];
+            config.logsoftmax = logSoftMax;
 
             softmaxOp = Ptr<OCL4DNNSoftmax<float> >(new OCL4DNNSoftmax<float>(config));
         }
@@ -108,7 +109,7 @@ public:
         srcMat = inputs[0]->getUMat(ACCESS_READ);
         dstMat = outputs[0].getUMat(ACCESS_WRITE);
 
-        if (!logSoftMax && softmaxOp->Forward(srcMat, dstMat))
+        if (softmaxOp->Forward(srcMat, dstMat))
             return true;
 
         const Mat &src = *inputs[0];
index 09bda05..c2c7b52 100644 (file)
@@ -445,11 +445,12 @@ class OCL4DNNLRN
 
 struct OCL4DNNSoftmaxConfig
 {
-    OCL4DNNSoftmaxConfig() : axis(0), channels(0)
+    OCL4DNNSoftmaxConfig() : axis(0), channels(0), logsoftmax(false)
     {}
     MatShape in_shape;
     int axis;
     int channels;
+    bool logsoftmax;
 };
 
 template<typename Dtype>
@@ -467,6 +468,7 @@ class OCL4DNNSoftmax
         int32_t channels_;
         int32_t count_;
         bool use_slm_;
+        bool log_softmax_;
         UMat scale_data_;
 };
 #endif // HAVE_OPENCL
index e4802d2..9ac5ddc 100644 (file)
@@ -52,6 +52,7 @@ OCL4DNNSoftmax<Dtype>::OCL4DNNSoftmax(OCL4DNNSoftmaxConfig config)
 {
     softmax_axis_ = config.axis;
     channels_ = config.channels;
+    log_softmax_ = config.logsoftmax;
 
     inner_num_ = 1;
     outer_num_ = 1;
@@ -90,6 +91,7 @@ bool OCL4DNNSoftmax<Dtype>::Forward(const UMat& bottom, UMat& top)
         String kname;
         ocl::Kernel oclk_softmax_forward_kernel;
 
+        if (log_softmax_) opts += " -DLOG_SOFTMAX ";
         if (use_slm_)
             kname = CL_KERNEL_SELECT("softmax_forward_slm");
         else
index d30b32b..28a43ae 100644 (file)
@@ -112,7 +112,11 @@ __kernel void TEMPLATE(softmax_forward_slm,Dtype)(const int num, const int chann
   for (int index = get_global_id(0); index < channels * spatial_dim;
       index += get_global_size(0)) {
     int s = index % spatial_dim;
-    out[n * channels * spatial_dim + index] = out_tmp[index] / scale_tmp[s];
+    Dtype v = out_tmp[index] / scale_tmp[s];
+#ifdef LOG_SOFTMAX
+    v = log(v);
+#endif
+    out[n * channels * spatial_dim + index] = v;
   }
 }
 
@@ -177,6 +181,10 @@ __kernel void TEMPLATE(softmax_forward,Dtype)(const int num, const int channels,
   for (int index = get_global_id(0); index < channels * spatial_dim;
       index += get_global_size(0)) {
     int s = index % spatial_dim;
-    out[n * channels * spatial_dim + index] /= scale[n * spatial_dim + s];
+    Dtype v = out[n * channels * spatial_dim + index] / scale[n * spatial_dim + s];
+#ifdef LOG_SOFTMAX
+    v = log(v);
+#endif
+    out[n * channels * spatial_dim + index] = v;
   }
 }