From 937b8e4277581502f09f012d66f8adb12eddab6d Mon Sep 17 00:00:00 2001
From: Li Peng
Date: Wed, 11 Oct 2017 15:29:39 +0800
Subject: [PATCH] dnn(ocl4dnn): support log softmax in ocl4dnn

Signed-off-by: Li Peng
---
 modules/dnn/src/layers/softmax_layer.cpp        |  3 ++-
 modules/dnn/src/ocl4dnn/include/ocl4dnn.hpp     |  4 +++-
 modules/dnn/src/ocl4dnn/src/ocl4dnn_softmax.cpp |  2 ++
 modules/dnn/src/opencl/softmax_loss.cl          | 12 ++++++++++--
 4 files changed, 17 insertions(+), 4 deletions(-)

diff --git a/modules/dnn/src/layers/softmax_layer.cpp b/modules/dnn/src/layers/softmax_layer.cpp
index fd14e29..a6ff408 100644
--- a/modules/dnn/src/layers/softmax_layer.cpp
+++ b/modules/dnn/src/layers/softmax_layer.cpp
@@ -100,6 +100,7 @@ public:
             config.in_shape = shape(*inputs[0]);
             config.axis = axisRaw;
             config.channels = inputs[0]->size[axisRaw];
+            config.logsoftmax = logSoftMax;
 
             softmaxOp = Ptr<OCL4DNNSoftmax<float> >(new OCL4DNNSoftmax<float>(config));
         }
@@ -108,7 +109,7 @@ public:
         srcMat = inputs[0]->getUMat(ACCESS_READ);
         dstMat = outputs[0].getUMat(ACCESS_WRITE);
 
-        if (!logSoftMax && softmaxOp->Forward(srcMat, dstMat))
+        if (softmaxOp->Forward(srcMat, dstMat))
             return true;
 
         const Mat &src = *inputs[0];
diff --git a/modules/dnn/src/ocl4dnn/include/ocl4dnn.hpp b/modules/dnn/src/ocl4dnn/include/ocl4dnn.hpp
index 09bda05..c2c7b52 100644
--- a/modules/dnn/src/ocl4dnn/include/ocl4dnn.hpp
+++ b/modules/dnn/src/ocl4dnn/include/ocl4dnn.hpp
@@ -445,11 +445,12 @@ class OCL4DNNLRN
 
 struct OCL4DNNSoftmaxConfig
 {
-    OCL4DNNSoftmaxConfig() : axis(0), channels(0)
+    OCL4DNNSoftmaxConfig() : axis(0), channels(0), logsoftmax(false)
     {}
     MatShape in_shape;
     int axis;
     int channels;
+    bool logsoftmax;
 };
 
 template<typename Dtype>
@@ -467,6 +468,7 @@ class OCL4DNNSoftmax
     int32_t channels_;
     int32_t count_;
     bool use_slm_;
+    bool log_softmax_;
     UMat scale_data_;
 };
 #endif // HAVE_OPENCL
diff --git a/modules/dnn/src/ocl4dnn/src/ocl4dnn_softmax.cpp b/modules/dnn/src/ocl4dnn/src/ocl4dnn_softmax.cpp
index e4802d2..9ac5ddc 100644
--- a/modules/dnn/src/ocl4dnn/src/ocl4dnn_softmax.cpp
+++ b/modules/dnn/src/ocl4dnn/src/ocl4dnn_softmax.cpp
@@ -52,6 +52,7 @@ OCL4DNNSoftmax<Dtype>::OCL4DNNSoftmax(OCL4DNNSoftmaxConfig config)
 {
     softmax_axis_ = config.axis;
     channels_ = config.channels;
+    log_softmax_ = config.logsoftmax;
 
     inner_num_ = 1;
     outer_num_ = 1;
@@ -90,6 +91,7 @@ bool OCL4DNNSoftmax<Dtype>::Forward(const UMat& bottom, UMat& top)
     String kname;
     ocl::Kernel oclk_softmax_forward_kernel;
 
+    if (log_softmax_) opts += " -DLOG_SOFTMAX ";
     if (use_slm_)
         kname = CL_KERNEL_SELECT("softmax_forward_slm");
     else
diff --git a/modules/dnn/src/opencl/softmax_loss.cl b/modules/dnn/src/opencl/softmax_loss.cl
index d30b32b..28a43ae 100644
--- a/modules/dnn/src/opencl/softmax_loss.cl
+++ b/modules/dnn/src/opencl/softmax_loss.cl
@@ -112,7 +112,11 @@ __kernel void TEMPLATE(softmax_forward_slm,Dtype)(const int num, const int chann
   for (int index = get_global_id(0); index < channels * spatial_dim;
       index += get_global_size(0)) {
     int s = index % spatial_dim;
-    out[n * channels * spatial_dim + index] = out_tmp[index] / scale_tmp[s];
+    Dtype v = out_tmp[index] / scale_tmp[s];
+#ifdef LOG_SOFTMAX
+    v = log(v);
+#endif
+    out[n * channels * spatial_dim + index] = v;
   }
 }
 
@@ -177,6 +181,10 @@ __kernel void TEMPLATE(softmax_forward,Dtype)(const int num, const int channels,
   for (int index = get_global_id(0); index < channels * spatial_dim;
       index += get_global_size(0)) {
     int s = index % spatial_dim;
-    out[n * channels * spatial_dim + index] /= scale[n * spatial_dim + s];
+    Dtype v = out[n * channels * spatial_dim + index] / scale[n * spatial_dim + s];
+#ifdef LOG_SOFTMAX
+    v = log(v);
+#endif
+    out[n * channels * spatial_dim + index] = v;
   }
 }
-- 
2.7.4
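
Note for reviewers (not part of the patch): with -DLOG_SOFTMAX the kernels keep the existing max-subtracted softmax computation and simply apply log() to the normalized value, so the result equals the usual log-softmax x - max - log(sum(exp(x - max))). The host-side C++ sketch below only illustrates that equivalence; everything in it is made up for the example and is not OpenCV API.

// Standalone illustration of what the kernels compute under LOG_SOFTMAX.
#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

int main()
{
    std::vector<float> x = {1.0f, 2.0f, 3.0f};

    // Max-subtraction for numerical stability, as the kernels do per channel.
    float m = *std::max_element(x.begin(), x.end());
    float sum = 0.f;
    for (float v : x) sum += std::exp(v - m);

    for (size_t i = 0; i < x.size(); i++)
    {
        float p = std::exp(x[i] - m) / sum;  // softmax value, as before the patch
        float a = std::log(p);               // what "v = log(v)" adds under LOG_SOFTMAX
        float b = x[i] - m - std::log(sum);  // closed-form log-softmax for comparison
        std::printf("%zu: log(softmax)=%f  x-max-log(sum)=%f\n", i, a, b);
    }
    return 0;
}

Taking log() of the already-normalized softmax reuses the existing kernels with a two-line change per kernel; softmax outputs lie in (0, 1], so log() is well-defined, though a fused x - logsumexp formulation would retain slightly more precision for very small probabilities.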