From: Jeff Donahue
Date: Sun, 2 Nov 2014 04:37:05 +0000 (-0700)
Subject: SoftmaxWithLossLayer: use CreateLayer so that a CuDNNSoftmaxLayer is created if available
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=89a92b9d7a4b1ecdef69fe5e52f73b362f0ce913;p=platform%2Fupstream%2Fcaffe.git

SoftmaxWithLossLayer: use CreateLayer so that a CuDNNSoftmaxLayer is created
if available
---

diff --git a/include/caffe/loss_layers.hpp b/include/caffe/loss_layers.hpp
index 13b108a..4350136 100644
--- a/include/caffe/loss_layers.hpp
+++ b/include/caffe/loss_layers.hpp
@@ -697,8 +697,7 @@ template <typename Dtype>
 class SoftmaxWithLossLayer : public LossLayer<Dtype> {
  public:
   explicit SoftmaxWithLossLayer(const LayerParameter& param)
-      : LossLayer<Dtype>(param),
-        softmax_layer_(new SoftmaxLayer<Dtype>(param)) {}
+      : LossLayer<Dtype>(param) {}
   virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
       const vector<Blob<Dtype>*>& top);
   virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
@@ -751,7 +750,7 @@ class SoftmaxWithLossLayer : public LossLayer<Dtype> {
       const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
   /// The internal SoftmaxLayer used to map predictions to a distribution.
-  shared_ptr<SoftmaxLayer<Dtype> > softmax_layer_;
+  shared_ptr<Layer<Dtype> > softmax_layer_;
   /// prob stores the output probability predictions from the SoftmaxLayer.
   Blob<Dtype> prob_;
   /// bottom vector holder used in call to the underlying SoftmaxLayer::Forward
diff --git a/src/caffe/layers/softmax_loss_layer.cpp b/src/caffe/layers/softmax_loss_layer.cpp
index db8dd8b..dfc41d2 100644
--- a/src/caffe/layers/softmax_loss_layer.cpp
+++ b/src/caffe/layers/softmax_loss_layer.cpp
@@ -3,6 +3,7 @@
 #include <vector>

 #include "caffe/layer.hpp"
+#include "caffe/layer_factory.hpp"
 #include "caffe/util/math_functions.hpp"
 #include "caffe/vision_layers.hpp"

@@ -12,6 +13,9 @@ template <typename Dtype>
 void SoftmaxWithLossLayer<Dtype>::LayerSetUp(
     const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
   LossLayer<Dtype>::LayerSetUp(bottom, top);
+  LayerParameter softmax_param(this->layer_param_);
+  softmax_param.set_type(LayerParameter_LayerType_SOFTMAX);
+  softmax_layer_.reset(LayerRegistry<Dtype>::CreateLayer(softmax_param));
   softmax_bottom_vec_.clear();
   softmax_bottom_vec_.push_back(bottom[0]);
   softmax_top_vec_.clear();
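
For context on the mechanism: building the internal softmax through
LayerRegistry<Dtype>::CreateLayer instead of `new SoftmaxLayer<Dtype>(param)`
lets the factory hand back the cuDNN-backed implementation when Caffe is
compiled with cuDNN, and it is also why the member must be widened from
shared_ptr<SoftmaxLayer<Dtype> > to shared_ptr<Layer<Dtype> >: the caller can
only hold the base type if the concrete type is decided at creation time.
Below is a minimal standalone C++ sketch of that dispatch pattern; the names
Softmax, PlainSoftmax, CudnnSoftmax, and make_softmax are illustrative
stand-ins, not Caffe's actual API.

// Standalone sketch of factory dispatch through a base-class pointer.
// Illustrative only; Caffe's real factory is LayerRegistry<Dtype> in
// include/caffe/layer_factory.hpp.
#include <iostream>
#include <memory>

struct Softmax {                      // base class, like caffe::Layer<Dtype>
  virtual ~Softmax() {}
  virtual void Forward() = 0;
};

struct PlainSoftmax : Softmax {       // like caffe::SoftmaxLayer<Dtype>
  void Forward() { std::cout << "plain softmax\n"; }
};

#ifdef USE_CUDNN
struct CudnnSoftmax : Softmax {       // like caffe::CuDNNSoftmaxLayer<Dtype>
  void Forward() { std::cout << "cuDNN softmax\n"; }
};
#endif

// Like LayerRegistry<Dtype>::CreateLayer: returns a raw base-class pointer,
// so the factory is free to pick the best available implementation.
Softmax* make_softmax() {
#ifdef USE_CUDNN
  return new CudnnSoftmax;            // preferred when built with cuDNN
#else
  return new PlainSoftmax;
#endif
}

int main() {
  // Mirrors softmax_layer_.reset(LayerRegistry<Dtype>::CreateLayer(...)):
  // the holder is typed as the base class, so either implementation fits.
  std::shared_ptr<Softmax> softmax(make_softmax());
  softmax->Forward();
  return 0;
}

Compiling the sketch with -DUSE_CUDNN selects the alternate path while the
calling code stays identical, which is the property this commit exploits in
SoftmaxWithLossLayer::LayerSetUp.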