From: Jeff Donahue
Date: Fri, 14 Mar 2014 05:44:41 +0000 (-0700)
Subject: fix softmax loss layer bug; all tests pass
X-Git-Tag: submit/tizen/20180823.020014~692^2~85^2~3
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=ed23b6890612d9f21b0edb93ee0b9d0bca1b48dc;p=platform%2Fupstream%2Fcaffeonacl.git

fix softmax loss layer bug; all tests pass
---

diff --git a/src/caffe/layers/softmax_loss_layer.cu b/src/caffe/layers/softmax_loss_layer.cu
index 5039524..ab7ee6e 100644
--- a/src/caffe/layers/softmax_loss_layer.cu
+++ b/src/caffe/layers/softmax_loss_layer.cu
@@ -16,9 +16,7 @@
 template <typename Dtype>
 Dtype SoftmaxWithLossLayer<Dtype>::Forward_gpu(
     const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
   // The forward pass computes the softmax prob values.
-  softmax_bottom_vec_[0] = bottom[0];
-  softmax_layer_->Forward(softmax_bottom_vec_, &softmax_top_vec_);
-  return Dtype(0);
+  return Forward_cpu(bottom, top);
 }

 template <typename Dtype>