From adab413fc65f760f9bc2097f379c2703e3641de4 Mon Sep 17 00:00:00 2001
From: Evan Shelhamer
Date: Wed, 25 Jun 2014 11:01:10 +0800
Subject: [PATCH] fix SOFTMAX_LOSS to work with loss top blob interface

---
 include/caffe/vision_layers.hpp         |  2 +-
 src/caffe/layers/softmax_loss_layer.cpp | 14 +++++++++++++-
 2 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/include/caffe/vision_layers.hpp b/include/caffe/vision_layers.hpp
index fc3dbbe..b7e1068 100644
--- a/include/caffe/vision_layers.hpp
+++ b/include/caffe/vision_layers.hpp
@@ -425,7 +425,7 @@ class SoftmaxWithLossLayer : public Layer<Dtype> {
     return LayerParameter_LayerType_SOFTMAX_LOSS;
   }
   virtual inline int ExactNumBottomBlobs() const { return 2; }
-  virtual inline int ExactNumTopBlobs() const { return 0; }
+  virtual inline int MaxTopBlobs() const { return 2; }
 
  protected:
   virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
diff --git a/src/caffe/layers/softmax_loss_layer.cpp b/src/caffe/layers/softmax_loss_layer.cpp
index 8a37efe..bdb3272 100644
--- a/src/caffe/layers/softmax_loss_layer.cpp
+++ b/src/caffe/layers/softmax_loss_layer.cpp
@@ -20,6 +20,15 @@ void SoftmaxWithLossLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
   softmax_bottom_vec_.push_back(bottom[0]);
   softmax_top_vec_.push_back(&prob_);
   softmax_layer_->SetUp(softmax_bottom_vec_, &softmax_top_vec_);
+  if (top->size() >= 1) {
+    // softmax loss (averaged across batch)
+    (*top)[0]->Reshape(1, 1, 1, 1);
+  }
+  if (top->size() == 2) {
+    // softmax output
+    (*top)[1]->Reshape(bottom[0]->num(), bottom[0]->channels(),
+        bottom[0]->height(), bottom[0]->width());
+  }
 }
 
 template <typename Dtype>
@@ -37,9 +46,12 @@ Dtype SoftmaxWithLossLayer<Dtype>::Forward_cpu(
     loss += -log(max(prob_data[i * dim + static_cast<int>(label[i])],
                      Dtype(FLT_MIN)));
   }
-  if (top->size() == 1) {
+  if (top->size() >= 1) {
     (*top)[0]->mutable_cpu_data()[0] = loss / num;
   }
+  if (top->size() == 2) {
+    (*top)[1]->ShareData(prob_);
+  }
   return loss / num;
 }
 
--
2.7.4
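
Note: with MaxTopBlobs() raised to 2, the layer's tops become optional rather
than forbidden, so a net can name one top to read out the batch-averaged loss
and a second top to reuse the softmax probabilities without recomputing them.
A minimal prototxt sketch of the two-top form, in the V1 "layers" format of
this era (layer and blob names are illustrative, not part of the patch):

    layers {
      name: "loss"
      type: SOFTMAX_LOSS
      bottom: "fc8"    # class scores
      bottom: "label"  # ground-truth labels
      top: "loss"      # 1x1x1x1 blob filled with loss / num in Forward_cpu
      top: "prob"      # shares the internal prob_ blob's data
    }

Declaring no tops keeps the old SOFTMAX_LOSS behavior, since Forward_cpu still
returns loss / num either way.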
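
And a sketch of reading these tops from C++ after a forward pass, assuming the
Net API of this era (ForwardPrefilled and blob_by_name) and the blob names from
the prototxt sketch above:

    #include "caffe/net.hpp"

    void LogSoftmaxLoss(caffe::Net<float>* net) {
      net->ForwardPrefilled();  // fills the "loss" and "prob" tops
      // the loss top was reshaped to 1x1x1x1 in SetUp, so it holds a
      // single value: the batch-averaged softmax loss
      const float loss = net->blob_by_name("loss")->cpu_data()[0];
      // the prob top shares data with prob_ (ShareData), so no copy is made
      const float* probs = net->blob_by_name("prob")->cpu_data();
      LOG(INFO) << "loss = " << loss
                << ", p(class 0, item 0) = " << probs[0];
    }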