From 91abaec3dda9b442b25d2e7709bfe096da071c68 Mon Sep 17 00:00:00 2001
From: Yangqing Jia
Date: Wed, 25 Sep 2013 22:38:20 -0700
Subject: [PATCH] bugfix

---
 src/caffe/blob.cpp                    |  1 +
 src/caffe/layers/softmax_layer.cpp    |  2 +-
 src/caffe/test/test_softmax_layer.cpp | 26 +++++++++++++++++++++++---
 3 files changed, 25 insertions(+), 4 deletions(-)

diff --git a/src/caffe/blob.cpp b/src/caffe/blob.cpp
index aacb05c..ecb37b7 100644
--- a/src/caffe/blob.cpp
+++ b/src/caffe/blob.cpp
@@ -58,6 +58,7 @@ const Blob<Dtype>& Blob<Dtype>::operator=(const Blob<Dtype>& source) {
     memcpy(diff_->mutable_cpu_data(), source.cpu_diff(),
         count_ * sizeof(Dtype));
   }
+  return (*this);
 }
 
 template <typename Dtype>
diff --git a/src/caffe/layers/softmax_layer.cpp b/src/caffe/layers/softmax_layer.cpp
index 31f25c3..ead05b3 100644
--- a/src/caffe/layers/softmax_layer.cpp
+++ b/src/caffe/layers/softmax_layer.cpp
@@ -19,7 +19,7 @@ void SoftmaxLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
   sum_multiplier_.Reshape(1, bottom[0]->channels(),
       bottom[0]->height(), bottom[0]->width());
   Dtype* multiplier_data = sum_multiplier_.mutable_cpu_data();
-  for (int i = 0; i < bottom[0]->num(); ++i) {
+  for (int i = 0; i < sum_multiplier_.count(); ++i) {
     multiplier_data[i] = 1.;
   }
   scale_.Reshape(bottom[0]->num(), 1, 1, 1);
diff --git a/src/caffe/test/test_softmax_layer.cpp b/src/caffe/test/test_softmax_layer.cpp
index 37391ea..253ea8a 100644
--- a/src/caffe/test/test_softmax_layer.cpp
+++ b/src/caffe/test/test_softmax_layer.cpp
@@ -1,5 +1,6 @@
 // Copyright 2013 Yangqing Jia
 
+#include <cmath>
 #include <cstring>
 #include <cuda_runtime.h>
 
@@ -39,15 +40,34 @@ class SoftmaxLayerTest : public ::testing::Test {
 
 typedef ::testing::Types<float, double> Dtypes;
 TYPED_TEST_CASE(SoftmaxLayerTest, Dtypes);
 
-TYPED_TEST(SoftmaxLayerTest, TestReLUCPU) {
+TYPED_TEST(SoftmaxLayerTest, TestForwardCPU) {
   LayerParameter layer_param;
   Caffe::set_mode(Caffe::CPU);
   SoftmaxLayer<TypeParam> layer(layer_param);
   layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
   layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
-  NOT_IMPLEMENTED;
+  for (int i = 0; i < this->blob_bottom_->num(); ++i) {
+    TypeParam scale = 0;
+    for (int j = 0; j < this->blob_bottom_->channels(); ++j) {
+      scale += exp(this->blob_bottom_->data_at(i, j, 0, 0));
+    }
+    for (int j = 0; j < this->blob_bottom_->channels(); ++j) {
+      EXPECT_GE(this->blob_top_->data_at(i, j, 0, 0) + 1e-4,
+          exp(this->blob_bottom_->data_at(i, j, 0, 0)) / scale)
+          << "debug: " << i << " " << j;
+      EXPECT_LE(this->blob_top_->data_at(i, j, 0, 0) - 1e-4,
+          exp(this->blob_bottom_->data_at(i, j, 0, 0)) / scale)
+          << "debug: " << i << " " << j;
+    }
+  }
 }
-
+TYPED_TEST(SoftmaxLayerTest, TestGradientCPU) {
+  LayerParameter layer_param;
+  Caffe::set_mode(Caffe::CPU);
+  SoftmaxLayer<TypeParam> layer(layer_param);
+  GradientChecker<TypeParam> checker(1e-2, 1e-3);
+  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
+}
 
 }
-- 
2.7.4
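
Why the blob.cpp change matters: Blob<Dtype>::operator= is declared to return a reference, but the function body previously ended without a return statement. In C++ that is undefined behavior, not merely a missing value, and it typically goes unnoticed because compilers only emit a -Wreturn-type warning until a caller actually uses the result. A minimal sketch of the same bug and fix, using a hypothetical Buf type rather than Caffe's Blob:

    #include <cstring>

    struct Buf {
      float data[4];
      Buf& operator=(const Buf& src) {
        std::memcpy(data, src.data, sizeof(data));
        return *this;  // omitting this return compiles with only a warning,
                       // but reaching the end of the function is UB
      }
    };

    int main() {
      Buf a = {{1.f, 2.f, 3.f, 4.f}};
      Buf b = {}, c = {};
      c = b = a;  // chained assignment reads the returned reference
      return 0;
    }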
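
The softmax_layer.cpp change fixes the initialization bound for sum_multiplier_: the blob is reshaped to 1 x channels x height x width, so it holds count() = channels*height*width entries that should all be 1 (it is later used to form per-example sums), but the old loop only wrote num() entries, leaving the tail of the buffer uninitialized whenever num() is smaller than count(). The new TestForwardCPU then checks each output against exp(x_ij) / sum_j exp(x_ij) to within 1e-4. A self-contained reference softmax for one example's channel values, as a sketch assuming a plain std::vector input (the max subtraction is the standard numerical-stability step and does not change the result):

    #include <algorithm>
    #include <cmath>
    #include <vector>

    std::vector<double> softmax(const std::vector<double>& x) {
      // exp(x - max) avoids overflow for large inputs.
      const double m = *std::max_element(x.begin(), x.end());
      std::vector<double> y(x.size());
      double sum = 0.0;
      for (std::size_t j = 0; j < x.size(); ++j) {
        y[j] = std::exp(x[j] - m);
        sum += y[j];
      }
      // Normalize so the outputs form a distribution summing to 1.
      for (double& v : y) v /= sum;
      return y;
    }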
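
TestGradientCPU uses GradientChecker(1e-2, 1e-3), i.e. a finite-difference step of 1e-2 and an error threshold of 1e-3, to compare the layer's analytic backward pass against numerical gradients obtained by perturbing each input element. The underlying idea in isolation, sketched with a hypothetical scalar function f rather than a Caffe layer:

    #include <cmath>
    #include <cstdio>

    double f(double v) { return std::exp(v); }

    // Centered difference: f'(x) ~ (f(x+h) - f(x-h)) / (2h),
    // accurate to O(h^2).
    double numeric_grad(double (*fn)(double), double x, double h) {
      return (fn(x + h) - fn(x - h)) / (2.0 * h);
    }

    int main() {
      const double x = 0.5, h = 1e-2;
      // d/dx exp(x) = exp(x); the two values agree to about 1e-4 here.
      std::printf("numeric %.6f analytic %.6f\n",
                  numeric_grad(f, x, h), std::exp(x));
      return 0;
    }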