From: Jonathan L Long Date: Fri, 15 Aug 2014 02:21:28 +0000 (-0700) Subject: test softmax and softmax with loss across channels X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=7f3ebcb8a8756675c197f40b0a3c100ad2acf593;p=platform%2Fupstream%2Fcaffe.git test softmax and softmax with loss across channels --- diff --git a/src/caffe/test/test_softmax_layer.cpp b/src/caffe/test/test_softmax_layer.cpp index 37685af..9f45f76 100644 --- a/src/caffe/test/test_softmax_layer.cpp +++ b/src/caffe/test/test_softmax_layer.cpp @@ -19,7 +19,7 @@ class SoftmaxLayerTest : public MultiDeviceTest { typedef typename TypeParam::Dtype Dtype; protected: SoftmaxLayerTest() - : blob_bottom_(new Blob(2, 10, 1, 1)), + : blob_bottom_(new Blob(2, 10, 2, 3)), blob_top_(new Blob()) { // fill the values FillerParameter filler_param; @@ -45,26 +45,28 @@ TYPED_TEST(SoftmaxLayerTest, TestForward) { layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_)); // Test sum for (int i = 0; i < this->blob_bottom_->num(); ++i) { - Dtype sum = 0; - for (int j = 0; j < this->blob_top_->channels(); ++j) { - sum += this->blob_top_->data_at(i, j, 0, 0); - } - EXPECT_GE(sum, 0.999); - EXPECT_LE(sum, 1.001); - } - // Test exact values - for (int i = 0; i < this->blob_bottom_->num(); ++i) { - Dtype scale = 0; - for (int j = 0; j < this->blob_bottom_->channels(); ++j) { - scale += exp(this->blob_bottom_->data_at(i, j, 0, 0)); - } - for (int j = 0; j < this->blob_bottom_->channels(); ++j) { - EXPECT_GE(this->blob_top_->data_at(i, j, 0, 0) + 1e-4, - exp(this->blob_bottom_->data_at(i, j, 0, 0)) / scale) - << "debug: " << i << " " << j; - EXPECT_LE(this->blob_top_->data_at(i, j, 0, 0) - 1e-4, - exp(this->blob_bottom_->data_at(i, j, 0, 0)) / scale) - << "debug: " << i << " " << j; + for (int k = 0; k < this->blob_bottom_->height(); ++k) { + for (int l = 0; l < this->blob_bottom_->width(); ++l) { + Dtype sum = 0; + for (int j = 0; j < this->blob_top_->channels(); ++j) { + sum += this->blob_top_->data_at(i, j, k, l); + } + EXPECT_GE(sum, 0.999); + EXPECT_LE(sum, 1.001); + // Test exact values + Dtype scale = 0; + for (int j = 0; j < this->blob_bottom_->channels(); ++j) { + scale += exp(this->blob_bottom_->data_at(i, j, k, l)); + } + for (int j = 0; j < this->blob_bottom_->channels(); ++j) { + EXPECT_GE(this->blob_top_->data_at(i, j, k, l) + 1e-4, + exp(this->blob_bottom_->data_at(i, j, k, l)) / scale) + << "debug: " << i << " " << j; + EXPECT_LE(this->blob_top_->data_at(i, j, k, l) - 1e-4, + exp(this->blob_bottom_->data_at(i, j, k, l)) / scale) + << "debug: " << i << " " << j; + } + } } } } diff --git a/src/caffe/test/test_softmax_with_loss_layer.cpp b/src/caffe/test/test_softmax_with_loss_layer.cpp index 0f0adbb..246d64e 100644 --- a/src/caffe/test/test_softmax_with_loss_layer.cpp +++ b/src/caffe/test/test_softmax_with_loss_layer.cpp @@ -21,8 +21,8 @@ class SoftmaxWithLossLayerTest : public MultiDeviceTest { protected: SoftmaxWithLossLayerTest() - : blob_bottom_data_(new Blob(10, 5, 1, 1)), - blob_bottom_label_(new Blob(10, 1, 1, 1)), + : blob_bottom_data_(new Blob(10, 5, 2, 3)), + blob_bottom_label_(new Blob(10, 1, 2, 3)), blob_top_loss_(new Blob()) { // fill the values FillerParameter filler_param;