#include <cstring>
#include <vector>
+#include "boost/scoped_ptr.hpp"
#include "gtest/gtest.h"
#include "caffe/blob.hpp"
#include "caffe/test/test_caffe_main.hpp"
#include "caffe/test/test_gradient_check_util.hpp"
+using boost::scoped_ptr;
+
namespace caffe {
TYPED_TEST_CASE(SoftmaxWithLossLayerTest, TestDtypesAndDevices);
-
TYPED_TEST(SoftmaxWithLossLayerTest, TestGradient) {
  typedef typename TypeParam::Dtype Dtype;
  LayerParameter layer_param;
  SoftmaxWithLossLayer<Dtype> layer(layer_param);
  GradientChecker<Dtype> checker(1e-2, 1e-2, 1701);
  checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
      this->blob_top_vec_, 0);
}
+
+TYPED_TEST(SoftmaxWithLossLayerTest, TestForwardIgnoreLabel) {
+ typedef typename TypeParam::Dtype Dtype;
+ LayerParameter layer_param;
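+  // With normalization off the loss is not divided by the number of
+  // non-ignored labels, so losses from runs ignoring different labels share
+  // a common scale and can be summed below.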
+ layer_param.mutable_loss_param()->set_normalize(false);
+ // First, compute the loss with all labels
+ scoped_ptr<SoftmaxWithLossLayer<Dtype> > layer(
+ new SoftmaxWithLossLayer<Dtype>(layer_param));
+ layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+ layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+ Dtype full_loss = this->blob_top_loss_->cpu_data()[0];
+ // Now, accumulate the loss, ignoring each label in {0, ..., 4} in turn.
+ Dtype accum_loss = 0;
+ for (int label = 0; label < 5; ++label) {
+ layer_param.mutable_loss_param()->set_ignore_label(label);
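+    // Reconstruct the layer so SetUp sees the updated ignore_label.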
+ layer.reset(new SoftmaxWithLossLayer<Dtype>(layer_param));
+ layer->SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
+ layer->Forward(this->blob_bottom_vec_, this->blob_top_vec_);
+ accum_loss += this->blob_top_loss_->cpu_data()[0];
+ }
+  // Each label value is ignored in exactly one of the five runs, so every
+  // sample contributes to four of the five accumulated losses.
+ EXPECT_NEAR(4 * full_loss, accum_loss, 1e-4);
+}
+
+TYPED_TEST(SoftmaxWithLossLayerTest, TestGradientIgnoreLabel) {
+ typedef typename TypeParam::Dtype Dtype;
+ LayerParameter layer_param;
+ // labels are in {0, ..., 4}, so we'll ignore about a fifth of them
+ layer_param.mutable_loss_param()->set_ignore_label(0);
+ SoftmaxWithLossLayer<Dtype> layer(layer_param);
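+  // GradientChecker(stepsize, threshold, seed): numeric gradient check with
+  // step 1e-2, relative error tolerance 1e-2, and fixed RNG seed 1701.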
+ GradientChecker<Dtype> checker(1e-2, 1e-2, 1701);
+ checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+ this->blob_top_vec_, 0);
+}
+
+TYPED_TEST(SoftmaxWithLossLayerTest, TestGradientUnnormalized) {
+ typedef typename TypeParam::Dtype Dtype;
+ LayerParameter layer_param;
+ layer_param.mutable_loss_param()->set_normalize(false);
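+  // Backward must apply the same normalization as Forward; otherwise the
+  // analytic gradient would differ from the numeric one by a constant factor.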
+ SoftmaxWithLossLayer<Dtype> layer(layer_param);
+ GradientChecker<Dtype> checker(1e-2, 1e-2, 1701);
+ checker.CheckGradientExhaustive(&layer, this->blob_bottom_vec_,
+ this->blob_top_vec_, 0);
+}
+
}  // namespace caffe