template <typename Dtype>
+// SigmoidLayer applies the elementwise logistic function
+// y = 1 / (1 + exp(-x)) to its bottom blob (the sigmoid tests in this
+// patch pin exactly that mapping).  Declaration only: the CPU and GPU
+// implementations live outside this hunk.
+class SigmoidLayer : public NeuronLayer<Dtype> {
+ public:
+ explicit SigmoidLayer(const LayerParameter& param)
+ : NeuronLayer<Dtype>(param) {}
+
+ protected:
+ // Elementwise forward pass from bottom into *top (CPU / GPU variants).
+ virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
+ virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
+
+ // Backward pass: propagates gradients from top into *bottom when
+ // propagate_down is set.  NOTE(review): the Dtype return presumably
+ // reports a loss contribution -- confirm against the Layer base class.
+ virtual Dtype Backward_cpu(const vector<Blob<Dtype>*>& top,
+ const bool propagate_down, vector<Blob<Dtype>*>* bottom);
+ virtual Dtype Backward_gpu(const vector<Blob<Dtype>*>& top,
+ const bool propagate_down, vector<Blob<Dtype>*>* bottom);
+};
+
+
+template <typename Dtype>
class BNLLLayer : public NeuronLayer<Dtype> {
public:
explicit BNLLLayer(const LayerParameter& param)
}
+TYPED_TEST(NeuronLayerTest, TestSigmoidCPU) {
+  Caffe::set_mode(Caffe::CPU);
+  LayerParameter layer_param;
+  SigmoidLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
+  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
+  // Verify each output equals sigmoid(input) and lies in the unit interval.
+  const TypeParam* in = this->blob_bottom_->cpu_data();
+  const TypeParam* out = this->blob_top_->cpu_data();
+  const int count = this->blob_bottom_->count();
+  for (int idx = 0; idx < count; ++idx) {
+    const double expected = 1. / (1 + exp(-in[idx]));
+    EXPECT_FLOAT_EQ(out[idx], expected);
+    // The sigmoid squashes every value into [0, 1].
+    EXPECT_GE(out[idx], 0.);
+    EXPECT_LE(out[idx], 1.);
+  }
+}
+
+
+TYPED_TEST(NeuronLayerTest, TestSigmoidGradientCPU) {
+  Caffe::set_mode(Caffe::CPU);
+  LayerParameter layer_param;
+  SigmoidLayer<TypeParam> layer(layer_param);
+  // Compare the analytic backward pass against numeric differentiation
+  // over every bottom/top element (see GradientChecker for the meaning
+  // of the constructor arguments).
+  GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+TYPED_TEST(NeuronLayerTest, TestSigmoidGPU) {
+  Caffe::set_mode(Caffe::GPU);
+  LayerParameter layer_param;
+  SigmoidLayer<TypeParam> layer(layer_param);
+  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
+  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
+  // Verify each output equals sigmoid(input) and lies in the unit interval
+  // (results are read back through cpu_data after the GPU forward pass).
+  const TypeParam* in = this->blob_bottom_->cpu_data();
+  const TypeParam* out = this->blob_top_->cpu_data();
+  const int count = this->blob_bottom_->count();
+  for (int idx = 0; idx < count; ++idx) {
+    const double expected = 1. / (1 + exp(-in[idx]));
+    EXPECT_FLOAT_EQ(out[idx], expected);
+    // The sigmoid squashes every value into [0, 1].
+    EXPECT_GE(out[idx], 0.);
+    EXPECT_LE(out[idx], 1.);
+  }
+}
+
+
+TYPED_TEST(NeuronLayerTest, TestSigmoidGradientGPU) {
+  Caffe::set_mode(Caffe::GPU);
+  LayerParameter layer_param;
+  SigmoidLayer<TypeParam> layer(layer_param);
+  // Compare the analytic backward pass against numeric differentiation
+  // over every bottom/top element (see GradientChecker for the meaning
+  // of the constructor arguments).
+  GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
+  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_,
+      this->blob_top_vec_);
+}
+
+
+
TYPED_TEST(NeuronLayerTest, TestDropoutCPU) {
LayerParameter layer_param;
Caffe::set_mode(Caffe::CPU);