From 9fb7818cf0484cd62197714e91815141097709de Mon Sep 17 00:00:00 2001
From: Jeff Donahue
Date: Sat, 29 Mar 2014 12:31:49 -0700
Subject: [PATCH] don't recompute pre_pad

---
 src/caffe/layers/lrn_layer.cpp | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/src/caffe/layers/lrn_layer.cpp b/src/caffe/layers/lrn_layer.cpp
index 98f9504..d4addb5 100644
--- a/src/caffe/layers/lrn_layer.cpp
+++ b/src/caffe/layers/lrn_layer.cpp
@@ -30,7 +30,6 @@ void LRNLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
     break;
   case LRNParameter_NormRegion_WITHIN_CHANNEL:
     {
-      const Dtype pre_pad = (size_ - 1) / 2;
       // Set up split_layer_ to use inputs in the numerator and denominator.
       split_top_vec_.clear();
       split_top_vec_.push_back(bottom[0]);
@@ -58,7 +57,7 @@ void LRNLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
       LayerParameter pool_param;
       pool_param.mutable_pooling_param()->set_pool(
           PoolingParameter_PoolMethod_AVE);
-      pool_param.mutable_pooling_param()->set_pad(pre_pad);
+      pool_param.mutable_pooling_param()->set_pad(pre_pad_);
       pool_param.mutable_pooling_param()->set_kernel_size(size_);
       pool_layer_.reset(new PoolingLayer<Dtype>(pool_param));
       pool_layer_->SetUp(square_top_vec_, &pool_top_vec_);
@@ -66,8 +65,8 @@ void LRNLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
       CHECK_EQ(pool_output_.channels(), channels_);
      CHECK_EQ(pool_output_.height(), height_);
       CHECK_EQ(pool_output_.width(), width_);
-      // Set up power_layer_ to compute (1 + alpha_/N^2 s)^-beta_, where s is the
-      // sum of a squared neighborhood (the output of pool_layer_).
+      // Set up power_layer_ to compute (1 + alpha_/N^2 s)^-beta_, where s is
+      // the sum of a squared neighborhood (the output of pool_layer_).
       power_top_vec_.clear();
       power_top_vec_.push_back(&power_output_);
       LayerParameter power_param;
-- 
2.7.4