gradient checker optimization with forward pass loss: only need to run
backward pass to compute analytic gradient (the thing being checked) now

author    Jeff Donahue <jeff.donahue@gmail.com>
          Fri, 14 Mar 2014 01:09:52 +0000 (18:09 -0700)
committer Jeff Donahue <jeff.donahue@gmail.com>
          Wed, 19 Mar 2014 19:37:31 +0000 (12:37 -0700)

src/caffe/test/test_gradient_check_util.hpp

index 15a03be..6e89524 100644
@@ -103,13 +103,11 @@ void GradientChecker<Dtype>::CheckGradientSingle(Layer<Dtype>* layer,
       Caffe::set_random_seed(seed_);
       Dtype positive_objective = layer->Forward(*bottom, top);
       positive_objective += GetObjAndGradient(top, top_id, top_data_id);
-      layer->Backward(*top, true, bottom);
       // compute score by subtracting stepsize
       current_blob->mutable_cpu_data()[feat_id] -= stepsize_ * 2;
       Caffe::set_random_seed(seed_);
       Dtype negative_objective = layer->Forward(*bottom, top);
       negative_objective += GetObjAndGradient(top, top_id, top_data_id);
-      layer->Backward(*top, true, bottom);
       // Recover stepsize
       current_blob->mutable_cpu_data()[feat_id] += stepsize_;
       Dtype estimated_gradient = (positive_objective - negative_objective) /
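
The removed lines ran Backward after every perturbed forward pass, but the finite-difference side of the check never needs gradients: each perturbed objective comes from a forward pass alone, so Backward only has to run once, beforehand, to produce the analytic gradient being verified. Below is a minimal standalone sketch of that pattern. This is illustrative code, not Caffe's GradientChecker; the toy objective and the Forward/Backward helpers are hypothetical stand-ins for layer->Forward and layer->Backward.

// Standalone sketch: one backward pass for the analytic gradient,
// forward-only central differences for the numerical estimate.
#include <cmath>
#include <cstdio>
#include <vector>

// "Forward pass": a toy objective, f(x) = 0.5 * ||x||^2.
double Forward(const std::vector<double>& x) {
  double obj = 0.0;
  for (double v : x) obj += 0.5 * v * v;
  return obj;
}

// "Backward pass": analytic gradient of f, df/dx_i = x_i.
void Backward(const std::vector<double>& x, std::vector<double>* grad) {
  grad->assign(x.begin(), x.end());
}

int main() {
  const double stepsize = 1e-2;
  const double threshold = 1e-4;
  std::vector<double> x = {0.3, -1.2, 2.5};

  // One backward pass computes the analytic gradient (the thing being
  // checked); it does not need to be recomputed inside the loop.
  std::vector<double> analytic(x.size());
  Backward(x, &analytic);

  // The numerical check is forward-only: central differences,
  // (f(x + h) - f(x - h)) / (2h), with no Backward per feature.
  for (size_t i = 0; i < x.size(); ++i) {
    x[i] += stepsize;
    double positive_objective = Forward(x);
    x[i] -= 2 * stepsize;
    double negative_objective = Forward(x);
    x[i] += stepsize;  // recover original value
    double estimated_gradient =
        (positive_objective - negative_objective) / (2 * stepsize);
    std::printf("feat %zu: analytic %.6f, estimated %.6f\n",
                i, analytic[i], estimated_gradient);
    if (std::fabs(analytic[i] - estimated_gradient) > threshold) {
      std::printf("  mismatch exceeds threshold %.1e\n", threshold);
    }
  }
  return 0;
}

For a blob with N features this costs one backward pass plus 2N forward passes, rather than the 2N additional backward passes the removed calls incurred.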