From ccc4b30c04632416393ef261fcd17945e161aaee Mon Sep 17 00:00:00 2001
From: Jonathan L Long
Date: Mon, 17 Feb 2014 16:35:38 -0800
Subject: [PATCH] add Net::Forward/Backward From/To

---
 include/caffe/net.hpp |  6 ++++++
 src/caffe/net.cpp     | 56 +++++++++++++++++++++++++++++++++++++++++----------
 2 files changed, 51 insertions(+), 11 deletions(-)

diff --git a/include/caffe/net.hpp b/include/caffe/net.hpp
index ce82e28..d7282f5 100644
--- a/include/caffe/net.hpp
+++ b/include/caffe/net.hpp
@@ -35,6 +35,9 @@ class Net {
   // Run forward with the input blobs already fed separately. You can get the
   // input blobs using input_blobs().
   const vector<Blob<Dtype>*>& ForwardPrefilled(Dtype* loss = NULL);
+  Dtype ForwardFromTo(int start, int end);
+  Dtype ForwardFrom(int start);
+  Dtype ForwardTo(int end);
   // Run forward using a set of bottom blobs, and return the result.
   const vector<Blob<Dtype>*>& Forward(const vector<Blob<Dtype>* > & bottom,
       Dtype* loss = NULL);
@@ -46,6 +49,9 @@ class Net {
   // computes the gradient w.r.t the parameters, and the data has already
   // been provided during the forward pass.
   void Backward();
+  void BackwardFromTo(int start, int end);
+  void BackwardFrom(int start);
+  void BackwardTo(int end);

   Dtype ForwardBackward(const vector<Blob<Dtype>* > & bottom) {
     Dtype loss;
diff --git a/src/caffe/net.cpp b/src/caffe/net.cpp
index aba4cc2..cadcdcd 100644
--- a/src/caffe/net.cpp
+++ b/src/caffe/net.cpp
@@ -335,16 +335,34 @@ void Net<Dtype>::GetLearningRateAndWeightDecay() {
 }

 template <typename Dtype>
-const vector<Blob<Dtype>*>& Net<Dtype>::ForwardPrefilled(Dtype* loss) {
-  if (loss != NULL) {
-    *loss = Dtype(0.);
-  }
-  for (int i = 0; i < layers_.size(); ++i) {
+Dtype Net<Dtype>::ForwardFromTo(int start, int end) {
+  CHECK_GE(start, 0);
+  CHECK_LT(end, layers_.size());
+  Dtype loss = 0;
+  for (int i = start; i <= end; ++i) {
     // LOG(ERROR) << "Forwarding " << layer_names_[i];
     Dtype layer_loss = layers_[i]->Forward(bottom_vecs_[i], &top_vecs_[i]);
-    if (loss != NULL) {
-      *loss += layer_loss;
-    }
+    loss += layer_loss;
+  }
+  return loss;
+}
+
+template <typename Dtype>
+Dtype Net<Dtype>::ForwardFrom(int start) {
+  return ForwardFromTo(start, layers_.size() - 1);
+}
+
+template <typename Dtype>
+Dtype Net<Dtype>::ForwardTo(int end) {
+  return ForwardFromTo(0, end);
+}
+
+template <typename Dtype>
+const vector<Blob<Dtype>*>& Net<Dtype>::ForwardPrefilled(Dtype* loss) {
+  if (loss != NULL) {
+    *loss = ForwardFromTo(0, layers_.size() - 1);
+  } else {
+    ForwardFromTo(0, layers_.size() - 1);
   }
   return net_output_blobs_;
 }
@@ -380,10 +398,11 @@ string Net<Dtype>::Forward(const string& input_blob_protos, Dtype* loss) {
   return output;
 }

-
 template <typename Dtype>
-void Net<Dtype>::Backward() {
-  for (int i = layers_.size() - 1; i >= 0; --i) {
+void Net<Dtype>::BackwardFromTo(int start, int end) {
+  CHECK_GE(end, 0);
+  CHECK_LT(start, layers_.size());
+  for (int i = start; i >= end; --i) {
     if (layer_need_backward_[i]) {
       layers_[i]->Backward(
           top_vecs_[i], bottom_need_backward_[i], &bottom_vecs_[i]);
@@ -423,6 +442,21 @@ void Net<Dtype>::ShareTrainedLayersWith(Net* other) {
 }

 template <typename Dtype>
+void Net<Dtype>::BackwardFrom(int start) {
+  BackwardFromTo(start, 0);
+}
+
+template <typename Dtype>
+void Net<Dtype>::BackwardTo(int end) {
+  BackwardFromTo(layers_.size() - 1, end);
+}
+
+template <typename Dtype>
+void Net<Dtype>::Backward() {
+  BackwardFromTo(layers_.size() - 1, 0);
+}
+
+template <typename Dtype>
 void Net<Dtype>::CopyTrainedLayersFrom(const NetParameter& param) {
   int num_source_layers = param.layers_size();
   for (int i = 0; i < num_source_layers; ++i) {
--
2.7.4
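
Usage sketch (not part of the patch): the new entry points split the
monolithic Forward()/Backward() loops so a caller can run any contiguous
range of layers. All arguments are inclusive layer indices, so
ForwardFromTo(0, layers_.size() - 1) is a full forward pass and Backward()
becomes just BackwardFromTo(layers_.size() - 1, 0). The sketch below assumes
an already-initialized caffe::Net<float> whose input blobs have been filled,
as for ForwardPrefilled(); the function name PartialPass and the split index
k are hypothetical, chosen only for illustration.

    #include "caffe/net.hpp"

    // Run the net forward in two pieces, then backpropagate only through
    // the suffix. Assumes 0 <= k < net->layers().size() - 1 and that the
    // net's input blobs are already filled (hypothetical example, not
    // taken from the patch itself).
    float PartialPass(caffe::Net<float>* net, int k) {
      // Forward through layers [0, k]; the return value is the loss
      // accumulated over those layers.
      float prefix_loss = net->ForwardTo(k);
      // Finish the forward pass over layers [k + 1, last].
      float suffix_loss = net->ForwardFrom(k + 1);
      // Backpropagate from the last layer down to layer k + 1 only;
      // the diffs of layers [0, k] are left untouched.
      net->BackwardTo(k + 1);
      return prefix_loss + suffix_loss;
    }

Because ForwardFromTo() accumulates the per-layer losses itself and returns
the sum, ForwardPrefilled() no longer needs its NULL-guarded accumulation:
it simply forwards the full range and stores the returned total when a loss
pointer is supplied.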