From 0551d93831ef3a293efae0ab474f459d09779aa8 Mon Sep 17 00:00:00 2001
From: Jeff Donahue
Date: Sat, 15 Mar 2014 12:22:53 -0700
Subject: [PATCH] null pointer defaults for forward loss outputs

---
 include/caffe/net.hpp |  8 +++-----
 src/caffe/net.cpp     | 22 +++++++---------------
 2 files changed, 10 insertions(+), 20 deletions(-)

diff --git a/include/caffe/net.hpp b/include/caffe/net.hpp
index d6c892f..a30491f 100644
--- a/include/caffe/net.hpp
+++ b/include/caffe/net.hpp
@@ -31,15 +31,13 @@ class Net {
 
   // Run forward with the input blobs already fed separately. You can get the
   // input blobs using input_blobs().
-  const vector<Blob<Dtype>*>& ForwardPrefilled(Dtype* loss);
-  const vector<Blob<Dtype>*>& ForwardPrefilled();
+  const vector<Blob<Dtype>*>& ForwardPrefilled(Dtype* loss = NULL);
   // Run forward using a set of bottom blobs, and return the result.
   const vector<Blob<Dtype>*>& Forward(const vector<Blob<Dtype>* > & bottom,
-      Dtype* loss);
-  const vector<Blob<Dtype>*>& Forward(const vector<Blob<Dtype>* > & bottom);
+      Dtype* loss = NULL);
   // Run forward using a serialized BlobProtoVector and return the result
   // as a serialized BlobProtoVector
-  string Forward(const string& input_blob_protos, Dtype* loss);
+  string Forward(const string& input_blob_protos, Dtype* loss = NULL);
 
   // The network backward should take no input and output, since it solely
   // computes the gradient w.r.t the parameters, and the data has already
diff --git a/src/caffe/net.cpp b/src/caffe/net.cpp
index 397ee02..f3429b2 100644
--- a/src/caffe/net.cpp
+++ b/src/caffe/net.cpp
@@ -207,30 +207,22 @@ void Net<Dtype>::GetLearningRateAndWeightDecay() {
 }
 
 template <typename Dtype>
-const vector<Blob<Dtype>*>& Net<Dtype>::ForwardPrefilled() {
-  Dtype ignored_loss;
-  return ForwardPrefilled(&ignored_loss);
-}
-
-template <typename Dtype>
 const vector<Blob<Dtype>*>& Net<Dtype>::ForwardPrefilled(Dtype* loss) {
-  *loss = Dtype(0.);
+  if (loss != NULL) {
+    *loss = Dtype(0.);
+  }
   for (int i = 0; i < layers_.size(); ++i) {
     // LOG(ERROR) << "Forwarding " << layer_names_[i];
-    *loss += layers_[i]->Forward(bottom_vecs_[i], &top_vecs_[i]);
+    Dtype layer_loss = layers_[i]->Forward(bottom_vecs_[i], &top_vecs_[i]);
+    if (loss != NULL) {
+      *loss += layer_loss;
+    }
   }
   return net_output_blobs_;
 }
 
 template <typename Dtype>
 const vector<Blob<Dtype>*>& Net<Dtype>::Forward(
-    const vector<Blob<Dtype>*> & bottom) {
-  Dtype ignored_loss;
-  return Forward(bottom, &ignored_loss);
-}
-
-template <typename Dtype>
-const vector<Blob<Dtype>*>& Net<Dtype>::Forward(
     const vector<Blob<Dtype>*> & bottom, Dtype* loss) {
   // Copy bottom to internal bottom
   for (int i = 0; i < bottom.size(); ++i) {
-- 
2.7.4
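
For reference, a minimal caller-side sketch of the defaulted loss argument. The RunForward helper, the float instantiation, and the already-filled input blobs are assumptions for illustration only; they are not part of the patch.

    #include <vector>

    #include "caffe/blob.hpp"
    #include "caffe/net.hpp"

    // Hypothetical helper: assumes the net already exists and its input
    // blobs have been filled (e.g. via net.input_blobs()).
    void RunForward(caffe::Net<float>& net) {
      // New form: callers that do not need the loss simply omit the
      // argument; with the NULL default no loss is accumulated or written.
      const std::vector<caffe::Blob<float>*>& outputs = net.ForwardPrefilled();

      // Callers that do want the accumulated loss still pass a pointer,
      // exactly as before this patch.
      float loss;
      net.ForwardPrefilled(&loss);
      (void) outputs;
    }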