From: Jeff Donahue
Date: Sat, 15 Mar 2014 19:22:53 +0000 (-0700)
Subject: null pointer defaults for forward loss outputs
X-Git-Tag: submit/tizen/20180823.020014~692^2~85^2~1
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=0551d93831ef3a293efae0ab474f459d09779aa8;p=platform%2Fupstream%2Fcaffeonacl.git

null pointer defaults for forward loss outputs
---

diff --git a/include/caffe/net.hpp b/include/caffe/net.hpp
index d6c892f..a30491f 100644
--- a/include/caffe/net.hpp
+++ b/include/caffe/net.hpp
@@ -31,15 +31,13 @@ class Net {
 
   // Run forward with the input blobs already fed separately. You can get the
   // input blobs using input_blobs().
-  const vector<Blob<Dtype>*>& ForwardPrefilled(Dtype* loss);
-  const vector<Blob<Dtype>*>& ForwardPrefilled();
+  const vector<Blob<Dtype>*>& ForwardPrefilled(Dtype* loss = NULL);
   // Run forward using a set of bottom blobs, and return the result.
   const vector<Blob<Dtype>*>& Forward(const vector<Blob<Dtype>* > & bottom,
-      Dtype* loss);
-  const vector<Blob<Dtype>*>& Forward(const vector<Blob<Dtype>* > & bottom);
+      Dtype* loss = NULL);
   // Run forward using a serialized BlobProtoVector and return the result
   // as a serialized BlobProtoVector
-  string Forward(const string& input_blob_protos, Dtype* loss);
+  string Forward(const string& input_blob_protos, Dtype* loss = NULL);
   // The network backward should take no input and output, since it solely
   // computes the gradient w.r.t the parameters, and the data has already
   // been provided during the forward pass.
diff --git a/src/caffe/net.cpp b/src/caffe/net.cpp
index 397ee02..f3429b2 100644
--- a/src/caffe/net.cpp
+++ b/src/caffe/net.cpp
@@ -207,30 +207,22 @@ void Net<Dtype>::GetLearningRateAndWeightDecay() {
 }
 
 template <typename Dtype>
-const vector<Blob<Dtype>*>& Net<Dtype>::ForwardPrefilled() {
-  Dtype ignored_loss;
-  return ForwardPrefilled(&ignored_loss);
-}
-
-template <typename Dtype>
 const vector<Blob<Dtype>*>& Net<Dtype>::ForwardPrefilled(Dtype* loss) {
-  *loss = Dtype(0.);
+  if (loss != NULL) {
+    *loss = Dtype(0.);
+  }
   for (int i = 0; i < layers_.size(); ++i) {
     // LOG(ERROR) << "Forwarding " << layer_names_[i];
-    *loss += layers_[i]->Forward(bottom_vecs_[i], &top_vecs_[i]);
+    Dtype layer_loss = layers_[i]->Forward(bottom_vecs_[i], &top_vecs_[i]);
+    if (loss != NULL) {
+      *loss += layer_loss;
+    }
   }
   return net_output_blobs_;
 }
 
 template <typename Dtype>
 const vector<Blob<Dtype>*>& Net<Dtype>::Forward(
-    const vector<Blob<Dtype>*> & bottom) {
-  Dtype ignored_loss;
-  return Forward(bottom, &ignored_loss);
-}
-
-template <typename Dtype>
-const vector<Blob<Dtype>*>& Net<Dtype>::Forward(
     const vector<Blob<Dtype>*> & bottom, Dtype* loss) {
   // Copy bottom to internal bottom
   for (int i = 0; i < bottom.size(); ++i) {
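
For context, a minimal caller-side sketch of what the NULL default enables. This is illustrative only and not part of the commit; the helper function RunForward and its variable names are hypothetical.

  #include <cstdio>
  #include <vector>

  #include "caffe/blob.hpp"
  #include "caffe/net.hpp"

  // Hypothetical caller, assuming a Net<float> whose input blobs have
  // already been filled (e.g. through net.input_blobs()).
  float RunForward(caffe::Net<float>& net) {
    // With the NULL default, callers that do not care about the loss can
    // omit the argument; the forward pass then skips loss accumulation.
    const std::vector<caffe::Blob<float>*>& outputs = net.ForwardPrefilled();
    std::printf("forward produced %zu output blobs\n", outputs.size());

    // Callers that do want the loss still pass a pointer, exactly as before.
    float loss;
    net.ForwardPrefilled(&loss);
    return loss;
  }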