From: Ronghang Hu
Date: Wed, 12 Aug 2015 19:05:56 +0000 (-0700)
Subject: Apply mutex only to shared layers and fix NVCC warning
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=6b50ed6fc1897ce1ccd673cf0287788b38b58a6d;p=platform%2Fupstream%2Fcaffe.git

Apply mutex only to shared layers and fix NVCC warning
---

diff --git a/include/caffe/layer.hpp b/include/caffe/layer.hpp
index d82197a..a0d1d4e 100644
--- a/include/caffe/layer.hpp
+++ b/include/caffe/layer.hpp
@@ -1,7 +1,6 @@
 #ifndef CAFFE_LAYER_H_
 #define CAFFE_LAYER_H_
 
-#include <boost/thread.hpp>
 #include <algorithm>
 #include <string>
 #include <vector>
@@ -12,6 +11,12 @@
 #include "caffe/proto/caffe.pb.h"
 #include "caffe/util/device_alternate.hpp"
 
+/**
+ Forward declare boost::thread instead of including boost/thread.hpp
+ to avoid boost/NVCC issues (#1009, #1010) on OSX.
+ */
+namespace boost { class mutex; }
+
 namespace caffe {
 
 /**
@@ -33,7 +38,7 @@ class Layer {
    * layer.
    */
   explicit Layer(const LayerParameter& param)
-    : layer_param_(param) {
+    : layer_param_(param), is_shared_(false) {
       // Set phase and copy blobs (if there are any).
       phase_ = param.phase();
       if (layer_param_.blobs_size() > 0) {
@@ -61,6 +66,7 @@ class Layer {
    */
   void SetUp(const vector<Blob<Dtype>*>& bottom,
       const vector<Blob<Dtype>*>& top) {
+    InitMutex();
     CheckBlobCounts(bottom, top);
     LayerSetUp(bottom, top);
     Reshape(bottom, top);
@@ -94,6 +100,22 @@
    */
   virtual inline bool ShareInParallel() const { return false; }
 
+  /** @brief Return whether this layer is actually shared by other nets.
+   *         If ShareInParallel() is true and using more than one GPU and the
+   *         net has TRAIN phase, then this function is expected to return true.
+   */
+  inline bool IsShared() const { return is_shared_; }
+
+  /** @brief Set whether this layer is actually shared by other nets.
+   *         If ShareInParallel() is true and using more than one GPU and the
+   *         net has TRAIN phase, then is_shared should be set to true.
+   */
+  inline void SetShared(bool is_shared) {
+    CHECK(ShareInParallel() || !is_shared)
+        << type() << "Layer does not support sharing.";
+    is_shared_ = is_shared;
+  }
+
   /**
    * @brief Adjust the shapes of top blobs and internal buffers to accommodate
    *        the shapes of the bottom blobs.
@@ -406,8 +428,18 @@ class Layer {
   }
 
  private:
-  // mutex to lock layer to ensure sequential forward
-  boost::mutex forward_mutex_;
+  /** Whether this layer is actually shared by other nets */
+  bool is_shared_;
+
+  /** The mutex for sequential forward if this layer is shared */
+  shared_ptr<boost::mutex> forward_mutex_;
+
+  /** Initialize forward_mutex_ */
+  void InitMutex();
+  /** Lock forward_mutex_ if this layer is shared */
+  void Lock();
+  /** Unlock forward_mutex_ if this layer is shared */
+  void Unlock();
 
   DISABLE_COPY_AND_ASSIGN(Layer);
 };  // class Layer
@@ -419,7 +451,7 @@ template <typename Dtype>
 inline Dtype Layer<Dtype>::Forward(const vector<Blob<Dtype>*>& bottom,
     const vector<Blob<Dtype>*>& top) {
   // Lock during forward to ensure sequential forward
-  boost::mutex::scoped_lock lock(forward_mutex_);
+  Lock();
   Dtype loss = 0;
   Reshape(bottom, top);
   switch (Caffe::mode()) {
@@ -450,6 +482,7 @@ inline Dtype Layer<Dtype>::Forward(const vector<Blob<Dtype>*>& bottom,
   default:
     LOG(FATAL) << "Unknown caffe mode.";
   }
+  Unlock();
   return loss;
 }
 
diff --git a/src/caffe/layer.cpp b/src/caffe/layer.cpp
new file mode 100644
index 0000000..3b91289
--- /dev/null
+++ b/src/caffe/layer.cpp
@@ -0,0 +1,27 @@
+#include <boost/thread.hpp>
+#include "caffe/layer.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void Layer<Dtype>::InitMutex() {
+  forward_mutex_.reset(new boost::mutex());
+}
+
+template <typename Dtype>
+void Layer<Dtype>::Lock() {
+  if (IsShared()) {
+    forward_mutex_->lock();
+  }
+}
+
+template <typename Dtype>
+void Layer<Dtype>::Unlock() {
+  if (IsShared()) {
+    forward_mutex_->unlock();
+  }
+}
+
+INSTANTIATE_CLASS(Layer);
+
+}  // namespace caffe
diff --git a/src/caffe/net.cpp b/src/caffe/net.cpp
index 14f8385..7f5bdf7 100644
--- a/src/caffe/net.cpp
+++ b/src/caffe/net.cpp
@@ -84,7 +84,7 @@ void Net<Dtype>::Init(const NetParameter& in_param) {
   bottom_need_backward_.resize(param.layer_size());
   for (int layer_id = 0; layer_id < param.layer_size(); ++layer_id) {
     // For non-root solvers, whether this layer is shared from root_net_.
-    bool is_shared_layer = !Caffe::root_solver()
+    bool share_from_root = !Caffe::root_solver()
         && root_net_->layers_[layer_id]->ShareInParallel();
     // Inherit phase from net if unset.
     if (!param.layer(layer_id).has_phase()) {
@@ -98,9 +98,10 @@
           << "propagate_down param must be specified "
           << "either 0 or bottom_size times ";
     }
-    if (is_shared_layer) {
+    if (share_from_root) {
       LOG(INFO) << "Sharing layer " << layer_param.name() << " from root net";
       layers_.push_back(root_net_->layers_[layer_id]);
+      layers_[layer_id]->SetShared(true);
     } else {
       layers_.push_back(LayerRegistry<Dtype>::CreateLayer(layer_param));
     }
@@ -137,7 +138,7 @@
       }
     }
     // After this layer is connected, set it up.
-    if (is_shared_layer) {
+    if (share_from_root) {
       // Set up size of top blobs using root_net_
       const vector<Blob<Dtype>*>& base_top = root_net_->top_vecs_[layer_id];
       const vector<Blob<Dtype>*>& this_top = this->top_vecs_[layer_id];
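
Editor's note: the net effect of the diff is that Forward() now takes the
mutex only for layer instances that are shared across nets (those given
SetShared(true) by Net::Init), so unshared layers pay no locking cost. The
following is a minimal standalone sketch of that pattern, assuming only
boost; ToyLayer and main() are illustrative stand-ins, not Caffe code.

// toy_shared_layer.cpp -- sketch of the conditional-locking pattern above.
#include <boost/shared_ptr.hpp>
#include <boost/thread/mutex.hpp>
#include <iostream>

class ToyLayer {
 public:
  ToyLayer() : is_shared_(false), forward_mutex_(new boost::mutex()) {}
  void SetShared(bool is_shared) { is_shared_ = is_shared; }
  double Forward() {
    Lock();            // no-op unless this instance is shared
    double loss = 0;   // a real layer would compute bottom -> top here
    Unlock();
    return loss;
  }
 private:
  void Lock()   { if (is_shared_) forward_mutex_->lock(); }
  void Unlock() { if (is_shared_) forward_mutex_->unlock(); }
  bool is_shared_;
  boost::shared_ptr<boost::mutex> forward_mutex_;
};

int main() {
  ToyLayer layer;
  layer.SetShared(true);  // as Net::Init does for layers shared from root
  std::cout << layer.Forward() << std::endl;
  return 0;
}

One trade-off worth noting: like the commit, the sketch unlocks manually
rather than via the RAII scoped_lock it replaces, so an exception thrown
between Lock() and Unlock() would leave the mutex held.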
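The NVCC side of the fix relies on shared_ptr working with incomplete
types: boost::mutex only has to be complete in the translation unit that
constructs the pointer (the new layer.cpp), so the header gets by with the
one-line forward declaration. Below is a hedged two-file sketch of the same
idiom; Widget and the file names are invented for illustration.

// widget.hpp -- may be compiled by NVCC; never includes boost/thread.hpp.
namespace boost { class mutex; }   // forward declaration, as in layer.hpp
#include <boost/shared_ptr.hpp>

class Widget {
 public:
  Widget();
 private:
  // OK with an incomplete type: shared_ptr type-erases its deleter, and
  // the deleter is only instantiated where the pointer is constructed.
  boost::shared_ptr<boost::mutex> mutex_;
};

// widget.cpp -- ordinary C++ translation unit with the full definition.
#include "widget.hpp"
#include <boost/thread/mutex.hpp>
Widget::Widget() : mutex_(new boost::mutex()) {}  // deleter bound here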