From a6ae5be95e216053574549857b77b4cb55748b78 Mon Sep 17 00:00:00 2001
From: Jeff Donahue <jeff.donahue@gmail.com>
Date: Tue, 18 Mar 2014 18:55:56 -0700
Subject: [PATCH] post rebase fixes: images layer and padding layer compute
 loss in forward

---
 include/caffe/vision_layers.hpp    | 20 ++++++++++----------
 src/caffe/layers/images_layer.cpp  | 17 +++--------------
 src/caffe/layers/padding_layer.cpp |  6 +++---
 src/caffe/layers/padding_layer.cu  |  6 +++---
 4 files changed, 19 insertions(+), 30 deletions(-)

diff --git a/include/caffe/vision_layers.hpp b/include/caffe/vision_layers.hpp
index 3a3bdfd..9c0850e 100644
--- a/include/caffe/vision_layers.hpp
+++ b/include/caffe/vision_layers.hpp
@@ -206,13 +206,13 @@ class PaddingLayer : public Layer<Dtype> {
       vector<Blob<Dtype>*>* top);
 
  protected:
-  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+  virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
-  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+  virtual Dtype Forward_gpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
-  virtual Dtype Backward_cpu(const vector<Blob<Dtype>*>& top,
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
       const bool propagate_down, vector<Blob<Dtype>*>* bottom);
-  virtual Dtype Backward_gpu(const vector<Blob<Dtype>*>& top,
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
       const bool propagate_down, vector<Blob<Dtype>*>* bottom);
   unsigned int PAD_;
   int NUM_;
@@ -425,14 +425,14 @@ class ImagesLayer : public Layer<Dtype> {
       vector<Blob<Dtype>*>* top);
 
  protected:
-  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+  virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
-  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+  virtual Dtype Forward_gpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
-  virtual Dtype Backward_cpu(const vector<Blob<Dtype>*>& top,
-      const bool propagate_down, vector<Blob<Dtype>*>* bottom);
-  virtual Dtype Backward_gpu(const vector<Blob<Dtype>*>& top,
-      const bool propagate_down, vector<Blob<Dtype>*>* bottom);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const bool propagate_down, vector<Blob<Dtype>*>* bottom) { return; }
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const bool propagate_down, vector<Blob<Dtype>*>* bottom) { return; }
 
   vector<std::pair<std::string, int> > lines_;
   int lines_id_;
diff --git a/src/caffe/layers/images_layer.cpp b/src/caffe/layers/images_layer.cpp
index e750e01..6208a9e 100644
--- a/src/caffe/layers/images_layer.cpp
+++ b/src/caffe/layers/images_layer.cpp
@@ -233,7 +233,7 @@ void ImagesLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
 }
 
 template <typename Dtype>
-void ImagesLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+Dtype ImagesLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
     vector<Blob<Dtype>*>* top) {
   // First, join the thread
   CHECK(!pthread_join(thread_, NULL)) << "Pthread joining failed.";
@@ -245,10 +245,11 @@ void ImagesLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
   // Start a new prefetch thread
   CHECK(!pthread_create(&thread_, NULL, ImagesLayerPrefetch<Dtype>,
       reinterpret_cast<void*>(this))) << "Pthread execution failed.";
+  return Dtype(0.);
 }
 
 template <typename Dtype>
-void ImagesLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+Dtype ImagesLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
     vector<Blob<Dtype>*>* top) {
   // First, join the thread
   CHECK(!pthread_join(thread_, NULL)) << "Pthread joining failed.";
@@ -262,18 +263,6 @@ void ImagesLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
   // Start a new prefetch thread
   CHECK(!pthread_create(&thread_, NULL, ImagesLayerPrefetch<Dtype>,
       reinterpret_cast<void*>(this))) << "Pthread execution failed.";
-}
-
-// The backward operations are dummy - they do not carry any computation.
-template <typename Dtype>
-Dtype ImagesLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
-    const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
-  return Dtype(0.);
-}
-
-template <typename Dtype>
-Dtype ImagesLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
-    const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
   return Dtype(0.);
 }
 
diff --git a/src/caffe/layers/padding_layer.cpp b/src/caffe/layers/padding_layer.cpp
index 4cb67df..658cc6a 100644
--- a/src/caffe/layers/padding_layer.cpp
+++ b/src/caffe/layers/padding_layer.cpp
@@ -29,7 +29,7 @@ void PaddingLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
 }
 
 template <typename Dtype>
-void PaddingLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+Dtype PaddingLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
     vector<Blob<Dtype>*>* top) {
   Dtype* top_data = (*top)[0]->mutable_cpu_data();
   const Dtype* bottom_data = bottom[0]->cpu_data();
@@ -47,10 +47,11 @@ void PaddingLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       }
     }
   }
+  return Dtype(0.);
 }
 
 template <typename Dtype>
-Dtype PaddingLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
+void PaddingLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
     const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
   const Dtype* top_diff = top[0]->cpu_diff();
   Dtype* bottom_diff = (*bottom)[0]->mutable_cpu_diff();
@@ -66,7 +67,6 @@ Dtype PaddingLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
       }
     }
   }
-  return Dtype(0.);
 }
 
 INSTANTIATE_CLASS(PaddingLayer);
diff --git a/src/caffe/layers/padding_layer.cu b/src/caffe/layers/padding_layer.cu
index 7ec28a9..d476df5 100644
--- a/src/caffe/layers/padding_layer.cu
+++ b/src/caffe/layers/padding_layer.cu
@@ -27,7 +27,7 @@ __global__ void PaddingForward(const int count, const Dtype* in, Dtype* out,
 }
 
 template <typename Dtype>
-void PaddingLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+Dtype PaddingLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
     vector<Blob<Dtype>*>* top) {
   const Dtype* bottom_data = bottom[0]->gpu_data();
   Dtype* top_data = (*top)[0]->mutable_gpu_data();
@@ -39,6 +39,7 @@ void PaddingLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
       count, bottom_data, top_data, NUM_, CHANNEL_, HEIGHT_IN_, WIDTH_IN_,
       PAD_);
   CUDA_POST_KERNEL_CHECK;
+  return Dtype(0);
 }
 
 template <typename Dtype>
@@ -61,7 +62,7 @@ __global__ void PaddingBackward(const int count, const Dtype* in, Dtype* out,
 }
 
 template <typename Dtype>
-Dtype PaddingLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
+void PaddingLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
     const bool propagate_down,
     vector<Blob<Dtype>*>* bottom) {
   if (propagate_down) {
@@ -74,7 +75,6 @@ Dtype PaddingLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
       PAD_);
     CUDA_POST_KERNEL_CHECK;
   }
-  return Dtype(0);
 }
 
 INSTANTIATE_CLASS(PaddingLayer);
-- 
2.7.4