// Copyright 2014 Sergio Guadarrama
#include <vector>

#include "caffe/layer.hpp"
#include "caffe/vision_layers.hpp"
#include "caffe/util/math_functions.hpp"
11 template <typename Dtype>
12 void ConcatLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
13 vector<Blob<Dtype>*>* top) {
14 CHECK_GT(bottom.size(), 1) <<
15 "Concat Layer takes at least two blobs as input.";
16 CHECK_EQ(top->size(), 1) <<
17 "Concat Layer takes a single blob as output.";
18 concat_dim_ = this->layer_param_.concat_dim();
19 CHECK_GE(concat_dim_, 0) << "concat_dim should be >= 0";
20 CHECK_LE(concat_dim_, 1) <<
21 "For now concat_dim <=1, it can only concat num and channels";
22 // Intialize with the first blob
23 COUNT_ = bottom[0]->count();
24 NUM_ = bottom[0]->num();
25 CHANNELS_ = bottom[0]->channels();
26 HEIGHT_ = bottom[0]->height();
27 WIDTH_ = bottom[0]->width();
28 for (int i = 1; i < bottom.size(); ++i) {
29 COUNT_ += bottom[i]->count();
30 if (concat_dim_== 0) {
31 NUM_ += bottom[i]->num();
32 } else if (concat_dim_ == 1) {
33 CHANNELS_ += bottom[i]->channels();
34 } else if (concat_dim_ == 2) {
35 HEIGHT_ += bottom[i]->height();
36 } else if (concat_dim_ == 3) {
37 WIDTH_ += bottom[i]->width();
40 (*top)[0]->Reshape(NUM_, CHANNELS_, HEIGHT_, WIDTH_);
41 CHECK_EQ(COUNT_, (*top)[0]->count());
44 template <typename Dtype>
45 void ConcatLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
46 vector<Blob<Dtype>*>* top) {
47 Dtype* top_data = (*top)[0]->mutable_cpu_data();
48 if (concat_dim_== 0) {
50 for (int i = 0; i < bottom.size(); ++i) {
51 const Dtype* bottom_data = bottom[i]->cpu_data();
52 int num_elem = bottom[i]->count();
53 caffe_copy(num_elem, bottom_data, top_data+(*top)[0]->offset(offset_num));
54 offset_num += bottom[i]->num();
56 } else if (concat_dim_ == 1) {
57 int offset_channel = 0;
58 for (int i = 0; i < bottom.size(); ++i) {
59 const Dtype* bottom_data = bottom[i]->cpu_data();
61 bottom[i]->channels()*bottom[i]->height()*bottom[i]->width();
62 for (int n = 0; n < NUM_; ++n) {
63 caffe_copy(num_elem, bottom_data+bottom[i]->offset(n),
64 top_data+(*top)[0]->offset(n, offset_channel));
66 offset_channel += bottom[i]->channels();
69 LOG(FATAL) << "concat_dim along dim" << concat_dim_ <<
70 " not implemented yet";
74 template <typename Dtype>
75 Dtype ConcatLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
76 const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
77 const Dtype* top_diff = top[0]->cpu_diff();
78 if (concat_dim_ == 0) {
80 for (int i = 0; i < bottom->size(); ++i) {
81 Blob<Dtype>* blob = (*bottom)[i];
82 Dtype* bottom_diff = blob->mutable_cpu_diff();
83 caffe_copy(blob->count(),
84 top_diff+top[0]->offset(offset_num), bottom_diff);
85 offset_num += blob->num();
87 } else if (concat_dim_ == 1) {
88 int offset_channel = 0;
89 for (int i = 0; i < bottom->size(); ++i) {
90 Blob<Dtype>* blob = (*bottom)[i];
91 Dtype* bottom_diff = blob->mutable_cpu_diff();
92 int num_elem = blob->channels()*blob->height()*blob->width();
93 for (int n = 0; n < NUM_; ++n) {
94 caffe_copy(num_elem, top_diff+top[0]->offset(n, offset_channel),
95 bottom_diff+blob->offset(n));
97 offset_channel += blob->channels();
100 LOG(FATAL) << "concat_dim along dim" << concat_dim_ <<
101 " not implemented yet";
106 INSTANTIATE_CLASS(ConcatLayer);