: Layer<Dtype>(param) {}
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);
+ virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
virtual inline LayerParameter_LayerType type() const {
return LayerParameter_LayerType_ACCURACY;
: Layer<Dtype>(param) {}
virtual void LayerSetUp(
const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top);
+ virtual void Reshape(
+ const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top);
virtual inline int ExactNumBottomBlobs() const { return 2; }
public:
explicit EuclideanLossLayer(const LayerParameter& param)
: LossLayer<Dtype>(param), diff_() {}
- virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+ virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);
virtual inline LayerParameter_LayerType type() const {
: LossLayer<Dtype>(param), infogain_() {}
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);
+ virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
// InfogainLossLayer takes 2-3 bottom Blobs; if there are 3 the third should
// be the infogain matrix. (Otherwise the infogain matrix is loaded from a
public:
explicit MultinomialLogisticLossLayer(const LayerParameter& param)
: LossLayer<Dtype>(param) {}
- virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
+ virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);
virtual inline LayerParameter_LayerType type() const {
sigmoid_output_(new Blob<Dtype>()) {}
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);
+ virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
virtual inline LayerParameter_LayerType type() const {
return LayerParameter_LayerType_SIGMOID_CROSS_ENTROPY_LOSS;
softmax_layer_(new SoftmaxLayer<Dtype>(param)) {}
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);
+ virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
virtual inline LayerParameter_LayerType type() const {
return LayerParameter_LayerType_SOFTMAX_LOSS;
void AccuracyLayer<Dtype>::LayerSetUp(
const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
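+ // One-time setup: only the top_k parameter is read here.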
top_k_ = this->layer_param_.accuracy_param().top_k();
+}
+
+template <typename Dtype>
+void AccuracyLayer<Dtype>::Reshape(
+ const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
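+ // Reshape is called whenever the bottom shapes change, so the shape checks live here.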
CHECK_EQ(bottom[0]->num(), bottom[1]->num())
<< "The data and label should have the same number.";
CHECK_LE(top_k_, bottom[0]->count() / bottom[0]->num())
namespace caffe {
template <typename Dtype>
-void EuclideanLossLayer<Dtype>::LayerSetUp(
+void EuclideanLossLayer<Dtype>::Reshape(
const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
- LossLayer<Dtype>::LayerSetUp(bottom, top);
+ LossLayer<Dtype>::Reshape(bottom, top);
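+ // Both inputs must match in every dimension so the element-wise difference is well defined.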
CHECK_EQ(bottom[0]->channels(), bottom[1]->channels());
CHECK_EQ(bottom[0]->height(), bottom[1]->height());
CHECK_EQ(bottom[0]->width(), bottom[1]->width());
void InfogainLossLayer<Dtype>::LayerSetUp(
const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
LossLayer<Dtype>::LayerSetUp(bottom, top);
- CHECK_EQ(bottom[1]->channels(), 1);
- CHECK_EQ(bottom[1]->height(), 1);
- CHECK_EQ(bottom[1]->width(), 1);
- Blob<Dtype>* infogain = NULL;
if (bottom.size() < 3) {
CHECK(this->layer_param_.infogain_loss_param().has_source())
<< "Infogain matrix source must be specified.";
BlobProto blob_proto;
ReadProtoFromBinaryFile(
this->layer_param_.infogain_loss_param().source(), &blob_proto);
infogain_.FromProto(blob_proto);
+ }
+}
+
+template <typename Dtype>
+void InfogainLossLayer<Dtype>::Reshape(
+ const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
+ LossLayer<Dtype>::Reshape(bottom, top);
+ Blob<Dtype>* infogain = NULL;
+ if (bottom.size() < 3) {
infogain = &infogain_;
} else {
infogain = bottom[2];
}
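+ // The label blob holds a single label index per instance.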
+ CHECK_EQ(bottom[1]->channels(), 1);
+ CHECK_EQ(bottom[1]->height(), 1);
+ CHECK_EQ(bottom[1]->width(), 1);
const int num = bottom[0]->num();
const int dim = bottom[0]->count() / num;
CHECK_EQ(infogain->num(), 1);
template <typename Dtype>
void LossLayer<Dtype>::LayerSetUp(
const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
- CHECK_EQ(bottom[0]->num(), bottom[1]->num())
- << "The data and label should have the same number.";
- (*top)[0]->Reshape(1, 1, 1, 1);
// LossLayers have a non-zero (1) loss by default.
if (this->layer_param_.loss_weight_size() == 0) {
this->layer_param_.add_loss_weight(Dtype(1));
}
}
+template <typename Dtype>
+void LossLayer<Dtype>::Reshape(
+ const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
+ CHECK_EQ(bottom[0]->num(), bottom[1]->num())
+ << "The data and label should have the same number.";
+ (*top)[0]->Reshape(1, 1, 1, 1);
+}
+
INSTANTIATE_CLASS(LossLayer);
} // namespace caffe
namespace caffe {
template <typename Dtype>
-void MultinomialLogisticLossLayer<Dtype>::LayerSetUp(
+void MultinomialLogisticLossLayer<Dtype>::Reshape(
const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
- LossLayer<Dtype>::LayerSetUp(bottom, top);
+ LossLayer<Dtype>::Reshape(bottom, top);
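+ // The label blob must hold a single label index per instance.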
CHECK_EQ(bottom[1]->channels(), 1);
CHECK_EQ(bottom[1]->height(), 1);
CHECK_EQ(bottom[1]->width(), 1);
void SigmoidCrossEntropyLossLayer<Dtype>::LayerSetUp(
const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
LossLayer<Dtype>::LayerSetUp(bottom, top);
- CHECK_EQ(bottom[0]->count(), bottom[1]->count()) <<
- "SIGMOID_CROSS_ENTROPY_LOSS layer inputs must have the same count.";
sigmoid_bottom_vec_.clear();
sigmoid_bottom_vec_.push_back(bottom[0]);
sigmoid_top_vec_.clear();
sigmoid_top_vec_.push_back(sigmoid_output_.get());
sigmoid_layer_->SetUp(sigmoid_bottom_vec_, &sigmoid_top_vec_);
}
template <typename Dtype>
+void SigmoidCrossEntropyLossLayer<Dtype>::Reshape(
+ const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
+ LossLayer<Dtype>::Reshape(bottom, top);
+ CHECK_EQ(bottom[0]->count(), bottom[1]->count()) <<
+ "SIGMOID_CROSS_ENTROPY_LOSS layer inputs must have the same count.";
+ sigmoid_layer_->Reshape(sigmoid_bottom_vec_, &sigmoid_top_vec_);
+}
+
+template <typename Dtype>
void SigmoidCrossEntropyLossLayer<Dtype>::Forward_cpu(
const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
// The forward pass computes the sigmoid outputs.
softmax_top_vec_.clear();
softmax_top_vec_.push_back(&prob_);
softmax_layer_->SetUp(softmax_bottom_vec_, &softmax_top_vec_);
+}
+
+template <typename Dtype>
+void SoftmaxWithLossLayer<Dtype>::Reshape(
+ const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
+ LossLayer<Dtype>::Reshape(bottom, top);
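+ // Let the internal softmax layer adapt to the (possibly new) bottom shape.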
+ softmax_layer_->Reshape(softmax_bottom_vec_, &softmax_top_vec_);
if (top->size() >= 2) {
// softmax output
(*top)[1]->ReshapeLike(*bottom[0]);