const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {}
virtual inline int ExactNumBottomBlobs() const { return 2; }
- virtual inline int ExactNumTopBlobs() const { return 0; }
+ virtual inline int MaxTopBlobs() const { return 1; }
};
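// Sketch, not part of this patch: swapping ExactNumTopBlobs() == 0 for
// MaxTopBlobs() == 1 lets a loss layer be wired with zero top blobs, as
// before, or with one that receives the scalar loss on the forward pass.
// A hypothetical setup-time count check under that contract:
//   CHECK_LE(static_cast<int>(top->size()), this->MaxTopBlobs())
//       << "Loss layers accept at most one (optional) top blob.";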
/* SigmoidCrossEntropyLossLayer
*/
/* AccuracyLayer
Note: not an actual loss layer! Does not implement backwards step.
- Computes the accuracy and logprob of a with respect to b.
+ Computes the accuracy of a with respect to b.
*/
template <typename Dtype>
class AccuracyLayer : public Layer<Dtype> {
diff_.mutable_cpu_data());
Dtype dot = caffe_cpu_dot(count, diff_.cpu_data(), diff_.cpu_data());
Dtype loss = dot / bottom[0]->num() / Dtype(2);
+ if (top->size() == 1) {
+   (*top)[0]->mutable_cpu_data()[0] = loss;
+ }
return loss;
}
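// Sketch, not part of this patch: the value stored above is the batch-mean
// halved squared error,
//   loss = (1 / (2 * num)) * sum_i (bottom[0]_i - bottom[1]_i)^2,
// i.e. dot(diff, diff) / num / 2 with diff_ = bottom[0] - bottom[1]. An
// equivalent scalar loop (a and b standing in for the two inputs' cpu_data()):
//   Dtype dot = 0;
//   for (int i = 0; i < count; ++i) {
//     Dtype d = a[i] - b[i];
//     dot += d * d;
//   }
//   Dtype loss = dot / bottom[0]->num() / Dtype(2);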
loss -= infogain_mat[label * dim + j] * log(prob);
}
}
+ if (top->size() == 1) {
+   (*top)[0]->mutable_cpu_data()[0] = loss / num;
+ }
return loss / num;
}
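// Sketch, not part of this patch: the quantity averaged here is
//   loss = -(1 / num) * sum_i sum_j H[label_i][j] * log(p_{i,j}),
// with H the infogain matrix and p_{i,j} the predicted probability of class
// j for sample i; when H is the identity this reduces to the multinomial
// logistic loss further down.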
Layer<Dtype>::SetUp(bottom, top);
CHECK_EQ(bottom[0]->num(), bottom[1]->num())
<< "The data and label should have the same number.";
+ if (top->size() == 1) {
+   // Layers should copy the loss into the top blob
+   (*top)[0]->Reshape(1, 1, 1, 1);
+ }
FurtherSetUp(bottom, top);
}
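// Sketch, not part of this patch: with the top blob reshaped to 1x1x1x1 at
// setup, callers can read the scalar loss from the blob after a forward
// pass instead of relying on the return value. Hypothetical usage (the
// variable names are illustrative):
//   loss_layer.Forward(bottom_vec, &top_vec);
//   Dtype loss = top_vec[0]->cpu_data()[0];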
Dtype prob = max(bottom_data[i * dim + label], Dtype(kLOG_THRESHOLD));
loss -= log(prob);
}
+ if (top->size() == 1) {
+   (*top)[0]->mutable_cpu_data()[0] = loss / num;
+ }
return loss / num;
}
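// Sketch, not part of this patch: this layer takes probabilities directly
// on its bottom blob and averages
//   loss = -(1 / num) * sum_i log(max(p_{i, label_i}, kLOG_THRESHOLD)),
// where the max() guards against taking log(0) on a zero-probability
// prediction.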
loss -= input_data[i] * (target[i] - (input_data[i] >= 0)) -
log(1 + exp(input_data[i] - 2 * input_data[i] * (input_data[i] >= 0)));
}
+ if (top->size() == 1) {
+   (*top)[0]->mutable_cpu_data()[0] = loss / num;
+ }
return loss / num;
}
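// Sketch, not part of this patch (the same hunk appears again below,
// presumably the GPU counterpart of this CPU forward pass): the loop above
// is the numerically stable form of the per-element cross-entropy with
// logit x = input_data[i] and target t = target[i],
//   CE = -(t * log(sigmoid(x)) + (1 - t) * log(1 - sigmoid(x)))
//      = max(x, 0) - x * t + log(1 + exp(-|x|)),
// where the (x >= 0) term selects the branch so exp() never receives a
// positive argument and cannot overflow.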
loss -= input_data[i] * (target[i] - (input_data[i] >= 0)) -
log(1 + exp(input_data[i] - 2 * input_data[i] * (input_data[i] >= 0)));
}
+ if (top->size() == 1) {
+   (*top)[0]->mutable_cpu_data()[0] = loss / num;
+ }
return loss / num;
}
loss += -log(max(prob_data[i * dim + static_cast<int>(label[i])],
Dtype(FLT_MIN)));
}
+ if (top->size() == 1) {
+   (*top)[0]->mutable_cpu_data()[0] = loss / num;
+ }
return loss / num;
}
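// Sketch, not part of this patch: the loss averaged here is the multinomial
// log-loss over the softmax output,
//   loss = -(1 / num) * sum_i log(max(p_{i, label_i}, FLT_MIN)),
// clipped at FLT_MIN so a zero predicted probability produces a large
// finite penalty rather than infinity.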