channel dim, outputting the result.
*/
template <typename Dtype>
+class ArgMaxLayer : public Layer<Dtype> {
+ public:
+ explicit ArgMaxLayer(const LayerParameter& param)
+ : Layer<Dtype>(param) {}
+ // Checks there is exactly one bottom and one top blob, reads
+ // out_max_val from the layer's ArgMax parameter, and reshapes top to
+ // (num, 2, 1, 1) when out_max_val is set, else (num, 1, 1, 1).
+ virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
+
+ protected:
+ // For each num, writes the index of the max over the flattened
+ // remaining axes (and the max value too when out_max_val_ is set).
+ virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
+ // For now ArgMax layer should not be used to compute backward operations.
+ virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+ const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
+ NOT_IMPLEMENTED;
+ }
+ // If true, top holds (argmax, maxval) pairs per num; otherwise argmax only.
+ bool out_max_val_;
+};
+
+template <typename Dtype>
class ConcatLayer : public Layer<Dtype> {
public:
explicit ConcatLayer(const LayerParameter& param)
switch (type) {
case LayerParameter_LayerType_ACCURACY:
return new AccuracyLayer<Dtype>(param);
+ case LayerParameter_LayerType_ARGMAX:
+ return new ArgMaxLayer<Dtype>(param);
case LayerParameter_LayerType_BNLL:
return new BNLLLayer<Dtype>(param);
case LayerParameter_LayerType_CONCAT:
--- /dev/null
+// Copyright 2014 BVLC and contributors.
+
+#include <algorithm>
+#include <cfloat>
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+using std::max;
+
+namespace caffe {
+
+template <typename Dtype>
+void ArgMaxLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
+      vector<Blob<Dtype>*>* top) {
+  CHECK_EQ(bottom.size(), 1) << "ArgMaxLayer Layer takes 1 input.";
+  CHECK_EQ(top->size(), 1) << "ArgMaxLayer Layer takes 1 output.";
+  out_max_val_ = this->layer_param_.argmax_param().out_max_val();
+  if (out_max_val_) {
+    // Produces per-num pairs (max_ind, max_val).
+    (*top)[0]->Reshape(bottom[0]->num(), 2, 1, 1);
+  } else {
+    // Produces only max_ind per num.
+    (*top)[0]->Reshape(bottom[0]->num(), 1, 1, 1);
+  }
+}
+
+template <typename Dtype>
+Dtype ArgMaxLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+    vector<Blob<Dtype>*>* top) {
+  const Dtype* bottom_data = bottom[0]->cpu_data();
+  Dtype* top_data = (*top)[0]->mutable_cpu_data();
+  int num = bottom[0]->num();
+  // Flatten all non-num axes into one dimension and scan it linearly.
+  int dim = bottom[0]->count() / bottom[0]->num();
+  for (int i = 0; i < num; ++i) {
+    Dtype max_val = -FLT_MAX;
+    int max_ind = 0;
+    for (int j = 0; j < dim; ++j) {
+      if (bottom_data[i * dim + j] > max_val) {
+        max_val = bottom_data[i * dim + j];
+        max_ind = j;
+      }
+    }
+    if (out_max_val_) {
+      // Interleaved output: top[i] = (argmax, maxval).
+      top_data[i * 2] = max_ind;
+      top_data[i * 2 + 1] = max_val;
+    } else {
+      top_data[i] = max_ind;
+    }
+  }
+  return Dtype(0);
+}
+
+INSTANTIATE_CLASS(ArgMaxLayer);
+
+
+} // namespace caffe
SPLIT = 22;
TANH = 23;
WINDOW_DATA = 24;
+ ARGMAX = 30;
}
optional LayerType type = 5; // the layer type from the enum above
optional PoolingParameter pooling_param = 19;
optional PowerParameter power_param = 21;
optional WindowDataParameter window_data_param = 20;
+ optional ArgMaxLayer argmax_param = 23;
// DEPRECATED: The layer parameters specified as a V0LayerParameter.
// This should never be used by any code except to upgrade to the new
optional V0LayerParameter layer = 1;
}
+// Message that stores parameters used by ArgMaxLayer
+// NOTE(review): for consistency with ConcatParameter / PoolingParameter this
+// message should probably be named ArgMaxParameter; the argmax_param field in
+// LayerParameter declares it as type ArgMaxLayer, so both must change together.
+message ArgMaxLayer {
+  // If true produce pairs (argmax, maxval)
+  optional bool out_max_val = 1 [default = false];
+}
+
// Message that stores parameters used by ConcatLayer
message ConcatParameter {
// Concat Layer needs to specify the dimension along the concat will happen,