virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_ARGMAX;
- }
+ virtual inline const char* type() const { return "ArgMax"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_CONCAT;
- }
+ virtual inline const char* type() const { return "Concat"; }
virtual inline int MinBottomBlobs() const { return 2; }
virtual inline int ExactNumTopBlobs() const { return 1; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_ELTWISE;
- }
+ virtual inline const char* type() const { return "Eltwise"; }
virtual inline int MinBottomBlobs() const { return 2; }
virtual inline int ExactNumTopBlobs() const { return 1; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_FLATTEN;
- }
+ virtual inline const char* type() const { return "Flatten"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_INNER_PRODUCT;
- }
+ virtual inline const char* type() const { return "InnerProduct"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_MVN;
- }
+ virtual inline const char* type() const { return "MVN"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SILENCE;
- }
+ virtual inline const char* type() const { return "Silence"; }
virtual inline int MinBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 0; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SOFTMAX;
- }
+ virtual inline const char* type() const { return "Softmax"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SPLIT;
- }
+ virtual inline const char* type() const { return "Split"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int MinTopBlobs() const { return 1; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SLICE;
- }
+ virtual inline const char* type() const { return "Slice"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int MinTopBlobs() const { return 2; }
virtual void DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_DATA;
- }
+ virtual inline const char* type() const { return "Data"; }
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int MinTopBlobs() const { return 1; }
virtual inline int MaxTopBlobs() const { return 2; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_DUMMY_DATA;
- }
+ virtual inline const char* type() const { return "DummyData"; }
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int MinTopBlobs() const { return 1; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_HDF5_DATA;
- }
+ virtual inline const char* type() const { return "HDF5Data"; }
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int MinTopBlobs() const { return 1; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_HDF5_OUTPUT;
- }
+ virtual inline const char* type() const { return "HDF5Output"; }
// TODO: no limit on the number of blobs
virtual inline int ExactNumBottomBlobs() const { return 2; }
virtual inline int ExactNumTopBlobs() const { return 0; }
virtual void DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_IMAGE_DATA;
- }
+ virtual inline const char* type() const { return "ImageData"; }
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int ExactNumTopBlobs() const { return 2; }
virtual void DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_MEMORY_DATA;
- }
+ virtual inline const char* type() const { return "MemoryData"; }
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int ExactNumTopBlobs() const { return 2; }
virtual void DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_WINDOW_DATA;
- }
+ virtual inline const char* type() const { return "WindowData"; }
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int ExactNumTopBlobs() const { return 2; }
}
/**
- * @brief Returns the layer type as an enum value.
+ * @brief Returns the layer type.
*/
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_NONE;
- }
-
- /**
- * @brief Returns the layer type name.
- */
- virtual inline const string& type_name() const {
- return LayerParameter_LayerType_Name(type());
- }
+ virtual inline const char* type() const { return ""; }
/**
* @brief Returns the exact number of bottom blobs required by the layer,
const vector<Blob<Dtype>*>& top) {
if (ExactNumBottomBlobs() >= 0) {
CHECK_EQ(ExactNumBottomBlobs(), bottom.size())
- << type_name() << " Layer takes " << ExactNumBottomBlobs()
+ << type() << " Layer takes " << ExactNumBottomBlobs()
<< " bottom blob(s) as input.";
}
if (MinBottomBlobs() >= 0) {
CHECK_LE(MinBottomBlobs(), bottom.size())
- << type_name() << " Layer takes at least " << MinBottomBlobs()
+ << type() << " Layer takes at least " << MinBottomBlobs()
<< " bottom blob(s) as input.";
}
if (MaxBottomBlobs() >= 0) {
CHECK_GE(MaxBottomBlobs(), bottom.size())
- << type_name() << " Layer takes at most " << MaxBottomBlobs()
+ << type() << " Layer takes at most " << MaxBottomBlobs()
<< " bottom blob(s) as input.";
}
if (ExactNumTopBlobs() >= 0) {
CHECK_EQ(ExactNumTopBlobs(), top.size())
- << type_name() << " Layer produces " << ExactNumTopBlobs()
+ << type() << " Layer produces " << ExactNumTopBlobs()
<< " top blob(s) as output.";
}
if (MinTopBlobs() >= 0) {
CHECK_LE(MinTopBlobs(), top.size())
- << type_name() << " Layer produces at least " << MinTopBlobs()
+ << type() << " Layer produces at least " << MinTopBlobs()
<< " top blob(s) as output.";
}
if (MaxTopBlobs() >= 0) {
CHECK_GE(MaxTopBlobs(), top.size())
- << type_name() << " Layer produces at most " << MaxTopBlobs()
+ << type() << " Layer produces at most " << MaxTopBlobs()
<< " top blob(s) as output.";
}
if (EqualNumBottomTopBlobs()) {
CHECK_EQ(bottom.size(), top.size())
- << type_name() << " Layer produces one top blob as output for each "
+ << type() << " Layer produces one top blob as output for each "
<< "bottom blob input.";
}
}
* // your implementations
* };
*
- * and its type is defined in the protobuffer as
- *
- * enum LayerType {
- * // other definitions
- * AWESOME = 46,
- * }
+ * and its type is its C++ class name, but without the "Layer" at the end
+ * ("MyAwesomeLayer" -> "MyAwesome").
*
* If the layer is going to be created simply by its constructor, in your c++
* file, add the following line:
*
- * REGISTER_LAYER_CLASS(AWESOME, MyAwesomeLayer);
+ * REGISTER_LAYER_CLASS(MyAwesome);
*
* Or, if the layer is going to be created by another creator function, in the
* format of:
* (for example, when your layer has multiple backends, see GetConvolutionLayer
* for a use case), then you can register the creator function instead, like
*
- * REGISTER_LAYER_CREATOR(AWESOME, GetMyAwesomeLayer)
+ * REGISTER_LAYER_CREATOR(MyAwesome, GetMyAwesomeLayer)
*
* Note that each layer type should only be registered once.
*/
#define CAFFE_LAYER_FACTORY_H_
#include <map>
+#include <string>
#include "caffe/common.hpp"
#include "caffe/proto/caffe.pb.h"
class LayerRegistry {
public:
typedef Layer<Dtype>* (*Creator)(const LayerParameter&);
- typedef std::map<LayerParameter_LayerType, Creator> CreatorRegistry;
+ typedef std::map<string, Creator> CreatorRegistry;
static CreatorRegistry& Registry() {
static CreatorRegistry* g_registry_ = new CreatorRegistry();
}
// Adds a creator.
- static void AddCreator(const LayerParameter_LayerType& type,
- Creator creator) {
+ static void AddCreator(const string& type, Creator creator) {
CreatorRegistry& registry = Registry();
CHECK_EQ(registry.count(type), 0)
<< "Layer type " << type << " already registered.";
// Get a layer using a LayerParameter.
+ // The registry is now keyed by the layer's string type name (e.g.
+ // "Convolution") instead of the old LayerParameter_LayerType enum value.
static Layer<Dtype>* CreateLayer(const LayerParameter& param) {
LOG(INFO) << "Creating layer " << param.name();
- const LayerParameter_LayerType& type = param.type();
+ const string& type = param.type();
CreatorRegistry& registry = Registry();
- CHECK_EQ(registry.count(type), 1);
+ // Fail with a message listing every registered type, rather than the
+ // old bare CHECK failure, so typos in prototxt files are easy to spot.
+ CHECK_EQ(registry.count(type), 1) << "Unknown layer type: " << type
+ << " (known types: " << LayerTypeList() << ")";
return registry[type](param);
}
// Layer registry should never be instantiated - everything is done with its
// static variables.
LayerRegistry() {}
+
+ // Returns a comma-separated list of all registered layer type names
+ // (e.g. "Convolution, Pooling, ReLU"). Used by CreateLayer() to build
+ // a helpful error message when an unknown type is requested.
+ static string LayerTypeList() {
+ CreatorRegistry& registry = Registry();
+ string layer_types;
+ for (typename CreatorRegistry::iterator iter = registry.begin();
+ iter != registry.end(); ++iter) {
+ if (iter != registry.begin()) {
+ layer_types += ", ";
+ }
+ layer_types += iter->first;
+ }
+ return layer_types;
+ }
};
template <typename Dtype>
class LayerRegisterer {
public:
- LayerRegisterer(const LayerParameter_LayerType& type,
+ LayerRegisterer(const string& type,
Layer<Dtype>* (*creator)(const LayerParameter&)) {
// LOG(INFO) << "Registering layer type: " << type;
LayerRegistry<Dtype>::AddCreator(type, creator);
#define REGISTER_LAYER_CREATOR(type, creator) \
- static LayerRegisterer<float> g_creator_f_##type( \
- LayerParameter_LayerType_##type, creator<float>); \
- static LayerRegisterer<double> g_creator_d_##type( \
- LayerParameter_LayerType_##type, creator<double>)
+ static LayerRegisterer<float> g_creator_f_##type(#type, creator<float>); \
+ static LayerRegisterer<double> g_creator_d_##type(#type, creator<double>) \
-#define REGISTER_LAYER_CLASS(type, clsname) \
+#define REGISTER_LAYER_CLASS(type) \
template <typename Dtype> \
- Layer<Dtype>* Creator_##clsname(const LayerParameter& param) { \
- return new clsname<Dtype>(param); \
+ Layer<Dtype>* Creator_##type##Layer(const LayerParameter& param) { \
+ return new type##Layer<Dtype>(param); \
} \
- REGISTER_LAYER_CREATOR(type, Creator_##clsname)
+ REGISTER_LAYER_CREATOR(type, Creator_##type##Layer)
} // namespace caffe
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_ACCURACY;
- }
-
+ virtual inline const char* type() const { return "Accuracy"; }
virtual inline int ExactNumBottomBlobs() const { return 2; }
virtual inline int ExactNumTopBlobs() const { return 1; }
const vector<Blob<Dtype>*>& top);
virtual inline int ExactNumBottomBlobs() const { return 3; }
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_CONTRASTIVE_LOSS;
- }
+ virtual inline const char* type() const { return "ContrastiveLoss"; }
/**
* Unlike most loss layers, in the ContrastiveLossLayer we can backpropagate
* to the first two inputs.
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_EUCLIDEAN_LOSS;
- }
-
+ virtual inline const char* type() const { return "EuclideanLoss"; }
/**
* Unlike most loss layers, in the EuclideanLossLayer we can backpropagate
* to both inputs -- override to return true and always allow force_backward.
explicit HingeLossLayer(const LayerParameter& param)
: LossLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_HINGE_LOSS;
- }
+ virtual inline const char* type() const { return "HingeLoss"; }
protected:
/// @copydoc HingeLossLayer
virtual inline int MinBottomBlobs() const { return 2; }
virtual inline int MaxBottomBlobs() const { return 3; }
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_INFOGAIN_LOSS;
- }
+ virtual inline const char* type() const { return "InfogainLoss"; }
protected:
/// @copydoc InfogainLossLayer
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS;
- }
+ virtual inline const char* type() const { return "MultinomialLogisticLoss"; }
protected:
/// @copydoc MultinomialLogisticLossLayer
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SIGMOID_CROSS_ENTROPY_LOSS;
- }
+ virtual inline const char* type() const { return "SigmoidCrossEntropyLoss"; }
protected:
/// @copydoc SigmoidCrossEntropyLossLayer
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SOFTMAX_LOSS;
- }
+ virtual inline const char* type() const { return "SoftmaxWithLoss"; }
virtual inline int ExactNumBottomBlobs() const { return -1; }
virtual inline int MinBottomBlobs() const { return 2; }
virtual inline int MaxBottomBlobs() const { return 3; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_NONE;
- }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
};
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_ABSVAL;
- }
+ virtual inline const char* type() const { return "AbsVal"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
explicit BNLLLayer(const LayerParameter& param)
: NeuronLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_BNLL;
- }
+ virtual inline const char* type() const { return "BNLL"; }
protected:
/// @copydoc BNLLLayer
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_DROPOUT;
- }
+ virtual inline const char* type() const { return "Dropout"; }
protected:
/**
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_EXP;
- }
+ virtual inline const char* type() const { return "Exp"; }
protected:
/**
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_POWER;
- }
+ virtual inline const char* type() const { return "Power"; }
protected:
/**
explicit ReLULayer(const LayerParameter& param)
: NeuronLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_RELU;
- }
+ virtual inline const char* type() const { return "ReLU"; }
protected:
/**
explicit SigmoidLayer(const LayerParameter& param)
: NeuronLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_SIGMOID;
- }
+ virtual inline const char* type() const { return "Sigmoid"; }
protected:
/**
explicit TanHLayer(const LayerParameter& param)
: NeuronLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_TANH;
- }
+ virtual inline const char* type() const { return "TanH"; }
protected:
/**
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_THRESHOLD;
- }
+ virtual inline const char* type() const { return "Threshold"; }
protected:
/**
bool UpgradeLayerParameter(const LayerParameter& v0_layer_connection,
LayerParameter* layer_param);
-LayerParameter_LayerType UpgradeV0LayerType(const string& type);
+const char* UpgradeV0LayerType(const string& type);
// Return true iff any layer contains deprecated data transformation parameters.
bool NetNeedsDataUpgrade(const NetParameter& net_param);
*/
explicit ConvolutionLayer(const LayerParameter& param)
: BaseConvolutionLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_CONVOLUTION;
- }
+
+ virtual inline const char* type() const { return "Convolution"; }
protected:
virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
public:
explicit DeconvolutionLayer(const LayerParameter& param)
: BaseConvolutionLayer<Dtype>(param) {}
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_DECONVOLUTION;
- }
+
+ virtual inline const char* type() const { return "Deconvolution"; }
protected:
virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_IM2COL;
- }
+ virtual inline const char* type() const { return "Im2col"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_LRN;
- }
+ virtual inline const char* type() const { return "LRN"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int ExactNumTopBlobs() const { return 1; }
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
- virtual inline LayerParameter_LayerType type() const {
- return LayerParameter_LayerType_POOLING;
- }
+ virtual inline const char* type() const { return "Pooling"; }
virtual inline int ExactNumBottomBlobs() const { return 1; }
virtual inline int MinTopBlobs() const { return 1; }
// MAX POOL layers can output an extra top blob for the mask;
}
}
-REGISTER_LAYER_CREATOR(CONVOLUTION, GetConvolutionLayer);
+REGISTER_LAYER_CREATOR(Convolution, GetConvolutionLayer);
// Get pooling layer according to engine.
template <typename Dtype>
}
}
-REGISTER_LAYER_CREATOR(POOLING, GetPoolingLayer);
+REGISTER_LAYER_CREATOR(Pooling, GetPoolingLayer);
// Get relu layer according to engine.
template <typename Dtype>
}
}
-REGISTER_LAYER_CREATOR(RELU, GetReLULayer);
+REGISTER_LAYER_CREATOR(ReLU, GetReLULayer);
// Get sigmoid layer according to engine.
template <typename Dtype>
}
}
-REGISTER_LAYER_CREATOR(SIGMOID, GetSigmoidLayer);
+REGISTER_LAYER_CREATOR(Sigmoid, GetSigmoidLayer);
// Get softmax layer according to engine.
template <typename Dtype>
}
}
-REGISTER_LAYER_CREATOR(SOFTMAX, GetSoftmaxLayer);
+REGISTER_LAYER_CREATOR(Softmax, GetSoftmaxLayer);
// Get tanh layer according to engine.
template <typename Dtype>
}
}
-REGISTER_LAYER_CREATOR(TANH, GetTanHLayer);
+REGISTER_LAYER_CREATOR(TanH, GetTanHLayer);
// Layers that use their constructor as their default creator should be
// registered in their corresponding cpp files. Do not register them here.
void AbsValLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {
NeuronLayer<Dtype>::LayerSetUp(bottom, top);
- CHECK_NE(top[0], bottom[0]) << this->type_name() << " Layer does not "
+ CHECK_NE(top[0], bottom[0]) << this->type() << " Layer does not "
"allow in-place computation.";
}
#endif
INSTANTIATE_CLASS(AbsValLayer);
-REGISTER_LAYER_CLASS(ABSVAL, AbsValLayer);
+REGISTER_LAYER_CLASS(AbsVal);
+
} // namespace caffe
}
INSTANTIATE_CLASS(AccuracyLayer);
-REGISTER_LAYER_CLASS(ACCURACY, AccuracyLayer);
+REGISTER_LAYER_CLASS(Accuracy);
+
} // namespace caffe
}
INSTANTIATE_CLASS(ArgMaxLayer);
-REGISTER_LAYER_CLASS(ARGMAX, ArgMaxLayer);
+REGISTER_LAYER_CLASS(ArgMax);
} // namespace caffe
#endif
INSTANTIATE_CLASS(BNLLLayer);
-REGISTER_LAYER_CLASS(BNLL, BNLLLayer);
+REGISTER_LAYER_CLASS(BNLL);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(ConcatLayer);
-REGISTER_LAYER_CLASS(CONCAT, ConcatLayer);
+REGISTER_LAYER_CLASS(Concat);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(ContrastiveLossLayer);
-REGISTER_LAYER_CLASS(CONTRASTIVE_LOSS, ContrastiveLossLayer);
+REGISTER_LAYER_CLASS(ContrastiveLoss);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(ConvolutionLayer);
+
} // namespace caffe
}
INSTANTIATE_CLASS(DataLayer);
-REGISTER_LAYER_CLASS(DATA, DataLayer);
+REGISTER_LAYER_CLASS(Data);
} // namespace caffe
#endif
INSTANTIATE_CLASS(DeconvolutionLayer);
-REGISTER_LAYER_CLASS(DECONVOLUTION, DeconvolutionLayer);
+REGISTER_LAYER_CLASS(Deconvolution);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(DropoutLayer);
-REGISTER_LAYER_CLASS(DROPOUT, DropoutLayer);
+REGISTER_LAYER_CLASS(Dropout);
+
} // namespace caffe
}
INSTANTIATE_CLASS(DummyDataLayer);
-REGISTER_LAYER_CLASS(DUMMY_DATA, DummyDataLayer);
+REGISTER_LAYER_CLASS(DummyData);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(EltwiseLayer);
-REGISTER_LAYER_CLASS(ELTWISE, EltwiseLayer);
+REGISTER_LAYER_CLASS(Eltwise);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(EuclideanLossLayer);
-REGISTER_LAYER_CLASS(EUCLIDEAN_LOSS, EuclideanLossLayer);
+REGISTER_LAYER_CLASS(EuclideanLoss);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(ExpLayer);
-REGISTER_LAYER_CLASS(EXP, ExpLayer);
+REGISTER_LAYER_CLASS(Exp);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(FlattenLayer);
-REGISTER_LAYER_CLASS(FLATTEN, FlattenLayer);
+REGISTER_LAYER_CLASS(Flatten);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(HDF5DataLayer);
-REGISTER_LAYER_CLASS(HDF5_DATA, HDF5DataLayer);
+REGISTER_LAYER_CLASS(HDF5Data);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(HDF5OutputLayer);
-REGISTER_LAYER_CLASS(HDF5_OUTPUT, HDF5OutputLayer);
+REGISTER_LAYER_CLASS(HDF5Output);
+
} // namespace caffe
void HingeLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[1]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to label inputs.";
}
if (propagate_down[0]) {
}
INSTANTIATE_CLASS(HingeLossLayer);
-REGISTER_LAYER_CLASS(HINGE_LOSS, HingeLossLayer);
+REGISTER_LAYER_CLASS(HingeLoss);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(Im2colLayer);
-REGISTER_LAYER_CLASS(IM2COL, Im2colLayer);
+REGISTER_LAYER_CLASS(Im2col);
+
} // namespace caffe
}
INSTANTIATE_CLASS(ImageDataLayer);
-REGISTER_LAYER_CLASS(IMAGE_DATA, ImageDataLayer);
+REGISTER_LAYER_CLASS(ImageData);
+
} // namespace caffe
const vector<bool>& propagate_down,
const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[1]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to label inputs.";
}
if (propagate_down.size() > 2 && propagate_down[2]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to infogain inputs.";
}
if (propagate_down[0]) {
}
INSTANTIATE_CLASS(InfogainLossLayer);
-REGISTER_LAYER_CLASS(INFOGAIN_LOSS, InfogainLossLayer);
+REGISTER_LAYER_CLASS(InfogainLoss);
} // namespace caffe
#endif
INSTANTIATE_CLASS(InnerProductLayer);
-REGISTER_LAYER_CLASS(INNER_PRODUCT, InnerProductLayer);
+REGISTER_LAYER_CLASS(InnerProduct);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(LRNLayer);
-REGISTER_LAYER_CLASS(LRN, LRNLayer);
+REGISTER_LAYER_CLASS(LRN);
+
} // namespace caffe
}
INSTANTIATE_CLASS(MemoryDataLayer);
-REGISTER_LAYER_CLASS(MEMORY_DATA, MemoryDataLayer);
+REGISTER_LAYER_CLASS(MemoryData);
+
} // namespace caffe
const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down,
const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[1]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to label inputs.";
}
if (propagate_down[0]) {
}
INSTANTIATE_CLASS(MultinomialLogisticLossLayer);
-REGISTER_LAYER_CLASS(MULTINOMIAL_LOGISTIC_LOSS, MultinomialLogisticLossLayer);
+REGISTER_LAYER_CLASS(MultinomialLogisticLoss);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(MVNLayer);
-REGISTER_LAYER_CLASS(MVN, MVNLayer);
+REGISTER_LAYER_CLASS(MVN);
+
} // namespace caffe
INSTANTIATE_CLASS(PoolingLayer);
-
} // namespace caffe
#endif
INSTANTIATE_CLASS(PowerLayer);
-REGISTER_LAYER_CLASS(POWER, PowerLayer);
+REGISTER_LAYER_CLASS(Power);
+
} // namespace caffe
INSTANTIATE_CLASS(ReLULayer);
-
} // namespace caffe
const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down,
const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[1]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to label inputs.";
}
if (propagate_down[0]) {
#endif
INSTANTIATE_CLASS(SigmoidCrossEntropyLossLayer);
-REGISTER_LAYER_CLASS(SIGMOID_CROSS_ENTROPY_LOSS, SigmoidCrossEntropyLossLayer);
+REGISTER_LAYER_CLASS(SigmoidCrossEntropyLoss);
+
} // namespace caffe
const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down,
const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[1]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to label inputs.";
}
if (propagate_down[0]) {
#endif
INSTANTIATE_CLASS(SilenceLayer);
-REGISTER_LAYER_CLASS(SILENCE, SilenceLayer);
+REGISTER_LAYER_CLASS(Silence);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(SliceLayer);
-REGISTER_LAYER_CLASS(SLICE, SliceLayer);
+REGISTER_LAYER_CLASS(Slice);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(SoftmaxLayer);
+
} // namespace caffe
const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
LossLayer<Dtype>::LayerSetUp(bottom, top);
LayerParameter softmax_param(this->layer_param_);
- softmax_param.set_type(LayerParameter_LayerType_SOFTMAX);
+ softmax_param.set_type("Softmax");
softmax_layer_.reset(LayerRegistry<Dtype>::CreateLayer(softmax_param));
softmax_bottom_vec_.clear();
softmax_bottom_vec_.push_back(bottom[0]);
void SoftmaxWithLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
if (propagate_down[1]) {
- LOG(FATAL) << this->type_name()
+ LOG(FATAL) << this->type()
<< " Layer cannot backpropagate to label inputs.";
}
if (propagate_down[0]) {
}
INSTANTIATE_CLASS(SoftmaxWithLossLayer);
-REGISTER_LAYER_CLASS(SOFTMAX_LOSS, SoftmaxWithLossLayer);
+REGISTER_LAYER_CLASS(SoftmaxWithLoss);
} // namespace caffe
// the backward pass. (Technically, it should be possible to share the diff
// blob of the first split output with the input, but this seems to cause
// some strange effects in practice...)
- CHECK_NE(top[i], bottom[0]) << this->type_name() << " Layer does not "
+ CHECK_NE(top[i], bottom[0]) << this->type() << " Layer does not "
"allow in-place computation.";
top[i]->Reshape(bottom[0]->num(), bottom[0]->channels(),
bottom[0]->height(), bottom[0]->width());
#endif
INSTANTIATE_CLASS(SplitLayer);
-REGISTER_LAYER_CLASS(SPLIT, SplitLayer);
+REGISTER_LAYER_CLASS(Split);
+
} // namespace caffe
#endif
INSTANTIATE_CLASS(ThresholdLayer);
-REGISTER_LAYER_CLASS(THRESHOLD, ThresholdLayer);
+REGISTER_LAYER_CLASS(Threshold);
+
} // namespace caffe
}
INSTANTIATE_CLASS(WindowDataLayer);
-REGISTER_LAYER_CLASS(WINDOW_DATA, WindowDataLayer);
+REGISTER_LAYER_CLASS(WindowData);
+
} // namespace caffe
repeated NetStateRule include = 32;
repeated NetStateRule exclude = 33;
- // NOTE
- // Add new LayerTypes to the enum below in lexicographical order (other than
- // starting with NONE), starting with the next available ID in the comment
- // line above the enum. Update the next available ID when you add a new
- // LayerType.
- //
- // LayerType next available ID: 40 (last added: DECONVOLUTION)
- enum LayerType {
- // "NONE" layer type is 0th enum element so that we don't cause confusion
- // by defaulting to an existent LayerType (instead, should usually error if
- // the type is unspecified).
- NONE = 0;
- ABSVAL = 35;
- ACCURACY = 1;
- ARGMAX = 30;
- BNLL = 2;
- CONCAT = 3;
- CONTRASTIVE_LOSS = 37;
- CONVOLUTION = 4;
- DATA = 5;
- DECONVOLUTION = 39;
- DROPOUT = 6;
- DUMMY_DATA = 32;
- EUCLIDEAN_LOSS = 7;
- ELTWISE = 25;
- EXP = 38;
- FLATTEN = 8;
- HDF5_DATA = 9;
- HDF5_OUTPUT = 10;
- HINGE_LOSS = 28;
- IM2COL = 11;
- IMAGE_DATA = 12;
- INFOGAIN_LOSS = 13;
- INNER_PRODUCT = 14;
- LRN = 15;
- MEMORY_DATA = 29;
- MULTINOMIAL_LOGISTIC_LOSS = 16;
- MVN = 34;
- POOLING = 17;
- POWER = 26;
- RELU = 18;
- SIGMOID = 19;
- SIGMOID_CROSS_ENTROPY_LOSS = 27;
- SILENCE = 36;
- SOFTMAX = 20;
- SOFTMAX_LOSS = 21;
- SPLIT = 22;
- SLICE = 33;
- TANH = 23;
- WINDOW_DATA = 24;
- THRESHOLD = 31;
- }
- optional LayerType type = 5; // the layer type from the enum above
+ optional string type = 5; // the layer type name, e.g. "Convolution"
// The blobs containing the numeric parameters of the layer
repeated BlobProto blobs = 6;
" name: 'TestNetwork' "
" layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: " << num_ << " "
" channels: " << channels_ << " "
" } "
" layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1 "
" weight_filler { "
" } "
" layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod' "
" bottom: 'targets' "
" } "
"name: 'TinyTestNetwork' "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
"} "
"layers: { "
" name: 'innerproduct' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1000 "
" weight_filler { "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerproduct' "
" bottom: 'label' "
" top: 'top_loss' "
proto +=
"layers: { "
" name: 'loss' "
- " type: ACCURACY "
+ " type: 'Accuracy' "
" bottom: 'innerproduct' "
" bottom: 'label' "
" top: 'accuracy' "
"name: 'TinyTestEuclidLossNetwork' "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
"} "
"layers: { "
" name: 'innerproduct' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1 "
" weight_filler { "
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerproduct' "
" bottom: 'label' "
"} ";
"name: 'TrickyTestNetwork' "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
"} "
"layers: { "
" name: 'innerproduct' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1000 "
" weight_filler { "
"} "
"layers: { "
" name: 'innerproduct' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1 "
" weight_filler { "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS " +
+ " type: 'SoftmaxWithLoss' " +
loss_weight_stream.str() +
" bottom: 'transformed_data' "
" bottom: 'transformed_label' "
InitNetFromProtoString(proto);
}
- // loss_weight is the loss weight for the EUCLIDEAN_LOSS layer output.
- // midnet_loss_weight is the loss weight for the first INNER_PRODUCT layer
+ // loss_weight is the loss weight for the 'EuclideanLoss' layer output.
+ // midnet_loss_weight is the loss weight for the first 'InnerProduct' layer
// output. Should both default to 0.0 if unspecified (i.e., if NULL is
// passed to this function).
virtual void InitUnsharedWeightsNet(const Dtype* loss_weight = NULL,
proto <<
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
"} "
"layers: { "
" name: 'innerproduct1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: " << bias_term <<
"} "
"layers: { "
" name: 'innerproduct2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: " << bias_term <<
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS ";
+ " type: 'EuclideanLoss' ";
if (loss_weight) {
proto << " loss_weight: " << *loss_weight << " ";
}
"name: 'SharedWeightsNetwork' "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
"} "
"layers: { "
" name: 'innerproduct1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
"} "
"layers: { "
" name: 'innerproduct2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerproduct1' "
" bottom: 'innerproduct2' "
"} ";
"name: 'DiffDataUnsharedWeightsNetwork' "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 10 "
" channels: 10 "
"} "
"layers: { "
" name: 'innerproduct1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
"} "
"layers: { "
" name: 'innerproduct2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'data2' "
" bottom: 'innerproduct2' "
"} ";
"name: 'DiffDataSharedWeightsNetwork' "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 10 "
" channels: 10 "
"} "
"layers: { "
" name: 'innerproduct1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
"} "
"layers: { "
" name: 'innerproduct2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'data2' "
" bottom: 'innerproduct2' "
"} ";
"input_dim: 100 "
"layers: { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" bottom: 'data' "
" top: 'conv1' "
" convolution_param { "
"} "
"layers: { "
" name: 'relu1' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv1' "
" top: 'conv1' "
"} "
"layers: { "
" name: 'pool1' "
- " type: POOLING "
+ " type: 'Pooling' "
" bottom: 'conv1' "
" top: 'pool1' "
" pooling_param { "
"} "
"layers: { "
" name: 'norm1' "
- " type: LRN "
+ " type: 'LRN' "
" bottom: 'pool1' "
" top: 'norm1' "
" lrn_param { "
"} "
"layers: { "
" name: 'softmax' "
- " type: SOFTMAX "
+ " type: 'Softmax' "
" bottom: 'norm1' "
" top: 'softmax' "
"} ";
TYPED_TEST(NetTest, TestLossWeight) {
typedef typename TypeParam::Dtype Dtype;
// First, compute the loss and gradients with no loss_weight specified.
- // In this case, the loss weight for the EUCLIDEAN_LOSS layer should default
+ // In this case, the loss weight for the 'EuclideanLoss' layer should default
// to 1.
vector<Blob<Dtype>*> bottom;
Caffe::set_random_seed(this->seed_);
const bool kForceBackward = true;
const Dtype kErrorMargin = 1e-4;
- // Get the loss and gradients with EUCLIDEAN_LOSS weight 1,
- // INNER_PRODUCT weight 1.
+ // Get the loss and gradients with 'EuclideanLoss' weight 1,
+ // 'InnerProduct' weight 1.
loss_weight = 1;
midnet_loss_weight = 1;
Caffe::set_random_seed(this->seed_);
this->InitTinyNet(kForceBackward, kAccuracyLayer);
EXPECT_TRUE(this->net_->has_blob("accuracy"));
vector<Blob<Dtype>*> bottom;
- // Test that we can do Backward even though we have an ACCURACY layer.
+ // Test that we can do Backward even though we have an 'Accuracy' layer.
this->net_->ForwardBackward(bottom);
}
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'LeNet' "
"layers { "
" name: 'mnist' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
" data_param { "
"} "
"layers { "
" name: 'mnist' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
" data_param { "
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" bottom: 'data' "
" top: 'conv1' "
" blobs_lr: 1 "
"} "
"layers { "
" name: 'ip1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'conv1' "
" top: 'ip1' "
" blobs_lr: 1 "
"} "
"layers { "
" name: 'accuracy' "
- " type: ACCURACY "
+ " type: 'Accuracy' "
" bottom: 'ip1' "
" bottom: 'label' "
" top: 'accuracy' "
"} "
"layers { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'ip2' "
" bottom: 'label' "
" top: 'loss' "
"name: 'LeNet' "
"layers { "
" name: 'mnist' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
" data_param { "
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" bottom: 'data' "
" top: 'conv1' "
" blobs_lr: 1 "
"} "
"layers { "
" name: 'ip1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'conv1' "
" top: 'ip1' "
" blobs_lr: 1 "
"} "
"layers { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'ip2' "
" bottom: 'label' "
" top: 'loss' "
"name: 'LeNet' "
"layers { "
" name: 'mnist' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
" data_param { "
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" bottom: 'data' "
" top: 'conv1' "
" blobs_lr: 1 "
"} "
"layers { "
" name: 'ip1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'conv1' "
" top: 'ip1' "
" blobs_lr: 1 "
"} "
"layers { "
" name: 'accuracy' "
- " type: ACCURACY "
+ " type: 'Accuracy' "
" bottom: 'ip1' "
" bottom: 'label' "
" top: 'accuracy' "
"} "
"layers { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'ip2' "
" bottom: 'label' "
" top: 'loss' "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
" include: { stage: 'mystage' } "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { stage: 'mystage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { stage: 'mystage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" exclude: { stage: 'mystage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { stage: 'mystage' stage: 'myotherstage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { stage: 'mystage' } "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { stage: 'mystage' } "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { stage: 'myotherstage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { stage: 'mystage' } "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { stage: 'mystage' stage: 'myotherstage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { stage: 'mystage' } "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { not_stage: 'myotherstage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { not_stage: 'myotherstage' } "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { not_stage: 'mystage' } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { not_stage: 'mystage' } "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { min_level: 3 } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { max_level: -3 } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { min_level: 0 } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { min_level: 3 } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { max_level: 0 } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { max_level: -3 } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { min_level: 2 phase: TRAIN } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { min_level: 2 phase: TEST } "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { min_level: 2 phase: TRAIN } "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { min_level: 2 phase: TEST } "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" include: { min_level: 2 phase: TRAIN } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { min_level: 2 phase: TEST } "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" exclude: { min_level: 2 phase: TRAIN } "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" exclude: { min_level: 2 phase: TEST } "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" exclude: { min_level: 2 phase: TEST } "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
" exclude: { min_level: 2 phase: TRAIN } "
TEST_F(ProtoTest, TestSerialization) {
LayerParameter param;
param.set_name("test");
- param.set_type(LayerParameter_LayerType_NONE);
+ param.set_type("Test");
std::cout << "Printing in binary format." << std::endl;
std::cout << param.SerializeAsString() << std::endl;
std::cout << "Printing in text format." << std::endl;
" name: 'TestNetwork' "
" layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 3 "
" } "
" layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" } "
" } "
" layers: { "
" name: 'accuracy' "
- " type: ACCURACY "
+ " type: 'Accuracy' "
" bottom: 'innerprod' "
" bottom: 'label' "
" top: 'accuracy' "
" } "
" layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
" include: { phase: TRAIN } "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'data_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'data' "
" top: 'data_split_0' "
" top: 'data_split_1' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_split_0' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_split_1' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod2' "
"} ";
"name: 'CaffeNet' "
"layers { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" data_param { "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" batch_size: 256 "
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 96 "
" kernel_size: 11 "
"} "
"layers { "
" name: 'relu1' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv1' "
" top: 'conv1' "
"} "
"layers { "
" name: 'pool1' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" pool: MAX "
" kernel_size: 3 "
"} "
"layers { "
" name: 'norm1' "
- " type: LRN "
+ " type: 'LRN' "
" lrn_param { "
" local_size: 5 "
" alpha: 0.0001 "
"} "
"layers { "
" name: 'conv2' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 256 "
" group: 2 "
"} "
"layers { "
" name: 'relu2' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv2' "
" top: 'conv2' "
"} "
"layers { "
" name: 'pool2' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" pool: MAX "
" kernel_size: 3 "
"} "
"layers { "
" name: 'norm2' "
- " type: LRN "
+ " type: 'LRN' "
" lrn_param { "
" local_size: 5 "
" alpha: 0.0001 "
"} "
"layers { "
" name: 'conv3' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 384 "
" kernel_size: 3 "
"} "
"layers { "
" name: 'relu3' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv3' "
" top: 'conv3' "
"} "
"layers { "
" name: 'conv4' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 384 "
" group: 2 "
"} "
"layers { "
" name: 'relu4' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv4' "
" top: 'conv4' "
"} "
"layers { "
" name: 'conv5' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 256 "
" group: 2 "
"} "
"layers { "
" name: 'relu5' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv5' "
" top: 'conv5' "
"} "
"layers { "
" name: 'pool5' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" kernel_size: 3 "
" pool: MAX "
"} "
"layers { "
" name: 'fc6' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 4096 "
" weight_filler { "
"} "
"layers { "
" name: 'relu6' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'fc6' "
" top: 'fc6' "
"} "
"layers { "
" name: 'drop6' "
- " type: DROPOUT "
+ " type: 'Dropout' "
" dropout_param { "
" dropout_ratio: 0.5 "
" } "
"} "
"layers { "
" name: 'fc7' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 4096 "
" weight_filler { "
"} "
"layers { "
" name: 'relu7' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'fc7' "
" top: 'fc7' "
"} "
"layers { "
" name: 'drop7' "
- " type: DROPOUT "
+ " type: 'Dropout' "
" dropout_param { "
" dropout_ratio: 0.5 "
" } "
"} "
"layers { "
" name: 'fc8' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1000 "
" weight_filler { "
"} "
"layers { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'fc8' "
" bottom: 'label' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod' "
"} "
"layers: { "
" name: 'relu' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'innerprod' "
" top: 'innerprod' "
"} "
"layers: { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'innerprod' "
" bottom: 'label' "
"} ";
"force_backward: true "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
"} "
"layers: { "
" name: 'innerproduct1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
"} "
"layers: { "
" name: 'innerproduct2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerproduct1' "
" bottom: 'innerproduct2' "
"} ";
"force_backward: true "
"layers: { "
" name: 'data' "
- " type: DUMMY_DATA "
+ " type: 'DummyData' "
" dummy_data_param { "
" num: 5 "
" channels: 2 "
"} "
"layers: { "
" name: 'data_data_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'data' "
" top: 'data_data_0_split_0' "
" top: 'data_data_0_split_1' "
"} "
"layers: { "
" name: 'innerproduct1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
"} "
"layers: { "
" name: 'innerproduct1_innerproduct1_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'innerproduct1' "
" top: 'innerproduct1_innerproduct1_0_split_0' "
" top: 'innerproduct1_innerproduct1_0_split_1' "
"} "
"layers: { "
" name: 'innerproduct2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 10 "
" bias_term: false "
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerproduct1_innerproduct1_0_split_1' "
" bottom: 'innerproduct2' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'innerprod3' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod3' "
"} "
"layers: { "
" name: 'loss1' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod2' "
"} "
"layers: { "
" name: 'loss2' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod2' "
" bottom: 'innerprod3' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'data_data_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'data' "
" top: 'data_data_0_split_0' "
" top: 'data_data_0_split_1' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_data_0_split_0' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_data_0_split_1' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'innerprod2_innerprod2_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'innerprod2' "
" top: 'innerprod2_innerprod2_0_split_0' "
" top: 'innerprod2_innerprod2_0_split_1' "
"} "
"layers: { "
" name: 'innerprod3' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_data_0_split_2' "
" top: 'innerprod3' "
"} "
"layers: { "
" name: 'loss1' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod2_innerprod2_0_split_0' "
"} "
"layers: { "
" name: 'loss2' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod2_innerprod2_0_split_1' "
" bottom: 'innerprod3' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'label' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'innerprod3' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod3' "
"} "
"layers: { "
" name: 'innerprod4' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'label' "
" top: 'innerprod4' "
"} "
"layers: { "
" name: 'loss1' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod3' "
"} "
"layers: { "
" name: 'loss2' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod2' "
" bottom: 'innerprod4' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'data_data_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'data' "
" top: 'data_data_0_split_0' "
" top: 'data_data_0_split_1' "
"} "
"layers: { "
" name: 'label_data_1_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'label' "
" top: 'label_data_1_split_0' "
" top: 'label_data_1_split_1' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_data_0_split_0' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'label_data_1_split_0' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'innerprod3' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_data_0_split_1' "
" top: 'innerprod3' "
"} "
"layers: { "
" name: 'innerprod4' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'label_data_1_split_1' "
" top: 'innerprod4' "
"} "
"layers: { "
" name: 'loss1' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod3' "
"} "
"layers: { "
" name: 'loss2' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod2' "
" bottom: 'innerprod4' "
"} ";
"input_dim: 227 "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod2' "
"} ";
"input_dim: 227 "
"layers: { "
" name: 'data_input_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'data' "
" top: 'data_input_0_split_0' "
" top: 'data_input_0_split_1' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_input_0_split_0' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_input_0_split_1' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'innerprod2' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'relu1' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'innerprod1' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'innerprod1' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'loss1' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1' "
" bottom: 'label' "
"} "
"layers: { "
" name: 'loss2' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod2' "
" bottom: 'data' "
"} ";
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" top: 'data' "
" top: 'label' "
"} "
"layers: { "
" name: 'data_data_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'data' "
" top: 'data_data_0_split_0' "
" top: 'data_data_0_split_1' "
"} "
"layers: { "
" name: 'innerprod1' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'data_data_0_split_0' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'relu1' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'innerprod1' "
" top: 'innerprod1' "
"} "
"layers: { "
" name: 'innerprod1_relu1_0_split' "
- " type: SPLIT "
+ " type: 'Split' "
" bottom: 'innerprod1' "
" top: 'innerprod1_relu1_0_split_0' "
" top: 'innerprod1_relu1_0_split_1' "
"} "
"layers: { "
" name: 'innerprod2' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" bottom: 'innerprod1_relu1_0_split_0' "
" top: 'innerprod2' "
"} "
"layers: { "
" name: 'loss1' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod1_relu1_0_split_1' "
" bottom: 'label' "
"} "
"layers: { "
" name: 'loss2' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
" bottom: 'innerprod2' "
" bottom: 'data_data_0_split_1' "
"} ";
"name: 'CaffeNet' "
"layers { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" data_param { "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" batch_size: 256 "
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 96 "
" kernel_size: 11 "
"} "
"layers { "
" name: 'fc8' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1000 "
" weight_filler { "
"} "
"layers { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'fc8' "
" bottom: 'label' "
"} ";
"input_dim: 32 "
"layers { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" data_param { "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" batch_size: 256 "
"} "
"layers { "
" name: 'images' "
- " type: IMAGE_DATA "
+ " type: 'ImageData' "
" image_data_param { "
" source: '/home/jiayq/Data/ILSVRC12/train-images' "
" batch_size: 256 "
"} "
"layers { "
" name: 'window_data' "
- " type: WINDOW_DATA "
+ " type: 'WindowData' "
" window_data_param { "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" batch_size: 256 "
"} "
"layers { "
" name: 'hdf5data' "
- " type: HDF5_DATA "
+ " type: 'HDF5Data' "
" hdf5_data_param { "
" source: '/my/hdf5/data' "
" batch_size: 256 "
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 96 "
" bias_term: false "
"} "
"layers { "
" name: 'pool1ave' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" pool: AVE "
" kernel_size: 3 "
"} "
"layers { "
" name: 'pool1stoch' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" pool: STOCHASTIC "
" kernel_size: 4 "
"} "
"layers { "
" name: 'concat' "
- " type: CONCAT "
+ " type: 'Concat' "
" concat_param { "
" concat_dim: 2 "
" } "
"} "
"layers { "
" name: 'norm1' "
- " type: LRN "
+ " type: 'LRN' "
" lrn_param { "
" local_size: 5 "
" alpha: 0.0001 "
"} "
"layers { "
" name: 'fc6' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 4096 "
" bias_term: false "
"} "
"layers { "
" name: 'relu6' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'fc6' "
" top: 'fc6' "
"} "
"layers { "
" name: 'drop6' "
- " type: DROPOUT "
+ " type: 'Dropout' "
" dropout_param { "
" dropout_ratio: 0.2 "
" } "
"} "
"layers { "
" name: 'loss' "
- " type: INFOGAIN_LOSS "
+ " type: 'InfogainLoss' "
" infogain_loss_param { "
" source: '/my/infogain/matrix' "
" } "
"} "
"layers { "
" name: 'accuracy' "
- " type: ACCURACY "
+ " type: 'Accuracy' "
"} "
"layers { "
" name: 'bnll' "
- " type: BNLL "
+ " type: 'BNLL' "
"} "
"layers { "
" name: 'euclidean_loss' "
- " type: EUCLIDEAN_LOSS "
+ " type: 'EuclideanLoss' "
"} "
"layers { "
" name: 'flatten' "
- " type: FLATTEN "
+ " type: 'Flatten' "
"} "
"layers { "
" name: 'hdf5_output' "
- " type: HDF5_OUTPUT "
+ " type: 'HDF5Output' "
" hdf5_output_param { "
" file_name: '/my/hdf5/output/file' "
" } "
"} "
"layers { "
" name: 'im2col' "
- " type: IM2COL "
+  "  type: 'Im2col' "
"} "
"layers { "
" name: 'images' "
- " type: IMAGE_DATA "
+ " type: 'ImageData' "
"} "
"layers { "
" name: 'multinomial_logistic_loss' "
- " type: MULTINOMIAL_LOGISTIC_LOSS "
+ " type: 'MultinomialLogisticLoss' "
"} "
"layers { "
" name: 'sigmoid' "
- " type: SIGMOID "
+ " type: 'Sigmoid' "
"} "
"layers { "
" name: 'softmax' "
- " type: SOFTMAX "
+ " type: 'Softmax' "
"} "
"layers { "
" name: 'split' "
- " type: SPLIT "
+ " type: 'Split' "
"} "
"layers { "
" name: 'tanh' "
- " type: TANH "
+ " type: 'TanH' "
"} ";
this->RunV0UpgradeTest(input_proto, expected_output_proto);
}
"name: 'CaffeNet' "
"layers { "
" name: 'data' "
- " type: DATA "
+ " type: 'Data' "
" data_param { "
" source: '/home/jiayq/Data/ILSVRC12/train-leveldb' "
" batch_size: 256 "
"} "
"layers { "
" name: 'conv1' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 96 "
" kernel_size: 11 "
"} "
"layers { "
" name: 'relu1' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv1' "
" top: 'conv1' "
"} "
"layers { "
" name: 'pool1' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" pool: MAX "
" kernel_size: 3 "
"} "
"layers { "
" name: 'norm1' "
- " type: LRN "
+ " type: 'LRN' "
" lrn_param { "
" local_size: 5 "
" alpha: 0.0001 "
"} "
"layers { "
" name: 'conv2' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 256 "
" group: 2 "
"} "
"layers { "
" name: 'relu2' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv2' "
" top: 'conv2' "
"} "
"layers { "
" name: 'pool2' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" pool: MAX "
" kernel_size: 3 "
"} "
"layers { "
" name: 'norm2' "
- " type: LRN "
+ " type: 'LRN' "
" lrn_param { "
" local_size: 5 "
" alpha: 0.0001 "
"} "
"layers { "
" name: 'conv3' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 384 "
" kernel_size: 3 "
"} "
"layers { "
" name: 'relu3' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv3' "
" top: 'conv3' "
"} "
"layers { "
" name: 'conv4' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 384 "
" group: 2 "
"} "
"layers { "
" name: 'relu4' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv4' "
" top: 'conv4' "
"} "
"layers { "
" name: 'conv5' "
- " type: CONVOLUTION "
+ " type: 'Convolution' "
" convolution_param { "
" num_output: 256 "
" group: 2 "
"} "
"layers { "
" name: 'relu5' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'conv5' "
" top: 'conv5' "
"} "
"layers { "
" name: 'pool5' "
- " type: POOLING "
+ " type: 'Pooling' "
" pooling_param { "
" kernel_size: 3 "
" pool: MAX "
"} "
"layers { "
" name: 'fc6' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 4096 "
" weight_filler { "
"} "
"layers { "
" name: 'relu6' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'fc6' "
" top: 'fc6' "
"} "
"layers { "
" name: 'drop6' "
- " type: DROPOUT "
+ " type: 'Dropout' "
" dropout_param { "
" dropout_ratio: 0.5 "
" } "
"} "
"layers { "
" name: 'fc7' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 4096 "
" weight_filler { "
"} "
"layers { "
" name: 'relu7' "
- " type: RELU "
+ " type: 'ReLU' "
" bottom: 'fc7' "
" top: 'fc7' "
"} "
"layers { "
" name: 'drop7' "
- " type: DROPOUT "
+ " type: 'Dropout' "
" dropout_param { "
" dropout_ratio: 0.5 "
" } "
"} "
"layers { "
" name: 'fc8' "
- " type: INNER_PRODUCT "
+ " type: 'InnerProduct' "
" inner_product_param { "
" num_output: 1000 "
" weight_filler { "
"} "
"layers { "
" name: 'loss' "
- " type: SOFTMAX_LOSS "
+ " type: 'SoftmaxWithLoss' "
" bottom: 'fc8' "
" bottom: 'label' "
"} ";
split_layer_param->Clear();
split_layer_param->add_bottom(blob_name);
split_layer_param->set_name(SplitLayerName(layer_name, blob_name, blob_idx));
- split_layer_param->set_type(LayerParameter_LayerType_SPLIT);
+ split_layer_param->set_type("Split");
for (int k = 0; k < split_count; ++k) {
split_layer_param->add_top(
SplitBlobName(layer_name, blob_name, blob_idx, k));
return is_fully_compatible;
}
-LayerParameter_LayerType UpgradeV0LayerType(const string& type) {
+const char* UpgradeV0LayerType(const string& type) {
if (type == "accuracy") {
- return LayerParameter_LayerType_ACCURACY;
+ return "Accuracy";
} else if (type == "bnll") {
- return LayerParameter_LayerType_BNLL;
+ return "BNLL";
} else if (type == "concat") {
- return LayerParameter_LayerType_CONCAT;
+ return "Concat";
} else if (type == "conv") {
- return LayerParameter_LayerType_CONVOLUTION;
+ return "Convolution";
} else if (type == "data") {
- return LayerParameter_LayerType_DATA;
+ return "Data";
} else if (type == "dropout") {
- return LayerParameter_LayerType_DROPOUT;
+ return "Dropout";
} else if (type == "euclidean_loss") {
- return LayerParameter_LayerType_EUCLIDEAN_LOSS;
+ return "EuclideanLoss";
} else if (type == "flatten") {
- return LayerParameter_LayerType_FLATTEN;
+ return "Flatten";
} else if (type == "hdf5_data") {
- return LayerParameter_LayerType_HDF5_DATA;
+ return "HDF5Data";
} else if (type == "hdf5_output") {
- return LayerParameter_LayerType_HDF5_OUTPUT;
+ return "HDF5Output";
} else if (type == "im2col") {
- return LayerParameter_LayerType_IM2COL;
+    return "Im2col";
} else if (type == "images") {
- return LayerParameter_LayerType_IMAGE_DATA;
+ return "ImageData";
} else if (type == "infogain_loss") {
- return LayerParameter_LayerType_INFOGAIN_LOSS;
+ return "InfogainLoss";
} else if (type == "innerproduct") {
- return LayerParameter_LayerType_INNER_PRODUCT;
+ return "InnerProduct";
} else if (type == "lrn") {
- return LayerParameter_LayerType_LRN;
+ return "LRN";
} else if (type == "multinomial_logistic_loss") {
- return LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS;
+ return "MultinomialLogisticLoss";
} else if (type == "pool") {
- return LayerParameter_LayerType_POOLING;
+ return "Pooling";
} else if (type == "relu") {
- return LayerParameter_LayerType_RELU;
+ return "ReLU";
} else if (type == "sigmoid") {
- return LayerParameter_LayerType_SIGMOID;
+ return "Sigmoid";
} else if (type == "softmax") {
- return LayerParameter_LayerType_SOFTMAX;
+ return "Softmax";
} else if (type == "softmax_loss") {
- return LayerParameter_LayerType_SOFTMAX_LOSS;
+ return "SoftmaxWithLoss";
} else if (type == "split") {
- return LayerParameter_LayerType_SPLIT;
+ return "Split";
} else if (type == "tanh") {
- return LayerParameter_LayerType_TANH;
+ return "TanH";
} else if (type == "window_data") {
- return LayerParameter_LayerType_WINDOW_DATA;
+ return "WindowData";
} else {
- LOG(FATAL) << "Unknown layer name: " << type;
- return LayerParameter_LayerType_NONE;
+ LOG(FATAL) << "Unknown layer type: " << type;
+ return "";
}
}
bool NetNeedsDataUpgrade(const NetParameter& net_param) {
for (int i = 0; i < net_param.layers_size(); ++i) {
- if (net_param.layers(i).type() == LayerParameter_LayerType_DATA) {
+ if (net_param.layers(i).type() == "Data") {
DataParameter layer_param = net_param.layers(i).data_param();
if (layer_param.has_scale()) { return true; }
if (layer_param.has_mean_file()) { return true; }
if (layer_param.has_crop_size()) { return true; }
if (layer_param.has_mirror()) { return true; }
}
- if (net_param.layers(i).type() == LayerParameter_LayerType_IMAGE_DATA) {
+ if (net_param.layers(i).type() == "ImageData") {
ImageDataParameter layer_param = net_param.layers(i).image_data_param();
if (layer_param.has_scale()) { return true; }
if (layer_param.has_mean_file()) { return true; }
if (layer_param.has_crop_size()) { return true; }
if (layer_param.has_mirror()) { return true; }
}
- if (net_param.layers(i).type() == LayerParameter_LayerType_WINDOW_DATA) {
+ if (net_param.layers(i).type() == "WindowData") {
WindowDataParameter layer_param = net_param.layers(i).window_data_param();
if (layer_param.has_scale()) { return true; }
if (layer_param.has_mean_file()) { return true; }
return false;
}
-#define CONVERT_LAYER_TRANSFORM_PARAM(TYPE, Name, param_name) \
+#define CONVERT_LAYER_TRANSFORM_PARAM(TYPE_NAME, PARAM_NAME) \
do { \
- if (net_param->layers(i).type() == LayerParameter_LayerType_##TYPE) { \
- Name##Parameter* layer_param = \
- net_param->mutable_layers(i)->mutable_##param_name##_param(); \
+ if (net_param->layers(i).type() == #TYPE_NAME) { \
+ TYPE_NAME##Parameter* layer_param = \
+ net_param->mutable_layers(i)->mutable_##PARAM_NAME##_param(); \
TransformationParameter* transform_param = \
net_param->mutable_layers(i)->mutable_transform_param(); \
if (layer_param->has_scale()) { \
void UpgradeNetDataTransformation(NetParameter* net_param) {
for (int i = 0; i < net_param->layers_size(); ++i) {
- CONVERT_LAYER_TRANSFORM_PARAM(DATA, Data, data);
- CONVERT_LAYER_TRANSFORM_PARAM(IMAGE_DATA, ImageData, image_data);
- CONVERT_LAYER_TRANSFORM_PARAM(WINDOW_DATA, WindowData, window_data);
+ CONVERT_LAYER_TRANSFORM_PARAM(Data, data);
+ CONVERT_LAYER_TRANSFORM_PARAM(ImageData, image_data);
+ CONVERT_LAYER_TRANSFORM_PARAM(WindowData, window_data);
}
}