+version: 1
name: "CIFAR10_full_deploy"
# N.B. input image must be in CIFAR-10 format
# as described at http://www.cs.toronto.edu/~kriz/cifar.html
+version: 1
name: "CIFAR10_full_test"
layers {
name: "cifar"
+version: 1
name: "CIFAR10_full_train"
layers {
name: "cifar"
+version: 1
name: "CIFAR10_quick_test"
+# N.B. input image must be in CIFAR-10 format
+# as described at http://www.cs.toronto.edu/~kriz/cifar.html
input: "data"
input_dim: 1
input_dim: 3
+# quick config
+version: 1
name: "CIFAR10_quick_test"
layers {
name: "cifar"
+# quick config
+version: 1
name: "CIFAR10_quick_train"
layers {
name: "cifar"
+version: 1
name: "CaffeNet"
layers {
name: "data"
+version: 1
name: "CaffeNet"
input: "data"
input_dim: 10
+version: 1
name: "CaffeNet"
layers {
name: "data"
+version: 1
name: "CaffeNet"
layers {
name: "data"
TOOLS=../../build/tools
GLOG_logtostderr=1 $TOOLS/train_net.bin \
- imagenet_solver.prototxt caffe_imagenet_train_10000.solverstate
+ imagenet_solver.prototxt caffe_imagenet_train_310000.solverstate
echo "Done."
+version: 1
name: "LeNet"
input: "data"
input_dim: 64
input_dim: 1
input_dim: 28
input_dim: 28
+# N.B. input should be 0/1 = mnist raw data scaled by 0.00390625
layers {
name: "conv1"
type: CONVOLUTION
+version: 1
name: "LeNet-test"
layers {
name: "mnist"
+version: 1
name: "LeNet"
layers {
name: "mnist"
+version: 1
name: "CaffeNet"
layers {
name: "data"
+version: 1
name: "CaffeNet"
layers {
name: "data"
using std::map;
using std::set;
+const int kNetParameterVersionNumber = 1;
+
namespace caffe {
template <typename Dtype>
<< "V0NetParameter to NetParameter (see above); continuing anyway.";
}
}
+ CHECK_EQ(param->version(), kNetParameterVersionNumber);
}
template <typename Dtype>
<< "V0NetParameter to NetParameter (see above); continuing anyway.";
}
}
+ CHECK_EQ(param->version(), kNetParameterVersionNumber);
}
template <typename Dtype>
}
message NetParameter {
  optional string name = 1;  // consider giving the network a name
  // The input blobs to the network.
  repeated string input = 2;
  // The dim of the input blobs. For each input blob there should be four
  // values specifying the num, channels, height and width of the input blob.
  // Thus, there should be a total of (4 * #input) numbers.
  repeated int32 input_dim = 3;
  repeated LayerParameter layers = 4;  // a bunch of layers.
  // Whether the network will force every layer to carry out backward operation.
  // If set False, then whether to carry out backward is determined
  // automatically according to the net structure and learning rates.
  optional bool force_backward = 5 [default = false];
  // The NetParameter version number; currently only version 1 is supported.
  // (The version number should only be bumped for breaking changes, which
  // ideally should never happen.)
  //
  // NOTE(review): this field deliberately takes a FRESH number (6) and keeps
  // every previously-published field at its original number. Renumbering
  // existing fields (name 1->2, input 2->3, ...) would be a wire-breaking
  // change: old serialized nets would silently misparse. It is also
  // `optional` with [default = 1] rather than `required`, so NetParameter
  // messages serialized before this field existed still parse and report
  // version 1, keeping CHECK_EQ(param->version(), 1) callers happy.
  optional int32 version = 6 [default = 1];
}
message SolverParameter {
delete db;
const string& proto_prefix =
+ "version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
TYPED_TEST(SplitLayerInsertionTest, TestNoInsertion1) {
const string& input_proto =
+ "version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
TYPED_TEST(SplitLayerInsertionTest, TestNoInsertion2) {
const string& input_proto =
+ "version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
TYPED_TEST(SplitLayerInsertionTest, TestNoInsertionImageNet) {
const string& input_proto =
+ "version: 1 "
"name: 'CaffeNet' "
"layers { "
" name: 'data' "
TYPED_TEST(SplitLayerInsertionTest, TestNoInsertionWithInPlace) {
const string& input_proto =
+ "version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
TYPED_TEST(SplitLayerInsertionTest, TestInsertion) {
const string& input_proto =
+ "version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
" bottom: 'innerprod3' "
"} ";
const string& expected_output_proto =
+ "version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
TYPED_TEST(SplitLayerInsertionTest, TestInsertionTwoTop) {
const string& input_proto =
+ "version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
" bottom: 'innerprod4' "
"} ";
const string& expected_output_proto =
+ "version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
TYPED_TEST(SplitLayerInsertionTest, TestInputInsertion) {
const string& input_proto =
+ "version: 1 "
"name: 'TestNetwork' "
"input: 'data' "
"input_dim: 10 "
" bottom: 'innerprod2' "
"} ";
const string& expected_output_proto =
+ "version: 1 "
"name: 'TestNetwork' "
"input: 'data' "
"input_dim: 10 "
TYPED_TEST(SplitLayerInsertionTest, TestWithInPlace) {
const string& input_proto =
+ "version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
" bottom: 'data' "
"} ";
const string& expected_output_proto =
+ "version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
" bottom: 'label' "
"} ";
const string& expected_output_proto =
+ "version: 1 "
"name: 'CaffeNet' "
"layers { "
" name: 'data' "
" } "
"} ";
const string& expected_output_proto =
+ "version: 1 "
"name: 'CaffeNet' "
"input: 'input_data' "
"input_dim: 64 "
" bottom: 'label' "
"} ";
const string& expected_output_proto =
+ "version: 1 "
"name: 'CaffeNet' "
"layers { "
" name: 'data' "
// Now upgrade layer parameters.
bool is_fully_compatible = true;
net_param->Clear();
+ net_param->set_version(1);
if (v0_net_param.has_name()) {
net_param->set_name(v0_net_param.name());
}