From 8198585b4a670ee2d261d436ebecbb63688da617 Mon Sep 17 00:00:00 2001
From: Jeff Donahue
Date: Sun, 23 Mar 2014 21:06:09 -0700
Subject: [PATCH] rollback previous commit adding version number to
 NetParameter -- going a different route

---
 examples/cifar10/cifar10_full.prototxt                    |  1 -
 examples/cifar10/cifar10_full_test.prototxt               |  1 -
 examples/cifar10/cifar10_full_train.prototxt              |  1 -
 examples/cifar10/cifar10_quick.prototxt                   |  3 ---
 examples/cifar10/cifar10_quick_test.prototxt              |  2 --
 examples/cifar10/cifar10_quick_train.prototxt             |  2 --
 examples/feature_extraction/imagenet_val.prototxt         |  1 -
 examples/imagenet/imagenet_deploy.prototxt                |  1 -
 examples/imagenet/imagenet_train.prototxt                 |  1 -
 examples/imagenet/imagenet_val.prototxt                   |  1 -
 examples/imagenet/resume_training.sh                      |  2 +-
 examples/lenet/lenet.prototxt                             |  2 --
 examples/lenet/lenet_test.prototxt                        |  1 -
 examples/lenet/lenet_train.prototxt                       |  1 -
 examples/pascal-finetuning/pascal_finetune_train.prototxt |  1 -
 examples/pascal-finetuning/pascal_finetune_val.prototxt   |  1 -
 src/caffe/net.cpp                                         |  4 ----
 src/caffe/proto/caffe.proto                               | 14 +++++---------
 src/caffe/test/test_net.cpp                               |  1 -
 src/caffe/test/test_split_layer.cpp                       | 12 ------------
 src/caffe/test/test_upgrade_proto.cpp                     |  3 ---
 src/caffe/util/upgrade_proto.cpp                          |  1 -
 22 files changed, 6 insertions(+), 51 deletions(-)

diff --git a/examples/cifar10/cifar10_full.prototxt b/examples/cifar10/cifar10_full.prototxt
index 1255931..237a7a0 100644
--- a/examples/cifar10/cifar10_full.prototxt
+++ b/examples/cifar10/cifar10_full.prototxt
@@ -1,4 +1,3 @@
-version: 1
 name: "CIFAR10_full_deploy"
 # N.B. input image must be in CIFAR-10 format
 # as described at http://www.cs.toronto.edu/~kriz/cifar.html
diff --git a/examples/cifar10/cifar10_full_test.prototxt b/examples/cifar10/cifar10_full_test.prototxt
index aff9938..ada373a 100644
--- a/examples/cifar10/cifar10_full_test.prototxt
+++ b/examples/cifar10/cifar10_full_test.prototxt
@@ -1,4 +1,3 @@
-version: 1
 name: "CIFAR10_full_test"
 layers {
   name: "cifar"
diff --git a/examples/cifar10/cifar10_full_train.prototxt b/examples/cifar10/cifar10_full_train.prototxt
index 2f50bda..56c9306 100644
--- a/examples/cifar10/cifar10_full_train.prototxt
+++ b/examples/cifar10/cifar10_full_train.prototxt
@@ -1,4 +1,3 @@
-version: 1
 name: "CIFAR10_full_train"
 layers {
   name: "cifar"
diff --git a/examples/cifar10/cifar10_quick.prototxt b/examples/cifar10/cifar10_quick.prototxt
index ec8c5b2..505158f 100644
--- a/examples/cifar10/cifar10_quick.prototxt
+++ b/examples/cifar10/cifar10_quick.prototxt
@@ -1,7 +1,4 @@
-version: 1
 name: "CIFAR10_quick_test"
-# N.B. input image must be in CIFAR-10 format
-# as described at http://www.cs.toronto.edu/~kriz/cifar.html
 input: "data"
 input_dim: 1
 input_dim: 3
diff --git a/examples/cifar10/cifar10_quick_test.prototxt b/examples/cifar10/cifar10_quick_test.prototxt
index fb34844..a154b9a 100644
--- a/examples/cifar10/cifar10_quick_test.prototxt
+++ b/examples/cifar10/cifar10_quick_test.prototxt
@@ -1,5 +1,3 @@
-# quick config
-version: 1
 name: "CIFAR10_quick_test"
 layers {
   name: "cifar"
diff --git a/examples/cifar10/cifar10_quick_train.prototxt b/examples/cifar10/cifar10_quick_train.prototxt
index 62b45bf..de5b6c3 100644
--- a/examples/cifar10/cifar10_quick_train.prototxt
+++ b/examples/cifar10/cifar10_quick_train.prototxt
@@ -1,5 +1,3 @@
-# quick config
-version: 1
 name: "CIFAR10_quick_train"
 layers {
   name: "cifar"
diff --git a/examples/feature_extraction/imagenet_val.prototxt b/examples/feature_extraction/imagenet_val.prototxt
index a8235b2..14bfe77 100644
--- a/examples/feature_extraction/imagenet_val.prototxt
+++ b/examples/feature_extraction/imagenet_val.prototxt
@@ -1,4 +1,3 @@
-version: 1
 name: "CaffeNet"
 layers {
   name: "data"
diff --git a/examples/imagenet/imagenet_deploy.prototxt b/examples/imagenet/imagenet_deploy.prototxt
index a7959f2..37ab922 100644
--- a/examples/imagenet/imagenet_deploy.prototxt
+++ b/examples/imagenet/imagenet_deploy.prototxt
@@ -1,4 +1,3 @@
-version: 1
 name: "CaffeNet"
 input: "data"
 input_dim: 10
diff --git a/examples/imagenet/imagenet_train.prototxt b/examples/imagenet/imagenet_train.prototxt
index 7d3d3e4..b34a9b4 100644
--- a/examples/imagenet/imagenet_train.prototxt
+++ b/examples/imagenet/imagenet_train.prototxt
@@ -1,4 +1,3 @@
-version: 1
 name: "CaffeNet"
 layers {
   name: "data"
diff --git a/examples/imagenet/imagenet_val.prototxt b/examples/imagenet/imagenet_val.prototxt
index 5561fe4..2f1ead7 100644
--- a/examples/imagenet/imagenet_val.prototxt
+++ b/examples/imagenet/imagenet_val.prototxt
@@ -1,4 +1,3 @@
-version: 1
 name: "CaffeNet"
 layers {
   name: "data"
diff --git a/examples/imagenet/resume_training.sh b/examples/imagenet/resume_training.sh
index 6f5d0ca..2b3b403 100755
--- a/examples/imagenet/resume_training.sh
+++ b/examples/imagenet/resume_training.sh
@@ -3,6 +3,6 @@
 TOOLS=../../build/tools
 
 GLOG_logtostderr=1 $TOOLS/train_net.bin \
-    imagenet_solver.prototxt caffe_imagenet_train_310000.solverstate
+    imagenet_solver.prototxt caffe_imagenet_train_10000.solverstate
 
 echo "Done."
diff --git a/examples/lenet/lenet.prototxt b/examples/lenet/lenet.prototxt
index d4506a1..491fad1 100644
--- a/examples/lenet/lenet.prototxt
+++ b/examples/lenet/lenet.prototxt
@@ -1,11 +1,9 @@
-version: 1
 name: "LeNet"
 input: "data"
 input_dim: 64
 input_dim: 1
 input_dim: 28
 input_dim: 28
-# N.B. input should be 0/1 = mnist raw data scaled by 0.00390625
 layers {
   name: "conv1"
   type: CONVOLUTION
diff --git a/examples/lenet/lenet_test.prototxt b/examples/lenet/lenet_test.prototxt
index 5025ba4..3b59b75 100644
--- a/examples/lenet/lenet_test.prototxt
+++ b/examples/lenet/lenet_test.prototxt
@@ -1,4 +1,3 @@
-version: 1
 name: "LeNet-test"
 layers {
   name: "mnist"
diff --git a/examples/lenet/lenet_train.prototxt b/examples/lenet/lenet_train.prototxt
index 12637bd..e8a1e74 100644
--- a/examples/lenet/lenet_train.prototxt
+++ b/examples/lenet/lenet_train.prototxt
@@ -1,4 +1,3 @@
-version: 1
 name: "LeNet"
 layers {
   name: "mnist"
diff --git a/examples/pascal-finetuning/pascal_finetune_train.prototxt b/examples/pascal-finetuning/pascal_finetune_train.prototxt
index 757f671..dfc60fe 100644
--- a/examples/pascal-finetuning/pascal_finetune_train.prototxt
+++ b/examples/pascal-finetuning/pascal_finetune_train.prototxt
@@ -1,4 +1,3 @@
-version: 1
 name: "CaffeNet"
 layers {
   name: "data"
diff --git a/examples/pascal-finetuning/pascal_finetune_val.prototxt b/examples/pascal-finetuning/pascal_finetune_val.prototxt
index 72db955..ff898fe 100644
--- a/examples/pascal-finetuning/pascal_finetune_val.prototxt
+++ b/examples/pascal-finetuning/pascal_finetune_val.prototxt
@@ -1,4 +1,3 @@
-version: 1
 name: "CaffeNet"
 layers {
   name: "data"
diff --git a/src/caffe/net.cpp b/src/caffe/net.cpp
index 580f924..405cf1b 100644
--- a/src/caffe/net.cpp
+++ b/src/caffe/net.cpp
@@ -18,8 +18,6 @@ using std::pair;
 using std::map;
 using std::set;
 
-const int kNetParameterVersionNumber = 1;
-
 namespace caffe {
 
 template <typename Dtype>
@@ -327,7 +325,6 @@ void Net<Dtype>::ReadParamsFromTextFile(const string& param_file,
           << "V0NetParameter to NetParameter (see above); continuing anyway.";
     }
   }
-  CHECK_EQ(param->version(), kNetParameterVersionNumber);
 }
 
 template <typename Dtype>
@@ -348,7 +345,6 @@ void Net<Dtype>::ReadParamsFromBinaryFile(const string& param_file,
           << "V0NetParameter to NetParameter (see above); continuing anyway.";
     }
   }
-  CHECK_EQ(param->version(), kNetParameterVersionNumber);
 }
 
 template <typename Dtype>
diff --git a/src/caffe/proto/caffe.proto b/src/caffe/proto/caffe.proto
index a9cfa84..34d59f9 100644
--- a/src/caffe/proto/caffe.proto
+++ b/src/caffe/proto/caffe.proto
@@ -39,22 +39,18 @@ message FillerParameter {
 }
 
 message NetParameter {
-  // The NetParameter version number; currently only version 1 is supported.
-  // (The version number should only be bumped for breaking changes, which
-  // ideally should never happen.)
-  required int32 version = 1 [default = 1];
-  optional string name = 2; // consider giving the network a name
+  optional string name = 1; // consider giving the network a name
   // The input blobs to the network.
-  repeated string input = 3;
+  repeated string input = 2;
   // The dim of the input blobs. For each input blob there should be four
   // values specifying the num, channels, height and width of the input blob.
   // Thus, there should be a total of (4 * #input) numbers.
-  repeated int32 input_dim = 4;
-  repeated LayerParameter layers = 5; // a bunch of layers.
+  repeated int32 input_dim = 3;
+  repeated LayerParameter layers = 4; // a bunch of layers.
   // Whether the network will force every layer to carry out backward operation.
   // If set False, then whether to carry out backward is determined
   // automatically according to the net structure and learning rates.
-  optional bool force_backward = 6 [default = false];
+  optional bool force_backward = 5 [default = false];
 }
 
 message SolverParameter {
diff --git a/src/caffe/test/test_net.cpp b/src/caffe/test/test_net.cpp
index 025b3fa..4c7f0e7 100644
--- a/src/caffe/test/test_net.cpp
+++ b/src/caffe/test/test_net.cpp
@@ -44,7 +44,6 @@ class NetTest : public ::testing::Test {
     delete db;
 
     const string& proto_prefix =
-        "version: 1 "
         "name: 'TestNetwork' "
         "layers: { "
         "  name: 'data' "
diff --git a/src/caffe/test/test_split_layer.cpp b/src/caffe/test/test_split_layer.cpp
index 45b06de..3c17b0e 100644
--- a/src/caffe/test/test_split_layer.cpp
+++ b/src/caffe/test/test_split_layer.cpp
@@ -186,7 +186,6 @@ TYPED_TEST_CASE(SplitLayerInsertionTest, InsertionDtypes);
 
 TYPED_TEST(SplitLayerInsertionTest, TestNoInsertion1) {
   const string& input_proto =
-      "version: 1 "
       "name: 'TestNetwork' "
       "layers: { "
       "  name: 'data' "
@@ -211,7 +210,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestNoInsertion1) {
 
 TYPED_TEST(SplitLayerInsertionTest, TestNoInsertion2) {
   const string& input_proto =
-      "version: 1 "
       "name: 'TestNetwork' "
       "layers: { "
       "  name: 'data' "
@@ -249,7 +247,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestNoInsertion2) {
 
 TYPED_TEST(SplitLayerInsertionTest, TestNoInsertionImageNet) {
   const string& input_proto =
-      "version: 1 "
       "name: 'CaffeNet' "
       "layers { "
       "  name: 'data' "
@@ -571,7 +568,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestNoInsertionImageNet) {
 
 TYPED_TEST(SplitLayerInsertionTest, TestNoInsertionWithInPlace) {
   const string& input_proto =
-      "version: 1 "
       "name: 'TestNetwork' "
       "layers: { "
       "  name: 'data' "
@@ -602,7 +598,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestNoInsertionWithInPlace) {
 
 TYPED_TEST(SplitLayerInsertionTest, TestInsertion) {
   const string& input_proto =
-      "version: 1 "
       "name: 'TestNetwork' "
       "layers: { "
       "  name: 'data' "
@@ -641,7 +636,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestInsertion) {
       "  bottom: 'innerprod3' "
       "} ";
   const string& expected_output_proto =
-      "version: 1 "
       "name: 'TestNetwork' "
       "layers: { "
       "  name: 'data' "
@@ -699,7 +693,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestInsertion) {
 
 TYPED_TEST(SplitLayerInsertionTest, TestInsertionTwoTop) {
   const string& input_proto =
-      "version: 1 "
       "name: 'TestNetwork' "
       "layers: { "
       "  name: 'data' "
@@ -744,7 +737,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestInsertionTwoTop) {
       "  bottom: 'innerprod4' "
       "} ";
   const string& expected_output_proto =
-      "version: 1 "
       "name: 'TestNetwork' "
       "layers: { "
       "  name: 'data' "
@@ -807,7 +799,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestInsertionTwoTop) {
 
 TYPED_TEST(SplitLayerInsertionTest, TestInputInsertion) {
   const string& input_proto =
-      "version: 1 "
       "name: 'TestNetwork' "
       "input: 'data' "
       "input_dim: 10 "
@@ -833,7 +824,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestInputInsertion) {
       "  bottom: 'innerprod2' "
       "} ";
   const string& expected_output_proto =
-      "version: 1 "
       "name: 'TestNetwork' "
       "input: 'data' "
       "input_dim: 10 "
@@ -870,7 +860,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestInputInsertion) {
 
 TYPED_TEST(SplitLayerInsertionTest, TestWithInPlace) {
   const string& input_proto =
-      "version: 1 "
       "name: 'TestNetwork' "
       "layers: { "
       "  name: 'data' "
@@ -909,7 +898,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestWithInPlace) {
       "  bottom: 'data' "
       "} ";
   const string& expected_output_proto =
-      "version: 1 "
       "name: 'TestNetwork' "
       "layers: { "
       "  name: 'data' "
diff --git a/src/caffe/test/test_upgrade_proto.cpp b/src/caffe/test/test_upgrade_proto.cpp
index f36a4da..8203406 100644
--- a/src/caffe/test/test_upgrade_proto.cpp
+++ b/src/caffe/test/test_upgrade_proto.cpp
@@ -1193,7 +1193,6 @@ TYPED_TEST(V0UpgradeTest, TestSimple) {
       "  bottom: 'label' "
       "} ";
   const string& expected_output_proto =
-      "version: 1 "
       "name: 'CaffeNet' "
       "layers { "
       "  name: 'data' "
@@ -1514,7 +1513,6 @@ TYPED_TEST(V0UpgradeTest, TestAllParams) {
       "  } "
       "} ";
   const string& expected_output_proto =
-      "version: 1 "
       "name: 'CaffeNet' "
       "input: 'input_data' "
       "input_dim: 64 "
@@ -2109,7 +2107,6 @@ TYPED_TEST(V0UpgradeTest, TestImageNet) {
       "  bottom: 'label' "
       "} ";
   const string& expected_output_proto =
-      "version: 1 "
       "name: 'CaffeNet' "
       "layers { "
       "  name: 'data' "
diff --git a/src/caffe/util/upgrade_proto.cpp b/src/caffe/util/upgrade_proto.cpp
index fb86f0e..48813d8 100644
--- a/src/caffe/util/upgrade_proto.cpp
+++ b/src/caffe/util/upgrade_proto.cpp
@@ -25,7 +25,6 @@ bool UpgradeV0Net(const V0NetParameter& v0_net_param_padding_layers,
   // Now upgrade layer parameters.
   bool is_fully_compatible = true;
   net_param->Clear();
-  net_param->set_version(1);
   if (v0_net_param.has_name()) {
     net_param->set_name(v0_net_param.name());
   }
-- 
2.7.4
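
For reference, this is roughly how the NetParameter message reads once the rollback above is applied; it is reassembled from the caffe.proto hunk (field names, numbers, and comments are taken verbatim from the diff, surrounding messages omitted):

    message NetParameter {
      optional string name = 1; // consider giving the network a name
      // The input blobs to the network.
      repeated string input = 2;
      // The dim of the input blobs. For each input blob there should be four
      // values specifying the num, channels, height and width of the input blob.
      // Thus, there should be a total of (4 * #input) numbers.
      repeated int32 input_dim = 3;
      repeated LayerParameter layers = 4; // a bunch of layers.
      // Whether the network will force every layer to carry out backward operation.
      // If set False, then whether to carry out backward is determined
      // automatically according to the net structure and learning rates.
      optional bool force_backward = 5 [default = false];
    }

Network prototxt files written against this rolled-back schema simply begin with the "name:" field, as in the example files touched above; the leading "version: 1" line is no longer part of the format.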