rollback previous commit adding version number to NetParameter -- going a different route
Jeff Donahue 2014-03-23 21:06:09 -07:00
Parent 23bfeeb143
Commit 8198585b4a
22 changed files: 6 additions and 51 deletions

View file

@@ -1,4 +1,3 @@
-version: 1
name: "CIFAR10_full_deploy"
# N.B. input image must be in CIFAR-10 format
# as described at http://www.cs.toronto.edu/~kriz/cifar.html
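
The CIFAR-10 format referenced by the comment above stores each example as one label byte followed by 32*32*3 = 3072 pixel bytes, with the red, green, and blue planes laid out one after another. A minimal C++ sketch of reading a single record under that assumption follows; the struct and function names are invented for illustration and are not part of the Caffe code base.

#include <array>
#include <cstdint>
#include <fstream>

// Illustrative sketch of the CIFAR-10 binary record layout described at the
// URL above: 1 label byte + 3072 pixel bytes (R, G, B planes, row-major).
struct Cifar10Record {
  uint8_t label;                            // class index, 0-9
  std::array<uint8_t, 3 * 32 * 32> pixels;  // channel-planar image data
};

bool ReadCifar10Record(std::ifstream& batch_file, Cifar10Record* record) {
  batch_file.read(reinterpret_cast<char*>(&record->label), 1);
  batch_file.read(reinterpret_cast<char*>(record->pixels.data()),
                  record->pixels.size());
  return static_cast<bool>(batch_file);  // false if the stream ran short
}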

View file

@@ -1,4 +1,3 @@
-version: 1
name: "CIFAR10_full_test"
layers {
name: "cifar"

View file

@@ -1,4 +1,3 @@
-version: 1
name: "CIFAR10_full_train"
layers {
name: "cifar"

View file

@@ -1,7 +1,4 @@
-version: 1
name: "CIFAR10_quick_test"
# N.B. input image must be in CIFAR-10 format
# as described at http://www.cs.toronto.edu/~kriz/cifar.html
input: "data"
input_dim: 1
input_dim: 3

View file

@@ -1,5 +1,3 @@
-# quick config
-version: 1
name: "CIFAR10_quick_test"
layers {
name: "cifar"

View file

@@ -1,5 +1,3 @@
-# quick config
-version: 1
name: "CIFAR10_quick_train"
layers {
name: "cifar"

View file

@@ -1,4 +1,3 @@
-version: 1
name: "CaffeNet"
layers {
name: "data"

View file

@@ -1,4 +1,3 @@
-version: 1
name: "CaffeNet"
input: "data"
input_dim: 10

View file

@@ -1,4 +1,3 @@
-version: 1
name: "CaffeNet"
layers {
name: "data"

View file

@@ -1,4 +1,3 @@
-version: 1
name: "CaffeNet"
layers {
name: "data"

View file

@@ -3,6 +3,6 @@
TOOLS=../../build/tools
GLOG_logtostderr=1 $TOOLS/train_net.bin \
-imagenet_solver.prototxt caffe_imagenet_train_310000.solverstate
+imagenet_solver.prototxt caffe_imagenet_train_10000.solverstate
echo "Done."

View file

@@ -1,11 +1,9 @@
-version: 1
name: "LeNet"
input: "data"
input_dim: 64
input_dim: 1
input_dim: 28
input_dim: 28
# N.B. input should be 0/1 = mnist raw data scaled by 0.00390625
layers {
name: "conv1"
type: CONVOLUTION
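
The N.B. comment above describes the usual MNIST preprocessing: raw pixel bytes in 0-255 are multiplied by 0.00390625, i.e. 1/256, so the network sees values in [0, 1). A minimal sketch of just that scaling step, with a function name invented for illustration:

#include <cstddef>
#include <cstdint>
#include <vector>

// Scale raw MNIST bytes by 1/256 = 0.00390625, as noted in the prototxt above.
std::vector<float> ScaleMnistPixels(const std::vector<uint8_t>& raw_pixels) {
  const float kScale = 0.00390625f;  // 1 / 256
  std::vector<float> scaled(raw_pixels.size());
  for (std::size_t i = 0; i < raw_pixels.size(); ++i) {
    scaled[i] = raw_pixels[i] * kScale;  // 0..255 -> [0, 1)
  }
  return scaled;
}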

View file

@@ -1,4 +1,3 @@
-version: 1
name: "LeNet-test"
layers {
name: "mnist"

View file

@@ -1,4 +1,3 @@
-version: 1
name: "LeNet"
layers {
name: "mnist"

View file

@@ -1,4 +1,3 @@
-version: 1
name: "CaffeNet"
layers {
name: "data"

View file

@@ -1,4 +1,3 @@
-version: 1
name: "CaffeNet"
layers {
name: "data"

View file

@@ -18,8 +18,6 @@ using std::pair;
using std::map;
using std::set;
-const int kNetParameterVersionNumber = 1;
namespace caffe {
template <typename Dtype>
@@ -327,7 +325,6 @@ void Net<Dtype>::ReadParamsFromTextFile(const string& param_file,
<< "V0NetParameter to NetParameter (see above); continuing anyway.";
}
}
-CHECK_EQ(param->version(), kNetParameterVersionNumber);
}
template <typename Dtype>
@@ -348,7 +345,6 @@ void Net<Dtype>::ReadParamsFromBinaryFile(const string& param_file,
<< "V0NetParameter to NetParameter (see above); continuing anyway.";
}
}
-CHECK_EQ(param->version(), kNetParameterVersionNumber);
}
template <typename Dtype>

View file

@@ -39,22 +39,18 @@ message FillerParameter {
}
message NetParameter {
-// The NetParameter version number; currently only version 1 is supported.
-// (The version number should only be bumped for breaking changes, which
-// ideally should never happen.)
-required int32 version = 1 [default = 1];
-optional string name = 2; // consider giving the network a name
+optional string name = 1; // consider giving the network a name
// The input blobs to the network.
-repeated string input = 3;
+repeated string input = 2;
// The dim of the input blobs. For each input blob there should be four
// values specifying the num, channels, height and width of the input blob.
// Thus, there should be a total of (4 * #input) numbers.
-repeated int32 input_dim = 4;
-repeated LayerParameter layers = 5; // a bunch of layers.
+repeated int32 input_dim = 3;
+repeated LayerParameter layers = 4; // a bunch of layers.
// Whether the network will force every layer to carry out backward operation.
// If set False, then whether to carry out backward is determined
// automatically according to the net structure and learning rates.
-optional bool force_backward = 6 [default = false];
+optional bool force_backward = 5 [default = false];
}
message SolverParameter {
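
A note on the renumbering above, stated as general protocol-buffer behavior rather than anything claimed in the commit itself: the text format used by the .prototxt files refers to fields by name, so existing network definitions keep parsing after the field numbers change, while binary-serialized NetParameter messages are keyed by field number and would not survive the renumbering. The standalone sketch below, with a made-up toy definition, shows the text-format side; it assumes the generated caffe.pb.h header is available.

#include <string>

#include <google/protobuf/text_format.h>

#include "caffe/proto/caffe.pb.h"  // generated from the caffe.proto above

int main() {
  // Text format is keyed by field name, so this parses identically before and
  // after the renumbering; only the binary wire format depends on the numbers.
  const std::string prototxt =
      "name: 'TinyNet' "
      "input: 'data' "
      "input_dim: 1 input_dim: 3 input_dim: 32 input_dim: 32 ";
  caffe::NetParameter param;
  const bool ok =
      google::protobuf::TextFormat::ParseFromString(prototxt, &param);
  return (ok && param.input_dim_size() == 4) ? 0 : 1;
}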

View file

@@ -44,7 +44,6 @@ class NetTest : public ::testing::Test {
delete db;
const string& proto_prefix =
"version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "

View file

@@ -186,7 +186,6 @@ TYPED_TEST_CASE(SplitLayerInsertionTest, InsertionDtypes);
TYPED_TEST(SplitLayerInsertionTest, TestNoInsertion1) {
const string& input_proto =
"version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
@@ -211,7 +210,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestNoInsertion1) {
TYPED_TEST(SplitLayerInsertionTest, TestNoInsertion2) {
const string& input_proto =
"version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
@@ -249,7 +247,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestNoInsertion2) {
TYPED_TEST(SplitLayerInsertionTest, TestNoInsertionImageNet) {
const string& input_proto =
"version: 1 "
"name: 'CaffeNet' "
"layers { "
" name: 'data' "
@@ -571,7 +568,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestNoInsertionImageNet) {
TYPED_TEST(SplitLayerInsertionTest, TestNoInsertionWithInPlace) {
const string& input_proto =
"version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
@@ -602,7 +598,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestNoInsertionWithInPlace) {
TYPED_TEST(SplitLayerInsertionTest, TestInsertion) {
const string& input_proto =
"version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
@@ -641,7 +636,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestInsertion) {
" bottom: 'innerprod3' "
"} ";
const string& expected_output_proto =
"version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
@@ -699,7 +693,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestInsertion) {
TYPED_TEST(SplitLayerInsertionTest, TestInsertionTwoTop) {
const string& input_proto =
"version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
@@ -744,7 +737,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestInsertionTwoTop) {
" bottom: 'innerprod4' "
"} ";
const string& expected_output_proto =
"version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
@@ -807,7 +799,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestInsertionTwoTop) {
TYPED_TEST(SplitLayerInsertionTest, TestInputInsertion) {
const string& input_proto =
"version: 1 "
"name: 'TestNetwork' "
"input: 'data' "
"input_dim: 10 "
@@ -833,7 +824,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestInputInsertion) {
" bottom: 'innerprod2' "
"} ";
const string& expected_output_proto =
"version: 1 "
"name: 'TestNetwork' "
"input: 'data' "
"input_dim: 10 "
@@ -870,7 +860,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestInputInsertion) {
TYPED_TEST(SplitLayerInsertionTest, TestWithInPlace) {
const string& input_proto =
"version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "
@@ -909,7 +898,6 @@ TYPED_TEST(SplitLayerInsertionTest, TestWithInPlace) {
" bottom: 'data' "
"} ";
const string& expected_output_proto =
"version: 1 "
"name: 'TestNetwork' "
"layers: { "
" name: 'data' "

View file

@@ -1193,7 +1193,6 @@ TYPED_TEST(V0UpgradeTest, TestSimple) {
" bottom: 'label' "
"} ";
const string& expected_output_proto =
"version: 1 "
"name: 'CaffeNet' "
"layers { "
" name: 'data' "
@@ -1514,7 +1513,6 @@ TYPED_TEST(V0UpgradeTest, TestAllParams) {
" } "
"} ";
const string& expected_output_proto =
"version: 1 "
"name: 'CaffeNet' "
"input: 'input_data' "
"input_dim: 64 "
@@ -2109,7 +2107,6 @@ TYPED_TEST(V0UpgradeTest, TestImageNet) {
" bottom: 'label' "
"} ";
const string& expected_output_proto =
"version: 1 "
"name: 'CaffeNet' "
"layers { "
" name: 'data' "

View file

@@ -25,7 +25,6 @@ bool UpgradeV0Net(const V0NetParameter& v0_net_param_padding_layers,
// Now upgrade layer parameters.
bool is_fully_compatible = true;
net_param->Clear();
-net_param->set_version(1);
if (v0_net_param.has_name()) {
net_param->set_name(v0_net_param.name());
}
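
For context, the function shown above is used roughly as sketched below: it fills a NetParameter from an old-style V0NetParameter and returns whether the translation was fully compatible. The wrapper name is invented, the header path is an assumption, and obtaining the V0 definition in the first place (e.g. parsing an old prototxt) is omitted.

#include "caffe/proto/caffe.pb.h"
#include "caffe/util/upgrade_proto.hpp"  // assumed location of the declaration

// Sketch: convert an old V0NetParameter definition into the current
// NetParameter schema via UpgradeV0Net, as shown in the diff above.
caffe::NetParameter ConvertOldNetDefinition(
    const caffe::V0NetParameter& v0_param) {
  caffe::NetParameter net_param;
  const bool fully_compatible = caffe::UpgradeV0Net(v0_param, &net_param);
  if (!fully_compatible) {
    // Some V0 fields had no direct equivalent; the result may need review.
  }
  return net_param;
}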