Cleaner version of the refactoring, with fields added to LayerConnection
(which retains an optional V0LayerParameter field for legacy support)
and LayerConnection renamed to LayerParameter.
Jeff Donahue 2014-03-27 14:58:45 -07:00
Parent b7444d63e2
Commit 57167a047c
11 changed files: 286 additions and 455 deletions
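The upgrade path below works because the new messages stay wire compatible with the old ones: LayerParameter keeps the old LayerConnection field numbers (layer = 1, bottom = 2, top = 3), so legacy files still parse into a NetParameter and are detected and rewritten in memory. A minimal sketch of that round trip (the net and layer names are hypothetical; the nested fields follow the V0LayerParameter definition in this commit):

#include <google/protobuf/text_format.h>
#include "caffe/proto/caffe.pb.h"
#include "caffe/util/upgrade_proto.hpp"

int main() {
  // An old-style (V0) net: layer parameters nested in the deprecated
  // "layer" field, exactly as LayerConnection used to spell them.
  const char* kV0Net =
      "name: 'example' "
      "input: 'data' "
      "input_dim: 1 input_dim: 1 input_dim: 28 input_dim: 28 "
      "layers { layer { name: 'ip1' type: 'innerproduct' num_output: 10 } "
      "         bottom: 'data' top: 'ip1' }";
  caffe::NetParameter net_param;
  google::protobuf::TextFormat::ParseFromString(kV0Net, &net_param);
  // The legacy text still parses; the nested V0 message flags the net
  // as needing an upgrade, which UpgradeV0Net then performs.
  if (caffe::NetNeedsUpgrade(net_param)) {
    caffe::NetParameter upgraded;
    caffe::UpgradeV0Net(net_param, &upgraded);
  }
  return 0;
}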

View file

@@ -34,9 +34,6 @@ TOOL_SRCS := $(shell find tools -name "*.cpp")
EXAMPLE_SRCS := $(shell find examples -name "*.cpp")
# PROTO_SRCS are the protocol buffer definitions
PROTO_SRCS := $(wildcard src/$(PROJECT)/proto/*.proto)
# DEPRECATED_PROTO_SRCS are protobuf definitions we no longer officially
# support, but keep around for upgrades etc.
DEPRECATED_PROTO_SRCS := $(wildcard src/$(PROJECT)/proto/deprecated/*.proto)
# NONGEN_CXX_SRCS includes all source/header files except those generated
# automatically (e.g., by proto).
NONGEN_CXX_SRCS := $(shell find \
@@ -63,17 +60,12 @@ MAT$(PROJECT)_SO := matlab/$(PROJECT)/$(PROJECT)
PROTO_GEN_HEADER := ${PROTO_SRCS:.proto=.pb.h}
PROTO_GEN_CC := ${PROTO_SRCS:.proto=.pb.cc}
PROTO_GEN_PY := ${PROTO_SRCS:.proto=_pb2.py}
# The generated files for deprecated protocol buffers
DEPRECATED_PROTO_GEN_HEADER := ${DEPRECATED_PROTO_SRCS:.proto=.pb.h}
DEPRECATED_PROTO_GEN_CC := ${DEPRECATED_PROTO_SRCS:.proto=.pb.cc}
DEPRECATED_PROTO_GEN_PY := ${DEPRECATED_PROTO_SRCS:.proto=_pb2.py}
# The objects corresponding to the source files
# These objects will be linked into the final shared library, so we
# exclude the tool, example, and test objects.
CXX_OBJS := $(addprefix $(BUILD_DIR)/, ${CXX_SRCS:.cpp=.o})
CU_OBJS := $(addprefix $(BUILD_DIR)/, ${CU_SRCS:.cu=.cuo})
PROTO_OBJS := $(addprefix $(BUILD_DIR)/, ${PROTO_GEN_CC:.cc=.o})
PROTO_OBJS += $(addprefix $(BUILD_DIR)/, ${DEPRECATED_PROTO_GEN_CC:.cc=.o})
OBJS := $(PROTO_OBJS) $(CXX_OBJS) $(CU_OBJS)
# tool, example, and test objects
TOOL_OBJS := $(addprefix $(BUILD_DIR)/, ${TOOL_SRCS:.cpp=.o})
@@ -252,14 +244,12 @@ $(PROTO_GEN_PY): $(PROTO_SRCS)
protoc --proto_path=src --python_out=python $(PROTO_SRCS)
@echo
proto: $(PROTO_GEN_CC) $(DEPRECATED_PROTO_GEN_CC)
proto: $(PROTO_GEN_CC)
$(PROTO_GEN_CC): $(PROTO_SRCS) $(DEPRECATED_PROTO_SRCS)
protoc --proto_path=src --cpp_out=src $(PROTO_SRCS) $(DEPRECATED_PROTO_SRCS)
$(PROTO_GEN_CC): $(PROTO_SRCS)
protoc --proto_path=src --cpp_out=src $(PROTO_SRCS)
mkdir -p include/$(PROJECT)/proto
cp $(PROTO_GEN_HEADER) include/$(PROJECT)/proto/
mkdir -p include/$(PROJECT)/proto/deprecated
cp $(DEPRECATED_PROTO_GEN_HEADER) include/$(PROJECT)/proto/deprecated/
@echo
clean:

View file

@@ -6,26 +6,29 @@
#include <string>
#include "caffe/proto/caffe.pb.h"
#include "caffe/proto/deprecated/caffe_v0_to_v1_bridge.pb.h"
using std::string;
namespace caffe {
// Return true iff any layer contains parameters specified using
// deprecated V0LayerParameter.
bool NetNeedsUpgrade(const NetParameter& net_param);
// Perform all necessary transformations to upgrade a V0NetParameter into a
// NetParameter (including upgrading padding layers and LayerParameters).
bool UpgradeV0Net(const V0NetParameter& v0_net_param, NetParameter* net_param);
bool UpgradeV0Net(const NetParameter& v0_net_param, NetParameter* net_param);
// Upgrade V0NetParameter with padding layers to pad-aware conv layers.
// Upgrade NetParameter with padding layers to pad-aware conv layers.
// For any padding layer, remove it and put its pad parameter in any layers
// taking its top blob as input.
// Error if any of these above layers are not-conv layers.
void UpgradeV0PaddingLayers(const V0NetParameter& param,
V0NetParameter* param_upgraded_pad);
void UpgradeV0PaddingLayers(const NetParameter& param,
NetParameter* param_upgraded_pad);
// Upgrade a single V0LayerConnection to the new LayerParameter format.
bool UpgradeV0LayerConnection(const V0LayerConnection& v0_layer_connection,
LayerParameter* layer_param);
bool UpgradeLayerParameter(const LayerParameter& v0_layer_connection,
LayerParameter* layer_param);
LayerParameter_LayerType UpgradeV0LayerType(const string& type);
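As a concrete illustration of the padding upgrade declared above, here is a hedged sketch (layer names and pad/kernel values are made up; the 'padding' and 'conv' type strings match the V0 conventions this code checks for): a standalone padding layer feeding a conv layer is deleted, and its pad value moves into the conv layer, which then reads the padding layer's input directly.

#include <google/protobuf/text_format.h>
#include "caffe/proto/caffe.pb.h"
#include "caffe/util/upgrade_proto.hpp"

void PaddingUpgradeSketch() {
  // Before: data -> pad1 (padding, pad: 2) -> conv1 (kernelsize: 5).
  const char* kBefore =
      "input: 'data' "
      "layers { layer { name: 'pad1' type: 'padding' pad: 2 } "
      "         bottom: 'data' top: 'pad1' } "
      "layers { layer { name: 'conv1' type: 'conv' kernelsize: 5 } "
      "         bottom: 'pad1' top: 'conv1' }";
  caffe::NetParameter before, after;
  google::protobuf::TextFormat::ParseFromString(kBefore, &before);
  caffe::UpgradeV0PaddingLayers(before, &after);
  // After: a single conv layer with pad: 2 and bottom: 'data' -- the
  // padding layer is gone, as exercised by test_upgrade_proto.cpp below.
}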

View file

@@ -7,7 +7,6 @@
#include "caffe/common.hpp"
#include "caffe/proto/caffe.pb.h"
#include "caffe/proto/deprecated/caffe_v0_to_v1_bridge.pb.h"
#include "caffe/layer.hpp"
#include "caffe/net.hpp"
#include "caffe/util/io.hpp"
@@ -310,40 +309,48 @@ void Net<Dtype>::CopyTrainedLayersFrom(const string trained_filename) {
template <typename Dtype>
void Net<Dtype>::ReadParamsFromTextFile(const string& param_file,
NetParameter* param) {
if (!ReadProtoFromTextFile(param_file, param)) {
// Failed to parse file as NetParameter; try to parse as a V0NetParameter
// instead.
V0NetParameter v0_param;
CHECK(ReadProtoFromTextFile(param_file, &v0_param))
<< "Failed to parse NetParameter file: " << param_file;
LOG(ERROR) << "Parsed file as V0NetParameter: " << param_file;
CHECK(ReadProtoFromTextFile(param_file, param))
<< "Failed to parse NetParameter file: " << param_file;
if (NetNeedsUpgrade(*param)) {
// NetParameter was specified using the old style (V0LayerParameter); try to
// upgrade it.
LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
<< "V0LayerParameter: " << param_file;
NetParameter original_param(*param);
if (!UpgradeV0Net(original_param, param)) {
LOG(ERROR) << "Warning: had one or more problems upgrading "
<< "V0NetParameter to NetParameter (see above); continuing anyway.";
} else {
LOG(INFO) << "Successfully upgraded file specified using deprecated "
<< "V0LayerParameter";
}
LOG(ERROR) << "Note that future Caffe releases will not support "
<< "V0NetParameter; use ./build/tools/upgrade_net_proto.bin to upgrade "
<< "this and any other network proto files to the new format.";
if (!UpgradeV0Net(v0_param, param)) {
LOG(ERROR) << "Warning: had one or more problems upgrading "
<< "V0NetParameter to NetParameter (see above); continuing anyway.";
}
}
}
template <typename Dtype>
void Net<Dtype>::ReadParamsFromBinaryFile(const string& param_file,
NetParameter* param) {
if (!ReadProtoFromBinaryFile(param_file, param)) {
// Failed to parse file as NetParameter; try to parse as a V0NetParameter
// instead.
V0NetParameter v0_param;
CHECK(ReadProtoFromBinaryFile(param_file, &v0_param))
<< "Failed to parse NetParameter file: " << param_file;
LOG(ERROR) << "Parsed file as V0NetParameter: " << param_file;
CHECK(ReadProtoFromBinaryFile(param_file, param))
<< "Failed to parse NetParameter file: " << param_file;
if (NetNeedsUpgrade(*param)) {
// NetParameter was specified using the old style (V0LayerParameter); try to
// upgrade it.
LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
<< "V0LayerParameter: " << param_file;
NetParameter original_param(*param);
if (!UpgradeV0Net(original_param, param)) {
LOG(ERROR) << "Warning: had one or more problems upgrading "
<< "V0NetParameter to NetParameter (see above); continuing anyway.";
} else {
LOG(INFO) << "Successfully upgraded file specified using deprecated "
<< "V0LayerParameter";
}
LOG(ERROR) << "Note that future Caffe releases will not support "
<< "V0NetParameter; use ./build/tools/upgrade_net_proto.bin to upgrade "
<< "this and any other network proto files to the new format.";
if (!UpgradeV0Net(v0_param, param)) {
LOG(ERROR) << "Warning: had one or more problems upgrading "
<< "V0NetParameter to NetParameter (see above); continuing anyway.";
}
}
}
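Callers do not change: loading a legacy prototxt through Net now parses it as a NetParameter, and the nested V0 fields are upgraded in memory before layers are instantiated. A hedged usage sketch (the file name is hypothetical, and this assumes the Net(const string&) constructor path that calls ReadParamsFromTextFile):

#include "caffe/net.hpp"

void LoadLegacyNet() {
  // Constructing a Net from a V0-era definition; the upgrade above runs
  // transparently, logging only a deprecation warning.
  caffe::Net<float> net("legacy_v0_train.prototxt");
}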

View file

@@ -40,13 +40,13 @@ message FillerParameter {
message NetParameter {
optional string name = 1; // consider giving the network a name
repeated LayerParameter layers = 2; // a bunch of layers.
// The input blobs to the network.
repeated string input = 2;
repeated string input = 3;
// The dim of the input blobs. For each input blob there should be four
// values specifying the num, channels, height and width of the input blob.
// Thus, there should be a total of (4 * #input) numbers.
repeated int32 input_dim = 3;
repeated LayerParameter layers = 4; // a bunch of layers.
repeated int32 input_dim = 4;
// Whether the network will force every layer to carry out backward operation.
// If set False, then whether to carry out backward is determined
// automatically according to the net structure and learning rates.
@@ -90,7 +90,9 @@ message SolverState {
}
message LayerParameter {
optional string name = 1; // the layer name
repeated string bottom = 2; // the name of the bottom blobs
repeated string top = 3; // the name of the top blobs
optional string name = 4; // the layer name
// Add new LayerTypes to the enum below in lexicographical order (other than
// starting with NONE), starting with the next available ID in the comment
@@ -128,32 +130,34 @@ message LayerParameter {
TANH = 23;
WINDOW_DATA = 24;
}
optional LayerType type = 2; // the layer type from the enum above
repeated string bottom = 3; // the name of the bottom blobs
repeated string top = 4; // the name of the top blobs
optional LayerType type = 5; // the layer type from the enum above
// The blobs containing the numeric parameters of the layer
repeated BlobProto blobs = 5;
repeated BlobProto blobs = 6;
// The ratio that is multiplied on the global learning rate. If you want to
// set the learning ratio for one blob, you need to set it for all blobs.
repeated float blobs_lr = 6;
repeated float blobs_lr = 7;
// The weight decay that is multiplied on the global weight decay.
repeated float weight_decay = 7;
repeated float weight_decay = 8;
// Parameters for particular layer types.
optional ConcatParameter concat_param = 8;
optional ConvolutionParameter convolution_param = 9;
optional DataParameter data_param = 10;
optional DropoutParameter dropout_param = 11;
optional HDF5DataParameter hdf5_data_param = 12;
optional HDF5OutputParameter hdf5_output_param = 13;
optional ImageDataParameter image_data_param = 14;
optional InfogainLossParameter infogain_loss_param = 15;
optional InnerProductParameter inner_product_param = 16;
optional LRNParameter lrn_param = 17;
optional PoolingParameter pooling_param = 18;
optional WindowDataParameter window_data_param = 19;
optional ConcatParameter concat_param = 9;
optional ConvolutionParameter convolution_param = 10;
optional DataParameter data_param = 11;
optional DropoutParameter dropout_param = 12;
optional HDF5DataParameter hdf5_data_param = 13;
optional HDF5OutputParameter hdf5_output_param = 14;
optional ImageDataParameter image_data_param = 15;
optional InfogainLossParameter infogain_loss_param = 16;
optional InnerProductParameter inner_product_param = 17;
optional LRNParameter lrn_param = 18;
optional PoolingParameter pooling_param = 19;
optional WindowDataParameter window_data_param = 20;
// The layer parameters specified as a deprecated V0LayerParameter.
// This should never be used by any code except to upgrade to the new
// LayerParameter specification.
optional V0LayerParameter layer = 1;
}
// Message that stores parameters used by ConcatLayer
@@ -305,3 +309,96 @@ message WindowDataParameter {
// square: the tightest square around the window is cropped
optional string crop_mode = 11 [default = "warp"];
}
// DEPRECATED: V0LayerParameter is the old way of specifying layer parameters
// in Caffe. We keep this message type around for legacy support.
message V0LayerParameter {
optional string name = 1; // the layer name
optional string type = 2; // the string to specify the layer type
// Parameters to specify layers with inner products.
optional uint32 num_output = 3; // The number of outputs for the layer
optional bool biasterm = 4 [default = true]; // whether to have bias terms
optional FillerParameter weight_filler = 5; // The filler for the weight
optional FillerParameter bias_filler = 6; // The filler for the bias
optional uint32 pad = 7 [default = 0]; // The padding size
optional uint32 kernelsize = 8; // The kernel size
optional uint32 group = 9 [default = 1]; // The group size for group conv
optional uint32 stride = 10 [default = 1]; // The stride
enum PoolMethod {
MAX = 0;
AVE = 1;
STOCHASTIC = 2;
}
optional PoolMethod pool = 11 [default = MAX]; // The pooling method
optional float dropout_ratio = 12 [default = 0.5]; // dropout ratio
optional uint32 local_size = 13 [default = 5]; // for local response norm
optional float alpha = 14 [default = 1.]; // for local response norm
optional float beta = 15 [default = 0.75]; // for local response norm
// For data layers, specify the data source
optional string source = 16;
// For data pre-processing, we can do simple scaling and subtracting the
// data mean, if provided. Note that the mean subtraction is always carried
// out before scaling.
optional float scale = 17 [default = 1];
optional string meanfile = 18;
// For data layers, specify the batch size.
optional uint32 batchsize = 19;
// For data layers, specify if we would like to randomly crop an image.
optional uint32 cropsize = 20 [default = 0];
// For data layers, specify if we want to randomly mirror data.
optional bool mirror = 21 [default = false];
// The blobs containing the numeric parameters of the layer
repeated BlobProto blobs = 50;
// The ratio that is multiplied on the global learning rate. If you want to
// set the learning ratio for one blob, you need to set it for all blobs.
repeated float blobs_lr = 51;
// The weight decay that is multiplied on the global weight decay.
repeated float weight_decay = 52;
// The rand_skip variable is for the data layer to skip a few data points
// to avoid all asynchronous sgd clients to start at the same point. The skip
// point would be set as rand_skip * rand(0,1). Note that rand_skip should not
// be larger than the number of keys in the leveldb.
optional uint32 rand_skip = 53 [default = 0];
// Fields related to detection (det_*)
// foreground (object) overlap threshold
optional float det_fg_threshold = 54 [default = 0.5];
// background (non-object) overlap threshold
optional float det_bg_threshold = 55 [default = 0.5];
// Fraction of batch that should be foreground objects
optional float det_fg_fraction = 56 [default = 0.25];
// optional bool OBSOLETE_can_clobber = 57 [default = true];
// Amount of contextual padding to add around a window
// (used only by the window_data_layer)
optional uint32 det_context_pad = 58 [default = 0];
// Mode for cropping out a detection window
// warp: cropped window is warped to a fixed size and aspect ratio
// square: the tightest square around the window is cropped
optional string det_crop_mode = 59 [default = "warp"];
// For ReshapeLayer, one needs to specify the new dimensions.
optional int32 new_num = 60 [default = 0];
optional int32 new_channels = 61 [default = 0];
optional int32 new_height = 62 [default = 0];
optional int32 new_width = 63 [default = 0];
// Whether or not ImageLayer should shuffle the list of files at every epoch.
// It will also resize images if new_height or new_width are not zero.
optional bool shuffle_images = 64 [default = false];
// For ConcatLayer, one needs to specify the dimension for concatenation, and
// the other dimensions must be the same for all the bottom blobs.
// By default it will concatenate blobs along the channels dimension.
optional uint32 concat_dim = 65 [default = 1];
optional HDF5OutputParameter hdf5_output_param = 1001;
}
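The renumbering above is deliberate: NetParameter keeps name = 1, layers = 2, input = 3, input_dim = 4 from the old format, and LayerParameter reserves the old LayerConnection slots (layer = 1, bottom = 2, top = 3) while its new fields start at 4. The payoff, sketched below, is that bytes serialized under the old definitions decode directly under the new ones (the buffer here is assumed to hold such legacy bytes):

#include <string>
#include "caffe/proto/caffe.pb.h"

// old_net_bytes: a net serialized long ago by the V0 definitions.
// Every surviving field keeps its number, so the new NetParameter
// decodes it directly; V0-only settings land in layers(i).layer().
bool ParsesLegacyBinary(const std::string& old_net_bytes) {
  caffe::NetParameter net_param;
  return net_param.ParseFromString(old_net_bytes);
}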

View file

@@ -1,185 +0,0 @@
// Copyright 2013 Yangqing Jia
package caffe;
message BlobProto {
optional int32 num = 1 [default = 0];
optional int32 channels = 2 [default = 0];
optional int32 height = 3 [default = 0];
optional int32 width = 4 [default = 0];
repeated float data = 5 [packed = true];
repeated float diff = 6 [packed = true];
}
// The BlobProtoVector is simply a way to pass multiple blobproto instances
// around.
message BlobProtoVector {
repeated BlobProto blobs = 1;
}
message Datum {
optional int32 channels = 1;
optional int32 height = 2;
optional int32 width = 3;
// the actual image data, in bytes
optional bytes data = 4;
optional int32 label = 5;
// Optionally, the datum could also hold float data.
repeated float float_data = 6;
}
message FillerParameter {
// The filler type.
optional string type = 1 [default = 'constant'];
optional float value = 2 [default = 0]; // the value in constant filler
optional float min = 3 [default = 0]; // the min value in uniform filler
optional float max = 4 [default = 1]; // the max value in uniform filler
optional float mean = 5 [default = 0]; // the mean value in gaussian filler
optional float std = 6 [default = 1]; // the std value in gaussian filler
}
message LayerParameter {
optional string name = 1; // the layer name
optional string type = 2; // the string to specify the layer type
// Parameters to specify layers with inner products.
optional uint32 num_output = 3; // The number of outputs for the layer
optional bool biasterm = 4 [default = true]; // whether to have bias terms
optional FillerParameter weight_filler = 5; // The filler for the weight
optional FillerParameter bias_filler = 6; // The filler for the bias
optional uint32 pad = 7 [default = 0]; // The padding size
optional uint32 kernelsize = 8; // The kernel size
optional uint32 group = 9 [default = 1]; // The group size for group conv
optional uint32 stride = 10 [default = 1]; // The stride
enum PoolMethod {
MAX = 0;
AVE = 1;
STOCHASTIC = 2;
}
optional PoolMethod pool = 11 [default = MAX]; // The pooling method
optional float dropout_ratio = 12 [default = 0.5]; // dropout ratio
optional uint32 local_size = 13 [default = 5]; // for local response norm
optional float alpha = 14 [default = 1.]; // for local response norm
optional float beta = 15 [default = 0.75]; // for local response norm
// For data layers, specify the data source
optional string source = 16;
// For data pre-processing, we can do simple scaling and subtracting the
// data mean, if provided. Note that the mean subtraction is always carried
// out before scaling.
optional float scale = 17 [default = 1];
optional string meanfile = 18;
// For data layers, specify the batch size.
optional uint32 batchsize = 19;
// For data layers, specify if we would like to randomly crop an image.
optional uint32 cropsize = 20 [default = 0];
// For data layers, specify if we want to randomly mirror data.
optional bool mirror = 21 [default = false];
// The blobs containing the numeric parameters of the layer
repeated BlobProto blobs = 50;
// The ratio that is multiplied on the global learning rate. If you want to
// set the learning ratio for one blob, you need to set it for all blobs.
repeated float blobs_lr = 51;
// The weight decay that is multiplied on the global weight decay.
repeated float weight_decay = 52;
// The rand_skip variable is for the data layer to skip a few data points
// to avoid all asynchronous sgd clients to start at the same point. The skip
// point would be set as rand_skip * rand(0,1). Note that rand_skip should not
// be larger than the number of keys in the leveldb.
optional uint32 rand_skip = 53 [default = 0];
// Fields related to detection (det_*)
// foreground (object) overlap threshold
optional float det_fg_threshold = 54 [default = 0.5];
// background (non-object) overlap threshold
optional float det_bg_threshold = 55 [default = 0.5];
// Fraction of batch that should be foreground objects
optional float det_fg_fraction = 56 [default = 0.25];
// optional bool OBSOLETE_can_clobber = 57 [default = true];
// Amount of contextual padding to add around a window
// (used only by the window_data_layer)
optional uint32 det_context_pad = 58 [default = 0];
// Mode for cropping out a detection window
// warp: cropped window is warped to a fixed size and aspect ratio
// square: the tightest square around the window is cropped
optional string det_crop_mode = 59 [default = "warp"];
// For ReshapeLayer, one needs to specify the new dimensions.
optional int32 new_num = 60 [default = 0];
optional int32 new_channels = 61 [default = 0];
optional int32 new_height = 62 [default = 0];
optional int32 new_width = 63 [default = 0];
// Whether or not ImageLayer should shuffle the list of files at every epoch.
// It will also resize images if new_height or new_width are not zero.
optional bool shuffle_images = 64 [default = false];
// For ConcatLayer, one needs to specify the dimension for concatenation, and
// the other dimensions must be the same for all the bottom blobs.
// By default it will concatenate blobs along the channels dimension.
optional uint32 concat_dim = 65 [default = 1];
}
message LayerConnection {
optional LayerParameter layer = 1; // the layer parameter
repeated string bottom = 2; // the name of the bottom blobs
repeated string top = 3; // the name of the top blobs
}
message NetParameter {
optional string name = 1; // consider giving the network a name
repeated LayerConnection layers = 2; // a bunch of layers.
// The input blobs to the network.
repeated string input = 3;
// The dim of the input blobs. For each input blob there should be four
// values specifying the num, channels, height and width of the input blob.
// Thus, there should be a total of (4 * #input) numbers.
repeated int32 input_dim = 4;
// Whether the network will force every layer to carry out backward operation.
// If set False, then whether to carry out backward is determined
// automatically according to the net structure and learning rates.
optional bool force_backward = 5 [default = false];
}
message SolverParameter {
optional string train_net = 1; // The proto file for the training net.
optional string test_net = 2; // The proto file for the testing net.
// The number of iterations for each testing phase.
optional int32 test_iter = 3 [default = 0];
// The number of iterations between two testing phases.
optional int32 test_interval = 4 [default = 0];
optional float base_lr = 5; // The base learning rate
// the number of iterations between displaying info. If display = 0, no info
// will be displayed.
optional int32 display = 6;
optional int32 max_iter = 7; // the maximum number of iterations
optional string lr_policy = 8; // The learning rate decay policy.
optional float gamma = 9; // The parameter to compute the learning rate.
optional float power = 10; // The parameter to compute the learning rate.
optional float momentum = 11; // The momentum value.
optional float weight_decay = 12; // The weight decay.
optional int32 stepsize = 13; // the stepsize for learning rate policy "step"
optional int32 snapshot = 14 [default = 0]; // The snapshot interval
optional string snapshot_prefix = 15; // The prefix for the snapshot.
// whether to snapshot diff in the results or not. Snapshotting diff will help
// debugging but the final protocol buffer size will be much larger.
optional bool snapshot_diff = 16 [default = false];
// the mode solver will use: 0 for CPU and 1 for GPU. Use GPU in default.
optional int32 solver_mode = 17 [default = 1];
// the device_id will that be used in GPU mode. Use device_id = 0 in default.
optional int32 device_id = 18 [default = 0];
}
// A message that stores the solver snapshots
message SolverState {
optional int32 iter = 1; // The current iteration
optional string learned_net = 2; // The file that stores the learned net.
repeated BlobProto history = 3; // The history for sgd solvers
}

View file

@@ -1,115 +0,0 @@
// Copyright 2013 Yangqing Jia
import "caffe/proto/caffe.proto";
package caffe;
message V0NetParameter {
optional string name = 1; // consider giving the network a name
repeated V0LayerConnection layers = 2; // a bunch of layers.
// The input blobs to the network.
repeated string input = 3;
// The dim of the input blobs. For each input blob there should be four
// values specifying the num, channels, height and width of the input blob.
// Thus, there should be a total of (4 * #input) numbers.
repeated int32 input_dim = 4;
// Whether the network will force every layer to carry out backward operation.
// If set False, then whether to carry out backward is determined
// automatically according to the net structure and learning rates.
optional bool force_backward = 5 [default = false];
}
message V0LayerParameter {
optional string name = 1; // the layer name
optional string type = 2; // the string to specify the layer type
// Parameters to specify layers with inner products.
optional uint32 num_output = 3; // The number of outputs for the layer
optional bool biasterm = 4 [default = true]; // whether to have bias terms
optional FillerParameter weight_filler = 5; // The filler for the weight
optional FillerParameter bias_filler = 6; // The filler for the bias
optional uint32 pad = 7 [default = 0]; // The padding size
optional uint32 kernelsize = 8; // The kernel size
optional uint32 group = 9 [default = 1]; // The group size for group conv
optional uint32 stride = 10 [default = 1]; // The stride
enum PoolMethod {
MAX = 0;
AVE = 1;
STOCHASTIC = 2;
}
optional PoolMethod pool = 11 [default = MAX]; // The pooling method
optional float dropout_ratio = 12 [default = 0.5]; // dropout ratio
optional uint32 local_size = 13 [default = 5]; // for local response norm
optional float alpha = 14 [default = 1.]; // for local response norm
optional float beta = 15 [default = 0.75]; // for local response norm
// For data layers, specify the data source
optional string source = 16;
// For data pre-processing, we can do simple scaling and subtracting the
// data mean, if provided. Note that the mean subtraction is always carried
// out before scaling.
optional float scale = 17 [default = 1];
optional string meanfile = 18;
// For data layers, specify the batch size.
optional uint32 batchsize = 19;
// For data layers, specify if we would like to randomly crop an image.
optional uint32 cropsize = 20 [default = 0];
// For data layers, specify if we want to randomly mirror data.
optional bool mirror = 21 [default = false];
// The blobs containing the numeric parameters of the layer
repeated BlobProto blobs = 50;
// The ratio that is multiplied on the global learning rate. If you want to
// set the learning ratio for one blob, you need to set it for all blobs.
repeated float blobs_lr = 51;
// The weight decay that is multiplied on the global weight decay.
repeated float weight_decay = 52;
// The rand_skip variable is for the data layer to skip a few data points
// to avoid all asynchronous sgd clients to start at the same point. The skip
// point would be set as rand_skip * rand(0,1). Note that rand_skip should not
// be larger than the number of keys in the leveldb.
optional uint32 rand_skip = 53 [default = 0];
// Fields related to detection (det_*)
// foreground (object) overlap threshold
optional float det_fg_threshold = 54 [default = 0.5];
// background (non-object) overlap threshold
optional float det_bg_threshold = 55 [default = 0.5];
// Fraction of batch that should be foreground objects
optional float det_fg_fraction = 56 [default = 0.25];
// optional bool OBSOLETE_can_clobber = 57 [default = true];
// Amount of contextual padding to add around a window
// (used only by the window_data_layer)
optional uint32 det_context_pad = 58 [default = 0];
// Mode for cropping out a detection window
// warp: cropped window is warped to a fixed size and aspect ratio
// square: the tightest square around the window is cropped
optional string det_crop_mode = 59 [default = "warp"];
// For ReshapeLayer, one needs to specify the new dimensions.
optional int32 new_num = 60 [default = 0];
optional int32 new_channels = 61 [default = 0];
optional int32 new_height = 62 [default = 0];
optional int32 new_width = 63 [default = 0];
// Whether or not ImageLayer should shuffle the list of files at every epoch.
// It will also resize images if new_height or new_width are not zero.
optional bool shuffle_images = 64 [default = false];
// For ConcatLayer, one needs to specify the dimension for concatenation, and
// the other dimensions must be the same for all the bottom blobs.
// By default it will concatenate blobs along the channels dimension.
optional uint32 concat_dim = 65 [default = 1];
}
message V0LayerConnection {
optional V0LayerParameter layer = 1; // the layer parameter
repeated string bottom = 2; // the name of the bottom blobs
repeated string top = 3; // the name of the top blobs
}

View file

@@ -25,18 +25,18 @@ class PaddingLayerUpgradeTest : public ::testing::Test {
// Test that UpgradeV0PaddingLayers called on the proto specified by
// input_param_string results in the proto specified by
// output_param_string.
V0NetParameter input_param;
NetParameter input_param;
CHECK(google::protobuf::TextFormat::ParseFromString(
input_param_string, &input_param));
V0NetParameter expected_output_param;
NetParameter expected_output_param;
CHECK(google::protobuf::TextFormat::ParseFromString(
output_param_string, &expected_output_param));
V0NetParameter actual_output_param;
NetParameter actual_output_param;
UpgradeV0PaddingLayers(input_param, &actual_output_param);
EXPECT_EQ(expected_output_param.DebugString(),
actual_output_param.DebugString());
// Also test idempotence.
V0NetParameter double_pad_upgrade_param;
NetParameter double_pad_upgrade_param;
UpgradeV0PaddingLayers(actual_output_param, &double_pad_upgrade_param);
EXPECT_EQ(actual_output_param.DebugString(),
double_pad_upgrade_param.DebugString());
@@ -1096,10 +1096,10 @@ class V0UpgradeTest : public ::testing::Test {
protected:
void RunV0UpgradeTest(
const string& input_param_string, const string& output_param_string) {
// Test that UpgradeV0Net called on the V0NetParameter proto specified by
// Test that UpgradeV0Net called on the NetParameter proto specified by
// input_param_string results in the NetParameter proto specified by
// output_param_string.
V0NetParameter input_param;
NetParameter input_param;
CHECK(google::protobuf::TextFormat::ParseFromString(
input_param_string, &input_param));
NetParameter expected_output_param;

View file

@@ -10,17 +10,25 @@
#include "caffe/common.hpp"
#include "caffe/util/upgrade_proto.hpp"
#include "caffe/proto/caffe.pb.h"
#include "caffe/proto/deprecated/caffe_v0_to_v1_bridge.pb.h"
using std::map;
using std::string;
namespace caffe {
bool UpgradeV0Net(const V0NetParameter& v0_net_param_padding_layers,
bool NetNeedsUpgrade(const NetParameter& net_param) {
for (int i = 0; i < net_param.layers_size(); ++i) {
if (net_param.layers(i).has_layer()) {
return true;
}
}
return false;
}
bool UpgradeV0Net(const NetParameter& v0_net_param_padding_layers,
NetParameter* net_param) {
// First upgrade padding layers to padded conv layers.
V0NetParameter v0_net_param;
NetParameter v0_net_param;
UpgradeV0PaddingLayers(v0_net_param_padding_layers, &v0_net_param);
// Now upgrade layer parameters.
bool is_fully_compatible = true;
@@ -29,8 +37,8 @@ bool UpgradeV0Net(const V0NetParameter& v0_net_param_padding_layers,
net_param->set_name(v0_net_param.name());
}
for (int i = 0; i < v0_net_param.layers_size(); ++i) {
is_fully_compatible &= UpgradeV0LayerConnection(v0_net_param.layers(i),
net_param->add_layers());
is_fully_compatible &= UpgradeLayerParameter(v0_net_param.layers(i),
net_param->add_layers());
}
for (int i = 0; i < v0_net_param.input_size(); ++i) {
net_param->add_input(v0_net_param.input(i));
@@ -44,8 +52,8 @@ bool UpgradeV0Net(const V0NetParameter& v0_net_param_padding_layers,
return is_fully_compatible;
}
void UpgradeV0PaddingLayers(const V0NetParameter& param,
V0NetParameter* param_upgraded_pad) {
void UpgradeV0PaddingLayers(const NetParameter& param,
NetParameter* param_upgraded_pad) {
// Copy everything other than the layers from the original param.
param_upgraded_pad->Clear();
param_upgraded_pad->CopyFrom(param);
@@ -57,7 +65,7 @@ void UpgradeV0PaddingLayers(const V0NetParameter& param,
blob_name_to_last_top_idx[blob_name] = -1;
}
for (int i = 0; i < param.layers_size(); ++i) {
const V0LayerConnection& layer_connection = param.layers(i);
const LayerParameter& layer_connection = param.layers(i);
const V0LayerParameter& layer_param = layer_connection.layer();
// Add the layer to the new net, unless it's a padding layer.
if (layer_param.type() != "padding") {
@@ -73,7 +81,7 @@ void UpgradeV0PaddingLayers(const V0NetParameter& param,
if (top_idx == -1) {
continue;
}
V0LayerConnection source_layer = param.layers(top_idx);
LayerParameter source_layer = param.layers(top_idx);
if (source_layer.layer().type() == "padding") {
// This layer has a padding layer as input -- check that it is a conv
// layer and takes only one input. Also check that the padding layer
@@ -101,8 +109,8 @@ void UpgradeV0PaddingLayers(const V0NetParameter& param,
}
}
bool UpgradeV0LayerConnection(const V0LayerConnection& v0_layer_connection,
LayerParameter* layer_param) {
bool UpgradeLayerParameter(const LayerParameter& v0_layer_connection,
LayerParameter* layer_param) {
bool is_fully_compatible = true;
layer_param->Clear();
for (int i = 0; i < v0_layer_connection.bottom_size(); ++i) {
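UpgradeLayerParameter (the rest of which lies beyond this hunk) copies bottom and top across and converts the V0 type string with UpgradeV0LayerType. A hedged sketch of that last step, assuming the string-to-enum mapping follows the obvious spellings (e.g. 'conv' to CONVOLUTION; the mapping for other strings is not shown in this diff):

#include "caffe/proto/caffe.pb.h"
#include "caffe/util/upgrade_proto.hpp"

void TypeUpgradeSketch() {
  // V0 nets spell layer types as lowercase strings; the new format
  // uses the LayerParameter::LayerType enum declared in caffe.proto.
  caffe::LayerParameter_LayerType type =
      caffe::UpgradeV0LayerType("conv");  // expected: CONVOLUTION
  (void) type;
}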

View file

@@ -1,72 +0,0 @@
// Copyright 2014 BVLC and contributors.
//
// This is a script to upgrade "V0" network prototxts to the new format.
// Usage:
// upgrade_net_proto v0_net_proto_file_in net_proto_file_out
#include <cstring>
#include <iostream> // NOLINT(readability/streams)
#include <fstream> // NOLINT(readability/streams)
#include "caffe/caffe.hpp"
#include "caffe/util/io.hpp"
#include "caffe/util/upgrade_proto.hpp"
using std::ofstream;
using namespace caffe; // NOLINT(build/namespaces)
int main(int argc, char** argv) {
::google::InitGoogleLogging(argv[0]);
if (argc != 3) {
LOG(ERROR) << "Usage: "
<< "upgrade_net_proto v0_net_proto_file_in net_proto_file_out";
return 0;
}
bool success = true;
NetParameter upgraded_net_param;
bool is_binary = false;
bool is_already_upgraded = false;
// First, check whether the input file is already in the new format.
if (ReadProtoFromTextFile(argv[1], &upgraded_net_param)) {
is_already_upgraded = true;
} else if (ReadProtoFromBinaryFile(argv[1], &upgraded_net_param)) {
is_already_upgraded = true;
is_binary = true;
} else {
V0NetParameter v0_net_param;
if (ReadProtoFromTextFile(argv[1], &v0_net_param)) {
LOG(ERROR) << "Successfully parsed file as V0NetParameter prototxt: "
<< argv[1];
} else if (ReadProtoFromBinaryFile(argv[1], &v0_net_param)) {
LOG(ERROR) << "Successfully parsed file as V0NetParameter binary proto: "
<< argv[1];
is_binary = true;
} else {
LOG(FATAL) << "Failed to parse input V0NetParameter file: " << argv[1];
return 1;
}
success = UpgradeV0Net(v0_net_param, &upgraded_net_param);
if (!success) {
LOG(ERROR) << "Encountered one or more problems upgrading net param "
<< "proto; see above.";
}
}
if (is_already_upgraded) {
LOG(ERROR) << "File already in V1 proto format: " << argv[1];
}
if (is_binary) {
WriteProtoToBinaryFile(upgraded_net_param, argv[2]);
} else {
// TODO(jdonahue): figure out why WriteProtoToTextFile doesn't work
// (no file is created).
// WriteProtoToTextFile(upgraded_net_param, argv[2]);
ofstream output_proto;
output_proto.open(argv[2]);
output_proto << upgraded_net_param.DebugString();
output_proto.close();
}
LOG(ERROR) << "Wrote upgraded NetParameter proto to " << argv[2];
return !success;
}

View file

@@ -0,0 +1,46 @@
// Copyright 2014 BVLC and contributors.
//
// This is a script to upgrade "V0" network prototxts to the new format.
// Usage:
// upgrade_net_proto_binary v0_net_proto_file_in net_proto_file_out
#include <cstring>
#include <iostream> // NOLINT(readability/streams)
#include <fstream> // NOLINT(readability/streams)
#include "caffe/caffe.hpp"
#include "caffe/util/io.hpp"
#include "caffe/util/upgrade_proto.hpp"
using std::ofstream;
using namespace caffe; // NOLINT(build/namespaces)
int main(int argc, char** argv) {
::google::InitGoogleLogging(argv[0]);
if (argc != 3) {
LOG(ERROR) << "Usage: "
<< "upgrade_net_proto v0_net_proto_file_in net_proto_file_out";
return 1;
}
NetParameter net_param;
if (!ReadProtoFromBinaryFile(argv[1], &net_param)) {
LOG(ERROR) << "Failed to parse input binary file as NetParameter: "
<< argv[1];
return 2;
}
bool need_upgrade = NetNeedsUpgrade(net_param);
bool success = true;
if (need_upgrade) {
NetParameter v0_net_param(net_param);
success = UpgradeV0Net(v0_net_param, &net_param);
} else {
LOG(ERROR) << "File already in V1 proto format: " << argv[1];
}
WriteProtoToBinaryFile(net_param, argv[2]);
LOG(ERROR) << "Wrote upgraded NetParameter binary proto to " << argv[2];
return !success;
}

View file

@@ -0,0 +1,52 @@
// Copyright 2014 BVLC and contributors.
//
// This is a script to upgrade "V0" network prototxts to the new format.
// Usage:
// upgrade_net_proto_text v0_net_proto_file_in net_proto_file_out
#include <cstring>
#include <iostream> // NOLINT(readability/streams)
#include <fstream> // NOLINT(readability/streams)
#include "caffe/caffe.hpp"
#include "caffe/util/io.hpp"
#include "caffe/util/upgrade_proto.hpp"
using std::ofstream;
using namespace caffe; // NOLINT(build/namespaces)
int main(int argc, char** argv) {
::google::InitGoogleLogging(argv[0]);
if (argc != 3) {
LOG(ERROR) << "Usage: "
<< "upgrade_net_proto v0_net_proto_file_in net_proto_file_out";
return 1;
}
NetParameter net_param;
if (!ReadProtoFromTextFile(argv[1], &net_param)) {
LOG(ERROR) << "Failed to parse input text file as NetParameter: "
<< argv[1];
return 2;
}
bool need_upgrade = NetNeedsUpgrade(net_param);
bool success = true;
if (need_upgrade) {
NetParameter v0_net_param(net_param);
success = UpgradeV0Net(v0_net_param, &net_param);
} else {
LOG(ERROR) << "File already in V1 proto format: " << argv[1];
}
// TODO(jdonahue): figure out why WriteProtoToTextFile doesn't work
// (no file is created).
// WriteProtoToTextFile(net_param, argv[2]);
ofstream output_proto;
output_proto.open(argv[2]);
output_proto << net_param.DebugString();
output_proto.close();
LOG(ERROR) << "Wrote upgraded NetParameter text proto to " << argv[2];
return !success;
}
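On the WriteProtoToTextFile TODO above: a plausible cause (not verified here) is that the output file is never created, so a hedged alternative is to write text format through protobuf's own stream types with O_CREAT set. This sketch is not part of the commit:

#include <fcntl.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <google/protobuf/text_format.h>
#include "caffe/proto/caffe.pb.h"

// Writes a NetParameter in text format, creating the file if needed.
bool WriteTextProto(const caffe::NetParameter& net_param,
                    const char* filename) {
  int fd = open(filename, O_WRONLY | O_CREAT | O_TRUNC, 0644);
  if (fd < 0) return false;
  google::protobuf::io::FileOutputStream output(fd);
  output.SetCloseOnDelete(true);  // flushed and closed on destruction
  return google::protobuf::TextFormat::Print(net_param, &output);
}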