move individual layer parameters to individual proto messages

This commit is contained in:
Jeff Donahue 2014-03-14 16:44:52 -07:00
Родитель 53ca9cde3f
Коммит 1dc3374913
1 изменённый файл: 69 добавлений и 57 удалений

Просмотреть файл

@ -42,56 +42,85 @@ message LayerParameter {
optional string name = 1; // the layer name
optional string type = 2; // the string to specify the layer type
// Parameters to specify layers with inner products.
optional uint32 num_output = 3; // The number of outputs for the layer
optional bool biasterm = 4 [default = true]; // whether to have bias terms
optional FillerParameter weight_filler = 5; // The filler for the weight
optional FillerParameter bias_filler = 6; // The filler for the bias
// The blobs containing the numeric parameters of the layer
repeated BlobProto blobs = 3;
// The ratio that is multiplied on the global learning rate. If you want to
// set the learning ratio for one blob, you need to set it for all blobs.
repeated float blobs_lr = 4;
// The weight decay that is multiplied on the global weight decay.
repeated float weight_decay = 5;
optional uint32 pad = 7 [default = 0]; // The padding size
optional uint32 kernelsize = 8; // The kernel size
optional uint32 group = 9 [default = 1]; // The group size for group conv
optional uint32 stride = 10 [default = 1]; // The stride
// Parameters for particular layer types.
optional DataParameter data_param = 6;
optional InnerProductParameter inner_product_param = 7;
optional ConvolutionParameter convolution_param = 8;
optional PoolParameter pool_param = 9;
optional DropoutParameter dropout_param = 10;
optional LRNParameter lrn_param = 11;
}
// Parameters for data layers, which read inputs from a source (a leveldb)
// and apply simple on-the-fly preprocessing.
message DataParameter {
  // Path of the data source to read from.
  optional string source = 1;
  // Preprocessing is limited to mean subtraction followed by scaling:
  // the mean (when a mean file is given) is always subtracted before the
  // scale factor below is applied.
  optional float scale = 2 [default = 1];
  // File holding the data mean to subtract.
  optional string meanfile = 3;
  // Number of data points per batch.
  optional uint32 batchsize = 4;
  // Side length of the random crop taken from each image; 0 disables
  // cropping.
  optional uint32 cropsize = 5 [default = 0];
  // Whether to randomly mirror the data.
  optional bool mirror = 6 [default = false];
  // Skip a few data points at startup so that asynchronous SGD clients do
  // not all start at the same point; the actual skip is
  // rand_skip * rand(0,1). rand_skip must not exceed the number of keys in
  // the leveldb.
  optional uint32 rand_skip = 7 [default = 0];
}
// Parameters for layers that compute inner products.
message InnerProductParameter {
  // Number of outputs produced by the layer.
  optional uint32 num_output = 1;
  // Whether the layer has bias terms in addition to the weights.
  optional bool biasterm = 2 [default = true];
  // Filler used to initialize the weights.
  optional FillerParameter weight_filler = 3;
  // Filler used to initialize the bias.
  optional FillerParameter bias_filler = 4;
}
// Parameters for convolution layers.
message ConvolutionParameter {
  // Padding size added to the input.
  optional uint32 pad = 1 [default = 0];
  // Size of the convolution kernel.
  optional uint32 kernelsize = 2;
  // Group size for grouped convolution.
  optional uint32 group = 3 [default = 1];
  // Stride with which the kernel is applied.
  optional uint32 stride = 4 [default = 1];
}
// Parameters for pooling layers.
// NOTE: the stray duplicate declarations of `pool` (tag 11) and
// `dropout_ratio` (tag 12) — leftovers of the pre-refactor LayerParameter
// fields — were removed: a message cannot declare the same field name
// twice, and dropout_ratio lives in DropoutParameter.
message PoolParameter {
  // Supported pooling methods.
  enum PoolMethod {
    MAX = 0;
    AVE = 1;
    STOCHASTIC = 2;
  }
  // The pooling method to apply.
  optional PoolMethod pool = 1 [default = MAX];
}
optional uint32 local_size = 13 [default = 5]; // for local response norm
optional float alpha = 14 [default = 1.]; // for local response norm
optional float beta = 15 [default = 0.75]; // for local response norm
// Parameters for dropout layers.
message DropoutParameter {
  // The dropout ratio.
  optional float dropout_ratio = 1 [default = 0.5];
}
// For data layers, specify the data source
optional string source = 16;
// For data pre-processing, we can do simple scaling and subtracting the
// data mean, if provided. Note that the mean subtraction is always carried
// out before scaling.
optional float scale = 17 [default = 1];
optional string meanfile = 18;
// For data layers, specify the batch size.
optional uint32 batchsize = 19;
// For data layers, specify if we would like to randomly crop an image.
optional uint32 cropsize = 20 [default = 0];
// For data layers, specify if we want to randomly mirror data.
optional bool mirror = 21 [default = false];
// Parameters for local response normalization (LRN) layers.
message LRNParameter {
  // Size of the local region used for normalization.
  optional uint32 local_size = 1 [default = 5];
  // Scaling parameter of the normalization.
  optional float alpha = 2 [default = 1.];
  // Exponent parameter of the normalization.
  optional float beta = 3 [default = 0.75];
}
// The blobs containing the numeric parameters of the layer
repeated BlobProto blobs = 50;
// The ratio that is multiplied on the global learning rate. If you want to
// set the learning ratio for one blob, you need to set it for all blobs.
repeated float blobs_lr = 51;
// The weight decay that is multiplied on the global weight decay.
repeated float weight_decay = 52;
// The rand_skip variable is for the data layer to skip a few data points
// to avoid all asynchronous sgd clients to start at the same point. The skip
// point would be set as rand_skip * rand(0,1). Note that rand_skip should not
// be larger than the number of keys in the leveldb.
optional uint32 rand_skip = 53 [default = 0];
// Parameters for concatenation layers.
message ConcatParameter {
  // Dimension along which the bottom blobs are concatenated; every other
  // dimension must be identical across all the bottom blobs. By default
  // blobs are concatenated along the channels dimension (1).
  optional uint32 concat_dim = 1 [default = 1];
}
message WindowDataParameter {
// Fields related to detection (det_*)
// foreground (object) overlap threshold
optional float det_fg_threshold = 54 [default = 0.5];
@ -110,23 +139,6 @@ message LayerParameter {
// warp: cropped window is warped to a fixed size and aspect ratio
// square: the tightest square around the window is cropped
optional string det_crop_mode = 59 [default = "warp"];
// For ReshapeLayer, one needs to specify the new dimensions.
optional int32 new_num = 60 [default = 0];
optional int32 new_channels = 61 [default = 0];
optional int32 new_height = 62 [default = 0];
optional int32 new_width = 63 [default = 0];
// Whether or not ImageLayer should shuffle the list of files at every epoch.
// It will also resize images if new_height or new_width are not zero.
optional bool shuffle_images = 64 [default = false];
// For ConcatLayer, one needs to specify the dimension for concatenation, and
// the other dimensions must be the same for all the bottom blobs.
// By default it will concatenate blobs along the channels dimension.
optional uint32 concat_dim = 65 [default = 1];
optional HDF5OutputParameter hdf5_output_param = 1001;
}
message HDF5OutputParameter {