some naming standardization: ImagesLayer -> ImageDataLayer (like other
data layers), and load_hdf5_file_data -> LoadHDF5FileData
This commit is contained in:
Jeff Donahue 2014-03-21 15:14:08 -07:00
Родитель a68395c2ea
Коммит 2aea6bb3ce
8 изменённых файлов: 35 добавлений и 38 удалений

Просмотреть файл

@ -22,7 +22,7 @@ We're going to use the images that ship with caffe.
find `pwd`/examples/images -type f -exec echo {} \; > examples/_temp/temp.txt
The `ImagesLayer` we'll use expects labels after each filenames, so let's add a 0 to the end of each line
The `ImageDataLayer` we'll use expects labels after each filenames, so let's add a 0 to the end of each line
sed "s/$/ 0/" examples/_temp/temp.txt > examples/_temp/file_list.txt
@ -37,7 +37,7 @@ Download the mean image of the ILSVRC dataset.
We will use `data/ilsvrc212/imagenet_mean.binaryproto` in the network definition prototxt.
Let's copy and modify the network definition.
We'll be using the `ImagesLayer`, which will load and resize images for us.
We'll be using the `ImageDataLayer`, which will load and resize images for us.
cp examples/feature_extraction/imagenet_val.prototxt examples/_temp

Просмотреть файл

@ -255,8 +255,6 @@ class EuclideanLossLayer : public Layer<Dtype> {
vector<Blob<Dtype>*>* top);
protected:
// The loss layer will do nothing during forward - all computation are
// carried out in the backward pass.
virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);
// virtual Dtype Forward_gpu(const vector<Blob<Dtype>*>& bottom,
@ -308,7 +306,7 @@ class HDF5DataLayer : public Layer<Dtype> {
const bool propagate_down, vector<Blob<Dtype>*>* bottom);
virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
const bool propagate_down, vector<Blob<Dtype>*>* bottom);
virtual void load_hdf5_file_data(const char* filename);
virtual void LoadHDF5FileData(const char* filename);
std::vector<std::string> hdf_filenames_;
unsigned int num_files_;
@ -346,17 +344,17 @@ class Im2colLayer : public Layer<Dtype> {
// This function is used to create a pthread that prefetches the data.
template <typename Dtype>
void* ImagesLayerPrefetch(void* layer_pointer);
void* ImageDataLayerPrefetch(void* layer_pointer);
template <typename Dtype>
class ImagesLayer : public Layer<Dtype> {
class ImageDataLayer : public Layer<Dtype> {
// The function used to perform prefetching.
friend void* ImagesLayerPrefetch<Dtype>(void* layer_pointer);
friend void* ImageDataLayerPrefetch<Dtype>(void* layer_pointer);
public:
explicit ImagesLayer(const LayerParameter& param)
explicit ImageDataLayer(const LayerParameter& param)
: Layer<Dtype>(param) {}
virtual ~ImagesLayer();
virtual ~ImageDataLayer();
virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);
@ -391,8 +389,6 @@ class InfogainLossLayer : public Layer<Dtype> {
vector<Blob<Dtype>*>* top);
protected:
// The loss layer will do nothing during forward - all computation are
// carried out in the backward pass.
virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);
// virtual Dtype Forward_gpu(const vector<Blob<Dtype>*>& bottom,
@ -495,8 +491,6 @@ class MultinomialLogisticLossLayer : public Layer<Dtype> {
vector<Blob<Dtype>*>* top);
protected:
// The loss layer will do nothing during forward - all computation are
// carried out in the backward pass.
virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);
// virtual Dtype Forward_gpu(const vector<Blob<Dtype>*>& bottom,

Просмотреть файл

@ -43,7 +43,7 @@ Layer<Dtype>* GetLayer(const LayerParameter& param) {
case LayerParameter_LayerType_HDF5_OUTPUT:
return new HDF5OutputLayer<Dtype>(param);
case LayerParameter_LayerType_IMAGE_DATA:
return new ImagesLayer<Dtype>(param);
return new ImageDataLayer<Dtype>(param);
case LayerParameter_LayerType_IM2COL:
return new Im2colLayer<Dtype>(param);
case LayerParameter_LayerType_INFOGAIN_LOSS:

Просмотреть файл

@ -26,7 +26,7 @@ HDF5DataLayer<Dtype>::~HDF5DataLayer<Dtype>() { }
// Load data and label from HDF5 filename into the class property blobs.
template <typename Dtype>
void HDF5DataLayer<Dtype>::load_hdf5_file_data(const char* filename) {
void HDF5DataLayer<Dtype>::LoadHDF5FileData(const char* filename) {
LOG(INFO) << "Loading HDF5 file" << filename;
hid_t file_id = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
if (file_id < 0) {
@ -72,7 +72,7 @@ void HDF5DataLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
LOG(INFO) << "Number of files: " << num_files_;
// Load the first HDF5 file and initialize the line counter.
load_hdf5_file_data(hdf_filenames_[current_file_].c_str());
LoadHDF5FileData(hdf_filenames_[current_file_].c_str());
current_row_ = 0;
// Reshape blobs.
@ -101,7 +101,7 @@ Dtype HDF5DataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
current_file_ = 0;
LOG(INFO) << "looping around to first file";
}
load_hdf5_file_data(hdf_filenames_[current_file_].c_str());
LoadHDF5FileData(hdf_filenames_[current_file_].c_str());
}
current_row_ = 0;
}

Просмотреть файл

@ -36,7 +36,7 @@ Dtype HDF5DataLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
LOG(INFO) << "looping around to first file";
}
load_hdf5_file_data(hdf_filenames_[current_file_].c_str());
LoadHDF5FileData(hdf_filenames_[current_file_].c_str());
}
current_row_ = 0;
}

Просмотреть файл

@ -19,10 +19,10 @@ using std::pair;
namespace caffe {
template <typename Dtype>
void* ImagesLayerPrefetch(void* layer_pointer) {
void* ImageDataLayerPrefetch(void* layer_pointer) {
CHECK(layer_pointer);
ImagesLayer<Dtype>* layer =
reinterpret_cast<ImagesLayer<Dtype>*>(layer_pointer);
ImageDataLayer<Dtype>* layer =
reinterpret_cast<ImageDataLayer<Dtype>*>(layer_pointer);
CHECK(layer);
Datum datum;
CHECK(layer->prefetch_data_);
@ -133,13 +133,13 @@ void* ImagesLayerPrefetch(void* layer_pointer) {
}
template <typename Dtype>
ImagesLayer<Dtype>::~ImagesLayer<Dtype>() {
ImageDataLayer<Dtype>::~ImageDataLayer<Dtype>() {
// Finally, join the thread
CHECK(!pthread_join(thread_, NULL)) << "Pthread joining failed.";
}
template <typename Dtype>
void ImagesLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
void ImageDataLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top) {
CHECK_EQ(bottom.size(), 0) << "Input Layer takes no input blobs.";
CHECK_EQ(top->size(), 2) << "Input Layer takes two blobs as output.";
@ -228,13 +228,13 @@ void ImagesLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
prefetch_label_->mutable_cpu_data();
data_mean_.cpu_data();
DLOG(INFO) << "Initializing prefetch";
CHECK(!pthread_create(&thread_, NULL, ImagesLayerPrefetch<Dtype>,
CHECK(!pthread_create(&thread_, NULL, ImageDataLayerPrefetch<Dtype>,
reinterpret_cast<void*>(this))) << "Pthread execution failed.";
DLOG(INFO) << "Prefetch initialized.";
}
template <typename Dtype>
Dtype ImagesLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
Dtype ImageDataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top) {
// First, join the thread
CHECK(!pthread_join(thread_, NULL)) << "Pthread joining failed.";
@ -244,11 +244,11 @@ Dtype ImagesLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
memcpy((*top)[1]->mutable_cpu_data(), prefetch_label_->cpu_data(),
sizeof(Dtype) * prefetch_label_->count());
// Start a new prefetch thread
CHECK(!pthread_create(&thread_, NULL, ImagesLayerPrefetch<Dtype>,
CHECK(!pthread_create(&thread_, NULL, ImageDataLayerPrefetch<Dtype>,
reinterpret_cast<void*>(this))) << "Pthread execution failed.";
return Dtype(0.);
}
INSTANTIATE_CLASS(ImagesLayer);
INSTANTIATE_CLASS(ImageDataLayer);
} // namespace caffe

Просмотреть файл

Просмотреть файл

@ -22,9 +22,9 @@ namespace caffe {
extern cudaDeviceProp CAFFE_TEST_CUDA_PROP;
template <typename Dtype>
class ImagesLayerTest : public ::testing::Test {
class ImageDataLayerTest : public ::testing::Test {
protected:
ImagesLayerTest()
ImageDataLayerTest()
: blob_top_data_(new Blob<Dtype>()),
blob_top_label_(new Blob<Dtype>()),
filename(NULL) {}
@ -41,7 +41,10 @@ class ImagesLayerTest : public ::testing::Test {
outfile.close();
}
virtual ~ImagesLayerTest() { delete blob_top_data_; delete blob_top_label_; }
virtual ~ImageDataLayerTest() {
delete blob_top_data_;
delete blob_top_label_;
}
char* filename;
Blob<Dtype>* const blob_top_data_;
@ -51,15 +54,15 @@ class ImagesLayerTest : public ::testing::Test {
};
typedef ::testing::Types<float, double> Dtypes;
TYPED_TEST_CASE(ImagesLayerTest, Dtypes);
TYPED_TEST_CASE(ImageDataLayerTest, Dtypes);
TYPED_TEST(ImagesLayerTest, TestRead) {
TYPED_TEST(ImageDataLayerTest, TestRead) {
LayerParameter param;
ImageDataParameter* image_data_param = param.mutable_image_data_param();
image_data_param->set_batch_size(5);
image_data_param->set_source(this->filename);
image_data_param->set_shuffle(false);
ImagesLayer<TypeParam> layer(param);
ImageDataLayer<TypeParam> layer(param);
layer.SetUp(this->blob_bottom_vec_, &this->blob_top_vec_);
EXPECT_EQ(this->blob_top_data_->num(), 5);
EXPECT_EQ(this->blob_top_data_->channels(), 3);
@ -78,7 +81,7 @@ TYPED_TEST(ImagesLayerTest, TestRead) {
}
}
TYPED_TEST(ImagesLayerTest, TestResize) {
TYPED_TEST(ImageDataLayerTest, TestResize) {
LayerParameter param;
ImageDataParameter* image_data_param = param.mutable_image_data_param();
image_data_param->set_batch_size(5);
@ -86,7 +89,7 @@ TYPED_TEST(ImagesLayerTest, TestResize) {
image_data_param->set_new_height(256);
image_data_param->set_new_width(256);
image_data_param->set_shuffle(false);
ImagesLayer<TypeParam> layer(param);
ImageDataLayer<TypeParam> layer(param);
layer.SetUp(this->blob_bottom_vec_, &this->blob_top_vec_);
EXPECT_EQ(this->blob_top_data_->num(), 5);
EXPECT_EQ(this->blob_top_data_->channels(), 3);
@ -105,13 +108,13 @@ TYPED_TEST(ImagesLayerTest, TestResize) {
}
}
TYPED_TEST(ImagesLayerTest, TestShuffle) {
TYPED_TEST(ImageDataLayerTest, TestShuffle) {
LayerParameter param;
ImageDataParameter* image_data_param = param.mutable_image_data_param();
image_data_param->set_batch_size(5);
image_data_param->set_source(this->filename);
image_data_param->set_shuffle(true);
ImagesLayer<TypeParam> layer(param);
ImageDataLayer<TypeParam> layer(param);
layer.SetUp(this->blob_bottom_vec_, &this->blob_top_vec_);
EXPECT_EQ(this->blob_top_data_->num(), 5);
EXPECT_EQ(this->blob_top_data_->channels(), 3);