Datum datum;
Dtype* top_data = (*top)[0]->mutable_cpu_data();
Dtype* top_label = (*top)[1]->mutable_cpu_data();
+ const Dtype scale = this->layer_param_.scale();
+ const Dtype subtraction = this->layer_param_.subtraction();
for (int i = 0; i < this->layer_param_.batchsize(); ++i) {
// get a blob
datum.ParseFromString(iter_->value().ToString());
const string& data = datum.data();
for (int j = 0; j < datum_size_; ++j) {
- top_data[i * datum_size_ + j] = (uint8_t)data[j];
+ top_data[i * datum_size_ + j] =
+ (static_cast<Dtype>((uint8_t)data[j]) * scale) - subtraction;
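+ // e.g. with scale = 1./256 and subtraction = 0.5, raw byte values in
+ // [0, 255] land in [-0.5, 0.5) (these values are illustrative only).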
}
top_label[i] = datum.label();
// go to the next iter
// For data layers, specify the data source
optional string source = 16;
+ // For data pre-processing, we can do simple scaling and constant subtraction
+ optional float scale = 17 [ default = 1 ];
+ optional float subtraction = 18 [ default = 0 ];
// For data layers, specify the batch size.
- optional uint32 batchsize = 17;
+ optional uint32 batchsize = 19;
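+// A hypothetical data layer definition using the new fields (the source
+// name and all values below are illustrative, not part of this patch):
+//   layer {
+//     name: "data"
+//     type: "data"
+//     source: "mnist-train-leveldb"
+//     batchsize: 64
+//     scale: 0.00390625  // i.e. 1/256
+//     subtraction: 0.5
+//   }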
// The blobs containing the numeric parameters of the layer
repeated BlobProto blobs = 50;
optional float momentum = 10; // The momentum value.
optional string snapshot_prefix = 11; // The prefix for the snapshot.
-}
\ No newline at end of file
+}
--- a/src/caffe/test/lenet.hpp
+++ /dev/null
-// Copyright Yangqing Jia 2013
-
-// This file is merely here so we can easily get a string of the lenet.
-// It is actually not the very original LeNet, but with the sigmoid layers
-// replaced by ReLU layers.
-
-#ifndef CAFFE_TEST_LENET_HPP_
-#define CAFFE_TEST_LENET_HPP_
-
-#include <string>
-
-namespace caffe {
-
-const char* kLENET = "name: \"LeNet\"\n"
-"input: \"data\"\n"
-"input: \"label\"\n"
-"layers {\n"
-" layer {\n"
-" name: \"conv1\"\n"
-" type: \"conv\"\n"
-" num_output: 20\n"
-" kernelsize: 5\n"
-" stride: 1\n"
-" weight_filler {\n"
-" type: \"xavier\"\n"
-" }\n"
-" bias_filler {\n"
-" type: \"constant\"\n"
-" }\n"
-" }\n"
-" bottom: \"data\"\n"
-" top: \"conv1\"\n"
-"}\n"
-"layers {\n"
-" layer {\n"
-" name: \"pool1\"\n"
-" type: \"pool\"\n"
-" kernelsize: 2\n"
-" stride: 2\n"
-" pool: MAX\n"
-" }\n"
-" bottom: \"conv1\"\n"
-" top: \"pool1\"\n"
-"}\n"
-"layers {\n"
-" layer {\n"
-" name: \"conv2\"\n"
-" type: \"conv\"\n"
-" num_output: 50\n"
-" kernelsize: 5\n"
-" stride: 1\n"
-" weight_filler {\n"
-" type: \"xavier\"\n"
-" }\n"
-" bias_filler {\n"
-" type: \"constant\"\n"
-" }\n"
-" }\n"
-" bottom: \"pool1\"\n"
-" top: \"conv2\"\n"
-"}\n"
-"layers {\n"
-" layer {\n"
-" name: \"pool2\"\n"
-" type: \"pool\"\n"
-" kernelsize: 2\n"
-" stride: 2\n"
-" pool: MAX\n"
-" }\n"
-" bottom: \"conv2\"\n"
-" top: \"pool2\"\n"
-"}\n"
-"layers {\n"
-" layer {\n"
-" name: \"ip1\"\n"
-" type: \"innerproduct\"\n"
-" num_output: 500\n"
-" weight_filler {\n"
-" type: \"xavier\"\n"
-" }\n"
-" bias_filler {\n"
-" type: \"constant\"\n"
-" }\n"
-" }\n"
-" bottom: \"pool2\"\n"
-" top: \"ip1\"\n"
-"}\n"
-"layers {\n"
-" layer {\n"
-" name: \"relu1\"\n"
-" type: \"relu\"\n"
-" }\n"
-" bottom: \"ip1\"\n"
-" top: \"relu1\"\n"
-"}\n"
-"layers {\n"
-" layer {\n"
-" name: \"ip2\"\n"
-" type: \"innerproduct\"\n"
-" num_output: 10\n"
-" weight_filler {\n"
-" type: \"xavier\"\n"
-" }\n"
-" bias_filler {\n"
-" type: \"constant\"\n"
-" }\n"
-" }\n"
-" bottom: \"relu1\"\n"
-" top: \"ip2\"\n"
-"}\n"
-"layers {\n"
-" layer {\n"
-" name: \"prob\"\n"
-" type: \"softmax\"\n"
-" }\n"
-" bottom: \"ip2\"\n"
-" top: \"prob\"\n"
-"}\n"
-"layers {\n"
-" layer {\n"
-" name: \"loss\"\n"
-" type: \"multinomial_logistic_loss\"\n"
-" }\n"
-" bottom: \"prob\"\n"
-" bottom: \"label\"\n"
-"}";
-
-} // namespace caffe
-
-#endif
#include "caffe/proto/caffe.pb.h"
#include "caffe/util/io.hpp"
-#include "caffe/test/lenet.hpp"
#include "caffe/test/test_caffe_main.hpp"
namespace caffe {
typedef ::testing::Types<float, double> Dtypes;
TYPED_TEST_CASE(NetProtoTest, Dtypes);
-TYPED_TEST(NetProtoTest, TestLoadFromText) {
- NetParameter net_param;
- ReadProtoFromTextFile("caffe/test/data/lenet.prototxt", &net_param);
-}
-
TYPED_TEST(NetProtoTest, TestSetup) {
NetParameter net_param;
- string lenet_string(kLENET);
- // Load the network
- CHECK(google::protobuf::TextFormat::ParseFromString(
- lenet_string, &net_param));
+ ReadProtoFromTextFile("caffe/test/data/lenet.prototxt", &net_param);
// check if things are right
- EXPECT_EQ(net_param.layers_size(), 9);
- EXPECT_EQ(net_param.input_size(), 2);
-
- // Now, initialize a network using the parameter
- shared_ptr<Blob<TypeParam> > data(new Blob<TypeParam>(10, 1, 28, 28));
- shared_ptr<Blob<TypeParam> > label(new Blob<TypeParam>(10, 1, 1, 1));
- FillerParameter filler_param;
- shared_ptr<Filler<TypeParam> > filler;
- filler.reset(new ConstantFiller<TypeParam>(filler_param));
- filler->Fill(label.get());
- filler.reset(new UniformFiller<TypeParam>(filler_param));
- filler->Fill(data.get());
+ EXPECT_EQ(net_param.layers_size(), 10);
+ EXPECT_EQ(net_param.input_size(), 0);
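+ // lenet.prototxt now bundles its own data layer, so the net has ten
+ // layers and takes no external input blobs (the old in-memory kLENET
+ // string had nine layers plus the "data" and "label" inputs).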
vector<Blob<TypeParam>*> bottom_vec;
- bottom_vec.push_back(data.get());
- bottom_vec.push_back(label.get());
Net<TypeParam> caffe_net(net_param, bottom_vec);
- EXPECT_EQ(caffe_net.layer_names().size(), 9);
+ EXPECT_EQ(caffe_net.layer_names().size(), 10);
EXPECT_EQ(caffe_net.blob_names().size(), 10);
// Print a few statistics to see if things are correct
<< caffe_net.blobs()[i]->height() << ", "
<< caffe_net.blobs()[i]->width();
}
+ Caffe::set_mode(Caffe::CPU);
+ // Run the network without training.
+ LOG(ERROR) << "Performing Forward";
+ caffe_net.Forward(bottom_vec);
+ LOG(ERROR) << "Performing Backward";
+ LOG(ERROR) << caffe_net.Backward();
+
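+ // Repeat the same untrained forward/backward pass in GPU mode.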
+ Caffe::set_mode(Caffe::GPU);
// Run the network without training.
- vector<Blob<TypeParam>*> top_vec;
LOG(ERROR) << "Performing Forward";
caffe_net.Forward(bottom_vec);
LOG(ERROR) << "Performing Backward";