echo "Computing image mean..."
./build/tools/compute_image_mean $EXAMPLE/cifar10_train_leveldb \
- $EXAMPLE/mean.binaryproto
+ $EXAMPLE/mean.binaryproto leveldb
echo "Done."
#!/usr/bin/env sh
-# Create the imagenet leveldb inputs
+# Create the imagenet lmdb inputs
# N.B. set the path to the imagenet train + val data dirs
EXAMPLE=examples/imagenet
exit 1
fi
-echo "Creating train leveldb..."
+echo "Creating train lmdb..."
GLOG_logtostderr=1 $TOOLS/convert_imageset \
--resize_height=$RESIZE_HEIGHT \
--shuffle \
$TRAIN_DATA_ROOT \
$DATA/train.txt \
- $EXAMPLE/ilsvrc12_train_leveldb
+ $EXAMPLE/ilsvrc12_train_lmdb
-echo "Creating val leveldb..."
+echo "Creating val lmdb..."
GLOG_logtostderr=1 $TOOLS/convert_imageset \
--resize_height=$RESIZE_HEIGHT \
--shuffle \
$VAL_DATA_ROOT \
$DATA/val.txt \
- $EXAMPLE/ilsvrc12_val_leveldb
+ $EXAMPLE/ilsvrc12_val_lmdb
echo "Done."
-//
-// This script converts the MNIST dataset to the leveldb format used
-// by caffe to perform classification.
+// This script converts the MNIST dataset to an lmdb (default) or
+// leveldb (--backend=leveldb) format used by caffe to load data.
// Usage:
// convert_mnist_data [FLAGS] input_image_file input_label_file
// output_db_file
#endif
gflags::SetUsageMessage("This script converts the MNIST dataset to\n"
- "the leveldb/lmdb format used by Caffe to perform classification.\n"
+ "the lmdb/leveldb format used by Caffe to load data.\n"
"Usage:\n"
" convert_mnist_data [FLAGS] input_image_file input_label_file "
"output_db_file\n"
#!/usr/bin/env sh
-# This script converts the mnist data into leveldb/lmdb format,
+# This script converts the MNIST data into lmdb/leveldb format,
# depending on the value assigned to $BACKEND.
EXAMPLE=examples/mnist
cd $CAFFE_ROOT/examples/mnist
./create_mnist.sh
-If it complains that `wget` or `gunzip` are not installed, you need to install them respectively. After running the script there should be two datasets, `mnist-train-leveldb`, and `mnist-test-leveldb`.
+If it complains that `wget` or `gunzip` are not installed, you need to install them first. After running the script there should be two datasets: `mnist_train_lmdb` and `mnist_test_lmdb`.
## LeNet: the MNIST Classification Model
### Writing the Data Layer
-Currently, we will read the MNIST data from the leveldb we created earlier in the demo. This is defined by a data layer:
+Currently, we will read the MNIST data from the lmdb we created earlier in the demo. This is defined by a data layer:
layers {
name: "mnist"
type: DATA
data_param {
- source: "mnist-train-leveldb"
+ source: "mnist_train_lmdb"
+ backend: LMDB
batch_size: 64
scale: 0.00390625
}
top: "label"
}
-Specifically, this layer has name `mnist`, type `data`, and it reads the data from the given leveldb source. We will use a batch size of 64, and scale the incoming pixels so that they are in the range \[0,1\). Why 0.00390625? It is 1 divided by 256. And finally, this layer produces two blobs, one is the `data` blob, and one is the `label` blob.
+Specifically, this layer has name `mnist`, type `data`, and it reads the data from the given lmdb source. We will use a batch size of 64, and scale the incoming pixels so that they are in the range \[0,1\). Why 0.00390625? It is 1 divided by 256. And finally, this layer produces two blobs, one is the `data` blob, and one is the `label` blob.
### Writing the Convolution Layer
top: "data"
top: "label"
data_param {
- source: "examples/imagenet/ilsvrc12_train_leveldb"
+ source: "examples/imagenet/ilsvrc12_train_lmdb"
+ backend: LMDB
batch_size: 256
}
transform_param {
top: "data"
top: "label"
data_param {
- source: "examples/imagenet/ilsvrc12_val_leveldb"
+ source: "examples/imagenet/ilsvrc12_val_lmdb"
+ backend: LMDB
batch_size: 50
}
transform_param {
top: "data"
top: "label"
data_param {
- source: "examples/imagenet/ilsvrc12_train_leveldb"
+ source: "examples/imagenet/ilsvrc12_train_lmdb"
+ backend: LMDB
batch_size: 256
}
transform_param {
top: "data"
top: "label"
data_param {
- source: "examples/imagenet/ilsvrc12_val_leveldb"
+ source: "examples/imagenet/ilsvrc12_val_lmdb"
+ backend: LMDB
batch_size: 50
}
transform_param {