caffe_convert_absolute_paths(test_srcs)
caffe_convert_absolute_paths(test_cuda)
- # propogate to parent scope
+ # propagate to parent scope
set(srcs ${srcs} PARENT_SCOPE)
set(cuda ${cuda} PARENT_SCOPE)
set(test_srcs ${test_srcs} PARENT_SCOPE)
"cell_type": "markdown",
"metadata": {},
"source": [
- "So we did finetuning and it is awesome. Let's take a look at what kind of results we are able to get with a longer, more complete run of the style recognition dataset. Note: the below URL might be occassionally down because it is run on a research machine.\n",
+ "So we did finetuning and it is awesome. Let's take a look at what kind of results we are able to get with a longer, more complete run of the style recognition dataset. Note: the below URL might be occasionally down because it is run on a research machine.\n",
"\n",
"http://demo.vislab.berkeleyvision.org/"
]
# executed.
#
# In order to provide additional flexibility, the following shell (environment)
-# variables can be used to controll the execution of each of the phases:
+# variables can be used to control the execution of each of the phases:
#
# DOWNLOAD_DATA: Enable (1) or disable (0) the downloading of the MNIST dataset
# CREATE_LMDB: Enable (1) or disable (0) the creation of the LMDB database
def preprocess(self, im):
"""
- preprocess() emulate the pre-processing occuring in the vgg16 caffe
+ preprocess() emulates the pre-processing occurring in the vgg16 caffe
prototxt.
"""
# looks:
self.sp['display'] = '25'
self.sp['snapshot'] = '2500'
- self.sp['snapshot_prefix'] = '"snapshot"' # string withing a string!
+ self.sp['snapshot_prefix'] = '"snapshot"' # string within a string!
# learning rate policy
self.sp['lr_policy'] = '"fixed"'
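# For illustration only: a minimal sketch (helper and file name are assumed)
# of writing such a parameter dictionary out as a solver prototxt. It shows
# why string-valued fields need the extra embedded quotes noted above, while
# numeric fields are written bare.
def write_solver_sketch(sp, path='solver_sketch.prototxt'):
    with open(path, 'w') as f:
        for key, value in sorted(sp.items()):
            # e.g.  display: 25                     (numeric, unquoted)
            #       snapshot_prefix: "snapshot"     (string within a string)
            f.write('%s: %s\n' % (key, value))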
// The pointers to caffe::Solver and caffe::Net instances
static vector<shared_ptr<Solver<float> > > solvers_;
static vector<shared_ptr<Net<float> > > nets_;
-// init_key is generated at the beginning and everytime you call reset
+// init_key is generated at the beginning and every time you call reset
static double init_key = static_cast<double>(caffe_rng_rand());
/** -----------------------------------------------------------------
message(FATAL_ERROR "Matlab MEX interface (with default mex options file) can only be built if caffe is compiled as shared library. Please enable 'BUILD_SHARED_LIBS' in CMake. Aternativelly you can switch to Octave compiler.")
endif()
-# helper function to set proper mex file extention
+# helper function to set proper mex file extension
function(caffe_fetch_and_set_proper_mexext mexfile_variable)
execute_process(COMMAND ${Matlab_mexext} OUTPUT_STRIP_TRAILING_WHITESPACE RESULT_VARIABLE res OUTPUT_VARIABLE ext)
if(res MATCHES 0)
io: The io factory to use to read the file. Provided for testability.
Returns:
- True if a header was succesfully added. False otherwise.
+ True if a header was successfully added. False otherwise.
"""
headerfile = None
try:
# Let's copy the include_state so it is only messed up within this function.
include_state = include_state.copy()
- # Did we find the header for this file (if any) and succesfully load it?
+ # Did we find the header for this file (if any) and successfully load it?
header_found = False
# Use the absolute path so that matching works properly.
try:
_valid_extensions = set(val.split(','))
except ValueError:
- PrintUsage('Extensions must be comma seperated list.')
+ PrintUsage('Extensions must be a comma-separated list.')
if not filenames:
PrintUsage('No files were specified.')
src_data, dest_data, is_forward);
}
} else {
- // We are at the last dimensions, which is stored continously in memory
+ // We are at the last dimension, which is stored contiguously in memory
for (int i = 0; i < top[0]->shape(cur_dim); ++i) {
// prepare index vector reduced(red) and with offsets(off)
std::vector<int> ind_red(cur_dim, 0);
src_data, dest_data, is_forward);
}
} else {
- // We are at the last two dimensions, which are stored continously in memory
+ // We are at the last two dimensions, which are stored contiguously in memory
// With (N,C,H,W)
// (0,1,2,3) cur_dim -> H
// cur_dim+1 -> W
// Shuffle if needed.
if (this->layer_param_.hdf5_data_param().shuffle()) {
std::random_shuffle(data_permutation_.begin(), data_permutation_.end());
- DLOG(INFO) << "Successully loaded " << hdf_blobs_[0]->shape(0)
+ DLOG(INFO) << "Successfully loaded " << hdf_blobs_[0]->shape(0)
<< " rows (shuffled)";
} else {
- DLOG(INFO) << "Successully loaded " << hdf_blobs_[0]->shape(0) << " rows";
+ DLOG(INFO) << "Successfully loaded " << hdf_blobs_[0]->shape(0) << " rows";
}
}
optional uint32 crop_size = 3 [default = 0];
// mean_file and mean_value cannot be specified at the same time
optional string mean_file = 4;
- // if specified can be repeated once (would substract it from all the channels)
+ // if specified can be repeated once (would subtract it from all the channels)
// or can be repeated the same number of times as channels
// (would subtract them from the corresponding channel)
repeated float mean_value = 5;
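# For illustration only, assuming this hunk is Caffe's TransformationParameter
# message: setting one mean value per channel via the generated Python protobuf
# classes. The three numbers are assumed BGR means, not values from this file;
# a single repeated mean_value entry would be subtracted from every channel.
from caffe.proto import caffe_pb2

transform_param = caffe_pb2.TransformationParameter()
transform_param.mean_value.extend([104.0, 117.0, 123.0])  # one entry per channel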
// Initial value of a_i. Default is a_i=0.25 for all i.
optional FillerParameter filler = 1;
- // Whether or not slope paramters are shared across channels.
+ // Whether or not slope parameters are shared across channels.
optional bool channel_shared = 2 [default = false];
}
# This option allows building only the selected test files and excluding all others
# Usage example:
# cmake -DBUILD_only_tests="common,net,blob,im2col_kernel"
-set(BUILD_only_tests "" CACHE STRING "Blank or comma-separated list of test files to build without 'test_' prefix and extention")
+set(BUILD_only_tests "" CACHE STRING "Blank or comma-separated list of test files to build without 'test_' prefix and extension")
caffe_leave_only_selected_tests(test_srcs ${BUILD_only_tests})
caffe_leave_only_selected_tests(test_cuda ${BUILD_only_tests})
void TestForward() {
// Get the loss without a specified objective weight -- should be
- // equivalent to explicitly specifiying a weight of 1.
+ // equivalent to explicitly specifying a weight of 1.
LayerParameter layer_param;
EuclideanLossLayer<Dtype> layer_weight_1(layer_param);
layer_weight_1.SetUp(this->blob_bottom_vec_, this->blob_top_vec_);
// Utility functions for encoding Unicode text (wide strings) in
// UTF-8.
-// A Unicode code-point can have upto 21 bits, and is encoded in UTF-8
+// A Unicode code-point can have up to 21 bits, and is encoded in UTF-8
// like this:
//
// Code-point length Encoding
return *this;
}
-// Returns a pointer to the last occurence of a valid path separator in
+// Returns a pointer to the last occurrence of a valid path separator in
// the FilePath. On Windows, for example, both '/' and '\' are valid path
// separators. Returns NULL if no path separator was found.
const char* FilePath::FindLastPathSeparator() const {
void Normalize();
- // Returns a pointer to the last occurence of a valid path separator in
+ // Returns a pointer to the last occurrence of a valid path separator in
// the FilePath. On Windows, for example, both '/' and '\' are valid path
// separators. Returns NULL if no path separator was found.
const char* FindLastPathSeparator() const;
# Be warned that the fields in the training log may change in the future.
# You had better check the data files before designing your own plots.
-# Please generate the neccessary data files with
+# Please generate the necessary data files with
# /path/to/caffe/tools/extra/parse_log.sh before plotting.
# Example usage:
# ./parse_log.sh mnist.log