// learning rate to be 1. Thus we will need to perform backward.
need_backward = true;
}
- const int blob_name_size = layer_param.blob_name_size();
- CHECK(blob_name_size == num_param_blobs || blob_name_size == 0)
- << "Incorrect blob_name size: should be either 0 or the same as "
+ const int param_size = layer_param.param_size();
+ CHECK(param_size == num_param_blobs || param_size == 0)
+ << "Incorrect param size: should be either 0 or the same as "
"the number of the layer's parameter blobs: " << num_param_blobs;
const int blob_share_mode_size = layer_param.blob_share_mode_size();
CHECK(blob_share_mode_size == num_param_blobs || blob_share_mode_size == 0)
void Net<Dtype>::AppendParam(const NetParameter& param, const int layer_id,
const int param_id) {
const LayerParameter& layer_param = layers_[layer_id]->layer_param();
- const int blob_name_size = layer_param.blob_name_size();
+ const int param_size = layer_param.param_size();
string param_name;
- if (blob_name_size) {
- param_name = layer_param.blob_name(param_id);
+ if (param_size) {
+ param_name = layer_param.param(param_id);
}
const int net_param_id = params_.size();
params_.push_back(layers_[layer_id]->blobs()[param_id]);
param_net_indices_.push_back(make_pair(layer_id, param_id));
- if (!blob_name_size || !param_name.size() || (param_name.size() &&
+ if (!param_size || !param_name.size() || (param_name.size() &&
param_names_index_.find(param_name) == param_names_index_.end())) {
// This layer "owns" this parameter blob -- it is either anonymous
// (i.e., not given a param_name) or explicitly given a name that we
// haven't already seen.
param_owners_.push_back(-1);
- if (blob_name_size) {
+ if (param_size) {
param_names_index_[param_name] = net_param_id;
}
} else {
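    // Shared case (body elided in this hunk): a previously seen param_name
    // means another layer already owns the blob; the full function looks up
    // the owner's index in param_names_index_, records it in param_owners_,
    // and validates the blobs against blob_share_mode.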
repeated BlobProto blobs = 6;
// The names of the parameter blobs -- useful for sharing parameters among
// layers (but never required).
- repeated string blob_name = 1001;
+ repeated string param = 1001;
// Whether to require shared weights to have the same shape, or just the same
// count -- defaults to STRICT if unspecified.
repeated DimCheckMode blob_share_mode = 1002;
}
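A minimal prototxt sketch of how these fields combine (layer wiring and parameters are illustrative, and bias_term is disabled so the single param entry matches the layer's one parameter blob, as the size check above requires): listing the same param name in two layers makes them share one weight blob, while blob_share_mode controls how strictly the shared shapes are compared.

layers {
  name: 'ip1'
  type: INNER_PRODUCT
  param: 'sharedweights'            # one entry per parameter blob, or none
  blob_share_mode: STRICT           # assumed default; require identical shapes
  inner_product_param { num_output: 10 bias_term: false }
  bottom: 'data'
  top: 'ip1'
}
layers {
  name: 'ip2'
  type: INNER_PRODUCT
  param: 'sharedweights'            # same name -> shares ip1's weight blob
  inner_product_param { num_output: 10 bias_term: false }
  bottom: 'data'
  top: 'ip2'
}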
message HingeLossParameter {
  enum Norm {
L1 = 1;
    L2 = 2;
}
// Specify the Norm to use L1 or L2
optional Norm norm = 1 [default = L1];
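For completeness, overriding the default norm in a net definition would look roughly like this (layer name and wiring are illustrative):

layers {
  name: 'loss'
  type: HINGE_LOSS
  hinge_loss_param { norm: L2 }     # default is L1 when unspecified
  bottom: 'ip2'
  bottom: 'label'
}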
" std: 10 "
" } "
" } "
- " blob_name: 'unsharedweights1' "
+ " param: 'unsharedweights1' "
" bottom: 'data' "
" top: 'innerproduct1' "
"} "
" std: 10 "
" } "
" } "
- " blob_name: 'unsharedweights2' "
+ " param: 'unsharedweights2' "
" bottom: 'data' "
" top: 'innerproduct2' "
"} "
" std: 10 "
" } "
" } "
- " blob_name: 'sharedweights' "
+ " param: 'sharedweights' "
" bottom: 'data' "
" top: 'innerproduct1' "
"} "
" std: 10 "
" } "
" } "
- " blob_name: 'sharedweights' "
+ " param: 'sharedweights' "
" bottom: 'data' "
" top: 'innerproduct2' "
"} "
" value: 0.5 "
" } "
" } "
- " blob_name: 'unsharedweights1' "
+ " param: 'unsharedweights1' "
" bottom: 'data1' "
" top: 'innerproduct1' "
"} "
" value: 0.5 "
" } "
" } "
- " blob_name: 'unsharedweights2' "
+ " param: 'unsharedweights2' "
" bottom: 'innerproduct1' "
" top: 'innerproduct2' "
"} "
" value: 0.5 "
" } "
" } "
- " blob_name: 'sharedweights' "
+ " param: 'sharedweights' "
" bottom: 'data1' "
" top: 'innerproduct1' "
"} "
" value: 0.5 "
" } "
" } "
- " blob_name: 'sharedweights' "
+ " param: 'sharedweights' "
" bottom: 'innerproduct1' "
" top: 'innerproduct2' "
"} "