// the current NetState.
NetParameter filtered_param;
FilterNet(in_param, &filtered_param);
- if (Caffe::root_solver()) {
- LOG(INFO) << "Initializing net from parameters: " << std::endl
- << filtered_param.DebugString();
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << "Initializing net from parameters: " << std::endl
+ << filtered_param.DebugString();
// Create a copy of filtered_param with splits added where necessary.
NetParameter param;
InsertSplits(filtered_param, &param);
const int layer_id = -1; // inputs have fake layer ID -1
AppendTop(param, layer_id, input_id, &available_blobs, &blob_name_to_idx);
}
- DLOG_IF(INFO, Caffe::root_solver())
- << "Memory required for data: " << memory_used_ * sizeof(Dtype);
// For each layer, set up its input and output
bottom_vecs_.resize(param.layer_size());
top_vecs_.resize(param.layer_size());
layers_.push_back(LayerRegistry<Dtype>::CreateLayer(layer_param));
}
layer_names_.push_back(layer_param.name());
- if (Caffe::root_solver()) {
- LOG(INFO) << "Creating Layer " << layer_param.name();
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << "Creating Layer " << layer_param.name();
bool need_backward = false;
// Figure out this layer's input and output
} else {
layers_[layer_id]->SetUp(bottom_vecs_[layer_id], top_vecs_[layer_id]);
}
- if (Caffe::root_solver()) {
- LOG(INFO) << "Setting up " << layer_names_[layer_id];
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << "Setting up " << layer_names_[layer_id];
for (int top_id = 0; top_id < top_vecs_[layer_id].size(); ++top_id) {
if (blob_loss_weights_.size() <= top_id_vecs_[layer_id][top_id]) {
blob_loss_weights_.resize(top_id_vecs_[layer_id][top_id] + 1, Dtype(0));
}
blob_loss_weights_[top_id_vecs_[layer_id][top_id]] = layer->loss(top_id);
- if (Caffe::root_solver()) {
- LOG(INFO) << "Top shape: "
- << top_vecs_[layer_id][top_id]->shape_string();
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << "Top shape: " << top_vecs_[layer_id][top_id]->shape_string();
if (layer->loss(top_id)) {
- if (Caffe::root_solver()) {
- LOG(INFO) << " with loss weight " << layer->loss(top_id);
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << " with loss weight " << layer->loss(top_id);
}
memory_used_ += top_vecs_[layer_id][top_id]->count();
}
- if (Caffe::root_solver()) {
- DLOG(INFO) << "Memory required for data: "
- << memory_used_ * sizeof(Dtype);
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << "Memory required for data: " << memory_used_ * sizeof(Dtype);
const int param_size = layer_param.param_size();
const int num_param_blobs = layers_[layer_id]->blobs().size();
CHECK_LE(param_size, num_param_blobs)
}
}
if (!layer_contributes_loss) { layer_need_backward_[layer_id] = false; }
- if (layer_need_backward_[layer_id]) {
- if (Caffe::root_solver()) {
+ if (Caffe::root_solver()) {
+ if (layer_need_backward_[layer_id]) {
LOG(INFO) << layer_names_[layer_id] << " needs backward computation.";
- }
- } else {
- if (Caffe::root_solver()) {
+ } else {
LOG(INFO) << layer_names_[layer_id]
- << " does not need backward computation.";
+ << " does not need backward computation.";
}
}
for (int bottom_id = 0; bottom_id < bottom_vecs_[layer_id].size();
// In the end, all remaining blobs are considered output blobs.
for (set<string>::iterator it = available_blobs.begin();
it != available_blobs.end(); ++it) {
- if (Caffe::root_solver()) {
- LOG(INFO) << "This network produces output " << *it;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << "This network produces output " << *it;
net_output_blobs_.push_back(blobs_[blob_name_to_idx[*it]].get());
net_output_blob_indices_.push_back(blob_name_to_idx[*it]);
}
}
ShareWeights();
debug_info_ = param.debug_info();
- if (Caffe::root_solver()) {
- LOG(INFO) << "Network initialization done.";
- LOG(INFO) << "Memory required for data: " << memory_used_ * sizeof(Dtype);
- }
+ LOG_IF(INFO, Caffe::root_solver()) << "Network initialization done.";
}
template <typename Dtype>
// Check whether the rule is broken due to phase.
if (rule.has_phase()) {
if (rule.phase() != state.phase()) {
- if (Caffe::root_solver()) {
- LOG(INFO) << "The NetState phase (" << state.phase()
- << ") differed from the phase (" << rule.phase()
- << ") specified by a rule in layer " << layer_name;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << "The NetState phase (" << state.phase()
+ << ") differed from the phase (" << rule.phase()
+ << ") specified by a rule in layer " << layer_name;
return false;
}
}
// Check whether the rule is broken due to min level.
if (rule.has_min_level()) {
if (state.level() < rule.min_level()) {
- if (Caffe::root_solver()) {
- LOG(INFO) << "The NetState level (" << state.level()
- << ") is above the min_level (" << rule.min_level()
- << ") specified by a rule in layer " << layer_name;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << "The NetState level (" << state.level()
+ << ") is above the min_level (" << rule.min_level()
+ << ") specified by a rule in layer " << layer_name;
return false;
}
}
// Check whether the rule is broken due to max level.
if (rule.has_max_level()) {
if (state.level() > rule.max_level()) {
- if (Caffe::root_solver()) {
- LOG(INFO) << "The NetState level (" << state.level()
- << ") is above the max_level (" << rule.max_level()
- << ") specified by a rule in layer " << layer_name;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << "The NetState level (" << state.level()
+ << ") is above the max_level (" << rule.max_level()
+ << ") specified by a rule in layer " << layer_name;
return false;
}
}
if (rule.stage(i) == state.stage(j)) { has_stage = true; }
}
if (!has_stage) {
- if (Caffe::root_solver()) {
- LOG(INFO) << "The NetState did not contain stage '" << rule.stage(i)
- << "' specified by a rule in layer " << layer_name;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << "The NetState did not contain stage '" << rule.stage(i)
+ << "' specified by a rule in layer " << layer_name;
return false;
}
}
if (rule.not_stage(i) == state.stage(j)) { has_stage = true; }
}
if (has_stage) {
- if (Caffe::root_solver()) {
- LOG(INFO) << "The NetState contained a not_stage '" << rule.not_stage(i)
- << "' specified by a rule in layer " << layer_name;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << "The NetState contained a not_stage '" << rule.not_stage(i)
+ << "' specified by a rule in layer " << layer_name;
return false;
}
}
if (blob_name_to_idx && layer_param && layer_param->bottom_size() > top_id &&
blob_name == layer_param->bottom(top_id)) {
// In-place computation
- if (Caffe::root_solver()) {
- LOG(INFO) << layer_param->name() << " -> " << blob_name << " (in-place)";
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << layer_param->name() << " -> " << blob_name << " (in-place)";
top_vecs_[layer_id].push_back(blobs_[(*blob_name_to_idx)[blob_name]].get());
top_id_vecs_[layer_id].push_back((*blob_name_to_idx)[blob_name]);
} else if (blob_name_to_idx &&
<< layer_param.name() << "', bottom index " << bottom_id << ")";
}
const int blob_id = (*blob_name_to_idx)[blob_name];
- if (Caffe::root_solver()) {
- LOG(INFO) << layer_names_[layer_id] << " <- " << blob_name;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << layer_names_[layer_id] << " <- " << blob_name;
bottom_vecs_[layer_id].push_back(blobs_[blob_id].get());
bottom_id_vecs_[layer_id].push_back(blob_id);
available_blobs->erase(blob_name);
const Blob<Dtype>& blob = *net_input_blobs_[input_id];
const string& blob_name = blob_names_[net_input_blob_indices_[input_id]];
const Dtype data_abs_val_mean = blob.asum_data() / blob.count();
- if (Caffe::root_solver()) {
- LOG(INFO) << " [Forward] "
- << "Input " << blob_name << " data: " << data_abs_val_mean;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << " [Forward] "
+ << "Input " << blob_name << " data: " << data_abs_val_mean;
}
template <typename Dtype>
const Blob<Dtype>& blob = *top_vecs_[layer_id][top_id];
const string& blob_name = blob_names_[top_id_vecs_[layer_id][top_id]];
const Dtype data_abs_val_mean = blob.asum_data() / blob.count();
- if (Caffe::root_solver()) {
- LOG(INFO) << " [Forward] "
- << "Layer " << layer_names_[layer_id]
- << ", top blob " << blob_name
- << " data: " << data_abs_val_mean;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << " [Forward] "
+ << "Layer " << layer_names_[layer_id]
+ << ", top blob " << blob_name
+ << " data: " << data_abs_val_mean;
}
for (int param_id = 0; param_id < layers_[layer_id]->blobs().size();
++param_id) {
const int net_param_id = param_id_vecs_[layer_id][param_id];
const string& blob_name = param_display_names_[net_param_id];
const Dtype data_abs_val_mean = blob.asum_data() / blob.count();
- if (Caffe::root_solver()) {
- LOG(INFO) << " [Forward] "
- << "Layer " << layer_names_[layer_id]
- << ", param blob " << blob_name
- << " data: " << data_abs_val_mean;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << " [Forward] "
+ << "Layer " << layer_names_[layer_id]
+ << ", param blob " << blob_name
+ << " data: " << data_abs_val_mean;
}
}
const Blob<Dtype>& blob = *bottom_vec[bottom_id];
const string& blob_name = blob_names_[bottom_id_vecs_[layer_id][bottom_id]];
const Dtype diff_abs_val_mean = blob.asum_diff() / blob.count();
- if (Caffe::root_solver()) {
- LOG(INFO) << " [Backward] "
- << "Layer " << layer_names_[layer_id]
- << ", bottom blob " << blob_name
- << " diff: " << diff_abs_val_mean;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << " [Backward] "
+ << "Layer " << layer_names_[layer_id]
+ << ", bottom blob " << blob_name
+ << " diff: " << diff_abs_val_mean;
}
for (int param_id = 0; param_id < layers_[layer_id]->blobs().size();
++param_id) {
if (!layers_[layer_id]->param_propagate_down(param_id)) { continue; }
const Blob<Dtype>& blob = *layers_[layer_id]->blobs()[param_id];
const Dtype diff_abs_val_mean = blob.asum_diff() / blob.count();
- if (Caffe::root_solver()) {
- LOG(INFO) << " [Backward] "
- << "Layer " << layer_names_[layer_id]
- << ", param blob " << param_id
- << " diff: " << diff_abs_val_mean;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << " [Backward] "
+ << "Layer " << layer_names_[layer_id]
+ << ", param blob " << param_id
+ << " diff: " << diff_abs_val_mean;
}
}
const Dtype diff_abs_val_mean = blob.asum_diff() / blob.count();
if (param_owner < 0) {
const Dtype data_abs_val_mean = blob.asum_data() / blob.count();
- if (Caffe::root_solver()) {
- LOG(INFO) << " [Update] Layer " << layer_name
- << ", param " << param_display_name
- << " data: " << data_abs_val_mean
- << "; diff: " << diff_abs_val_mean;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << " [Update] Layer " << layer_name
+ << ", param " << param_display_name
+ << " data: " << data_abs_val_mean
+ << "; diff: " << diff_abs_val_mean;
} else {
const string& owner_layer_name =
layer_names_[param_layer_indices_[param_owner].first];
- if (Caffe::root_solver()) {
- LOG(INFO) << " [Update] Layer " << layer_name
- << ", param blob " << param_display_name
- << " (owned by layer " << owner_layer_name << ", " << "param "
- << param_display_names_[param_owners_[param_id]] << ")"
- << " diff: " << diff_abs_val_mean;
- }
+ LOG_IF(INFO, Caffe::root_solver())
+ << " [Update] Layer " << layer_name
+ << ", param blob " << param_display_name
+ << " (owned by layer " << owner_layer_name << ", " << "param "
+ << param_display_names_[param_owners_[param_id]] << ")"
+ << " diff: " << diff_abs_val_mean;
}
}