This PR resolves the Coverity issues that were identified.
**Changes proposed in this PR:**
- Specify the return type of the lambda function
- Use a reference to avoid copying the object (a minimal sketch of the pattern is shown below).
This fixes:
- Use of auto that causes a copy (AUTO_CAUSES_COPY)
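
As a minimal, hypothetical sketch (the `Node` type and names below are illustrative only, not taken from nntrainer), the pattern Coverity flags as AUTO_CAUSES_COPY and the two fixes applied here look roughly like this:

```cpp
#include <algorithm>
#include <iterator>
#include <string>
#include <vector>

// Illustrative type only; not part of the nntrainer codebase.
struct Node {
  std::string name;
  const std::string &getName() const { return name; }
};

std::vector<std::string> collectNames(const std::vector<Node> &nodes) {
  for (const Node &node : nodes) {
    // Before: `auto` deduces std::string, so each access copies the string.
    //   auto copied = node.getName();
    // After: a const reference binds to the getter's return without copying.
    const auto &name_ref = node.getName();
    (void)name_ref;
  }

  std::vector<std::string> names;
  names.reserve(nodes.size());
  // Specifying `-> const auto &` keeps the lambda from returning by value,
  // so the only copy happens when back_inserter stores the string.
  std::transform(nodes.begin(), nodes.end(), std::back_inserter(names),
                 [](const Node &node) -> const auto & { return node.getName(); });
  return names;
}
```

Since `names` stores `std::string` by value, the copy into the vector itself is unavoidable; the return-type annotation only removes the intermediate temporary that the lambda would otherwise create.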
**Self-evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped
Signed-off-by: Donghyeon Jeong <dhyeon.jeong@samsung.com>
bool trainable = false,
TensorLifespan lifespan = TensorLifespan::ITERATION_LIFESPAN,
bool private_ = true) {
- auto prefix_ = private_ ? this->name : this->prefix;
+ const auto &prefix_ = private_ ? this->name : this->prefix;
tensors_spec.emplace_back(dim, init, trainable, prefix_ + ":" + name,
lifespan);
return tensors_spec.size() - 1;
std::get<std::vector<props::InputConnection>>(*layer_node_props);
std::vector<std::string> names;
names.reserve(input_connections.size());
- std::transform(input_connections.begin(), input_connections.end(),
- std::back_inserter(names),
- [](const Connection &con) { return con.getName(); });
+ std::transform(
+ input_connections.begin(), input_connections.end(),
+ std::back_inserter(names), [](const Connection &con) -> const auto & {
+ return con.getName();
+ });
return names;
}
layer = std::move(dlayer);
}
- auto scope = getSharedFrom().empty() ? getName() : getSharedFrom();
+ const auto &scope = getSharedFrom().empty() ? getName() : getSharedFrom();
float max_norm = 0.0;
if (!std::get<props::ClipGradByGlobalNorm>(*layer_node_props).empty())
max_norm = std::get<props::ClipGradByGlobalNorm>(*layer_node_props).get();
std::vector<unsigned int> column_size = {20, 20, 20, 20};
auto print_graph_layer_info =
[column_size](std::ostream &out, std::vector<std::string> layer_info) {
- auto trim_string = [](std::string str, unsigned int column_width) {
+ const auto &trim_string = [](std::string str,
+ unsigned int column_width) {
return str.size() < column_width ? str
: str.substr(0, column_width - 1);
};