[coverity] Fix coverity issues
author    Donghyeon Jeong <dhyeon.jeong@samsung.com>
          Wed, 31 Jan 2024 06:28:38 +0000 (15:28 +0900)
committer jijoong.moon <jijoong.moon@samsung.com>
          Wed, 20 Mar 2024 06:18:05 +0000 (15:18 +0900)
This PR resolves the Coverity issues identified by static analysis.

**Changes proposed in this PR:**
- Specify the return type of the lambda function explicitly.
- Use references to avoid copying objects.

This fixes:
- Use of auto that causes a copy (AUTO_CAUSES_COPY); a minimal sketch of the pattern follows below.
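
To make the defect class concrete, here is a minimal standalone sketch (plain C++, not nntrainer code) of the pattern Coverity flags and the reference-based fix this PR applies:

```cpp
#include <iostream>
#include <string>

struct Context {
  std::string name = "fc0";
  std::string prefix = "model";

  std::string pick(bool private_) const {
    // Flagged by Coverity: `auto` deduces std::string, so the selected
    // member is copied even though it is only read afterwards.
    auto copied = private_ ? name : prefix;

    // The fix: `const auto &` deduces const std::string &. Both branches
    // are lvalues of the same type, so the conditional yields an lvalue
    // and the reference binds directly to the chosen member; no copy.
    const auto &referenced = private_ ? name : prefix;

    return referenced + ":" + copied;
  }
};

int main() {
  Context ctx;
  std::cout << ctx.pick(true) << '\n'; // prints "fc0:fc0"
  return 0;
}
```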

**Self-evaluation:**
1. Build test: [X] Passed [ ] Failed [ ] Skipped
2. Run test:   [X] Passed [ ] Failed [ ] Skipped

Signed-off-by: Donghyeon Jeong <dhyeon.jeong@samsung.com>
nntrainer/layers/layer_context.h
nntrainer/layers/layer_node.cpp
nntrainer/models/neuralnet.cpp

diff --git a/nntrainer/layers/layer_context.h b/nntrainer/layers/layer_context.h
index 02dc08e9c029b9dfc8e035783f99d5e2126d816f..f86491e0df6161091966f24f6a76ca39f9660090 100644
@@ -217,7 +217,7 @@ public:
                 bool trainable = false,
                 TensorLifespan lifespan = TensorLifespan::ITERATION_LIFESPAN,
                 bool private_ = true) {
-    auto prefix_ = private_ ? this->name : this->prefix;
+    const auto &prefix_ = private_ ? this->name : this->prefix;
     tensors_spec.emplace_back(dim, init, trainable, prefix_ + ":" + name,
                               lifespan);
     return tensors_spec.size() - 1;
diff --git a/nntrainer/layers/layer_node.cpp b/nntrainer/layers/layer_node.cpp
index cedb3caee7a7e2c25d7f5175e14fcde31445f795..937ee4391797ae992364920098efbc2d70056968 100644
@@ -305,9 +305,11 @@ const std::vector<std::string> LayerNode::getInputLayers() const {
     std::get<std::vector<props::InputConnection>>(*layer_node_props);
   std::vector<std::string> names;
   names.reserve(input_connections.size());
-  std::transform(input_connections.begin(), input_connections.end(),
-                 std::back_inserter(names),
-                 [](const Connection &con) { return con.getName(); });
+  std::transform(
+    input_connections.begin(), input_connections.end(),
+    std::back_inserter(names), [](const Connection &con) -> const auto & {
+      return con.getName();
+    });
   return names;
 }
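
The trailing return type matters here because a lambda without one deduces its return type as plain `auto`, which is always a by-value type. A hedged sketch of the difference, with a stand-in `Connection` whose `getName()` is assumed to return a reference (nntrainer's actual signature is not shown in this hunk):

```cpp
#include <algorithm>
#include <iterator>
#include <string>
#include <vector>

// Stand-in type; getName() returning a reference is an assumption here.
struct Connection {
  std::string name;
  const std::string &getName() const { return name; }
};

std::vector<std::string> collectNames(const std::vector<Connection> &cons) {
  std::vector<std::string> names;
  names.reserve(cons.size());
  // Without a trailing return type, the lambda's return type is deduced as
  // std::string (plain `auto` never deduces a reference), so getName()'s
  // result is copied into the return value and then again into `names`.
  // With `-> const auto &` the lambda returns const std::string &, leaving
  // only the one unavoidable copy made when the vector element is built.
  std::transform(cons.begin(), cons.end(), std::back_inserter(names),
                 [](const Connection &con) -> const auto & {
                   return con.getName();
                 });
  return names;
}
```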
 
@@ -572,7 +574,7 @@ LayerNode::finalize(const std::vector<TensorDim> &input_dims,
     layer = std::move(dlayer);
   }
 
-  auto scope = getSharedFrom().empty() ? getName() : getSharedFrom();
+  const auto &scope = getSharedFrom().empty() ? getName() : getSharedFrom();
   float max_norm = 0.0;
   if (!std::get<props::ClipGradByGlobalNorm>(*layer_node_props).empty())
     max_norm = std::get<props::ClipGradByGlobalNorm>(*layer_node_props).get();
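
Whether the `scope` change actually removes a copy depends on what `getName()` and `getSharedFrom()` return; the sketch below uses assumed reference-returning signatures, not nntrainer's actual declarations:

```cpp
#include <string>

struct Node {
  std::string name = "fc0";
  std::string shared_from;

  // Assumed signatures: accessors returning references to stored strings.
  const std::string &getName() const { return name; }
  const std::string &getSharedFrom() const { return shared_from; }

  void finalizeSketch() const {
    // When both getters return const std::string &, the conditional yields
    // a const lvalue and the reference binds without copying anything; this
    // is the case the AUTO_CAUSES_COPY fix targets.
    const auto &scope = getSharedFrom().empty() ? getName() : getSharedFrom();

    // If a getter returned by value instead, the conditional would produce
    // a temporary; binding it to a const reference is still safe because
    // the temporary's lifetime is extended to the reference's lifetime.
    (void)scope;
  }
};
```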
diff --git a/nntrainer/models/neuralnet.cpp b/nntrainer/models/neuralnet.cpp
index 5b1cccc34edb739d727292c08e72fb0376104da6..9223c4a5a0dd0e59fa4640ff3baa93bf705f5960 100644
@@ -1203,7 +1203,8 @@ void NeuralNetwork::print(std::ostream &out, unsigned int flags,
     std::vector<unsigned int> column_size = {20, 20, 20, 20};
     auto print_graph_layer_info =
       [column_size](std::ostream &out, std::vector<std::string> layer_info) {
-        auto trim_string = [](std::string str, unsigned int column_width) {
+        const auto &trim_string = [](std::string str,
+                                     unsigned int column_width) {
           return str.size() < column_width ? str
                                            : str.substr(0, column_width - 1);
         };
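
For `trim_string`, the reference binds to the closure object itself. A minimal sketch of why this is safe (the lambda body is copied from the hunk above; the surrounding code is illustrative):

```cpp
#include <iostream>
#include <string>

int main() {
  // With `auto`, the closure would initialize trim_string by value (cheap
  // for a captureless lambda, and elided entirely since C++17), but the
  // checker flags by-value `auto` uniformly. `const auto &` binds to the
  // temporary closure instead; its lifetime is extended to match the
  // reference, so calling through it later is well-defined.
  const auto &trim_string = [](std::string str, unsigned int column_width) {
    return str.size() < column_width ? str : str.substr(0, column_width - 1);
  };

  std::cout << trim_string("neuralnetwork", 8) << '\n'; // prints "neuraln"
  return 0;
}
```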