[coverity] Fix coverity issues
author Donghyeon Jeong <dhyeon.jeong@samsung.com>
Wed, 31 Jan 2024 06:28:38 +0000 (15:28 +0900)
committer Jijoong Moon <jijoong.moon@samsung.com>
Thu, 1 Feb 2024 00:56:52 +0000 (09:56 +0900)
This PR resolves the coverity issues that were identified.

**Changes proposed in this PR:**
- Specify the return type of the lambda function
- Use references to avoid copying the objects.

This fixes:
- Use of auto that causes a copy (AUTO_CAUSES_COPY)

**Self-evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test:   [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Donghyeon Jeong <dhyeon.jeong@samsung.com>
nntrainer/layers/layer_context.h
nntrainer/layers/layer_node.cpp
nntrainer/models/neuralnet.cpp

index 59f435232a2f60c2524f8f2e8f91654acae6cc9a..6c8528c1338ccce04439d3902d67832a2126697c 100644 (file)
@@ -217,7 +217,7 @@ public:
                 bool trainable = false,
                 TensorLifespan lifespan = TensorLifespan::ITERATION_LIFESPAN,
                 bool private_ = true) {
-    auto prefix_ = private_ ? this->name : this->prefix;
+    const auto &prefix_ = private_ ? this->name : this->prefix;
     tensors_spec.emplace_back(dim, init, trainable, prefix_ + ":" + name,
                               lifespan);
     return tensors_spec.size() - 1;
index ed6db090fdf776e0353cd673f3fd06ebb289f956..21381a3f67261c8f53c081d0acaa9f15cc3dfc63 100644 (file)
@@ -305,9 +305,11 @@ const std::vector<std::string> LayerNode::getInputLayers() const {
     std::get<std::vector<props::InputConnection>>(*layer_node_props);
   std::vector<std::string> names;
   names.reserve(input_connections.size());
-  std::transform(input_connections.begin(), input_connections.end(),
-                 std::back_inserter(names),
-                 [](const Connection &con) { return con.getName(); });
+  std::transform(
+    input_connections.begin(), input_connections.end(),
+    std::back_inserter(names), [](const Connection &con) -> const auto & {
+      return con.getName();
+    });
   return names;
 }
 
@@ -571,7 +573,7 @@ InitLayerContext LayerNode::finalize(const std::vector<TensorDim> &input_dims,
     layer = std::move(dlayer);
   }
 
-  auto scope = getSharedFrom().empty() ? getName() : getSharedFrom();
+  const auto &scope = getSharedFrom().empty() ? getName() : getSharedFrom();
   float max_norm = 0.0;
   if (!std::get<props::ClipGradByGlobalNorm>(*layer_node_props).empty())
     max_norm = std::get<props::ClipGradByGlobalNorm>(*layer_node_props).get();
index 7a6a447eb598b77cbaef78b73e6c807ab8fa396f..ee4482c2122d3faf38e5645543ec8ead059f7d93 100644 (file)
@@ -1381,7 +1381,8 @@ void NeuralNetwork::print(std::ostream &out, unsigned int flags,
     std::vector<unsigned int> column_size = {20, 20, 20, 20};
     auto print_graph_layer_info =
       [column_size](std::ostream &out, std::vector<std::string> layer_info) {
-        auto trim_string = [](std::string str, unsigned int column_width) {
+        const auto &trim_string = [](std::string str,
+                                     unsigned int column_width) {
           return str.size() < column_width ? str
                                            : str.substr(0, column_width - 1);
         };