[Coverity] Fix the coverity issues
author: Donghyeon Jeong <dhyeon.jeong@samsung.com>
Mon, 18 Mar 2024 07:42:58 +0000 (16:42 +0900)
committer: jijoong.moon <jijoong.moon@samsung.com>
Tue, 19 Mar 2024 22:41:33 +0000 (07:41 +0900)
This PR resolves two Coverity issues: a missing lock (the object's mutex was taken after its state was modified) and a use of `auto` that caused an unnecessary copy.

**Self-evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test:   [X]Passed [ ]Failed [ ]Skipped

Change-Id: Ibc56e24d8329b650731a9378ee3a757b7986e2f9
Signed-off-by: Donghyeon Jeong <dhyeon.jeong@samsung.com>
api/capi/include/nntrainer_internal.h
nntrainer/utils/node_exporter.cpp

index d7998df..c4c8638 100644 (file)
@@ -213,9 +213,9 @@ typedef struct {
     ML_TRAIN_VERIFY_VALID_HANDLE(obj_h);                                       \
     std::lock_guard<std::mutex> ml_train_lock(GLOCK);                          \
     ML_TRAIN_GET_VALID_HANDLE(obj, obj_h, obj_type, obj_name);                 \
+    obj->m.lock();                                                             \
     if (!obj->in_use)                                                          \
       obj->magic = 0;                                                          \
-    obj->m.lock();                                                             \
   } while (0)
 
 /**
@@ -243,8 +243,8 @@ typedef struct {
     ML_TRAIN_VERIFY_VALID_HANDLE(model);                                \
     std::lock_guard<std::mutex> ml_train_lock(GLOCK);                   \
     ML_TRAIN_GET_VALID_HANDLE(nnmodel, model, ml_train_model, "model"); \
-    nnmodel->magic = 0;                                                 \
     nnmodel->m.lock();                                                  \
+    nnmodel->magic = 0;                                                 \
   } while (0)
 
 /**
index e959ede..4701418 100644 (file)
@@ -57,9 +57,7 @@ Exporter::Exporter() : stored_result(nullptr), is_exported(false) {
  *
  */
 Exporter::Exporter(flatbuffers::FlatBufferBuilder *fbb) :
-  fbb(fbb),
-  stored_result(nullptr),
-  is_exported(false) {}
+  fbb(fbb), stored_result(nullptr), is_exported(false) {}
 #endif
 
 /**
@@ -196,8 +194,13 @@ void Exporter::saveTflResult(
 
   auto strides = std::get<std::array<props::Stride, CONV2D_DIM>>(props);
   assert(strides.size() == CONV2D_DIM);
-  auto padding = std::get<props::Padding2D>(props).get();
-  assert(padding == "same" || padding == "valid");
+  const auto &padding = std::get<props::Padding2D>(props).get();
+  if (padding != "same" && padding != "valid") {
+    std::ostringstream ss;
+    ss << "Unsupported padding type; \"" << padding
+       << "\" is not supported. Use \"same\" or \"valid\".";
+    throw std::runtime_error(ss.str());
+  }
   auto options = tflite::CreateConv2DOptions(*fbb, tflite_padding(padding),
                                              strides.at(0), strides.at(1))
                    .Union();