Fix svace issue GraphWatcher UNINIT.CTOR
author: hyeonseok lee <hs89.lee@samsung.com>
Tue, 9 Feb 2021 06:29:20 +0000 (15:29 +0900)
committer: MyungJoo Ham <myungjoo.ham@samsung.com>
Tue, 9 Feb 2021 08:41:01 +0000 (17:41 +0900)
In the constructor, the member variable expected_loss is not initialized.
Since expected_loss is read later in the readIteration function, it is initialized to 0.0.

Signed-off-by: hyeonseok lee <hs89.lee@samsung.com>
Applications/TransferLearning/CIFAR_Classification/jni/main_func.cpp
nntrainer/tensor/manager.cpp
test/unittest/unittest_nntrainer_models.cpp

index cc59819b0c8e4dd182e7311203bfd341f77bb75f..709ab433d2542e10acc6eb8e27b64d2040451924 100644 (file)
@@ -270,7 +270,8 @@ int main(int argc, char *argv[]) {
     dataset = createDataset(ml::train::DatasetType::GENERATOR);
     dataset->setGeneratorFunc(ml::train::DatasetDataType::DATA_TRAIN,
                               getBatch_train);
-    dataset->setGeneratorFunc(ml::train::DatasetDataType::DATA_VAL, getBatch_val);
+    dataset->setGeneratorFunc(ml::train::DatasetDataType::DATA_VAL,
+                              getBatch_val);
   } catch (...) {
     std::cerr << "Error creating dataset" << std::endl;
     return 1;
index 9d41bacade0b135f3736c8bf922340ca18626304..cf8f3355b5425894989bb6f8d0483111c6f0153b 100644 (file)
@@ -81,12 +81,12 @@ MMapedMemory::MMapedMemory(size_t size, bool allocate_fd) :
   }
 
   if (buf_ == MAP_FAILED) {
-    #ifdef __ANDROID__
-      if (fd_ != -1) {
-        // unlink / close the given fd here
-        close(fd_);
-      }
-    #endif
+#ifdef __ANDROID__
+    if (fd_ != -1) {
+      // unlink / close the given fd here
+      close(fd_);
+    }
+#endif
 
     throw std::runtime_error("[MMapedMemory] mmap failed");
   }
index de76dcdd0309b013e42f5019e7a7de9594647be9..1ad3714f6b33f34a1019a3c9e0f5de0ddc2f124c 100644 (file)
@@ -292,6 +292,7 @@ void NodeWatcher::backward(int iteration, bool verify_deriv, bool verify_grad) {
 }
 
 GraphWatcher::GraphWatcher(const std::string &config, const bool opt) :
+  expected_loss(0.0),
   optimize(opt) {
   nn = nntrainer::NeuralNetwork();