From f739207de5d50db580470b672be602822e83dcf5 Mon Sep 17 00:00:00 2001
From: hyeonseok lee
Date: Mon, 10 May 2021 17:26:46 +0900
Subject: [PATCH] Handle PARAMETER_HIDDEN, NULL_RETURNS issue

1. Check return value of malloc function
2. Rename function parameter of lambda function

resolve: 1216568, 12126575, 1222077, 1222080, 1227326, 1227327

Signed-off-by: hyeonseok lee
---
 Applications/SimpleShot/task_runner.cpp | 10 +++++-----
 api/capi/src/nntrainer.cpp              |  4 ++++
 nntrainer/dataset/databuffer_func.cpp   | 17 ++++++++++++++++-
 nntrainer/utils/parse_util.cpp          |  5 +++--
 nntrainer/utils/profiler.cpp            | 11 ++++++-----
 5 files changed, 34 insertions(+), 13 deletions(-)

diff --git a/Applications/SimpleShot/task_runner.cpp b/Applications/SimpleShot/task_runner.cpp
index 23a7e80..c6c8214 100644
--- a/Applications/SimpleShot/task_runner.cpp
+++ b/Applications/SimpleShot/task_runner.cpp
@@ -134,19 +134,19 @@ std::unique_ptr<ml::train::Model> createModel(const std::string &backbone,
   model->addLayer(backbone_layer);
 
   auto generate_knn_part = [&backbone, &app_path,
-                            num_classes](const std::string &variant) {
+                            num_classes](const std::string &variant_) {
     std::vector<LayerHandle> v;
 
     const std::string num_class_prop =
       "num_class=" + std::to_string(num_classes);
 
-    if (variant == "UN") {
+    if (variant_ == "UN") {
       /// left empty intended
-    } else if (variant == "L2N") {
+    } else if (variant_ == "L2N") {
       LayerHandle l2 =
         ml::train::createLayer("l2norm", {"name=l2norm", "trainable=false"});
       v.push_back(l2);
-    } else if (variant == "CL2N") {
+    } else if (variant_ == "CL2N") {
       LayerHandle centering = ml::train::createLayer(
         "centering", {"name=center",
                       "feature_path=" + getFeatureFilePath(backbone, app_path),
@@ -157,7 +157,7 @@ std::unique_ptr<ml::train::Model> createModel(const std::string &backbone,
       v.push_back(l2);
     } else {
       std::stringstream ss;
-      ss << "unsupported variant type: " << variant;
+      ss << "unsupported variant type: " << variant_;
       throw std::invalid_argument(ss.str().c_str());
     }
 
diff --git a/api/capi/src/nntrainer.cpp b/api/capi/src/nntrainer.cpp
index 796b89a..eef890a 100644
--- a/api/capi/src/nntrainer.cpp
+++ b/api/capi/src/nntrainer.cpp
@@ -387,6 +387,10 @@ int ml_train_model_get_summary(ml_train_model_h model,
   }
 
   *summary = (char *)malloc((size + 1) * sizeof(char));
+  if (*summary == nullptr) {
+    ml_loge("failed to malloc");
+    return ML_ERROR_OUT_OF_MEMORY;
+  }
   std::memcpy(*summary, str.c_str(), size + 1);
 
   return status;
diff --git a/nntrainer/dataset/databuffer_func.cpp b/nntrainer/dataset/databuffer_func.cpp
index 0751e4e..c161867 100644
--- a/nntrainer/dataset/databuffer_func.cpp
+++ b/nntrainer/dataset/databuffer_func.cpp
@@ -156,7 +156,7 @@ void DataBufferFromCallback::updateData(BufferType type) {
   try {
     if ((cur_size == NULL) || (running == NULL) || (data == NULL) ||
         (datalabel == NULL))
-      throw std::runtime_error("Error: assining error");
+      throw std::runtime_error("Error: assigning error");
   } catch (...) {
     globalExceptionPtr = std::current_exception();
     NN_EXCEPTION_NOTI(DATA_ERROR);
@@ -173,6 +173,21 @@
   float *veclabel =
     (float *)malloc(sizeof(float) * input_dim.batch() * class_num);
 
+  try {
+    if (vec_arr == nullptr || veclabel_arr == nullptr || vec == nullptr ||
+        veclabel == nullptr) {
+      free(vec);
+      free(veclabel);
+      free(vec_arr);
+      free(veclabel_arr);
+      throw std::runtime_error("Error: assigning error");
+    }
+  } catch (...) {
+    globalExceptionPtr = std::current_exception();
+    NN_EXCEPTION_NOTI(DATA_ERROR);
+    return;
+  }
+
   vec_arr[0] = vec;
   veclabel_arr[0] = veclabel;
 
diff --git a/nntrainer/utils/parse_util.cpp b/nntrainer/utils/parse_util.cpp
index cf12d15..8c49891 100644
--- a/nntrainer/utils/parse_util.cpp
+++ b/nntrainer/utils/parse_util.cpp
@@ -510,8 +510,9 @@ bool istrequal(const std::string &a, const std::string &b) {
   if (a.size() != b.size())
     return false;
 
-  return std::equal(a.begin(), a.end(), b.begin(),
-                    [](char a, char b) { return tolower(a) == tolower(b); });
+  return std::equal(a.begin(), a.end(), b.begin(), [](char a_, char b_) {
+    return tolower(a_) == tolower(b_);
+  });
 }
 
 } /* namespace nntrainer */
diff --git a/nntrainer/utils/profiler.cpp b/nntrainer/utils/profiler.cpp
index 0d98867..0abbd5e 100644
--- a/nntrainer/utils/profiler.cpp
+++ b/nntrainer/utils/profiler.cpp
@@ -123,7 +123,7 @@ void GenericProfileListener::report(std::ostream &out) const {
 
   /// calculate metrics while skipping warmups
   for (auto &entry : time_taken) {
-    auto func = [&](std::ostream &out) {
+    auto func = [&](std::ostream &out_) {
       auto &cnt_ = std::get(entry.second);
       auto &min_ = std::get(entry.second);
       auto &max_ = std::get(entry.second);
@@ -132,14 +132,15 @@ void GenericProfileListener::report(std::ostream &out) const {
       auto title = profiler->eventToStr(entry.first);
 
       if (warmups >= cnt_) {
-        out << std::left << std::setw(total_col_size) << title
-            << "less data then warmup\n";
-        out << std::right; // Restore outputstream adjustflag to standard stream
+        out_ << std::left << std::setw(total_col_size) << title
+             << "less data then warmup\n";
+        out_
+          << std::right; // Restore outputstream adjustflag to standard stream
         return;
       }
 
       // clang-format off
-      out << std::setw(column_size[0]) << title
+      out_ << std::setw(column_size[0]) << title
           << std::setw(column_size[1]) << sum_.count() / (cnt_ - warmups)
           << std::setw(column_size[2]) << min_.count()
          << std::setw(column_size[3]) << max_.count()
-- 
2.7.4
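
Note for reviewers: every NULL_RETURNS fix above has the same shape, i.e. check the malloc result before writing through the pointer and bail out with an out-of-memory error. Below is a minimal, self-contained sketch of that shape; copy_summary() and its -ENOMEM return value are illustrative stand-ins, only the ML_ERROR_OUT_OF_MEMORY mapping comes from the patch itself.

  #include <cerrno>
  #include <cstddef>
  #include <cstdlib>
  #include <cstring>
  #include <string>

  /* Hypothetical helper, not nntrainer API: it only mirrors the check added
   * in ml_train_model_get_summary(). Returns 0 on success and -ENOMEM when
   * the allocation fails (the capi reports ML_ERROR_OUT_OF_MEMORY there). */
  static int copy_summary(const std::string &str, char **summary) {
    std::size_t size = str.size();
    *summary = (char *)std::malloc((size + 1) * sizeof(char));
    if (*summary == nullptr) {
      return -ENOMEM; /* never memcpy through a null pointer */
    }
    std::memcpy(*summary, str.c_str(), size + 1);
    return 0;
  }

The databuffer_func.cpp hunk follows the same check-then-bail shape, with the extra free() calls because four buffers are allocated before the first check runs.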
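The PARAMETER_HIDDEN renames (variant_, a_, b_, out_) all address the same finding: a lambda parameter that reuses the name of a variable visible in the enclosing scope hides that variable, which the checker reports. A short sketch of the istrequal() case, using illustrative names only; istrequal_sketch() is not the real function.

  #include <algorithm>
  #include <cctype>
  #include <string>

  bool istrequal_sketch(const std::string &a, const std::string &b) {
    if (a.size() != b.size())
      return false;
    /* The parameters are named a_ and b_ so they do not hide the enclosing
     * a and b, which is exactly what PARAMETER_HIDDEN flags. */
    return std::equal(a.begin(), a.end(), b.begin(), [](char a_, char b_) {
      return std::tolower(static_cast<unsigned char>(a_)) ==
             std::tolower(static_cast<unsigned char>(b_));
    });
  }

Renaming keeps the behaviour identical; only the identifiers change, so the analyzer no longer sees an outer name being shadowed.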