From: Inki Dae Date: Fri, 17 Sep 2021 04:40:52 +0000 (+0900) Subject: mv_machine_learning: code refactoring to check custom device type X-Git-Tag: submit/tizen/20210924.044804~2 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=5fae68afb713837cd5320b6f6fb1a96ee786b92c;p=platform%2Fcore%2Fapi%2Fmediavision.git mv_machine_learning: code refactoring to check custom device type This patch does code refactoring for enhancing code readability of the condition which checks if config file path is required or not according to given backend and target device types. As for this, this patch adds IsValidBackendType function to check if a given backend type is valid or not, IsConfigFilePathRequired function to check if config file path is required or not, and uses these functions instead of the existing complicated condition. Change-Id: I195e3caeb205b159e2128a14c2b4b6516e7b952d Signed-off-by: Inki Dae --- diff --git a/mv_machine_learning/mv_inference/inference/src/mv_inference_open.cpp b/mv_machine_learning/mv_inference/inference/src/mv_inference_open.cpp index 85ff93c8..8de14d58 100644 --- a/mv_machine_learning/mv_inference/inference/src/mv_inference_open.cpp +++ b/mv_machine_learning/mv_inference/inference/src/mv_inference_open.cpp @@ -120,6 +120,25 @@ bool IsJsonFile(const std::string& fileName) return false; } +static bool IsValidBackendType(const int backend_type) +{ + if (backend_type > MV_INFERENCE_BACKEND_NONE && + backend_type < MV_INFERENCE_BACKEND_MAX) + return true; + + return false; +} + +static bool IsConfigFilePathRequired(const int target_device_type, const int backend_type) +{ + // In case of MV_INFERENCE_TARGET_DEVICE_CUSTOM via MLAPI backend, config file path is required.
+ if (backend_type == MV_INFERENCE_BACKEND_MLAPI && + target_device_type & MV_INFERENCE_TARGET_DEVICE_CUSTOM) + return true; + + return false; +} + int mv_inference_configure_model_open(mv_inference_h infer, mv_engine_config_h engine_config) { @@ -180,19 +199,19 @@ int mv_inference_configure_model_open(mv_inference_h infer, goto release_model_meta_file_path; } + if (!IsValidBackendType(backendType)) { + LOGE("Invalid backend type(%d).", backendType); + ret = MEDIA_VISION_ERROR_INVALID_PARAMETER; + goto release_model_meta_file_path; + } + if (access(modelWeightFilePath, F_OK)) { LOGE("weightFilePath in [%s] ", modelWeightFilePath); ret = MEDIA_VISION_ERROR_INVALID_PATH; goto release_model_meta_file_path; } - if ((backendType > MV_INFERENCE_BACKEND_NONE && - backendType < MV_INFERENCE_BACKEND_MAX) && - (backendType != MV_INFERENCE_BACKEND_TFLITE) && - (backendType != MV_INFERENCE_BACKEND_ARMNN) && - (backendType == MV_INFERENCE_BACKEND_MLAPI && - (pInfer->GetTargetType() & MV_INFERENCE_TARGET_DEVICE_CUSTOM)) && - (backendType != MV_INFERENCE_BACKEND_ONE)) { + if (IsConfigFilePathRequired(pInfer->GetTargetType(), backendType)) { if (access(modelConfigFilePath, F_OK)) { LOGE("modelConfigFilePath in [%s] ", modelConfigFilePath); ret = MEDIA_VISION_ERROR_INVALID_PATH;