~InferenceEngineCommon();
/**
- * @brief Load configuration information from ini file.
+ * @brief Load configuration information from a given ini file.
+ * If no path is given, the /etc/inference/inference_engine_mlapi_backend.ini file will be used by default.
+ *
* The ini file provides default tensor filter types for
* MLAPI backend. Please refer to /etc/inference/inference_engine_mlapi_backend.ini file.
+ *
+ * @since_tizen 6.5
+ * @param[in] ini_file_path A full path to the ini configuration file.
+ * @return @c INFERENCE_ENGINE_ERROR_NONE on success, otherwise an error code
+ *         (e.g. @c INFERENCE_ENGINE_ERROR_INVALID_OPERATION if the file cannot be loaded).
*/
- int LoadConfigFile(void);
+ int LoadConfigFile(std::string ini_file_path = "");
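
A minimal caller-side sketch of the new parameter, assuming a default-constructed InferenceEngineCommon object (the custom path below is purely illustrative, not part of this change):

    InferenceEngineCommon engine;
    // Fall back to /etc/inference/inference_engine_mlapi_backend.ini.
    int ret = engine.LoadConfigFile();
    // Or point the engine at a caller-provided configuration file instead.
    ret = engine.LoadConfigFile("/opt/usr/my_mlapi_backend.ini");
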
/**
* @brief Load a backend engine library with a given backend name.
LOGW("LEAVE");
}
- int InferenceEngineCommon::LoadConfigFile(void)
+ int InferenceEngineCommon::LoadConfigFile(std::string ini_file_path)
{
int ret = INFERENCE_ENGINE_ERROR_NONE;
int npu_type = -1, cpu_and_gpu_type = -1;
char *default_type_for_npu = NULL, *default_type_for_cpu_gpu = NULL;
- dictionary *dict = iniparser_load(INFERENCE_MLAPI_INI_FILENAME);
+ if (ini_file_path.empty())
+ ini_file_path = INFERENCE_MLAPI_INI_FILENAME;
+
+ LOGI("%s configuration file will be used.\n", ini_file_path.c_str());
+
+ dictionary *dict = iniparser_load(ini_file_path.c_str());
if (dict == NULL) {
- LOGE("Fail to load %s file.\n", INFERENCE_MLAPI_INI_FILENAME);
+ LOGE("Fail to load %s file.\n", ini_file_path.c_str());
return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
}
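
Once the dictionary is loaded, the declared locals suggest the ini keys are read with iniparser before the dictionary is released. A hedged sketch, assuming hypothetical section/key names (the real ones live in the installed ini file):

    // Hypothetical keys; consult /etc/inference/inference_engine_mlapi_backend.ini for the real ones.
    default_type_for_npu = (char *) iniparser_getstring(dict,
            "inference mlapi npu backend:default tensor filter type", NULL);
    default_type_for_cpu_gpu = (char *) iniparser_getstring(dict,
            "inference mlapi cpu and gpu backend:default tensor filter type", NULL);

    // ... map the strings to the npu_type / cpu_and_gpu_type values ...

    iniparser_freedict(dict);
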