From: Inki Dae
Date: Wed, 25 Mar 2020 06:41:08 +0000 (+0900)
Subject: Change directory name from common to src
X-Git-Tag: submit/tizen/20200423.063253~28
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=55a3f3e268cd244f66c851bd5a1ba6b6b7ad896e;p=platform%2Fcore%2Fmultimedia%2Finference-engine-interface.git

Change directory name from common to src

Let's just use the generic directory name, src, instead of the specific
one, common. We already dropped the inference framework, so we no longer
need to stick to the common directory name.

Change-Id: Ib0947dd90f5ee5c5d935d75435a4f36c1b869146
Signed-off-by: Inki Dae
---

diff --git a/CMakeLists.txt b/CMakeLists.txt
index afebd8e..ee4669a 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -32,12 +32,11 @@ ADD_DEFINITIONS("-DTIZEN_DEBUG")
 SET(CMAKE_EXE_LINKER_FLAGS "-Wl,--as-needed -Wl,--rpath=${LIB_INSTALL_DIR}")
 
 #common
-aux_source_directory(common SOURCES)
+aux_source_directory(src SOURCES)
 ADD_LIBRARY(${fw_name_common} SHARED ${SOURCES})
 
 TARGET_LINK_LIBRARIES(${fw_name_common} dlog iniparser stdc++fs)
 
-
 SET_TARGET_PROPERTIES(${fw_name_common}
      PROPERTIES
      VERSION ${FULLVER}
diff --git a/common/inference_engine_common_impl.cpp b/common/inference_engine_common_impl.cpp
deleted file mode 100755
index b64a11e..0000000
--- a/common/inference_engine_common_impl.cpp
+++ /dev/null
@@ -1,259 +0,0 @@
-/**
- * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "inference_engine_error.h"
-#include "inference_engine_common_impl.h"
-#include "inference_engine_ini.h"
-#include <fstream>
-#include <iostream>
-#include <unistd.h>
-#include <time.h>
-#include <dlfcn.h>
-#include <experimental/filesystem>
-
-extern "C" {
-
-#include <dlog.h>
-
-#ifdef LOG_TAG
-#undef LOG_TAG
-#endif
-
-#define LOG_TAG "INFERENCE_ENGINE_COMMON"
-}
-
-namespace fs = std::experimental::filesystem;
-namespace InferenceEngineInterface {
-namespace Common {
-InferenceEngineCommon::InferenceEngineCommon() :
-    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
-    handle(nullptr),
-    engine(nullptr)
-{
-    LOGE("ENTER");
-    InferenceEngineInI ini;
-    ini.LoadInI();
-    mSelectedBackendEngine = static_cast<inference_backend_type_e>(ini.GetSelectedBackendEngine());
-    SetBackendEngine(mSelectedBackendEngine);
-    LOGI("Backend engine is selected by ini file [%d]", mSelectedBackendEngine);
-    LOGE("LEAVE");
-}
-
-InferenceEngineCommon::InferenceEngineCommon(std::string backend) :
-    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
-    handle(nullptr),
-    engine(nullptr)
-{
-    LOGE("ENTER");
-    mBackendLibName = "libinference-engine-" + backend + ".so";
-    LOGE("LEAVE");
-}
-
-InferenceEngineCommon::InferenceEngineCommon(inference_backend_type_e backend) :
-    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
-    handle(nullptr),
-    engine(nullptr)
-{
-    LOGE("ENTER");
-    SetBackendEngine(backend);
-    LOGI("Backend engine is selected by enum input[%d] set[%d]", backend, mSelectedBackendEngine);
-    LOGE("LEAVE");
-}
-
-
-InferenceEngineCommon::InferenceEngineCommon(inference_engine_config *config) :
-    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
-    handle(nullptr),
-    engine(nullptr)
-{
-    LOGI("ENTER");
-
-    // TODO.
-
-    LOGI("LEAVE");
-}
-
-InferenceEngineCommon::~InferenceEngineCommon()
-{
-    LOGW("ENTER");
-    LOGW("LEAVE");
-}
-
-int InferenceEngineCommon::BindBackend(inference_engine_config *config)
-{
-    LOGI("ENTER");
-
-    mBackendLibName = "libinference-engine-" + config->backend_name + ".so";
-
-    char *error = NULL;
-    LOGI("lib: %s", mBackendLibName.c_str());
-    handle = dlopen(mBackendLibName.c_str(), RTLD_NOW);
-    LOGI("HANDLE : [%p]", handle);
-
-    if (!handle) {
-        LOGE("Fail to dlopen %s", mBackendLibName.c_str());
-        LOGE("Error: %s\n", dlerror());
-        return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
-    }
-
-    init_t* EngineInit = (init_t *)dlsym(handle, "EngineCommonInit");
-    if ((error = dlerror()) != NULL) {
-        LOGE("Error: %s\n", error);
-        dlclose(handle);
-        return INFERENCE_ENGINE_ERROR_INTERNAL;
-    }
-
-    engine = EngineInit();
-    if (engine == NULL) {
-        LOGE("Fail to EngineInit");
-        dlclose(handle);
-        return INFERENCE_ENGINE_ERROR_INTERNAL;
-    }
-
-    LOGI("LEAVE");
-
-    return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-void InferenceEngineCommon::UnbindBackend(void)
-{
-    LOGW("ENTER");
-
-    if (handle) {
-        destroy_t *engineDestroy = (destroy_t*)dlsym(handle, "EngineCommonDestroy");
-        engineDestroy(engine);
-        dlclose(handle);
-        engine = nullptr;
-        handle = nullptr;
-    }
-
-    LOGW("LEAVE");
-}
-
-int InferenceEngineCommon::SetTargetDevices(int types)
-{
-    int ret = engine->SetTargetDevices(types);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to SetTargetDevice");
-
-    return ret;
-}
-
-int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_model_format_e model_format)
-{
-    LOGI("ENTER");
-
-    int ret = engine->Load(model_paths, model_format);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to load InferenceEngineVision");
-
-    LOGI("LEAVE");
-
-    return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceEngineCommon::GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
-{
-    return engine->GetInputTensorBuffers(buffers);
-}
-
-int InferenceEngineCommon::GetOutputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
-{
-    return engine->GetOutputTensorBuffers(buffers);
-}
-
-int InferenceEngineCommon::GetInputLayerProperty(inference_engine_layer_property &property)
-{
-    return engine->GetInputLayerProperty(property);
-}
-
-int InferenceEngineCommon::GetOutputLayerProperty(inference_engine_layer_property &property)
-{
-    return engine->GetOutputLayerProperty(property);
-}
-
-int InferenceEngineCommon::SetInputLayerProperty(inference_engine_layer_property &property)
-{
-    return engine->SetInputLayerProperty(property);
-}
-
-int InferenceEngineCommon::SetOutputLayerProperty(inference_engine_layer_property &property)
-{
-    return engine->SetOutputLayerProperty(property);
-}
-
-int InferenceEngineCommon::GetBackendCapacity(inference_engine_capacity *capacity)
-{
-    return engine->GetBackendCapacity(capacity);
-}
-
-int InferenceEngineCommon::Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
-                               std::vector<inference_engine_tensor_buffer> &output_buffers)
-{
-    return engine->Run(input_buffers, output_buffers);
-}
-
-int InferenceEngineCommon::SetLibraryPath(std::string path)
-{
-    LOGE("ENTER");
-    if (path.empty())
-        return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
-
-    if (fs::is_directory(path)) {
-        if(path.back() != '/')
-            path += "/";
-
-        mBackendLibName = path + mBackendLibName;
-    }
-    else {
-        if (fs::is_regular_file(path)){
-            mBackendLibName = path;
-        }
-        else {
-            LOGE("Fail to find path. [%s]", path.c_str());
-            return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
-        }
-    }
-    LOGE("lib: %s", mBackendLibName.c_str());
-    LOGE("LEAVE");
-
-    return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceEngineCommon::SetBackendEngine(inference_backend_type_e backend)
-{
-    std::string backendString;
-    switch(backend){
-    case INFERENCE_BACKEND_OPENCV:
-        backendString = "opencv";
-        break;
-    case INFERENCE_BACKEND_TFLITE:
-        backendString = "tflite";
-        break;
-    case INFERENCE_BACKEND_ARMNN:
-        backendString = "armnn";
-        break;
-    default:
-        LOGE("Not supported backend engine [%d]", backend);
-        return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
-    }
-
-    mBackendLibName = "libinference-engine-" + backendString + ".so";
-
-    return INFERENCE_ENGINE_ERROR_NONE;
-}
-} /* Common */
-} /* InferenceEngineInterface */
diff --git a/common/inference_ini.cpp b/common/inference_ini.cpp
deleted file mode 100644
index ed05547..0000000
--- a/common/inference_ini.cpp
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "inference_engine_ini.h"
-#include "inference_engine_error.h"
-
-#include <iniparser.h>
-#include <unistd.h>
-
-extern "C" {
-
-#include <dlog.h>
-
-#ifdef LOG_TAG
-#undef LOG_TAG
-#endif
-
-#define LOG_TAG "INFERENCE_ENGINE_COMMON"
-}
-
-namespace InferenceEngineInterface {
-namespace Common {
-
-const std::string INFERENCE_INI_FILENAME = "/inference/inference_engine.ini";
-
-InferenceEngineInI::InferenceEngineInI() :
-    mIniDefaultPath(SYSCONFDIR),
-    mSelectedBackendEngine(INFERENCE_BACKEND_NONE)
-{
-    LOGE("ENTER");
-    mIniDefaultPath += INFERENCE_INI_FILENAME;
-    LOGE("LEAVE");
-}
-
-InferenceEngineInI::~InferenceEngineInI()
-{
-    ;
-}
-
-int InferenceEngineInI::LoadInI()
-{
-    LOGE("ENTER");
-    dictionary *dict = iniparser_load(mIniDefaultPath.c_str());
-    if (dict == NULL) {
-        LOGE("Fail to load ini");
-        return -1;
-    }
-
-    mSelectedBackendEngine = static_cast<inference_backend_type_e>(iniparser_getint(dict, "inference backend:selected backend engine", -1));
-
-    if(dict) {
-        iniparser_freedict(dict);
-        dict = NULL;
-    }
-
-    LOGE("LEAVE");
-    return 0;
-}
-
-void InferenceEngineInI::UnLoadInI()
-{
-    ;
-}
-
-int InferenceEngineInI::GetSelectedBackendEngine()
-{
-    return mSelectedBackendEngine;
-}
-
-} /* Inference */
-} /* MediaVision */
diff --git a/src/inference_engine_common_impl.cpp b/src/inference_engine_common_impl.cpp
new file mode 100755
index 0000000..b64a11e
--- /dev/null
+++ b/src/inference_engine_common_impl.cpp
@@ -0,0 +1,259 @@
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "inference_engine_error.h"
+#include "inference_engine_common_impl.h"
+#include "inference_engine_ini.h"
+#include <fstream>
+#include <iostream>
+#include <unistd.h>
+#include <time.h>
+#include <dlfcn.h>
+#include <experimental/filesystem>
+
+extern "C" {
+
+#include <dlog.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "INFERENCE_ENGINE_COMMON"
+}
+
+namespace fs = std::experimental::filesystem;
+namespace InferenceEngineInterface {
+namespace Common {
+InferenceEngineCommon::InferenceEngineCommon() :
+    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
+    handle(nullptr),
+    engine(nullptr)
+{
+    LOGE("ENTER");
+    InferenceEngineInI ini;
+    ini.LoadInI();
+    mSelectedBackendEngine = static_cast<inference_backend_type_e>(ini.GetSelectedBackendEngine());
+    SetBackendEngine(mSelectedBackendEngine);
+    LOGI("Backend engine is selected by ini file [%d]", mSelectedBackendEngine);
+    LOGE("LEAVE");
+}
+
+InferenceEngineCommon::InferenceEngineCommon(std::string backend) :
+    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
+    handle(nullptr),
+    engine(nullptr)
+{
+    LOGE("ENTER");
+    mBackendLibName = "libinference-engine-" + backend + ".so";
+    LOGE("LEAVE");
+}
+
+InferenceEngineCommon::InferenceEngineCommon(inference_backend_type_e backend) :
+    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
+    handle(nullptr),
+    engine(nullptr)
+{
+    LOGE("ENTER");
+    SetBackendEngine(backend);
+    LOGI("Backend engine is selected by enum input[%d] set[%d]", backend, mSelectedBackendEngine);
+    LOGE("LEAVE");
+}
+
+
+InferenceEngineCommon::InferenceEngineCommon(inference_engine_config *config) :
+    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
+    handle(nullptr),
+    engine(nullptr)
+{
+    LOGI("ENTER");
+
+    // TODO.
+
+    LOGI("LEAVE");
+}
+
+InferenceEngineCommon::~InferenceEngineCommon()
+{
+    LOGW("ENTER");
+    LOGW("LEAVE");
+}
+
+int InferenceEngineCommon::BindBackend(inference_engine_config *config)
+{
+    LOGI("ENTER");
+
+    mBackendLibName = "libinference-engine-" + config->backend_name + ".so";
+
+    char *error = NULL;
+    LOGI("lib: %s", mBackendLibName.c_str());
+    handle = dlopen(mBackendLibName.c_str(), RTLD_NOW);
+    LOGI("HANDLE : [%p]", handle);
+
+    if (!handle) {
+        LOGE("Fail to dlopen %s", mBackendLibName.c_str());
+        LOGE("Error: %s\n", dlerror());
+        return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+    }
+
+    init_t* EngineInit = (init_t *)dlsym(handle, "EngineCommonInit");
+    if ((error = dlerror()) != NULL) {
+        LOGE("Error: %s\n", error);
+        dlclose(handle);
+        return INFERENCE_ENGINE_ERROR_INTERNAL;
+    }
+
+    engine = EngineInit();
+    if (engine == NULL) {
+        LOGE("Fail to EngineInit");
+        dlclose(handle);
+        return INFERENCE_ENGINE_ERROR_INTERNAL;
+    }
+
+    LOGI("LEAVE");
+
+    return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+void InferenceEngineCommon::UnbindBackend(void)
+{
+    LOGW("ENTER");
+
+    if (handle) {
+        destroy_t *engineDestroy = (destroy_t*)dlsym(handle, "EngineCommonDestroy");
+        engineDestroy(engine);
+        dlclose(handle);
+        engine = nullptr;
+        handle = nullptr;
+    }
+
+    LOGW("LEAVE");
+}
+
+int InferenceEngineCommon::SetTargetDevices(int types)
+{
+    int ret = engine->SetTargetDevices(types);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetTargetDevice");
+
+    return ret;
+}
+
+int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_model_format_e model_format)
+{
+    LOGI("ENTER");
+
+    int ret = engine->Load(model_paths, model_format);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to load InferenceEngineVision");
+
+    LOGI("LEAVE");
+
+    return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+int InferenceEngineCommon::GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
+{
+    return engine->GetInputTensorBuffers(buffers);
+}
+
+int InferenceEngineCommon::GetOutputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
+{
+    return engine->GetOutputTensorBuffers(buffers);
+}
+
+int InferenceEngineCommon::GetInputLayerProperty(inference_engine_layer_property &property)
+{
+    return engine->GetInputLayerProperty(property);
+}
+
+int InferenceEngineCommon::GetOutputLayerProperty(inference_engine_layer_property &property)
+{
+    return engine->GetOutputLayerProperty(property);
+}
+
+int InferenceEngineCommon::SetInputLayerProperty(inference_engine_layer_property &property)
+{
+    return engine->SetInputLayerProperty(property);
+}
+
+int InferenceEngineCommon::SetOutputLayerProperty(inference_engine_layer_property &property)
+{
+    return engine->SetOutputLayerProperty(property);
+}
+
+int InferenceEngineCommon::GetBackendCapacity(inference_engine_capacity *capacity)
+{
+    return engine->GetBackendCapacity(capacity);
+}
+
+int InferenceEngineCommon::Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
+                               std::vector<inference_engine_tensor_buffer> &output_buffers)
+{
+    return engine->Run(input_buffers, output_buffers);
+}
+
+int InferenceEngineCommon::SetLibraryPath(std::string path)
+{
+    LOGE("ENTER");
+    if (path.empty())
+        return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+
+    if (fs::is_directory(path)) {
+        if(path.back() != '/')
+            path += "/";
+
+        mBackendLibName = path + mBackendLibName;
+    }
+    else {
+        if (fs::is_regular_file(path)){
+            mBackendLibName = path;
+        }
+        else {
+            LOGE("Fail to find path. [%s]", path.c_str());
+            return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+        }
+    }
+    LOGE("lib: %s", mBackendLibName.c_str());
+    LOGE("LEAVE");
+
+    return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+int InferenceEngineCommon::SetBackendEngine(inference_backend_type_e backend)
+{
+    std::string backendString;
+    switch(backend){
+    case INFERENCE_BACKEND_OPENCV:
+        backendString = "opencv";
+        break;
+    case INFERENCE_BACKEND_TFLITE:
+        backendString = "tflite";
+        break;
+    case INFERENCE_BACKEND_ARMNN:
+        backendString = "armnn";
+        break;
+    default:
+        LOGE("Not supported backend engine [%d]", backend);
+        return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+    }
+
+    mBackendLibName = "libinference-engine-" + backendString + ".so";
+
+    return INFERENCE_ENGINE_ERROR_NONE;
+}
+} /* Common */
+} /* InferenceEngineInterface */
diff --git a/src/inference_ini.cpp b/src/inference_ini.cpp
new file mode 100644
index 0000000..ed05547
--- /dev/null
+++ b/src/inference_ini.cpp
@@ -0,0 +1,84 @@
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "inference_engine_ini.h"
+#include "inference_engine_error.h"
+
+#include <iniparser.h>
+#include <unistd.h>
+
+extern "C" {
+
+#include <dlog.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "INFERENCE_ENGINE_COMMON"
+}
+
+namespace InferenceEngineInterface {
+namespace Common {
+
+const std::string INFERENCE_INI_FILENAME = "/inference/inference_engine.ini";
+
+InferenceEngineInI::InferenceEngineInI() :
+    mIniDefaultPath(SYSCONFDIR),
+    mSelectedBackendEngine(INFERENCE_BACKEND_NONE)
+{
+    LOGE("ENTER");
+    mIniDefaultPath += INFERENCE_INI_FILENAME;
+    LOGE("LEAVE");
+}
+
+InferenceEngineInI::~InferenceEngineInI()
+{
+    ;
+}
+
+int InferenceEngineInI::LoadInI()
+{
+    LOGE("ENTER");
+    dictionary *dict = iniparser_load(mIniDefaultPath.c_str());
+    if (dict == NULL) {
+        LOGE("Fail to load ini");
+        return -1;
+    }
+
+    mSelectedBackendEngine = static_cast<inference_backend_type_e>(iniparser_getint(dict, "inference backend:selected backend engine", -1));
+
+    if(dict) {
+        iniparser_freedict(dict);
+        dict = NULL;
+    }
+
+    LOGE("LEAVE");
+    return 0;
+}
+
+void InferenceEngineInI::UnLoadInI()
+{
+    ;
+}
+
+int InferenceEngineInI::GetSelectedBackendEngine()
+{
+    return mSelectedBackendEngine;
+}
+
+} /* Inference */
+} /* MediaVision */
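
For reference, a minimal usage sketch of the InferenceEngineCommon class moved by this patch, showing how the dlopen()-based backend binding above is driven. It is not part of the commit; the backend name "tflite" is only an example, and any inference_engine_config fields beyond backend_name are assumptions rather than values taken from this diff.

// Usage sketch only -- not part of this patch.
#include "inference_engine_common_impl.h"
#include "inference_engine_error.h"

using namespace InferenceEngineInterface::Common;

int main()
{
    // The common layer maps a backend name to libinference-engine-<name>.so.
    InferenceEngineCommon engine("tflite");

    inference_engine_config config;
    config.backend_name = "tflite";   // the only config field BindBackend() reads in this diff

    // BindBackend() dlopen()s the backend library and resolves its
    // EngineCommonInit() entry point to create the backend object.
    int ret = engine.BindBackend(&config);
    if (ret != INFERENCE_ENGINE_ERROR_NONE)
        return ret;

    inference_engine_capacity capacity;
    ret = engine.GetBackendCapacity(&capacity);

    // UnbindBackend() calls EngineCommonDestroy() and dlclose()s the library.
    engine.UnbindBackend();

    return ret;
}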