SET(CMAKE_EXE_LINKER_FLAGS "-Wl,--as-needed -Wl,--rpath=${LIB_INSTALL_DIR}")
#common
-aux_source_directory(common SOURCES)
+aux_source_directory(src SOURCES)
ADD_LIBRARY(${fw_name_common} SHARED ${SOURCES})
TARGET_LINK_LIBRARIES(${fw_name_common} dlog iniparser stdc++fs)
-
SET_TARGET_PROPERTIES(${fw_name_common}
PROPERTIES
VERSION ${FULLVER}
+++ /dev/null
-/**
- * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "inference_engine_error.h"
-#include "inference_engine_common_impl.h"
-#include "inference_engine_ini.h"
-#include <fstream>
-#include <iostream>
-#include <unistd.h>
-#include <time.h>
-#include <dlfcn.h>
-#include <experimental/filesystem>
-
-extern "C" {
-
-#include <dlog.h>
-
-#ifdef LOG_TAG
-#undef LOG_TAG
-#endif
-
-#define LOG_TAG "INFERENCE_ENGINE_COMMON"
-}
-
-namespace fs = std::experimental::filesystem;
-namespace InferenceEngineInterface {
-namespace Common {
-InferenceEngineCommon::InferenceEngineCommon() :
- mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
- handle(nullptr),
- engine(nullptr)
-{
- LOGE("ENTER");
- InferenceEngineInI ini;
- ini.LoadInI();
- mSelectedBackendEngine = static_cast<inference_backend_type_e>(ini.GetSelectedBackendEngine());
- SetBackendEngine(mSelectedBackendEngine);
- LOGI("Backend engine is selected by ini file [%d]", mSelectedBackendEngine);
- LOGE("LEAVE");
-}
-
-InferenceEngineCommon::InferenceEngineCommon(std::string backend) :
- mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
- handle(nullptr),
- engine(nullptr)
-{
- LOGE("ENTER");
- mBackendLibName = "libinference-engine-" + backend + ".so";
- LOGE("LEAVE");
-}
-
-InferenceEngineCommon::InferenceEngineCommon(inference_backend_type_e backend) :
- mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
- handle(nullptr),
- engine(nullptr)
-{
- LOGE("ENTER");
- SetBackendEngine(backend);
- LOGI("Backend engine is selected by enum input[%d] set[%d]", backend, mSelectedBackendEngine);
- LOGE("LEAVE");
-}
-
-
-InferenceEngineCommon::InferenceEngineCommon(inference_engine_config *config) :
- mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
- handle(nullptr),
- engine(nullptr)
-{
- LOGI("ENTER");
-
- // TODO.
-
- LOGI("LEAVE");
-}
-
-InferenceEngineCommon::~InferenceEngineCommon()
-{
- LOGW("ENTER");
- LOGW("LEAVE");
-}
-
-int InferenceEngineCommon::BindBackend(inference_engine_config *config)
-{
- LOGI("ENTER");
-
- mBackendLibName = "libinference-engine-" + config->backend_name + ".so";
-
- char *error = NULL;
- LOGI("lib: %s", mBackendLibName.c_str());
- handle = dlopen(mBackendLibName.c_str(), RTLD_NOW);
- LOGI("HANDLE : [%p]", handle);
-
- if (!handle) {
- LOGE("Fail to dlopen %s", mBackendLibName.c_str());
- LOGE("Error: %s\n", dlerror());
- return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
- }
-
- init_t* EngineInit = (init_t *)dlsym(handle, "EngineCommonInit");
- if ((error = dlerror()) != NULL) {
- LOGE("Error: %s\n", error);
- dlclose(handle);
- return INFERENCE_ENGINE_ERROR_INTERNAL;
- }
-
- engine = EngineInit();
- if (engine == NULL) {
- LOGE("Fail to EngineInit");
- dlclose(handle);
- return INFERENCE_ENGINE_ERROR_INTERNAL;
- }
-
- LOGI("LEAVE");
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-void InferenceEngineCommon::UnbindBackend(void)
-{
- LOGW("ENTER");
-
- if (handle) {
- destroy_t *engineDestroy = (destroy_t*)dlsym(handle, "EngineCommonDestroy");
- engineDestroy(engine);
- dlclose(handle);
- engine = nullptr;
- handle = nullptr;
- }
-
- LOGW("LEAVE");
-}
-
-int InferenceEngineCommon::SetTargetDevices(int types)
-{
- int ret = engine->SetTargetDevices(types);
- if (ret != INFERENCE_ENGINE_ERROR_NONE)
- LOGE("Fail to SetTargetDevice");
-
- return ret;
-}
-
-int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_model_format_e model_format)
-{
- LOGI("ENTER");
-
- int ret = engine->Load(model_paths, model_format);
- if (ret != INFERENCE_ENGINE_ERROR_NONE)
- LOGE("Fail to load InferenceEngineVision");
-
- LOGI("LEAVE");
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceEngineCommon::GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
-{
- return engine->GetInputTensorBuffers(buffers);
-}
-
-int InferenceEngineCommon::GetOutputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
-{
- return engine->GetOutputTensorBuffers(buffers);
-}
-
-int InferenceEngineCommon::GetInputLayerProperty(inference_engine_layer_property &property)
-{
- return engine->GetInputLayerProperty(property);
-}
-
-int InferenceEngineCommon::GetOutputLayerProperty(inference_engine_layer_property &property)
-{
- return engine->GetOutputLayerProperty(property);
-}
-
-int InferenceEngineCommon::SetInputLayerProperty(inference_engine_layer_property &property)
-{
- return engine->SetInputLayerProperty(property);
-}
-
-int InferenceEngineCommon::SetOutputLayerProperty(inference_engine_layer_property &property)
-{
- return engine->SetOutputLayerProperty(property);
-}
-
-int InferenceEngineCommon::GetBackendCapacity(inference_engine_capacity *capacity)
-{
- return engine->GetBackendCapacity(capacity);
-}
-
-int InferenceEngineCommon::Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
- std::vector<inference_engine_tensor_buffer> &output_buffers)
-{
- return engine->Run(input_buffers, output_buffers);
-}
-
-int InferenceEngineCommon::SetLibraryPath(std::string path)
-{
- LOGE("ENTER");
- if (path.empty())
- return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
-
- if (fs::is_directory(path)) {
- if(path.back() != '/')
- path += "/";
-
- mBackendLibName = path + mBackendLibName;
- }
- else {
- if (fs::is_regular_file(path)){
- mBackendLibName = path;
- }
- else {
- LOGE("Fail to find path. [%s]", path.c_str());
- return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
- }
- }
- LOGE("lib: %s", mBackendLibName.c_str());
- LOGE("LEAVE");
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceEngineCommon::SetBackendEngine(inference_backend_type_e backend)
-{
- std::string backendString;
- switch(backend){
- case INFERENCE_BACKEND_OPENCV:
- backendString = "opencv";
- break;
- case INFERENCE_BACKEND_TFLITE:
- backendString = "tflite";
- break;
- case INFERENCE_BACKEND_ARMNN:
- backendString = "armnn";
- break;
- default:
- LOGE("Not supported backend engine [%d]", backend);
- return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
- }
-
- mBackendLibName = "libinference-engine-" + backendString + ".so";
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-} /* Common */
-} /* InferenceEngineInterface */
+++ /dev/null
-/**
- * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "inference_engine_ini.h"
-#include "inference_engine_error.h"
-
-#include <iniparser.h>
-#include <unistd.h>
-
-extern "C" {
-
-#include <dlog.h>
-
-#ifdef LOG_TAG
-#undef LOG_TAG
-#endif
-
-#define LOG_TAG "INFERENCE_ENGINE_COMMON"
-}
-
-namespace InferenceEngineInterface {
-namespace Common {
-
-const std::string INFERENCE_INI_FILENAME = "/inference/inference_engine.ini";
-
-InferenceEngineInI::InferenceEngineInI() :
- mIniDefaultPath(SYSCONFDIR),
- mSelectedBackendEngine(INFERENCE_BACKEND_NONE)
-{
- LOGE("ENTER");
- mIniDefaultPath += INFERENCE_INI_FILENAME;
- LOGE("LEAVE");
-}
-
-InferenceEngineInI::~InferenceEngineInI()
-{
- ;
-}
-
-int InferenceEngineInI::LoadInI()
-{
- LOGE("ENTER");
- dictionary *dict = iniparser_load(mIniDefaultPath.c_str());
- if (dict == NULL) {
- LOGE("Fail to load ini");
- return -1;
- }
-
- mSelectedBackendEngine = static_cast<inference_backend_type_e>(iniparser_getint(dict, "inference backend:selected backend engine", -1));
-
- if(dict) {
- iniparser_freedict(dict);
- dict = NULL;
- }
-
- LOGE("LEAVE");
- return 0;
-}
-
-void InferenceEngineInI::UnLoadInI()
-{
- ;
-}
-
-int InferenceEngineInI::GetSelectedBackendEngine()
-{
- return mSelectedBackendEngine;
-}
-
-} /* Inference */
-} /* MediaVision */
--- /dev/null
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "inference_engine_error.h"
+#include "inference_engine_common_impl.h"
+#include "inference_engine_ini.h"
+#include <fstream>
+#include <iostream>
+#include <unistd.h>
+#include <time.h>
+#include <dlfcn.h>
+#include <experimental/filesystem>
+
+extern "C" {
+
+#include <dlog.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "INFERENCE_ENGINE_COMMON"
+}
+
+namespace fs = std::experimental::filesystem;
+namespace InferenceEngineInterface {
+namespace Common {
+InferenceEngineCommon::InferenceEngineCommon() :
+ mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
+ handle(nullptr),
+ engine(nullptr)
+{
+ LOGE("ENTER");
+ InferenceEngineInI ini;
+ ini.LoadInI();
+ mSelectedBackendEngine = static_cast<inference_backend_type_e>(ini.GetSelectedBackendEngine());
+ SetBackendEngine(mSelectedBackendEngine);
+ LOGI("Backend engine is selected by ini file [%d]", mSelectedBackendEngine);
+ LOGE("LEAVE");
+}
+
+InferenceEngineCommon::InferenceEngineCommon(std::string backend) :
+ mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
+ handle(nullptr),
+ engine(nullptr)
+{
+ LOGE("ENTER");
+ mBackendLibName = "libinference-engine-" + backend + ".so";
+ LOGE("LEAVE");
+}
+
+InferenceEngineCommon::InferenceEngineCommon(inference_backend_type_e backend) :
+ mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
+ handle(nullptr),
+ engine(nullptr)
+{
+ LOGE("ENTER");
+ SetBackendEngine(backend);
+ LOGI("Backend engine is selected by enum input[%d] set[%d]", backend, mSelectedBackendEngine);
+ LOGE("LEAVE");
+}
+
+
+InferenceEngineCommon::InferenceEngineCommon(inference_engine_config *config) :
+ mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
+ handle(nullptr),
+ engine(nullptr)
+{
+ LOGI("ENTER");
+
+ // TODO.
+
+ LOGI("LEAVE");
+}
+
+InferenceEngineCommon::~InferenceEngineCommon()
+{
+ LOGW("ENTER");
+ LOGW("LEAVE");
+}
+
+int InferenceEngineCommon::BindBackend(inference_engine_config *config)
+{
+ LOGI("ENTER");
+
+ mBackendLibName = "libinference-engine-" + config->backend_name + ".so";
+
+ char *error = NULL;
+ LOGI("lib: %s", mBackendLibName.c_str());
+ handle = dlopen(mBackendLibName.c_str(), RTLD_NOW);
+ LOGI("HANDLE : [%p]", handle);
+
+ if (!handle) {
+ LOGE("Fail to dlopen %s", mBackendLibName.c_str());
+ LOGE("Error: %s\n", dlerror());
+ return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+ }
+
+ init_t* EngineInit = (init_t *)dlsym(handle, "EngineCommonInit");
+ if ((error = dlerror()) != NULL) {
+ LOGE("Error: %s\n", error);
+ dlclose(handle);
+ return INFERENCE_ENGINE_ERROR_INTERNAL;
+ }
+
+ engine = EngineInit();
+ if (engine == NULL) {
+ LOGE("Fail to EngineInit");
+ dlclose(handle);
+ return INFERENCE_ENGINE_ERROR_INTERNAL;
+ }
+
+ LOGI("LEAVE");
+
+ return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+void InferenceEngineCommon::UnbindBackend(void)
+{
+	LOGW("ENTER");
+
+	if (handle) {
+		destroy_t *engineDestroy = (destroy_t*)dlsym(handle, "EngineCommonDestroy");
+		// dlsym() can fail (symbol missing from the backend library);
+		// calling a NULL function pointer would crash the process.
+		if (engineDestroy != NULL) {
+			engineDestroy(engine);
+		} else {
+			LOGE("Error: %s\n", dlerror());
+		}
+		dlclose(handle);
+		engine = nullptr;
+		handle = nullptr;
+	}
+
+	LOGW("LEAVE");
+}
+
+int InferenceEngineCommon::SetTargetDevices(int types)
+{
+ int ret = engine->SetTargetDevices(types);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE)
+ LOGE("Fail to SetTargetDevice");
+
+ return ret;
+}
+
+int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_model_format_e model_format)
+{
+	LOGI("ENTER");
+
+	int ret = engine->Load(model_paths, model_format);
+	if (ret != INFERENCE_ENGINE_ERROR_NONE)
+		LOGE("Fail to load InferenceEngineVision");
+
+	LOGI("LEAVE");
+
+	// Propagate the backend's result instead of unconditionally
+	// reporting success, so callers can detect load failures.
+	return ret;
+}
+
+int InferenceEngineCommon::GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
+{
+ return engine->GetInputTensorBuffers(buffers);
+}
+
+int InferenceEngineCommon::GetOutputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
+{
+ return engine->GetOutputTensorBuffers(buffers);
+}
+
+int InferenceEngineCommon::GetInputLayerProperty(inference_engine_layer_property &property)
+{
+ return engine->GetInputLayerProperty(property);
+}
+
+int InferenceEngineCommon::GetOutputLayerProperty(inference_engine_layer_property &property)
+{
+ return engine->GetOutputLayerProperty(property);
+}
+
+int InferenceEngineCommon::SetInputLayerProperty(inference_engine_layer_property &property)
+{
+ return engine->SetInputLayerProperty(property);
+}
+
+int InferenceEngineCommon::SetOutputLayerProperty(inference_engine_layer_property &property)
+{
+ return engine->SetOutputLayerProperty(property);
+}
+
+int InferenceEngineCommon::GetBackendCapacity(inference_engine_capacity *capacity)
+{
+ return engine->GetBackendCapacity(capacity);
+}
+
+int InferenceEngineCommon::Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
+ std::vector<inference_engine_tensor_buffer> &output_buffers)
+{
+ return engine->Run(input_buffers, output_buffers);
+}
+
+int InferenceEngineCommon::SetLibraryPath(std::string path)
+{
+ LOGE("ENTER");
+ if (path.empty())
+ return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+
+ if (fs::is_directory(path)) {
+ if(path.back() != '/')
+ path += "/";
+
+ mBackendLibName = path + mBackendLibName;
+ }
+ else {
+ if (fs::is_regular_file(path)){
+ mBackendLibName = path;
+ }
+ else {
+ LOGE("Fail to find path. [%s]", path.c_str());
+ return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+ }
+ }
+ LOGE("lib: %s", mBackendLibName.c_str());
+ LOGE("LEAVE");
+
+ return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+int InferenceEngineCommon::SetBackendEngine(inference_backend_type_e backend)
+{
+ std::string backendString;
+ switch(backend){
+ case INFERENCE_BACKEND_OPENCV:
+ backendString = "opencv";
+ break;
+ case INFERENCE_BACKEND_TFLITE:
+ backendString = "tflite";
+ break;
+ case INFERENCE_BACKEND_ARMNN:
+ backendString = "armnn";
+ break;
+ default:
+ LOGE("Not supported backend engine [%d]", backend);
+ return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+ }
+
+ mBackendLibName = "libinference-engine-" + backendString + ".so";
+
+ return INFERENCE_ENGINE_ERROR_NONE;
+}
+} /* Common */
+} /* InferenceEngineInterface */
--- /dev/null
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "inference_engine_ini.h"
+#include "inference_engine_error.h"
+
+#include <iniparser.h>
+#include <unistd.h>
+
+extern "C" {
+
+#include <dlog.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "INFERENCE_ENGINE_COMMON"
+}
+
+namespace InferenceEngineInterface {
+namespace Common {
+
+const std::string INFERENCE_INI_FILENAME = "/inference/inference_engine.ini";
+
+InferenceEngineInI::InferenceEngineInI() :
+ mIniDefaultPath(SYSCONFDIR),
+ mSelectedBackendEngine(INFERENCE_BACKEND_NONE)
+{
+ LOGE("ENTER");
+ mIniDefaultPath += INFERENCE_INI_FILENAME;
+ LOGE("LEAVE");
+}
+
+InferenceEngineInI::~InferenceEngineInI()
+{
+ ;
+}
+
+int InferenceEngineInI::LoadInI()
+{
+ LOGE("ENTER");
+ dictionary *dict = iniparser_load(mIniDefaultPath.c_str());
+ if (dict == NULL) {
+ LOGE("Fail to load ini");
+ return -1;
+ }
+
+ mSelectedBackendEngine = static_cast<inference_backend_type_e>(iniparser_getint(dict, "inference backend:selected backend engine", -1));
+
+ if(dict) {
+ iniparser_freedict(dict);
+ dict = NULL;
+ }
+
+ LOGE("LEAVE");
+ return 0;
+}
+
+void InferenceEngineInI::UnLoadInI()
+{
+ ;
+}
+
+int InferenceEngineInI::GetSelectedBackendEngine()
+{
+ return mSelectedBackendEngine;
+}
+
+} /* Common */
+} /* InferenceEngineInterface */