Change directory name from common to src
authorInki Dae <inki.dae@samsung.com>
Wed, 25 Mar 2020 06:41:08 +0000 (15:41 +0900)
committerInki Dae <inki.dae@samsung.com>
Tue, 14 Apr 2020 00:42:53 +0000 (09:42 +0900)
Let's just use a generic directory name, src, instead of a specific one,
common. We already dropped the inference framework so we don't have to
adhere to the common directory name.

Change-Id: Ib0947dd90f5ee5c5d935d75435a4f36c1b869146
Signed-off-by: Inki Dae <inki.dae@samsung.com>
CMakeLists.txt
common/inference_engine_common_impl.cpp [deleted file]
common/inference_ini.cpp [deleted file]
src/inference_engine_common_impl.cpp [new file with mode: 0755]
src/inference_ini.cpp [new file with mode: 0644]

index afebd8ee3502fdf7c7e0abcd7d7da1b0df290484..ee4669a0322a50dd8c59ce6c70720d1ee7d40694 100644 (file)
@@ -32,12 +32,11 @@ ADD_DEFINITIONS("-DTIZEN_DEBUG")
 SET(CMAKE_EXE_LINKER_FLAGS "-Wl,--as-needed -Wl,--rpath=${LIB_INSTALL_DIR}")
 
 #common
-aux_source_directory(common SOURCES)
+aux_source_directory(src SOURCES)
 ADD_LIBRARY(${fw_name_common} SHARED ${SOURCES})
 
 TARGET_LINK_LIBRARIES(${fw_name_common} dlog iniparser stdc++fs)
 
-
 SET_TARGET_PROPERTIES(${fw_name_common}
      PROPERTIES
      VERSION ${FULLVER}
diff --git a/common/inference_engine_common_impl.cpp b/common/inference_engine_common_impl.cpp
deleted file mode 100755 (executable)
index b64a11e..0000000
+++ /dev/null
@@ -1,259 +0,0 @@
-/**
- * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "inference_engine_error.h"
-#include "inference_engine_common_impl.h"
-#include "inference_engine_ini.h"
-#include <fstream>
-#include <iostream>
-#include <unistd.h>
-#include <time.h>
-#include <dlfcn.h>
-#include <experimental/filesystem>
-
-extern "C" {
-
-#include <dlog.h>
-
-#ifdef LOG_TAG
-#undef LOG_TAG
-#endif
-
-#define LOG_TAG "INFERENCE_ENGINE_COMMON"
-}
-
-namespace fs = std::experimental::filesystem;
-namespace InferenceEngineInterface {
-namespace Common {
-InferenceEngineCommon::InferenceEngineCommon() :
-    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
-    handle(nullptr),
-    engine(nullptr)
-{
-    LOGE("ENTER");
-    InferenceEngineInI ini;
-    ini.LoadInI();
-    mSelectedBackendEngine = static_cast<inference_backend_type_e>(ini.GetSelectedBackendEngine());
-    SetBackendEngine(mSelectedBackendEngine);
-    LOGI("Backend engine is selected by ini file [%d]", mSelectedBackendEngine);
-    LOGE("LEAVE");
-}
-
-InferenceEngineCommon::InferenceEngineCommon(std::string backend) :
-    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
-    handle(nullptr),
-    engine(nullptr)
-{
-    LOGE("ENTER");
-    mBackendLibName = "libinference-engine-" + backend + ".so";
-    LOGE("LEAVE");
-}
-
-InferenceEngineCommon::InferenceEngineCommon(inference_backend_type_e backend) :
-    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
-    handle(nullptr),
-    engine(nullptr)
-{
-    LOGE("ENTER");
-    SetBackendEngine(backend);
-    LOGI("Backend engine is selected by enum input[%d] set[%d]", backend, mSelectedBackendEngine);
-    LOGE("LEAVE");
-}
-
-
-InferenceEngineCommon::InferenceEngineCommon(inference_engine_config *config) :
-    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
-    handle(nullptr),
-    engine(nullptr)
-{
-    LOGI("ENTER");
-
-    // TODO.
-
-    LOGI("LEAVE");
-}
-
-InferenceEngineCommon::~InferenceEngineCommon()
-{
-    LOGW("ENTER");
-    LOGW("LEAVE");
-}
-
-int InferenceEngineCommon::BindBackend(inference_engine_config *config)
-{
-    LOGI("ENTER");
-
-    mBackendLibName = "libinference-engine-" + config->backend_name + ".so";
-
-    char *error = NULL;
-    LOGI("lib: %s", mBackendLibName.c_str());
-    handle = dlopen(mBackendLibName.c_str(), RTLD_NOW);
-    LOGI("HANDLE : [%p]", handle);
-
-    if (!handle) {
-        LOGE("Fail to dlopen %s", mBackendLibName.c_str());
-        LOGE("Error: %s\n", dlerror());
-        return  INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
-    }
-
-    init_t* EngineInit = (init_t *)dlsym(handle, "EngineCommonInit");
-    if ((error = dlerror()) != NULL) {
-        LOGE("Error: %s\n", error);
-        dlclose(handle);
-        return INFERENCE_ENGINE_ERROR_INTERNAL;
-    }
-
-    engine = EngineInit();
-    if (engine == NULL) {
-        LOGE("Fail to EngineInit");
-        dlclose(handle);
-        return INFERENCE_ENGINE_ERROR_INTERNAL;
-    }
-
-    LOGI("LEAVE");
-
-    return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-void InferenceEngineCommon::UnbindBackend(void)
-{
-    LOGW("ENTER");
-
-    if (handle) {
-        destroy_t *engineDestroy = (destroy_t*)dlsym(handle, "EngineCommonDestroy");
-        engineDestroy(engine);
-        dlclose(handle);
-        engine = nullptr;
-        handle = nullptr;
-    }
-
-    LOGW("LEAVE");
-}
-
-int InferenceEngineCommon::SetTargetDevices(int types)
-{
-    int ret = engine->SetTargetDevices(types);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to SetTargetDevice");
-
-    return ret;
-}
-
-int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_model_format_e model_format)
-{
-    LOGI("ENTER");
-
-    int ret = engine->Load(model_paths, model_format);
-    if (ret != INFERENCE_ENGINE_ERROR_NONE)
-        LOGE("Fail to load InferenceEngineVision");
-
-    LOGI("LEAVE");
-
-    return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceEngineCommon::GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
-{
-    return engine->GetInputTensorBuffers(buffers);
-}
-
-int InferenceEngineCommon::GetOutputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
-{
-    return engine->GetOutputTensorBuffers(buffers);
-}
-
-int InferenceEngineCommon::GetInputLayerProperty(inference_engine_layer_property &property)
-{
-    return engine->GetInputLayerProperty(property);
-}
-
-int InferenceEngineCommon::GetOutputLayerProperty(inference_engine_layer_property &property)
-{
-    return engine->GetOutputLayerProperty(property);
-}
-
-int InferenceEngineCommon::SetInputLayerProperty(inference_engine_layer_property &property)
-{
-    return engine->SetInputLayerProperty(property);
-}
-
-int InferenceEngineCommon::SetOutputLayerProperty(inference_engine_layer_property &property)
-{
-    return engine->SetOutputLayerProperty(property);
-}
-
-int InferenceEngineCommon::GetBackendCapacity(inference_engine_capacity *capacity)
-{
-    return engine->GetBackendCapacity(capacity);
-}
-
-int InferenceEngineCommon::Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
-                                std::vector<inference_engine_tensor_buffer> &output_buffers)
-{
-    return engine->Run(input_buffers, output_buffers);
-}
-
-int InferenceEngineCommon::SetLibraryPath(std::string path)
-{
-    LOGE("ENTER");
-    if (path.empty())
-        return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
-
-    if (fs::is_directory(path)) {
-        if(path.back() != '/')
-            path += "/";
-
-        mBackendLibName = path + mBackendLibName;
-    }
-    else {
-        if (fs::is_regular_file(path)){
-            mBackendLibName = path;
-        }
-        else {
-            LOGE("Fail to find path. [%s]", path.c_str());
-            return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
-        }
-    }
-    LOGE("lib: %s", mBackendLibName.c_str());
-    LOGE("LEAVE");
-
-    return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceEngineCommon::SetBackendEngine(inference_backend_type_e backend)
-{
-    std::string backendString;
-    switch(backend){
-        case INFERENCE_BACKEND_OPENCV:
-        backendString = "opencv";
-        break;
-    case INFERENCE_BACKEND_TFLITE:
-        backendString = "tflite";
-        break;
-    case INFERENCE_BACKEND_ARMNN:
-        backendString = "armnn";
-        break;
-    default:
-        LOGE("Not supported backend engine [%d]", backend);
-        return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
-    }
-
-    mBackendLibName = "libinference-engine-" + backendString + ".so";
-
-    return INFERENCE_ENGINE_ERROR_NONE;
-}
-} /* Common */
-} /* InferenceEngineInterface */
diff --git a/common/inference_ini.cpp b/common/inference_ini.cpp
deleted file mode 100644 (file)
index ed05547..0000000
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "inference_engine_ini.h"
-#include "inference_engine_error.h"
-
-#include <iniparser.h>
-#include <unistd.h>
-
-extern "C" {
-
-#include <dlog.h>
-
-#ifdef LOG_TAG
-#undef LOG_TAG
-#endif
-
-#define LOG_TAG "INFERENCE_ENGINE_COMMON"
-}
-
-namespace InferenceEngineInterface {
-namespace Common {
-
-const std::string INFERENCE_INI_FILENAME = "/inference/inference_engine.ini";
-
-InferenceEngineInI::InferenceEngineInI() :
-    mIniDefaultPath(SYSCONFDIR),
-    mSelectedBackendEngine(INFERENCE_BACKEND_NONE)
-{
-    LOGE("ENTER");
-    mIniDefaultPath += INFERENCE_INI_FILENAME;
-    LOGE("LEAVE");
-}
-
-InferenceEngineInI::~InferenceEngineInI()
-{
-    ;
-}
-
-int InferenceEngineInI::LoadInI()
-{
-       LOGE("ENTER");
-       dictionary *dict = iniparser_load(mIniDefaultPath.c_str());
-       if (dict == NULL) {
-               LOGE("Fail to load ini");
-               return  -1;
-       }
-
-       mSelectedBackendEngine = static_cast<inference_backend_type_e>(iniparser_getint(dict, "inference backend:selected backend engine", -1));
-
-       if(dict) {
-               iniparser_freedict(dict);
-               dict = NULL;
-       }
-
-       LOGE("LEAVE");
-       return 0;
-}
-
-void InferenceEngineInI::UnLoadInI()
-{
-       ;
-}
-
-int InferenceEngineInI::GetSelectedBackendEngine()
-{
-       return mSelectedBackendEngine;
-}
-
-} /* Inference */
-} /* MediaVision */
diff --git a/src/inference_engine_common_impl.cpp b/src/inference_engine_common_impl.cpp
new file mode 100755 (executable)
index 0000000..b64a11e
--- /dev/null
@@ -0,0 +1,259 @@
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "inference_engine_error.h"
+#include "inference_engine_common_impl.h"
+#include "inference_engine_ini.h"
+#include <fstream>
+#include <iostream>
+#include <unistd.h>
+#include <time.h>
+#include <dlfcn.h>
+#include <experimental/filesystem>
+
+extern "C" {
+
+#include <dlog.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "INFERENCE_ENGINE_COMMON"
+}
+
+namespace fs = std::experimental::filesystem;
+namespace InferenceEngineInterface {
+namespace Common {
+InferenceEngineCommon::InferenceEngineCommon() :
+    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
+    handle(nullptr),
+    engine(nullptr)
+{
+    LOGE("ENTER");
+    InferenceEngineInI ini;
+    ini.LoadInI();
+    mSelectedBackendEngine = static_cast<inference_backend_type_e>(ini.GetSelectedBackendEngine());
+    SetBackendEngine(mSelectedBackendEngine);
+    LOGI("Backend engine is selected by ini file [%d]", mSelectedBackendEngine);
+    LOGE("LEAVE");
+}
+
+InferenceEngineCommon::InferenceEngineCommon(std::string backend) :
+    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
+    handle(nullptr),
+    engine(nullptr)
+{
+    LOGE("ENTER");
+    mBackendLibName = "libinference-engine-" + backend + ".so";
+    LOGE("LEAVE");
+}
+
+InferenceEngineCommon::InferenceEngineCommon(inference_backend_type_e backend) :
+    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
+    handle(nullptr),
+    engine(nullptr)
+{
+    LOGE("ENTER");
+    SetBackendEngine(backend);
+    LOGI("Backend engine is selected by enum input[%d] set[%d]", backend, mSelectedBackendEngine);
+    LOGE("LEAVE");
+}
+
+
+InferenceEngineCommon::InferenceEngineCommon(inference_engine_config *config) :
+    mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
+    handle(nullptr),
+    engine(nullptr)
+{
+    LOGI("ENTER");
+
+    // TODO.
+
+    LOGI("LEAVE");
+}
+
+InferenceEngineCommon::~InferenceEngineCommon()
+{
+    LOGW("ENTER");
+    LOGW("LEAVE");
+}
+
+int InferenceEngineCommon::BindBackend(inference_engine_config *config)
+{
+    LOGI("ENTER");
+
+    mBackendLibName = "libinference-engine-" + config->backend_name + ".so";
+
+    char *error = NULL;
+    LOGI("lib: %s", mBackendLibName.c_str());
+    handle = dlopen(mBackendLibName.c_str(), RTLD_NOW);
+    LOGI("HANDLE : [%p]", handle);
+
+    if (!handle) {
+        LOGE("Fail to dlopen %s", mBackendLibName.c_str());
+        LOGE("Error: %s\n", dlerror());
+        return  INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+    }
+
+    init_t* EngineInit = (init_t *)dlsym(handle, "EngineCommonInit");
+    if ((error = dlerror()) != NULL) {
+        LOGE("Error: %s\n", error);
+        dlclose(handle);
+        return INFERENCE_ENGINE_ERROR_INTERNAL;
+    }
+
+    engine = EngineInit();
+    if (engine == NULL) {
+        LOGE("Fail to EngineInit");
+        dlclose(handle);
+        return INFERENCE_ENGINE_ERROR_INTERNAL;
+    }
+
+    LOGI("LEAVE");
+
+    return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+void InferenceEngineCommon::UnbindBackend(void)
+{
+    LOGW("ENTER");
+
+    if (handle) {
+        destroy_t *engineDestroy = (destroy_t*)dlsym(handle, "EngineCommonDestroy");
+        engineDestroy(engine);
+        dlclose(handle);
+        engine = nullptr;
+        handle = nullptr;
+    }
+
+    LOGW("LEAVE");
+}
+
+int InferenceEngineCommon::SetTargetDevices(int types)
+{
+    int ret = engine->SetTargetDevices(types);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetTargetDevice");
+
+    return ret;
+}
+
+int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_model_format_e model_format)
+{
+    LOGI("ENTER");
+
+    int ret = engine->Load(model_paths, model_format);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to load InferenceEngineVision");
+
+    LOGI("LEAVE");
+
+    return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+int InferenceEngineCommon::GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
+{
+    return engine->GetInputTensorBuffers(buffers);
+}
+
+int InferenceEngineCommon::GetOutputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
+{
+    return engine->GetOutputTensorBuffers(buffers);
+}
+
+int InferenceEngineCommon::GetInputLayerProperty(inference_engine_layer_property &property)
+{
+    return engine->GetInputLayerProperty(property);
+}
+
+int InferenceEngineCommon::GetOutputLayerProperty(inference_engine_layer_property &property)
+{
+    return engine->GetOutputLayerProperty(property);
+}
+
+int InferenceEngineCommon::SetInputLayerProperty(inference_engine_layer_property &property)
+{
+    return engine->SetInputLayerProperty(property);
+}
+
+int InferenceEngineCommon::SetOutputLayerProperty(inference_engine_layer_property &property)
+{
+    return engine->SetOutputLayerProperty(property);
+}
+
+int InferenceEngineCommon::GetBackendCapacity(inference_engine_capacity *capacity)
+{
+    return engine->GetBackendCapacity(capacity);
+}
+
+int InferenceEngineCommon::Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
+                                std::vector<inference_engine_tensor_buffer> &output_buffers)
+{
+    return engine->Run(input_buffers, output_buffers);
+}
+
+int InferenceEngineCommon::SetLibraryPath(std::string path)
+{
+    LOGE("ENTER");
+    if (path.empty())
+        return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+
+    if (fs::is_directory(path)) {
+        if(path.back() != '/')
+            path += "/";
+
+        mBackendLibName = path + mBackendLibName;
+    }
+    else {
+        if (fs::is_regular_file(path)){
+            mBackendLibName = path;
+        }
+        else {
+            LOGE("Fail to find path. [%s]", path.c_str());
+            return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+        }
+    }
+    LOGE("lib: %s", mBackendLibName.c_str());
+    LOGE("LEAVE");
+
+    return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+int InferenceEngineCommon::SetBackendEngine(inference_backend_type_e backend)
+{
+    std::string backendString;
+    switch(backend){
+        case INFERENCE_BACKEND_OPENCV:
+        backendString = "opencv";
+        break;
+    case INFERENCE_BACKEND_TFLITE:
+        backendString = "tflite";
+        break;
+    case INFERENCE_BACKEND_ARMNN:
+        backendString = "armnn";
+        break;
+    default:
+        LOGE("Not supported backend engine [%d]", backend);
+        return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+    }
+
+    mBackendLibName = "libinference-engine-" + backendString + ".so";
+
+    return INFERENCE_ENGINE_ERROR_NONE;
+}
+} /* Common */
+} /* InferenceEngineInterface */
diff --git a/src/inference_ini.cpp b/src/inference_ini.cpp
new file mode 100644 (file)
index 0000000..ed05547
--- /dev/null
@@ -0,0 +1,84 @@
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "inference_engine_ini.h"
+#include "inference_engine_error.h"
+
+#include <iniparser.h>
+#include <unistd.h>
+
+extern "C" {
+
+#include <dlog.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "INFERENCE_ENGINE_COMMON"
+}
+
+namespace InferenceEngineInterface {
+namespace Common {
+
+const std::string INFERENCE_INI_FILENAME = "/inference/inference_engine.ini";
+
+InferenceEngineInI::InferenceEngineInI() :
+    mIniDefaultPath(SYSCONFDIR),
+    mSelectedBackendEngine(INFERENCE_BACKEND_NONE)
+{
+    LOGE("ENTER");
+    mIniDefaultPath += INFERENCE_INI_FILENAME;
+    LOGE("LEAVE");
+}
+
+InferenceEngineInI::~InferenceEngineInI()
+{
+    ;
+}
+
+int InferenceEngineInI::LoadInI()
+{
+       LOGE("ENTER");
+       dictionary *dict = iniparser_load(mIniDefaultPath.c_str());
+       if (dict == NULL) {
+               LOGE("Fail to load ini");
+               return  -1;
+       }
+
+       mSelectedBackendEngine = static_cast<inference_backend_type_e>(iniparser_getint(dict, "inference backend:selected backend engine", -1));
+
+       if(dict) {
+               iniparser_freedict(dict);
+               dict = NULL;
+       }
+
+       LOGE("LEAVE");
+       return 0;
+}
+
+void InferenceEngineInI::UnLoadInI()
+{
+       ;
+}
+
+int InferenceEngineInI::GetSelectedBackendEngine()
+{
+       return mSelectedBackendEngine;
+}
+
+} /* Inference */
+} /* MediaVision */