This is needed when the user wants to set the full path of the backend .so file.
Change-Id: I397f9750733454fa5b75c064a61034987bb41f22
Signed-off-by: Hyunsoo Park <hance.park@samsung.com>
SET(INC_DIR "${PROJECT_SOURCE_DIR}/include")
-SET(dependents "dlog")
+SET(dependents "dlog iniparser")
SET(pc_dependents "capi-base-common")
INCLUDE(FindPkgConfig)
aux_source_directory(common SOURCES)
ADD_LIBRARY(${fw_name_common} SHARED ${SOURCES})
-TARGET_LINK_LIBRARIES(${fw_name_common} dlog)
+TARGET_LINK_LIBRARIES(${fw_name_common} dlog iniparser)
SET_TARGET_PROPERTIES(${fw_name_common}
aux_source_directory(vision SOURCES)
ADD_LIBRARY(${fw_name_vision} SHARED ${SOURCES})
-TARGET_LINK_LIBRARIES(${fw_name_vision} ${OpenCV_LIBS} dlog)
+TARGET_LINK_LIBRARIES(${fw_name_vision} ${OpenCV_LIBS} dlog stdc++fs)
SET_TARGET_PROPERTIES(${fw_name_vision}
VERBATIM
)
-ENDIF(UNIX)
\ No newline at end of file
+ENDIF(UNIX)
#include "inference_engine_error.h"
#include "inference_engine_common_impl.h"
-
+#include "inference_engine_ini.h"
#include <fstream>
#include <iostream>
#include <unistd.h>
#include <time.h>
#include <dlfcn.h>
+#include <experimental/filesystem>
extern "C" {
#define LOG_TAG "INFERENCE_ENGINE_COMMON"
}
+namespace fs = std::experimental::filesystem;
namespace InferenceEngineInterface {
namespace Common {
+/**
+ * @brief Default constructor: selects the backend engine from the ini file.
+ *
+ * Loads the ini file via InferenceEngineInI and maps the configured backend
+ * id to the backend library name through SetBackendEngine().
+ */
+InferenceEngineCommon::InferenceEngineCommon() :
+	handle(nullptr),
+	engine(nullptr)
+{
+	LOGE("ENTER");
+	InferenceEngineInI ini;
+	// Fix: the return value of LoadInI() was ignored. On a load failure
+	// GetSelectedBackendEngine() would return an indeterminate value, so
+	// fall back to INFERENCE_BACKEND_NONE explicitly instead.
+	if (ini.LoadInI() != 0) {
+		LOGE("Fail to load ini file");
+		mSelectedBackendEngine = INFERENCE_BACKEND_NONE;
+	} else {
+		mSelectedBackendEngine = static_cast<inference_backend_type_e>(ini.GetSelectedBackendEngine());
+	}
+	SetBackendEngine(mSelectedBackendEngine);
+	LOGI("Backend engine is selected by ini file [%d]", mSelectedBackendEngine);
+	LOGE("LEAVE");
+}
InferenceEngineCommon::InferenceEngineCommon(std::string backend) :
handle(nullptr),
{
LOGE("ENTER");
mBackendLibName = "libinference-engine-" + backend + ".so";
- LOGE("lib: %s", mBackendLibName.c_str());
+ LOGE("LEAVE");
+}
+
+/**
+ * @brief Constructor taking a backend type enum.
+ *
+ * Maps @a backend to the backend library name via SetBackendEngine().
+ * NOTE(review): the return value of SetBackendEngine() is ignored, so an
+ * unsupported enum leaves mBackendLibName unset. Also, SetBackendEngine()
+ * as written never assigns mSelectedBackendEngine, so the LOGI below may
+ * print an uninitialized value — confirm intended.
+ */
+InferenceEngineCommon::InferenceEngineCommon(inference_backend_type_e backend) :
+	handle(nullptr),
+	engine(nullptr)
+{
+	LOGE("ENTER");
+	SetBackendEngine(backend);
+	LOGI("Backend engine is selected by enum input[%d] set[%d]", backend, mSelectedBackendEngine);
+	LOGE("LEAVE");
+}
LOGE("LEAVE");
return ret;
}
+
+/**
+ * @brief Override where the backend library is loaded from.
+ *
+ * @param path  either an existing directory (the current backend library
+ *              file name is appended) or the full path of a backend .so.
+ * @return INFERENCE_ENGINE_ERROR_NONE on success,
+ *         INFERENCE_ENGINE_ERROR_INVALID_PARAMETER if @a path is empty or
+ *         neither an existing directory nor a regular file.
+ */
+int InferenceEngineCommon::SetLibraryPath(std::string path)
+{
+	LOGE("ENTER");
+	if (path.empty())
+		return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+
+	// Directory: keep the library file name, prepend the directory.
+	if (fs::is_directory(path)) {
+		if(path.back() != '/')
+			path += "/";
+
+		mBackendLibName = path + mBackendLibName;
+	}
+	else {
+		// Regular file: treat @a path as the full library path.
+		if (fs::is_regular_file(path)){
+			mBackendLibName = path;
+		}
+		else {
+			LOGE("Fail to find path. [%s]", path.c_str());
+			return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+		}
+	}
+	LOGE("lib: %s", mBackendLibName.c_str());
+	LOGE("LEAVE");
+
+	return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+/**
+ * @brief Select a backend engine by enum and derive the library name.
+ *
+ * @param backend  one of inference_backend_type_e (OPENCV or TFLITE).
+ * @return INFERENCE_ENGINE_ERROR_NONE on success,
+ *         INFERENCE_ENGINE_ERROR_NOT_SUPPORTED for any other value.
+ */
+int InferenceEngineCommon::SetBackendEngine(inference_backend_type_e backend)
+{
+	std::string backendString;
+	switch(backend){
+	case INFERENCE_BACKEND_OPENCV:
+		backendString = "opencv";
+		break;
+	case INFERENCE_BACKEND_TFLITE:
+		backendString = "tflite";
+		break;
+	default:
+		LOGE("Not supported backend engine [%d]", backend);
+		return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+	}
+
+	// Fix: record the selected backend so callers and logs that read
+	// mSelectedBackendEngine (e.g. the enum constructor's LOGI) see the
+	// value actually chosen; it was never assigned here before.
+	mSelectedBackendEngine = backend;
+	mBackendLibName = "libinference-engine-" + backendString + ".so";
+
+	return INFERENCE_ENGINE_ERROR_NONE;
+}
} /* Common */
} /* InferenceEngineInterface */
--- /dev/null
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "inference_engine_ini.h"
+#include "inference_engine_error.h"
+
+#include <iniparser.h>
+#include <unistd.h>
+
+extern "C" {
+
+#include <dlog.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "INFERENCE_ENGINE_COMMON"
+}
+
+namespace InferenceEngineInterface {
+namespace Common {
+
+const std::string INFERENCE_INI_FILENAME = "/inference/inference_engine.ini";
+
+/**
+ * @brief Builds the default ini path: SYSCONFDIR + "/inference/inference_engine.ini".
+ */
+InferenceEngineInI::InferenceEngineInI() :
+	mIniDefaultPath(SYSCONFDIR),
+	mSelectedBackendEngine(INFERENCE_BACKEND_NONE)	// fix: was left uninitialized until LoadInI()
+{
+	LOGE("ENTER");
+	mIniDefaultPath += INFERENCE_INI_FILENAME;
+	LOGE("LEAVE");
+}
+
+/**
+ * @brief Destructor; no resources to release (LoadInI frees its dictionary).
+ */
+InferenceEngineInI::~InferenceEngineInI()
+{
+	;
+}
+
+/**
+ * @brief Parse the ini file and cache the configured backend engine id.
+ *
+ * Reads the integer key "inference backend:selected backend engine";
+ * -1 (INFERENCE_BACKEND_NONE) is cached when the key is missing.
+ *
+ * @return 0 on success, -1 when the ini file cannot be loaded.
+ */
+int InferenceEngineInI::LoadInI()
+{
+	LOGE("ENTER");
+	dictionary *dict = iniparser_load(mIniDefaultPath.c_str());
+	if (dict == NULL) {
+		LOGE("Fail to load ini");
+		return -1;
+	}
+
+	mSelectedBackendEngine = static_cast<inference_backend_type_e>(iniparser_getint(dict, "inference backend:selected backend engine", -1));
+
+	// dict is guaranteed non-NULL here (early return above), so free it
+	// unconditionally; the redundant NULL re-check was removed.
+	iniparser_freedict(dict);
+
+	LOGE("LEAVE");
+	return 0;
+}
+
+/**
+ * @brief Unload hook; currently a deliberate no-op since LoadInI()
+ *        frees its iniparser dictionary before returning.
+ */
+void InferenceEngineInI::UnLoadInI()
+{
+	;
+}
+
+/**
+ * @brief Returns the backend engine id cached by LoadInI() as a raw int.
+ *        NOTE(review): meaningful only after a successful LoadInI() call.
+ */
+int InferenceEngineInI::GetSelectedBackendEngine()
+{
+	return mSelectedBackendEngine;
+}
+
+} /* Inference */
+} /* MediaVision */
* @since_tizen 5.5
*/
virtual int GetInferenceResult(tensor_t& results) = 0;
+ virtual int SetLibraryPath(std::string path) = 0;
};
typedef void destroy_t(IInferenceEngineCommon*);
class InferenceEngineCommon {
public:
+ InferenceEngineCommon();
+
InferenceEngineCommon(std::string backend);
+ InferenceEngineCommon(inference_backend_type_e backend);
+
~InferenceEngineCommon();
/**
* @since_tizen 5.5
*/
int GetInferenceResult(tensor_t& results);
+ int SetLibraryPath(std::string path);
+ int SetBackendEngine(inference_backend_type_e backend);
private:
std::string mBackendLibName;
-
+ inference_backend_type_e mSelectedBackendEngine;
protected:
void *handle;
IInferenceEngineCommon *engine;
--- /dev/null
+/**
+ * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __INFERENCE_ENGINE_INI_H__
+#define __INFERENCE_ENGINE_INI_H__
+
+#include <string>
+#include <vector>
+#include <inference_engine_type.h>
+
+namespace InferenceEngineInterface {
+namespace Common {
+
+class InferenceEngineInI {
+public:
+	/**
+	 * @brief Creates an Inference ini class instance.
+	 *
+	 * @since_tizen 5.5
+	 */
+	InferenceEngineInI();
+
+	/**
+	 * @brief Destroys an Inference ini class instance including
+	 *         its all resources.
+	 *
+	 * @since_tizen 5.5
+	 */
+	~InferenceEngineInI();
+
+	/**
+	 * @brief Load InI class()
+	 *
+	 * @since_tizen 5.5
+	 */
+	int LoadInI();
+
+	/**
+	 * @brief UnLoad InI class()
+	 *
+	 * @since_tizen 5.5
+	 */
+	void UnLoadInI();
+
+	/**
+	 * @brief Returns the backend engine id cached by LoadInI().
+	 *
+	 * @since_tizen 5.5
+	 */
+	int GetSelectedBackendEngine();
+private:
+	std::string mIniDefaultPath;	// SYSCONFDIR + "/inference/inference_engine.ini"
+	inference_backend_type_e mSelectedBackendEngine;	// cached by LoadInI()
+};
+
+} /* InferenceEngineInterface */
+} /* Common */
+
+#endif /* __INFERENCE_ENGINE_INI_H__ */
*
*/
typedef enum {
- INFERENCE_TARGET_NONE = -1,
- INFERENCE_TARGET_CPU, /**< CPU */
- INFERENCE_TARGET_GPU, /**< GPU */
- INFERENCE_TARGET_CUSTOM, /**< NPU */
- INFERENCE_TARGET_MAX
+ INFERENCE_BACKEND_NONE = -1, /**< None */
+ INFERENCE_BACKEND_OPENCV, /**< OpenCV */
+ INFERENCE_BACKEND_TFLITE, /**< TensorFlow-Lite */
+ INFERENCE_BACKEND_MAX /**< Backend MAX */
+} inference_backend_type_e;
+
+/**
+ * @brief Enumeration for inference target.
+ *
+ * @since_tizen 5.5
+ *
+ */
+typedef enum {
+ INFERENCE_TARGET_NONE = -1,
+ INFERENCE_TARGET_CPU, /**< CPU */
+ INFERENCE_TARGET_GPU, /**< GPU */
+ INFERENCE_TARGET_CUSTOM, /**< NPU */
+ INFERENCE_TARGET_MAX
} inference_target_type_e;
/**
class InferenceEngineVision {
public:
-
InferenceEngineVision(std::string backend);
~InferenceEngineVision();
* @since_tizen 5.5
*/
int SetUserFile(std::string filename);
-
+ int SetLibraryPath(std::string path);
protected:
/**
* @brief Set an input with a type of cv::Mat
* @since_tizen 5.5
*/
void SetUserListName(std::string userlist);
-
private:
std::string mBackendLibName;
std::vector<std::string> mUserListName;
Name: inference-engine-interface
Summary: Interface of inference engines
Version: 0.0.1
-Release: 8
+Release: 9
Group: Multimedia/Framework
License: Apache-2.0
Source0: %{name}-%{version}.tar.gz
BuildRequires: pkgconfig(capi-base-common)
BuildRequires: pkgconfig(opencv) >= 3.4.1
BuildRequires: pkgconfig(python)
+BuildRequires: pkgconfig(iniparser)
%description
Interface of inference engines
export FFLAGS="$FFLAGS -DTIZEN_DEBUG_ENABLE"
%endif
-export CFLAGS+=" -DPATH_LIBDIR=\\\"%{_libdir}\\\""
-export CXXFLAGS+=" -DPATH_LIBDIR=\\\"%{_libdir}\\\""
+export CFLAGS+=" -DPATH_LIBDIR=\\\"%{_libdir}\\\" -DSYSCONFDIR=\\\"%{_sysconfdir}\\\""
+export CXXFLAGS+=" -DPATH_LIBDIR=\\\"%{_libdir}\\\" -DSYSCONFDIR=\\\"%{_sysconfdir}\\\""
MAJORVER=`echo %{version} | awk 'BEGIN {FS="."}{print $1}'`
%cmake . -DFULLVER=%{version} -DMAJORVER=${MAJORVER} -DTZ_SYS_BIN=%TZ_SYS_BIN \
namespace InferenceEngineInterface {
namespace Vision {
-
InferenceEngineVision::InferenceEngineVision(std::string backend) :
mCh(0),
mDim(0),
return ret;
}
+/**
+ * @brief Forwards the backend library path override to the common engine.
+ *
+ * @param path  directory or full path of the backend .so file.
+ * @return INFERENCE_ENGINE_ERROR_NONE on success, otherwise the error
+ *         propagated from InferenceEngineCommon::SetLibraryPath().
+ */
+int InferenceEngineVision::SetLibraryPath(std::string path)
+{
+	LOGE("ENTER");
+	// Consistency fix: initialize from the call directly (was `ret = 0;`
+	// then reassignment) and use the file's LOGE ENTER/LEAVE convention
+	// (was a lone LOGI("LEAVE") with no ENTER log).
+	int ret = mCommonEngine->SetLibraryPath(path);
+	if (ret != INFERENCE_ENGINE_ERROR_NONE)
+		LOGE("Fail to run SetLibraryPath");
+
+	LOGE("LEAVE");
+	return ret;
+}
} /* Vision */
} /* InferenceEngineInterface */