namespace InferenceEngineInterface {
namespace Common {
-const char* engineLibs[] = {
- "libinference-engine-caffe.so",
- "libinference-engine-tf.so",
- "libinference-engine-tflite.so"};
-
-InferenceEngineCommon::InferenceEngineCommon(inference_backend_type_e backend) :
- mBackend(backend)
+InferenceEngineCommon::InferenceEngineCommon(std::string backend)
{
LOGE("ENTER");
+ mBackendLibName = "libinference-engine-" + backend + ".so";
LOGE("LEAVE");
}
{
LOGW("ENTER");
char *error = NULL;
- handle = dlopen(engineLibs[mBackend], RTLD_LAZY);
+ handle = dlopen(mBackendLibName.c_str(), RTLD_LAZY);
if (!handle) {
- LOGE("Fail to dlopen %s", engineLibs[mBackend]);
+ LOGE("Fail to dlopen %s", mBackendLibName.c_str());
LOGE("Error: %s\n", dlerror());
return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
}
class InferenceEngineCommon {
public:
- InferenceEngineCommon(inference_backend_type_e backend);
+
+ InferenceEngineCommon(std::string backend);
~InferenceEngineCommon();
private:
void *handle;
IInferenceEngineCommon *engine;
- inference_backend_type_e mBackend;
+ std::string mBackendLibName;
std::vector<std::string> mUserListName;
};
* inference engine API.
*/
-/**
- * @brief Enumeration for inference backend.
- *
- * @since_tizen 5.5
- *
- * @see mv_inference_prepare()
- */
-typedef enum {
- INFERENCE_BACKEND_NONE = -1,
- INFERENCE_BACKEND_OPENCV, /**< OpenCV */
- INFERENCE_BACKEND_CAFFE, /**< Caffe */
- INFERENCE_BACKEND_TF, /**< TensorFlow */
- INFERENCE_BACKEND_TFLite, /**< TensorFlow-Lite */
- INFERENCE_BACKEND_MAX
-} inference_backend_type_e;
-
/**
* @brief Enumeration for inference target.
*
*
*/
typedef enum {
- INFERENCE_TARGET_NONE = -1,
+ INFERENCE_TARGET_NONE = -1,
INFERENCE_TARGET_CPU, /**< CPU */
- INFERENCE_TARGET_GPU, /**< GPU*/
- INFERENCE_TARGET_MAX
+ INFERENCE_TARGET_GPU, /**< GPU */
+    INFERENCE_TARGET_CUSTOM, /**< Custom device (e.g. NPU) */
+ INFERENCE_TARGET_MAX
} inference_target_type_e;
typedef enum {
}
#endif /* __cplusplus */
-#endif /* __INFERENCE_ENGINE_TYPE_H__ */
\ No newline at end of file
+#endif /* __INFERENCE_ENGINE_TYPE_H__ */
class InferenceEngineVision {
public:
- InferenceEngineVision(inference_backend_type_e backend);
+ InferenceEngineVision(std::string backend);
~InferenceEngineVision();
int GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results);
int GetNumberOfOutputs();
-
+
void SetUserListName(std::string userlist);
private:
void *handle;
IInferenceEngineVision *engine;
- inference_backend_type_e mBackend;
+ std::string mBackendLibName;
std::vector<std::string> mUserListName;
};
} /* Vision */
} /* InferenceEngineInterface */
-#endif /* __INFERENCE_ENGINE_VISION_IMPL_H__ */
\ No newline at end of file
+#endif /* __INFERENCE_ENGINE_VISION_IMPL_H__ */
Name: inference-engine-interface
Summary: Interface of inference engines
Version: 0.0.1
-Release: 1
+Release: 2
Group: Multimedia/Framework
License: Apache-2.0
Source0: %{name}-%{version}.tar.gz
%files vision-devel
%{_includedir}/media/*.h
%{_libdir}/pkgconfig/*vision.pc
-%{_libdir}/lib*-vision.so
\ No newline at end of file
+%{_libdir}/lib*-vision.so
namespace InferenceEngineInterface {
namespace Vision {
-const char* engineLibs[] = {
- "libinference-engine-opencv.so",
- "libinference-engine-caffe.so",
- "libinference-engine-tf.so",
- "libinference-engine-tflite.so"};
-
-InferenceEngineVision::InferenceEngineVision(inference_backend_type_e backend) :
- mBackend(backend)
+InferenceEngineVision::InferenceEngineVision(std::string backend)
{
LOGE("ENTER");
+ mBackendLibName = "libinference-engine-" + backend + ".so";
LOGE("LEAVE");
}
{
LOGW("ENTER");
char *error = NULL;
- handle = dlopen(engineLibs[mBackend], RTLD_LAZY);
+ handle = dlopen(mBackendLibName.c_str(), RTLD_LAZY);
+ LOGE("dlopen %s", mBackendLibName.c_str());
if (!handle) {
- LOGE("Fail to dlopen %s", engineLibs[mBackend]);
+ LOGE("Fail to dlopen %s", mBackendLibName.c_str());
LOGE("Error: %s\n", dlerror());
return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
}
-
+
init_t* EngineInit = (init_t *)dlsym(handle, "EngineVisionInit");
if ((error = dlerror()) != NULL) {
LOGE("Error: %s\n", error);
dlclose(handle);
return INFERENCE_ENGINE_ERROR_INTERNAL;
}
-
+
engine = EngineInit(configFile, weightFile, userFile);
if (engine == NULL) {
LOGE("Fail to EngineInit");