Rename engine to mBackendHandle
author Inki Dae <inki.dae@samsung.com>
Fri, 27 Mar 2020 06:13:08 +0000 (15:13 +0900)
committer Inki Dae <inki.dae@samsung.com>
Tue, 14 Apr 2020 00:42:53 +0000 (09:42 +0900)
Change-Id: I0ef2b66e57062fbd98b2b92189807198d36bfd51
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/inference_engine_common_impl.h
src/inference_engine_common_impl.cpp

index 7bb6445..d51e24c 100755 (executable)
@@ -219,7 +219,7 @@ private:
     inference_backend_type_e mSelectedBackendEngine;
 protected:
     void *mBackendModule;
-    IInferenceEngineCommon *engine;
+    IInferenceEngineCommon *mBackendHandle;
 
 };
 
index 7d0f513..45adedf 100755 (executable)
@@ -41,7 +41,7 @@ namespace Common {
 InferenceEngineCommon::InferenceEngineCommon() :
     mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
     mBackendModule(nullptr),
-    engine(nullptr)
+    mBackendHandle(nullptr)
 {
     LOGE("ENTER");
     InferenceEngineInI ini;
@@ -55,7 +55,7 @@ InferenceEngineCommon::InferenceEngineCommon() :
 InferenceEngineCommon::InferenceEngineCommon(std::string backend) :
     mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
     mBackendModule(nullptr),
-    engine(nullptr)
+    mBackendHandle(nullptr)
 {
     LOGE("ENTER");
     mBackendLibName = "libinference-engine-" + backend + ".so";
@@ -65,7 +65,7 @@ InferenceEngineCommon::InferenceEngineCommon(std::string backend) :
 InferenceEngineCommon::InferenceEngineCommon(inference_backend_type_e backend) :
     mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
     mBackendModule(nullptr),
-    engine(nullptr)
+    mBackendHandle(nullptr)
 {
     LOGE("ENTER");
     SetBackendEngine(backend);
@@ -77,7 +77,7 @@ InferenceEngineCommon::InferenceEngineCommon(inference_backend_type_e backend) :
 InferenceEngineCommon::InferenceEngineCommon(inference_engine_config *config) :
     mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
     mBackendModule(nullptr),
-    engine(nullptr)
+    mBackendHandle(nullptr)
 {
     LOGI("ENTER");
 
@@ -117,8 +117,8 @@ int InferenceEngineCommon::BindBackend(inference_engine_config *config)
         return INFERENCE_ENGINE_ERROR_INTERNAL;
     }
 
-    engine = EngineInit();
-    if (engine == NULL) {
+    mBackendHandle = EngineInit();
+    if (mBackendHandle == NULL) {
         LOGE("Fail to EngineInit");
         dlclose(mBackendModule);
                mBackendModule = nullptr;
@@ -136,9 +136,9 @@ void InferenceEngineCommon::UnbindBackend(void)
 
     if (mBackendModule) {
         destroy_t *engineDestroy = (destroy_t*)dlsym(mBackendModule, "EngineCommonDestroy");
-        engineDestroy(engine);
+        engineDestroy(mBackendHandle);
         dlclose(mBackendModule);
-        engine = nullptr;
+        mBackendHandle = nullptr;
         mBackendModule = nullptr;
     }
 
@@ -147,7 +147,7 @@ void InferenceEngineCommon::UnbindBackend(void)
 
 int InferenceEngineCommon::SetTargetDevices(int types)
 {
-    int ret = engine->SetTargetDevices(types);
+    int ret = mBackendHandle->SetTargetDevices(types);
     if (ret != INFERENCE_ENGINE_ERROR_NONE)
         LOGE("Fail to SetTargetDevice");
 
@@ -158,7 +158,7 @@ int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_
 {
     LOGI("ENTER");
 
-    int ret = engine->Load(model_paths, model_format);
+    int ret = mBackendHandle->Load(model_paths, model_format);
     if (ret != INFERENCE_ENGINE_ERROR_NONE)
         LOGE("Fail to load InferenceEngineVision");
 
@@ -169,43 +169,43 @@ int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_
 
 int InferenceEngineCommon::GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
 {
-    return engine->GetInputTensorBuffers(buffers);
+    return mBackendHandle->GetInputTensorBuffers(buffers);
 }
 
 int InferenceEngineCommon::GetOutputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
 {
-    return engine->GetOutputTensorBuffers(buffers);
+    return mBackendHandle->GetOutputTensorBuffers(buffers);
 }
 
 int InferenceEngineCommon::GetInputLayerProperty(inference_engine_layer_property &property)
 {
-    return engine->GetInputLayerProperty(property);
+    return mBackendHandle->GetInputLayerProperty(property);
 }
 
 int InferenceEngineCommon::GetOutputLayerProperty(inference_engine_layer_property &property)
 {
-    return engine->GetOutputLayerProperty(property);
+    return mBackendHandle->GetOutputLayerProperty(property);
 }
 
 int InferenceEngineCommon::SetInputLayerProperty(inference_engine_layer_property &property)
 {
-    return engine->SetInputLayerProperty(property);
+    return mBackendHandle->SetInputLayerProperty(property);
 }
 
 int InferenceEngineCommon::SetOutputLayerProperty(inference_engine_layer_property &property)
 {
-    return engine->SetOutputLayerProperty(property);
+    return mBackendHandle->SetOutputLayerProperty(property);
 }
 
 int InferenceEngineCommon::GetBackendCapacity(inference_engine_capacity *capacity)
 {
-    return engine->GetBackendCapacity(capacity);
+    return mBackendHandle->GetBackendCapacity(capacity);
 }
 
 int InferenceEngineCommon::Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
                                 std::vector<inference_engine_tensor_buffer> &output_buffers)
 {
-    return engine->Run(input_buffers, output_buffers);
+    return mBackendHandle->Run(input_buffers, output_buffers);
 }
 
 int InferenceEngineCommon::SetLibraryPath(std::string path)