InferenceEngineCommon::InferenceEngineCommon() :
mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
mBackendModule(nullptr),
- engine(nullptr)
+ mBackendHandle(nullptr)
{
LOGE("ENTER");
InferenceEngineInI ini;
InferenceEngineCommon::InferenceEngineCommon(std::string backend) :
mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
mBackendModule(nullptr),
- engine(nullptr)
+ mBackendHandle(nullptr)
{
LOGE("ENTER");
mBackendLibName = "libinference-engine-" + backend + ".so";
InferenceEngineCommon::InferenceEngineCommon(inference_backend_type_e backend) :
mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
mBackendModule(nullptr),
- engine(nullptr)
+ mBackendHandle(nullptr)
{
LOGE("ENTER");
SetBackendEngine(backend);
InferenceEngineCommon::InferenceEngineCommon(inference_engine_config *config) :
mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
mBackendModule(nullptr),
- engine(nullptr)
+ mBackendHandle(nullptr)
{
LOGI("ENTER");
return INFERENCE_ENGINE_ERROR_INTERNAL;
}
- engine = EngineInit();
- if (engine == NULL) {
+ mBackendHandle = EngineInit();
+ if (mBackendHandle == NULL) {
LOGE("Fail to EngineInit");
dlclose(mBackendModule);
mBackendModule = nullptr;
if (mBackendModule) {
destroy_t *engineDestroy = (destroy_t*)dlsym(mBackendModule, "EngineCommonDestroy");
- engineDestroy(engine);
+ engineDestroy(mBackendHandle);
dlclose(mBackendModule);
- engine = nullptr;
+ mBackendHandle = nullptr;
mBackendModule = nullptr;
}
int InferenceEngineCommon::SetTargetDevices(int types)
{
- int ret = engine->SetTargetDevices(types);
+ int ret = mBackendHandle->SetTargetDevices(types);
if (ret != INFERENCE_ENGINE_ERROR_NONE)
LOGE("Fail to SetTargetDevice");
{
LOGI("ENTER");
- int ret = engine->Load(model_paths, model_format);
+ int ret = mBackendHandle->Load(model_paths, model_format);
if (ret != INFERENCE_ENGINE_ERROR_NONE)
LOGE("Fail to load InferenceEngineVision");
int InferenceEngineCommon::GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
{
- return engine->GetInputTensorBuffers(buffers);
+ return mBackendHandle->GetInputTensorBuffers(buffers);
}
int InferenceEngineCommon::GetOutputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
{
- return engine->GetOutputTensorBuffers(buffers);
+ return mBackendHandle->GetOutputTensorBuffers(buffers);
}
// Queries the backend for the model's input layer property (filled into
// `property` by the backend). Pure delegation to mBackendHandle; returns
// the backend's INFERENCE_ENGINE_ERROR_* code.
int InferenceEngineCommon::GetInputLayerProperty(inference_engine_layer_property &property)
{
	return mBackendHandle->GetInputLayerProperty(property);
}
// Queries the backend for the model's output layer property. Pure
// delegation to mBackendHandle; returns the backend's
// INFERENCE_ENGINE_ERROR_* code.
int InferenceEngineCommon::GetOutputLayerProperty(inference_engine_layer_property &property)
{
	return mBackendHandle->GetOutputLayerProperty(property);
}
// Pushes a caller-supplied input layer property down to the backend
// plugin. Pure delegation to mBackendHandle; returns the backend's
// INFERENCE_ENGINE_ERROR_* code.
int InferenceEngineCommon::SetInputLayerProperty(inference_engine_layer_property &property)
{
	return mBackendHandle->SetInputLayerProperty(property);
}
// Pushes a caller-supplied output layer property down to the backend
// plugin. Pure delegation to mBackendHandle; returns the backend's
// INFERENCE_ENGINE_ERROR_* code.
int InferenceEngineCommon::SetOutputLayerProperty(inference_engine_layer_property &property)
{
	return mBackendHandle->SetOutputLayerProperty(property);
}
// Asks the bound backend plugin to fill `capacity` with its capability
// description. Note: `capacity` is not null-checked here — presumably
// validated by the backend or the caller; TODO(review) confirm.
// Returns the backend's INFERENCE_ENGINE_ERROR_* code.
int InferenceEngineCommon::GetBackendCapacity(inference_engine_capacity *capacity)
{
	return mBackendHandle->GetBackendCapacity(capacity);
}
int InferenceEngineCommon::Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
std::vector<inference_engine_tensor_buffer> &output_buffers)
{
- return engine->Run(input_buffers, output_buffers);
+ return mBackendHandle->Run(input_buffers, output_buffers);
}
int InferenceEngineCommon::SetLibraryPath(std::string path)