check inference engine backend handle correctly 11/233111/1
author: Inki Dae <inki.dae@samsung.com>
Tue, 12 May 2020 06:27:59 +0000 (15:27 +0900)
committer: Inki Dae <inki.dae@samsung.com>
Tue, 12 May 2020 07:41:27 +0000 (16:41 +0900)
Inference engine backend callbacks should be requested only after
verifying that the backend handle is valid (non-null).

Change-Id: I15d25664186710d455835ed6d7055317d43275ca
Signed-off-by: Inki Dae <inki.dae@samsung.com>
src/inference_engine_common_impl.cpp

index d49e074c82de00e06728bbe7245e0902bcc0a80e..d85e5c4c31c45d38a1d9aa580620616eeed63d81 100755 (executable)
@@ -35,6 +35,12 @@ extern "C" {
 #define LOG_TAG "INFERENCE_ENGINE_COMMON"
 }
 
+#define CHECK_ENGINE_INSTANCE(object)                                          \
+       if (object == nullptr) {                                                                \
+               LOGE("Inference engine handle is null.");                       \
+               return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;        \
+       }
+
 namespace fs = std::experimental::filesystem;
 namespace InferenceEngineInterface {
 namespace Common {
@@ -230,6 +236,8 @@ void InferenceEngineCommon::UnbindBackend(void)
 
 int InferenceEngineCommon::SetTargetDevices(int types)
 {
+       CHECK_ENGINE_INSTANCE(mBackendHandle);
+
     int ret = mBackendHandle->SetTargetDevices(types);
     if (ret != INFERENCE_ENGINE_ERROR_NONE)
         LOGE("Fail to SetTargetDevice");
@@ -245,6 +253,8 @@ int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_
 {
     LOGI("ENTER");
 
+       CHECK_ENGINE_INSTANCE(mBackendHandle);
+
        if (mUseProfiler == true) {
                mProfiler->AddModelName(model_paths[0]);
                mProfiler->PushEnv();
@@ -266,6 +276,8 @@ int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_
 
 int InferenceEngineCommon::GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
 {
+       CHECK_ENGINE_INSTANCE(mBackendHandle);
+
     int ret = mBackendHandle->GetInputTensorBuffers(buffers);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
                LOGE("Failed to get input tensor buffers.");
@@ -283,6 +295,8 @@ int InferenceEngineCommon::GetInputTensorBuffers(std::vector<inference_engine_te
 
 int InferenceEngineCommon::GetOutputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
 {
+       CHECK_ENGINE_INSTANCE(mBackendHandle);
+
     int ret = mBackendHandle->GetOutputTensorBuffers(buffers);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
                LOGE("Failed to get output tensor buffers.");
@@ -300,6 +314,8 @@ int InferenceEngineCommon::GetOutputTensorBuffers(std::vector<inference_engine_t
 
 int InferenceEngineCommon::GetInputLayerProperty(inference_engine_layer_property &property)
 {
+       CHECK_ENGINE_INSTANCE(mBackendHandle);
+
     int ret = mBackendHandle->GetInputLayerProperty(property);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
                LOGE("Failed to get input layer property.");
@@ -318,6 +334,8 @@ int InferenceEngineCommon::GetInputLayerProperty(inference_engine_layer_property
 
 int InferenceEngineCommon::GetOutputLayerProperty(inference_engine_layer_property &property)
 {
+       CHECK_ENGINE_INSTANCE(mBackendHandle);
+
     int ret = mBackendHandle->GetOutputLayerProperty(property);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
                LOGE("Failed to get output layer property.");
@@ -336,6 +354,8 @@ int InferenceEngineCommon::GetOutputLayerProperty(inference_engine_layer_propert
 
 int InferenceEngineCommon::SetInputLayerProperty(inference_engine_layer_property &property)
 {
+       CHECK_ENGINE_INSTANCE(mBackendHandle);
+
        int ret = CheckLayerProperty(property);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
                LOGE("Given input layer property is invalid.");
@@ -347,6 +367,8 @@ int InferenceEngineCommon::SetInputLayerProperty(inference_engine_layer_property
 
 int InferenceEngineCommon::SetOutputLayerProperty(inference_engine_layer_property &property)
 {
+       CHECK_ENGINE_INSTANCE(mBackendHandle);
+
        int ret = CheckLayerProperty(property);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
                LOGE("Given output layer property is invalid.");
@@ -358,6 +380,8 @@ int InferenceEngineCommon::SetOutputLayerProperty(inference_engine_layer_propert
 
 int InferenceEngineCommon::GetBackendCapacity(inference_engine_capacity *capacity)
 {
+       CHECK_ENGINE_INSTANCE(mBackendHandle);
+
        if (capacity == nullptr) {
                LOGE("Given inference_engine_capacity object is invalid.");
                return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
@@ -369,6 +393,8 @@ int InferenceEngineCommon::GetBackendCapacity(inference_engine_capacity *capacit
 int InferenceEngineCommon::Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
                                 std::vector<inference_engine_tensor_buffer> &output_buffers)
 {
+       CHECK_ENGINE_INSTANCE(mBackendHandle);
+
        if (mUseProfiler == true) {
                mProfiler->Start(IE_PROFILER_LATENCY);
        }