test: clear owner_is_backend in case of using user-allocated tensor buffers
author: Inki Dae <inki.dae@samsung.com>
Thu, 5 Mar 2020 04:24:12 +0000 (13:24 +0900)
committer: Inki Dae <inki.dae@samsung.com>
Tue, 14 Apr 2020 00:42:53 +0000 (09:42 +0900)
If the user allocates the tensor buffers, then clear owner_is_backend
so that the user can also release the buffers.

Change-Id: Ie06199fd0a8bc5dde0c73648e43f1fd000526093
Signed-off-by: Inki Dae <inki.dae@samsung.com>
test/src/inference_engine_test.cpp

index 0b763dc259c9ecf3aff346a487615b6c6444e0ed..9d73438e158c0a1ab5da2b96bf936c3a724bf1f6 100644 (file)
@@ -143,7 +143,8 @@ int PrepareTensorBuffers(InferenceEngineCommon *engine, std::vector<inference_en
                 tensor_buffer.size = tensor_info.size;
                        }
 
-            EXPECT_TRUE(tensor_buffer.buffer);
+                       EXPECT_TRUE(tensor_buffer.buffer);
+                       tensor_buffer.owner_is_backend = 0;
                        tensor_buffer.data_type = tensor_info.data_type;
                        inputs.push_back(tensor_buffer);
                }
@@ -173,7 +174,8 @@ int PrepareTensorBuffers(InferenceEngineCommon *engine, std::vector<inference_en
                 tensor_buffer.size = tensor_info.size;
                        }
 
-            EXPECT_TRUE(tensor_buffer.buffer);
+                       EXPECT_TRUE(tensor_buffer.buffer);
+                       tensor_buffer.owner_is_backend = 0;
                        tensor_buffer.data_type = tensor_info.data_type;
                        outputs.push_back(tensor_buffer);
                }