test: Release tensor buffers after inference
author: Inki Dae <inki.dae@samsung.com>
Thu, 5 Mar 2020 01:25:21 +0000 (10:25 +0900)
committer: Inki Dae <inki.dae@samsung.com>
Tue, 14 Apr 2020 00:42:53 +0000 (09:42 +0900)
Change-Id: I27cfdd2ae2a9d1ee97c60c3211e309e26334399e
Signed-off-by: Inki Dae <inki.dae@samsung.com>
test/src/inference_engine_test.cpp

index 27125d18ecfbcdedc49620d4da1dff63e92bff8b..96f6d33cb0ce1beb280bc769ff9932935e4aac01 100644 (file)
@@ -182,6 +182,33 @@ int PrepareTensorBuffers(InferenceEngineCommon *engine, std::vector<inference_en
     return INFERENCE_ENGINE_ERROR_NONE;
 }
 
+void CleanupTensorBuffers(std::vector<inference_engine_tensor_buffer> &inputs, std::vector<inference_engine_tensor_buffer> &outputs)
+{
+       if (!inputs.empty()) {
+               std::vector<inference_engine_tensor_buffer>::iterator iter;
+               for (iter = inputs.begin(); iter != inputs.end(); iter++) {
+                       inference_engine_tensor_buffer tensor_buffer = *iter;
+                       if (tensor_buffer.data_type == TENSOR_DATA_TYPE_FLOAT32)
+                               delete[] (float *)tensor_buffer.buffer;
+                       else
+                               delete[] (unsigned char *)tensor_buffer.buffer;
+               }
+               std::vector<inference_engine_tensor_buffer>().swap(inputs);
+       }
+
+       if (!outputs.empty()) {
+               std::vector<inference_engine_tensor_buffer>::iterator iter;
+               for (iter = outputs.begin(); iter != outputs.end(); iter++) {
+                       inference_engine_tensor_buffer tensor_buffer = *iter;
+                       if (tensor_buffer.data_type == TENSOR_DATA_TYPE_FLOAT32)
+                               delete[] (float *)tensor_buffer.buffer;
+                       else
+                               delete[] (unsigned char *)tensor_buffer.buffer;
+               }
+               std::vector<inference_engine_tensor_buffer>().swap(outputs);
+       }
+}
+
 void CopyFileToMemory(const char *file_name, inference_engine_tensor_buffer &buffer, unsigned int size)
 {
     int fd = open(file_name, O_RDONLY);
@@ -497,6 +524,8 @@ TEST_P(InferenceEngineCommonTest_3, Inference)
         break;
     }
 
+    CleanupTensorBuffers(inputs, outputs);
+
     engine->UnbindBackend();
 
     delete engine;