return INFERENCE_ENGINE_ERROR_NONE;
}
+void CleanupTensorBuffers(std::vector<inference_engine_tensor_buffer> &inputs, std::vector<inference_engine_tensor_buffer> &outputs)
+{
+	// Free each tensor buffer with the delete[] form matching its
+	// allocation type, then drop the vector's storage by swapping
+	// with an empty temporary (clear() alone would keep the capacity).
+	auto release = [](std::vector<inference_engine_tensor_buffer> &buffers) {
+		for (auto &tensor_buffer : buffers) {
+			if (tensor_buffer.data_type == TENSOR_DATA_TYPE_FLOAT32)
+				delete[] static_cast<float *>(tensor_buffer.buffer);
+			else
+				delete[] static_cast<unsigned char *>(tensor_buffer.buffer);
+		}
+		std::vector<inference_engine_tensor_buffer>().swap(buffers);
+	};
+
+	release(inputs);
+	release(outputs);
+}
+
void CopyFileToMemory(const char *file_name, inference_engine_tensor_buffer &buffer, unsigned int size)
{
int fd = open(file_name, O_RDONLY);
break;
}
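+	// Release all input/output tensor buffers before tearing down the engine.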
+ CleanupTensorBuffers(inputs, outputs);
+
engine->UnbindBackend();
delete engine;