From 71caed377fb20320ba6f069b531ebc2bfed7d28a Mon Sep 17 00:00:00 2001 From: Hyo Jong Kim Date: Tue, 25 Aug 2020 11:20:46 +0900 Subject: [PATCH] Support multiple output tensor Get the information and the number of output tensor Set the output tensor according to that number Change-Id: Ie803aa0aee194091006db29bd86a3d24a4f922df Signed-off-by: Hyo Jong Kim --- src/inference_engine_mlapi.cpp | 39 +++++++++++++++++++++++----------- 1 file changed, 27 insertions(+), 12 deletions(-) diff --git a/src/inference_engine_mlapi.cpp b/src/inference_engine_mlapi.cpp index b4d7a04..7570359 100644 --- a/src/inference_engine_mlapi.cpp +++ b/src/inference_engine_mlapi.cpp @@ -524,14 +524,29 @@ namespace MLAPIImpl return INFERENCE_ENGINE_ERROR_INVALID_OPERATION; } - unsigned int cnt; - err = ml_tensors_info_get_count(in_info, &cnt); + unsigned int in_cnt; + err = ml_tensors_info_get_count(in_info, &in_cnt); if (err != ML_ERROR_NONE) { LOGE("Failed to request ml_tensors_info_get_count(%d).", err); return INFERENCE_ENGINE_ERROR_INVALID_OPERATION; } - for (unsigned int i = 0; i < cnt; ++i) { + ml_tensors_info_h out_info = NULL; + + err = ml_single_get_output_info(mSingle, &out_info); + if (err != ML_ERROR_NONE) { + LOGE("Failed to request ml_single_get_output_info(%d).", err); + return INFERENCE_ENGINE_ERROR_INVALID_OPERATION; + } + + unsigned int out_cnt; + err = ml_tensors_info_get_count(out_info, &out_cnt); + if (err != ML_ERROR_NONE) { + LOGE("Failed to request ml_tensors_info_get_count(%d).", err); + return INFERENCE_ENGINE_ERROR_INVALID_OPERATION; + } + + for (unsigned int i = 0; i < in_cnt; ++i) { LOGI("index(%d) : buffer = %p, size = %zu\n", i, input_buffers[i].buffer, input_buffers[i].size); err = ml_tensors_data_set_tensor_data(input_data, i, @@ -551,17 +566,17 @@ namespace MLAPIImpl return INFERENCE_ENGINE_ERROR_INVALID_OPERATION; } - // TODO. Consider multiple output tensors. 
- - err = ml_tensors_data_get_tensor_data( - output_data, 0, (void **) &output_buffers[0].buffer, - &output_buffers[0].size); - if (err != ML_ERROR_NONE) { - LOGE("Failed to request ml_tensors_data_get_tensor_data(%d).", err); - return INFERENCE_ENGINE_ERROR_INVALID_OPERATION; + for (unsigned int i = 0; i < out_cnt; ++i) { + err = ml_tensors_data_get_tensor_data( + output_data, i, (void **) &output_buffers[i].buffer, + &output_buffers[i].size); + if (err != ML_ERROR_NONE) { + LOGE("Failed to request ml_tensors_data_get_tensor_data(%d).", err); + return INFERENCE_ENGINE_ERROR_INVALID_OPERATION; + } + LOGI("Output tensor[%u] = %zu", i, output_buffers[i].size); } - LOGI("Output tensor = %zu", output_buffers[0].size); LOGI("LEAVE"); return INFERENCE_ENGINE_ERROR_NONE; -- 2.34.1