Merge "Support two more tensor types" into tizen accepted/tizen_6.0_unified accepted/tizen_6.0_unified_hotfix tizen_6.0 tizen_6.0_hotfix accepted/tizen/6.0/unified/20201030.121108 accepted/tizen/6.0/unified/hotfix/20201103.051119 accepted/tizen/unified/20200831.002556 submit/tizen/20200828.025650 submit/tizen/20200828.100528 submit/tizen_6.0/20201029.205103 submit/tizen_6.0_hotfix/20201102.192503 submit/tizen_6.0_hotfix/20201103.114803 tizen_6.0.m2_release
author Inki Dae <inki.dae@samsung.com>
Wed, 26 Aug 2020 06:35:27 +0000 (06:35 +0000)
committer Gerrit Code Review <gerrit@review>
Wed, 26 Aug 2020 06:35:27 +0000 (06:35 +0000)
src/inference_engine_mlapi.cpp

@@@ -487,6 -487,10 +487,10 @@@ namespace MLAPIImp
                        return INFERENCE_TENSOR_DATA_TYPE_UINT8;
                case ML_TENSOR_TYPE_UINT16:
                        return INFERENCE_TENSOR_DATA_TYPE_UINT16;
+               case ML_TENSOR_TYPE_INT64:
+                       return INFERENCE_TENSOR_DATA_TYPE_INT64;
+               case ML_TENSOR_TYPE_UINT64:
+                       return INFERENCE_TENSOR_DATA_TYPE_UINT64;
                default:
                        LOGE("Tensor type(%d) is invalid.", tensor_type);
                        return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
                        return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                }
  
 -              unsigned int cnt;
 -              err = ml_tensors_info_get_count(in_info, &cnt);
 +              unsigned int in_cnt;
 +              err = ml_tensors_info_get_count(in_info, &in_cnt);
                if (err != ML_ERROR_NONE) {
                        LOGE("Failed to request ml_tensors_info_get_count(%d).", err);
                        return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                }
  
 -              for (unsigned int i = 0; i < cnt; ++i) {
 +              ml_tensors_info_h out_info = NULL;
 +
 +              err = ml_single_get_output_info(mSingle, &out_info);
 +              if (err != ML_ERROR_NONE) {
 +                      LOGE("Failed to request ml_single_get_output_info(%d).", err);
 +                      return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
 +              }
 +
 +              unsigned int out_cnt;
 +              err = ml_tensors_info_get_count(out_info, &out_cnt);
 +              if (err != ML_ERROR_NONE) {
 +                      LOGE("Failed to request ml_tensors_info_get_count(%d).", err);
 +                      return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
 +              }
 +
 +              for (unsigned int i = 0; i < in_cnt; ++i) {
                        LOGI("index(%d) : buffer = %p, size = %zu\n", i,
                                 input_buffers[i].buffer, input_buffers[i].size);
                        err = ml_tensors_data_set_tensor_data(input_data, i,
                        return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                }
  
 -              // TODO. Consider multiple output tensors.
 -
 -              err = ml_tensors_data_get_tensor_data(
 -                              output_data, 0, (void **) &output_buffers[0].buffer,
 -                              &output_buffers[0].size);
 -              if (err != ML_ERROR_NONE) {
 -                      LOGE("Failed to request ml_tensors_data_get_tensor_data(%d).", err);
 -                      return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
 +              for (unsigned int i = 0; i < out_cnt; ++i) {
 +                      err = ml_tensors_data_get_tensor_data(
 +                              output_data, i, (void **) &output_buffers[i].buffer,
 +                              &output_buffers[i].size);
 +                      if (err != ML_ERROR_NONE) {
 +                              LOGE("Failed to request ml_tensors_data_get_tensor_data(%d).", err);
 +                              return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
 +                      }
 +                      LOGI("Output tensor[%u] = %zu", i, output_buffers[i].size);
                }
  
 -              LOGI("Output tensor = %zu", output_buffers[0].size);
                LOGI("LEAVE");
  
                return INFERENCE_ENGINE_ERROR_NONE;
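
The first hunk extends the switch that maps NNStreamer (ML API) tensor types to the inference-engine data types with the two 64-bit integer cases. For orientation, a minimal sketch of such a conversion helper is shown below; the helper name ConvertTensorTypeToInternal and the FLOAT32 case are assumptions, while the UINT8/UINT16/INT64/UINT64 cases and the default branch come from the hunk above.

// Hypothetical sketch of the type-mapping helper this hunk modifies.
// Only the cases visible in the diff are confirmed by the source; the FLOAT32
// case and the helper's name/signature are assumed for illustration.
static int ConvertTensorTypeToInternal(ml_tensor_type_e tensor_type)
{
        switch (tensor_type) {
        case ML_TENSOR_TYPE_FLOAT32:
                return INFERENCE_TENSOR_DATA_TYPE_FLOAT32;
        case ML_TENSOR_TYPE_UINT8:
                return INFERENCE_TENSOR_DATA_TYPE_UINT8;
        case ML_TENSOR_TYPE_UINT16:
                return INFERENCE_TENSOR_DATA_TYPE_UINT16;
        case ML_TENSOR_TYPE_INT64:
                return INFERENCE_TENSOR_DATA_TYPE_INT64;
        case ML_TENSOR_TYPE_UINT64:
                return INFERENCE_TENSOR_DATA_TYPE_UINT64;
        default:
                LOGE("Tensor type(%d) is invalid.", tensor_type);
                return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
        }
}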
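
The remaining hunks replace the single hard-coded output tensor with a loop over all outputs reported by ml_single_get_output_info(). The sketch below shows that flow end to end against the NNStreamer single-shot C API; it is illustrative rather than the exact code in inference_engine_mlapi.cpp, and the function name, variable names, and error handling are assumptions. The key design point is that the output count is queried from the single handle instead of being assumed to be one, which is what lets the loop handle models with multiple output tensors.

// Illustrative sketch: invoke a model once, then pull every output tensor's
// raw buffer. Include path may vary by build setup.
#include <nnstreamer-single.h>

int run_and_collect_outputs(ml_single_h single, ml_tensors_data_h input_data)
{
        // Run inference; output_data is newly allocated by the call on success.
        ml_tensors_data_h output_data = NULL;
        int err = ml_single_invoke(single, input_data, &output_data);
        if (err != ML_ERROR_NONE)
                return err;

        // Ask the single handle how many output tensors the model produces.
        ml_tensors_info_h out_info = NULL;
        err = ml_single_get_output_info(single, &out_info);
        if (err != ML_ERROR_NONE)
                return err;

        unsigned int out_cnt = 0;
        err = ml_tensors_info_get_count(out_info, &out_cnt);
        if (err != ML_ERROR_NONE)
                return err;

        // Each output tensor exposes a raw pointer and size owned by output_data.
        for (unsigned int i = 0; i < out_cnt; ++i) {
                void *buffer = NULL;
                size_t size = 0;
                err = ml_tensors_data_get_tensor_data(output_data, i, &buffer, &size);
                if (err != ML_ERROR_NONE)
                        return err;
                // Consume buffer/size here, e.g. copy into the caller's output buffers.
        }

        ml_tensors_info_destroy(out_info);
        ml_tensors_data_destroy(output_data);
        return ML_ERROR_NONE;
}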