return INFERENCE_ENGINE_ERROR_NONE;
}
- int InferenceMLAPI::CreateMLAPITensorInfo(ml_tensors_info_h& tensor_info,
- inference_engine_layer_property& layer_property)
+ int InferenceMLAPI::SetTensorInfo(ml_tensors_info_h& tensor_info,
+ inference_engine_layer_property& layer_property)
{
- if (layer_property.layers.empty()) {
- LOGE("input or output property is empty.");
- return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
- }
-
- int err = ml_tensors_info_create(&tensor_info);
- if (err != ML_ERROR_NONE) {
- LOGE("Failed to create tensor info(%d).", err);
- return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
- }
-
- err = ml_tensors_info_set_count(tensor_info, layer_property.layers.size());
+ int err = ml_tensors_info_set_count(tensor_info, layer_property.layers.size());
if (err != ML_ERROR_NONE) {
LOGE("Failed to set tensor count(%d).", err);
return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
return INFERENCE_ENGINE_ERROR_NONE;
}
+ // Creates a new ML-API tensors-info handle and fills it from
+ // |layer_property| via SetTensorInfo().
+ //
+ // @param[out] tensor_info    receives the created handle on success.
+ //                            On failure it is reset to nullptr so the
+ //                            caller never observes a dangling handle
+ //                            (callers destroy handles on error paths).
+ // @param[in]  layer_property layer descriptions used to populate the
+ //                            tensor info; must contain at least one layer.
+ // @return INFERENCE_ENGINE_ERROR_NONE on success, otherwise an
+ //         inference-engine error code.
+ int InferenceMLAPI::CreateMLAPITensorInfo(ml_tensors_info_h& tensor_info,
+ inference_engine_layer_property& layer_property)
+ {
+ if (layer_property.layers.empty()) {
+ LOGE("input or output property is empty.");
+ return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+ }
+
+ int err = ml_tensors_info_create(&tensor_info);
+ if (err != ML_ERROR_NONE) {
+ LOGE("Failed to create tensor info(%d).", err);
+ return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
+ }
+
+ err = SetTensorInfo(tensor_info, layer_property);
+ if (err != INFERENCE_ENGINE_ERROR_NONE) {
+ // Tear down the half-initialized handle and clear it so a caller
+ // cannot accidentally destroy the stale handle a second time.
+ ml_tensors_info_destroy(tensor_info);
+ tensor_info = nullptr;
+ }
+
+ return err;
+ }
+
std::tuple<ml_nnfw_type_e, ml_nnfw_hw_e> InferenceMLAPI::GetNNFWInfo()
{
switch (mPluginType) {
return ret;
ret = CreateMLAPITensorInfo(out_info, mOutputProperty);
- if (ret != INFERENCE_ENGINE_ERROR_NONE)
+ if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+ ml_tensors_info_destroy(in_info);
return ret;
+ }
}
int err = ml_single_open_full(&mSingle, model_str.c_str(), in_info, out_info,
nnfw_type, nnfw_hw, GetCustomProp());
if (err != ML_ERROR_NONE) {
LOGE("Failed to request ml_single_open_full(%d).", err);
+ ml_tensors_info_destroy(in_info);
+ ml_tensors_info_destroy(out_info);
return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
}
+ ml_tensors_info_destroy(in_info);
+ ml_tensors_info_destroy(out_info);
+
if (mInputInfoHandle) {
ml_tensors_info_destroy(mInputInfoHandle);
mInputInfoHandle = NULL;
err = ml_single_get_output_info(mSingle, &mOutputInfoHandle);
if (err != ML_ERROR_NONE) {
LOGE("Failed to request ml_single_get_output_info(%d).", err);
+ ml_tensors_info_destroy(mOutputInfoHandle);
return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
}
ret = ml_tensors_data_get_tensor_data(mInputDataHandle, input.second, &in_buffer.buffer, &in_buffer.size);
if (ret != ML_ERROR_NONE) {
LOGE("Failed to request ml_tensors_data_get_tensor_data(%d).", ret);
+ ml_tensors_data_destroy(mInputDataHandle);
+
return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
}
ret = ml_tensors_info_get_tensor_type(mInputInfoHandle, input.second, &in_type);
if (ret != ML_ERROR_NONE) {
LOGE("Failed to request ml_tensors_info_get_tensor_type(%d).", ret);
+ ml_tensors_data_destroy(mInputDataHandle);
+
return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
}
type = ConvertTensorTypeToInternal(in_type);
} catch (const std::invalid_argument& ex) {
LOGE("Error (%s) (%d)", ex.what(), in_type);
+ ml_tensors_data_destroy(mInputDataHandle);
+
return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
}
ret = ml_tensors_data_get_tensor_data(mOutputDataHandle, output.second, &out_buffer.buffer, &out_buffer.size);
if (ret != ML_ERROR_NONE) {
LOGE("Failed to request ml_tensors_data_get_tensor_data(%d).", ret);
+ ml_tensors_data_destroy(mOutputDataHandle);
+
return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
}
ret = ml_tensors_info_get_tensor_type(mOutputInfoHandle, output.second, &out_type);
if (ret != ML_ERROR_NONE) {
LOGE("Failed to request ml_tensors_info_get_tensor_type(%d).", ret);
+ ml_tensors_data_destroy(mOutputDataHandle);
+
return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
}
type = ConvertTensorTypeToInternal(out_type);
} catch (const std::invalid_argument& ex) {
LOGE("Error (%s) (%d)", ex.what(), out_type);
+ ml_tensors_data_destroy(mOutputDataHandle);
+
return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
}