From 8956e8e6a00f8dac753a84c822c1a296be67ebb0 Mon Sep 17 00:00:00 2001 From: Inki Dae Date: Wed, 5 Jan 2022 13:13:58 +0900 Subject: [PATCH] add NNTRAINER backend support [Version] : 0.4.0-0 [Issue type] : new feature Added NNTRAINER backend support. NNTRAINER is a training engine including an inference feature for its internal model. Change-Id: If20ccbf8b709f0af6ac6b71f53c5995b6ec05a4c Signed-off-by: Inki Dae --- packaging/inference-engine-mlapi.spec | 2 +- src/inference_engine_mlapi.cpp | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/packaging/inference-engine-mlapi.spec b/packaging/inference-engine-mlapi.spec index 97e668c..6be3452 100644 --- a/packaging/inference-engine-mlapi.spec +++ b/packaging/inference-engine-mlapi.spec @@ -1,6 +1,6 @@ Name: inference-engine-mlapi Summary: ML Single API backend of NNStreamer for MediaVision -Version: 0.3.2 +Version: 0.4.0 Release: 0 Group: Multimedia/Libraries License: Apache-2.0 diff --git a/src/inference_engine_mlapi.cpp b/src/inference_engine_mlapi.cpp index e119e57..c079ee3 100644 --- a/src/inference_engine_mlapi.cpp +++ b/src/inference_engine_mlapi.cpp @@ -233,6 +233,10 @@ namespace MLAPIImpl LOGI("SNPE tensor filter will be used."); return std::make_tuple(ML_NNFW_TYPE_SNPE, ML_NNFW_HW_ANY); + case INFERENCE_BACKEND_NNTRAINER: + LOGI("NNTRAINER tensor filter will be used."); + return std::make_tuple(ML_NNFW_TYPE_NNTR_INF, ML_NNFW_HW_ANY); + default: LOGE("Invalid plugin type."); throw std::invalid_argument("invalid tensor type."); @@ -268,6 +272,8 @@ namespace MLAPIImpl case INFERENCE_BACKEND_TFLITE: /* fall through */ case INFERENCE_BACKEND_SNPE: + /* fall through */ + case INFERENCE_BACKEND_NNTRAINER: if (!IsFileReadable(model_paths[0])) throw std::runtime_error("invalid path"); return model_paths[0]; @@ -326,7 +332,7 @@ namespace MLAPIImpl int err = ml_single_open_full(&mSingle, model_str.c_str(), in_info, out_info, nnfw_type, nnfw_hw, GetCustomProp()); if (err != ML_ERROR_NONE) { - 
LOGE("Failed to request ml_single_open(%d).", err); + LOGE("Failed to request ml_single_open_full(%d).", err); return INFERENCE_ENGINE_ERROR_INVALID_OPERATION; } -- 2.34.1