add NNTRAINER backend support 37/268937/3
author Inki Dae <inki.dae@samsung.com>
Wed, 5 Jan 2022 04:13:58 +0000 (13:13 +0900)
committer Inki Dae <inki.dae@samsung.com>
Wed, 5 Jan 2022 05:58:47 +0000 (05:58 +0000)
[Version] : 0.4.0-0
[Issue type] : new feature

Added NNTRAINER backend support. NNTRAINER is a training engine
that also provides an inference feature for its internal models.

Change-Id: If20ccbf8b709f0af6ac6b71f53c5995b6ec05a4c
Signed-off-by: Inki Dae <inki.dae@samsung.com>
packaging/inference-engine-mlapi.spec
src/inference_engine_mlapi.cpp

index 97e668c..6be3452 100644 (file)
@@ -1,6 +1,6 @@
 Name:       inference-engine-mlapi
 Summary:    ML Single API backend of NNStreamer for MediaVision
-Version:    0.3.2
+Version:    0.4.0
 Release:    0
 Group:      Multimedia/Libraries
 License:    Apache-2.0
index e119e57..c079ee3 100644 (file)
@@ -233,6 +233,10 @@ namespace MLAPIImpl
                        LOGI("SNPE tensor filter will be used.");
                        return std::make_tuple(ML_NNFW_TYPE_SNPE, ML_NNFW_HW_ANY);
 
+               case INFERENCE_BACKEND_NNTRAINER:
+                       LOGI("NNTRAINER tensor filter will be used.");
+                       return std::make_tuple(ML_NNFW_TYPE_NNTR_INF, ML_NNFW_HW_ANY);
+
                default:
                        LOGE("Invalid plugin type.");
                        throw std::invalid_argument("invalid tensor type.");
@@ -268,6 +272,8 @@ namespace MLAPIImpl
                case INFERENCE_BACKEND_TFLITE:
                        /* fall through */
                case INFERENCE_BACKEND_SNPE:
+                       /* fall through */
+               case INFERENCE_BACKEND_NNTRAINER:
                        if (!IsFileReadable(model_paths[0]))
                                throw std::runtime_error("invalid path");
                        return model_paths[0];
@@ -326,7 +332,7 @@ namespace MLAPIImpl
                int err = ml_single_open_full(&mSingle, model_str.c_str(), in_info, out_info,
                                                                 nnfw_type, nnfw_hw, GetCustomProp());
                if (err != ML_ERROR_NONE) {
-                       LOGE("Failed to request ml_single_open(%d).", err);
+                       LOGE("Failed to request ml_single_open_full(%d).", err);
                        return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                }