add SNPE tensor filter support 34/267034/1
author     Inki Dae <inki.dae@samsung.com>
           Wed, 24 Nov 2021 09:11:54 +0000 (18:11 +0900)
committer  Inki Dae <inki.dae@samsung.com>
           Wed, 24 Nov 2021 09:11:54 +0000 (18:11 +0900)
[Version] : 0.3.0-0
[Issue type] : new feature

Map INFERENCE_BACKEND_SNPE to ML_NNFW_TYPE_SNPE with ML_NNFW_HW_ANY so that
SNPE models can be loaded through NNStreamer's single-shot API, and bump the
package version to 0.3.0.

Change-Id: If3c8591938e35b0d84bf0c2c2f12bb0e50b84cd5
Signed-off-by: Inki Dae <inki.dae@samsung.com>
packaging/inference-engine-mlapi.spec
src/inference_engine_mlapi.cpp

diff --git a/packaging/inference-engine-mlapi.spec b/packaging/inference-engine-mlapi.spec
index 717393b..a9bce91 100644
--- a/packaging/inference-engine-mlapi.spec
+++ b/packaging/inference-engine-mlapi.spec
@@ -1,6 +1,6 @@
 Name:       inference-engine-mlapi
 Summary:    ML Single API backend of NNStreamer for MediaVision
-Version:    0.2.0
+Version:    0.3.0
 Release:    0
 Group:      Multimedia/Libraries
 License:    Apache-2.0
diff --git a/src/inference_engine_mlapi.cpp b/src/inference_engine_mlapi.cpp
index b5e823e..9a5c2f8 100644
--- a/src/inference_engine_mlapi.cpp
+++ b/src/inference_engine_mlapi.cpp
@@ -231,6 +231,7 @@ namespace MLAPIImpl
                case INFERENCE_BACKEND_ONE:
                case INFERENCE_BACKEND_ARMNN:
                case INFERENCE_BACKEND_TFLITE:
+               case INFERENCE_BACKEND_SNPE:
                        if (mPluginType == INFERENCE_BACKEND_ONE) {
                                nnfw_type = ML_NNFW_TYPE_NNFW;
 
@@ -258,6 +259,12 @@ namespace MLAPIImpl
                                LOGI("TFLITE tensor filter will be used.");
                        }
 
+                       if (mPluginType == INFERENCE_BACKEND_SNPE) {
+                               nnfw_type = ML_NNFW_TYPE_SNPE;
+                               nnfw_hw = ML_NNFW_HW_ANY;
+                               LOGI("SNPE tensor filter will be used.");
+                       }
+
                        if (access(model_str.c_str(), R_OK)) {
                                LOGE("model file path in [%s]", model_str.c_str());
                                return INFERENCE_ENGINE_ERROR_INVALID_PATH;
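
For context, a minimal sketch of how the nnfw_type/nnfw_hw pair selected above is
typically consumed through NNStreamer's single-shot C API, which this backend wraps.
This is not part of the patch; the function name, model path, and include path are
illustrative assumptions only.

#include <nnstreamer-single.h>  /* ML Single API; include path may differ per platform */

/* Illustrative only: open a single-shot handle for an SNPE model (.dlc)
 * with the same nnfw type/hw values the hunk above selects for
 * INFERENCE_BACKEND_SNPE. Input/output tensor info are passed as NULL so
 * they are derived from the model itself. */
static int open_snpe_model(const char *model_path, ml_single_h *single)
{
	return ml_single_open(single, model_path,
			      NULL /* input info */, NULL /* output info */,
			      ML_NNFW_TYPE_SNPE, ML_NNFW_HW_ANY);
}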