Change a backend type from VIVANTE to MLAPI 68/235168/1
authorInki Dae <inki.dae@samsung.com>
Tue, 2 Jun 2020 09:12:42 +0000 (18:12 +0900)
committerInki Dae <inki.dae@samsung.com>
Tue, 2 Jun 2020 09:12:42 +0000 (18:12 +0900)
Change-Id: Ia4210279f8efc4875f0c0db813c9f03d8411f7ff
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/inference_engine_type.h
src/inference_engine_common_impl.cpp

index 33c656e..a8f70d2 100644 (file)
@@ -38,7 +38,7 @@ typedef enum {
     INFERENCE_BACKEND_OPENCV,    /**< OpenCV */
     INFERENCE_BACKEND_TFLITE,    /**< TensorFlow-Lite */
     INFERENCE_BACKEND_ARMNN,     /**< ARMNN */
-    INFERENCE_BACKEND_VIVANTE,   /** < Vivante */
+    INFERENCE_BACKEND_MLAPI,     /**< ML Single API of NNStreamer */
     INFERENCE_BACKEND_NNFW,      /** < NNFW */
     INFERENCE_BACKEND_MAX        /**< Backend MAX */
 } inference_backend_type_e;
index ada716f..0befd03 100755 (executable)
@@ -197,7 +197,7 @@ int InferenceEngineCommon::InitBackendEngine(const std::string &backend_path, in
        }
 
        // If a backend is nnstreamer then set a tensor filter plugin type.
-       if (backend_type == INFERENCE_BACKEND_NNFW || backend_type == INFERENCE_BACKEND_VIVANTE) {
+       if (backend_type == INFERENCE_BACKEND_NNFW || backend_type == INFERENCE_BACKEND_MLAPI) {
                int ret = mBackendHandle->SetPluginType(backend_type);
                if (ret != INFERENCE_ENGINE_ERROR_NONE) {
                        LOGE("Failed to set a tensor filter plugin.");
@@ -266,7 +266,7 @@ int InferenceEngineCommon::BindBackend(int backend_type)
                [INFERENCE_BACKEND_OPENCV] = "opencv",
                [INFERENCE_BACKEND_TFLITE] = "tflite",
                [INFERENCE_BACKEND_ARMNN] = "armnn",
-               [INFERENCE_BACKEND_VIVANTE] = "nnstreamer",
+               [INFERENCE_BACKEND_MLAPI] = "nnstreamer",
                [INFERENCE_BACKEND_NNFW] = "nnstreamer"
        };