Change MLAPI backend string to "mlapi" 72/235172/1
author	Inki Dae <inki.dae@samsung.com>
Tue, 2 Jun 2020 09:36:23 +0000 (18:36 +0900)
committer	Inki Dae <inki.dae@samsung.com>
Tue, 2 Jun 2020 09:36:23 +0000 (18:36 +0900)
Change-Id: I6914c71d00b59cadd3145bcd725129c827b1b6ce
Signed-off-by: Inki Dae <inki.dae@samsung.com>
src/inference_engine_common_impl.cpp

index 0befd03..a3e0d52 100755
@@ -196,7 +196,7 @@ int InferenceEngineCommon::InitBackendEngine(const std::string &backend_path, in
                return INFERENCE_ENGINE_ERROR_INTERNAL;
        }
 
-       // If a backend is nnstreamer then set a tensor filter plugin type.
+       // If the backend is the ML Single API of NNStreamer or NNFW, then set the tensor filter plugin type.
        if (backend_type == INFERENCE_BACKEND_NNFW || backend_type == INFERENCE_BACKEND_MLAPI) {
                int ret = mBackendHandle->SetPluginType(backend_type);
                if (ret != INFERENCE_ENGINE_ERROR_NONE) {
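
For context, the sketch below (not part of this patch or of the real backend sources) illustrates what the single "mlapi" backend can do with the plugin type forwarded through SetPluginType(): choose the framework driven by the NNStreamer single-shot (ML Single API) runtime. ml_single_open() and the ML_NNFW_*/ML_ERROR_NONE names are the actual ML Single API; the helper function, its NNFW-versus-TFLite mapping, and the header packaging are assumptions for illustration only.

// Minimal sketch: open a model through the ML Single API, picking the
// framework from the plugin type the common layer forwarded.
// The NNFW-vs-TFLite choice below is illustrative, not the backend's
// actual mapping.
#include <nnstreamer-single.h>   // ML Single API header; packaging may differ by platform
#include <cstdio>

static ml_single_h OpenWithPluginType(const char *model_path, bool plugin_is_nnfw)
{
	ml_single_h handle = nullptr;
	ml_nnfw_type_e fw = plugin_is_nnfw ? ML_NNFW_TYPE_NNFW
	                                   : ML_NNFW_TYPE_TENSORFLOW_LITE;

	int ret = ml_single_open(&handle, model_path, nullptr, nullptr, fw, ML_NNFW_HW_ANY);
	if (ret != ML_ERROR_NONE) {
		std::fprintf(stderr, "ml_single_open failed: %d\n", ret);
		return nullptr;
	}
	return handle;
}
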
@@ -266,8 +266,8 @@ int InferenceEngineCommon::BindBackend(int backend_type)
                [INFERENCE_BACKEND_OPENCV] = "opencv",
                [INFERENCE_BACKEND_TFLITE] = "tflite",
                [INFERENCE_BACKEND_ARMNN] = "armnn",
-               [INFERENCE_BACKEND_MLAPI] = "nnstreamer",
-               [INFERENCE_BACKEND_NNFW] = "nnstreamer"
+               [INFERENCE_BACKEND_MLAPI] = "mlapi",
+               [INFERENCE_BACKEND_NNFW] = "mlapi"
        };
 
     std::string backendLibName = "libinference-engine-" + backendNameTable[backend_type] + ".so";
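
The effect of the table change can be seen in isolation with the small standalone sketch below. The enum and function names are stand-ins for the real INFERENCE_BACKEND_* values and InferenceEngineCommon code, but the name-to-library mapping mirrors the patched table: both NNFW and MLAPI now resolve to libinference-engine-mlapi.so, and SetPluginType() afterwards tells that one backend which tensor filter to drive.

#include <iostream>
#include <map>
#include <string>

// Illustrative stand-ins for the INFERENCE_BACKEND_* values; the names and
// ordering here are not the real enum.
enum BackendType { BACKEND_OPENCV, BACKEND_TFLITE, BACKEND_ARMNN, BACKEND_MLAPI, BACKEND_NNFW };

// Both MLAPI and NNFW map to the same "mlapi" entry, as in the patched table.
static std::string BackendLibraryName(BackendType type)
{
	static const std::map<BackendType, std::string> table = {
		{ BACKEND_OPENCV, "opencv" },
		{ BACKEND_TFLITE, "tflite" },
		{ BACKEND_ARMNN,  "armnn"  },
		{ BACKEND_MLAPI,  "mlapi"  },
		{ BACKEND_NNFW,   "mlapi"  },
	};
	return "libinference-engine-" + table.at(type) + ".so";
}

int main()
{
	std::cout << BackendLibraryName(BACKEND_NNFW) << '\n';   // libinference-engine-mlapi.so
	std::cout << BackendLibraryName(BACKEND_MLAPI) << '\n';  // libinference-engine-mlapi.so
	return 0;
}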