return INFERENCE_ENGINE_ERROR_INTERNAL;
}
- // If a backend is nnstreamer then set a tensor filter plugin type.
+ // If the backend is NNFW or the ML Single API of NNStreamer then set the tensor filter plugin type.
if (backend_type == INFERENCE_BACKEND_NNFW || backend_type == INFERENCE_BACKEND_MLAPI) {
int ret = mBackendHandle->SetPluginType(backend_type);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
...
[INFERENCE_BACKEND_OPENCV] = "opencv",
[INFERENCE_BACKEND_TFLITE] = "tflite",
[INFERENCE_BACKEND_ARMNN] = "armnn",
- [INFERENCE_BACKEND_MLAPI] = "nnstreamer",
- [INFERENCE_BACKEND_NNFW] = "nnstreamer"
+ [INFERENCE_BACKEND_MLAPI] = "mlapi",
+ [INFERENCE_BACKEND_NNFW] = "mlapi"
};
std::string backendLibName = "libinference-engine-" + backendNameTable[backend_type] + ".so";
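Below is a minimal, self-contained sketch of the pattern this patch establishes: INFERENCE_BACKEND_NNFW and INFERENCE_BACKEND_MLAPI both resolve to the single mlapi plugin library, and the concrete backend type is then forwarded to that plugin via SetPluginType() so it can select the matching tensor filter internally. The enum values, error codes, MockMlapiEngine, and LoadBackend() here are hypothetical stand-ins for illustration, not the project's actual definitions.

```cpp
#include <iostream>
#include <string>

// Hypothetical error codes and backend enum mirroring the names used above.
enum { INFERENCE_ENGINE_ERROR_NONE = 0, INFERENCE_ENGINE_ERROR_INTERNAL = -1 };

enum inference_backend_type_e {
	INFERENCE_BACKEND_OPENCV = 0,
	INFERENCE_BACKEND_TFLITE,
	INFERENCE_BACKEND_ARMNN,
	INFERENCE_BACKEND_MLAPI,
	INFERENCE_BACKEND_NNFW,
	INFERENCE_BACKEND_MAX
};

// Same shape as the patched table: NNFW and MLAPI both map to "mlapi",
// so they share one plugin binary.
static const std::string backendNameTable[INFERENCE_BACKEND_MAX] = {
	"opencv", "tflite", "armnn", "mlapi", "mlapi"
};

// Hypothetical stand-in for the loaded plugin handle; the real project
// resolves an engine object from the dlopen'ed shared library instead.
struct MockMlapiEngine {
	int SetPluginType(int type) {
		// The mlapi plugin uses the concrete backend type to choose
		// which tensor filter (e.g. NNFW) it drives internally.
		mPluginType = type;
		return INFERENCE_ENGINE_ERROR_NONE;
	}
	int mPluginType = -1;
};

// Hypothetical loader showing the two steps from the patch: resolve the
// library name from the table, then tell the shared mlapi plugin which
// logical backend it is serving.
int LoadBackend(int backend_type, MockMlapiEngine &engine) {
	if (backend_type < 0 || backend_type >= INFERENCE_BACKEND_MAX)
		return INFERENCE_ENGINE_ERROR_INTERNAL;

	std::string backendLibName =
			"libinference-engine-" + backendNameTable[backend_type] + ".so";
	std::cout << "would dlopen: " << backendLibName << "\n";

	// Only the shared mlapi plugin needs to know which backend it serves.
	if (backend_type == INFERENCE_BACKEND_NNFW ||
			backend_type == INFERENCE_BACKEND_MLAPI)
		return engine.SetPluginType(backend_type);

	return INFERENCE_ENGINE_ERROR_NONE;
}

int main() {
	MockMlapiEngine engine;
	LoadBackend(INFERENCE_BACKEND_NNFW, engine);   // -> libinference-engine-mlapi.so
	LoadBackend(INFERENCE_BACKEND_TFLITE, engine); // -> libinference-engine-tflite.so
	return 0;
}
```

The design point is that one plugin binary (libinference-engine-mlapi.so) can serve several logical backends, so the name table may map multiple enum entries to the same library while SetPluginType() disambiguates at run time.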