INFERENCE_BACKEND_TFLITE, /**< TensorFlow-Lite */
INFERENCE_BACKEND_ARMNN, /**< ARMNN */
INFERENCE_BACKEND_MLAPI, /** < ML Single API of NNStreamer.*/
- INFERENCE_BACKEND_NNFW, /** < NNFW */
+ INFERENCE_BACKEND_ONE, /**< On-device Neural Engine. */
INFERENCE_BACKEND_MAX /**< Backend MAX */
} inference_backend_type_e;
return INFERENCE_ENGINE_ERROR_INTERNAL;
}
- // If a backend is ML Single API of NNStreamer or NNFW then set a tensor filter plugin type.
- if (backend_type == INFERENCE_BACKEND_NNFW ||
+ // If the backend is ML Single API of NNStreamer or ONE, set a tensor filter plugin type.
+ if (backend_type == INFERENCE_BACKEND_ONE ||
backend_type == INFERENCE_BACKEND_MLAPI) {
int ret = mBackendHandle->SetPrivateData(&backend_type);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
[INFERENCE_BACKEND_TFLITE] = "tflite",
[INFERENCE_BACKEND_ARMNN] = "armnn",
[INFERENCE_BACKEND_MLAPI] = "mlapi",
- [INFERENCE_BACKEND_NNFW] = "mlapi"
+ [INFERENCE_BACKEND_ONE] = "mlapi"
};
std::string backendLibName =