MV_INFERENCE_BACKEND_OPENCV, /**< OpenCV */
MV_INFERENCE_BACKEND_TFLITE, /**< TensorFlow-Lite */
MV_INFERENCE_BACKEND_ARMNN, /**< ARMNN (Since 6.0) */
- MV_INFERENCE_BACKEND_VIVANTE, /**< Vivante (Since 6.0) */
+ MV_INFERENCE_BACKEND_MLAPI, /**< ML Single API of NNStreamer (Since 6.0) */
MV_INFERENCE_BACKEND_NNFW, /**< NNFW (Since 6.0) */
MV_INFERENCE_BACKEND_MAX /**< Backend MAX */
} mv_inference_backend_type_e;
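For context, an application can discover at runtime which of these backends the installed engine actually supports. A minimal sketch, assuming the public mv_inference_foreach_supported_engine() callback API from mv_inference.h:

#include <cstdio>
#include <mv_common.h>
#include <mv_inference.h>

/* Invoked once per backend name ("opencv", "tflite", "armnn", ...);
 * `supported` tells whether that engine is usable on this image. */
static bool print_engine_cb(const char *engine, bool supported, void *user_data)
{
    printf("backend %s: %s\n", engine, supported ? "supported" : "not supported");
    return true; /* keep iterating */
}

int main()
{
    mv_inference_h infer = nullptr;
    if (mv_inference_create(&infer) != MEDIA_VISION_ERROR_NONE)
        return 1;
    mv_inference_foreach_supported_engine(infer, print_engine_cb, nullptr);
    mv_inference_destroy(infer);
    return 0;
}

Internally, the framework registers each enum value against the name of the engine library that serves it: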
mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_OPENCV, std::make_pair("opencv", false)));
mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_TFLITE, std::make_pair("tflite", false)));
mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_ARMNN, std::make_pair("armnn", false)));
- mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_VIVANTE, std::make_pair("nnstreamer", false)));
+ mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_MLAPI, std::make_pair("nnstreamer", false)));
mSupportedInferenceBackend.insert(std::make_pair(MV_INFERENCE_BACKEND_NNFW, std::make_pair("nnstreamer", false)));
CheckSupportedInferenceBackend();
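The boolean in each map entry records whether the named backend is actually usable, and CheckSupportedInferenceBackend() is what flips it. A minimal sketch of that idea, assuming the map type implied above; IsBackendAvailable() and its fixed set are hypothetical stand-ins for the real availability probe:

#include <map>
#include <set>
#include <string>
#include <utility>

/* Hypothetical probe: a real implementation would consult the installed
 * engine configuration rather than a fixed set. */
static bool IsBackendAvailable(const std::string &name)
{
    static const std::set<std::string> installed = { "opencv", "tflite" };
    return installed.count(name) > 0;
}

static std::map<int, std::pair<std::string, bool>> mSupportedInferenceBackend;

static void CheckSupportedInferenceBackend()
{
    /* Mark each registered backend as supported only if its engine resolves. */
    for (auto &entry : mSupportedInferenceBackend)
        entry.second.second = IsBackendAvailable(entry.second.first);
}

With the table populated, an application selects the renamed backend through the engine configuration: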
mv_engine_config_set_int_attribute(handle,
MV_INFERENCE_BACKEND_TYPE,
- MV_INFERENCE_BACKEND_VIVANTE);
+ MV_INFERENCE_BACKEND_MLAPI);
mv_engine_config_set_int_attribute(handle,
MV_INFERENCE_TARGET_DEVICE_TYPE,
MV_INFERENCE_TARGET_DEVICE_CUSTOM); /* assumed: CUSTOM selects the custom-device (e.g., NPU) path */
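Putting the two attributes together, a caller would typically bind them to an inference handle before preparing the engine. A minimal end-to-end sketch, assuming the standard engine-config and inference calls from mv_common.h and mv_inference.h; the model path is a placeholder:

#include <cstdio>
#include <mv_common.h>
#include <mv_inference.h>

int main()
{
    mv_engine_config_h config = nullptr;
    mv_inference_h infer = nullptr;

    if (mv_create_engine_config(&config) != MEDIA_VISION_ERROR_NONE)
        return 1;

    /* Route inference through the ML Single API (NNStreamer) backend. */
    mv_engine_config_set_int_attribute(config, MV_INFERENCE_BACKEND_TYPE,
                                       MV_INFERENCE_BACKEND_MLAPI);
    mv_engine_config_set_int_attribute(config, MV_INFERENCE_TARGET_DEVICE_TYPE,
                                       MV_INFERENCE_TARGET_DEVICE_CUSTOM);
    /* Placeholder path; substitute a real model file. */
    mv_engine_config_set_string_attribute(config, MV_INFERENCE_MODEL_WEIGHT_FILE_PATH,
                                          "/path/to/model.nb");

    if (mv_inference_create(&infer) == MEDIA_VISION_ERROR_NONE &&
        mv_inference_configure(infer, config) == MEDIA_VISION_ERROR_NONE &&
        mv_inference_prepare(infer) == MEDIA_VISION_ERROR_NONE)
        printf("inference engine ready\n");

    if (infer)
        mv_inference_destroy(infer);
    mv_destroy_engine_config(config);
    return 0;
}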