INFERENCE_BACKEND_OPENCV, /**< OpenCV */
INFERENCE_BACKEND_TFLITE, /**< TensorFlow-Lite */
INFERENCE_BACKEND_ARMNN, /**< ARMNN */
+ INFERENCE_BACKEND_VIVANTE, /**< Vivante */
INFERENCE_BACKEND_MAX /**< Backend MAX */
} inference_backend_type_e;
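The new value goes in just before INFERENCE_BACKEND_MAX, so the sentinel still counts the backends and the existing ordinals are untouched. A quick sketch of the pattern this preserves, assuming INFERENCE_BACKEND_OPENCV is the first valid value as the excerpt suggests (the loop body is illustrative only):

// INFERENCE_BACKEND_MAX stays last, so it doubles as the number of
// backends and as an array bound (the name table below relies on this).
for (int b = INFERENCE_BACKEND_OPENCV; b < INFERENCE_BACKEND_MAX; ++b) {
    // e.g., probe whether backend b is available on this device
}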
INFERENCE_MODEL_DARKNET, /**< Darknet. *.cfg config file is needed. */
INFERENCE_MODEL_DLDT, /**< DLDT. *.xml config file is needed. */
INFERENCE_MODEL_ONNX, /**< ONNX */
+ INFERENCE_MODEL_VIVANTE, /**< Vivante. A model-specific *.so library and a *.nb model file are needed. */
INFERENCE_MODEL_MAX /**< Model MAX */
} inference_model_format_e;
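Per the comments above, each format implies a different set of on-disk files, and Vivante is the only one that needs two artifacts: a compiled *.nb network binary plus the model-specific *.so generated for it. A minimal sketch of how a caller might encode those expectations; the helper name is hypothetical, the weight-file extensions are the conventional ones for each framework, and the function assumes the enum above is in scope:

#include <string>
#include <vector>

// Hypothetical helper: file extensions each model format is expected
// to provide, mirroring the enum comments above.
static std::vector<std::string> RequiredModelFiles(inference_model_format_e format)
{
    switch (format) {
    case INFERENCE_MODEL_DARKNET:
        return { ".weights", ".cfg" }; // weights plus *.cfg config file
    case INFERENCE_MODEL_DLDT:
        return { ".bin", ".xml" };     // weights plus *.xml config file
    case INFERENCE_MODEL_ONNX:
        return { ".onnx" };
    case INFERENCE_MODEL_VIVANTE:
        return { ".nb", ".so" };       // NBG model plus model-specific library
    default:
        return {};
    }
}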
std::string backendNameTable[INFERENCE_BACKEND_MAX] = {
[INFERENCE_BACKEND_OPENCV] = "opencv",
[INFERENCE_BACKEND_TFLITE] = "tflite",
- [INFERENCE_BACKEND_ARMNN] = "armnn"
+ [INFERENCE_BACKEND_ARMNN] = "armnn",
+ [INFERENCE_BACKEND_VIVANTE] = "nnstreamer"
};
std::string backendLibName = "libinference-engine-" + backendNameTable[backend_type] + ".so";
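With the table entry above, a Vivante request resolves to "nnstreamer", i.e. the NNStreamer plugin services the Vivante NPU, and the composed name becomes libinference-engine-nnstreamer.so. A minimal sketch of the dlopen-based plugin loading this naming convention points at, not the project's actual loader:

#include <dlfcn.h>
#include <iostream>
#include <string>

int main()
{
    // For INFERENCE_BACKEND_VIVANTE the table yields "nnstreamer".
    std::string backendLibName = "libinference-engine-nnstreamer.so";

    // Resolve the backend plugin at runtime.
    void *handle = dlopen(backendLibName.c_str(), RTLD_NOW);
    if (handle == nullptr) {
        std::cerr << "Failed to load " << backendLibName
                  << ": " << dlerror() << std::endl;
        return 1;
    }

    // ... look up the engine factory symbol and create the backend ...

    dlclose(handle);
    return 0;
}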