INFERENCE_BACKEND_MLAPI, /**< ML Single API of NNStreamer. */
INFERENCE_BACKEND_ONE, /**< On-device Neural Engine. */
INFERENCE_BACKEND_NNTRAINER, /**< NNTrainer. */
+ INFERENCE_BACKEND_SNPE, /**< SNPE (Qualcomm Snapdragon Neural Processing Engine). */
INFERENCE_BACKEND_MAX /**< Backend MAX */
} inference_backend_type_e;
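For context, a caller selects the new backend through the common engine config. A minimal sketch, assuming the inference_engine_config fields (backend_name, backend_type, target_devices) declared by this interface; not part of the patch:

    // Sketch: request the SNPE backend. Field names are assumed
    // from inference_engine_common.h; error handling omitted.
    inference_engine_config config;
    config.backend_name = "snpe";                  // resolved via BackendTable below
    config.backend_type = INFERENCE_BACKEND_SNPE;
    config.target_devices = INFERENCE_TARGET_CPU;  // or GPU/CUSTOM where supported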
INFERENCE_MODEL_ONNX, /**< ONNX */
INFERENCE_MODEL_VIVANTE, /**< Vivante. A model-specific .so library and an .nb model file are needed. */
INFERENCE_MODEL_NNTRAINER, /**< NNTrainer. Only a *.ini file is used. */
+ INFERENCE_MODEL_SNPE, /**< SNPE. Only a *.dlc file is used. */
INFERENCE_MODEL_MAX /**< Model format MAX */
} inference_model_format_e;
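The format is typically derived from the model file extension; a hypothetical helper (not in this patch) shows how the new *.dlc case would slot in next to the existing single-file formats:

    // Hypothetical sketch: map a model path to inference_model_format_e.
    // Only the two single-file formats from this enum are shown.
    #include <string>
    #include <cstring>

    static inference_model_format_e GetModelFormat(const std::string &path)
    {
        auto ends_with = [&](const char *ext) {
            size_t n = strlen(ext);
            return path.size() >= n &&
                   path.compare(path.size() - n, n, ext) == 0;
        };
        if (ends_with(".dlc"))
            return INFERENCE_MODEL_SNPE;      // SNPE container file
        if (ends_with(".ini"))
            return INFERENCE_MODEL_NNTRAINER; // NNTrainer description file
        return INFERENCE_MODEL_MAX;           // unknown format
    }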
Name: inference-engine-interface
Summary: Interface of inference engines
-Version: 0.1.4
+Version: 0.2.0
Release: 0
Group: Multimedia/Framework
License: Apache-2.0
device_type == INFERENCE_TARGET_CUSTOM ||
backend_type == INFERENCE_BACKEND_ONE ||
backend_type == INFERENCE_BACKEND_NNTRAINER ||
+ backend_type == INFERENCE_BACKEND_SNPE ||
(backend_type == INFERENCE_BACKEND_TFLITE && sApiFwForTFLITE == INFERENCE_BACKEND_MLAPI) ||
(backend_type == INFERENCE_BACKEND_ARMNN && sApiFwForARMNN == INFERENCE_BACKEND_MLAPI))
return 1;
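This hunk extends the check that decides whether a backend is routed through the ML Single API: like ONE and NNTrainer, SNPE is served via MLAPI, so the predicate now returns 1 for it. Folded into its enclosing function, a sketch (the function name and signature are assumptions based on the fragment, not verbatim source):

    // Sketch of the enclosing predicate; names assumed.
    static int UseMLAPI(const int backend_type, const int device_type)
    {
        if (device_type == INFERENCE_TARGET_CUSTOM ||
            backend_type == INFERENCE_BACKEND_ONE ||
            backend_type == INFERENCE_BACKEND_NNTRAINER ||
            backend_type == INFERENCE_BACKEND_SNPE ||
            (backend_type == INFERENCE_BACKEND_TFLITE &&
             sApiFwForTFLITE == INFERENCE_BACKEND_MLAPI) ||
            (backend_type == INFERENCE_BACKEND_ARMNN &&
             sApiFwForARMNN == INFERENCE_BACKEND_MLAPI))
            return 1;
        return 0;
    }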
BackendTable.insert(std::make_pair("mlapi",INFERENCE_BACKEND_MLAPI));
BackendTable.insert(std::make_pair("one",INFERENCE_BACKEND_ONE));
BackendTable.insert(std::make_pair("nntrainer", INFERENCE_BACKEND_NNTRAINER));
+ BackendTable.insert(std::make_pair("snpe", INFERENCE_BACKEND_SNPE));
config->backend_type = BackendTable[config->backend_name];
}
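One defensive note on the lookup: std::map::operator[] default-inserts a value-initialized entry for a missing key, so an unrecognized backend_name would silently become backend type 0. A sketch of a stricter variant (the error constant is assumed from this project's inference_engine_error naming, not confirmed by this patch):

    // Sketch, not part of the patch: reject unknown backend names
    // instead of letting operator[] default-insert type 0.
    auto iter = BackendTable.find(config->backend_name);
    if (iter == BackendTable.end())
        return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER; // assumed error code
    config->backend_type = iter->second;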