int &target_devices) -> int {
inference_engine_config config = { backend_name, 0, target_devices };
+ // backend_type is valid only when backend_name is "mlapi".
+ if (!backend_name.compare("mlapi")) {
+ if (!(target_devices & INFERENCE_TARGET_CUSTOM))
+ config.backend_type = INFERENCE_BACKEND_ONE;
+ else
+ config.backend_type = INFERENCE_BACKEND_MLAPI;
+ }
+
int ret = engine->BindBackend(&config);
if (ret != INFERENCE_ENGINE_ERROR_NONE)
return ret;
// TFLITE.
ParamType_One("tflite"),
// OPENCV
- ParamType_One("opencv")
+ ParamType_One("opencv"),
+ // ML Single API for NNStreamer.
+ ParamType_One("mlapi")
/* TODO */
));
ParamType_Two("armnn", INFERENCE_TARGET_CPU),
// TFLITE.
ParamType_Two("tflite", INFERENCE_TARGET_CPU),
- // OPENCV,
- ParamType_Two("opencv", INFERENCE_TARGET_CPU)
+ // OPENCV.
+ ParamType_Two("opencv", INFERENCE_TARGET_CPU),
+ // ML Single API for NNStreamer.
+ ParamType_Two("mlapi", INFERENCE_TARGET_CPU)
/* TODO */
));
ParamType_Three(
"opencv", INFERENCE_TARGET_CPU,
{ "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.caffemodel",
- "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.prototxt" })
+ "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.prototxt" }),
+ // ML Single API for NNStreamer.
+ ParamType_Three(
+ "mlapi", INFERENCE_TARGET_CPU,
+ { "/usr/share/capi-media-vision/models/IC_Q/tflite/ic_tflite_model.tflite" })
/* TODO */
));
224, 3, { "test_name" }),
// OPENCV.
ParamType_Six("opencv", INFERENCE_TENSOR_DATA_TYPE_FLOAT32, 224,
+ 224, 3, { "test_name" }),
+ // ML Single API for NNStreamer.
+ ParamType_Six("mlapi", INFERENCE_TENSOR_DATA_TYPE_UINT8, 224,
224, 3, { "test_name" })
/* TODO */
));
ParamType_Six("opencv", INFERENCE_TENSOR_DATA_TYPE_FLOAT32, 224,
224, 0, { "test_name" }),
ParamType_Six("opencv", INFERENCE_TENSOR_DATA_TYPE_FLOAT32, 224,
+ 224, 3, { "" }),
+ // ML Single API for NNStreamer.
+ ParamType_Six("mlapi", -1, 224, 224, 3, { "test_name" }),
+ ParamType_Six("mlapi", INFERENCE_TENSOR_DATA_TYPE_UINT8, 0,
+ 224, 3, { "test_name" }),
+ ParamType_Six("mlapi", INFERENCE_TENSOR_DATA_TYPE_UINT8, 224,
+ 0, 3, { "test_name" }),
+ ParamType_Six("mlapi", INFERENCE_TENSOR_DATA_TYPE_UINT8, 224,
+ 224, 0, { "test_name" }),
+ ParamType_Six("mlapi", INFERENCE_TENSOR_DATA_TYPE_UINT8, 224,
224, 3, { "" })
/* TODO */
));
{ "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.caffemodel",
"/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.prototxt" },
{ 281 }),
+ // ML Single API for NNStreamer.
+ ParamType_Many(
+ "mlapi", INFERENCE_ENGINE_PROFILER_OFF,
+ INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10,
+ INFERENCE_TENSOR_DATA_TYPE_UINT8,
+ { "/opt/usr/images/image_classification_q.bin" },
+ 224, 224, 3, { "input" }, { "MobilenetV1/Predictions/Reshape_1" },
+ { "/usr/share/capi-media-vision/models/IC_Q/tflite/ic_tflite_model.tflite" },
+ { 955 }),
// ARMNN.
ParamType_Many(
"armnn", INFERENCE_ENGINE_PROFILER_FILE,
{ "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.caffemodel",
"/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.prototxt" },
{ 281 }),
+ // ML Single API for NNStreamer.
+ ParamType_Many(
+ "mlapi", INFERENCE_ENGINE_PROFILER_FILE,
+ INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10,
+ INFERENCE_TENSOR_DATA_TYPE_UINT8,
+ { "/opt/usr/images/image_classification_q.bin" },
+ 224, 224, 3, { "input" }, { "MobilenetV1/Predictions/Reshape_1" },
+ { "/usr/share/capi-media-vision/models/IC_Q/tflite/ic_tflite_model.tflite" },
+ { 955 }),
// ARMNN.
ParamType_Many(
"armnn", INFERENCE_ENGINE_PROFILER_CONSOLE,
227, 227, 3, { "data" }, { "prob" },
{ "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.caffemodel",
"/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.prototxt" },
- { 281 })
+ { 281 }),
+ // ML Single API for NNStreamer.
+ ParamType_Many(
+ "mlapi", INFERENCE_ENGINE_PROFILER_CONSOLE,
+ INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10,
+ INFERENCE_TENSOR_DATA_TYPE_UINT8,
+ { "/opt/usr/images/image_classification_q.bin" },
+ 224, 224, 3, { "input" }, { "MobilenetV1/Predictions/Reshape_1" },
+ { "/usr/share/capi-media-vision/models/IC_Q/tflite/ic_tflite_model.tflite" },
+ { 955 })
/* TODO */
));
// TFLITE.
ParamType_One_Int(INFERENCE_BACKEND_TFLITE),
// OPENCV.
- ParamType_One_Int(INFERENCE_BACKEND_OPENCV)
+ ParamType_One_Int(INFERENCE_BACKEND_OPENCV),
+ // ML Single API for NNStreamer with On-device Neural Engine tensor filter.
+ ParamType_One_Int(INFERENCE_BACKEND_ONE),
+ // ML Single API for NNStreamer with Vivante NPU.
+ ParamType_One_Int(INFERENCE_BACKEND_MLAPI)
/* TODO */
));
// Wrong backend type.
ParamType_One_Int(-1)
/* TODO */
- ));
\ No newline at end of file
+ ));