// Default iteration count passed as the `iter` argument of the
// PARAM_TYPE_* test-parameter macros below (i.e. how many times each
// parameterized inference test case repeats its run).
#define INFERENCE_ITERATION 10
// Macros for tflite inference test cases.
+// parameter order : backend name, target device, inference model type,
+// test iteration count, input tensor type, input image path/s,
+// height, width, channel count, input layer names, output layer names,
+// model path/s, inference result
#define PARAM_TYPE_TFLITE_IC_INFER(backend, device, iter) \
backend, device, TEST_MODEL_IMAGE_CLASSIFICATION, \
iter, INFERENCE_TENSOR_DATA_TYPE_FLOAT32, \
// Macros for tflite model based cltuner test cases.
+// parameter order : CLTuner active flag, CLTuner update flag, CLTuner tuning mode,
+// backend name, target device, test iteration count
// Expands to the three CLTuner arguments (active flag, update flag, tuning
// mode) followed by the common tflite image-classification inference
// parameter set produced by PARAM_TYPE_TFLITE_IC_INFER.
#define PARAM_TYPE_TFLITE_IC_CLTUNER(active, update, mode, backend, device, iter) \
active, update, mode, \
PARAM_TYPE_TFLITE_IC_INFER(backend, device, iter)
INSTANTIATE_TEST_CASE_P(
Opensource, InferenceEngineTfliteTest,
testing::Values(
- // parameter order : backend name, target device, input image path/s, height, width, channel count, input layer names, output layer names, model path/s, inference result
+ // parameter order : backend name, target device, test iteration count.
// mobilenet based image classification test
// ARMNN.
ParamType_Infer(
PARAM_TYPE_TFLITE_AICHG_2_INFER("armnn", INFERENCE_TARGET_GPU, INFERENCE_ITERATION)),
/*********************************************************************************/
- // parameter order : backend name, target device, input image path/s, height, width, channel count, input layer names, output layer names, model path/s, inference result
+ // parameter order : backend name, target device, test iteration count.
// mobilenet based image classification test
// TFLITE.
ParamType_Infer(
Inhouse, InferenceEngineTfliteTest,
testing::Values(
/*********************************************************************************/
- // parameter order : backend name, target device, input image path/s, height, width, channel count, input layer names, output layer names, model path/s, inference result
+ // parameter order : backend name, target device, test iteration count.
// mobilenet based image classification test
// ONE via MLAPI.
ParamType_Infer(
PARAM_TYPE_TFLITE_AICHG_2_INFER("one", INFERENCE_TARGET_GPU, INFERENCE_ITERATION)),
/*********************************************************************************/
- // parameter order : backend name, target device, input image path/s, height, width, channel count, input layer names, output layer names, model path/s, inference result
+ // parameter order : backend name, target device, test iteration count.
// mobilenet based image classification test
// TFLITE via MLAPI.
ParamType_Infer(
INSTANTIATE_TEST_CASE_P(
Opensource, InferenceEngineTfliteCLTunerTest,
testing::Values(
- // parameter order : backend name, target device, CLTuner active flag, CLTuner update flag, CLTuner tuning mode, input image path/s, height, width, channel count, input layer names, output layer names, model path/s, inference result
+ // parameter order : CLTuner active flag, CLTuner update flag, CLTuner tuning mode, backend name, target device, test iteration count.
// mobilenet based image classification test
// ARMNN.
ParamType_CLTuner(