From: Inki Dae Date: Thu, 14 May 2020 05:40:14 +0000 (+0900) Subject: test: Enable profiler for inference test X-Git-Tag: submit/tizen/20200602.011936^0 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=5f5dc3e5a37f1623008a8aa338fd6cc79850feab;p=platform%2Fcore%2Fmultimedia%2Finference-engine-interface.git test: Enable profiler for inference test This patch enables and disables the inference engine profiler according to a given inference test request with a given mode, storing profile data to a file or just printing it out on the console screen. Change-Id: I1528ecfb593d11a4686e4c388c01bb8dff9b6b97 Signed-off-by: Inki Dae --- diff --git a/test/src/inference_engine_tc.cpp b/test/src/inference_engine_tc.cpp index 71dabe4..2738c5c 100644 --- a/test/src/inference_engine_tc.cpp +++ b/test/src/inference_engine_tc.cpp @@ -28,11 +28,18 @@ #include "inference_engine_common_impl.h" #include "inference_engine_test_common.h" +enum { + INFERENCE_ENGINE_PROFILER_OFF = 0, /**< Do not profile inference engine. */ + INFERENCE_ENGINE_PROFILER_FILE, /**< Profile inference engine, and store the collected data to file. */ + INFERENCE_ENGINE_PROFILER_CONSOLE, /**< Profile inference engine, and print out the collected data on console screen. 
*/ + INFERENCE_ENGINE_PROFILER_MAX +}; + typedef std::tuple ParamType_One; typedef std::tuple ParamType_Two; typedef std::tuple> ParamType_Three; typedef std::tuple> ParamType_Six; -typedef std::tuple, int, int, int, std::vector, std::vector, std::vector, std::vector> ParamType_Many; +typedef std::tuple, int, int, int, std::vector, std::vector, std::vector, std::vector> ParamType_Many; typedef std::tuple ParamType_One_Int; class InferenceEngineTestCase_G1 : public testing::TestWithParam { }; @@ -461,6 +468,7 @@ TEST_P(InferenceEngineTestCase_G1, SetOutputLayer_N2) TEST_P(InferenceEngineTestCase_G6, Inference_P) { std::string backend_name; + int profiler; int target_devices; int test_type; int iteration; @@ -474,7 +482,7 @@ TEST_P(InferenceEngineTestCase_G6, Inference_P) std::vector model_paths; std::vector answers; - std::tie(backend_name, target_devices, test_type, iteration, tensor_type, image_paths, height, width, ch, input_layers, output_layers, model_paths, answers) = GetParam(); + std::tie(backend_name, profiler, target_devices, test_type, iteration, tensor_type, image_paths, height, width, ch, input_layers, output_layers, model_paths, answers) = GetParam(); if (iteration < 1) { iteration = 1; @@ -497,6 +505,19 @@ TEST_P(InferenceEngineTestCase_G6, Inference_P) return; } + if (profiler > INFERENCE_ENGINE_PROFILER_OFF && profiler < INFERENCE_ENGINE_PROFILER_MAX) { + int ret = engine->EnableProfiler(true); + ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE); + + if (profiler == INFERENCE_ENGINE_PROFILER_FILE) { + ret = engine->DumpProfileToFile("profile_data_" + backend_name + "_tflite_model.txt"); + ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE); + } else { + ret = engine->DumpProfileToConsole(); + ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE); + } + } + int ret = InferenceEngineInit_Two_Params(engine.get(), backend_name, target_devices); ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE); @@ -688,11 +709,23 @@ INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G6, // 
parameter order : backend name, target device, input image path/s, height, width, channel count, input layer names, output layer names, model path/s, inference result // mobilenet based image classification test // ARMNN. - ParamType_Many("armnn", INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10, INFERENCE_TENSOR_DATA_TYPE_FLOAT32, { "/opt/usr/images/image_classification.bin" }, 224, 224, 3, { "input_2" }, { "dense_3/Softmax" }, { "/usr/share/capi-media-vision/models/IC/tflite/ic_tflite_model.tflite" }, { 3 }), + ParamType_Many("armnn", INFERENCE_ENGINE_PROFILER_OFF, INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10, INFERENCE_TENSOR_DATA_TYPE_FLOAT32, { "/opt/usr/images/image_classification.bin" }, 224, 224, 3, { "input_2" }, { "dense_3/Softmax" }, { "/usr/share/capi-media-vision/models/IC/tflite/ic_tflite_model.tflite" }, { 3 }), + // TFLITE. + ParamType_Many("tflite", INFERENCE_ENGINE_PROFILER_OFF, INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10, INFERENCE_TENSOR_DATA_TYPE_FLOAT32, { "/opt/usr/images/image_classification.bin" }, 224, 224, 3, { "input_2" }, { "dense_3/Softmax" }, { "/usr/share/capi-media-vision/models/IC/tflite/ic_tflite_model.tflite" }, { 3 }), + // OPENCV. + ParamType_Many("opencv", INFERENCE_ENGINE_PROFILER_OFF, INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10, INFERENCE_TENSOR_DATA_TYPE_FLOAT32, { "/opt/usr/images/image_classification_caffe.bin" }, 227, 227, 3, { "data" }, { "prob" }, { "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.caffemodel", "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.prototxt" }, { 281 }), + // ARMNN. + ParamType_Many("armnn", INFERENCE_ENGINE_PROFILER_FILE, INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10, INFERENCE_TENSOR_DATA_TYPE_FLOAT32, { "/opt/usr/images/image_classification.bin" }, 224, 224, 3, { "input_2" }, { "dense_3/Softmax" }, { "/usr/share/capi-media-vision/models/IC/tflite/ic_tflite_model.tflite" }, { 3 }), + // TFLITE. 
+ ParamType_Many("tflite", INFERENCE_ENGINE_PROFILER_FILE, INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10, INFERENCE_TENSOR_DATA_TYPE_FLOAT32, { "/opt/usr/images/image_classification.bin" }, 224, 224, 3, { "input_2" }, { "dense_3/Softmax" }, { "/usr/share/capi-media-vision/models/IC/tflite/ic_tflite_model.tflite" }, { 3 }), + // OPENCV. + ParamType_Many("opencv", INFERENCE_ENGINE_PROFILER_FILE, INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10, INFERENCE_TENSOR_DATA_TYPE_FLOAT32, { "/opt/usr/images/image_classification_caffe.bin" }, 227, 227, 3, { "data" }, { "prob" }, { "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.caffemodel", "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.prototxt" }, { 281 }), + // ARMNN. + ParamType_Many("armnn", INFERENCE_ENGINE_PROFILER_CONSOLE, INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10, INFERENCE_TENSOR_DATA_TYPE_FLOAT32, { "/opt/usr/images/image_classification.bin" }, 224, 224, 3, { "input_2" }, { "dense_3/Softmax" }, { "/usr/share/capi-media-vision/models/IC/tflite/ic_tflite_model.tflite" }, { 3 }), // TFLITE. - ParamType_Many("tflite", INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10, INFERENCE_TENSOR_DATA_TYPE_FLOAT32, { "/opt/usr/images/image_classification.bin" }, 224, 224, 3, { "input_2" }, { "dense_3/Softmax" }, { "/usr/share/capi-media-vision/models/IC/tflite/ic_tflite_model.tflite" }, { 3 }), + ParamType_Many("tflite", INFERENCE_ENGINE_PROFILER_CONSOLE, INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10, INFERENCE_TENSOR_DATA_TYPE_FLOAT32, { "/opt/usr/images/image_classification.bin" }, 224, 224, 3, { "input_2" }, { "dense_3/Softmax" }, { "/usr/share/capi-media-vision/models/IC/tflite/ic_tflite_model.tflite" }, { 3 }), // OPENCV. 
- ParamType_Many("opencv", INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10, INFERENCE_TENSOR_DATA_TYPE_FLOAT32, { "/opt/usr/images/image_classification_caffe.bin" }, 227, 227, 3, { "data" }, { "prob" }, { "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.caffemodel", "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.prototxt" }, { 281 }) + ParamType_Many("opencv", INFERENCE_ENGINE_PROFILER_CONSOLE, INFERENCE_TARGET_CPU, TEST_IMAGE_CLASSIFICATION, 10, INFERENCE_TENSOR_DATA_TYPE_FLOAT32, { "/opt/usr/images/image_classification_caffe.bin" }, 227, 227, 3, { "data" }, { "prob" }, { "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.caffemodel", "/usr/share/capi-media-vision/models/IC/caffe/ic_caffe_model_squeezenet.prototxt" }, { 281 }) /* TODO */ ) );