From: Kwanghoon Son
Date: Wed, 8 Apr 2020 06:28:33 +0000 (+0900)
Subject: Print custom target
X-Git-Tag: submit/tizen/20200423.063253~12
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=1f5988de265604ac33bc2dd44838b9d29663d766;p=platform%2Fcore%2Fmultimedia%2Finference-engine-interface.git

Print custom target

Myriad is a custom target, so print the target device through a
target-to-string map instead of the raw enum value or a hard-coded
CPU/GPU ternary.

Change-Id: Ib7ddc231d3fe8147e1b3a5152b5cc784b9be90af
Signed-off-by: Kwanghoon Son
---

diff --git a/test/src/inference_engine_test.cpp b/test/src/inference_engine_test.cpp
index 416ec0d..6526721 100644
--- a/test/src/inference_engine_test.cpp
+++ b/test/src/inference_engine_test.cpp
@@ -48,6 +48,12 @@ std::map<std::string, inference_model_format_e> Model_Formats = {
 	{ "onnx", INFERENCE_MODEL_ONNX }
 };
 
+std::map<inference_target_type_e, std::string> Target_Formats = {
+	{ INFERENCE_TARGET_CPU, "cpu" },
+	{ INFERENCE_TARGET_GPU, "gpu" },
+	{ INFERENCE_TARGET_CUSTOM, "custom" }
+};
+
 enum {
 	TEST_IMAGE_CLASSIFICATION = 0,
 	TEST_OBJECT_DETECTION,
@@ -63,7 +69,7 @@ TEST_P(InferenceEngineCommonTest, Bind)
 
 	std::tie(backend_name, target_devices) = GetParam();
 
-	std::cout <<"Bind test : backend = " << backend_name << ", target device = " << target_devices << "\n";
+	std::cout <<"Bind test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
 
 	inference_engine_config config = {
 		.backend_name = backend_name,
@@ -248,6 +254,7 @@ void CopyFileToMemory(const char *file_name, inference_engine_tensor_buffer &buf
 	close(fd);
 }
 
+
 TEST_P(InferenceEngineCommonTest_2, Load)
 {
 	std::string backend_name;
@@ -256,7 +263,7 @@ TEST_P(InferenceEngineCommonTest_2, Load)
 
 	std::tie(backend_name, target_devices, model_paths) = GetParam();
 
-	std::cout <<"Load test : backend = " << backend_name << ", target device = " << (target_devices == INFERENCE_TARGET_CPU ? "CPU" : "GPU") << "\n";
+	std::cout <<"Load test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
 
 	inference_engine_config config = {
 		.backend_name = backend_name,
@@ -526,8 +533,7 @@ TEST_P(InferenceEngineTfliteTest, Inference)
 		break;
 	}
 
-	std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << (target_devices == INFERENCE_TARGET_CPU ? "CPU" : "GPU") << "\n";
-
+	std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
 	inference_engine_config config = {
 		.backend_name = backend_name,
 		.target_devices = target_devices
@@ -712,8 +718,7 @@ TEST_P(InferenceEngineCaffeTest, Inference)
 		break;
 	}
 
-	std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << (target_devices == INFERENCE_TARGET_CPU ? "CPU" : "GPU") << "\n";
-
+	std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
 	inference_engine_config config = {
 		.backend_name = backend_name,
 		.target_devices = target_devices
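
For readers outside the Tizen tree, here is a minimal, self-contained sketch of the lookup this commit introduces. The inference_target_type_e definition below is an assumption modeled on the inference-engine-interface public header; the bit-flag values are illustrative, and only the Target_Formats map mirrors the diff above.

#include <iostream>
#include <map>
#include <string>

// Assumed shape of the target enum; the real definition lives in the
// inference-engine-interface headers, and these flag values are illustrative.
enum inference_target_type_e {
	INFERENCE_TARGET_CPU = 1 << 0,
	INFERENCE_TARGET_GPU = 1 << 1,
	INFERENCE_TARGET_CUSTOM = 1 << 2,
};

// The same enum-to-string mapping the commit adds to the test file.
std::map<inference_target_type_e, std::string> Target_Formats = {
	{ INFERENCE_TARGET_CPU, "cpu" },
	{ INFERENCE_TARGET_GPU, "gpu" },
	{ INFERENCE_TARGET_CUSTOM, "custom" }
};

int main()
{
	// A Myriad device is exposed as INFERENCE_TARGET_CUSTOM, so the map
	// prints "custom" where the old CPU/GPU ternary had no matching label.
	std::cout << "target device = "
	          << Target_Formats[INFERENCE_TARGET_CUSTOM] << std::endl;
	return 0;
}

Built with g++ -std=c++11, this prints "target device = custom", which matches what the updated tests now log for a Myriad run.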