Print custom target
author Kwanghoon Son <k.son@samsung.com>
Wed, 8 Apr 2020 06:28:33 +0000 (15:28 +0900)
committer Inki Dae <inki.dae@samsung.com>
Tue, 14 Apr 2020 00:42:53 +0000 (09:42 +0900)
Myriad is a custom target. Print the target device name via a lookup table
(cpu/gpu/custom) instead of the hard-coded CPU-or-GPU ternary, so custom
targets are reported correctly in the test logs.

Change-Id: Ib7ddc231d3fe8147e1b3a5152b5cc784b9be90af
Signed-off-by: Kwanghoon Son <k.son@samsung.com>
test/src/inference_engine_test.cpp

index 416ec0d16375f085f870e59da0e2b9c4ef5d22ab..65267210394031c082e11925866309b8269771f2 100644 (file)
@@ -48,6 +48,12 @@ std::map<std::string, int> Model_Formats = {
        { "onnx", INFERENCE_MODEL_ONNX }
 };
 
+std::map<int, std::string> Target_Formats = {
+       { INFERENCE_TARGET_CPU, "cpu" },
+       { INFERENCE_TARGET_GPU, "gpu" },
+       { INFERENCE_TARGET_CUSTOM, "custom" }
+};
+
 enum {
        TEST_IMAGE_CLASSIFICATION = 0,
        TEST_OBJECT_DETECTION,
@@ -63,7 +69,7 @@ TEST_P(InferenceEngineCommonTest, Bind)
 
        std::tie(backend_name, target_devices) = GetParam();
 
-       std::cout <<"Bind test : backend = " << backend_name << ", target device = " << target_devices << "\n";
+       std::cout <<"Bind test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
 
        inference_engine_config config = {
                .backend_name = backend_name,
@@ -248,6 +254,7 @@ void CopyFileToMemory(const char *file_name, inference_engine_tensor_buffer &buf
        close(fd);
 }
 
+
 TEST_P(InferenceEngineCommonTest_2, Load)
 {
        std::string backend_name;
@@ -256,7 +263,7 @@ TEST_P(InferenceEngineCommonTest_2, Load)
 
        std::tie(backend_name, target_devices, model_paths) = GetParam();
 
-       std::cout <<"Load test : backend = " << backend_name << ", target device = " << (target_devices == INFERENCE_TARGET_CPU ? "CPU" : "GPU") << "\n";
+       std::cout <<"Load test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
 
        inference_engine_config config = {
                .backend_name = backend_name,
@@ -526,8 +533,7 @@ TEST_P(InferenceEngineTfliteTest, Inference)
                break;
        }
 
-       std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << (target_devices == INFERENCE_TARGET_CPU ? "CPU" : "GPU")  << "\n";
-
+       std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
        inference_engine_config config = {
                .backend_name = backend_name,
                .target_devices = target_devices
@@ -712,8 +718,7 @@ TEST_P(InferenceEngineCaffeTest, Inference)
                break;
        }
 
-       std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << (target_devices == INFERENCE_TARGET_CPU ? "CPU" : "GPU")  << "\n";
-
+       std::cout << test_name << " inference test : backend = " << backend_name << ", target device = " << Target_Formats[target_devices] << std::endl;
        inference_engine_config config = {
                .backend_name = backend_name,
                .target_devices = target_devices