Add hand gesture model profiling with TFLite 2.3 GPU delegate  (sandbox/inki.dae/tflite2.3_gpu)
author     Inki Dae <inki.dae@samsung.com>
Tue, 3 Nov 2020 09:00:39 +0000 (18:00 +0900)
committer  Inki Dae <inki.dae@samsung.com>
Tue, 3 Nov 2020 09:02:50 +0000 (18:02 +0900)
Change-Id: I0d822d8e28d1c497c9371741397d1018161a09af
Signed-off-by: Inki Dae <inki.dae@samsung.com>
src/inference_engine_common_impl.cpp
start_profiler.sh
test/src/inference_engine_profiler.cpp
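
For reference, the test cases added below drive the TensorFlow Lite 2.3 GPU delegate through the tflite backend. The following is a minimal sketch of how that delegate is typically attached to an interpreter with the stock TFLite 2.3 C++ API; it is illustrative only and is not the backend code touched by this commit (the model path is reused from the new test cases, everything else is assumed).

    #include <memory>
    #include "tensorflow/lite/interpreter.h"
    #include "tensorflow/lite/kernels/register.h"
    #include "tensorflow/lite/model.h"
    #include "tensorflow/lite/delegates/gpu/delegate.h"

    int main()
    {
            // Model path reused from the new TEST_AIC_HAND_GESTURE_1 case below.
            auto model = tflite::FlatBufferModel::BuildFromFile(
                            "/usr/share/capi-media-vision/models/PE_1/tflite/posenet1_lite_224.tflite");
            if (!model)
                    return 1;

            std::unique_ptr<tflite::Interpreter> interpreter;
            tflite::ops::builtin::BuiltinOpResolver resolver;
            if (tflite::InterpreterBuilder(*model, resolver)(&interpreter) != kTfLiteOk)
                    return 1;

            // Create the GPU delegate with default options and hand the graph to it.
            TfLiteGpuDelegateOptionsV2 options = TfLiteGpuDelegateOptionsV2Default();
            TfLiteDelegate *delegate = TfLiteGpuDelegateV2Create(&options);
            if (interpreter->ModifyGraphWithDelegate(delegate) != kTfLiteOk)
                    return 1;       // a real backend would fall back to CPU here

            if (interpreter->AllocateTensors() != kTfLiteOk)
                    return 1;

            // ... fill the "input" tensor with /opt/usr/images/hand.bin data, then:
            interpreter->Invoke();

            // The interpreter must be destroyed before the delegate it uses.
            interpreter.reset();
            TfLiteGpuDelegateV2Delete(delegate);
            return 0;
    }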

diff --git a/src/inference_engine_common_impl.cpp b/src/inference_engine_common_impl.cpp
index 66a7e90..8003165 100644
@@ -296,6 +296,12 @@ out:
                        return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
                }
 
+               ret = mBackendHandle->SetTargetDevices(device_type);
+               if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+                       LOGE("Failed to set target device.");
+                       return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
+               }
+
                return INFERENCE_ENGINE_ERROR_NONE;
        }
 
@@ -422,6 +428,8 @@ out:
                        return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
                }
 
+               LOGI("target device type is %d", types);
+
                int ret = mBackendHandle->SetTargetDevices(types);
                if (ret != INFERENCE_ENGINE_ERROR_NONE)
                        LOGE("Fail to SetTargetDevice");
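
The hunks above make the common engine forward the requested device type to the bound backend via SetTargetDevices(). A hedged usage sketch from the caller's side follows; the namespace, header name, and surrounding setup are written from memory of this repository's test code and should be treated as assumptions, while SetTargetDevices() and INFERENCE_TARGET_GPU are taken from this diff.

    #include "inference_engine_common_impl.h"

    using namespace InferenceEngineInterface::Common;

    // Illustrative only: ask the bound backend (tflite here) to run on the GPU,
    // which is where the TFLite 2.3 GPU delegate path gets selected.
    int SelectGpuTarget(InferenceEngineCommon &engine)
    {
            int ret = engine.SetTargetDevices(INFERENCE_TARGET_GPU);
            if (ret != INFERENCE_ENGINE_ERROR_NONE) {
                    // The engine already logs the failure; a caller could retry
                    // with INFERENCE_TARGET_CPU as a fallback.
                    return ret;
            }
            return INFERENCE_ENGINE_ERROR_NONE;
    }
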
diff --git a/start_profiler.sh b/start_profiler.sh
index 5bb9253..7ae873a 100644
@@ -12,7 +12,7 @@ do
 done
 
 # Hand gesture model from AIC
-CNT=9
+CNT=11
 
 echo "Hand gesture model from AIC test case count = $CNT"
 
diff --git a/test/src/inference_engine_profiler.cpp b/test/src/inference_engine_profiler.cpp
index 7b34a37..f2da85e 100644
@@ -1374,6 +1374,18 @@ INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineHandGestureTest,
                                                  61, 36, 59, 36, 52, 39, 35, 32, 40, 34,
                                                  62, 39, 70, 40, 58, 41, 34, 42, 34, 41,
                                                  38, 38, 12 }),
+                               // TFLITE 2.3 GPU delegate
+                               ParamType_Infer("tflite", INFERENCE_TARGET_GPU, TEST_AIC_HAND_GESTURE_1, 100, INFERENCE_TENSOR_DATA_TYPE_FLOAT32,
+                                               { "/opt/usr/images/hand.bin" }, 224, 224, 3, { "input" }, { "mobilenetv2/boundingbox", "mobilenetv2/heatmap" },
+                                               { "/usr/share/capi-media-vision/models/PE_1/tflite/posenet1_lite_224.tflite" }, { 0 }),
+                               ParamType_Infer("tflite", INFERENCE_TARGET_GPU, TEST_AIC_HAND_GESTURE_2, 100, INFERENCE_TENSOR_DATA_TYPE_FLOAT32,
+                                               { "/opt/usr/images/hand.bin" }, 56, 56, 21, { "input" }, { "mobilenetv2/coord_refine", "mobilenetv2/gesture" },
+                                               { "/usr/share/capi-media-vision/models/PE_2/tflite/posenet2_lite_224.tflite" },
+                                               { 55, 39, 51, 40, 50, 42, 61, 43, 71, 39,
+                                                 78, 36, 82, 42, 82, 44, 83, 45, 35, 37,
+                                                 61, 36, 59, 36, 52, 39, 35, 32, 40, 34,
+                                                 62, 39, 70, 40, 58, 41, 34, 42, 34, 41,
+                                                 38, 38, 12 }),
                                // TFLITE via MLAPI
                                ParamType_Infer("mlapi", INFERENCE_TARGET_CPU, TEST_AIC_HAND_GESTURE_1, 100, INFERENCE_TENSOR_DATA_TYPE_FLOAT32,
                                                { "/opt/usr/images/hand.bin" }, 224, 224, 3, { "input" }, { "mobilenetv2/boundingbox", "mobilenetv2/heatmap" },