Add performance test for scheduler and profiler (#5675)
authorДилшоджон Умронхонович Пошшоев/AI Tools Lab /SRR/Engineer/삼성전자 <d.poshshoev@samsung.com>
Fri, 19 Jul 2019 07:06:25 +0000 (16:06 +0900)
committer이한종/On-Device Lab(SR)/Engineer/삼성전자 <hanjoung.lee@samsung.com>
Fri, 19 Jul 2019 07:06:25 +0000 (16:06 +0900)
* Add performance test for scheduler and profiler

Add a script to test performance of scheduler and profiler.
Since it compiles and executes each model 9 times, it takes
a long time to finish.
Related issue: 5660

Signed-off-by: Dilshodzhon Poshshoev <d.poshshoev@samsung.com>
* Fix comment of PROFILING_RUN_CNT

Signed-off-by: Dilshodzhon Poshshoev <d.poshshoev@samsung.com>
* Move exec_time.json to log dir, not remove

Signed-off-by: Dilshodzhon Poshshoev <d.poshshoev@samsung.com>
* Add some more fixes

Signed-off-by: Dilshodzhon Poshshoev <d.poshshoev@samsung.com>
tests/scripts/common.sh
tests/scripts/run_benchmark.sh
tests/scripts/test_performance.sh [new file with mode: 0644]

index 12a35fd..1e7ab39 100755 (executable)
@@ -32,3 +32,31 @@ function switch_nnfw_kernel_env()
         fi
     done
 }
+
function get_result_of_benchmark_test()
{
    # Run one benchmark and print its mean execution time (ms) on stdout.
    # $1 - path to run_test.sh
    # $2 - benchmark driver binary
    # $3 - model name
    # $4 - file to capture the driver's stdout/stderr
    # Exits the whole script if the benchmark run fails.
    local RUN_TEST_SH=$1
    local DRIVER_BIN=$2
    local MODEL=$3
    local LOG_FILE=$4

    local RET=0
    "$RUN_TEST_SH" --driverbin="$DRIVER_BIN" "$MODEL" > "$LOG_FILE" 2>&1
    RET=$?
    if [[ $RET -ne 0 ]]; then
        echo "Testing $MODEL aborted... exit code: $RET"
        exit $RET
    fi

    # Extract the number from a line like "Mean: 12.3ms" -> "12.3".
    # Declare and assign separately so the pipeline's status is not masked.
    local RESULT
    RESULT=$(grep -E '^Mean:' "$LOG_FILE" | sed -e 's/ms//g' | awk '{print $2}')
    echo "$RESULT"
}
+
function print_result_of_benchmark_test()
{
    # Write a single "<name> <result>" line to RESULT_FILE (overwrites it).
    # $1 - test name
    # $2 - measured result
    # $3 - output file path
    local NAME=$1
    local RESULT=$2
    local RESULT_FILE=$3

    # printf instead of echo: safe for names starting with '-'; quoted
    # redirect target so paths with spaces work.
    printf '%s %s\n' "$NAME" "$RESULT" > "$RESULT_FILE"
}
index 6980475..e22b400 100755 (executable)
@@ -51,34 +51,6 @@ do
     shift
 done
 
-function get_result_of_benchmark_test()
-{
-    local RUN_TEST_SH=$1
-    local DRIVER_BIN=$2
-    local MODEL=$3
-    local LOG_FILE=$4
-
-    local RET=0
-    $RUN_TEST_SH --driverbin=$DRIVER_BIN $MODEL > $LOG_FILE 2>&1
-    RET=$?
-    if [[ $RET -ne 0 ]]; then
-        echo "Testing $MODEL aborted... exit code: $RET"
-        exit $RET
-    fi
-
-    local RESULT=`grep -E '^Mean:' $LOG_FILE | sed -e 's/ms//g' | awk '{print $2}'`
-    echo "$RESULT"
-}
-
-function print_result_of_benchmark_test()
-{
-    local NAME=$1
-    local RESULT=$2
-    local RESULT_FILE=$3
-
-    echo "$NAME $RESULT" > $RESULT_FILE
-}
-
 function run_benchmark_test()
 {
     local DRIVER_BIN=$BENCHMARK_DRIVER_BIN
diff --git a/tests/scripts/test_performance.sh b/tests/scripts/test_performance.sh
new file mode 100644 (file)
index 0000000..3ed7b45
--- /dev/null
@@ -0,0 +1,166 @@
#!/bin/bash

# Performance test for the scheduler and profiler: profiles every benchmark
# model, then compares scheduled executors against fixed-backend runs.

MY_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$MY_PATH/common.sh"

# Width of the dotted progress column produced by print_with_dots().
PRINT_WIDTH=45
# Number of backends the scheduler can distribute operations across.
BACKEND_CNT=3
# Run profiler BACKEND_CNT+1 times: on each run of the first BACKEND_CNT runs it will
#     collect metrics for one unmeasured backend. On the last run metrics for data transfer
PROFILING_RUN_CNT=$((BACKEND_CNT + 1))

TEST_DRIVER_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ARTIFACT_PATH="$TEST_DRIVER_DIR/../.."
BENCHMARK_DRIVER_BIN="$ARTIFACT_PATH/Product/out/bin/tflite_benchmark"
REPORT_DIR="$ARTIFACT_PATH/report"
RUN_TEST_SH="$ARTIFACT_PATH/tests/framework/run_test.sh"
BENCHMARK_MODEL_LIST="MODELS/inception_nonslim MODELS/inception_slim MODELS/mobilenet"

# Bail out early if the framework test runner is missing.
if [[ ! -e "$RUN_TEST_SH" ]]; then
    echo "Cannot find $RUN_TEST_SH"
    exit 1
fi


BENCHMARK_REPORT_DIR="$REPORT_DIR/benchmark"
BENCHMARK_MODELS_FILE="$BENCHMARK_REPORT_DIR/benchmark_models.txt"
+
function print_with_dots()
{
    # Print MSG left-aligned, padded with dots to PRINT_WIDTH columns plus a
    # trailing space; no newline, so a result can be appended on the same line.
    local MSG="$1"
    local pad padlength
    # Build the dot padding from PRINT_WIDTH itself (the old code hard-coded
    # 45 dots, which would silently desync if PRINT_WIDTH changed).
    pad=$(printf '%*s' "$PRINT_WIDTH" '')
    pad=${pad// /.}
    padlength=$((PRINT_WIDTH - ${#MSG}))
    if ((padlength < 0)); then
        padlength=0   # message longer than the column: print no dots
    fi
    printf '%s' "$MSG"
    printf '%*.*s ' 0 "$padlength" "$pad"
}
+
function run_without_sched()
{
    # Benchmark MODEL with a fixed EXECUTOR/BACKEND (scheduler disabled) and
    # print the mean time plus the scheduled run's relative speedup.
    # $1 - scheduled-run mean time in ms (integer)
    # $2 - directory for per-model logs
    # $3 - model name
    # $4 - executor name ("Linear" or "Parallel")
    # $5 - backend name
    local RESULT_SCH_INT=$1
    local REPORT_MODEL_DIR=$2
    local MODEL=$3
    local EXECUTOR=$4
    local BACKEND=$5

    LOG_FILE="$REPORT_MODEL_DIR/tflite_${EXECUTOR,,}_$BACKEND.txt"
    export OP_BACKEND_ALLOPS=$BACKEND
    export EXECUTOR=$EXECUTOR

    print_with_dots "$EXECUTOR $BACKEND without scheduler"

    RESULT=$(get_result_of_benchmark_test "$RUN_TEST_SH" "$BENCHMARK_DRIVER_BIN" "$MODEL" "$LOG_FILE")

    printf -v RESULT_INT '%d' "$RESULT" 2>/dev/null
    # Guard: if the benchmark printed nothing parsable, RESULT_INT is empty
    # or 0 and the percentage below would divide by zero.
    if [[ -z "$RESULT_INT" || "$RESULT_INT" -eq 0 ]]; then
        echo "$RESULT ms. (no speedup computed: unparsable result)"
        return
    fi
    PERCENTAGE=$((100 - RESULT_SCH_INT * 100 / RESULT_INT))
    echo "$RESULT ms. Scheduler is $PERCENTAGE% faster"
}
+
function run_benchmark_test()
{
    # Benchmark every model in BENCHMARK_MODEL_LIST in three stages:
    #   1. profiling runs (Dataflow executor) to collect per-backend metrics,
    #   2. a ParallelExecutor run with the scheduler using those metrics,
    #   3. Linear/Parallel runs pinned to a single backend, reporting the
    #      scheduler's relative speedup against each.
    # Logs, exec_time.json snapshots and dot graphs go under
    # $BENCHMARK_REPORT_DIR/scheduler_benchmark/<model>.
    local LOG_FILE=
    local RESULT=
    local REPORT_MODEL_DIR=
    # Hoisted loop-invariant model count (used for inter-model blank lines)
    local MODEL_CNT
    MODEL_CNT=$(echo $BENCHMARK_MODEL_LIST | wc -w)

    export COUNT=5
    echo "============================================"
    local i=0
    export USE_NNAPI=1
    export BACKENDS="acl_cl;acl_neon;cpu"
    # Remove metrics so that profiler can get metrics for operations
    # with input&output sizes the same as the model
    rm "exec_time.json" 2>/dev/null
    for MODEL in $BENCHMARK_MODEL_LIST; do

        echo "Benchmark test with $(basename "$BENCHMARK_DRIVER_BIN") & $MODEL"
        echo "$MODEL" >> "$BENCHMARK_MODELS_FILE"

        REPORT_MODEL_DIR=$BENCHMARK_REPORT_DIR/scheduler_benchmark/$MODEL
        mkdir -p "$REPORT_MODEL_DIR"

##################################################################################
        # Get metrics by running profiler
##################################################################################
        export USE_SCHEDULER=1
        export PROFILING_MODE=1
        export EXECUTOR="Dataflow"
        export NEURUN_LOG_ENABLE=1
        for ((j = 1; j <= PROFILING_RUN_CNT; j++)); do
            # Save the verbose log of each run
            LOG_FILE=$REPORT_MODEL_DIR/tflite_profiling_$j.txt

            print_with_dots "Profiling run #$j out of $PROFILING_RUN_CNT"

            $RUN_TEST_SH --driverbin="$BENCHMARK_DRIVER_BIN" "$MODEL" > "$LOG_FILE" 2>&1
            RET=$?
            if [[ $RET -ne 0 ]]; then
                # Bug fix: the original message had a stray "xX" after the
                # closing quote, which echo printed into the report.
                echo "Profiling $MODEL aborted in run#$j... exit code: $RET"
                exit $RET
            fi
            echo "finished"
            # Save the exec_time.json of each run
            cp "exec_time.json" "$REPORT_MODEL_DIR/exec_time_$j.json"
        done
        unset NEURUN_LOG_ENABLE


##################################################################################
        # Turn off profiling
##################################################################################
        export PROFILING_MODE=0

##################################################################################
        # Run ParallelExecutor with scheduler
##################################################################################
        LOG_FILE=$REPORT_MODEL_DIR/tflite_parallel_with_scheduler.txt
        export EXECUTOR="Parallel"
        export GRAPH_DOT_DUMP=1
        print_with_dots "Parallel with scheduler"

        RESULT=$(get_result_of_benchmark_test "$RUN_TEST_SH" "$BENCHMARK_DRIVER_BIN" "$MODEL" "$LOG_FILE")
        echo "$RESULT ms"

        printf -v RESULT_SCH_INT '%d' "$RESULT" 2>/dev/null

        # Remove metrics so that for next model in profiler can get metrics
        #   for operations with input&output sizes the same as the model
        mv "exec_time.json" "$REPORT_MODEL_DIR"
        # Save the dot graph
        mv "after_lower.dot" "$REPORT_MODEL_DIR"
        unset GRAPH_DOT_DUMP

##################################################################################
        # Turn off scheduler
##################################################################################
        export USE_SCHEDULER=0

        # Run LinearExecutor on acl_cl without scheduler
        run_without_sched $RESULT_SCH_INT "$REPORT_MODEL_DIR" "$MODEL" "Linear" "acl_cl"

        # Run LinearExecutor on acl_neon without scheduler
        run_without_sched $RESULT_SCH_INT "$REPORT_MODEL_DIR" "$MODEL" "Linear" "acl_neon"

        # Run ParallelExecutor on acl_cl without scheduler
        run_without_sched $RESULT_SCH_INT "$REPORT_MODEL_DIR" "$MODEL" "Parallel" "acl_cl"

        # Run ParallelExecutor on acl_neon without scheduler
        run_without_sched $RESULT_SCH_INT "$REPORT_MODEL_DIR" "$MODEL" "Parallel" "acl_neon"

        # Blank line between models, but not after the last one
        if [[ $i -ne $((MODEL_CNT - 1)) ]]; then
            echo ""
        fi
        i=$((i + 1))

        unset USE_SCHEDULER
        unset PROFILING_MODE
        unset EXECUTOR
        unset OP_BACKEND_ALLOPS
    done
    unset BACKENDS
    echo "============================================"
    unset COUNT
    unset USE_NNAPI

}
+
# Entry point: pad the suite's report with blank lines on both sides.
printf '\n'
run_benchmark_test
printf '\n'