Imported Upstream version 1.7.0
[platform/core/ml/nnfw.git] / tests / scripts / test-driver.sh
1 #!/bin/bash
2 #
3 # Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
8 #
9 #    http://www.apache.org/licenses/LICENSE-2.0
10 #
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
16
17 set -e
18 # NOTE: Supposed that this script would be executed with an artifact path.
19 #       The artifact path has tests/(test suite) and Product/
20 #       Reference this PR(https://github.sec.samsung.net/STAR/nnfw/pull/375).
21
# Print command-line help for this driver and all supported options.
function Usage()
{
    echo "Usage: $0 --artifactpath=.    # run all tests"
    echo "Usage: $0 --artifactpath=/home/dragon/nnfw --frameworktest --verification --benchmark_onert_op    # run fw test & verification and benchmark"
    echo ""
    echo "--artifactpath            - (default={test-driver.sh's path}/../../) it should contain tests/ and Product/"
    echo ""
    echo "Following options are needed when you want to tests of specific types. If you don't pass any one, unittest and verification will be run"
    echo "--unittest                - (default=on) run unit test"
    echo "--frameworktest           - (default=off) run framework test"
    echo "--verification            - (default=on) run verification"
    echo "--frameworktest_list_file - filepath of model list for test"
    echo ""
    echo "Following option is only needed when you want to test benchmark."
    echo "--benchmark_onert_op     - (default=off) run benchmark per operation on onert"
    echo ""
    echo "etc."
    echo "--framework_driverbin     - (default=../../Product/out/bin/tflite_run) runner for running framework tests"
    echo "--verification_driverbin  - (default=../../Product/out/bin/nnapi_test) runner for running verification tests"
    echo "--runtestsh               - (default=\$ARTIFACT_PATH/tests/scripts/framework/run_test.sh) run_test.sh with path where it is for framework test and verification"
    echo "--unittestdir             - (default=\$ARTIFACT_PATH/Product/out/unittest) directory that has unittest binaries for unit test"
    echo ""
    echo "--reportdir               - (default=\$ARTIFACT_PATH/report) directory to save report"
    echo ""
}
47
# Directory containing this script; sibling helpers (unittest.sh,
# test_framework.sh, benchmark_nnapi.sh, common.sh) are invoked relative to it.
# Use explicit ${BASH_SOURCE[0]} (robust under `set -u`, same value as before).
TEST_DRIVER_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Default artifact path: two levels up, expected to contain tests/ and Product/.
ARTIFACT_PATH="$TEST_DRIVER_DIR/../../"
FRAMEWORK_DRIVER_BIN=""
VERIFICATION_DRIVER_BIN=""
RUN_TEST_SH=""
UNIT_TEST_DIR=""
# ALLTEST_ON stays "true" only while no specific test type is requested;
# any type-selecting option below flips it to "false".
ALLTEST_ON="true"
UNITTEST_ON="false"
FRAMEWORKTEST_ON="false"
VERIFICATION_ON="false"
BENCHMARK_ONERT_OP_ON="false"
REPORT_DIR=""
60
# Parse command-line options. Any unrecognized argument is treated as the
# artifact path (historical behavior documented in Usage).
for i in "$@"
do
    case $i in
        -h|--help|help)
            Usage
            exit 1
            ;;
        --artifactpath=*)
            ARTIFACT_PATH=${i#*=}
            ;;
        --framework_driverbin=*)
            FRAMEWORK_DRIVER_BIN=${i#*=}
            ;;
        --verification_driverbin=*)
            VERIFICATION_DRIVER_BIN=${i#*=}
            ;;
        --runtestsh=*)
            RUN_TEST_SH=${i#*=}
            ;;
        --unittestdir=*)
            UNIT_TEST_DIR=${i#*=}
            ;;
        --unittest)
            ALLTEST_ON="false"
            UNITTEST_ON="true"
            ;;
        --frameworktest)
            ALLTEST_ON="false"
            FRAMEWORKTEST_ON="true"
            ;;
        --frameworktest_list_file=*)
            FRAMEWORKTEST_LIST_FILE=${i#*=}
            # Resolve relative paths against $PWD; keep absolute paths as-is.
            # (Previously $PWD was prepended unconditionally, which broke
            #  absolute paths passed by the user.)
            if [[ "$FRAMEWORKTEST_LIST_FILE" != /* ]]; then
                FRAMEWORKTEST_LIST_FILE=$PWD/$FRAMEWORKTEST_LIST_FILE
            fi
            if [ ! -e "$FRAMEWORKTEST_LIST_FILE" ]; then
                echo "Pass on with proper frameworktest_list_file"
                exit 1
            fi
            ;;
        --verification)
            ALLTEST_ON="false"
            VERIFICATION_ON="true"
            ;;
        --benchmark_onert_op)
            ALLTEST_ON="false"
            BENCHMARK_ONERT_OP_ON="true"
            ;;
        --reportdir=*)
            REPORT_DIR=${i#*=}
            ;;
        *)
            # Be careful that other params are handled as $ARTIFACT_PATH
            ARTIFACT_PATH="$i"
            ;;
    esac
done
# NOTE: the former `shift` inside this loop was a no-op ("$@" is expanded
# once before iteration) and has been removed; nothing reads positional
# parameters after this point.
116
# Canonicalize the artifact path and derive defaults from it.
# All expansions are quoted so paths containing spaces survive.
ARTIFACT_PATH="$(readlink -f "$ARTIFACT_PATH")"

if [ -z "$RUN_TEST_SH" ]; then
    RUN_TEST_SH="$ARTIFACT_PATH/tests/scripts/framework/run_test.sh"
fi

# run_test.sh is mandatory for framework test and verification; fail early.
if [ ! -e "$RUN_TEST_SH" ]; then
    echo "Cannot find $RUN_TEST_SH"
    exit 1
fi

if [ -z "$UNIT_TEST_DIR" ]; then
    UNIT_TEST_DIR="$ARTIFACT_PATH/Product/out/unittest"
fi

if [ -z "$REPORT_DIR" ]; then
    REPORT_DIR="$ARTIFACT_PATH/report"
fi

# Shared helper functions used by the sub-scripts invoked below.
source "$TEST_DRIVER_DIR/common.sh"
137
# Run unittest in each part such as Runtime.
# Runs by default (ALLTEST_ON) or when --unittest was given explicitly.
if [ "$ALLTEST_ON" == "true" ] || [ "$UNITTEST_ON" == "true" ]; then
    "$TEST_DRIVER_DIR/unittest.sh" \
        --reportdir="$REPORT_DIR" \
        --unittestdir="$UNIT_TEST_DIR"
fi
144
# Run tflite_run with various tflite models (opt-in via --frameworktest).
if [ "$FRAMEWORKTEST_ON" == "true" ]; then
    if [ -z "$FRAMEWORK_DRIVER_BIN" ]; then
        FRAMEWORK_DRIVER_BIN="$ARTIFACT_PATH/Product/out/bin/tflite_run"
    fi

    "$TEST_DRIVER_DIR/test_framework.sh" \
        --runtestsh="$RUN_TEST_SH" \
        --driverbin="$FRAMEWORK_DRIVER_BIN" \
        --reportdir="$REPORT_DIR" \
        --tapname=framework_test.tap \
        --logname=framework_test.log \
        --testname="Frameworktest" \
        --frameworktest_list_file="${FRAMEWORKTEST_LIST_FILE:-}"
fi
160
# Run nnapi_test with various tflite models.
# Runs by default (ALLTEST_ON) or when --verification was given explicitly.
if [ "$ALLTEST_ON" == "true" ] || [ "$VERIFICATION_ON" == "true" ]; then
    if [ -z "$VERIFICATION_DRIVER_BIN" ]; then
        VERIFICATION_DRIVER_BIN="$ARTIFACT_PATH/Product/out/bin/nnapi_test"
    fi

    # verification uses the same script as frameworktest does
    "$TEST_DRIVER_DIR/test_framework.sh" \
        --runtestsh="$RUN_TEST_SH" \
        --driverbin="$VERIFICATION_DRIVER_BIN" \
        --reportdir="$REPORT_DIR" \
        --tapname=verification_test.tap \
        --logname=verification_test.log \
        --testname="Verification" \
        --frameworktest_list_file="${FRAMEWORKTEST_LIST_FILE:-}"
fi
177
# Run per-operation benchmark on onert (opt-in via --benchmark_onert_op).
if [ "$BENCHMARK_ONERT_OP_ON" == "true" ]; then
    DRIVER_BIN="$ARTIFACT_PATH/Product/out/bin/tflite_run"

    "$TEST_DRIVER_DIR/benchmark_nnapi.sh" \
        --test_op \
        --runtestsh="$RUN_TEST_SH" \
        --driverbin="$DRIVER_BIN" \
        --reportdir="$REPORT_DIR/benchmark_op" \
        --modelfilepath="$ARTIFACT_PATH/tests/scripts/framework"
fi
188
# Make json file. Actually, this process is only needed on CI. That's why it is in test-driver.sh.
if [ "$BENCHMARK_ONERT_OP_ON" == "true" ]; then
    # functions to fill json with benchmark results
    source "$ARTIFACT_PATH/tests/scripts/print_to_json.sh"
    # NOTE(review): the previous inner `if` re-tested the exact condition the
    # outer `if` had just established, so its else-branch (benchmark_result.json)
    # was unreachable dead code; only the op report was ever emitted.
    print_to_json "$REPORT_DIR/benchmark_op" "$REPORT_DIR" "benchmark_op_result.json"
fi