From: gichan
Date: Wed, 8 Jun 2022 01:07:49 +0000 (+0900)
Subject: [TEST] Modify query test
X-Git-Tag: accepted/tizen/unified/20220811.135951~26
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=dab25fbe99684bd1f95ba98aacb691bfd258bf95;p=platform%2Fupstream%2Fnnstreamer.git

[TEST] Modify query test

- Some parameters were removed; update the query tests accordingly.
- Apply the background test helper.

Signed-off-by: gichan
---

diff --git a/tests/nnstreamer_query/runTest.sh b/tests/nnstreamer_query/runTest.sh
index c86d4c9..564343f 100644
--- a/tests/nnstreamer_query/runTest.sh
+++ b/tests/nnstreamer_query/runTest.sh
@@ -42,164 +42,111 @@ if [[ ! $check_query ]]; then
     exit
 fi
 
-function waitformarker {
-    for i in $(seq 1 ${TIMEOUT_SEC})
-    do
-    if [ -f 'marker.log' ]; then
-        markersize=$(${StatCmd_GetSize} marker.log)
-        if [ $markersize -ge 48 ]; then
-            testResult 1 $1 "$2" 0 0
-            return 0
-        fi
+## @brief Execute file comparison test if the files exist
+function _callCompareTest() {
+    if [[ ! -f "$1" || ! -f "$2" ]]; then
+        echo "$1 or $2 does not exist."
+        return
     fi
-    sleep 1
-    done
-    testResult 0 $1 "$2" 0 0
-    exit
+
+    callCompareTest $1 $2 $3 "$4" $5 $6
 }
-WAIT4MARKER=" videotestsrc num-buffers=1 ! video/x-raw,format=RGB,height=4,width=4 ! filesink location=marker.log "
 
 # Run tensor query server as echo server with default address option.
-rm -f marker.log
-PORT1=`python3 ../get_available_port.py`
-PORT2=`python3 ../get_available_port.py`
-gst-launch-1.0 --gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc num-buffers=3 port=${PORT1} ! other/tensors,num_tensors=1,dimensions=3:300:300:1,types=uint8 ! tensor_query_serversink port=${PORT2} async=false ${WAIT4MARKER} &
-pid=$!
-waitformarker 1-1-T "query-server launching"
-gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc num-buffers=3 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tensor_converter ! tee name = t t. ! queue ! multifilesink location= raw1_%1d.log t. ! queue ! tensor_query_client src-port=${PORT1} sink-port=${PORT2} ! multifilesink location=result1_%1d.log" 1-2 0 0 $PERFORMANCE
-callCompareTest raw1_0.log result1_0.log 1-3 "Compare 1-3" 1 0
-callCompareTest raw1_1.log result1_1.log 1-4 "Compare 1-4" 1 0
-callCompareTest raw1_2.log result1_2.log 1-5 "Compare 1-5" 1 0
+PORT=`python3 ../get_available_port.py`
+gstTestBackground "--gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc port=${PORT} ! other/tensors,num_tensors=1,dimensions=3:300:300:1,types=uint8 ! tensor_query_serversink async=false" 1-1 0 0 30
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc is-live=true num-buffers=10 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tensor_converter ! tee name = t t. ! queue ! multifilesink location= raw1_%1d.log t. ! queue ! tensor_query_client port=${PORT} ! multifilesink location=result1_%1d.log sync=true " 1-2 0 0 $PERFORMANCE
+_callCompareTest raw1_0.log result1_0.log 1-3 "Compare 1-3" 1 0
+_callCompareTest raw1_1.log result1_1.log 1-4 "Compare 1-4" 1 0
+_callCompareTest raw1_2.log result1_2.log 1-5 "Compare 1-5" 1 0
 # Since the server operates in the background, wait for the server to stop before starting the next test.
 kill -9 $pid &> /dev/null
 wait $pid
-
 # Run tensor query server as echo server with given address option. (multi clients)
-rm marker.log
 PORT1=`python3 ../get_available_port.py`
-PORT2=`python3 ../get_available_port.py`
-gst-launch-1.0 --gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc host=127.0.0.1 port=${PORT1} num-buffers=6 ! other/tensors,num_tensors=1,dimensions=3:300:300:1,types=uint8 ! tensor_query_serversink async=false host=127.0.0.1 port=${PORT2} ${WAIT4MARKER} &
-pid=$!
-waitformarker 2-1-T "query-server launching"
-rm marker.log
-gst-launch-1.0 --gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc num-buffers=3 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tensor_converter ! tee name = t t. ! queue ! multifilesink location= raw2_%1d.log t. ! queue ! tensor_query_client src-host=127.0.0.1 src-port=${PORT1} sink-host=127.0.0.1 sink-port=${PORT2} ! multifilesink location=result2_%1d.log ${WAIT4MARKER} &
-pid2=$!
-waitformarker 2-2-T "query-client 1 launching"
-gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc num-buffers=3 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tensor_converter ! tee name = t t. ! queue ! multifilesink location= raw2_2_%1d.log t. ! queue ! tensor_query_client src-host=127.0.0.1 src-port=${PORT1} sink-host=127.0.0.1 sink-port=${PORT2} ! multifilesink location=result2_2_%1d.log" 2-3 0 0 $PERFORMANCE
-callCompareTest raw2_0.log result2_0.log 2-4 "Compare 2-4" 1 0
-callCompareTest raw2_1.log result2_1.log 2-5 "Compare 2-5" 1 0
-callCompareTest raw2_2.log result2_2.log 2-6 "Compare 2-6" 1 0
-callCompareTest raw2_2_0.log result2_2_0.log 2-7 "Compare 2-7" 1 0
-callCompareTest raw2_2_1.log result2_2_1.log 2-8 "Compare 2-8" 1 0
-callCompareTest raw2_2_2.log result2_2_2.log 2-9 "Compare 2-9" 1 0
+gstTestBackground "--gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc host=127.0.0.1 port=${PORT1} ! other/tensors,num_tensors=1,dimensions=3:300:300:1,types=uint8 ! tensor_query_serversink async=false" 2-1 0 0 30
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc is-live=true num-buffers=10 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tensor_converter ! tee name = t t. ! queue ! multifilesink location= raw2_%1d.log t. ! queue ! tensor_query_client host=127.0.0.1 port=${PORT1} ! multifilesink location=result2_%1d.log" 2-2 0 0 $PERFORMANCE
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc is-live=true num-buffers=10 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tensor_converter ! tee name = t t. ! queue ! multifilesink location= raw2_2_%1d.log t. ! queue ! tensor_query_client host=127.0.0.1 port=${PORT1} ! multifilesink location=result2_2_%1d.log" 2-3 0 0 $PERFORMANCE
+_callCompareTest raw2_0.log result2_0.log 2-4 "Compare 2-4" 1 0
+_callCompareTest raw2_1.log result2_1.log 2-5 "Compare 2-5" 1 0
+_callCompareTest raw2_2.log result2_2.log 2-6 "Compare 2-6" 1 0
+_callCompareTest raw2_2_0.log result2_2_0.log 2-7 "Compare 2-7" 1 0
+_callCompareTest raw2_2_1.log result2_2_1.log 2-8 "Compare 2-8" 1 0
+_callCompareTest raw2_2_2.log result2_2_2.log 2-9 "Compare 2-9" 1 0
 kill -9 $pid &> /dev/null
-kill -9 $pid2 &> /dev/null
 wait $pid
-wait $pid2
-
 # Test flexible tensors
-rm marker.log
-PORT1=`python3 ../get_available_port.py`
-PORT2=`python3 ../get_available_port.py`
-gst-launch-1.0 --gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc num-buffers=3 port=${PORT1} ! other/tensors,format=flexible ! tensor_query_serversink port=${PORT2} async=false ${WAIT4MARKER} &
-pid=$!
-waitformarker 3-1-T "query-server launching"
-gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc num-buffers=3 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tensor_converter ! other/tensors,format=flexible ! tee name = t t. ! queue ! multifilesink location= raw3_%1d.log t. ! queue ! tensor_query_client src-port=${PORT1} sink-port=${PORT2} ! multifilesink location=result3_%1d.log" 3-2 0 0 $PERFORMANCE
-callCompareTest raw3_0.log result3_0.log 3-3 "Compare 3-3" 1 0
-callCompareTest raw3_1.log result3_1.log 3-4 "Compare 3-4" 1 0
-callCompareTest raw3_2.log result3_2.log 3-5 "Compare 3-5" 1 0
+PORT=`python3 ../get_available_port.py`
+gstTestBackground "--gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc port=${PORT} ! other/tensors,format=flexible ! tensor_query_serversink async=false" 3-1 0 0 30
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc is-live=true num-buffers=10 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tensor_converter ! other/tensors,format=flexible ! tee name = t t. ! queue ! multifilesink location= raw3_%1d.log t. ! queue ! tensor_query_client port=${PORT} ! multifilesink location=result3_%1d.log" 3-2 0 0 $PERFORMANCE
+_callCompareTest raw3_0.log result3_0.log 3-3 "Compare 3-3" 1 0
+_callCompareTest raw3_1.log result3_1.log 3-4 "Compare 3-4" 1 0
+_callCompareTest raw3_2.log result3_2.log 3-5 "Compare 3-5" 1 0
 kill -9 $pid &> /dev/null
 wait $pid
 
 # Test multiple query server src and sink.
-rm marker.log
 PORT1=`python3 ../get_available_port.py`
 PORT2=`python3 ../get_available_port.py`
-PORT3=`python3 ../get_available_port.py`
-PORT4=`python3 ../get_available_port.py`
-gst-launch-1.0 --gst-plugin-path=${PATH_TO_PLUGIN} \
-    tensor_query_serversrc id=0 port=${PORT1} num-buffers=3 ! other/tensors,format=flexible ! tensor_query_serversink port=${PORT2} id=0 async=false \
-    tensor_query_serversrc id=1 port=${PORT3} num-buffers=3 ! other/tensors,format=flexible ! tensor_query_serversink id=1 port=${PORT4} async=false ${WAIT4MARKER} &
-pid=$!
-waitformarker 5-1-T "query-server launching"
+gstTestBackground "--gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc id=0 port=${PORT1} ! other/tensors,format=flexible ! tensor_query_serversink id=0 async=false \
+    tensor_query_serversrc id=1 port=${PORT2} ! other/tensors,format=flexible ! tensor_query_serversink id=1 async=false" 5-1 0 0 30
 # Client pipeline 5-2 is connected to server ID 0.
-rm marker.log
-gst-launch-1.0 --gst-plugin-path=${PATH_TO_PLUGIN} \
-    videotestsrc num-buffers=3 ! videoconvert ! videoscale ! video/x-raw,width=640,height=480,format=RGB ! \
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} \
+    videotestsrc is-live=true num-buffers=10 ! videoconvert ! videoscale ! video/x-raw,width=640,height=480,format=RGB ! \
    tensor_converter ! other/tensors,format=flexible ! tee name=t \
    t. ! queue ! multifilesink location= raw5_2_%1d.log \
-    t. ! queue ! tensor_query_client src-port=${PORT1} sink-port=${PORT2} ! multifilesink location=result5_2_%1d.log ${WAIT4MARKER} &
-pid2=$!
-waitformarker 5-2-T "query-client 1 launching"
+    t. ! queue ! tensor_query_client port=${PORT1} ! multifilesink location=result5_2_%1d.log" 5-2 0 0 $PERFORMANCE
 # Client pipeline 5-3 is connected to server ID 1.
 gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} \
-    videotestsrc pattern=13 num-buffers=3 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! \
+    videotestsrc is-live=true pattern=13 num-buffers=10 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! \
    tensor_converter ! other/tensors,format=flexible ! tee name=t \
    t. ! queue ! multifilesink location= raw5_3_%1d.log \
-    t. ! queue ! tensor_query_client src-port=${PORT3} sink-port=${PORT4} ! multifilesink location=result5_3_%1d.log" 5-3 0 0 $PERFORMANCE
-callCompareTest raw5_2_0.log result5_2_0.log 5-4 "Compare 5-4" 1 0
-callCompareTest raw5_2_1.log result5_2_1.log 5-5 "Compare 5-5" 1 0
-callCompareTest raw5_2_2.log result5_2_2.log 5-6 "Compare 5-6" 1 0
-callCompareTest raw5_3_0.log result5_3_0.log 5-7 "Compare 5-7" 1 0
-callCompareTest raw5_3_1.log result5_3_1.log 5-8 "Compare 5-8" 1 0
-callCompareTest raw5_3_2.log result5_3_2.log 5-9 "Compare 5-9" 1 0
+    t. ! queue ! tensor_query_client port=${PORT2} ! multifilesink location=result5_3_%1d.log" 5-3 0 0 $PERFORMANCE
+_callCompareTest raw5_2_0.log result5_2_0.log 5-4 "Compare 5-4" 1 0
+_callCompareTest raw5_2_1.log result5_2_1.log 5-5 "Compare 5-5" 1 0
+_callCompareTest raw5_2_2.log result5_2_2.log 5-6 "Compare 5-6" 1 0
+_callCompareTest raw5_3_0.log result5_3_0.log 5-7 "Compare 5-7" 1 0
+_callCompareTest raw5_3_1.log result5_3_1.log 5-8 "Compare 5-8" 1 0
+_callCompareTest raw5_3_2.log result5_3_2.log 5-9 "Compare 5-9" 1 0
 kill -9 $pid &> /dev/null
-kill -9 $pid2 &> /dev/null
 wait $pid
-wait $pid2
-
 # Server src cap: Video, Server sink cap: Video test
-rm marker.log
-PORT1=`python3 ../get_available_port.py`
-PORT2=`python3 ../get_available_port.py`
-gst-launch-1.0 --gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc port=${PORT1} num-buffers=3 ! video/x-raw,width=300,height=300,format=RGB,framerate=0/1 ! tensor_query_serversink port=${PORT2} async=false ${WAIT4MARKER} &
-pid=$!
-waitformarker 6-1-T "query-server launching"
-gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc num-buffers=3 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tee name = t t. ! queue ! multifilesink location= raw6_%1d.log t. ! queue ! tensor_query_client src-port=${PORT1} sink-port=${PORT2} ! multifilesink location=result6_%1d.log" 6-2 0 0 $PERFORMANCE
-callCompareTest raw6_0.log result6_0.log 6-3 "Compare 6-3" 1 0
-callCompareTest raw6_1.log result6_1.log 6-4 "Compare 6-4" 1 0
-callCompareTest raw6_2.log result6_2.log 6-5 "Compare 6-5" 1 0
+PORT=`python3 ../get_available_port.py`
+gstTestBackground "--gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc port=${PORT} ! video/x-raw,width=300,height=300,format=RGB,framerate=0/1 ! tensor_query_serversink async=false" 6-1 0 0 30
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc is-live=true num-buffers=10 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tee name = t t. ! queue ! multifilesink location= raw6_%1d.log t. ! queue ! tensor_query_client port=${PORT} ! multifilesink location=result6_%1d.log" 6-2 0 0 $PERFORMANCE
+_callCompareTest raw6_0.log result6_0.log 6-3 "Compare 6-3" 1 0
+_callCompareTest raw6_1.log result6_1.log 6-4 "Compare 6-4" 1 0
+_callCompareTest raw6_2.log result6_2.log 6-5 "Compare 6-5" 1 0
 kill -9 $pid &> /dev/null
 wait $pid
-
 # Server src cap: Video, Server sink cap: Tensor test
-rm marker.log
-PORT1=`python3 ../get_available_port.py`
-PORT2=`python3 ../get_available_port.py`
-gst-launch-1.0 --gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc port=${PORT1} num-buffers=3 ! video/x-raw,width=300,height=300,format=RGB,framerate=0/1 ! tensor_converter ! tensor_query_serversink port=${PORT2} async=false ${WAIT4MARKER} &
-pid=$!
-waitformarker 7-1-T "query-server launching"
-gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc num-buffers=3 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tee name = t t. ! queue ! multifilesink location= raw7_%1d.log t. ! queue ! tensor_query_client src-port=${PORT1} sink-port=${PORT2} ! multifilesink location=result7_%1d.log" 7-2 0 0 $PERFORMANCE
-callCompareTest raw7_0.log result7_0.log 7-3 "Compare 7-3" 1 0
-callCompareTest raw7_1.log result7_1.log 7-4 "Compare 7-4" 1 0
-callCompareTest raw7_2.log result7_2.log 7-5 "Compare 7-5" 1 0
+PORT=`python3 ../get_available_port.py`
+gstTestBackground "--gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc port=${PORT} ! video/x-raw,width=300,height=300,format=RGB,framerate=0/1 ! tensor_converter ! tensor_query_serversink async=false" 7-1 0 0 30
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc is-live=true num-buffers=10 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tee name = t t. ! queue ! multifilesink location= raw7_%1d.log t. ! queue ! tensor_query_client port=${PORT} ! multifilesink location=result7_%1d.log" 7-2 0 0 $PERFORMANCE
+_callCompareTest raw7_0.log result7_0.log 7-3 "Compare 7-3" 1 0
+_callCompareTest raw7_1.log result7_1.log 7-4 "Compare 7-4" 1 0
+_callCompareTest raw7_2.log result7_2.log 7-5 "Compare 7-5" 1 0
 kill -9 $pid &> /dev/null
 wait $pid
-
 # Server src cap: Tensor, Server sink cap: Video test
-rm marker.log
-PORT1=`python3 ../get_available_port.py`
-PORT2=`python3 ../get_available_port.py`
-gst-launch-1.0 --gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc port=${PORT1} num-buffers=3 ! other/tensors,num_tensors=1,dimensions=3:300:300:1,types=uint8,format=static,framerate=0/1 ! tensor_decoder mode=direct_video ! videoconvert ! tensor_query_serversink port=${PORT2} async=false ${WAIT4MARKER} &
-pid=$!
-waitformarker 8-1-T "query-server launching"
-gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc num-buffers=3 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tensor_converter ! tee name = t t. ! queue ! multifilesink location= raw8_%1d.log t. ! queue ! tensor_query_client src-port=${PORT1} sink-port=${PORT2} ! multifilesink location=result8_%1d.log" 8-2 0 0 $PERFORMANCE
-callCompareTest raw8_0.log result8_0.log 8-3 "Compare 8-3" 1 0
-callCompareTest raw8_1.log result8_1.log 8-4 "Compare 8-4" 1 0
-callCompareTest raw8_2.log result8_2.log 8-5 "Compare 8-5" 1 0
+PORT=`python3 ../get_available_port.py`
+gstTestBackground "--gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc port=${PORT} ! other/tensors,num_tensors=1,dimensions=3:300:300:1,types=uint8,format=static,framerate=0/1 ! tensor_decoder mode=direct_video ! videoconvert ! tensor_query_serversink async=false" 8-1 0 0 30
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc is-live=true num-buffers=10 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tensor_converter ! tee name = t t. ! queue ! multifilesink location= raw8_%1d.log t. ! queue ! tensor_query_client port=${PORT} ! multifilesink location=result8_%1d.log" 8-2 0 0 $PERFORMANCE
+_callCompareTest raw8_0.log result8_0.log 8-3 "Compare 8-3" 1 0
+_callCompareTest raw8_1.log result8_1.log 8-4 "Compare 8-4" 1 0
+_callCompareTest raw8_2.log result8_2.log 8-5 "Compare 8-5" 1 0
 kill -9 $pid &> /dev/null
 wait $pid
-
 # TODO enable query-hybrid test
 # The nnsquery library is not available yet.
 # After publishing the nnsquery pkg, enable the testcases below.
-rm *.log
+# rm *.log
 report
 exit
 
@@ -218,22 +165,22 @@ fi
 gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc num-buffers=3 operation=passthrough ! other/tensors,format=flexible ! tee name = t t. ! queue ! multifilesink location=server1_%1d.log t. ! queue ! tensor_query_serversink" 4-1 0 0 $PERFORMANCE $TIMEOUT_SEC &
 gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} tensor_query_serversrc num-buffers=3 operation=passthrough port=5000 ! other/tensors,format=flexible ! tee name = t t. ! queue ! multifilesink location=server2_%1d.log t. ! queue ! tensor_query_serversink port=5001" 4-2 0 0 $PERFORMANCE $TIMEOUT_SEC &
 sleep $SLEEPTIME_SEC
-gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc num-buffers=7 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tensor_converter ! other/tensors,format=flexible ! tee name = t t. ! queue ! multifilesink location= raw4_%1d.log t. ! queue ! tensor_query_client operation=passthrough ! multifilesink location=result4_%1d.log" 4-3 0 0 $PERFORMANCE
-callCompareTest raw4_0.log result4_0.log 4-4 "Compare 4-4" 1 0
-callCompareTest raw4_1.log result4_1.log 4-5 "Compare 4-5" 1 0
-callCompareTest raw4_2.log result4_2.log 4-6 "Compare 4-6" 1 0
+gstTest "--gst-plugin-path=${PATH_TO_PLUGIN} videotestsrc is-live=true num-buffers=7 ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB ! tensor_converter ! other/tensors,format=flexible ! tee name = t t. ! queue ! multifilesink location= raw4_%1d.log t. ! queue ! tensor_query_client operation=passthrough ! multifilesink location=result4_%1d.log" 4-3 0 0 $PERFORMANCE
+_callCompareTest raw4_0.log result4_0.log 4-4 "Compare 4-4" 1 0
+_callCompareTest raw4_1.log result4_1.log 4-5 "Compare 4-5" 1 0
+_callCompareTest raw4_2.log result4_2.log 4-6 "Compare 4-6" 1 0
 
 # Server 1 is stopped and the fourth buffer is lost.
-callCompareTest raw4_4.log result4_3.log 4-7 "Compare 4-7" 1 0
-callCompareTest raw4_5.log result4_4.log 4-8 "Compare 4-8" 1 0
-callCompareTest raw4_6.log result4_5.log 4-9 "Compare 4-9" 1 0
+_callCompareTest raw4_4.log result4_3.log 4-7 "Compare 4-7" 1 0
+_callCompareTest raw4_5.log result4_4.log 4-8 "Compare 4-8" 1 0
+_callCompareTest raw4_6.log result4_5.log 4-9 "Compare 4-9" 1 0
 
 # Compare the results of the server and the client.
-callCompareTest server1_0.log result4_0.log 4-10 "Compare 4-10" 1 0
-callCompareTest server1_1.log result4_1.log 4-11 "Compare 4-11" 1 0
-callCompareTest server1_2.log result4_2.log 4-12 "Compare 4-12" 1 0
-callCompareTest server2_0.log result4_3.log 4-13 "Compare 4-13" 1 0
-callCompareTest server2_1.log result4_4.log 4-14 "Compare 4-14" 1 0
-callCompareTest server2_2.log result4_5.log 4-15 "Compare 4-15" 1 0
+_callCompareTest server1_0.log result4_0.log 4-10 "Compare 4-10" 1 0
+_callCompareTest server1_1.log result4_1.log 4-11 "Compare 4-11" 1 0
+_callCompareTest server1_2.log result4_2.log 4-12 "Compare 4-12" 1 0
+_callCompareTest server2_0.log result4_3.log 4-13 "Compare 4-13" 1 0
+_callCompareTest server2_1.log result4_4.log 4-14 "Compare 4-14" 1 0
+_callCompareTest server2_2.log result4_5.log 4-15 "Compare 4-15" 1 0
 sleep $SLEEPTIME_SEC
 rm *.log
diff --git a/tests/nnstreamer_query/unittest_query.cc b/tests/nnstreamer_query/unittest_query.cc
index ff040a5..1c398d8 100644
--- a/tests/nnstreamer_query/unittest_query.cc
+++ b/tests/nnstreamer_query/unittest_query.cc
@@ -59,16 +59,15 @@ TEST (tensorQuery, serverProperties0)
   gint int_val;
   guint uint_val;
   gchar *str_val;
-  guint src_port, sink_port;
+  guint src_port;
 
   src_port = _get_available_port ();
-  sink_port = _get_available_port ();
 
   /* Create a nnstreamer pipeline */
   pipeline = g_strdup_printf (
       "tensor_query_serversrc host=127.0.0.1 name=serversrc port=%u ! "
       "other/tensors,num_tensors=1,dimensions=3:300:300:1,types=uint8 ! "
-      "tensor_query_serversink host=127.0.0.1 name=serversink port=%u", src_port, sink_port);
+      "tensor_query_serversink name=serversink", src_port);
   gstpipe = gst_parse_launch (pipeline, NULL);
   EXPECT_NE (gstpipe, nullptr);
 
@@ -113,12 +112,6 @@ TEST (tensorQuery, serverProperties0)
   /* Get properties of query server sink */
   srv_handle = gst_bin_get_by_name (GST_BIN (gstpipe), "serversink");
   EXPECT_NE (srv_handle, nullptr);
-  g_object_get (srv_handle, "host", &str_val, NULL);
-  EXPECT_STREQ ("127.0.0.1", str_val);
-  g_free (str_val);
-
-  g_object_get (srv_handle, "port", &uint_val, NULL);
-  EXPECT_EQ (sink_port, uint_val);
 
   g_object_get (srv_handle, "protocol", &int_val, NULL);
   EXPECT_EQ (0, int_val);
@@ -126,16 +119,6 @@
   g_object_get (srv_handle, "timeout", &uint_val, NULL);
   EXPECT_EQ (10U, uint_val);
 
-  /* Set properties of query server sink */
-  g_object_set (srv_handle, "host", "127.0.0.2", NULL);
-  g_object_get (srv_handle, "host", &str_val, NULL);
-  EXPECT_STREQ ("127.0.0.2", str_val);
-  g_free (str_val);
-
-  g_object_set (srv_handle, "port", 5000U, NULL);
-  g_object_get (srv_handle, "port", &uint_val, NULL);
-  EXPECT_EQ (5000U, uint_val);
-
   g_object_set (srv_handle, "protocol", 1, NULL);
   g_object_get (srv_handle, "protocol", &int_val, NULL);
   EXPECT_EQ (1, int_val);
@@ -151,47 +134,21 @@
 }
 
 /**
- * @brief Test for tensor_query_server with same port.
- */
-TEST (tensorQuery, serverProperties1_n)
-{
-  gchar *pipeline;
-  GstElement *gstpipe;
-  guint port;
-
-  port = _get_available_port ();
-
-  /* Create a nnstreamer pipeline */
-  pipeline = g_strdup_printf (
-      "tensor_query_serversrc name=serversrc port=%u ! "
-      "other/tensors,num_tensors=1,dimensions=3:300:300:1,types=uint8 ! "
-      "tensor_query_serversink port=%u sync=false async=false", port, port);
-  gstpipe = gst_parse_launch (pipeline, NULL);
-  EXPECT_NE (gstpipe, nullptr);
-
-  EXPECT_NE (setPipelineStateSync (gstpipe, GST_STATE_PLAYING, UNITTEST_STATECHANGE_TIMEOUT), 0);
-
-  gst_object_unref (gstpipe);
-  g_free (pipeline);
-}
-
-/**
  * @brief Test for tensor_query_server with invalid host name.
  */
 TEST (tensorQuery, serverProperties2_n)
 {
   gchar *pipeline;
   GstElement *gstpipe;
-  guint src_port, sink_port;
+  guint src_port;
 
   src_port = _get_available_port ();
-  sink_port = _get_available_port ();
 
   /* Create a nnstreamer pipeline */
   pipeline = g_strdup_printf (
       "tensor_query_serversrc name=serversrc host=f.a.i.l port=%u ! "
       "other/tensors,num_tensors=1,dimensions=3:300:300:1,types=uint8 ! "
-      "tensor_query_serversink port=%u sync=false async=false", src_port, sink_port);
+      "tensor_query_serversink sync=false async=false", src_port);
   gstpipe = gst_parse_launch (pipeline, NULL);
   EXPECT_NE (gstpipe, nullptr);
 
@@ -209,7 +166,7 @@ TEST (tensorQuery, clientProperties0)
   gchar *pipeline;
   GstElement *gstpipe;
   GstElement *client_handle;
-  TensorQueryProtocol protocol;
+  nns_edge_protocol_e protocol;
   guint uint_val;
   gchar *str_val;
   gboolean bool_val;
@@ -217,7 +174,7 @@
   /* Create a query client pipeline */
   pipeline = g_strdup_printf (
       "videotestsrc ! videoconvert ! videoscale ! video/x-raw,width=300,height=300,format=RGB !"
-      "tensor_converter ! tensor_query_client name=client protocol=TCP src-host=127.0.0.1 sink-host=127.0.0.1 ! tensor_sink");
+      "tensor_converter ! tensor_query_client name=client protocol=TCP ! tensor_sink");
   gstpipe = gst_parse_launch (pipeline, NULL);
   EXPECT_NE (gstpipe, nullptr);
@@ -225,43 +182,27 @@
   client_handle = gst_bin_get_by_name (GST_BIN (gstpipe), "client");
   EXPECT_NE (client_handle, nullptr);
 
-  g_object_get (client_handle, "src-host", &str_val, NULL);
-  EXPECT_STREQ ("127.0.0.1", str_val);
+  g_object_get (client_handle, "host", &str_val, NULL);
+  EXPECT_STREQ ("localhost", str_val);
   g_free (str_val);
 
-  g_object_get (client_handle, "src-port", &uint_val, NULL);
+  g_object_get (client_handle, "port", &uint_val, NULL);
   EXPECT_EQ (3001U, uint_val);
 
-  g_object_get (client_handle, "sink-host", &str_val, NULL);
-  EXPECT_STREQ ("127.0.0.1", str_val);
-  g_free (str_val);
-
-  g_object_get (client_handle, "sink-port", &uint_val, NULL);
-  EXPECT_EQ (3000U, uint_val);
-
   g_object_get (client_handle, "protocol", &protocol, NULL);
-  EXPECT_EQ (protocol, _TENSOR_QUERY_PROTOCOL_TCP);
+  EXPECT_EQ (protocol, NNS_EDGE_PROTOCOL_TCP);
 
   g_object_get (client_handle, "silent", &bool_val, NULL);
   EXPECT_EQ (TRUE, bool_val);
 
   /* Set properties of query client */
-  g_object_set (client_handle, "src-host", "127.0.0.2", NULL);
-  g_object_get (client_handle, "src-host", &str_val, NULL);
-  EXPECT_STREQ ("127.0.0.2", str_val);
-  g_free (str_val);
-
-  g_object_set (client_handle, "src-port", 5001U, NULL);
-  g_object_get (client_handle, "src-port", &uint_val, NULL);
-  EXPECT_EQ (5001U, uint_val);
-
-  g_object_set (client_handle, "sink-host", "127.0.0.2", NULL);
-  g_object_get (client_handle, "sink-host", &str_val, NULL);
+  g_object_set (client_handle, "host", "127.0.0.2", NULL);
+  g_object_get (client_handle, "host", &str_val, NULL);
   EXPECT_STREQ ("127.0.0.2", str_val);
   g_free (str_val);
 
-  g_object_set (client_handle, "sink-port", 5001U, NULL);
-  g_object_get (client_handle, "sink-port", &uint_val, NULL);
+  g_object_set (client_handle, "port", 5001U, NULL);
+  g_object_get (client_handle, "port", &uint_val, NULL);
   EXPECT_EQ (5001U, uint_val);
 
   g_object_set (client_handle, "silent", FALSE, NULL);
@@ -295,53 +236,6 @@
 }
 
 /**
- * @brief Test for nnstreamer_query_server_init
- */
-TEST (tensorQueryCommon, serverInit0)
-{
-  query_server_handle server_data = NULL;
-  guint port;
-
-  port = _get_available_port ();
-
-  server_data = nnstreamer_query_server_data_new ();
-  EXPECT_NE ((void *) NULL, server_data);
-
-  EXPECT_EQ (0, nnstreamer_query_server_init (server_data, _TENSOR_QUERY_PROTOCOL_TCP, "localhost", port, TRUE));
-  nnstreamer_query_server_data_free (server_data);
-}
-
-/**
- * @brief Test for nnstreamer_query_server_init with invalid parameter.
- */
-TEST (tensorQueryCommon, serverInit1_n)
-{
-  guint port;
-
-  port = _get_available_port ();
-
-  EXPECT_NE (0, nnstreamer_query_server_init (NULL, _TENSOR_QUERY_PROTOCOL_TCP, "localhost", port, TRUE));
-}
-
-/**
- * @brief Test for nnstreamer_query_server_init with invalid parameter.
- */
-TEST (tensorQueryCommon, serverInit2_n)
-{
-  query_server_handle server_data = NULL;
-  guint port;
-
-  port = _get_available_port ();
-
-  server_data = nnstreamer_query_server_data_new ();
-  EXPECT_NE ((void *) NULL, server_data);
-
-  EXPECT_NE (0, nnstreamer_query_server_init (server_data, _TENSOR_QUERY_PROTOCOL_END, "localhost", port, TRUE));
-
-  nnstreamer_query_server_data_free (server_data);
-}
-
-/**
  * @brief Main GTest
  */
 int