From: José Rivero
Date: Wed, 19 Apr 2017 00:59:00 +0000 (-0700)
Subject: Set default BenchView run type to local
X-Git-Tag: submit/tizen/20210909.063632~11030^2~7167^2~3
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=bf0cd36bdf6c9d776b56dcfd0f984a302771c256;p=platform%2Fupstream%2Fdotnet%2Fruntime.git

Set default BenchView run type to local

- This is the case when running on the dev boxes (outside automation)

Commit migrated from https://github.com/dotnet/coreclr/commit/76785ed6f2236cd6884ab22139916c874478ef46
---

diff --git a/src/coreclr/tests/scripts/run-xunit-perf.cmd b/src/coreclr/tests/scripts/run-xunit-perf.cmd
index 26f8c88..71511fa 100644
--- a/src/coreclr/tests/scripts/run-xunit-perf.cmd
+++ b/src/coreclr/tests/scripts/run-xunit-perf.cmd
@@ -7,6 +7,7 @@ setlocal
   set ERRORLEVEL=
+  set BENCHVIEW_RUN_TYPE=local
   set CORECLR_REPO=%CD%
   set TEST_FILE_EXT=exe
   set TEST_ARCH=x64
@@ -109,7 +110,7 @@ rem ****************************************************************************
     goto :parse_command_line_arguments
   )
   IF /I [%~1] == [-runtype] (
-    set RUN_TYPE=%~2
+    set BENCHVIEW_RUN_TYPE=%~2
     shift
     shift
     goto :parse_command_line_arguments
@@ -222,23 +223,25 @@ endlocal& exit /b %ERRORLEVEL%
 rem ****************************************************************************
 rem Generates BenchView's submission data and upload it
 rem ****************************************************************************
-  call :run_cmd py.exe "%BENCHVIEW_PATH%\submission.py" measurement.json ^
-    --build ..\build.json ^
-    --machine-data ..\machinedata.json ^
-    --metadata ..\submission-metadata.json ^
-    --group "CoreCLR" ^
-    --type "%RUN_TYPE%" ^
-    --config-name "%TEST_CONFIG%" ^
-    --config Configuration "%TEST_CONFIG%" ^
-    --config OS "Windows_NT" ^
-    --arch "%TEST_ARCHITECTURE%" ^
-    --machinepool "PerfSnake"
+setlocal
+  set LV_SUBMISSION_ARGS=
+  set LV_SUBMISSION_ARGS=%LV_SUBMISSION_ARGS% --build ..\build.json
+  set LV_SUBMISSION_ARGS=%LV_SUBMISSION_ARGS% --machine-data ..\machinedata.json
+  set LV_SUBMISSION_ARGS=%LV_SUBMISSION_ARGS% --metadata ..\submission-metadata.json
+  set LV_SUBMISSION_ARGS=%LV_SUBMISSION_ARGS% --group "CoreCLR"
+  set LV_SUBMISSION_ARGS=%LV_SUBMISSION_ARGS% --type "%BENCHVIEW_RUN_TYPE%"
+  set LV_SUBMISSION_ARGS=%LV_SUBMISSION_ARGS% --config-name "%TEST_CONFIG%"
+  set LV_SUBMISSION_ARGS=%LV_SUBMISSION_ARGS% --config Configuration "%TEST_CONFIG%"
+  set LV_SUBMISSION_ARGS=%LV_SUBMISSION_ARGS% --config OS "Windows_NT"
+  set LV_SUBMISSION_ARGS=%LV_SUBMISSION_ARGS% --arch "%TEST_ARCHITECTURE%"
+  set LV_SUBMISSION_ARGS=%LV_SUBMISSION_ARGS% --machinepool "PerfSnake"
+  call :run_cmd py.exe "%BENCHVIEW_PATH%\submission.py" measurement.json %LV_SUBMISSION_ARGS%
   IF %ERRORLEVEL% NEQ 0 (
     call :print_error Creating BenchView submission data failed.
     exit /b 1
   )
 
-  REM FIXME: call :run_cmd py.exe "%BENCHVIEW_PATH%\upload.py" submission.json --container coreclr
+  call :run_cmd py.exe "%BENCHVIEW_PATH%\upload.py" submission.json --container coreclr
   IF %ERRORLEVEL% NEQ 0 (
     call :print_error Uploading to BenchView failed.
     exit /b 1
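
For context, a minimal sketch of how the changed default interacts with the existing -runtype switch. The invocations below are illustrative only: the working directory and any other arguments the script may require (for example a BenchView tools path) are assumptions, and the "rolling" value is just an example of an explicit run type, not taken from this commit.

    rem Dev box (outside automation): omitting -runtype now uses the new default, BENCHVIEW_RUN_TYPE=local.
    run-xunit-perf.cmd

    rem Automation can still pass an explicit run type; the -runtype handler overwrites the default.
    run-xunit-perf.cmd -runtype rolling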