1 // Import the utility functionality.
3 import jobs.generation.*;
// Values injected by the .NET CI system at script-evaluation time.
def project = GithubProject
def branch = GithubBranchName
// Jenkins folder layout for the generated jobs: <project>/<branch>
def projectName = Utilities.getFolderName(project)
def projectFolder = projectName + '/' + Utilities.getFolderName(branch)
// Maps a CI OS name to its platform group ('Linux' or 'Windows_NT').
// Fails job generation with an assertion for an unknown OS, so typos in the
// OS lists below are caught when this script is evaluated.
// BUG FIX: the looked-up group was computed and validated but the visible
// definition never returned it (and was left unterminated); return it and
// close the method.
def static getOSGroup(def os) {
    def osGroupMap = ['Ubuntu14.04':'Linux',
        'Ubuntu16.04': 'Linux',
        'Windows_NT':'Windows_NT',
        'OpenSUSE13.2': 'Linux',
        'OpenSUSE42.1': 'Linux',
        'LinuxARMEmulator': 'Linux']
    def osGroup = osGroupMap.get(os, null)
    assert osGroup != null : "Could not find os group for ${os}"
    return osGroup
}
// Windows "PerfLab" micro-benchmark jobs.
// Generation matrix: (PR vs. rolling) x OS x (x64, x86) x (smoketest vs. full run).
[true, false].each { isPR ->
['Windows_NT'].each { os ->
['x64', 'x86'].each { arch ->
[true, false].each { isSmoketest ->
def architecture = arch
// Smoketest jobs get a distinct name so they can be triggered separately.
def jobName = isSmoketest ? "perf_perflab_${os}_${arch}_smoketest" : "perf_perflab_${os}_${arch}"
// NOTE(review): this x86-specific test-environment assignment appears
// unconditional in this view; presumably it is guarded by an x86 check
// not visible here -- confirm before relying on it for x64 runs.
testEnv = '-testEnv %WORKSPACE%\\tests\\x86\\ryujit_x86_testenv.cmd'
def newJob = job(Utilities.getFullJobName(project, jobName, isPR)) {
// Run on the dedicated Windows perf machines.
label('windows_clr_perf')
// Credential used to upload results to BenchView.
string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas')
// PR runs title the BenchView submission after the pull request
// (ghprbPullTitle is supplied by the GitHub PR trigger plugin).
stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that you will be used to build the full title of a run in Benchview. The final name will be of the form <branch> private BenchviewCommitName')
// Smoke-test runs: only 2 iterations, to finish as fast as possible.
stringParam('XUNIT_PERFORMANCE_MAX_ITERATION', '2', 'Sets the number of iterations to two. We want to do this so that we can run as fast as possible as this is just for smoke testing')
stringParam('XUNIT_PERFORMANCE_MAX_ITERATION_INNER_SPECIFIED', '2', 'Sets the number of iterations to two. We want to do this so that we can run as fast as possible as this is just for smoke testing')
// Full runs: 21 iterations, limiting upload volume while keeping a good
// sample. NOTE(review): the 2- and 21-iteration pairs are presumably on
// opposite sides of an isSmoketest branch not visible here -- confirm.
stringParam('XUNIT_PERFORMANCE_MAX_ITERATION', '21', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enought to get a good sample')
stringParam('XUNIT_PERFORMANCE_MAX_ITERATION_INNER_SPECIFIED', '21', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enought to get a good sample')
// Fetch nuget.exe and (re)install the BenchView JSON tooling into the workspace.
batchFile("powershell wget https://dist.nuget.org/win-x86-commandline/latest/nuget.exe -OutFile \"%WORKSPACE%\\nuget.exe\"")
batchFile("if exist \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\" rmdir /s /q \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\"")
batchFile("\"%WORKSPACE%\\nuget.exe\" install Microsoft.BenchView.JSONFormat -Source http://benchviewtestfeed.azurewebsites.net/nuget -OutputDirectory \"%WORKSPACE%\" -Prerelease -ExcludeVersion")
// Strip the leading "origin/" from the branch name (it is a problem for
// BenchView), then register the submission metadata and build info. This
// must all be one batch step: every batchFile() starts a fresh cmd.exe, so
// environment variables set in one step do not survive into the next.
batchFile("if \"%GIT_BRANCH:~0,7%\" == \"origin/\" (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH:origin/=%\") else (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH%\")\n" +
"set \"BENCHVIEWNAME=${benchViewName}\"\n" +
"set \"BENCHVIEWNAME=%BENCHVIEWNAME:\"=%\"\n" +
"py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\submission-metadata.py\" --name \"%BENCHVIEWNAME%\" --user \"dotnet-bot@microsoft.com\"\n" +
"py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\build.py\" git --branch %GIT_BRANCH_WITHOUT_ORIGIN% --type ${runType}")
// Record the machine's hardware/OS description for the submission.
batchFile("py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\machinedata.py\"")
// Build the product, then lay out the test drop without running tests.
batchFile("set __TestIntermediateDir=int&&build.cmd ${configuration} ${architecture}")
batchFile("tests\\runtest.cmd ${configuration} ${architecture} GenerateLayoutOnly")
// Common harness arguments. The stabilityPrefix launches the benchmark via
// START /B /WAIT /HIGH /AFFINITY 0x2 (high priority, pinned to CPU 1) to
// reduce measurement noise.
def runXUnitPerfCommonArgs = "-arch ${arch} -configuration ${configuration} -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} -stabilityPrefix \"START \"CORECLR_PERF_RUN\" /B /WAIT /HIGH /AFFINITY 0x2\""
// Run with just stopwatch: Profile=Off
batchFile("tests\\scripts\\run-xunit-perf.cmd ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library")
batchFile("tests\\scripts\\run-xunit-perf.cmd ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality")
// Run with the full set of counters enabled: Profile=On
batchFile("tests\\scripts\\run-xunit-perf.cmd ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi")
batchFile("tests\\scripts\\run-xunit-perf.cmd ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi")
// NOTE(review): the '-elevated' machine image presumably provides the admin
// rights needed for ETW/counter collection -- confirm.
Utilities.setMachineAffinity(newJob, "Windows_NT", '20170427-elevated')
// Archive harness results, ETW traces, logs, and the machine description.
def archiveSettings = new ArchivalSettings()
archiveSettings.addFiles('.\\bin\\sandbox\\Logs\\Perf-*.xml')
archiveSettings.addFiles('.\\bin\\sandbox\\Logs\\Perf-*.etl')
archiveSettings.addFiles('.\\bin\\sandbox\\Logs\\Perf-*.log')
archiveSettings.addFiles('machinedata.json')
Utilities.addArchival(newJob, archiveSettings)
Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}")
// PR flavor: GitHub status checks, triggered by comment.
// NOTE(review): the two setGithubContext calls are presumably on opposite
// sides of an isSmoketest branch not visible in this view -- confirm.
TriggerBuilder builder = TriggerBuilder.triggerOnPullRequest()
builder.setGithubContext("${os} ${arch} CoreCLR Perf Tests Correctness")
builder.setGithubContext("${os} ${arch} CoreCLR Perf Tests")
builder.triggerOnlyOnComment()
builder.setCustomTriggerPhrase("(?i).*test\\W+${os}\\W+${arch}\\W+perf.*")
builder.triggerForBranch(branch)
builder.emitTrigger(newJob)
// Set a push trigger (rolling flavor runs on every push).
TriggerBuilder builder = TriggerBuilder.triggerOnCommit()
builder.emitTrigger(newJob)
// Windows throughput (compile-time) perf jobs.
// Matrix: (PR vs. rolling) x OS x (x64, x86) x (full_opt, min_opt JIT levels).
[true, false].each { isPR ->
['Windows_NT'].each { os ->
['x64', 'x86'].each { arch ->
['full_opt', 'min_opt'].each { opt_level ->
def architecture = arch
def newJob = job(Utilities.getFullJobName(project, "perf_throughput_perflab_${os}_${arch}_${opt_level}", isPR)) {
// Run on the dedicated Windows perf machines.
label('windows_clr_perf')
// Credential used to upload results to BenchView.
string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas')
stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that will be used to build the full title of a run in Benchview.')
def configuration = 'Release'
def runType = isPR ? 'private' : 'rolling'
def benchViewName = isPR ? 'coreclr-throughput private %BenchviewCommitName%' : 'coreclr-throughput rolling %GIT_BRANCH_WITHOUT_ORIGIN% %GIT_COMMIT%'
// Remove any previous installs, then install the BenchView tooling and the
// throughput benchmark assemblies (nuget.exe is preinstalled at C:\Tools).
batchFile("if exist \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\" rmdir /s /q \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\"")
batchFile("if exist \"%WORKSPACE%\\Microsoft.BenchView.ThroughputBenchmarks.${architecture}.${os}\" rmdir /s /q \"%WORKSPACE%\\Microsoft.BenchView.ThroughputBenchmarks.${architecture}.${os}\"")
batchFile("C:\\Tools\\nuget.exe install Microsoft.BenchView.JSONFormat -Source http://benchviewtestfeed.azurewebsites.net/nuget -OutputDirectory \"%WORKSPACE%\" -Prerelease -ExcludeVersion")
batchFile("C:\\Tools\\nuget.exe install Microsoft.BenchView.ThroughputBenchmarks.${architecture}.${os} -Source https://dotnet.myget.org/F/dotnet-core -OutputDirectory \"%WORKSPACE%\" -Prerelease -ExcludeVersion")
// Strip the leading "origin/" from the branch name (it is a problem for
// BenchView), then register the submission metadata and build info. This
// must all be one batch step: every batchFile() starts a fresh cmd.exe, so
// environment variables set in one step do not survive into the next.
// BUG FIX: submission-metadata.py previously interpolated ${benchViewName}
// directly, bypassing the quote-stripped BENCHVIEWNAME variable computed
// just above (which breaks the command when a PR title contains a double
// quote). Use %BENCHVIEWNAME%, as the other job definitions in this file do.
batchFile("if \"%GIT_BRANCH:~0,7%\" == \"origin/\" (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH:origin/=%\") else (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH%\")\n" +
"set \"BENCHVIEWNAME=${benchViewName}\"\n" +
"set \"BENCHVIEWNAME=%BENCHVIEWNAME:\"=%\"\n" +
"py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\submission-metadata.py\" --name \"%BENCHVIEWNAME%\" --user \"dotnet-bot@microsoft.com\"\n" +
"py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\build.py\" git --branch %GIT_BRANCH_WITHOUT_ORIGIN% --type ${runType}")
// Record the machine's hardware/OS description for the submission.
batchFile("py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\machinedata.py\"")
// Build the product only (skiptests) and lay out the test drop.
batchFile("set __TestIntermediateDir=int&&build.cmd ${configuration} ${architecture} skiptests")
batchFile("tests\\runtest.cmd ${configuration} ${architecture} GenerateLayoutOnly")
// Measure compilation throughput over the downloaded benchmark assemblies at
// the requested optimization level, handing results to the BenchView tools.
batchFile("py -u tests\\scripts\\run-throughput-perf.py -arch ${arch} -os ${os} -configuration ${configuration} -opt_level ${opt_level} -clr_root \"%WORKSPACE%\" -assembly_root \"%WORKSPACE%\\Microsoft.BenchView.ThroughputBenchmarks.${architecture}.${os}\\lib\" -benchview_path \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" -run_type ${runType}")
// Archive the per-assembly throughput results.
def archiveSettings = new ArchivalSettings()
archiveSettings.addFiles('throughput-*.csv')
Utilities.addArchival(newJob, archiveSettings)
Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}")
// min_opt jobs add a discriminator to the trigger phrase so that a
// "test <os> <arch> min_opt(s) throughput" comment targets only this flavor.
// BUG FIX: the opt-level matrix declares 'min_opt', but this condition
// compared against 'min_opts' and so never fired, leaving the min_opt job
// triggerable by the plain full_opt phrase. The regex fragment now accepts
// either "min_opt" or "min_opts" in the comment.
if (opt_level == 'min_opt') {
    opts = '\\W+min_opts?'
}
// PR flavor: opt-in comment trigger ("test <os> <arch>[ min_opt] throughput").
TriggerBuilder builder = TriggerBuilder.triggerOnPullRequest()
builder.setGithubContext("${os} ${arch} ${opt_level} CoreCLR Throughput Perf Tests")
builder.triggerOnlyOnComment()
builder.setCustomTriggerPhrase("(?i).*test\\W+${os}\\W+${arch}${opts}\\W+throughput.*")
builder.triggerForBranch(branch)
builder.emitTrigger(newJob)
// Set a push trigger (rolling flavor runs on every push).
TriggerBuilder builder = TriggerBuilder.triggerOnCommit()
builder.emitTrigger(newJob)
// Builds the fully-qualified Jenkins job name for the Linux perf run on the
// given OS (PR or rolling flavor), e.g. "perf_Ubuntu14.04".
// FIX: the visible definition was left unterminated; close it.
def static getFullPerfJobName(def project, def os, def isPR) {
    return Utilities.getFullJobName(project, "perf_${os}", isPR)
}
// Linux perf legs: build once on RHEL7.2, then measure on the Ubuntu machines.
[true, false].each { isPR ->
def fullBuildJobName = Utilities.getFullJobName(project, 'perf_linux_build', isPR)
def architecture = 'x64'
def configuration = 'Release'
// Build has to happen on RHEL7.2 (that's where we produce the bits we ship)
['RHEL7.2'].each { os ->
def newBuildJob = job(fullBuildJobName) {
shell("./build.sh verbose ${architecture} ${configuration}")
Utilities.setMachineAffinity(newBuildJob, os, 'latest-or-auto')
Utilities.standardJobSetup(newBuildJob, project, isPR, "*/${branch}")
// Publish product binaries and native test libraries for the downstream
// perf jobs; the .nuget folders are excluded from the archive.
Utilities.addArchival(newBuildJob, "bin/Product/**,bin/obj/*/tests/**/*.dylib,bin/obj/*/tests/**/*.so", "bin/Product/**/.nuget/**")
// Actual perf testing on the following OSes
def perfOSList = ['Ubuntu14.04']
perfOSList.each { os ->
def newJob = job(getFullPerfJobName(project, os, isPR)) {
// Run on the dedicated Linux perf machines.
label('linux_clr_perf')
// Credential used to upload results to BenchView.
string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas')
stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that you will be used to build the full title of a run in Benchview. The final name will be of the form <branch> private BenchviewCommitName')
// Cap the maximum number of iterations to 21.
stringParam('XUNIT_PERFORMANCE_MAX_ITERATION', '21', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enought to get a good sample')
stringParam('XUNIT_PERFORMANCE_MAX_ITERATION_INNER_SPECIFIED', '21', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enought to get a good sample')
// Selects which perf_linux_build run to copy product bits from
// (the flow job below passes the upstream build number).
stringParam('PRODUCT_BUILD', '', 'Build number from which to copy down the CoreCLR Product binaries built for Linux')
def osGroup = getOSGroup(os)
def runType = isPR ? 'private' : 'rolling'
// Shell-style ($-prefixed) variables here, vs. %...% in the Windows jobs.
def benchViewName = isPR ? 'coreclr private \$BenchviewCommitName' : 'coreclr rolling \$GIT_BRANCH_WITHOUT_ORIGIN \$GIT_COMMIT'
// Prepare the perf machine and bootstrap the repo tooling.
shell("./tests/scripts/perf-prep.sh")
shell("./init-tools.sh")
// Pull the product bits produced by the RHEL7.2 build leg; PRODUCT_BUILD
// selects the exact upstream build.
copyArtifacts(fullBuildJobName) {
includePatterns("bin/**")
buildNumber('\${PRODUCT_BUILD}')
// Strip "origin/" from the branch name, then register the BenchView
// submission metadata and build info in a single shell step (environment
// variables do not survive across shell() steps).
// NOTE(review): the spaces inside the quoted --name argument end up in the
// submission title; presumably unintentional -- confirm upstream.
shell("GIT_BRANCH_WITHOUT_ORIGIN=\$(echo \$GIT_BRANCH | sed \"s/[^/]*\\/\\(.*\\)/\\1 /\")\n" +
"python3.5 \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools/submission-metadata.py\" --name \" ${benchViewName} \" --user \"dotnet-bot@microsoft.com\"\n" +
"python3.5 \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools/build.py\" git --branch \$GIT_BRANCH_WITHOUT_ORIGIN --type ${runType}")
// Run the xunit perf harness. taskset 0x2 + nice -10 pins the run to one
// CPU at raised priority to reduce noise; results upload to BenchView.
// NOTE(review): testRootDir points at the Windows_NT-named test layout --
// presumably the managed test drop always uses that naming; confirm.
shell("""./tests/scripts/run-xunit-perf.sh \\
--testRootDir=\"\${WORKSPACE}/bin/tests/Windows_NT.${architecture}.${configuration}\" \\
--testNativeBinDir=\"\${WORKSPACE}/bin/obj/${osGroup}.${architecture}.${configuration}/tests\" \\
--coreClrBinDir=\"\${WORKSPACE}/bin/Product/${osGroup}.${architecture}.${configuration}\" \\
--mscorlibDir=\"\${WORKSPACE}/bin/Product/${osGroup}.${architecture}.${configuration}\" \\
--coreFxBinDir=\"\${WORKSPACE}/corefx\" \\
--runType=\"${runType}\" \\
--benchViewOS=\"${os}\" \\
--generatebenchviewdata=\"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools\" \\
--stabilityPrefix=\"taskset 0x00000002 nice --adjustment=-10\" \\
--uploadToBenchview""")
// Archive perf logs/results plus the machine description.
def archiveSettings = new ArchivalSettings()
archiveSettings.addFiles('./bin/sandbox/Logs/Perf-*.log')
archiveSettings.addFiles('./bin/sandbox/Logs/Perf-*.xml')
archiveSettings.addFiles('machinedata.json')
Utilities.addArchival(newJob, archiveSettings)
Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}")
// For perf, we need to keep the run results longer.
// Enable the log rotator: keep artifacts 7 days, at most 25 builds.
artifactDaysToKeep(7)
artifactNumToKeep(25)
// Orchestration: a flow job builds on RHEL7.2 first, then launches one perf
// run per OS in perfOSList, forwarding the build number via PRODUCT_BUILD.
def flowJobPerfRunList = perfOSList.collect { os ->
"{ build(params + [PRODUCT_BUILD: b.build.number], '${getFullPerfJobName(project, os, isPR)}') }"
def newFlowJob = buildFlowJob(Utilities.getFullJobName(project, "perf_linux_flow", isPR, '')) {
stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that you will be used to build the full title of a run in Benchview. The final name will be of the form <branch> private BenchviewCommitName')
// First, build the bits on RHEL7.2
b = build(params, '${fullBuildJobName}')
// Then, run the perf tests
${flowJobPerfRunList.join(",\n ")}
Utilities.setMachineAffinity(newFlowJob, 'Windows_NT', 'latest-or-auto')
Utilities.standardJobSetup(newFlowJob, project, isPR, "*/${branch}")
// PR flavor: comment-triggered GitHub status check.
TriggerBuilder builder = TriggerBuilder.triggerOnPullRequest()
builder.setGithubContext("Linux Perf Test Flow")
builder.triggerOnlyOnComment()
builder.setCustomTriggerPhrase("(?i).*test\\W+linux\\W+perf\\W+flow.*")
builder.triggerForBranch(branch)
builder.emitTrigger(newFlowJob)
// Set a push trigger (rolling flavor runs on every push).
TriggerBuilder builder = TriggerBuilder.triggerOnCommit()
builder.emitTrigger(newFlowJob)
// Builds the fully-qualified Jenkins job name for the Linux throughput run on
// the given "<os>_<opt_level>" suffix, e.g. "perf_throughput_Ubuntu14.04_min_opt".
// FIX: the visible definition was left unterminated; close it.
def static getFullThroughputJobName(def project, def os, def isPR) {
    return Utilities.getFullJobName(project, "perf_throughput_${os}", isPR)
}
// Linux throughput legs: build once on RHEL7.2, measure on the Ubuntu machines.
[true, false].each { isPR ->
def fullBuildJobName = Utilities.getFullJobName(project, 'perf_throughput_linux_build', isPR)
def architecture = 'x64'
def configuration = 'Release'
// Build has to happen on RHEL7.2 (that's where we produce the bits we ship)
['RHEL7.2'].each { os ->
def newBuildJob = job(fullBuildJobName) {
shell("./build.sh verbose ${architecture} ${configuration}")
Utilities.setMachineAffinity(newBuildJob, os, 'latest-or-auto')
Utilities.standardJobSetup(newBuildJob, project, isPR, "*/${branch}")
// Only the product binaries are needed downstream (no test bits).
Utilities.addArchival(newBuildJob, "bin/Product/**")
// Actual perf testing on the following OSes
def throughputOSList = ['Ubuntu14.04']
def throughputOptLevelList = ['full_opt', 'min_opt']
// Pre-compute every "<os>_<opt_level>" combination; the flow job below
// launches one throughput run per entry.
def throughputOSOptLevelList = []
throughputOSList.each { os ->
throughputOptLevelList.each { opt_level ->
throughputOSOptLevelList.add("${os}_${opt_level}")
throughputOSList.each { os ->
throughputOptLevelList.each { opt_level ->
def newJob = job(getFullThroughputJobName(project, "${os}_${opt_level}", isPR)) {
// Run on the dedicated Linux perf machines.
label('linux_clr_perf')
// Credential used to upload results to BenchView.
string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas')
stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that will be used to build the full title of a run in Benchview.')
// Selects which perf_throughput_linux_build run to copy product bits from.
stringParam('PRODUCT_BUILD', '', 'Build number from which to copy down the CoreCLR Product binaries built for Linux')
def osGroup = getOSGroup(os)
def runType = isPR ? 'private' : 'rolling'
def benchViewName = isPR ? 'coreclr-throughput private \$BenchviewCommitName' : 'coreclr-throughput rolling \$GIT_BRANCH_WITHOUT_ORIGIN \$GIT_COMMIT'
// Prepare the machine; --throughput presumably also stages the throughput
// benchmark assemblies used below -- confirm against perf-prep.sh.
shell("bash ./tests/scripts/perf-prep.sh --throughput")
shell("./init-tools.sh")
copyArtifacts(fullBuildJobName) {
includePatterns("bin/Product/**")
buildNumber('\${PRODUCT_BUILD}')
// Strip "origin/" from the branch name and register the BenchView
// submission metadata/build info in one shell step.
// NOTE(review): the spaces inside the quoted --name argument end up in the
// submission title; presumably unintentional -- confirm upstream.
shell("GIT_BRANCH_WITHOUT_ORIGIN=\$(echo \$GIT_BRANCH | sed \"s/[^/]*\\/\\(.*\\)/\\1 /\")\n" +
"python3.5 \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools/submission-metadata.py\" --name \" ${benchViewName} \" --user \"dotnet-bot@microsoft.com\"\n" +
"python3.5 \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools/build.py\" git --branch \$GIT_BRANCH_WITHOUT_ORIGIN --type ${runType}")
// Time compilation over the benchmark assemblies at the requested opt level.
// NOTE(review): assembly_root points at the Windows_NT-flavored benchmark
// package -- presumably the same IL assemblies are reused on Linux; confirm
// that the prep step stages them at this path.
shell("""python3.5 ./tests/scripts/run-throughput-perf.py \\
-arch \"${architecture}\" \\
-configuration \"${configuration}\" \\
-opt_level \"${opt_level}\" \\
-clr_root \"\${WORKSPACE}\" \\
-assembly_root \"\${WORKSPACE}/Microsoft.Benchview.ThroughputBenchmarks.${architecture}.Windows_NT/lib\" \\
-run_type \"${runType}\" \\
-benchview_path \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools\"""")
// Archive the throughput CSVs and the machine description.
def archiveSettings = new ArchivalSettings()
archiveSettings.addFiles('throughput-*.csv')
archiveSettings.addFiles('machinedata.json')
Utilities.addArchival(newJob, archiveSettings)
Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}")
// For perf, we need to keep the run results longer.
// Enable the log rotator: keep artifacts 7 days, at most 25 builds.
artifactDaysToKeep(7)
artifactNumToKeep(25)
// Flow job: build once on RHEL7.2, then run every OS/opt-level combination,
// forwarding the build number via PRODUCT_BUILD.
def flowJobTPRunList = throughputOSOptLevelList.collect { os ->
"{ build(params + [PRODUCT_BUILD: b.build.number], '${getFullThroughputJobName(project, os, isPR)}') }"
def newFlowJob = buildFlowJob(Utilities.getFullJobName(project, "perf_throughput_linux_flow", isPR, '')) {
stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that you will be used to build the full title of a run in Benchview. The final name will be of the form <branch> private BenchviewCommitName')
// First, build the bits on RHEL7.2
b = build(params, '${fullBuildJobName}')
// Then, run the perf tests
${flowJobTPRunList.join(",\n ")}
Utilities.setMachineAffinity(newFlowJob, 'Windows_NT', 'latest-or-auto')
Utilities.standardJobSetup(newFlowJob, project, isPR, "*/${branch}")
// PR flavor: comment-triggered GitHub status check.
TriggerBuilder builder = TriggerBuilder.triggerOnPullRequest()
builder.setGithubContext("Linux Throughput Perf Test Flow")
builder.triggerOnlyOnComment()
builder.setCustomTriggerPhrase("(?i).*test\\W+linux\\W+throughput\\W+flow.*")
builder.triggerForBranch(branch)
builder.emitTrigger(newFlowJob)
// Set a push trigger (rolling flavor runs on every push).
TriggerBuilder builder = TriggerBuilder.triggerOnCommit()
builder.emitTrigger(newFlowJob)
// Windows scenario benchmark jobs (JitBench, ILLink size tests).
// Matrix: (PR vs. rolling) x OS x (x64, x86).
[true, false].each { isPR ->
['Windows_NT'].each { os ->
['x64', 'x86'].each { arch ->
def architecture = arch
def newJob = job(Utilities.getFullJobName(project, "perf_scenarios_${os}_${arch}", isPR)) {
// Run on the dedicated Windows perf machines.
label('windows_clr_perf')
// Credential used to upload results to BenchView.
string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas')
stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that you will be used to build the full title of a run in Benchview. The final name will be of the form <branch> private BenchviewCommitName')
// Size-style scenario tests: a single iteration is sufficient.
stringParam('XUNIT_PERFORMANCE_MAX_ITERATION', '1', 'Size test, one iteration is sufficient')
stringParam('XUNIT_PERFORMANCE_MAX_ITERATION_INNER_SPECIFIED', '1', 'Size test, one iteration is sufficient')
def configuration = 'Release'
def runType = isPR ? 'private' : 'rolling'
def benchViewName = isPR ? 'CoreCLR-Scenarios private %BenchviewCommitName%' : 'CoreCLR-Scenarios rolling %GIT_BRANCH_WITHOUT_ORIGIN% %GIT_COMMIT%'
// Scenario results are uploaded for both PR and rolling runs.
def uploadString = '-uploadToBenchview'
// Fetch nuget.exe and (re)install the BenchView JSON tooling.
batchFile("powershell wget https://dist.nuget.org/win-x86-commandline/latest/nuget.exe -OutFile \"%WORKSPACE%\\nuget.exe\"")
batchFile("if exist \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\" rmdir /s /q \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\"")
batchFile("\"%WORKSPACE%\\nuget.exe\" install Microsoft.BenchView.JSONFormat -Source http://benchviewtestfeed.azurewebsites.net/nuget -OutputDirectory \"%WORKSPACE%\" -Prerelease -ExcludeVersion")
// Strip the leading "origin/" from the branch name (it is a problem for
// BenchView), then register the submission metadata and build info. Must be
// one batch step: every batchFile() starts a fresh cmd.exe, so environment
// variables set in one step do not survive into the next.
batchFile("if \"%GIT_BRANCH:~0,7%\" == \"origin/\" (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH:origin/=%\") else (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH%\")\n" +
"set \"BENCHVIEWNAME=${benchViewName}\"\n" +
"set \"BENCHVIEWNAME=%BENCHVIEWNAME:\"=%\"\n" +
"py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\submission-metadata.py\" --name \"%BENCHVIEWNAME%\" --user \"dotnet-bot@microsoft.com\"\n" +
"py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\build.py\" git --branch %GIT_BRANCH_WITHOUT_ORIGIN% --type ${runType}")
// Record the machine's hardware/OS description for the submission.
batchFile("py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\machinedata.py\"")
// Build the product and lay out the test drop.
batchFile("set __TestIntermediateDir=int&&build.cmd ${configuration} ${architecture}")
batchFile("tests\\runtest.cmd ${configuration} ${architecture} GenerateLayoutOnly")
// Common harness arguments for scenario-style (-scenarioTest) runs.
def runXUnitPerfCommonArgs = "-arch ${arch} -configuration ${configuration} -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} -scenarioTest"
def failedOutputLogFilename = "run-xunit-perf-scenario.log"
// Sentinel-file protocol: each scenario step below appends an error line to
// this file and exits 0 on failure, so remaining scenarios still run; a
// final step fails the job if the file exists. Start from a clean slate.
batchFile("if exist \"${failedOutputLogFilename}\" del /q /f \"${failedOutputLogFilename}\"")
batchFile("if exist \"${failedOutputLogFilename}\" (echo [ERROR] Failed to delete previously created \"${failedOutputLogFilename}\" file.& exit /b 1)")
// Scenario: JitBench. On failure, record to the sentinel log and continue.
batchFile("tests\\scripts\\run-xunit-perf.cmd ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\Scenario\\JitBench -group CoreCLR-Scenarios || (echo [ERROR] JitBench failed. 1>>\"${failedOutputLogFilename}\"& exit /b 0)")
// Scenario: ILLink (linker) benchmark, run without warmup; failures are
// likewise recorded to the sentinel log and deferred.
batchFile("tests\\scripts\\run-xunit-perf.cmd ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\linkbench\\linkbench -group ILLink -nowarmup || (echo [ERROR] IlLink failed. 1>>\"${failedOutputLogFilename}\"& exit /b 0)")
// If any scenario recorded a failure, print the log and fail the job now.
batchFile("if exist \"${failedOutputLogFilename}\" (type \"${failedOutputLogFilename}\"& exit /b 1)")
// Archive harness results and the machine description.
def archiveSettings = new ArchivalSettings()
archiveSettings.addFiles('.\\bin\\sandbox\\Perf-*.xml')
archiveSettings.addFiles('.\\bin\\sandbox\\Perf-*.log')
archiveSettings.addFiles('machinedata.json')
Utilities.addArchival(newJob, archiveSettings)
Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}")
// PR flavor: opt-in comment trigger ("test <os> <arch> perf scenarios").
TriggerBuilder builder = TriggerBuilder.triggerOnPullRequest()
builder.setGithubContext("${os} ${arch} Performance Scenarios Tests")
builder.triggerOnlyOnComment()
builder.setCustomTriggerPhrase("(?i).*test\\W+${os}\\W+${arch}\\W+perf\\W+scenarios.*")
builder.triggerForBranch(branch)
builder.emitTrigger(newJob)
// Set a push trigger (rolling flavor runs on every push).
TriggerBuilder builder = TriggerBuilder.triggerOnCommit()
builder.emitTrigger(newJob)
610 Utilities.createHelperJob(this, project, branch,
611 "Welcome to the ${project} Perf help",