assert false
}
+// Returns the canonical file name of the tgz archive containing the built tests
+// (the bin/tests/<osGroup>.<architecture>.<configuration> tree). This single
+// definition is shared by the build jobs that create/archive the tgz and the
+// test jobs that wget and extract it, so the two sides can never drift apart.
+def static getTestArtifactsTgzFileName(def osGroup, def architecture, def configuration) {
+ return "bin-tests-${osGroup}.${architecture}.${configuration}.tgz"
+}
// We have a limited amount of some hardware. For these, scale back the periodic testing we do,
// and only allowing using this hardware in some specific branches.
case 'Tizen':
case 'Fedora24': // editor brace matching: {
switch (architecture) {
- case 'x64':
case 'x86':
- if (architecture == 'x86' && os == 'Ubuntu') {
+ if (os == 'Ubuntu') {
// build and PAL test
def dockerImage = getDockerImageName(architecture, os, true)
buildCommands += "docker run -i --rm -v \${WORKSPACE}:/opt/code -w /opt/code -e ROOTFS_DIR=/crossrootfs/x86 ${dockerImage} ./build.sh ${architecture} cross ${lowerConfiguration}"
buildCommands += "docker run -i --rm -v \${WORKSPACE}:/opt/code -w /opt/code ${dockerImage} ./src/pal/tests/palsuite/runpaltests.sh /opt/code/bin/obj/${osGroup}.${architecture}.${configuration} /opt/code/bin/paltestout"
Utilities.addArchival(newJob, "bin/Product/**,bin/obj/*/tests/**/*.so", "bin/Product/**/.nuget/**")
Utilities.addXUnitDotNETResults(newJob, '**/pal_tests.xml')
- break
}
+ break
+ case 'x64':
if (scenario == 'formatting') {
buildCommands += "python tests/scripts/format.py -c \${WORKSPACE} -o Linux -a ${architecture}"
Utilities.addArchival(newJob, "format.patch", "", true, false)
def bootstrapRidEnv = bootstrapRid != null ? "__PUBLISH_RID=${bootstrapRid} " : ''
buildCommands += "${bootstrapRidEnv}./build.sh ${lowerConfiguration} ${architecture}"
+
+ def testBuildOpts = ""
+ if (priority == '1') {
+ testBuildOpts = "priority1"
+ }
+
+ buildCommands += "./build-test.sh ${lowerConfiguration} ${architecture} ${testBuildOpts}"
buildCommands += "src/pal/tests/palsuite/runpaltests.sh \${WORKSPACE}/bin/obj/${osGroup}.${architecture}.${configuration} \${WORKSPACE}/bin/paltestout"
- // Basic archiving of the build
- Utilities.addArchival(newJob, "bin/Product/**,bin/obj/*/tests/**/*.dylib,bin/obj/*/tests/**/*.so", "bin/Product/**/.nuget/**")
+ // Archive the bin/tests folder for *_tst jobs
+ def testArtifactsTgzFileName = getTestArtifactsTgzFileName(osGroup, architecture, configuration)
+ buildCommands += "tar -czf ${testArtifactsTgzFileName} bin/tests/${osGroup}.${architecture}.${configuration}"
+ Utilities.addArchival(newJob, "${testArtifactsTgzFileName}", "")
// And pal tests
Utilities.addXUnitDotNETResults(newJob, '**/pal_tests.xml')
}
// Archive the built artifacts
Utilities.addArchival(newJob, "coreroot.${os}.${architecture}.${lowerConfiguration}.zip,coreroot.baseline.${os}.${architecture}.${lowerConfiguration}.zip")
}
- else if (architecture == 'arm') {
- // Then, using the same docker image, generate the CORE_ROOT layout using build-test.sh to
- // download the appropriate CoreFX packages.
- // Note that docker should not be necessary here, for the "generatelayoutonly" case, but we use it
- // just to be consistent with the "build.sh" case -- so both are run with the same environment.
-
- buildCommands += "${dockerCmd}\${WORKSPACE}/build-test.sh ${lowerConfiguration} ${architecture} cross generatelayoutonly"
-
- // ZIP up for the test job (created in the flow job code):
- // (1) the built CORE_ROOT, /home/user/coreclr/bin/tests/Linux.arm.Checked/Tests/Core_Root,
- // used by runtest.sh as the "--coreOverlayDir" argument.
- // (2) the native parts of the test build: /home/user/coreclr/bin/obj/Linux.arm.Checked/tests,
- // used by runtest.sh as the "--testNativeBinDir" argument.
-
- // These commands are assumed to be run from the root of the workspace.
- buildCommands += "zip -r coreroot.${lowerConfiguration}.zip ./bin/tests/Linux.${architecture}.${configuration}/Tests/Core_Root"
- buildCommands += "zip -r testnativebin.${lowerConfiguration}.zip ./bin/obj/Linux.${architecture}.${configuration}/tests"
-
- Utilities.addArchival(newJob, "coreroot.${lowerConfiguration}.zip,testnativebin.${lowerConfiguration}.zip", "")
- }
else {
- assert architecture == 'arm64'
-
// Then, using the same docker image, build the tests and generate the CORE_ROOT layout.
- // Linux/arm64 does not use Windows-built tests.
def testBuildOpts = ""
if (priority == '1') {
buildCommands += "${dockerCmd}\${WORKSPACE}/build-test.sh ${lowerConfiguration} ${architecture} cross ${testBuildOpts}"
// ZIP up the built tests (including CORE_ROOT and native test components copied to the CORE_ROOT) for the test job (created in the flow job code)
- buildCommands += "zip -r tests.${lowerConfiguration}.zip ./bin/tests/Linux.${architecture}.${configuration}"
-
- // We still use the testnativebin files until they get placed properly in the tests directory (next to their respective tests).
- // With https://github.com/dotnet/coreclr/pull/19918 this shouldn't be needed anymore.
- buildCommands += "zip -r testnativebin.${lowerConfiguration}.zip ./bin/obj/Linux.${architecture}.${configuration}/tests"
+ def testArtifactsTgzFileName = getTestArtifactsTgzFileName(osGroup, architecture, configuration)
+ buildCommands += "tar -czf ${testArtifactsTgzFileName} bin/tests/${osGroup}.${architecture}.${configuration}"
- Utilities.addArchival(newJob, "tests.${lowerConfiguration}.zip,testnativebin.${lowerConfiguration}.zip", "")
+ Utilities.addArchival(newJob, "${testArtifactsTgzFileName}", "")
}
// Archive the build logs from both product and test builds.
// Create a test job not covered by the "Windows ARM" case that will be used by a flow job.
// E.g., non-Windows tests.
// Returns the newly created job.
-def static CreateOtherTestJob(def dslFactory, def project, def branch, def architecture, def os, def configuration, def scenario, def isPR, def inputCoreCLRBuildName, def inputTestsBuildName)
+def static CreateOtherTestJob(def dslFactory, def project, def branch, def architecture, def os, def configuration, def scenario, def isPR, def inputCoreCLRBuildName)
{
def lowerConfiguration = configuration.toLowerCase()
}
}
- // The ARM Ubuntu corefx test job doesn't depend on a Windows test build, and hence inputTestsBuildName
- // will be null in this case.
-
def jobFolder = getJobFolder(scenario)
def newJob = dslFactory.job(Utilities.getFullJobName(project, jobName, isPR, jobFolder)) {
parameters {
- if (inputTestsBuildName != null) {
- stringParam('CORECLR_WINDOWS_BUILD', '', 'Build number to copy CoreCLR Windows test binaries from')
- }
stringParam('CORECLR_BUILD', '', "Build number to copy CoreCLR ${osGroup} binaries from")
}
steps {
// Set up the copies
- // Coreclr build containing the tests and mscorlib
- // pri1 jobs still need to copy windows_nt built tests
- if (inputTestsBuildName != null) {
- copyArtifacts(inputTestsBuildName) {
- excludePatterns('**/testResults.xml', '**/*.ni.dll')
- buildSelector {
- buildNumber('${CORECLR_WINDOWS_BUILD}')
- }
- }
- }
-
// Coreclr build we are trying to test
//
- // ** NOTE ** This will, correctly, overwrite the CORE_ROOT from the Windows test archive
- //
// HACK: the Ubuntu arm64 copyArtifacts Jenkins plug-in is ridiculously slow (45 minutes to
// 1.5 hours for this step). Instead, directly use wget, which is fast (1 minute).
shell("wget --progress=dot:giga --directory-prefix=${workspaceRelativeFxRootLinux} ${inputUrlRoot}/${workspaceRelativeFxRootLinux}/fxruntime.zip")
}
else {
- shell("wget --progress=dot:giga ${inputUrlRoot}/testnativebin.${lowerConfiguration}.zip")
- shell("wget --progress=dot:giga ${inputUrlRoot}/tests.${lowerConfiguration}.zip")
+ def testArtifactsTgzFileName = getTestArtifactsTgzFileName(osGroup, architecture, configuration)
+ shell("wget --progress=dot:giga ${inputUrlRoot}/${testArtifactsTgzFileName}")
}
}
shell("unzip -q -o ${workspaceRelativeFxRootLinux}/fxtests.zip || exit 0")
shell("unzip -q -o ${workspaceRelativeFxRootLinux}/fxruntime.zip || exit 0")
}
- else if (architecture != 'arm64') {
- // ARM64 copies the tests from the build machine; this is for unzip'ing tests copied from a Windows build.
- //
- // Unzip the tests first. Exit with 0
- shell("unzip -q -o ./bin/tests/tests.zip -d ./bin/tests/${osGroup}.${architecture}.${configuration} || exit 0")
- shell("rm -r ./bin/tests/${osGroup}.${architecture}.${configuration}/Tests/Core_Root || exit 0")
- }
-
- // For arm Ubuntu (on hardware), we do the "build-test" step on the build machine, not on the test
- // machine. The arm Ubuntu test machines do no building -- they have no CLI, for example.
- // We should probably do the "generatelayoutonly" step on the build machine for all architectures.
- // However, it's believed that perhaps there's an issue with executable permission bits not getting
- // copied correctly.
- if (!doCoreFxTesting) {
- if (isUbuntuArmJob) {
- if (!isPmiAsmDiffsScenario) {
- if (architecture == 'arm') {
- shell("unzip -q -o ./coreroot.${lowerConfiguration}.zip || exit 0") // unzips to ./bin/tests/Linux.${architecture}.${configuration}/Tests/Core_Root
- shell("unzip -q -o ./testnativebin.${lowerConfiguration}.zip || exit 0") // unzips to ./bin/obj/Linux.${architecture}.${configuration}/tests
- }
- else {
- assert architecture == 'arm64'
- shell("unzip -q -o ./tests.${lowerConfiguration}.zip || exit 0") // unzips to ./bin/tests/Linux.${architecture}.${configuration}
-
- // We still the testnativebin files until they get placed properly in the tests directory (next to their respective tests).
- shell("unzip -q -o ./testnativebin.${lowerConfiguration}.zip || exit 0") // unzips to ./bin/obj/Linux.${architecture}.${configuration}/tests
- }
- }
- }
- else {
- shell("./build-test.sh ${architecture} ${configuration} generatelayoutonly")
- }
+ else {
+ def testArtifactsTgzFileName = getTestArtifactsTgzFileName(osGroup, architecture, configuration)
+ shell("tar -xzf ./${testArtifactsTgzFileName} || exit 0") // extracts to ./bin/tests/${osGroup}.${architecture}.${configuration}
}
// Execute the tests
else {
def runScript = "${dockerCmd}./tests/runtest.sh"
- // TODO: the testNativeBinDir shouldn't be necessary if the native test binaries are placed properly with their corresponding managed test code.
-
shell("""\
${runScript} \\
${lowerConfiguration} \\
--testRootDir=\"\${WORKSPACE}/bin/tests/${osGroup}.${architecture}.${configuration}\" \\
--coreOverlayDir=\"\${WORKSPACE}/bin/tests/${osGroup}.${architecture}.${configuration}/Tests/Core_Root\" \\
- --testNativeBinDir=\"\${WORKSPACE}/bin/obj/${osGroup}.${architecture}.${configuration}/tests\" \\
- --copyNativeTestBin --limitedDumpGeneration ${testOpts}""")
+ --limitedDumpGeneration ${testOpts}""")
}
if (isGcReliabilityFramework(scenario)) {
// Returns the newly created job.
// Note that we don't add tests jobs to the various views, since they are always used by a flow job, which is in the views,
// and we want the views to be the minimal set of "top-level" jobs that represent all work.
-def static CreateTestJob(def dslFactory, def project, def branch, def architecture, def os, def configuration, def scenario, def isPR, def inputCoreCLRBuildName, def inputTestsBuildName)
+def static CreateTestJob(def dslFactory, def project, def branch, def architecture, def os, def configuration, def scenario, def isPR, def inputCoreCLRBuildName)
{
def windowsArmJob = ((os == "Windows_NT") && (architecture in Constants.armWindowsCrossArchitectureList))
def newJob = null
if (windowsArmJob) {
- assert inputTestsBuildName == null
newJob = CreateWindowsArmTestJob(dslFactory, project, architecture, os, configuration, scenario, isPR, inputCoreCLRBuildName)
}
else if (isCrossGenComparisonScenario(scenario)) {
- assert inputTestsBuildName == null
newJob = CreateNonWindowsCrossGenComparisonTestJob(dslFactory, project, architecture, os, configuration, scenario, isPR, inputCoreCLRBuildName)
}
else {
- newJob = CreateOtherTestJob(dslFactory, project, branch, architecture, os, configuration, scenario, isPR, inputCoreCLRBuildName, inputTestsBuildName)
+ newJob = CreateOtherTestJob(dslFactory, project, branch, architecture, os, configuration, scenario, isPR, inputCoreCLRBuildName)
}
setJobMachineAffinity(architecture, os, false, true, false, newJob) // isBuildJob = false, isTestJob = true, isFlowJob = false
}
// Create a flow job to tie together a build job with the given test job.
-// The 'inputTestsBuildName' argument might be null if the flow job doesn't depend on a Windows build job.
// Returns the new flow job.
-def static CreateFlowJob(def dslFactory, def project, def branch, def architecture, def os, def configuration, def scenario, def isPR, def fullTestJobName, def inputCoreCLRBuildName, def inputTestsBuildName)
+def static CreateFlowJob(def dslFactory, def project, def branch, def architecture, def os, def configuration, def scenario, def isPR, def fullTestJobName, def inputCoreCLRBuildName)
{
// Windows CoreCLR build and Linux CoreCLR build (in parallel) ->
// Linux CoreCLR test
def flowJobName = getJobName(configuration, architecture, os, scenario, false) + "_flow"
def jobFolder = getJobFolder(scenario)
- def newFlowJob = null
-
- if (inputTestsBuildName == null) {
- newFlowJob = dslFactory.buildFlowJob(Utilities.getFullJobName(project, flowJobName, isPR, jobFolder)) {
- buildFlow("""\
+ def newFlowJob = dslFactory.buildFlowJob(Utilities.getFullJobName(project, flowJobName, isPR, jobFolder)) {
+ buildFlow("""\
coreclrBuildJob = build(params, '${inputCoreCLRBuildName}')
// And then build the test build
build(params + [CORECLR_BUILD: coreclrBuildJob.build.number], '${fullTestJobName}')
""")
- }
- JobReport.Report.addReference(inputCoreCLRBuildName)
- JobReport.Report.addReference(fullTestJobName)
- }
- else {
- newFlowJob = dslFactory.buildFlowJob(Utilities.getFullJobName(project, flowJobName, isPR, jobFolder)) {
- buildFlow("""\
-// Build the input jobs in parallel
-parallel (
-{ coreclrBuildJob = build(params, '${inputCoreCLRBuildName}') },
-{ windowsBuildJob = build(params, '${inputTestsBuildName}') }
-)
-
-// And then build the test build
-build(params + [CORECLR_BUILD: coreclrBuildJob.build.number,
- CORECLR_WINDOWS_BUILD: windowsBuildJob.build.number], '${fullTestJobName}')
-""")
- }
- JobReport.Report.addReference(inputCoreCLRBuildName)
- JobReport.Report.addReference(inputTestsBuildName)
- JobReport.Report.addReference(fullTestJobName)
}
+ JobReport.Report.addReference(inputCoreCLRBuildName)
+ JobReport.Report.addReference(fullTestJobName)
addToViews(newFlowJob, true, isPR, architecture, os, configuration, scenario) // isFlowJob = true
def inputCoreCLRBuildName = projectFolder + '/' +
Utilities.getFullJobName(project, getJobName(configuration, architecture, os, inputCoreCLRBuildScenario, inputCoreCLRBuildIsBuildOnly), isPR, inputCoreCLRFolderName)
- // Figure out the name of the build job that the test job will depend on.
- // For Windows ARM tests, this is not used, as the CoreCLR build creates the tests. For other
- // tests (e.g., Linux ARM), we depend on a Windows build to get the tests.
- // For CoreFX tests, however, Linux doesn't need the Windows build for the tests, since the
- // CoreFX build creates the tests.
-
- def inputTestsBuildName = null
-
- // Ubuntu Arm64 jobs do the test build on the build machine, and thus don't depend on a Windows build.
- def isUbuntuArm64Job = ((os == "Ubuntu16.04") && (architecture == 'arm64'))
-
- if (!windowsArmJob && !doCoreFxTesting & !doCrossGenComparison && !isUbuntuArm64Job && !isPmiAsmDiffsScenario) {
- def testBuildScenario = isInnerloopTestScenario(scenario) ? 'innerloop' : 'normal'
-
- def inputTestsBuildArch = architecture
- if (architecture == "arm") {
- // Use the x86 test build for arm unix
- inputTestsBuildArch = "x86"
- }
-
- def inputTestsBuildIsBuildOnly = true
-
- inputTestsBuildName = projectFolder + '/' +
- Utilities.getFullJobName(project, getJobName(configuration, inputTestsBuildArch, 'windows_nt', testBuildScenario, inputTestsBuildIsBuildOnly), isPR)
- }
-
// =============================================================================================
// Create the test job
// =============================================================================================
- def testJob = CreateTestJob(this, project, branch, architecture, os, configuration, scenario, isPR, inputCoreCLRBuildName, inputTestsBuildName)
+ def testJob = CreateTestJob(this, project, branch, architecture, os, configuration, scenario, isPR, inputCoreCLRBuildName)
// =============================================================================================
// Create a build flow to join together the build and tests required to run this test.
}
def fullTestJobName = projectFolder + '/' + testJob.name
- def flowJob = CreateFlowJob(this, project, branch, architecture, os, configuration, scenario, isPR, fullTestJobName, inputCoreCLRBuildName, inputTestsBuildName)
+ def flowJob = CreateFlowJob(this, project, branch, architecture, os, configuration, scenario, isPR, fullTestJobName, inputCoreCLRBuildName)
} // os
} // configuration