From: Juan Sebastian Hoyos Ayala Date: Thu, 1 Nov 2018 20:59:59 +0000 (-0700) Subject: Add base arcade scripts and versioning files X-Git-Tag: accepted/tizen/unified/20190422.045933~834^2 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=84387fa5f788603ff6eb85f5a0c6c954bd90d362;p=platform%2Fupstream%2Fcoreclr.git Add base arcade scripts and versioning files --- diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml new file mode 100644 index 0000000..ca7e333 --- /dev/null +++ b/eng/Version.Details.xml @@ -0,0 +1,14 @@ + + + + + + https://github.com/dotnet/arcade + 1bd69581da9d5a4744d60ccfcb929a9c25496599 + + + https://github.com/dotnet/arcade + 1bd69581da9d5a4744d60ccfcb929a9c25496599 + + + diff --git a/eng/Versions.props b/eng/Versions.props new file mode 100644 index 0000000..8a440dc --- /dev/null +++ b/eng/Versions.props @@ -0,0 +1,21 @@ + + + + + 3.0.0 + preview + + false + true + 1.0.0-alpha-004 + + + + + $(RestoreSources); + https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json; + https://dotnet.myget.org/F/symreader-converter/api/v3/index.json; + https:%2F%2Fdotnet.myget.org/F/symreader/api/v3/index.json + + + diff --git a/eng/common/CIBuild.cmd b/eng/common/CIBuild.cmd new file mode 100644 index 0000000..6544b0c --- /dev/null +++ b/eng/common/CIBuild.cmd @@ -0,0 +1,3 @@ +@echo off +powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*" +exit /b %ErrorLevel% diff --git a/eng/common/PublishBuildAssets.cmd b/eng/common/PublishBuildAssets.cmd new file mode 100644 index 0000000..4f9d54d --- /dev/null +++ b/eng/common/PublishBuildAssets.cmd @@ -0,0 +1,3 @@ +@echo off +powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -publishBuildAssets %*" +exit /b %ErrorLevel% diff --git a/eng/common/build.ps1 b/eng/common/build.ps1 new file mode 100644 index 0000000..ca45b3d --- /dev/null +++ b/eng/common/build.ps1 @@ -0,0 +1,107 @@ +[CmdletBinding(PositionalBinding=$false)] +Param( + [string] $configuration = "Debug", + [string] $projects = "", + [string] $verbosity = "minimal", + [string] $msbuildEngine = $null, + [bool] $warnaserror = $true, + [bool] $nodereuse = $true, + [switch] $restore, + [switch] $deployDeps, + [switch] $build, + [switch] $rebuild, + [switch] $deploy, + [switch] $test, + [switch] $integrationTest, + [switch] $performanceTest, + [switch] $sign, + [switch] $pack, + [switch] $publish, + [switch] $publishBuildAssets, + [switch] $ci, + [switch] $prepareMachine, + [switch] $help, + [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties +) + +. $PSScriptRoot\tools.ps1 + +function Print-Usage() { + Write-Host "Common settings:" + Write-Host " -configuration Build configuration Debug, Release" + Write-Host " -verbosity Msbuild verbosity (q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic])" + Write-Host " -help Print help and exit" + Write-Host "" + + Write-Host "Actions:" + Write-Host " -restore Restore dependencies" + Write-Host " -build Build solution" + Write-Host " -rebuild Rebuild solution" + Write-Host " -deploy Deploy built VSIXes" + Write-Host " -deployDeps Deploy dependencies (e.g. 
VSIXes for integration tests)" + Write-Host " -test Run all unit tests in the solution" + Write-Host " -pack Package build outputs into NuGet packages and Willow components" + Write-Host " -integrationTest Run all integration tests in the solution" + Write-Host " -performanceTest Run all performance tests in the solution" + Write-Host " -sign Sign build outputs" + Write-Host " -publish Publish artifacts (e.g. symbols)" + Write-Host " -publishBuildAssets Push assets to BAR" + Write-Host "" + + Write-Host "Advanced settings:" + Write-Host " -projects Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)" + Write-Host " -ci Set when running on CI server" + Write-Host " -prepareMachine Prepare machine for CI run" + Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)." + Write-Host "" + Write-Host "Command line arguments not listed above are passed thru to msbuild." + Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)." +} + +if ($help -or (($properties -ne $null) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) { + Print-Usage + exit 0 +} + +try { + if ($projects -eq "") { + $projects = Join-Path $RepoRoot "*.sln" + } + + InitializeTools + + $BuildLog = Join-Path $LogDir "Build.binlog" + + MSBuild $ToolsetBuildProj ` + /bl:$BuildLog ` + /p:Configuration=$configuration ` + /p:Projects=$projects ` + /p:RepoRoot=$RepoRoot ` + /p:Restore=$restore ` + /p:DeployDeps=$deployDeps ` + /p:Build=$build ` + /p:Rebuild=$rebuild ` + /p:Deploy=$deploy ` + /p:Test=$test ` + /p:Pack=$pack ` + /p:IntegrationTest=$integrationTest ` + /p:PerformanceTest=$performanceTest ` + /p:Sign=$sign ` + /p:Publish=$publish ` + /p:PublishBuildAssets=$publishBuildAssets ` + /p:ContinuousIntegrationBuild=$ci ` + @properties + + if ($lastExitCode -ne 0) { + Write-Host "Build Failed (exit code '$lastExitCode'). 
See log: $BuildLog" -ForegroundColor Red + ExitWithExitCode $lastExitCode + } + + ExitWithExitCode $lastExitCode +} +catch { + Write-Host $_ + Write-Host $_.Exception + Write-Host $_.ScriptStackTrace + ExitWithExitCode 1 +} diff --git a/eng/common/build.sh b/eng/common/build.sh new file mode 100644 index 0000000..941db3b --- /dev/null +++ b/eng/common/build.sh @@ -0,0 +1,171 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" + +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +help=false +restore=false +build=false +rebuild=false +test=false +pack=false +publish=false +integration_test=false +performance_test=false +sign=false +public=false +ci=false + +warnaserror=true +nodereuse=true + +projects='' +configuration='Debug' +prepare_machine=false +verbosity='minimal' +properties='' + +while (($# > 0)); do + lowerI="$(echo $1 | awk '{print tolower($0)}')" + case $lowerI in + --build) + build=true + shift 1 + ;; + --ci) + ci=true + shift 1 + ;; + --configuration) + configuration=$2 + shift 2 + ;; + --help) + echo "Common settings:" + echo " --configuration Build configuration Debug, Release" + echo " --verbosity Msbuild verbosity (q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic])" + echo " --help Print help and exit" + echo "" + echo "Actions:" + echo " --restore Restore dependencies" + echo " --build Build solution" + echo " --rebuild Rebuild solution" + echo " --test Run all unit tests in the solution" + echo " --sign Sign build outputs" + echo " --publish Publish artifacts (e.g. symbols)" + echo " --pack Package build outputs into NuGet packages and Willow components" + echo "" + echo "Advanced settings:" + echo " --solution Path to solution to build" + echo " --ci Set when running on CI server" + echo " --prepareMachine Prepare machine for CI run" + echo "" + echo "Command line arguments not listed above are passed through to MSBuild." + exit 0 + ;; + --pack) + pack=true + shift 1 + ;; + --preparemachine) + prepare_machine=true + shift 1 + ;; + --rebuild) + rebuild=true + shift 1 + ;; + --restore) + restore=true + shift 1 + ;; + --sign) + sign=true + shift 1 + ;; + --solution) + solution=$2 + shift 2 + ;; + --projects) + projects=$2 + shift 2 + ;; + --test) + test=true + shift 1 + ;; + --integrationtest) + integration_test=true + shift 1 + ;; + --performancetest) + performance_test=true + shift 1 + ;; + --publish) + publish=true + shift 1 + ;; + --verbosity) + verbosity=$2 + shift 2 + ;; + --warnaserror) + warnaserror=$2 + shift 2 + ;; + --nodereuse) + nodereuse=$2 + shift 2 + ;; + *) + properties="$properties $1" + shift 1 + ;; + esac +done + +. 
"$scriptroot/tools.sh" + +if [[ -z $projects ]]; then + projects="$repo_root/*.sln" +fi + +InitializeTools + +build_log="$log_dir/Build.binlog" + +MSBuild "$toolset_build_proj" \ + /bl:"$build_log" \ + /p:Configuration=$configuration \ + /p:Projects="$projects" \ + /p:RepoRoot="$repo_root" \ + /p:Restore=$restore \ + /p:Build=$build \ + /p:Rebuild=$rebuild \ + /p:Test=$test \ + /p:Pack=$pack \ + /p:IntegrationTest=$integration_test \ + /p:PerformanceTest=$performance_test \ + /p:Sign=$sign \ + /p:Publish=$publish \ + /p:ContinuousIntegrationBuild=$ci \ + $properties + +lastexitcode=$? + +if [[ $lastexitcode != 0 ]]; then + echo "Build failed (exit code '$lastexitcode'). See log: $build_log" +fi + +ExitWithExitCode $lastexitcode diff --git a/eng/common/cibuild.sh b/eng/common/cibuild.sh new file mode 100644 index 0000000..1a02c0d --- /dev/null +++ b/eng/common/cibuild.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" + +# resolve $SOURCE until the file is no longer a symlink +while [[ -h $source ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + + # if $source was a relative symlink, we need to resolve it relative to the path where + # the symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@ \ No newline at end of file diff --git a/eng/common/cross/android/arm/toolchain.cmake b/eng/common/cross/android/arm/toolchain.cmake new file mode 100644 index 0000000..a7e1c73 --- /dev/null +++ b/eng/common/cross/android/arm/toolchain.cmake @@ -0,0 +1,41 @@ +set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../) +set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot) +set(CLR_CMAKE_PLATFORM_ANDROID "Android") + +set(CMAKE_SYSTEM_NAME Linux) +set(CMAKE_SYSTEM_VERSION 1) +set(CMAKE_SYSTEM_PROCESSOR arm) + +## Specify the toolchain +set(TOOLCHAIN "arm-linux-androideabi") +set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN}) +set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-) + +find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang) +find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++) +find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang) +find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar) +find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar) +find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy) +find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump) + +add_compile_options(--sysroot=${CROSS_ROOTFS}) +add_compile_options(-fPIE) +add_compile_options(-mfloat-abi=soft) +include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/) +include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/arm-linux-androideabi/) + +set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}") +set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}") +set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}") +set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie") + +set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE) +set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE) +set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE) + +set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}") +set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) 
+set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) diff --git a/eng/common/cross/android/arm64/toolchain.cmake b/eng/common/cross/android/arm64/toolchain.cmake new file mode 100644 index 0000000..2941589 --- /dev/null +++ b/eng/common/cross/android/arm64/toolchain.cmake @@ -0,0 +1,42 @@ +set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../) +set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot) +set(CLR_CMAKE_PLATFORM_ANDROID "Android") + +set(CMAKE_SYSTEM_NAME Linux) +set(CMAKE_SYSTEM_VERSION 1) +set(CMAKE_SYSTEM_PROCESSOR aarch64) + +## Specify the toolchain +set(TOOLCHAIN "aarch64-linux-android") +set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN}) +set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-) + +find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang) +find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++) +find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang) +find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar) +find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar) +find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy) +find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump) + +add_compile_options(--sysroot=${CROSS_ROOTFS}) +add_compile_options(-fPIE) + +## Needed for Android or bionic specific conditionals +add_compile_options(-D__ANDROID__) +add_compile_options(-D__BIONIC__) + +set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}") +set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}") +set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}") +set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie") + +set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE) +set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE) +set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE) + +set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}") +set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) diff --git a/eng/common/cross/arm/sources.list.bionic b/eng/common/cross/arm/sources.list.bionic new file mode 100644 index 0000000..2109557 --- /dev/null +++ b/eng/common/cross/arm/sources.list.bionic @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/arm/sources.list.jessie b/eng/common/cross/arm/sources.list.jessie new file mode 100644 index 0000000..4d142ac --- /dev/null +++ b/eng/common/cross/arm/sources.list.jessie @@ -0,0 +1,3 @@ +# Debian (sid) # UNSTABLE +deb http://ftp.debian.org/debian/ sid main contrib non-free +deb-src http://ftp.debian.org/debian/ sid main contrib non-free diff --git a/eng/common/cross/arm/sources.list.trusty b/eng/common/cross/arm/sources.list.trusty new file mode 100644 index 0000000..07d8f88 --- /dev/null +++ 
b/eng/common/cross/arm/sources.list.trusty @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse \ No newline at end of file diff --git a/eng/common/cross/arm/sources.list.vivid b/eng/common/cross/arm/sources.list.vivid new file mode 100644 index 0000000..0b1215e --- /dev/null +++ b/eng/common/cross/arm/sources.list.vivid @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ vivid main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ vivid-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ vivid-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ vivid-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-security main restricted universe multiverse \ No newline at end of file diff --git a/eng/common/cross/arm/sources.list.wily b/eng/common/cross/arm/sources.list.wily new file mode 100644 index 0000000..e23d1e0 --- /dev/null +++ b/eng/common/cross/arm/sources.list.wily @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ wily main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ wily main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ wily-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ wily-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ wily-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-security main restricted universe multiverse \ No newline at end of file diff --git a/eng/common/cross/arm/sources.list.xenial b/eng/common/cross/arm/sources.list.xenial new file mode 100644 index 0000000..eacd86b --- /dev/null +++ b/eng/common/cross/arm/sources.list.xenial @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse \ No newline at end of file diff --git 
a/eng/common/cross/arm/sources.list.zesty b/eng/common/cross/arm/sources.list.zesty new file mode 100644 index 0000000..ea2c14a --- /dev/null +++ b/eng/common/cross/arm/sources.list.zesty @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse diff --git a/eng/common/cross/arm/trusty-lttng-2.4.patch b/eng/common/cross/arm/trusty-lttng-2.4.patch new file mode 100644 index 0000000..8e4dd7a --- /dev/null +++ b/eng/common/cross/arm/trusty-lttng-2.4.patch @@ -0,0 +1,71 @@ +From e72c9d7ead60e3317bd6d1fade995c07021c947b Mon Sep 17 00:00:00 2001 +From: Mathieu Desnoyers +Date: Thu, 7 May 2015 13:25:04 -0400 +Subject: [PATCH] Fix: building probe providers with C++ compiler + +Robert Daniels wrote: +> > I'm attempting to use lttng userspace tracing with a C++ application +> > on an ARM platform. I'm using GCC 4.8.4 on Linux 3.14 with the 2.6 +> > release of lttng. I've compiled lttng-modules, lttng-ust, and +> > lttng-tools and have been able to get a simple test working with C +> > code. When I attempt to run the hello.cxx test on my target it will +> > segfault. +> +> +> I spent a little time digging into this issue and finally discovered the +> cause of my segfault with ARM C++ tracepoints. +> +> There is a struct called 'lttng_event' in ust-events.h which contains an +> empty union 'u'. This was the cause of my issue. Under C, this empty union +> compiles to a zero byte member while under C++ it compiles to a one byte +> member, and in my case was four-byte aligned which caused my C++ code to +> have the 'cds_list_head node' offset incorrectly by four bytes. This lead +> to an incorrect linked list structure which caused my issue. +> +> Since this union is empty, I simply removed it from the struct and everything +> worked correctly. +> +> I don't know the history or purpose behind this empty union so I'd like to +> know if this is a safe fix. If it is I can submit a patch with the union +> removed. + +That's a very nice catch! + +We do not support building tracepoint probe provider with +g++ yet, as stated in lttng-ust(3): + +"- Note for C++ support: although an application instrumented with + tracepoints can be compiled with g++, tracepoint probes should be + compiled with gcc (only tested with gcc so far)." + +However, if it works fine with this fix, then I'm tempted to take it, +especially because removing the empty union does not appear to affect +the layout of struct lttng_event as seen from liblttng-ust, which must +be compiled with a C compiler, and from probe providers compiled with +a C compiler. So all we are changing is the layout of a probe provider +compiled with a C++ compiler, which is anyway buggy at the moment, +because it is not compatible with the layout expected by liblttng-ust +compiled with a C compiler. 
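The layout difference described above is easy to reproduce outside of lttng. The snippet below uses a stand-in struct (not the real lttng_event) and shows the member following the empty union landing at a different offset depending on whether the same source is compiled as C or C++:

```bash
cat > /tmp/empty_union_layout.c <<'EOF'
#include <stdio.h>
#include <stddef.h>
struct probe_like {          /* stand-in for struct lttng_event */
    int before;
    union { } u;             /* empty union, as in ust-events.h */
    int after;               /* stand-in for 'struct cds_list_head node' */
};
int main(void) {
    printf("sizeof=%zu offsetof(after)=%zu\n",
           sizeof(struct probe_like), offsetof(struct probe_like, after));
    return 0;
}
EOF
gcc -x c   -o /tmp/layout_c   /tmp/empty_union_layout.c && /tmp/layout_c    # C:   u is 0 bytes, 'after' at offset 4
g++ -x c++ -o /tmp/layout_cpp /tmp/empty_union_layout.c && /tmp/layout_cpp  # C++: u is 1 byte,  'after' at offset 8
```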
+ +Reported-by: Robert Daniels +Signed-off-by: Mathieu Desnoyers +--- + include/lttng/ust-events.h | 2 -- + 1 file changed, 2 deletions(-) + +diff --git a/usr/include/lttng/ust-events.h b/usr/include/lttng/ust-events.h +index 328a875..3d7a274 100644 +--- a/usr/include/lttng/ust-events.h ++++ b/usr/include/lttng/ust-events.h +@@ -407,8 +407,6 @@ struct lttng_event { + void *_deprecated1; + struct lttng_ctx *ctx; + enum lttng_ust_instrumentation instrumentation; +- union { +- } u; + struct cds_list_head node; /* Event list in session */ + struct cds_list_head _deprecated2; + void *_deprecated3; +-- +2.7.4 + diff --git a/eng/common/cross/arm/trusty.patch b/eng/common/cross/arm/trusty.patch new file mode 100644 index 0000000..2f2972f --- /dev/null +++ b/eng/common/cross/arm/trusty.patch @@ -0,0 +1,97 @@ +diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h +--- a/usr/include/urcu/uatomic/generic.h 2014-03-28 06:04:42.000000000 +0900 ++++ b/usr/include/urcu/uatomic/generic.h 2017-02-13 10:35:21.189927116 +0900 +@@ -65,17 +65,17 @@ + switch (len) { + #ifdef UATOMIC_HAS_ATOMIC_BYTE + case 1: +- return __sync_val_compare_and_swap_1(addr, old, _new); ++ return __sync_val_compare_and_swap_1((uint8_t *) addr, old, _new); + #endif + #ifdef UATOMIC_HAS_ATOMIC_SHORT + case 2: +- return __sync_val_compare_and_swap_2(addr, old, _new); ++ return __sync_val_compare_and_swap_2((uint16_t *) addr, old, _new); + #endif + case 4: +- return __sync_val_compare_and_swap_4(addr, old, _new); ++ return __sync_val_compare_and_swap_4((uint32_t *) addr, old, _new); + #if (CAA_BITS_PER_LONG == 64) + case 8: +- return __sync_val_compare_and_swap_8(addr, old, _new); ++ return __sync_val_compare_and_swap_8((uint64_t *) addr, old, _new); + #endif + } + _uatomic_link_error(); +@@ -100,20 +100,20 @@ + switch (len) { + #ifdef UATOMIC_HAS_ATOMIC_BYTE + case 1: +- __sync_and_and_fetch_1(addr, val); ++ __sync_and_and_fetch_1((uint8_t *) addr, val); + return; + #endif + #ifdef UATOMIC_HAS_ATOMIC_SHORT + case 2: +- __sync_and_and_fetch_2(addr, val); ++ __sync_and_and_fetch_2((uint16_t *) addr, val); + return; + #endif + case 4: +- __sync_and_and_fetch_4(addr, val); ++ __sync_and_and_fetch_4((uint32_t *) addr, val); + return; + #if (CAA_BITS_PER_LONG == 64) + case 8: +- __sync_and_and_fetch_8(addr, val); ++ __sync_and_and_fetch_8((uint64_t *) addr, val); + return; + #endif + } +@@ -139,20 +139,20 @@ + switch (len) { + #ifdef UATOMIC_HAS_ATOMIC_BYTE + case 1: +- __sync_or_and_fetch_1(addr, val); ++ __sync_or_and_fetch_1((uint8_t *) addr, val); + return; + #endif + #ifdef UATOMIC_HAS_ATOMIC_SHORT + case 2: +- __sync_or_and_fetch_2(addr, val); ++ __sync_or_and_fetch_2((uint16_t *) addr, val); + return; + #endif + case 4: +- __sync_or_and_fetch_4(addr, val); ++ __sync_or_and_fetch_4((uint32_t *) addr, val); + return; + #if (CAA_BITS_PER_LONG == 64) + case 8: +- __sync_or_and_fetch_8(addr, val); ++ __sync_or_and_fetch_8((uint64_t *) addr, val); + return; + #endif + } +@@ -180,17 +180,17 @@ + switch (len) { + #ifdef UATOMIC_HAS_ATOMIC_BYTE + case 1: +- return __sync_add_and_fetch_1(addr, val); ++ return __sync_add_and_fetch_1((uint8_t *) addr, val); + #endif + #ifdef UATOMIC_HAS_ATOMIC_SHORT + case 2: +- return __sync_add_and_fetch_2(addr, val); ++ return __sync_add_and_fetch_2((uint16_t *) addr, val); + #endif + case 4: +- return __sync_add_and_fetch_4(addr, val); ++ return __sync_add_and_fetch_4((uint32_t *) addr, val); + #if (CAA_BITS_PER_LONG == 64) + case 8: +- return __sync_add_and_fetch_8(addr, val); 
++ return __sync_add_and_fetch_8((uint64_t *) addr, val); + #endif + } + _uatomic_link_error(); diff --git a/eng/common/cross/arm64/sources.list.bionic b/eng/common/cross/arm64/sources.list.bionic new file mode 100644 index 0000000..2109557 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.bionic @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.trusty b/eng/common/cross/arm64/sources.list.trusty new file mode 100644 index 0000000..07d8f88 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.trusty @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse \ No newline at end of file diff --git a/eng/common/cross/arm64/sources.list.vivid b/eng/common/cross/arm64/sources.list.vivid new file mode 100644 index 0000000..0b1215e --- /dev/null +++ b/eng/common/cross/arm64/sources.list.vivid @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ vivid main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ vivid-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ vivid-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ vivid-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-security main restricted universe multiverse \ No newline at end of file diff --git a/eng/common/cross/arm64/sources.list.wily b/eng/common/cross/arm64/sources.list.wily new file mode 100644 index 0000000..e23d1e0 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.wily @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ wily main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ wily main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ wily-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ wily-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-backports main restricted + +deb 
http://ports.ubuntu.com/ubuntu-ports/ wily-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-security main restricted universe multiverse \ No newline at end of file diff --git a/eng/common/cross/arm64/sources.list.xenial b/eng/common/cross/arm64/sources.list.xenial new file mode 100644 index 0000000..eacd86b --- /dev/null +++ b/eng/common/cross/arm64/sources.list.xenial @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse \ No newline at end of file diff --git a/eng/common/cross/arm64/sources.list.zesty b/eng/common/cross/arm64/sources.list.zesty new file mode 100644 index 0000000..ea2c14a --- /dev/null +++ b/eng/common/cross/arm64/sources.list.zesty @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse diff --git a/eng/common/cross/armel/sources.list.jessie b/eng/common/cross/armel/sources.list.jessie new file mode 100644 index 0000000..3d9c305 --- /dev/null +++ b/eng/common/cross/armel/sources.list.jessie @@ -0,0 +1,3 @@ +# Debian (jessie) # Stable +deb http://ftp.debian.org/debian/ jessie main contrib non-free +deb-src http://ftp.debian.org/debian/ jessie main contrib non-free diff --git a/eng/common/cross/armel/tizen-build-rootfs.sh b/eng/common/cross/armel/tizen-build-rootfs.sh new file mode 100644 index 0000000..87c48e7 --- /dev/null +++ b/eng/common/cross/armel/tizen-build-rootfs.sh @@ -0,0 +1,44 @@ +#!/usr/bin/env bash +set -e + +__ARM_SOFTFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +__TIZEN_CROSSDIR="$__ARM_SOFTFP_CrossDir/tizen" + +if [[ -z "$ROOTFS_DIR" ]]; then + echo "ROOTFS_DIR is not defined." + exit 1; +fi + +# Clean-up (TODO-Cleanup: We may already delete $ROOTFS_DIR at ./cross/build-rootfs.sh.) 
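This script is normally driven by eng/common/cross/build-rootfs.sh (armel + tizen), but it can also be run on its own. A sketch of a direct invocation, assuming curl, xmllint and sha256sum are available; the path is illustrative, and since the directory is unmounted and deleted before being repopulated it should be a scratch location:

```bash
# ROOTFS_DIR is required; it is wiped and rebuilt from the fetched Tizen RPMs.
sudo ROOTFS_DIR=$HOME/tizen-scratch-rootfs ./eng/common/cross/armel/tizen-build-rootfs.sh
```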
+# hk0110 +if [ -d "$ROOTFS_DIR" ]; then + umount $ROOTFS_DIR/* + rm -rf $ROOTFS_DIR +fi + +TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp +mkdir -p $TIZEN_TMP_DIR + +# Download files +echo ">>Start downloading files" +VERBOSE=1 $__ARM_SOFTFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR +echo "<>Start constructing Tizen rootfs" +TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm` +cd $ROOTFS_DIR +for f in $TIZEN_RPM_FILES; do + rpm2cpio $f | cpio -idm --quiet +done +echo "<>Start configuring Tizen rootfs" +rm ./usr/lib/libunwind.so +ln -s libunwind.so.8 ./usr/lib/libunwind.so +ln -sfn asm-arm ./usr/include/asm +patch -p1 < $__TIZEN_CROSSDIR/tizen.patch +echo "</dev/null; then + VERBOSE=0 +fi + +Log() +{ + if [ $VERBOSE -ge $1 ]; then + echo ${@:2} + fi +} + +Inform() +{ + Log 1 -e "\x1B[0;34m$@\x1B[m" +} + +Debug() +{ + Log 2 -e "\x1B[0;32m$@\x1B[m" +} + +Error() +{ + >&2 Log 0 -e "\x1B[0;31m$@\x1B[m" +} + +Fetch() +{ + URL=$1 + FILE=$2 + PROGRESS=$3 + if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then + CURL_OPT="--progress-bar" + else + CURL_OPT="--silent" + fi + curl $CURL_OPT $URL > $FILE +} + +hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; } +hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; } +hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; } + +TMPDIR=$1 +if [ ! -d $TMPDIR ]; then + TMPDIR=./tizen_tmp + Debug "Create temporary directory : $TMPDIR" + mkdir -p $TMPDIR +fi + +TIZEN_URL=http://download.tizen.org/releases/milestone/tizen +BUILD_XML=build.xml +REPOMD_XML=repomd.xml +PRIMARY_XML=primary.xml +TARGET_URL="http://__not_initialized" + +Xpath_get() +{ + XPATH_RESULT='' + XPATH=$1 + XML_FILE=$2 + RESULT=$(xmllint --xpath $XPATH $XML_FILE) + if [[ -z ${RESULT// } ]]; then + Error "Can not find target from $XML_FILE" + Debug "Xpath = $XPATH" + exit 1 + fi + XPATH_RESULT=$RESULT +} + +fetch_tizen_pkgs_init() +{ + TARGET=$1 + PROFILE=$2 + Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE" + + TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs + if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi + mkdir -p $TMP_PKG_DIR + + PKG_URL=$TIZEN_URL/$PROFILE/latest + + BUILD_XML_URL=$PKG_URL/$BUILD_XML + TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML + TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML + TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML + TMP_PRIMARYGZ=${TMP_PRIMARY}.gz + + Fetch $BUILD_XML_URL $TMP_BUILD + + Debug "fetch $BUILD_XML_URL to $TMP_BUILD" + + TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()" + Xpath_get $TARGET_XPATH $TMP_BUILD + TARGET_PATH=$XPATH_RESULT + TARGET_URL=$PKG_URL/$TARGET_PATH + + REPOMD_URL=$TARGET_URL/repodata/repomd.xml + PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)' + + Fetch $REPOMD_URL $TMP_REPOMD + + Debug "fetch $REPOMD_URL to $TMP_REPOMD" + + Xpath_get $PRIMARY_XPATH $TMP_REPOMD + PRIMARY_XML_PATH=$XPATH_RESULT + PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH + + Fetch $PRIMARY_URL $TMP_PRIMARYGZ + + Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ" + + gunzip $TMP_PRIMARYGZ + + Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY" +} + +fetch_tizen_pkgs() +{ + ARCH=$1 + PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)' + + 
PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())' + + for pkg in ${@:2} + do + Inform "Fetching... $pkg" + XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + PKG_PATH=$XPATH_RESULT + + XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + CHECKSUM=$XPATH_RESULT + + PKG_URL=$TARGET_URL/$PKG_PATH + PKG_FILE=$(basename $PKG_PATH) + PKG_PATH=$TMPDIR/$PKG_FILE + + Debug "Download $PKG_URL to $PKG_PATH" + Fetch $PKG_URL $PKG_PATH true + + echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null + if [ $? -ne 0 ]; then + Error "Fail to fetch $PKG_URL to $PKG_PATH" + Debug "Checksum = $CHECKSUM" + exit 1 + fi + done +} + +Inform "Initialize arm base" +fetch_tizen_pkgs_init standard base +Inform "fetch common packages" +fetch_tizen_pkgs armv7l gcc glibc glibc-devel libicu libicu-devel +fetch_tizen_pkgs noarch linux-glibc-devel +Inform "fetch coreclr packages" +fetch_tizen_pkgs armv7l lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel tizen-release lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu +Inform "fetch corefx packages" +fetch_tizen_pkgs armv7l libcom_err libcom_err-devel zlib zlib-devel libopenssl libopenssl-devel krb5 krb5-devel libcurl libcurl-devel + +Inform "Initialize standard unified" +fetch_tizen_pkgs_init standard unified +Inform "fetch corefx packages" +fetch_tizen_pkgs armv7l gssdp gssdp-devel + diff --git a/eng/common/cross/armel/tizen/tizen-dotnet.ks b/eng/common/cross/armel/tizen/tizen-dotnet.ks new file mode 100644 index 0000000..506d455 --- /dev/null +++ b/eng/common/cross/armel/tizen/tizen-dotnet.ks @@ -0,0 +1,50 @@ +lang en_US.UTF-8 +keyboard us +timezone --utc Asia/Seoul + +part / --fstype="ext4" --size=3500 --ondisk=mmcblk0 --label rootfs --fsoptions=defaults,noatime + +rootpw tizen +desktop --autologinuser=root +user --name root --groups audio,video --password 'tizen' + +repo --name=standard --baseurl=http://download.tizen.org/releases/milestone/tizen/unified/latest/repos/standard/packages/ --ssl_verify=no +repo --name=base --baseurl=http://download.tizen.org/releases/milestone/tizen/base/latest/repos/standard/packages/ --ssl_verify=no + +%packages +tar +gzip + +sed +grep +gawk +perl + +binutils +findutils +util-linux +lttng-ust +userspace-rcu +procps-ng +tzdata +ca-certificates + + +### Core FX +libicu +libunwind +iputils +zlib +krb5 +libcurl +libopenssl + +%end + +%post + +### Update /tmp privilege +chmod 777 /tmp +#################################### + +%end diff --git a/eng/common/cross/armel/tizen/tizen.patch b/eng/common/cross/armel/tizen/tizen.patch new file mode 100644 index 0000000..d223427 --- /dev/null +++ b/eng/common/cross/armel/tizen/tizen.patch @@ -0,0 +1,18 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. 
*/ + OUTPUT_FORMAT(elf32-littlearm) +-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) ) +diff -u -r a/usr/lib/libpthread.so b/usr/lib/libpthread.so +--- a/usr/lib/libpthread.so 2016-12-30 23:00:19.408951841 +0900 ++++ b/usr/lib/libpthread.so 2016-12-30 23:00:39.068951801 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. */ + OUTPUT_FORMAT(elf32-littlearm) +-GROUP ( /lib/libpthread.so.0 /usr/lib/libpthread_nonshared.a ) ++GROUP ( libpthread.so.0 libpthread_nonshared.a ) diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh new file mode 100644 index 0000000..adceda8 --- /dev/null +++ b/eng/common/cross/build-android-rootfs.sh @@ -0,0 +1,137 @@ +#!/usr/bin/env bash +set -e +__NDK_Version=r14 + +usage() +{ + echo "Creates a toolchain and sysroot used for cross-compiling for Android." + echo. + echo "Usage: $0 [BuildArch] [ApiLevel]" + echo. + echo "BuildArch is the target architecture of Android. Currently only arm64 is supported." + echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html" + echo. + echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior" + echo "by setting the TOOLCHAIN_DIR environment variable" + echo. + echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation," + echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK." + echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.21-arm64. This file is to replace '/etc/os-release', which is not available for Android." + exit 1 +} + +__ApiLevel=21 # The minimum platform for arm64 is API level 21 +__BuildArch=arm64 +__AndroidArch=aarch64 +__AndroidToolchain=aarch64-linux-android + +for i in "$@" + do + lowerI="$(echo $i | awk '{print tolower($0)}')" + case $lowerI in + -?|-h|--help) + usage + exit 1 + ;; + arm64) + __BuildArch=arm64 + __AndroidArch=aarch64 + __AndroidToolchain=aarch64-linux-android + ;; + arm) + __BuildArch=arm + __AndroidArch=arm + __AndroidToolchain=arm-linux-androideabi + ;; + *[0-9]) + __ApiLevel=$i + ;; + *) + __UnprocessedBuildArgs="$__UnprocessedBuildArgs $i" + ;; + esac +done + +# Obtain the location of the bash script to figure out where the root of the repo is. +__CrossDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +__Android_Cross_Dir="$__CrossDir/android-rootfs" +__NDK_Dir="$__Android_Cross_Dir/android-ndk-$__NDK_Version" +__libunwind_Dir="$__Android_Cross_Dir/libunwind" +__lldb_Dir="$__Android_Cross_Dir/lldb" +__ToolchainDir="$__Android_Cross_Dir/toolchain/$__BuildArch" + +if [[ -n "$TOOLCHAIN_DIR" ]]; then + __ToolchainDir=$TOOLCHAIN_DIR +fi + +if [[ -n "$NDK_DIR" ]]; then + __NDK_Dir=$NDK_DIR +fi + +echo "Target API level: $__ApiLevel" +echo "Target architecture: $__BuildArch" +echo "NDK location: $__NDK_Dir" +echo "Target Toolchain location: $__ToolchainDir" + +# Download the NDK if required +if [ ! 
-d $__NDK_Dir ]; then + echo Downloading the NDK into $__NDK_Dir + mkdir -p $__NDK_Dir + wget -nv -nc --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__Android_Cross_Dir/android-ndk-$__NDK_Version-linux-x86_64.zip + unzip -q $__Android_Cross_Dir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__Android_Cross_Dir +fi + +if [ ! -d $__lldb_Dir ]; then + mkdir -p $__lldb_Dir + echo Downloading LLDB into $__lldb_Dir + wget -nv -nc --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O $__Android_Cross_Dir/lldb-2.3.3614996-linux-x86_64.zip + unzip -q $__Android_Cross_Dir/lldb-2.3.3614996-linux-x86_64.zip -d $__lldb_Dir +fi + +# Create the RootFS for both arm64 as well as aarch +rm -rf $__Android_Cross_Dir/toolchain + +echo Generating the $__BuildArch toolchain +$__NDK_Dir/build/tools/make_standalone_toolchain.py --arch $__BuildArch --api $__ApiLevel --install-dir $__ToolchainDir + +# Install the required packages into the toolchain +# TODO: Add logic to get latest pkg version instead of specific version number +rm -rf $__Android_Cross_Dir/deb/ +rm -rf $__Android_Cross_Dir/tmp + +mkdir -p $__Android_Cross_Dir/deb/ +mkdir -p $__Android_Cross_Dir/tmp/$arch/ +wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libicu_60.2_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libicu_60.2_$__AndroidArch.deb +wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libicu-dev_60.2_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libicu-dev_60.2_$__AndroidArch.deb + +wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-glob-dev_0.4_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-glob-dev_0.4_$__AndroidArch.deb +wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-glob_0.4_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-glob_0.4_$__AndroidArch.deb +wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-support-dev_22_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-support-dev_22_$__AndroidArch.deb +wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-support_22_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-support_22_$__AndroidArch.deb +wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/liblzma-dev_5.2.3_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/liblzma-dev_5.2.3_$__AndroidArch.deb +wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/liblzma_5.2.3_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/liblzma_5.2.3_$__AndroidArch.deb +wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libunwind-dev_1.2.20170304_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libunwind-dev_1.2.20170304_$__AndroidArch.deb +wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libunwind_1.2.20170304_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libunwind_1.2.20170304_$__AndroidArch.deb + +echo Unpacking Termux packages +dpkg -x $__Android_Cross_Dir/deb/libicu_60.2_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ +dpkg -x $__Android_Cross_Dir/deb/libicu-dev_60.2_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ +dpkg -x $__Android_Cross_Dir/deb/libandroid-glob-dev_0.4_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ +dpkg -x $__Android_Cross_Dir/deb/libandroid-glob_0.4_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ +dpkg -x 
$__Android_Cross_Dir/deb/libandroid-support-dev_22_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ +dpkg -x $__Android_Cross_Dir/deb/libandroid-support_22_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ +dpkg -x $__Android_Cross_Dir/deb/liblzma-dev_5.2.3_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ +dpkg -x $__Android_Cross_Dir/deb/liblzma_5.2.3_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ +dpkg -x $__Android_Cross_Dir/deb/libunwind-dev_1.2.20170304_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ +dpkg -x $__Android_Cross_Dir/deb/libunwind_1.2.20170304_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/ + +cp -R $__Android_Cross_Dir/tmp/$__AndroidArch/data/data/com.termux/files/usr/* $__ToolchainDir/sysroot/usr/ + +# Generate platform file for build.sh script to assign to __DistroRid +echo "Generating platform file..." + +echo "RID=android.21-arm64" > $__ToolchainDir/sysroot/android_platform +echo Now run: +echo CONFIG_DIR=\`realpath cross/android/$__BuildArch\` ROOTFS_DIR=\`realpath $__ToolchainDir/sysroot\` ./build.sh cross $__BuildArch skipgenerateversion skipnuget cmakeargs -DENABLE_LLDBPLUGIN=0 + diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh new file mode 100644 index 0000000..805948c --- /dev/null +++ b/eng/common/cross/build-rootfs.sh @@ -0,0 +1,210 @@ +#!/usr/bin/env bash + +usage() +{ + echo "Usage: $0 [BuildArch] [LinuxCodeName] [lldbx.y] [--skipunmount]" + echo "BuildArch can be: arm(default), armel, arm64, x86" + echo "LinuxCodeName - optional, Code name for Linux, can be: trusty(default), vivid, wily, xenial, zesty, bionic, alpine. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen." + echo "lldbx.y - optional, LLDB version, can be: lldb3.6(default), lldb3.8, lldb3.9, lldb4.0, no-lldb. Ignored for alpine" + echo "--skipunmount - optional, will skip the unmount of rootfs folder." 
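A typical invocation matching the usage text above, assuming debootstrap and qemu-user-static are installed and the script is run with root privileges (the ROOTFS_DIR path is illustrative; it defaults to eng/common/cross/rootfs/<BuildArch>):

```bash
# arm rootfs based on Ubuntu 16.04 (xenial) with the liblldb-3.9 dev package.
sudo ROOTFS_DIR=/opt/rootfs/arm ./eng/common/cross/build-rootfs.sh arm xenial lldb3.9
```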
+ exit 1 +} + +__LinuxCodeName=trusty +__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +__InitialDir=$PWD +__BuildArch=arm +__UbuntuArch=armhf +__UbuntuRepo="http://ports.ubuntu.com/" +__LLDB_Package="lldb-3.6-dev" +__SkipUnmount=0 + +# base development support +__UbuntuPackages="build-essential" + +__AlpinePackages="alpine-base" +__AlpinePackages+=" build-base" +__AlpinePackages+=" linux-headers" +__AlpinePackages+=" lldb-dev" +__AlpinePackages+=" llvm-dev" + +# symlinks fixer +__UbuntuPackages+=" symlinks" + +# CoreCLR and CoreFX dependencies +__UbuntuPackages+=" libicu-dev" +__UbuntuPackages+=" liblttng-ust-dev" +__UbuntuPackages+=" libunwind8-dev" + +__AlpinePackages+=" gettext-dev" +__AlpinePackages+=" icu-dev" +__AlpinePackages+=" libunwind-dev" +__AlpinePackages+=" lttng-ust-dev" + +# CoreFX dependencies +__UbuntuPackages+=" libcurl4-openssl-dev" +__UbuntuPackages+=" libkrb5-dev" +__UbuntuPackages+=" libssl-dev" +__UbuntuPackages+=" zlib1g-dev" + +__AlpinePackages+=" curl-dev" +__AlpinePackages+=" krb5-dev" +__AlpinePackages+=" openssl-dev" +__AlpinePackages+=" zlib-dev" + +__UnprocessedBuildArgs= +for i in "$@" ; do + lowerI="$(echo $i | awk '{print tolower($0)}')" + case $lowerI in + -?|-h|--help) + usage + exit 1 + ;; + arm) + __BuildArch=arm + __UbuntuArch=armhf + __AlpineArch=armhf + __QEMUArch=arm + ;; + arm64) + __BuildArch=arm64 + __UbuntuArch=arm64 + __AlpineArch=aarch64 + __QEMUArch=aarch64 + ;; + armel) + __BuildArch=armel + __UbuntuArch=armel + __UbuntuRepo="http://ftp.debian.org/debian/" + __LinuxCodeName=jessie + ;; + x86) + __BuildArch=x86 + __UbuntuArch=i386 + __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" + ;; + lldb3.6) + __LLDB_Package="lldb-3.6-dev" + ;; + lldb3.8) + __LLDB_Package="lldb-3.8-dev" + ;; + lldb3.9) + __LLDB_Package="liblldb-3.9-dev" + ;; + lldb4.0) + __LLDB_Package="liblldb-4.0-dev" + ;; + no-lldb) + unset __LLDB_Package + ;; + vivid) + if [ "$__LinuxCodeName" != "jessie" ]; then + __LinuxCodeName=vivid + fi + ;; + wily) + if [ "$__LinuxCodeName" != "jessie" ]; then + __LinuxCodeName=wily + fi + ;; + xenial) + if [ "$__LinuxCodeName" != "jessie" ]; then + __LinuxCodeName=xenial + fi + ;; + zesty) + if [ "$__LinuxCodeName" != "jessie" ]; then + __LinuxCodeName=zesty + fi + ;; + bionic) + if [ "$__LinuxCodeName" != "jessie" ]; then + __LinuxCodeName=bionic + fi + ;; + jessie) + __LinuxCodeName=jessie + __UbuntuRepo="http://ftp.debian.org/debian/" + ;; + tizen) + if [ "$__BuildArch" != "armel" ]; then + echo "Tizen is available only for armel." 
+ usage; + exit 1; + fi + __LinuxCodeName= + __UbuntuRepo= + __Tizen=tizen + ;; + alpine) + __LinuxCodeName=alpine + __UbuntuRepo= + ;; + --skipunmount) + __SkipUnmount=1 + ;; + *) + __UnprocessedBuildArgs="$__UnprocessedBuildArgs $i" + ;; + esac +done + +if [ "$__BuildArch" == "armel" ]; then + __LLDB_Package="lldb-3.5-dev" +fi +__UbuntuPackages+=" ${__LLDB_Package:-}" + +__RootfsDir="$__CrossDir/rootfs/$__BuildArch" + +if [[ -n "$ROOTFS_DIR" ]]; then + __RootfsDir=$ROOTFS_DIR +fi + +if [ -d "$__RootfsDir" ]; then + if [ $__SkipUnmount == 0 ]; then + umount $__RootfsDir/* + fi + rm -rf $__RootfsDir +fi + +if [[ "$__LinuxCodeName" == "alpine" ]]; then + __ApkToolsVersion=2.9.1 + __AlpineVersion=3.7 + __ApkToolsDir=$(mktemp -d) + wget https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -P $__ApkToolsDir + tar -xf $__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -C $__ApkToolsDir + mkdir -p $__RootfsDir/usr/bin + cp -v /usr/bin/qemu-$__QEMUArch-static $__RootfsDir/usr/bin + $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \ + -X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main \ + -X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community \ + -X http://dl-cdn.alpinelinux.org/alpine/edge/testing \ + -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \ + add $__AlpinePackages + rm -r $__ApkToolsDir +elif [[ -n $__LinuxCodeName ]]; then + qemu-debootstrap --arch $__UbuntuArch $__LinuxCodeName $__RootfsDir $__UbuntuRepo + cp $__CrossDir/$__BuildArch/sources.list.$__LinuxCodeName $__RootfsDir/etc/apt/sources.list + chroot $__RootfsDir apt-get update + chroot $__RootfsDir apt-get -f -y install + chroot $__RootfsDir apt-get -y install $__UbuntuPackages + chroot $__RootfsDir symlinks -cr /usr + + if [ $__SkipUnmount == 0 ]; then + umount $__RootfsDir/* + fi + + if [[ "$__BuildArch" == "arm" && "$__LinuxCodeName" == "trusty" ]]; then + pushd $__RootfsDir + patch -p1 < $__CrossDir/$__BuildArch/trusty.patch + patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch + popd + fi +elif [ "$__Tizen" == "tizen" ]; then + ROOTFS_DIR=$__RootfsDir $__CrossDir/$__BuildArch/tizen-build-rootfs.sh +else + echo "Unsupported target platform." + usage; + exit 1 +fi diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake new file mode 100644 index 0000000..071d411 --- /dev/null +++ b/eng/common/cross/toolchain.cmake @@ -0,0 +1,138 @@ +set(CROSS_ROOTFS $ENV{ROOTFS_DIR}) + +set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH}) +set(CMAKE_SYSTEM_NAME Linux) +set(CMAKE_SYSTEM_VERSION 1) + +if(TARGET_ARCH_NAME STREQUAL "armel") + set(CMAKE_SYSTEM_PROCESSOR armv7l) + set(TOOLCHAIN "arm-linux-gnueabi") + if("$ENV{__DistroRid}" MATCHES "tizen.*") + set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/6.2.1") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "arm") + set(CMAKE_SYSTEM_PROCESSOR armv7l) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf) + set(TOOLCHAIN "armv6-alpine-linux-musleabihf") + else() + set(TOOLCHAIN "arm-linux-gnueabihf") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "arm64") + set(CMAKE_SYSTEM_PROCESSOR aarch64) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl) + set(TOOLCHAIN "aarch64-alpine-linux-musl") + else() + set(TOOLCHAIN "aarch64-linux-gnu") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "x86") + set(CMAKE_SYSTEM_PROCESSOR i686) + set(TOOLCHAIN "i686-linux-gnu") +else() + message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. 
Only armel, arm, arm64 and x86 are supported!") +endif() + +# Specify include paths +if(TARGET_ARCH_NAME STREQUAL "armel") + if(DEFINED TIZEN_TOOLCHAIN) + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) + include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi) + endif() +endif() + +# add_compile_param - adds only new options without duplicates. +# arg0 - list with result options, arg1 - list with new options. +# arg2 - optional argument, quick summary string for optional using CACHE FORCE mode. +macro(add_compile_param) + if(NOT ${ARGC} MATCHES "^(2|3)$") + message(FATAL_ERROR "Wrong using add_compile_param! Two or three parameters must be given! See add_compile_param description.") + endif() + foreach(OPTION ${ARGV1}) + if(NOT ${ARGV0} MATCHES "${OPTION}($| )") + set(${ARGV0} "${${ARGV0}} ${OPTION}") + if(${ARGC} EQUAL "3") # CACHE FORCE mode + set(${ARGV0} "${${ARGV0}}" CACHE STRING "${ARGV2}" FORCE) + endif() + endif() + endforeach() +endmacro() + +# Specify link flags +add_compile_param(CROSS_LINK_FLAGS "--sysroot=${CROSS_ROOTFS}") +add_compile_param(CROSS_LINK_FLAGS "--gcc-toolchain=${CROSS_ROOTFS}/usr") +add_compile_param(CROSS_LINK_FLAGS "--target=${TOOLCHAIN}") +add_compile_param(CROSS_LINK_FLAGS "-fuse-ld=gold") + +if(TARGET_ARCH_NAME STREQUAL "armel") + if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only + add_compile_param(CROSS_LINK_FLAGS "-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/lib") + add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib") + add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "x86") + add_compile_param(CROSS_LINK_FLAGS "-m32") +endif() + +add_compile_param(CMAKE_EXE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS") +add_compile_param(CMAKE_SHARED_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS") +add_compile_param(CMAKE_MODULE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS") + +# Specify compile options +add_compile_options("--sysroot=${CROSS_ROOTFS}") +add_compile_options("--target=${TOOLCHAIN}") +add_compile_options("--gcc-toolchain=${CROSS_ROOTFS}/usr") + +if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$") + set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN}) + set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN}) + set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN}) +endif() + +if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$") + add_compile_options(-mthumb) + add_compile_options(-mfpu=vfpv3) + if(TARGET_ARCH_NAME STREQUAL "armel") + add_compile_options(-mfloat-abi=softfp) + if(DEFINED TIZEN_TOOLCHAIN) + add_compile_options(-Wno-deprecated-declarations) # compile-time option + add_compile_options(-D__extern_always_inline=inline) # compile-time option + endif() + endif() +elseif(TARGET_ARCH_NAME STREQUAL "x86") + add_compile_options(-m32) + add_compile_options(-Wno-error=unused-command-line-argument) +endif() + +# Set LLDB include and library paths +if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$") + if(TARGET_ARCH_NAME STREQUAL "x86") + set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}") + else() # arm/armel case + set(LLVM_CROSS_DIR "$ENV{LLVM_ARM_HOME}") + endif() + if(LLVM_CROSS_DIR) + set(WITH_LLDB_LIBS "${LLVM_CROSS_DIR}/lib/" CACHE STRING "") + set(WITH_LLDB_INCLUDES "${LLVM_CROSS_DIR}/include" CACHE STRING "") + set(LLDB_H "${WITH_LLDB_INCLUDES}" CACHE STRING "") + set(LLDB 
"${LLVM_CROSS_DIR}/lib/liblldb.so" CACHE STRING "") + else() + if(TARGET_ARCH_NAME STREQUAL "x86") + set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/i386-linux-gnu" CACHE STRING "") + set(CHECK_LLVM_DIR "${CROSS_ROOTFS}/usr/lib/llvm-3.8/include") + if(EXISTS "${CHECK_LLVM_DIR}" AND IS_DIRECTORY "${CHECK_LLVM_DIR}") + set(WITH_LLDB_INCLUDES "${CHECK_LLVM_DIR}") + else() + set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include") + endif() + else() # arm/armel case + set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}" CACHE STRING "") + set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include" CACHE STRING "") + endif() + endif() +endif() + +set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}") +set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) diff --git a/eng/common/cross/x86/sources.list.bionic b/eng/common/cross/x86/sources.list.bionic new file mode 100644 index 0000000..a71ccad --- /dev/null +++ b/eng/common/cross/x86/sources.list.bionic @@ -0,0 +1,11 @@ +deb http://archive.ubuntu.com/ubuntu/ bionic main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ bionic main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/x86/sources.list.trusty b/eng/common/cross/x86/sources.list.trusty new file mode 100644 index 0000000..9b30854 --- /dev/null +++ b/eng/common/cross/x86/sources.list.trusty @@ -0,0 +1,11 @@ +deb http://archive.ubuntu.com/ubuntu/ trusty main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ trusty main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse diff --git a/eng/common/cross/x86/sources.list.vivid b/eng/common/cross/x86/sources.list.vivid new file mode 100644 index 0000000..26d37b2 --- /dev/null +++ b/eng/common/cross/x86/sources.list.vivid @@ -0,0 +1,11 @@ +deb http://archive.ubuntu.com/ubuntu/ vivid main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ vivid main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ vivid-updates main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ vivid-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ vivid-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ vivid-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ vivid-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ vivid-security main restricted universe multiverse diff --git a/eng/common/cross/x86/sources.list.wily 
b/eng/common/cross/x86/sources.list.wily new file mode 100644 index 0000000..c4b0b44 --- /dev/null +++ b/eng/common/cross/x86/sources.list.wily @@ -0,0 +1,11 @@ +deb http://archive.ubuntu.com/ubuntu/ wily main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ wily main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ wily-updates main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ wily-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ wily-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ wily-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ wily-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ wily-security main restricted universe multiverse diff --git a/eng/common/cross/x86/sources.list.xenial b/eng/common/cross/x86/sources.list.xenial new file mode 100644 index 0000000..ad9c5a0 --- /dev/null +++ b/eng/common/cross/x86/sources.list.xenial @@ -0,0 +1,11 @@ +deb http://archive.ubuntu.com/ubuntu/ xenial main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ xenial main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse diff --git a/eng/common/darc-init.ps1 b/eng/common/darc-init.ps1 new file mode 100644 index 0000000..af182d8 --- /dev/null +++ b/eng/common/darc-init.ps1 @@ -0,0 +1,21 @@ +$verbosity = "m" +. $PSScriptRoot\tools.ps1 + +function InstallDarcCli { + $darcCliPackageName = "microsoft.dotnet.darc" + $dotnet = "$env:DOTNET_INSTALL_DIR\dotnet.exe" + $toolList = Invoke-Expression "& `"$dotnet`" tool list -g" + + if ($toolList -like "*$darcCliPackageName*") { + Invoke-Expression "& `"$dotnet`" tool uninstall $darcCliPackageName -g" + } + + $toolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk' + + Write-Host "Installing Darc CLI version $toolsetVersion..." + Write-Host "You may need to restart your command window if this is the first dotnet tool you have installed." + Invoke-Expression "& `"$dotnet`" tool install $darcCliPackageName --version $toolsetVersion -v $verbosity -g" +} + +InitializeTools +InstallDarcCli diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh new file mode 100644 index 0000000..a0c733a --- /dev/null +++ b/eng/common/darc-init.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" + +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" +verbosity=m + +. 
"$scriptroot/tools.sh" + +function InstallDarcCli { + local darc_cli_package_name="microsoft.dotnet.darc" + local uninstall_command=`$DOTNET_INSTALL_DIR/dotnet tool uninstall $darc_cli_package_name -g` + local tool_list=$($DOTNET_INSTALL_DIR/dotnet tool list -g) + if [[ $tool_list = *$darc_cli_package_name* ]]; then + echo $($DOTNET_INSTALL_DIR/dotnet tool uninstall $darc_cli_package_name -g) + fi + + ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk" + local toolset_version=$_ReadGlobalVersion + + echo "Installing Darc CLI version $toolset_version..." + echo "You may need to restart your command shell if this is the first dotnet tool you have installed." + echo $($DOTNET_INSTALL_DIR/dotnet tool install $darc_cli_package_name --version $toolset_version -v $verbosity -g) +} + +InitializeTools +InstallDarcCli diff --git a/eng/common/helixpublish.proj b/eng/common/helixpublish.proj new file mode 100644 index 0000000..df2ad59 --- /dev/null +++ b/eng/common/helixpublish.proj @@ -0,0 +1,15 @@ + + + + + %(Identity) + + + + + + $(WorkItemDirectory) + $(WorkItemCommand) + + + diff --git a/eng/common/init-tools-native.cmd b/eng/common/init-tools-native.cmd new file mode 100644 index 0000000..438cd54 --- /dev/null +++ b/eng/common/init-tools-native.cmd @@ -0,0 +1,3 @@ +@echo off +powershell -NoProfile -NoLogo -ExecutionPolicy ByPass -command "& """%~dp0init-tools-native.ps1""" %*" +exit /b %ErrorLevel% \ No newline at end of file diff --git a/eng/common/init-tools-native.ps1 b/eng/common/init-tools-native.ps1 new file mode 100644 index 0000000..e25c60f --- /dev/null +++ b/eng/common/init-tools-native.ps1 @@ -0,0 +1,128 @@ +<# +.SYNOPSIS +Entry point script for installing native tools + +.DESCRIPTION +Reads $RepoRoot\global.json file to determine native assets to install +and executes installers for those tools + +.PARAMETER BaseUri +Base file directory or Url from which to acquire tool archives + +.PARAMETER InstallDirectory +Directory to install native toolset. 
This is a command-line override for the default +Install directory precedence order: +- InstallDirectory command-line override +- NETCOREENG_INSTALL_DIRECTORY environment variable +- (default) %USERPROFILE%/.netcoreeng/native + +.PARAMETER Clean +Switch specifying to not install anything, but cleanup native asset folders + +.PARAMETER Force +Clean and then install tools + +.PARAMETER DownloadRetries +Total number of retry attempts + +.PARAMETER RetryWaitTimeInSeconds +Wait time between retry attempts in seconds + +.PARAMETER GlobalJsonFile +File path to global.json file + +.NOTES +#> +[CmdletBinding(PositionalBinding=$false)] +Param ( + [string] $BaseUri = "https://netcorenativeassets.blob.core.windows.net/resource-packages/external", + [string] $InstallDirectory, + [switch] $Clean = $False, + [switch] $Force = $False, + [int] $DownloadRetries = 5, + [int] $RetryWaitTimeInSeconds = 30, + [string] $GlobalJsonFile = "$PSScriptRoot\..\..\global.json" +) + +Set-StrictMode -version 2.0 +$ErrorActionPreference="Stop" + +Import-Module -Name (Join-Path $PSScriptRoot "native\CommonLibrary.psm1") + +try { + # Define verbose switch if undefined + $Verbose = $VerbosePreference -Eq "Continue" + + $EngCommonBaseDir = Join-Path $PSScriptRoot "native\" + $NativeBaseDir = $InstallDirectory + if (!$NativeBaseDir) { + $NativeBaseDir = CommonLibrary\Get-NativeInstallDirectory + } + $Env:CommonLibrary_NativeInstallDir = $NativeBaseDir + $InstallBin = Join-Path $NativeBaseDir "bin" + $InstallerPath = Join-Path $EngCommonBaseDir "install-tool.ps1" + + # Process tools list + Write-Host "Processing $GlobalJsonFile" + If (-Not (Test-Path $GlobalJsonFile)) { + Write-Host "Unable to find '$GlobalJsonFile'" + exit 0 + } + $NativeTools = Get-Content($GlobalJsonFile) -Raw | + ConvertFrom-Json | + Select-Object -Expand "native-tools" -ErrorAction SilentlyContinue + if ($NativeTools) { + $NativeTools.PSObject.Properties | ForEach-Object { + $ToolName = $_.Name + $ToolVersion = $_.Value + $LocalInstallerCommand = $InstallerPath + $LocalInstallerCommand += " -ToolName $ToolName" + $LocalInstallerCommand += " -InstallPath $InstallBin" + $LocalInstallerCommand += " -BaseUri $BaseUri" + $LocalInstallerCommand += " -CommonLibraryDirectory $EngCommonBaseDir" + $LocalInstallerCommand += " -Version $ToolVersion" + + if ($Verbose) { + $LocalInstallerCommand += " -Verbose" + } + if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') { + if($Force) { + $LocalInstallerCommand += " -Force" + } + } + if ($Clean) { + $LocalInstallerCommand += " -Clean" + } + + Write-Verbose "Installing $ToolName version $ToolVersion" + Write-Verbose "Executing '$LocalInstallerCommand'" + Invoke-Expression "$LocalInstallerCommand" + if ($LASTEXITCODE -Ne "0") { + Write-Error "Execution failed" + exit 1 + } + } + } + else { + Write-Host "No native tools defined in global.json" + exit 0 + } + + if ($Clean) { + exit 0 + } + if (Test-Path $InstallBin) { + Write-Host "Native tools are available from" (Convert-Path -Path $InstallBin) + Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)" + } + else { + Write-Error "Native tools install directory does not exist, installation failed" + exit 1 + } + exit 0 +} +catch { + Write-Host $_ + Write-Host $_.Exception + exit 1 +} diff --git a/eng/common/init-tools-native.sh b/eng/common/init-tools-native.sh new file mode 100644 index 0000000..54b70f6 --- /dev/null +++ b/eng/common/init-tools-native.sh @@ -0,0 +1,145 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" +scriptroot="$( cd -P "$( dirname 
"$source" )" && pwd )" + +base_uri='https://netcorenativeassets.blob.core.windows.net/resource-packages/external' +install_directory='' +clean=false +force=false +download_retries=5 +retry_wait_time_seconds=30 +global_json_file="${scriptroot}/../../global.json" +declare -A native_assets + +. $scriptroot/native/common-library.sh + +while (($# > 0)); do + lowerI="$(echo $1 | awk '{print tolower($0)}')" + case $lowerI in + --baseuri) + base_uri=$2 + shift 2 + ;; + --installdirectory) + install_directory=$2 + shift 2 + ;; + --clean) + clean=true + shift 1 + ;; + --force) + force=true + shift 1 + ;; + --downloadretries) + download_retries=$2 + shift 2 + ;; + --retrywaittimeseconds) + retry_wait_time_seconds=$2 + shift 2 + ;; + --help) + echo "Common settings:" + echo " --installdirectory Directory to install native toolset." + echo " This is a command-line override for the default" + echo " Install directory precedence order:" + echo " - InstallDirectory command-line override" + echo " - NETCOREENG_INSTALL_DIRECTORY environment variable" + echo " - (default) %USERPROFILE%/.netcoreeng/native" + echo "" + echo " --clean Switch specifying not to install anything, but cleanup native asset folders" + echo " --force Clean and then install tools" + echo " --help Print help and exit" + echo "" + echo "Advanced settings:" + echo " --baseuri Base URI for where to download native tools from" + echo " --downloadretries Number of times a download should be attempted" + echo " --retrywaittimeseconds Wait time between download attempts" + echo "" + exit 0 + ;; + esac +done + +function ReadGlobalJsonNativeTools { + # Get the native-tools section from the global.json. + local native_tools_section=$(cat $global_json_file | awk '/"native-tools"/,/}/') + # Only extract the contents of the object. + local native_tools_list=$(echo $native_tools_section | awk -F"[{}]" '{print $2}') + native_tools_list=${native_tools_list//[\" ]/} + native_tools_list=${native_tools_list//,/$'\n'} + + local old_IFS=$IFS + while read -r line; do + # Lines are of the form: 'tool:version' + IFS=: + while read -r key value; do + native_assets[$key]=$value + done <<< "$line" + done <<< "$native_tools_list" + IFS=$old_IFS + + return 0; +} + +native_base_dir=$install_directory +if [[ -z $install_directory ]]; then + native_base_dir=$(GetNativeInstallDirectory) +fi + +install_bin="${native_base_dir}/bin" + +ReadGlobalJsonNativeTools + +if [[ ${#native_assets[@]} -eq 0 ]]; then + echo "No native tools defined in global.json" + exit 0; +else + native_installer_dir="$scriptroot/native" + for tool in "${!native_assets[@]}" + do + tool_version=${native_assets[$tool]} + installer_name="install-$tool.sh" + installer_command="$native_installer_dir/$installer_name" + installer_command+=" --baseuri $base_uri" + installer_command+=" --installpath $install_bin" + installer_command+=" --version $tool_version" + + if [[ $force = true ]]; then + installer_command+=" --force" + fi + + if [[ $clean = true ]]; then + installer_command+=" --clean" + fi + + echo "Installing $tool version $tool_version" + echo "Executing '$installer_command'" + $installer_command + + if [[ $? != 0 ]]; then + echo "Execution Failed" >&2 + exit 1 + fi + done +fi + +if [[ ! 
-z $clean ]]; then + exit 0 +fi + +if [[ -d $install_bin ]]; then + echo "Native tools are available from $install_bin" + if [[ !-z BUILD_BUILDNUMBER ]]; then + echo "##vso[task.prependpath]$install_bin" + fi +else + echo "Native tools install directory does not exist, installation failed" >&2 + exit 1 +fi + +exit 0 + diff --git a/eng/common/msbuild.ps1 b/eng/common/msbuild.ps1 new file mode 100644 index 0000000..43b837f --- /dev/null +++ b/eng/common/msbuild.ps1 @@ -0,0 +1,23 @@ +[CmdletBinding(PositionalBinding=$false)] +Param( + [string] $verbosity = "minimal", + [bool] $warnaserror = $true, + [bool] $nodereuse = $true, + [switch] $ci, + [switch] $prepareMachine, + [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs +) + +. $PSScriptRoot\tools.ps1 + +try { + InitializeTools + MSBuild @extraArgs + ExitWithExitCode $lastExitCode +} +catch { + Write-Host $_ + Write-Host $_.Exception + Write-Host $_.ScriptStackTrace + ExitWithExitCode 1 +} diff --git a/eng/common/msbuild.sh b/eng/common/msbuild.sh new file mode 100644 index 0000000..b102448 --- /dev/null +++ b/eng/common/msbuild.sh @@ -0,0 +1,55 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" + +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +verbosity='minimal' +warnaserror=true +nodereuse=true +prepare_machine=false +extraargs='' + +while (($# > 0)); do + lowerI="$(echo $1 | awk '{print tolower($0)}')" + case $lowerI in + --verbosity) + verbosity=$2 + shift 2 + ;; + --warnaserror) + warnaserror=$2 + shift 2 + ;; + --nodereuse) + nodereuse=$2 + shift 2 + ;; + --ci) + ci=true + shift 1 + ;; + --preparemachine) + prepare_machine=true + shift 1 + ;; + *) + extraargs="$extraargs $1" + shift 1 + ;; + esac +done + +. "$scriptroot/tools.sh" + +InitializeTools +MSBuild $extraargs +ExitWithExitCode $? diff --git a/eng/common/native/CommonLibrary.psm1 b/eng/common/native/CommonLibrary.psm1 new file mode 100644 index 0000000..f286ae0 --- /dev/null +++ b/eng/common/native/CommonLibrary.psm1 @@ -0,0 +1,358 @@ +<# +.SYNOPSIS +Helper module to install an archive to a directory + +.DESCRIPTION +Helper module to download and extract an archive to a specified directory + +.PARAMETER Uri +Uri of artifact to download + +.PARAMETER InstallDirectory +Directory to extract artifact contents to + +.PARAMETER Force +Force download / extraction if file or contents already exist. Default = False + +.PARAMETER DownloadRetries +Total number of retry attempts. Default = 5 + +.PARAMETER RetryWaitTimeInSeconds +Wait time between retry attempts in seconds. 
Default = 30 + +.NOTES +Returns False if download or extraction fail, True otherwise +#> +function DownloadAndExtract { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $Uri, + [Parameter(Mandatory=$True)] + [string] $InstallDirectory, + [switch] $Force = $False, + [int] $DownloadRetries = 5, + [int] $RetryWaitTimeInSeconds = 30 + ) + # Define verbose switch if undefined + $Verbose = $VerbosePreference -Eq "Continue" + + $TempToolPath = CommonLibrary\Get-TempPathFilename -Path $Uri + + # Download native tool + $DownloadStatus = CommonLibrary\Get-File -Uri $Uri ` + -Path $TempToolPath ` + -DownloadRetries $DownloadRetries ` + -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds ` + -Force:$Force ` + -Verbose:$Verbose + + if ($DownloadStatus -Eq $False) { + Write-Error "Download failed" + return $False + } + + # Extract native tool + $UnzipStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath ` + -OutputDirectory $InstallDirectory ` + -Force:$Force ` + -Verbose:$Verbose + + if ($UnzipStatus -Eq $False) { + Write-Error "Unzip failed" + return $False + } + return $True +} + +<# +.SYNOPSIS +Download a file, retry on failure + +.DESCRIPTION +Download specified file and retry if attempt fails + +.PARAMETER Uri +Uri of file to download. If Uri is a local path, the file will be copied instead of downloaded + +.PARAMETER Path +Path to download or copy uri file to + +.PARAMETER Force +Overwrite existing file if present. Default = False + +.PARAMETER DownloadRetries +Total number of retry attempts. Default = 5 + +.PARAMETER RetryWaitTimeInSeconds +Wait time between retry attempts in seconds Default = 30 + +#> +function Get-File { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $Uri, + [Parameter(Mandatory=$True)] + [string] $Path, + [int] $DownloadRetries = 5, + [int] $RetryWaitTimeInSeconds = 30, + [switch] $Force = $False + ) + $Attempt = 0 + + if ($Force) { + if (Test-Path $Path) { + Remove-Item $Path -Force + } + } + if (Test-Path $Path) { + Write-Host "File '$Path' already exists, skipping download" + return $True + } + + $DownloadDirectory = Split-Path -ErrorAction Ignore -Path "$Path" -Parent + if (-Not (Test-Path $DownloadDirectory)) { + New-Item -path $DownloadDirectory -force -itemType "Directory" | Out-Null + } + + if (Test-Path -IsValid -Path $Uri) { + Write-Verbose "'$Uri' is a file path, copying file to '$Path'" + Copy-Item -Path $Uri -Destination $Path + return $? + } + else { + Write-Verbose "Downloading $Uri" + while($Attempt -Lt $DownloadRetries) + { + try { + Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $Path + Write-Verbose "Downloaded to '$Path'" + return $True + } + catch { + $Attempt++ + if ($Attempt -Lt $DownloadRetries) { + $AttemptsLeft = $DownloadRetries - $Attempt + Write-Warning "Download failed, $AttemptsLeft attempts remaining, will retry in $RetryWaitTimeInSeconds seconds" + Start-Sleep -Seconds $RetryWaitTimeInSeconds + } + else { + Write-Error $_ + Write-Error $_.Exception + } + } + } + } + + return $False +} + +<# +.SYNOPSIS +Generate a shim for a native tool + +.DESCRIPTION +Creates a wrapper script (shim) that passes arguments forward to native tool assembly + +.PARAMETER ShimName +The name of the shim + +.PARAMETER ShimDirectory +The directory where shims are stored + +.PARAMETER ToolFilePath +Path to file that shim forwards to + +.PARAMETER Force +Replace shim if already present. 
Default = False + +.NOTES +Returns $True if generating shim succeeds, $False otherwise +#> +function New-ScriptShim { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $ShimName, + [Parameter(Mandatory=$True)] + [string] $ShimDirectory, + [Parameter(Mandatory=$True)] + [string] $ToolFilePath, + [Parameter(Mandatory=$True)] + [string] $BaseUri, + [switch] $Force + ) + try { + Write-Verbose "Generating '$ShimName' shim" + + if (-Not (Test-Path $ToolFilePath)){ + Write-Error "Specified tool file path '$ToolFilePath' does not exist" + return $False + } + + # WinShimmer is a small .NET Framework program that creates .exe shims to bootstrapped programs + # Many of the checks for installed programs expect a .exe extension for Windows tools, rather + # than a .bat or .cmd file. + # Source: https://github.com/dotnet/arcade/tree/master/src/WinShimmer + if (-Not (Test-Path "$ShimDirectory\WinShimmer\winshimmer.exe")) { + $InstallStatus = DownloadAndExtract -Uri "$BaseUri/windows/winshimmer/WinShimmer.zip" ` + -InstallDirectory $ShimDirectory\WinShimmer ` + -Force:$Force ` + -DownloadRetries 2 ` + -RetryWaitTimeInSeconds 5 ` + -Verbose:$Verbose + } + + if ((Test-Path (Join-Path $ShimDirectory "$ShimName.exe"))) { + Write-Host "$ShimName.exe already exists; replacing..." + Remove-Item (Join-Path $ShimDirectory "$ShimName.exe") + } + + Invoke-Expression "$ShimDirectory\WinShimmer\winshimmer.exe $ShimName $ToolFilePath $ShimDirectory" + return $True + } + catch { + Write-Host $_ + Write-Host $_.Exception + return $False + } +} + +<# +.SYNOPSIS +Returns the machine architecture of the host machine + +.NOTES +Returns 'x64' on 64 bit machines + Returns 'x86' on 32 bit machines +#> +function Get-MachineArchitecture { + $ProcessorArchitecture = $Env:PROCESSOR_ARCHITECTURE + $ProcessorArchitectureW6432 = $Env:PROCESSOR_ARCHITEW6432 + if($ProcessorArchitecture -Eq "X86") + { + if(($ProcessorArchitectureW6432 -Eq "") -Or + ($ProcessorArchitectureW6432 -Eq "X86")) { + return "x86" + } + $ProcessorArchitecture = $ProcessorArchitectureW6432 + } + if (($ProcessorArchitecture -Eq "AMD64") -Or + ($ProcessorArchitecture -Eq "IA64") -Or + ($ProcessorArchitecture -Eq "ARM64")) { + return "x64" + } + return "x86" +} + +<# +.SYNOPSIS +Get the name of a temporary folder under the native install directory +#> +function Get-TempDirectory { + return Join-Path (Get-NativeInstallDirectory) "temp/" +} + +function Get-TempPathFilename { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $Path + ) + $TempDir = CommonLibrary\Get-TempDirectory + $TempFilename = Split-Path $Path -leaf + $TempPath = Join-Path $TempDir $TempFilename + return $TempPath +} + +<# +.SYNOPSIS +Returns the base directory to use for native tool installation + +.NOTES +Returns the value of the NETCOREENG_INSTALL_DIRECTORY if that environment variable +is set, or otherwise returns an install directory under the %USERPROFILE% +#> +function Get-NativeInstallDirectory { + $InstallDir = $Env:NETCOREENG_INSTALL_DIRECTORY + if (!$InstallDir) { + $InstallDir = Join-Path $Env:USERPROFILE ".netcoreeng/native/" + } + return $InstallDir +} + +<# +.SYNOPSIS +Unzip an archive + +.DESCRIPTION +Powershell module to unzip an archive to a specified directory + +.PARAMETER ZipPath (Required) +Path to archive to unzip + +.PARAMETER OutputDirectory (Required) +Output directory for archive contents + +.PARAMETER Force +Overwrite output directory contents if they already exist + +.NOTES +- 
Returns True and does not perform an extraction if output directory already exists but Overwrite is not True. +- Returns True if unzip operation is successful +- Returns False if Overwrite is True and it is unable to remove contents of OutputDirectory +- Returns False if unable to extract zip archive +#> +function Expand-Zip { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $ZipPath, + [Parameter(Mandatory=$True)] + [string] $OutputDirectory, + [switch] $Force + ) + + Write-Verbose "Extracting '$ZipPath' to '$OutputDirectory'" + try { + if ((Test-Path $OutputDirectory) -And (-Not $Force)) { + Write-Host "Directory '$OutputDirectory' already exists, skipping extract" + return $True + } + if (Test-Path $OutputDirectory) { + Write-Verbose "'Force' is 'True', but '$OutputDirectory' exists, removing directory" + Remove-Item $OutputDirectory -Force -Recurse + if ($? -Eq $False) { + Write-Error "Unable to remove '$OutputDirectory'" + return $False + } + } + if (-Not (Test-Path $OutputDirectory)) { + New-Item -path $OutputDirectory -Force -itemType "Directory" | Out-Null + } + + Add-Type -assembly "system.io.compression.filesystem" + [io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$OutputDirectory") + if ($? -Eq $False) { + Write-Error "Unable to extract '$ZipPath'" + return $False + } + } + catch { + Write-Host $_ + Write-Host $_.Exception + + return $False + } + return $True +} + +export-modulemember -function DownloadAndExtract +export-modulemember -function Expand-Zip +export-modulemember -function Get-File +export-modulemember -function Get-MachineArchitecture +export-modulemember -function Get-NativeInstallDirectory +export-modulemember -function Get-TempDirectory +export-modulemember -function Get-TempPathFilename +export-modulemember -function New-ScriptShim diff --git a/eng/common/native/common-library.sh b/eng/common/native/common-library.sh new file mode 100644 index 0000000..271bddf --- /dev/null +++ b/eng/common/native/common-library.sh @@ -0,0 +1,168 @@ +#!/usr/bin/env bash + +function GetNativeInstallDirectory { + local install_dir + + if [[ -z $NETCOREENG_INSTALL_DIRECTORY ]]; then + install_dir=$HOME/.netcoreeng/native/ + else + install_dir=$NETCOREENG_INSTALL_DIRECTORY + fi + + echo $install_dir + return 0 +} + +function GetTempDirectory { + + echo $(GetNativeInstallDirectory)temp/ + return 0 +} + +function ExpandZip { + local zip_path=$1 + local output_directory=$2 + local force=${3:-false} + + echo "Extracting $zip_path to $output_directory" + if [[ -d $output_directory ]] && [[ $force = false ]]; then + echo "Directory '$output_directory' already exists, skipping extract" + return 0 + fi + + if [[ -d $output_directory ]]; then + echo "'Force flag enabled, but '$output_directory' exists. Removing directory" + rm -rf $output_directory + if [[ $? != 0 ]]; then + echo Unable to remove '$output_directory'>&2 + return 1 + fi + fi + + echo "Creating directory: '$output_directory'" + mkdir -p $output_directory + + echo "Extracting archive" + tar -xf $zip_path -C $output_directory + if [[ $? 
!= 0 ]]; then + echo "Unable to extract '$zip_path'" >&2 + return 1 + fi + + return 0 +} + +function GetCurrentOS { + local unameOut="$(uname -s)" + case $unameOut in + Linux*) echo "Linux";; + Darwin*) echo "MacOS";; + esac + return 0 +} + +function GetFile { + local uri=$1 + local path=$2 + local force=${3:-false} + local download_retries=${4:-5} + local retry_wait_time_seconds=${5:-30} + + if [[ -f $path ]]; then + if [[ $force = false ]]; then + echo "File '$path' already exists. Skipping download" + return 0 + else + rm -rf $path + fi + fi + + if [[ -f $uri ]]; then + echo "'$uri' is a file path, copying file to '$path'" + cp $uri $path + return $? + fi + + echo "Downloading $uri" + # Use curl if available, otherwise use wget + if command -v curl > /dev/null; then + curl "$uri" -sSL --retry $download_retries --retry-delay $retry_wait_time_seconds --create-dirs -o "$path" --fail + else + wget -q -O "$path" "$uri" --tries="$download_retries" + fi + + return $? +} + +function GetTempPathFileName { + local path=$1 + + local temp_dir=$(GetTempDirectory) + local temp_file_name=$(basename $path) + echo $temp_dir$temp_file_name + return 0 +} + +function DownloadAndExtract { + local uri=$1 + local installDir=$2 + local force=${3:-false} + local download_retries=${4:-5} + local retry_wait_time_seconds=${5:-30} + + local temp_tool_path=$(GetTempPathFileName $uri) + + echo "downloading to: $temp_tool_path" + + # Download file + GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds + if [[ $? != 0 ]]; then + echo "Failed to download '$uri' to '$temp_tool_path'." >&2 + return 1 + fi + + # Extract File + echo "extracting from $temp_tool_path to $installDir" + ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds + if [[ $? != 0 ]]; then + echo "Failed to extract '$temp_tool_path' to '$installDir'." >&2 + return 1 + fi + + return 0 +} + +function NewScriptShim { + local shimpath=$1 + local tool_file_path=$2 + local force=${3:-false} + + echo "Generating '$shimpath' shim" + if [[ -f $shimpath ]]; then + if [[ $force = false ]]; then + echo "File '$shimpath' already exists." >&2 + return 1 + else + rm -rf $shimpath + fi + fi + + if [[ ! -f $tool_file_path ]]; then + echo "Specified tool file path:'$tool_file_path' does not exist" >&2 + return 1 + fi + + local shim_contents=$'#!/usr/bin/env bash\n' + shim_contents+="SHIMARGS="$'$1\n' + shim_contents+="$tool_file_path"$' $SHIMARGS\n' + + # Write shim file + echo "$shim_contents" > $shimpath + + chmod +x $shimpath + + echo "Finished generating shim '$shimpath'" + + return $? +} + diff --git a/eng/common/native/install-cmake.sh b/eng/common/native/install-cmake.sh new file mode 100644 index 0000000..293af60 --- /dev/null +++ b/eng/common/native/install-cmake.sh @@ -0,0 +1,117 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. 
$scriptroot/common-library.sh + +base_uri= +install_path= +version= +clean=false +force=false +download_retries=5 +retry_wait_time_seconds=30 + +while (($# > 0)); do + lowerI="$(echo $1 | awk '{print tolower($0)}')" + case $lowerI in + --baseuri) + base_uri=$2 + shift 2 + ;; + --installpath) + install_path=$2 + shift 2 + ;; + --version) + version=$2 + shift 2 + ;; + --clean) + clean=true + shift 1 + ;; + --force) + force=true + shift 1 + ;; + --downloadretries) + download_retries=$2 + shift 2 + ;; + --retrywaittimeseconds) + retry_wait_time_seconds=$2 + shift 2 + ;; + --help) + echo "Common settings:" + echo " --baseuri Base file directory or Url wrom which to acquire tool archives" + echo " --installpath Base directory to install native tool to" + echo " --clean Don't install the tool, just clean up the current install of the tool" + echo " --force Force install of tools even if they previously exist" + echo " --help Print help and exit" + echo "" + echo "Advanced settings:" + echo " --downloadretries Total number of retry attempts" + echo " --retrywaittimeseconds Wait time between retry attempts in seconds" + echo "" + exit 0 + ;; + esac +done + +tool_name="cmake" +tool_os=$(GetCurrentOS) +tool_folder=$(echo $tool_os | awk '{print tolower($0)}') +tool_arch="x86_64" +tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch" +tool_install_directory="$install_path/$tool_name/$version" +tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name" +shim_path="$install_path/$tool_name.sh" +uri="${base_uri}/$tool_folder/cmake/$tool_name_moniker.tar.gz" + +# Clean up tool and installers +if [[ $clean = true ]]; then + echo "Cleaning $tool_install_directory" + if [[ -d $tool_install_directory ]]; then + rm -rf $tool_install_directory + fi + + echo "Cleaning $shim_path" + if [[ -f $shim_path ]]; then + rm -rf $shim_path + fi + + tool_temp_path=$(GetTempPathFileName $uri) + echo "Cleaning $tool_temp_path" + if [[ -f $tool_temp_path ]]; then + rm -rf $tool_temp_path + fi + + exit 0 +fi + +# Install tool +if [[ -f $tool_file_path ]] && [[ $force = false ]]; then + echo "$tool_name ($version) already exists, skipping install" + exit 0 +fi + +DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds + +if [[ $? != 0 ]]; then + echo "Installation failed" >&2 + exit 1 +fi + +# Generate Shim +# Always rewrite shims so that we are referencing the expected version +NewScriptShim $shim_path $tool_file_path true + +if [[ $? 
!= 0 ]]; then + echo "Shim generation failed" >&2 + exit 1 +fi + +exit 0 \ No newline at end of file diff --git a/eng/common/native/install-tool.ps1 b/eng/common/native/install-tool.ps1 new file mode 100644 index 0000000..635ab3f --- /dev/null +++ b/eng/common/native/install-tool.ps1 @@ -0,0 +1,130 @@ +<# +.SYNOPSIS +Install native tool + +.DESCRIPTION +Install cmake native tool from Azure blob storage + +.PARAMETER InstallPath +Base directory to install native tool to + +.PARAMETER BaseUri +Base file directory or Url from which to acquire tool archives + +.PARAMETER CommonLibraryDirectory +Path to folder containing common library modules + +.PARAMETER Force +Force install of tools even if they previously exist + +.PARAMETER Clean +Don't install the tool, just clean up the current install of the tool + +.PARAMETER DownloadRetries +Total number of retry attempts + +.PARAMETER RetryWaitTimeInSeconds +Wait time between retry attempts in seconds + +.NOTES +Returns 0 if install succeeds, 1 otherwise +#> +[CmdletBinding(PositionalBinding=$false)] +Param ( + [Parameter(Mandatory=$True)] + [string] $ToolName, + [Parameter(Mandatory=$True)] + [string] $InstallPath, + [Parameter(Mandatory=$True)] + [string] $BaseUri, + [Parameter(Mandatory=$True)] + [string] $Version, + [string] $CommonLibraryDirectory = $PSScriptRoot, + [switch] $Force = $False, + [switch] $Clean = $False, + [int] $DownloadRetries = 5, + [int] $RetryWaitTimeInSeconds = 30 +) + +# Import common library modules +Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1") + +try { + # Define verbose switch if undefined + $Verbose = $VerbosePreference -Eq "Continue" + + $Arch = CommonLibrary\Get-MachineArchitecture + $ToolOs = "win64" + if($Arch -Eq "x32") { + $ToolOs = "win32" + } + $ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch" + $ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\" + $Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip" + $ShimPath = Join-Path $InstallPath "$ToolName.exe" + + if ($Clean) { + Write-Host "Cleaning $ToolInstallDirectory" + if (Test-Path $ToolInstallDirectory) { + Remove-Item $ToolInstallDirectory -Force -Recurse + } + Write-Host "Cleaning $ShimPath" + if (Test-Path $ShimPath) { + Remove-Item $ShimPath -Force + } + $ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri + Write-Host "Cleaning $ToolTempPath" + if (Test-Path $ToolTempPath) { + Remove-Item $ToolTempPath -Force + } + exit 0 + } + + # Install tool + if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) { + Write-Verbose "$ToolName ($Version) already exists, skipping install" + } + else { + $InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri ` + -InstallDirectory $ToolInstallDirectory ` + -Force:$Force ` + -DownloadRetries $DownloadRetries ` + -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds ` + -Verbose:$Verbose + + if ($InstallStatus -Eq $False) { + Write-Error "Installation failed" + exit 1 + } + } + + $ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName } + if (@($ToolFilePath).Length -Gt 1) { + Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))" + exit 1 + } elseif (@($ToolFilePath).Length -Lt 1) { + Write-Error "$ToolName was not found in $ToolFilePath." 
+ exit 1 + } + + # Generate shim + # Always rewrite shims so that we are referencing the expected version + $GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName ` + -ShimDirectory $InstallPath ` + -ToolFilePath "$ToolFilePath" ` + -BaseUri $BaseUri ` + -Force:$Force ` + -Verbose:$Verbose + + if ($GenerateShimStatus -Eq $False) { + Write-Error "Generate shim failed" + return 1 + } + + exit 0 +} +catch { + Write-Host $_ + Write-Host $_.Exception + exit 1 +} diff --git a/eng/common/templates/phases/base.yml b/eng/common/templates/phases/base.yml new file mode 100644 index 0000000..92a7933 --- /dev/null +++ b/eng/common/templates/phases/base.yml @@ -0,0 +1,108 @@ +parameters: + # Optional: Clean sources before building + clean: true + + # Optional: Git fetch depth + fetchDepth: '' + + # Optional: name of the phase (not specifying phase name may cause name collisions) + name: '' + + # Required: A defined YAML queue + queue: {} + + # Required: build steps + steps: [] + + # Optional: variables + variables: {} + + ## Telemetry variables + + # Optional: enable sending telemetry + # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix + # _HelixBuildConfig - differentiate between Debug, Release, other + # _HelixSource - Example: build/product + # _HelixType - Example: official/dotnet/arcade/$(Build.SourceBranch) + enableTelemetry: false + + # Optional: Enable installing Microbuild plugin + # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix + # _TeamName - the name of your team + # _SignType - 'test' or 'real' + enableMicrobuild: false + +# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, +# and some (Microbuild) should only be applied to non-PR cases for internal builds. + +phases: +- phase: ${{ parameters.name }} + + queue: ${{ parameters.queue }} + + ${{ if ne(parameters.variables, '') }}: + variables: + ${{ insert }}: ${{ parameters.variables }} + + steps: + - checkout: self + clean: ${{ parameters.clean }} + ${{ if ne(parameters.fetchDepth, '') }}: + fetchDepth: ${{ parameters.fetchDepth }} + + - ${{ if eq(parameters.enableTelemetry, 'true') }}: + - template: /eng/common/templates/steps/telemetry-start.yml + parameters: + buildConfig: $(_HelixBuildConfig) + helixSource: $(_HelixSource) + helixType: $(_HelixType) + + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + # Internal only resource, and Microbuild signing shouldn't be applied to PRs. 
+ - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), ne(variables['Build.Repository.Provider'], 'GitHub')) }}: + - task: MicroBuildSigningPlugin@2 + displayName: Install MicroBuild plugin + inputs: + signType: $(_SignType) + zipSources: false + feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json + + env: + TeamName: $(_TeamName) + continueOnError: false + condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + + # Run provided build steps + - ${{ parameters.steps }} + + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + # Internal only resources + - ${{ if and(ne(variables['System.TeamProject'], 'public'), ne(variables['Build.Repository.Provider'], 'GitHub')) }}: + - task: MicroBuildCleanup@1 + displayName: Execute Microbuild cleanup tasks + condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + env: + TeamName: $(_TeamName) + + - ${{ if eq(parameters.enableTelemetry, 'true') }}: + - template: /eng/common/templates/steps/telemetry-end.yml + parameters: + helixSource: $(_HelixSource) + helixType: $(_HelixType) + + - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), ne(variables['Build.Repository.Provider'], 'GitHub')) }}: + - task: CopyFiles@2 + displayName: Gather Asset Manifests + inputs: + SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest' + TargetFolder: '$(Build.StagingDirectory)/AssetManifests' + continueOnError: false + condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true')) + - task: PublishBuildArtifacts@1 + displayName: Push Asset Manifests + inputs: + PathtoPublish: '$(Build.StagingDirectory)/AssetManifests' + PublishLocation: Container + ArtifactName: AssetManifests + continueOnError: false + condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true')) diff --git a/eng/common/templates/phases/publish-build-assets.yml b/eng/common/templates/phases/publish-build-assets.yml new file mode 100644 index 0000000..c7ed5ca --- /dev/null +++ b/eng/common/templates/phases/publish-build-assets.yml @@ -0,0 +1,37 @@ +parameters: + dependsOn: '' + queue: {} + configuration: 'Debug' + condition: succeeded() + continueOnError: false +phases: + - phase: Asset_Registry_Publish + displayName: Publish to Build Asset Registry + dependsOn: ${{ parameters.dependsOn }} + queue: ${{ parameters.queue }} + variables: + config: ${{ parameters.configuration }} + steps: + - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), ne(variables['Build.Repository.Provider'], 'GitHub')) }}: + - task: DownloadBuildArtifacts@0 + displayName: Download artifact + inputs: + artifactName: AssetManifests + downloadPath: '$(Build.StagingDirectory)/Download' + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + - task: AzureKeyVault@1 + inputs: + azureSubscription: 'DotNet-Engineering-Services_KeyVault' + KeyVaultName: EngKeyVault + SecretsFilter: 'MaestroAccessToken' + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + - script: eng\common\publishbuildassets.cmd + /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' + /p:BuildAssetRegistryToken=$(MaestroAccessToken) + 
/p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com + /p:Configuration=$(config) + displayName: Publish Build Assets + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} diff --git a/eng/common/templates/steps/build-reason.yml b/eng/common/templates/steps/build-reason.yml new file mode 100644 index 0000000..eba5810 --- /dev/null +++ b/eng/common/templates/steps/build-reason.yml @@ -0,0 +1,12 @@ +# build-reason.yml +# Description: runs steps if build.reason condition is valid. conditions is a string of valid build reasons +# to include steps (',' separated). +parameters: + conditions: '' + steps: [] + +steps: + - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}: + - ${{ parameters.steps }} + - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}: + - ${{ parameters.steps }} diff --git a/eng/common/templates/steps/helix-publish.yml b/eng/common/templates/steps/helix-publish.yml new file mode 100644 index 0000000..f145467 --- /dev/null +++ b/eng/common/templates/steps/helix-publish.yml @@ -0,0 +1,44 @@ +parameters: + HelixSource: 'pr/dotnet-github-anon-kaonashi-bot' + HelixType: ̓'tests/default' + HelixBuild: $(Build.BuildNumber) + HelixTargetQueues: '' + HelixAccessToken: '' + HelixPreCommands: '' + HelixPostCommands: '' + WorkItemDirectory: '' + WorkItemCommand: '' + CorrelationPayloadDirectory: '' + IncludeDotNetCli: false + DotNetCliPackageType: '' + DotNetCliVersion: '' + EnableXUnitReporter: false + WaitForWorkItemCompletion: true + condition: succeeded() + continueOnError: false + +steps: + - task: DotNetCoreCLI@2 + inputs: + command: custom + projects: eng/common/helixpublish.proj + custom: msbuild + arguments: '/t:test /p:Language=msbuild' + displayName: Send job to Helix + env: + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ parameters.DotNetCliVersion }} + EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} diff --git a/eng/common/templates/steps/run-on-unix.yml b/eng/common/templates/steps/run-on-unix.yml new file mode 100644 index 0000000..e173381 --- /dev/null +++ b/eng/common/templates/steps/run-on-unix.yml @@ -0,0 +1,7 @@ +parameters: + agentOs: '' + steps: [] + +steps: +- ${{ if ne(parameters.agentOs, 'Windows_NT') }}: + - ${{ parameters.steps }} diff --git a/eng/common/templates/steps/run-on-windows.yml b/eng/common/templates/steps/run-on-windows.yml new file mode 100644 index 0000000..73e7e9c --- /dev/null +++ b/eng/common/templates/steps/run-on-windows.yml @@ -0,0 +1,7 @@ +parameters: + agentOs: '' + steps: [] + +steps: +- ${{ if eq(parameters.agentOs, 'Windows_NT') }}: + - ${{ parameters.steps }} diff --git 
a/eng/common/templates/steps/run-script-ifequalelse.yml b/eng/common/templates/steps/run-script-ifequalelse.yml new file mode 100644 index 0000000..3d1242f --- /dev/null +++ b/eng/common/templates/steps/run-script-ifequalelse.yml @@ -0,0 +1,33 @@ +parameters: + # if parameter1 equals parameter 2, run 'ifScript' command, else run 'elsescript' command + parameter1: '' + parameter2: '' + ifScript: '' + elseScript: '' + + # name of script step + name: Script + + # display name of script step + displayName: If-Equal-Else Script + + # environment + env: {} + + # conditional expression for step execution + condition: '' + +steps: +- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}: + - script: ${{ parameters.ifScript }} + name: ${{ parameters.name }} + displayName: ${{ parameters.displayName }} + env: ${{ parameters.env }} + condition: ${{ parameters.condition }} + +- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}: + - script: ${{ parameters.elseScript }} + name: ${{ parameters.name }} + displayName: ${{ parameters.displayName }} + env: ${{ parameters.env }} + condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/eng/common/templates/steps/telemetry-end.yml b/eng/common/templates/steps/telemetry-end.yml new file mode 100644 index 0000000..9cfe360 --- /dev/null +++ b/eng/common/templates/steps/telemetry-end.yml @@ -0,0 +1,67 @@ +parameters: + helixSource: 'undefined_defaulted_in_telemetry.yml' + helixType: 'undefined_defaulted_in_telemetry.yml' + +steps: +- bash: | + if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then + errorCount=0 + else + errorCount=1 + fi + warningCount=0 + + # create a temporary file for curl output + res=`mktemp` + + curlResult=` + curl --verbose --output $res --write-out "%{http_code}"\ + -H 'Content-Type: application/json' \ + -H "X-Helix-Job-Token: $Helix_JobToken" \ + -H 'Content-Length: 0' \ + -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \ + --data-urlencode "errorCount=$errorCount" \ + --data-urlencode "warningCount=$warningCount"` + curlStatus=$? 
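+    # Descriptive note: curl's own exit code is captured above; the block below
+    # additionally folds non-2xx HTTP status codes into curlStatus so that a
+    # successful transfer with a failing response is still reported as an error.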
+ + if [ $curlStatus -eq 0 ]; then + if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then + curlStatus=$curlResult + fi + fi + + if [ $curlStatus -ne 0 ]; then + echo "Failed to Send Build Finish information" + vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus" + echo "##$vstsLogOutput" + exit 1 + fi + displayName: Send Unix Build End Telemetry + env: + # defined via VSTS variables in start-job.sh + Helix_JobToken: $(Helix_JobToken) + Helix_WorkItemId: $(Helix_WorkItemId) + condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT')) +- powershell: | + if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) { + $ErrorCount = 0 + } else { + $ErrorCount = 1 + } + $WarningCount = 0 + + try { + Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" ` + -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken } + } + catch { + Write-Error $_ + Write-Error $_.Exception + exit 1 + } + displayName: Send Windows Build End Telemetry + env: + # defined via VSTS variables in start-job.ps1 + Helix_JobToken: $(Helix_JobToken) + Helix_WorkItemId: $(Helix_WorkItemId) + condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT')) diff --git a/eng/common/templates/steps/telemetry-start.yml b/eng/common/templates/steps/telemetry-start.yml new file mode 100644 index 0000000..109b4f5d --- /dev/null +++ b/eng/common/templates/steps/telemetry-start.yml @@ -0,0 +1,154 @@ +parameters: + helixSource: 'undefined_defaulted_in_telemetry.yml' + helixType: 'undefined_defaulted_in_telemetry.yml' + buildConfig: '' + +steps: +- ${{ if and(not(eq(variables['System.TeamProject'], 'public')), ne(variables['Build.Repository.Provider'], 'GitHub')) }}: + - task: AzureKeyVault@1 + inputs: + azureSubscription: 'HelixProd_KeyVault' + KeyVaultName: HelixProdKV + SecretsFilter: 'HelixApiAccessToken' + condition: always() +- bash: | + # create a temporary file + jobInfo=`mktemp` + + # write job info content to temporary file + cat > $jobInfo <' | Set-Content $proj + MSBuild $proj /t:__WriteToolsetLocation /clp:None /bl:$ToolsetRestoreLog /p:__ToolsetLocationOutputFile=$toolsetLocationFile + + if ($lastExitCode -ne 0) { + Write-Host "Failed to restore toolset (exit code '$lastExitCode'). See log: $ToolsetRestoreLog" -ForegroundColor Red + ExitWithExitCode $lastExitCode + } + + $path = Get-Content $toolsetLocationFile -TotalCount 1 + if (!(Test-Path $path)) { + throw "Invalid toolset path: $path" + } + + $script:ToolsetBuildProj = $path +} + +function InitializeCustomToolset { + if (-not $restore) { + return + } + + $script = Join-Path $EngRoot "restore-toolset.ps1" + + if (Test-Path $script) { + . $script + } +} + +function ExitWithExitCode([int] $exitCode) { + if ($ci -and $prepareMachine) { + Stop-Processes + } + exit $exitCode +} + +function Stop-Processes() { + Write-Host "Killing running build processes..." 
+ Get-Process -Name "msbuild" -ErrorAction SilentlyContinue | Stop-Process + Get-Process -Name "dotnet" -ErrorAction SilentlyContinue | Stop-Process + Get-Process -Name "vbcscompiler" -ErrorAction SilentlyContinue | Stop-Process +} + +function MsBuild() { + $warnaserrorSwitch = if ($warnaserror) { "/warnaserror" } else { "" } + & $buildDriver $buildArgs $warnaserrorSwitch /m /nologo /clp:Summary /v:$verbosity /nr:$nodereuse $args +} + +$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot "..\..") +$EngRoot = Resolve-Path (Join-Path $PSScriptRoot "..") +$ArtifactsDir = Join-Path $RepoRoot "artifacts" +$ToolsetDir = Join-Path $ArtifactsDir "toolset" +$LogDir = Join-Path (Join-Path $ArtifactsDir "log") $configuration +$TempDir = Join-Path (Join-Path $ArtifactsDir "tmp") $configuration +$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot "global.json") | ConvertFrom-Json + +if ($env:NUGET_PACKAGES -eq $null) { + # Use local cache on CI to ensure deterministic build, + # use global cache in dev builds to avoid cost of downloading packages. + $env:NUGET_PACKAGES = if ($ci) { Join-Path $RepoRoot ".packages" } + else { Join-Path $env:UserProfile ".nuget\packages" } +} + +Create-Directory $ToolsetDir +Create-Directory $LogDir + +if ($ci) { + Create-Directory $TempDir + $env:TEMP = $TempDir + $env:TMP = $TempDir +} diff --git a/eng/common/tools.sh b/eng/common/tools.sh new file mode 100644 index 0000000..c7d03ee --- /dev/null +++ b/eng/common/tools.sh @@ -0,0 +1,258 @@ +#!/usr/bin/env bash + +# Stop script if unbound variable found (use ${var:-} if intentional) +set -u + +ci=${ci:-false} +configuration=${configuration:-'Debug'} +nodereuse=${nodereuse:-true} +prepare_machine=${prepare_machine:-false} +restore=${restore:-true} +verbosity=${verbosity:-'minimal'} +warnaserror=${warnaserror:-true} +useInstalledDotNetCli=${useInstalledDotNetCli:-true} + +repo_root="$scriptroot/../.." +eng_root="$scriptroot/.." +artifacts_dir="$repo_root/artifacts" +toolset_dir="$artifacts_dir/toolset" +log_dir="$artifacts_dir/log/$configuration" +temp_dir="$artifacts_dir/tmp/$configuration" + +global_json_file="$repo_root/global.json" +build_driver="" +toolset_build_proj="" + +function ResolvePath { + local path=$1 + + # resolve $path until the file is no longer a symlink + while [[ -h $path ]]; do + local dir="$( cd -P "$( dirname "$path" )" && pwd )" + path="$(readlink "$path")" + + # if $path was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $path != /* ]] && path="$dir/$path" + done + + # return value + _ResolvePath="$path" +} + +# ReadVersionFromJson [json key] +function ReadGlobalVersion { + local key=$1 + + local line=`grep -m 1 "$key" "$global_json_file"` + local pattern="\"$key\" *: *\"(.*)\"" + + if [[ ! 
$line =~ $pattern ]]; then + echo "Error: Cannot find \"$key\" in $global_json_file" >&2 + ExitWithExitCode 1 + fi + + # return value + _ReadGlobalVersion=${BASH_REMATCH[1]} +} + +function InitializeDotNetCli { + local install=$1 + + # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism + export DOTNET_MULTILEVEL_LOOKUP=0 + + # Disable first run since we want to control all package sources + export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1 + + # Source Build uses DotNetCoreSdkDir variable + if [[ -n "${DotNetCoreSdkDir:-}" ]]; then + export DOTNET_INSTALL_DIR="$DotNetCoreSdkDir" + fi + + # Find the first path on $PATH that contains the dotnet.exe + if [[ "$useInstalledDotNetCli" == true && -z "${DOTNET_INSTALL_DIR:-}" ]]; then + local dotnet_path=`command -v dotnet` + if [[ -n "$dotnet_path" ]]; then + ResolvePath "$dotnet_path" + export DOTNET_INSTALL_DIR=`dirname "$_ResolvePath"` + fi + fi + + ReadGlobalVersion "dotnet" + local dotnet_sdk_version=$_ReadGlobalVersion + local dotnet_root="" + + # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version, + # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues. + if [[ -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then + dotnet_root="$DOTNET_INSTALL_DIR" + else + dotnet_root="$repo_root/.dotnet" + export DOTNET_INSTALL_DIR="$dotnet_root" + + if [[ ! -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then + if [[ "$install" == true ]]; then + InstallDotNetSdk "$dotnet_root" "$dotnet_sdk_version" + else + echo "Unable to find dotnet with SDK version '$dotnet_sdk_version'" >&2 + ExitWithExitCode 1 + fi + fi + fi + + # return value + _InitializeDotNetCli="$dotnet_root" +} + +function InstallDotNetSdk { + local root=$1 + local version=$2 + + GetDotNetInstallScript "$root" + local install_script=$_GetDotNetInstallScript + + bash "$install_script" --version $version --install-dir "$root" + local lastexitcode=$? + + if [[ $lastexitcode != 0 ]]; then + echo "Failed to install dotnet SDK (exit code '$lastexitcode')." >&2 + ExitWithExitCode $lastexitcode + fi +} + +function GetDotNetInstallScript { + local root=$1 + local install_script="$root/dotnet-install.sh" + local install_script_url="https://dot.net/v1/dotnet-install.sh" + + if [[ ! -a "$install_script" ]]; then + mkdir -p "$root" + + echo "Downloading '$install_script_url'" + + # Use curl if available, otherwise use wget + if command -v curl > /dev/null; then + curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" + else + wget -q -O "$install_script" "$install_script_url" + fi + fi + + # return value + _GetDotNetInstallScript="$install_script" +} + +function InitializeToolset { + ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk" + + local toolset_version=$_ReadGlobalVersion + local toolset_location_file="$toolset_dir/$toolset_version.txt" + + if [[ -a "$toolset_location_file" ]]; then + local path=`cat "$toolset_location_file"` + if [[ -a "$path" ]]; then + toolset_build_proj="$path" + return + fi + fi + + if [[ "$restore" != true ]]; then + echo "Toolset version $toolsetVersion has not been restored." 
>&2 + ExitWithExitCode 2 + fi + + local toolset_restore_log="$log_dir/ToolsetRestore.binlog" + local proj="$toolset_dir/restore.proj" + + echo '' > "$proj" + + MSBuild "$proj" /t:__WriteToolsetLocation /clp:None /bl:"$toolset_restore_log" /p:__ToolsetLocationOutputFile="$toolset_location_file" + local lastexitcode=$? + + if [[ $lastexitcode != 0 ]]; then + echo "Failed to restore toolset (exit code '$lastexitcode'). See log: $toolset_restore_log" >&2 + ExitWithExitCode $lastexitcode + fi + + toolset_build_proj=`cat "$toolset_location_file"` + + if [[ ! -a "$toolset_build_proj" ]]; then + echo "Invalid toolset path: $toolset_build_proj" >&2 + ExitWithExitCode 3 + fi +} + +function InitializeCustomToolset { + local script="$eng_root/restore-toolset.sh" + + if [[ -a "$script" ]]; then + . "$script" + fi +} + +function ConfigureTools { + local script="$eng_root/configure-toolset.sh" + + if [[ -a "$script" ]]; then + . "$script" + fi +} + +function InitializeTools { + ConfigureTools + + InitializeDotNetCli $restore + build_driver="$_InitializeDotNetCli/dotnet" + + InitializeToolset + InitializeCustomToolset +} + +function ExitWithExitCode { + if [[ "$ci" == true && "$prepare_machine" == true ]]; then + StopProcesses + fi + exit $1 +} + +function StopProcesses { + echo "Killing running build processes..." + pkill -9 "dotnet" + pkill -9 "vbcscompiler" + return 0 +} + +function MSBuild { + local warnaserror_switch="" + if [[ $warnaserror == true ]]; then + warnaserror_switch="/warnaserror" + fi + + "$build_driver" msbuild /m /nologo /clp:Summary /v:$verbosity /nr:$nodereuse $warnaserror_switch "$@" + + return $? +} + +# HOME may not be defined in some scenarios, but it is required by NuGet +if [[ -z $HOME ]]; then + export HOME="$repo_root/artifacts/.home/" + mkdir -p "$HOME" +fi + +if [[ -z ${NUGET_PACKAGES:-} ]]; then + if [[ $ci == true ]]; then + export NUGET_PACKAGES="$repo_root/.packages" + else + export NUGET_PACKAGES="$HOME/.nuget/packages" + fi +fi + +mkdir -p "$toolset_dir" +mkdir -p "$log_dir" + +if [[ $ci == true ]]; then + mkdir -p "$temp_dir" + export TEMP="$temp_dir" + export TMP="$temp_dir" +fi diff --git a/global.json b/global.json index 104bebe..5c68f14 100644 --- a/global.json +++ b/global.json @@ -1,3 +1,13 @@ { - "projects": [ "./tests/src", "./tests/src/Common" ] + "tools": { + "dotnet": "2.1.401" + }, + "native-tools": { + "cmake": "3.11.1", + "python": "2.7.15" + }, + "msbuild-sdks": { + "Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.18529.8", + "Microsoft.DotNet.Helix.Sdk": "1.0.0-beta.18529.8" + } }
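
As a quick illustration of how the pinned versions above are consumed, here is a minimal standalone bash sketch (not part of the patch) that mirrors the grep/regex lookup ReadGlobalVersion in eng/common/tools.sh performs against global.json. The script name (read-versions.sh) and the assumption that it runs from the repository root are illustrative only.

    #!/usr/bin/env bash
    # read-versions.sh - illustrative only; mirrors the ReadGlobalVersion lookup
    # from eng/common/tools.sh without sourcing the rest of the toolset scripts.
    set -u

    global_json_file="global.json"   # assumes the script is run from the repo root

    read_global_version() {
      local key=$1
      # Same approach as tools.sh: take the first line mentioning the key and
      # capture the quoted value that follows the colon.
      local pattern="\"$key\" *: *\"(.*)\""
      local line
      line=$(grep -m 1 "\"$key\"" "$global_json_file") || {
        echo "Cannot find \"$key\" in $global_json_file" >&2
        return 1
      }
      [[ $line =~ $pattern ]] && echo "${BASH_REMATCH[1]}"
    }

    read_global_version "dotnet"                        # 2.1.401
    read_global_version "Microsoft.DotNet.Arcade.Sdk"   # 1.0.0-beta.18529.8
    read_global_version "cmake"                         # 3.11.1

Run against the global.json introduced here, it prints 2.1.401, 1.0.0-beta.18529.8 and 3.11.1, i.e. the dotnet SDK, Arcade SDK and cmake versions that the eng/common scripts above resolve at build time.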