diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 1fd2a48c93..5a77c75a37 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -1,4 +1,10 @@ -# Pipeline YAML for PR +trigger: +- main +- rel/* + +pr: +- main +- rel/* jobs: - job: Windows @@ -165,11 +171,11 @@ jobs: clean: all strategy: matrix: - Ubuntu_18_04: - vmImage: ubuntu-18.04 + Ubuntu_22_04: + vmImage: ubuntu-22.04 pwsh: true - macOS_10_15_Catalina: - vmImage: macOS-10.15 + macOS_11: + vmImage: macOS-11 pwsh: true pool: vmImage: $[ variables['vmImage'] ] diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index a5cb0ad8d9..1e3c7146a8 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -5,45 +5,45 @@ https://dev.azure.com/devdiv/DevDiv/_git/vs-code-coverage eeb5df41e1a9aca42c42ed01f3cde37c221b1266 - + https://github.com/dotnet/diagnostics - d9290918615eff2f0711818558d8d4f653a28898 + e3e1490a23f27a6e0ed30d323035660c3ffc52cd - + https://github.com/dotnet/diagnostics - d9290918615eff2f0711818558d8d4f653a28898 + e3e1490a23f27a6e0ed30d323035660c3ffc52cd - + https://github.com/dotnet/arcade - ccfe6da198c5f05534863bbb1bff66e830e0c6ab + 4f864b2841b3317bdfc516ead0ba6e7856fab575 - + https://github.com/dotnet/arcade - ccfe6da198c5f05534863bbb1bff66e830e0c6ab + 4f864b2841b3317bdfc516ead0ba6e7856fab575 - + https://github.com/dotnet/arcade - ccfe6da198c5f05534863bbb1bff66e830e0c6ab + 4f864b2841b3317bdfc516ead0ba6e7856fab575 - + https://github.com/dotnet/arcade - ccfe6da198c5f05534863bbb1bff66e830e0c6ab + 4f864b2841b3317bdfc516ead0ba6e7856fab575 - + https://github.com/dotnet/arcade - ccfe6da198c5f05534863bbb1bff66e830e0c6ab + 4f864b2841b3317bdfc516ead0ba6e7856fab575 https://github.com/dotnet/arcade-services cd705029f2675970b42f9273ae359d0926c5e815 - + https://github.com/dotnet/roslyn - 4b309dc5400e39a2eea09d82077737c1df5f347a + 50008a99118bdb6091b692c393c1a1a28947f934 https://github.com/dotnet/sourcelink diff --git a/eng/Versions.props b/eng/Versions.props index 81210565d2..de4504d99f 100644 --- a/eng/Versions.props +++ b/eng/Versions.props @@ -2,7 +2,7 @@ - 17.3.1 + 17.3.2 release false @@ -33,12 +33,12 @@ 2.1.0 2.0.0 2.1.0 - 4.1.0-1.21507.14 + 4.3.0-3.22457.6 15.7.2 4.16.1 5.3.0.1 2.3.0 - 9.0.1 + 13.0.1 4.6.0-preview4.19202.2 4.4.0 5.3.0 @@ -54,7 +54,7 @@ 4.5.1 4.4.0 1.4.2 - 4.5.0 + 5.0.0 4.5.1 4.5.1 4.4.0 @@ -62,8 +62,8 @@ 2.4.1 2.0.3 2.4.1 - 7.0.0-beta.22316.2 - 7.0.0-beta.22316.2 + 8.0.0-beta.22456.4 + 8.0.0-beta.22456.4 1.22.0 1.1.2 2.0.0 @@ -75,7 +75,7 @@ 1.7.0 1.1.0-beta.20074.1 1.0.0-beta2-19554-01 - 7.0.0-beta.22316.2 + 8.0.0-beta.22456.4 1.0.0-beta.21272.1 3.8.0-3.20427.2 17.3.6 - 0.2.0-preview.21508.1 - 5.0.0-preview.21508.1 + 0.2.0-preview.22424.1 + 6.0.0-preview.22424.1 + 16.5.2 + 16.3.44 + 16.3.36 diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1 index 18823840b1..6e99723945 100644 --- a/eng/common/SetupNugetSources.ps1 +++ b/eng/common/SetupNugetSources.ps1 @@ -146,22 +146,22 @@ $userName = "dn-bot" # Insert credential nodes for Maestro's private feeds InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -Password $Password +# 3.1 uses a different feed url format so it's handled differently here $dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']") if ($dotnet31Source -ne $null) { AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password AddPackageSource 
-Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password } -$dotnet5Source = $sources.SelectSingleNode("add[@key='dotnet5']") -if ($dotnet5Source -ne $null) { - AddPackageSource -Sources $sources -SourceName "dotnet5-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password - AddPackageSource -Sources $sources -SourceName "dotnet5-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password -} +$dotnetVersions = @('5','6','7') -$dotnet6Source = $sources.SelectSingleNode("add[@key='dotnet6']") -if ($dotnet6Source -ne $null) { - AddPackageSource -Sources $sources -SourceName "dotnet6-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password - AddPackageSource -Sources $sources -SourceName "dotnet6-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password +foreach ($dotnetVersion in $dotnetVersions) { + $feedPrefix = "dotnet" + $dotnetVersion; + $dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']") + if ($dotnetSource -ne $null) { + AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password + AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password + } } $doc.Save($filename) diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh index ad3fb74fd2..8af7d899db 100644 --- a/eng/common/SetupNugetSources.sh +++ b/eng/common/SetupNugetSources.sh @@ -105,53 +105,33 @@ if [ "$?" == "0" ]; then PackageSources+=('dotnet3.1-internal-transport') fi -# Ensure dotnet5-internal and dotnet5-internal-transport are in the packageSources if the public dotnet5 feeds are present -grep -i "" - - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile - fi - PackageSources+=('dotnet5-internal') - - grep -i "" $ConfigFile - if [ "$?" != "0" ]; then - echo "Adding dotnet5-internal-transport to the packageSources." 
- PackageSourcesNodeFooter="" - PackageSourceTemplate="${TB}" - - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile - fi - PackageSources+=('dotnet5-internal-transport') -fi - -# Ensure dotnet6-internal and dotnet6-internal-transport are in the packageSources if the public dotnet6 feeds are present -grep -i "" +DotNetVersions=('5' '6' '7') + +for DotNetVersion in ${DotNetVersions[@]} ; do + FeedPrefix="dotnet${DotNetVersion}"; + grep -i "" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=("$FeedPrefix-internal") - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile - fi - PackageSources+=('dotnet6-internal') + grep -i "" $ConfigFile + if [ "$?" != "0" ]; then + echo "Adding $FeedPrefix-internal-transport to the packageSources." + PackageSourcesNodeFooter="" + PackageSourceTemplate="${TB}" - grep -i "" $ConfigFile - if [ "$?" != "0" ]; then - echo "Adding dotnet6-internal-transport to the packageSources." - PackageSourcesNodeFooter="" - PackageSourceTemplate="${TB}" - - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=("$FeedPrefix-internal-transport") fi - PackageSources+=('dotnet6-internal-transport') -fi +done # I want things split line by line PrevIFS=$IFS diff --git a/eng/common/build.sh b/eng/common/build.sh index 55b298f16c..50af40cdd2 100755 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -19,6 +19,9 @@ usage() echo "Actions:" echo " --restore Restore dependencies (short: -r)" echo " --build Build solution (short: -b)" + echo " --sourceBuild Source-build the solution (short: -sb)" + echo " Will additionally trigger the following actions: --restore, --build, --pack" + echo " If --configuration is not set explicitly, will also set it to 'Release'" echo " --rebuild Rebuild solution" echo " --test Run all unit tests in the solution (short: -t)" echo " --integrationTest Run all integration tests in the solution" @@ -55,6 +58,7 @@ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" restore=false build=false +source_build=false rebuild=false test=false integration_test=false @@ -73,7 +77,7 @@ exclude_ci_binary_log=false pipelines_log=false projects='' -configuration='Debug' +configuration='' prepare_machine=false verbosity='minimal' runtime_source_feed='' @@ -119,6 +123,12 @@ while [[ $# > 0 ]]; do -pack) pack=true ;; + -sourcebuild|-sb) + build=true + source_build=true + restore=true + pack=true + ;; -test|-t) test=true ;; @@ -168,6 +178,10 @@ while [[ $# > 0 ]]; do shift done +if [[ -z "$configuration" ]]; then + if [[ "$source_build" = true ]]; then configuration="Release"; else configuration="Debug"; fi +fi + if [[ "$ci" == true ]]; then pipelines_log=true node_reuse=false @@ -205,6 +219,7 @@ function Build { /p:RepoRoot="$repo_root" \ /p:Restore=$restore \ /p:Build=$build \ + /p:ArcadeBuildFromSource=$source_build \ /p:Rebuild=$rebuild \ /p:Test=$test \ /p:Pack=$pack \ diff --git a/eng/common/cross/arm/sources.list.focal b/eng/common/cross/arm/sources.list.focal new file mode 100644 index 0000000000..4de2600c17 --- /dev/null +++ b/eng/common/cross/arm/sources.list.focal @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe +deb-src 
http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse diff --git a/eng/common/cross/arm/sources.list.jammy b/eng/common/cross/arm/sources.list.jammy new file mode 100644 index 0000000000..6bb0453029 --- /dev/null +++ b/eng/common/cross/arm/sources.list.jammy @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.focal b/eng/common/cross/arm64/sources.list.focal new file mode 100644 index 0000000000..4de2600c17 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.focal @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.jammy b/eng/common/cross/arm64/sources.list.jammy new file mode 100644 index 0000000000..6bb0453029 --- /dev/null +++ b/eng/common/cross/arm64/sources.list.jammy @@ -0,0 +1,11 @@ +deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted + +deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse +deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index 5a59dcff28..5680980fa2 100644 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -5,22 +5,26 @@ set -e usage() { echo "Usage: $0 [BuildArch] [CodeName] 
[lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir ]" - echo "BuildArch can be: arm(default), armel, arm64, x86" + echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86" echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine, alpine3.13 or alpine3.14. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen." echo " for FreeBSD can be: freebsd12, freebsd13" - echo " for illumos can be: illumos." + echo " for illumos can be: illumos" + echo " for Haiku can be: haiku." echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD" echo "llvmx[.y] - optional, LLVM version for LLVM related packages." echo "--skipunmount - optional, will skip the unmount of rootfs folder." echo "--use-mirror - optional, use mirror URL to fetch resources, when available." + echo "--jobs N - optional, restrict to N jobs." exit 1 } __CodeName=xenial __CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) -__InitialDir=$PWD __BuildArch=arm __AlpineArch=armv7 +__FreeBSDArch=arm +__FreeBSDMachineArch=armv7 +__IllumosArch=arm7 __QEMUArch=arm __UbuntuArch=armhf __UbuntuRepo="http://ports.ubuntu.com/" @@ -40,7 +44,7 @@ __AlpinePackages+=" libedit" # symlinks fixer __UbuntuPackages+=" symlinks" -# CoreCLR and CoreFX dependencies +# runtime dependencies __UbuntuPackages+=" libicu-dev" __UbuntuPackages+=" liblttng-ust-dev" __UbuntuPackages+=" libunwind8-dev" @@ -51,7 +55,7 @@ __AlpinePackages+=" libunwind-dev" __AlpinePackages+=" lttng-ust-dev" __AlpinePackages+=" compiler-rt-static" -# CoreFX dependencies +# runtime libraries' dependencies __UbuntuPackages+=" libcurl4-openssl-dev" __UbuntuPackages+=" libkrb5-dev" __UbuntuPackages+=" libssl-dev" @@ -72,26 +76,38 @@ __FreeBSDPackages+=" openssl" __FreeBSDPackages+=" krb5" __FreeBSDPackages+=" terminfo-db" -__IllumosPackages="icu-64.2nb2" -__IllumosPackages+=" mit-krb5-1.16.2nb4" -__IllumosPackages+=" openssl-1.1.1e" -__IllumosPackages+=" zlib-1.2.11" +__IllumosPackages="icu" +__IllumosPackages+=" mit-krb5" +__IllumosPackages+=" openssl" +__IllumosPackages+=" zlib" + +__HaikuPackages="gmp" +__HaikuPackages+=" gmp_devel" +__HaikuPackages+=" krb5" +__HaikuPackages+=" krb5_devel" +__HaikuPackages+=" libiconv" +__HaikuPackages+=" libiconv_devel" +__HaikuPackages+=" llvm12_libunwind" +__HaikuPackages+=" llvm12_libunwind_devel" +__HaikuPackages+=" mpfr" +__HaikuPackages+=" mpfr_devel" # ML.NET dependencies __UbuntuPackages+=" libomp5" __UbuntuPackages+=" libomp-dev" +__Keyring= __UseMirror=0 __UnprocessedBuildArgs= while :; do - if [ $# -le 0 ]; then + if [[ "$#" -le 0 ]]; then break fi - lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" + lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")" case $lowerI in - -?|-h|--help) + -\?|-h|--help) usage exit 1 ;; @@ -101,20 +117,13 @@ while :; do __AlpineArch=armv7 __QEMUArch=arm ;; - armv6) - __BuildArch=armv6 - __UbuntuArch=armhf - __QEMUArch=arm - __UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/" - __CodeName=buster - __LLDB_Package="liblldb-6.0-dev" - __Keyring="/usr/share/keyrings/raspbian-archive-keyring.gpg" - ;; arm64) __BuildArch=arm64 __UbuntuArch=arm64 __AlpineArch=aarch64 __QEMUArch=aarch64 + __FreeBSDArch=arm64 + __FreeBSDMachineArch=aarch64 ;; armel) __BuildArch=armel @@ -122,6 +131,18 @@ while :; do __UbuntuRepo="http://ftp.debian.org/debian/" __CodeName=jessie ;; + armv6) + __BuildArch=armv6 + __UbuntuArch=armhf + __QEMUArch=arm + 
__UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/" + __CodeName=buster + __LLDB_Package="liblldb-6.0-dev" + + if [[ -e "/usr/share/keyrings/raspbian-archive-keyring.gpg" ]]; then + __Keyring="--keyring /usr/share/keyrings/raspbian-archive-keyring.gpg" + fi + ;; ppc64le) __BuildArch=ppc64le __UbuntuArch=ppc64el @@ -131,6 +152,18 @@ while :; do __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp5//') unset __LLDB_Package ;; + riscv64) + __BuildArch=riscv64 + __UbuntuArch=riscv64 + __UbuntuRepo="http://deb.debian.org/debian-ports" + __CodeName=sid + __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libunwind8-dev//') + unset __LLDB_Package + + if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then + __Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring" + fi + ;; s390x) __BuildArch=s390x __UbuntuArch=s390x @@ -140,56 +173,69 @@ while :; do __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp5//') unset __LLDB_Package ;; + x64) + __BuildArch=x64 + __UbuntuArch=amd64 + __FreeBSDArch=amd64 + __FreeBSDMachineArch=amd64 + __illumosArch=x86_64 + __UbuntuRepo= + ;; x86) __BuildArch=x86 __UbuntuArch=i386 __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" ;; - lldb3.6) - __LLDB_Package="lldb-3.6-dev" - ;; - lldb3.8) - __LLDB_Package="lldb-3.8-dev" - ;; - lldb3.9) - __LLDB_Package="liblldb-3.9-dev" - ;; - lldb4.0) - __LLDB_Package="liblldb-4.0-dev" - ;; - lldb5.0) - __LLDB_Package="liblldb-5.0-dev" - ;; - lldb6.0) - __LLDB_Package="liblldb-6.0-dev" + lldb*) + version="${lowerI/lldb/}" + parts=(${version//./ }) + + # for versions > 6.0, lldb has dropped the minor version + if [[ "${parts[0]}" -gt 6 ]]; then + version="${parts[0]}" + fi + + __LLDB_Package="liblldb-${version}-dev" ;; no-lldb) unset __LLDB_Package ;; llvm*) - version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" + version="${lowerI/llvm/}" parts=(${version//./ }) __LLVM_MajorVersion="${parts[0]}" __LLVM_MinorVersion="${parts[1]}" + + # for versions > 6.0, llvm has dropped the minor version if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then __LLVM_MinorVersion=0; fi ;; xenial) # Ubuntu 16.04 - if [ "$__CodeName" != "jessie" ]; then + if [[ "$__CodeName" != "jessie" ]]; then __CodeName=xenial fi ;; zesty) # Ubuntu 17.04 - if [ "$__CodeName" != "jessie" ]; then + if [[ "$__CodeName" != "jessie" ]]; then __CodeName=zesty fi ;; bionic) # Ubuntu 18.04 - if [ "$__CodeName" != "jessie" ]; then + if [[ "$__CodeName" != "jessie" ]]; then __CodeName=bionic fi ;; + focal) # Ubuntu 20.04 + if [[ "$__CodeName" != "jessie" ]]; then + __CodeName=focal + fi + ;; + jammy) # Ubuntu 22.04 + if [[ "$__CodeName" != "jessie" ]]; then + __CodeName=jammy + fi + ;; jessie) # Debian 8 __CodeName=jessie __UbuntuRepo="http://ftp.debian.org/debian/" @@ -205,11 +251,6 @@ while :; do __LLDB_Package="liblldb-6.0-dev" ;; tizen) - if [ "$__BuildArch" != "arm" ] && [ "$__BuildArch" != "armel" ] && [ "$__BuildArch" != "arm64" ] && [ "$__BuildArch" != "x86" ] ; then - echo "Tizen is available only for arm, armel, arm64 and x86." 
- usage; - exit 1; - fi __CodeName= __UbuntuRepo= __Tizen=tizen @@ -228,18 +269,20 @@ while :; do ;; freebsd12) __CodeName=freebsd - __BuildArch=x64 __SkipUnmount=1 ;; freebsd13) __CodeName=freebsd __FreeBSDBase="13.0-RELEASE" __FreeBSDABI="13" - __BuildArch=x64 __SkipUnmount=1 ;; illumos) __CodeName=illumos + __SkipUnmount=1 + ;; + haiku) + __CodeName=haiku __BuildArch=x64 __SkipUnmount=1 ;; @@ -248,11 +291,15 @@ while :; do ;; --rootfsdir|-rootfsdir) shift - __RootfsDir=$1 + __RootfsDir="$1" ;; --use-mirror) __UseMirror=1 ;; + --use-jobs) + shift + MAXJOBS=$1 + ;; *) __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1" ;; @@ -261,81 +308,76 @@ while :; do shift done -if [ -e "$__Keyring" ]; then - __Keyring="--keyring=$__Keyring" -else - __Keyring="" -fi - -if [ "$__BuildArch" == "armel" ]; then +if [[ "$__BuildArch" == "armel" ]]; then __LLDB_Package="lldb-3.5-dev" fi + __UbuntuPackages+=" ${__LLDB_Package:-}" -if [ ! -z "$__LLVM_MajorVersion" ]; then +if [[ -n "$__LLVM_MajorVersion" ]]; then __UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev" fi -if [ -z "$__RootfsDir" ] && [ ! -z "$ROOTFS_DIR" ]; then - __RootfsDir=$ROOTFS_DIR +if [[ -z "$__RootfsDir" && -n "$ROOTFS_DIR" ]]; then + __RootfsDir="$ROOTFS_DIR" fi -if [ -z "$__RootfsDir" ]; then +if [[ -z "$__RootfsDir" ]]; then __RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch" fi -if [ -d "$__RootfsDir" ]; then - if [ $__SkipUnmount == 0 ]; then - umount $__RootfsDir/* || true +if [[ -d "$__RootfsDir" ]]; then + if [[ "$__SkipUnmount" == "0" ]]; then + umount "$__RootfsDir"/* || true fi - rm -rf $__RootfsDir + rm -rf "$__RootfsDir" fi -mkdir -p $__RootfsDir +mkdir -p "$__RootfsDir" __RootfsDir="$( cd "$__RootfsDir" && pwd )" if [[ "$__CodeName" == "alpine" ]]; then __ApkToolsVersion=2.9.1 - __ApkToolsDir=$(mktemp -d) - wget https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -P $__ApkToolsDir - tar -xf $__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -C $__ApkToolsDir - mkdir -p $__RootfsDir/usr/bin - cp -v /usr/bin/qemu-$__QEMUArch-static $__RootfsDir/usr/bin - - $__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \ - -X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main \ - -X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community \ - -U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \ + __ApkToolsDir="$(mktemp -d)" + wget "https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz" -P "$__ApkToolsDir" + tar -xf "$__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz" -C "$__ApkToolsDir" + mkdir -p "$__RootfsDir"/usr/bin + cp -v "/usr/bin/qemu-$__QEMUArch-static" "$__RootfsDir/usr/bin" + + "$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk" \ + -X "http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main" \ + -X "http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community" \ + -U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" --initdb \ add $__AlpinePackages - rm -r $__ApkToolsDir + rm -r "$__ApkToolsDir" elif [[ "$__CodeName" == "freebsd" ]]; then - mkdir -p $__RootfsDir/usr/local/etc - JOBS="$(getconf _NPROCESSORS_ONLN)" - wget -O - https://download.freebsd.org/ftp/releases/amd64/${__FreeBSDBase}/base.txz | tar -C $__RootfsDir -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version - echo "ABI = 
\"FreeBSD:${__FreeBSDABI}:amd64\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > ${__RootfsDir}/usr/local/etc/pkg.conf - echo "FreeBSD: { url: "pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > ${__RootfsDir}/etc/pkg/FreeBSD.conf - mkdir -p $__RootfsDir/tmp + mkdir -p "$__RootfsDir"/usr/local/etc + JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} + wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version + echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf + echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf + mkdir -p "$__RootfsDir"/tmp # get and build package manager - wget -O - https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz | tar -C $__RootfsDir/tmp -zxf - - cd $__RootfsDir/tmp/pkg-${__FreeBSDPkg} + wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf - + cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" # needed for install to succeed - mkdir -p $__RootfsDir/host/etc - ./autogen.sh && ./configure --prefix=$__RootfsDir/host && make -j "$JOBS" && make install - rm -rf $__RootfsDir/tmp/pkg-${__FreeBSDPkg} + mkdir -p "$__RootfsDir"/host/etc + ./autogen.sh && ./configure --prefix="$__RootfsDir"/host && make -j "$JOBS" && make install + rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" # install packages we need. - INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf update - INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages + INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update + INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages elif [[ "$__CodeName" == "illumos" ]]; then mkdir "$__RootfsDir/tmp" pushd "$__RootfsDir/tmp" - JOBS="$(getconf _NPROCESSORS_ONLN)" + JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} echo "Downloading sysroot." wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf - echo "Building binutils. Please wait.." wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf - mkdir build-binutils && cd build-binutils - ../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="x86_64-sun-solaris2.10" --program-prefix="x86_64-illumos-" --with-sysroot="$__RootfsDir" + ../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" make -j "$JOBS" && make install && cd .. echo "Building gcc. Please wait.." 
wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf - @@ -345,7 +387,7 @@ elif [[ "$__CodeName" == "illumos" ]]; then CFLAGS_FOR_TARGET="-fPIC" export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET mkdir build-gcc && cd build-gcc - ../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="x86_64-sun-solaris2.10" --program-prefix="x86_64-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \ + ../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \ --with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \ --disable-libquadmath-support --disable-shared --enable-tls make -j "$JOBS" && make install && cd .. @@ -353,14 +395,18 @@ elif [[ "$__CodeName" == "illumos" ]]; then if [[ "$__UseMirror" == 1 ]]; then BaseUrl=http://pkgsrc.smartos.skylime.net fi - BaseUrl="$BaseUrl"/packages/SmartOS/2020Q1/x86_64/All + BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All" + echo "Downloading manifest" + wget "$BaseUrl" echo "Downloading dependencies." read -ra array <<<"$__IllumosPackages" for package in "${array[@]}"; do - echo "Installing $package..." + echo "Installing '$package'" + package="$(grep ">$package-[0-9]" All | sed -En 's/.*href="(.*)\.tgz".*/\1/p')" + echo "Resolved name '$package'" wget "$BaseUrl"/"$package".tgz ar -x "$package".tgz - tar --skip-old-files -xzf "$package".tmp.tgz -C "$__RootfsDir" 2>/dev/null + tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null done echo "Cleaning up temporary files." popd @@ -371,26 +417,90 @@ elif [[ "$__CodeName" == "illumos" ]]; then wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h -elif [[ -n $__CodeName ]]; then - qemu-debootstrap $__Keyring --arch $__UbuntuArch $__CodeName $__RootfsDir $__UbuntuRepo - cp $__CrossDir/$__BuildArch/sources.list.$__CodeName $__RootfsDir/etc/apt/sources.list - chroot $__RootfsDir apt-get update - chroot $__RootfsDir apt-get -f -y install - chroot $__RootfsDir apt-get -y install $__UbuntuPackages - chroot $__RootfsDir symlinks -cr /usr - chroot $__RootfsDir apt-get clean - - if [ $__SkipUnmount == 0 ]; then - umount $__RootfsDir/* || true +elif [[ "$__CodeName" == "haiku" ]]; then + JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} + + echo "Building Haiku sysroot for x86_64" + mkdir -p "$__RootfsDir/tmp" + cd "$__RootfsDir/tmp" + git clone -b hrev56235 https://review.haiku-os.org/haiku + git clone -b btrev43195 https://review.haiku-os.org/buildtools + cd "$__RootfsDir/tmp/buildtools" && git checkout 7487388f5110021d400b9f3b88e1a7f310dc066d + + # Fetch some unmerged patches + cd "$__RootfsDir/tmp/haiku" + ## Add development build profile (slimmer than nightly) + git fetch origin refs/changes/64/4164/1 && git -c commit.gpgsign=false cherry-pick FETCH_HEAD + + # Build jam + cd "$__RootfsDir/tmp/buildtools/jam" + make + + # Configure cross tools + echo "Building cross-compiler" + mkdir -p "$__RootfsDir/generated" + cd "$__RootfsDir/generated" + 
"$__RootfsDir/tmp/haiku/configure" -j"$JOBS" --sysroot "$__RootfsDir" --cross-tools-source "$__RootfsDir/tmp/buildtools" --build-cross-tools x86_64 + + # Build Haiku packages + echo "Building Haiku" + echo 'HAIKU_BUILD_PROFILE = "development-raw" ;' > UserProfileConfig + "$__RootfsDir/tmp/buildtools/jam/jam0" -j"$JOBS" -q 'package' 'Haiku' + + BaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg" + + # Download additional packages + echo "Downloading additional required packages" + read -ra array <<<"$__HaikuPackages" + for package in "${array[@]}"; do + echo "Downloading $package..." + # API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60 + # The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598 + hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_x86_64","versionType":"LATEST","naturalLanguageCode":"en"}' \ + --header='Content-Type:application/json' "$BaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')" + wget -P "$__RootfsDir/generated/download" "$hpkgDownloadUrl" + done + + # Setup the sysroot + echo "Setting up sysroot and extracting needed packages" + mkdir -p "$__RootfsDir/boot/system" + for file in "$__RootfsDir/generated/objects/haiku/x86_64/packaging/packages/"*.hpkg; do + "$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file" + done + for file in "$__RootfsDir/generated/download/"*.hpkg; do + "$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file" + done + + # Cleaning up temporary files + echo "Cleaning up temporary files" + rm -rf "$__RootfsDir/tmp" + for name in "$__RootfsDir/generated/"*; do + if [[ "$name" =~ "cross-tools-" ]]; then + : # Keep the cross-compiler + else + rm -rf "$name" + fi + done +elif [[ -n "$__CodeName" ]]; then + qemu-debootstrap $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" + cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list" + chroot "$__RootfsDir" apt-get update + chroot "$__RootfsDir" apt-get -f -y install + chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages + chroot "$__RootfsDir" symlinks -cr /usr + chroot "$__RootfsDir" apt-get clean + + if [[ "$__SkipUnmount" == "0" ]]; then + umount "$__RootfsDir"/* || true fi if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then - pushd $__RootfsDir - patch -p1 < $__CrossDir/$__BuildArch/armel.jessie.patch + pushd "$__RootfsDir" + patch -p1 < "$__CrossDir/$__BuildArch/armel.jessie.patch" popd fi elif [[ "$__Tizen" == "tizen" ]]; then - ROOTFS_DIR=$__RootfsDir $__CrossDir/$__BuildArch/tizen-build-rootfs.sh + ROOTFS_DIR="$__RootfsDir" "$__CrossDir/$__BuildArch/tizen-build-rootfs.sh" else echo "Unsupported target platform." 
usage; diff --git a/eng/common/cross/riscv64/sources.list.sid b/eng/common/cross/riscv64/sources.list.sid new file mode 100644 index 0000000000..65f730d224 --- /dev/null +++ b/eng/common/cross/riscv64/sources.list.sid @@ -0,0 +1 @@ +deb http://deb.debian.org/debian-ports sid main diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake index eaeeab38fa..561576be97 100644 --- a/eng/common/cross/toolchain.cmake +++ b/eng/common/cross/toolchain.cmake @@ -7,6 +7,8 @@ if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version) elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc) set(CMAKE_SYSTEM_NAME SunOS) set(ILLUMOS 1) +elseif(EXISTS ${CROSS_ROOTFS}/boot/system/develop/headers/config/HaikuConfig.h) + set(CMAKE_SYSTEM_NAME Haiku) else() set(CMAKE_SYSTEM_NAME Linux) set(LINUX 1) @@ -19,13 +21,7 @@ elseif(EXISTS ${CROSS_ROOTFS}/android_platform) set(ANDROID 1) endif() -if(TARGET_ARCH_NAME STREQUAL "armel") - set(CMAKE_SYSTEM_PROCESSOR armv7l) - set(TOOLCHAIN "arm-linux-gnueabi") - if(TIZEN) - set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0") - endif() -elseif(TARGET_ARCH_NAME STREQUAL "arm") +if(TARGET_ARCH_NAME STREQUAL "arm") set(CMAKE_SYSTEM_PROCESSOR armv7l) if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv7-alpine-linux-musleabihf) set(TOOLCHAIN "armv7-alpine-linux-musleabihf") @@ -37,43 +33,62 @@ elseif(TARGET_ARCH_NAME STREQUAL "arm") if(TIZEN) set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf/9.2.0") endif() -elseif(TARGET_ARCH_NAME STREQUAL "armv6") - set(CMAKE_SYSTEM_PROCESSOR armv6l) - if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf) - set(TOOLCHAIN "armv6-alpine-linux-musleabihf") - else() - set(TOOLCHAIN "arm-linux-gnueabihf") - endif() elseif(TARGET_ARCH_NAME STREQUAL "arm64") set(CMAKE_SYSTEM_PROCESSOR aarch64) if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl) set(TOOLCHAIN "aarch64-alpine-linux-musl") - else() + elseif(LINUX) set(TOOLCHAIN "aarch64-linux-gnu") + if(TIZEN) + set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0") + endif() + elseif(FREEBSD) + set(triple "aarch64-unknown-freebsd12") endif() +elseif(TARGET_ARCH_NAME STREQUAL "armel") + set(CMAKE_SYSTEM_PROCESSOR armv7l) + set(TOOLCHAIN "arm-linux-gnueabi") if(TIZEN) - set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0") + set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "armv6") + set(CMAKE_SYSTEM_PROCESSOR armv6l) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf) + set(TOOLCHAIN "armv6-alpine-linux-musleabihf") + else() + set(TOOLCHAIN "arm-linux-gnueabihf") endif() elseif(TARGET_ARCH_NAME STREQUAL "ppc64le") set(CMAKE_SYSTEM_PROCESSOR ppc64le) set(TOOLCHAIN "powerpc64le-linux-gnu") +elseif(TARGET_ARCH_NAME STREQUAL "riscv64") + set(CMAKE_SYSTEM_PROCESSOR riscv64) + set(TOOLCHAIN "riscv64-linux-gnu") elseif(TARGET_ARCH_NAME STREQUAL "s390x") set(CMAKE_SYSTEM_PROCESSOR s390x) set(TOOLCHAIN "s390x-linux-gnu") +elseif(TARGET_ARCH_NAME STREQUAL "x64") + set(CMAKE_SYSTEM_PROCESSOR x86_64) + if(LINUX) + set(TOOLCHAIN "x86_64-linux-gnu") + if(TIZEN) + set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu/9.2.0") + endif() + elseif(FREEBSD) + set(triple "x86_64-unknown-freebsd12") + elseif(ILLUMOS) + set(TOOLCHAIN "x86_64-illumos") + elseif(HAIKU) + set(TOOLCHAIN "x64_64-unknown-haiku") + endif() elseif(TARGET_ARCH_NAME STREQUAL "x86") set(CMAKE_SYSTEM_PROCESSOR i686) set(TOOLCHAIN "i686-linux-gnu") if(TIZEN) set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu/9.2.0") endif() -elseif (FREEBSD) - set(CMAKE_SYSTEM_PROCESSOR "x86_64") 
- set(triple "x86_64-unknown-freebsd12") -elseif (ILLUMOS) - set(CMAKE_SYSTEM_PROCESSOR "x86_64") - set(TOOLCHAIN "x86_64-illumos") else() - message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only armel, arm, armv6, arm64, ppc64le, s390x and x86 are supported!") + message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, ppc64le, riscv64, s390x, x64 and x86 are supported!") endif() if(DEFINED ENV{TOOLCHAIN}) @@ -159,6 +174,41 @@ elseif(ILLUMOS) set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") +elseif(HAIKU) + set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + + set(TOOLSET_PREFIX ${TOOLCHAIN}-) + function(locate_toolchain_exec exec var) + string(TOUPPER ${exec} EXEC_UPPERCASE) + if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "") + set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE) + return() + endif() + + set(SEARCH_PATH "${CROSS_ROOTFS}/generated/cross-tools-x86_64/bin") + + find_program(EXEC_LOCATION_${exec} + PATHS ${SEARCH_PATH} + NAMES + "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" + "${TOOLSET_PREFIX}${exec}") + + if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND") + message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.") + endif() + set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE) + endfunction() + + set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") + + locate_toolchain_exec(gcc CMAKE_C_COMPILER) + locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) + + set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") + set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") + + # let CMake set up the correct search paths + include(Platform/Haiku) else() set(CMAKE_SYSROOT "${CROSS_ROOTFS}") @@ -218,7 +268,7 @@ endif() # Specify compile options -if((TARGET_ARCH_NAME MATCHES "^(arm|armv6|armel|arm64|ppc64le|s390x)$" AND NOT ANDROID) OR ILLUMOS) +if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|ppc64le|riscv64|s390x)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU) set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN}) diff --git a/eng/common/cross/x86/sources.list.focal b/eng/common/cross/x86/sources.list.focal new file mode 100644 index 0000000000..99d5731330 --- /dev/null +++ b/eng/common/cross/x86/sources.list.focal @@ -0,0 +1,11 @@ +deb http://archive.ubuntu.com/ubuntu/ focal main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ focal main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ focal-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ focal-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ focal-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ focal-security main restricted universe multiverse diff --git a/eng/common/cross/x86/sources.list.jammy b/eng/common/cross/x86/sources.list.jammy new file mode 100644 index 0000000000..af1c1feaea --- /dev/null +++ b/eng/common/cross/x86/sources.list.jammy @@ -0,0 +1,11 @@ +deb http://archive.ubuntu.com/ubuntu/ jammy main restricted universe +deb-src http://archive.ubuntu.com/ubuntu/ jammy main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted 
universe +deb-src http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe + +deb http://archive.ubuntu.com/ubuntu/ jammy-backports main restricted +deb-src http://archive.ubuntu.com/ubuntu/ jammy-backports main restricted + +deb http://archive.ubuntu.com/ubuntu/ jammy-security main restricted universe multiverse +deb-src http://archive.ubuntu.com/ubuntu/ jammy-security main restricted universe multiverse diff --git a/eng/common/generate-locproject.ps1 b/eng/common/generate-locproject.ps1 index 25e97ac007..bab18543d6 100644 --- a/eng/common/generate-locproject.ps1 +++ b/eng/common/generate-locproject.ps1 @@ -10,9 +10,7 @@ Param( Set-StrictMode -Version 2.0 $ErrorActionPreference = "Stop" -. $PSScriptRoot\tools.ps1 - -Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1') +. $PSScriptRoot\pipeline-logging-functions.ps1 $exclusionsFilePath = "$SourcesDirectory\eng\Localize\LocExclusions.json" $exclusions = @{ Exclusions = @() } @@ -28,13 +26,15 @@ $jsonFiles = @() $jsonTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\.template\.config\\localize\\.+\.en\.json" } # .NET templating pattern $jsonTemplateFiles | ForEach-Object { $null = $_.Name -Match "(.+)\.[\w-]+\.json" # matches '[filename].[langcode].json - + $destinationFile = "$($_.Directory.FullName)\$($Matches.1).json" $jsonFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru } $jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern +$wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them + $xlfFiles = @() $allXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.xlf" @@ -46,7 +46,7 @@ if ($allXlfFiles) { } $langXlfFiles | ForEach-Object { $null = $_.Name -Match "(.+)\.[\w-]+\.xlf" # matches '[filename].[langcode].xlf - + $destinationFile = "$($_.Directory.FullName)\$($Matches.1).xlf" $xlfFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru } @@ -59,10 +59,10 @@ $locJson = @{ LanguageSet = $LanguageSet LocItems = @( $locFiles | ForEach-Object { - $outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")" + $outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")" $continue = $true foreach ($exclusion in $exclusions.Exclusions) { - if ($outputPath.Contains($exclusion)) + if ($_.FullName.Contains($exclusion)) { $continue = $false } @@ -79,8 +79,7 @@ $locJson = @{ CopyOption = "LangIDOnPath" OutputPath = "$($_.Directory.Parent.FullName | Resolve-Path -Relative)\" } - } - else { + } else { return @{ SourceFile = $sourceFile CopyOption = "LangIDOnName" @@ -90,6 +89,32 @@ $locJson = @{ } } ) + }, + @{ + CloneLanguageSet = "WiX_CloneLanguages" + LssFiles = @( "wxl_loc.lss" ) + LocItems = @( + $wxlFiles | ForEach-Object { + $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\" + $continue = $true + foreach ($exclusion in $exclusions.Exclusions) { + if ($_.FullName.Contains($exclusion)) + { + $continue = $false + } + } + $sourceFile = ($_.FullName | Resolve-Path -Relative) + if ($continue) + { + return @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnPath" + OutputPath = $outputPath + Languages = "cs-CZ;de-DE;es-ES;fr-FR;it-IT;ja-JP;ko-KR;pl-PL;pt-BR;ru-RU;tr-TR;zh-CN;zh-TW" + } + } + } + 
) } ) } @@ -108,10 +133,10 @@ else { if ((Get-FileHash "$SourcesDirectory\eng\Localize\LocProject-generated.json").Hash -ne (Get-FileHash "$SourcesDirectory\eng\Localize\LocProject.json").Hash) { Write-PipelineTelemetryError -Category "OneLocBuild" -Message "Existing LocProject.json differs from generated LocProject.json. Download LocProject-generated.json and compare them." - + exit 1 } else { Write-Host "Generated LocProject.json and current LocProject.json are identical." } -} \ No newline at end of file +} diff --git a/eng/common/generate-sbom-prep.ps1 b/eng/common/generate-sbom-prep.ps1 index a733a88858..3e5c1c74a1 100644 --- a/eng/common/generate-sbom-prep.ps1 +++ b/eng/common/generate-sbom-prep.ps1 @@ -2,6 +2,8 @@ Param( [Parameter(Mandatory=$true)][string] $ManifestDirPath # Manifest directory where sbom will be placed ) +. $PSScriptRoot\pipeline-logging-functions.ps1 + Write-Host "Creating dir $ManifestDirPath" # create directory for sbom manifest to be placed if (!(Test-Path -path $ManifestDirPath)) diff --git a/eng/common/generate-sbom-prep.sh b/eng/common/generate-sbom-prep.sh index f6c7745314..d5c76dc827 100644 --- a/eng/common/generate-sbom-prep.sh +++ b/eng/common/generate-sbom-prep.sh @@ -2,6 +2,18 @@ source="${BASH_SOURCE[0]}" +# resolve $SOURCE until the file is no longer a symlink +while [[ -h $source ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" +. $scriptroot/pipeline-logging-functions.sh + manifest_dir=$1 if [ ! -d "$manifest_dir" ] ; then diff --git a/eng/common/init-tools-native.ps1 b/eng/common/init-tools-native.ps1 index 24a5e65de1..8d48ec5680 100644 --- a/eng/common/init-tools-native.ps1 +++ b/eng/common/init-tools-native.ps1 @@ -87,6 +87,7 @@ try { $NativeTools.PSObject.Properties | ForEach-Object { $ToolName = $_.Name $ToolVersion = $_.Value + $InstalledTools = @{} if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) { if ($ToolVersion -eq "latest") { @@ -111,9 +112,10 @@ try { $ToolPath = Convert-Path -Path $BinPath Write-Host "Adding $ToolName to the path ($ToolPath)..." Write-Host "##vso[task.prependpath]$ToolPath" + $InstalledTools += @{ $ToolName = $ToolDirectory.FullName } } } - exit 0 + return $InstalledTools } else { $NativeTools.PSObject.Properties | ForEach-Object { $ToolName = $_.Name diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh index 6d7ba15e5f..41a26d802a 100644 --- a/eng/common/native/init-compiler.sh +++ b/eng/common/native/init-compiler.sh @@ -71,7 +71,7 @@ if [[ -z "$CLR_CC" ]]; then # Set default versions if [[ -z "$majorVersion" ]]; then # note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero. 
- if [[ "$compiler" == "clang" ]]; then versions=( 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 ) + if [[ "$compiler" == "clang" ]]; then versions=( 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 ) elif [[ "$compiler" == "gcc" ]]; then versions=( 12 11 10 9 8 7 6 5 4.9 ); fi for version in "${versions[@]}"; do diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1 index 119a6c660d..c35087a060 100644 --- a/eng/common/sdk-task.ps1 +++ b/eng/common/sdk-task.ps1 @@ -64,7 +64,7 @@ try { $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty } if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) { - $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.1.0" -MemberType NoteProperty + $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.2.1" -MemberType NoteProperty } if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") { $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true diff --git a/eng/common/sdl/sdl.ps1 b/eng/common/sdl/sdl.ps1 new file mode 100644 index 0000000000..ac196e164a --- /dev/null +++ b/eng/common/sdl/sdl.ps1 @@ -0,0 +1,37 @@ + +function Install-Gdn { + param( + [string]$Path, + + # If omitted, install the latest version of Guardian, otherwise install that specific version. + [string]$Version + ) + + $ErrorActionPreference = 'Stop' + Set-StrictMode -Version 2.0 + $disableConfigureToolsetImport = $true + $global:LASTEXITCODE = 0 + + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + $argumentList = @("install", "Microsoft.Guardian.Cli", "-Source https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json", "-OutputDirectory $Path", "-NonInteractive", "-NoCache") + + if ($Version) { + $argumentList += "-Version $Version" + } + + Start-Process nuget -Verbose -ArgumentList $argumentList -NoNewWindow -Wait + + $gdnCliPath = Get-ChildItem -Filter guardian.cmd -Recurse -Path $Path + + if (!$gdnCliPath) + { + Write-PipelineTelemetryError -Category 'Sdl' -Message 'Failure installing Guardian' + } + + return $gdnCliPath.FullName +} \ No newline at end of file diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml index 24cec0424e..9ff6a10a68 100644 --- a/eng/common/templates/job/execute-sdl.yml +++ b/eng/common/templates/job/execute-sdl.yml @@ -54,7 +54,7 @@ jobs: # If it's not devdiv, it's dnceng ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: name: NetCore1ESPool-Internal - demands: ImageOverride -equals Build.Server.Amd64.VS2019 + demands: ImageOverride -equals windows.vs2019.amd64 steps: - checkout: self clean: true diff --git a/eng/common/templates/job/onelocbuild.yml b/eng/common/templates/job/onelocbuild.yml index 3bcd243c46..6c523b714f 100644 --- a/eng/common/templates/job/onelocbuild.yml +++ b/eng/common/templates/job/onelocbuild.yml @@ -41,7 +41,7 @@ jobs: # If it's not devdiv, it's dnceng ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: name: NetCore1ESPool-Internal - demands: ImageOverride -equals Build.Server.Amd64.VS2019 + demands: ImageOverride -equals windows.vs2019.amd64 variables: - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat diff --git a/eng/common/templates/job/source-build.yml 
b/eng/common/templates/job/source-build.yml index 5cd5325d7b..88f6f75a62 100644 --- a/eng/common/templates/job/source-build.yml +++ b/eng/common/templates/job/source-build.yml @@ -46,7 +46,7 @@ jobs: # source-build builds run in Docker, including the default managed platform. pool: ${{ if eq(variables['System.TeamProject'], 'public') }}: - name: NetCore1ESPool-Public + name: NetCore-Public demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open ${{ if eq(variables['System.TeamProject'], 'internal') }}: name: NetCore1ESPool-Internal diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml index c2d51098d3..21fd12276b 100644 --- a/eng/common/templates/job/source-index-stage1.yml +++ b/eng/common/templates/job/source-index-stage1.yml @@ -1,6 +1,6 @@ parameters: runAsPublic: false - sourceIndexPackageVersion: 1.0.1-20210614.1 + sourceIndexPackageVersion: 1.0.1-20220804.1 sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" preSteps: [] @@ -28,11 +28,11 @@ jobs: ${{ if eq(parameters.pool, '') }}: pool: ${{ if eq(variables['System.TeamProject'], 'public') }}: - name: NetCore1ESPool-Public - demands: ImageOverride -equals Build.Server.Amd64.VS2019.Open + name: NetCore-Public + demands: ImageOverride -equals windows.vs2019.amd64.open ${{ if eq(variables['System.TeamProject'], 'internal') }}: name: NetCore1ESPool-Internal - demands: ImageOverride -equals Build.Server.Amd64.VS2019 + demands: ImageOverride -equals windows.vs2019.amd64 steps: - ${{ each preStep in parameters.preSteps }}: diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml index 2cca53c2d1..64e5929f22 100644 --- a/eng/common/templates/jobs/jobs.yml +++ b/eng/common/templates/jobs/jobs.yml @@ -96,7 +96,7 @@ jobs: # If it's not devdiv, it's dnceng ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: name: NetCore1ESPool-Internal - demands: ImageOverride -equals Build.Server.Amd64.VS2019 + demands: ImageOverride -equals windows.vs2019.amd64 runAsPublic: ${{ parameters.runAsPublic }} publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml index 5a9056f6b2..87fcae940c 100644 --- a/eng/common/templates/post-build/post-build.yml +++ b/eng/common/templates/post-build/post-build.yml @@ -49,6 +49,7 @@ parameters: type: object default: enable: false + publishGdn: false continueOnError: false params: '' artifactNames: '' @@ -106,7 +107,7 @@ stages: # If it's not devdiv, it's dnceng ${{ else }}: name: NetCore1ESPool-Internal - demands: ImageOverride -equals Build.Server.Amd64.VS2019 + demands: ImageOverride -equals windows.vs2019.amd64 steps: - template: setup-maestro-vars.yml @@ -143,7 +144,7 @@ stages: # If it's not devdiv, it's dnceng ${{ else }}: name: NetCore1ESPool-Internal - demands: ImageOverride -equals Build.Server.Amd64.VS2019 + demands: ImageOverride -equals windows.vs2019.amd64 steps: - template: setup-maestro-vars.yml parameters: @@ -203,7 +204,7 @@ stages: # If it's not devdiv, it's dnceng ${{ else }}: name: NetCore1ESPool-Internal - demands: ImageOverride -equals Build.Server.Amd64.VS2019 + demands: ImageOverride -equals windows.vs2019.amd64 steps: - template: setup-maestro-vars.yml parameters: @@ -235,6 +236,7 @@ stages: 
       - template: /eng/common/templates/job/execute-sdl.yml
         parameters:
           enable: ${{ parameters.SDLValidationParameters.enable }}
+          publishGuardianDirectoryToPipeline: ${{ parameters.SDLValidationParameters.publishGdn }}
           additionalParameters: ${{ parameters.SDLValidationParameters.params }}
           continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
           artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
@@ -261,7 +263,7 @@ stages:
            # If it's not devdiv, it's dnceng
            ${{ else }}:
              name: NetCore1ESPool-Internal
-             demands: ImageOverride -equals Build.Server.Amd64.VS2019
+             demands: ImageOverride -equals windows.vs2019.amd64
        steps:
          - template: setup-maestro-vars.yml
            parameters:
diff --git a/eng/common/templates/steps/execute-sdl.yml b/eng/common/templates/steps/execute-sdl.yml
index 7b8ee18a28..86cf578c43 100644
--- a/eng/common/templates/steps/execute-sdl.yml
+++ b/eng/common/templates/steps/execute-sdl.yml
@@ -8,29 +8,26 @@ parameters:
   condition: ''
 steps:
-- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
-  - powershell: |
-      $content = Get-Content $(GuardianPackagesConfigFile)
-
-      Write-Host "packages.config content was:`n$content"
-
-      $content = $content.Replace('$(DefaultGuardianVersion)', '$(GuardianVersion)')
-      $content | Set-Content $(GuardianPackagesConfigFile)
-
-      Write-Host "packages.config content updated to:`n$content"
-    displayName: Use overridden Guardian version ${{ parameters.overrideGuardianVersion }}
+- task: NuGetAuthenticate@1
+  inputs:
+    nuGetServiceConnections: GuardianConnect
 - task: NuGetToolInstaller@1
   displayName: 'Install NuGet.exe'
-- task: NuGetCommand@2
-  displayName: 'Install Guardian'
-  inputs:
-    restoreSolution: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
-    feedsToUse: config
-    nugetConfigPath: $(Build.SourcesDirectory)\eng\common\sdl\NuGet.config
-    externalFeedCredentials: GuardianConnect
-    restoreDirectory: $(Build.SourcesDirectory)\.packages
+- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
+  - pwsh: |
+      . $(Build.SourcesDirectory)\eng\common\sdl\sdl.ps1
+      $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }}
+      Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
+    displayName: Install Guardian (Overridden)
+
+- ${{ if eq(parameters.overrideGuardianVersion, '') }}:
+  - pwsh: |
+      . $(Build.SourcesDirectory)\eng\common\sdl\sdl.ps1
+      $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts
+      Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
+    displayName: Install Guardian
 - ${{ if ne(parameters.overrideParameters, '') }}:
   - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
@@ -40,7 +37,7 @@ steps:
 - ${{ if eq(parameters.overrideParameters, '') }}:
   - powershell: ${{ parameters.executeAllSdlToolsScript }}
-      -GuardianPackageName Microsoft.Guardian.Cli.$(GuardianVersion)
+      -GuardianCliLocation $(GuardianCliLocation)
       -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
       -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
       ${{ parameters.additionalParameters }}
@@ -62,7 +59,28 @@ steps:
      c
      i
     condition: succeededOrFailed()
+
   - publish: $(Agent.BuildDirectory)/.gdn
     artifact: GuardianConfiguration
     displayName: Publish GuardianConfiguration
+    condition: succeededOrFailed()
+
+  # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration
+  # with the "SARIF SAST Scans Tab" Azure DevOps extension
+  - task: CopyFiles@2
+    displayName: Copy SARIF files
+    inputs:
+      flattenFolders: true
+      sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/
+      contents: '**/*.sarif'
+      targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs
+    condition: succeededOrFailed()
+
+  # Use PublishBuildArtifacts because the SARIF extension only checks this case
+  # see microsoft/sarif-azuredevops-extension#4
+  - task: PublishBuildArtifacts@1
+    displayName: Publish SARIF files to CodeAnalysisLogs container
+    inputs:
+      pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs
+      artifactName: CodeAnalysisLogs
     condition: succeededOrFailed()
\ No newline at end of file
diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml
index 09a223989f..3eb7e2d5f8 100644
--- a/eng/common/templates/steps/send-to-helix.yml
+++ b/eng/common/templates/steps/send-to-helix.yml
@@ -3,7 +3,7 @@ parameters:
   HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
   HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
   HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
-  HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+  HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
   HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
   HelixConfiguration: '' # optional -- additional property attached to a job
   HelixPreCommands: '' # optional -- commands to run before Helix work item execution
@@ -12,7 +12,7 @@ parameters:
   WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
   WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
   CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
-  XUnitProjects: '' # optional -- semicolon delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
+  XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
   XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
   XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
   XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
@@ -22,14 +22,14 @@
   DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
   WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
   IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
-  HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting int)
+  HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
   Creator: '' # optional -- if the build is external, use this to specify who is sending the job
   DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
   condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
   continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
 steps:
-  - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
+  - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
     displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
     env:
       BuildConfig: $(_BuildConfig)
@@ -59,7 +59,7 @@ steps:
       SYSTEM_ACCESSTOKEN: $(System.AccessToken)
     condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
     continueOnError: ${{ parameters.continueOnError }}
-  - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+  - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
     displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
     env:
       BuildConfig: $(_BuildConfig)
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index 423bd962e9..aba6308ad3 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -365,10 +365,17 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
   # If the version of msbuild is going to be xcopied,
   # use this version. Version matches a package here:
-  # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=17.1.0&view=overview
-  $defaultXCopyMSBuildVersion = '17.1.0'
+  # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=17.2.1&view=overview
+  $defaultXCopyMSBuildVersion = '17.2.1'
 
-  if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
+  if (!$vsRequirements) {
+    if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
+      $vsRequirements = $GlobalJson.tools.vs
+    }
+    else {
+      $vsRequirements = New-Object PSObject -Property @{ version = $vsMinVersionReqdStr }
+    }
+  }
   $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { $vsMinVersionReqdStr }
   $vsMinVersion = [Version]::new($vsMinVersionStr)
@@ -573,7 +580,7 @@ function InitializeBuildTool() {
     ExitWithExitCode 1
   }
   $dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet')
-  $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'netcoreapp3.1' }
+  $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net7.0' }
 } elseif ($msbuildEngine -eq "vs") {
   try {
     $msbuildPath = InitializeVisualStudioMSBuild -install:$restore
@@ -635,7 +642,7 @@ function InitializeNativeTools() {
       InstallDirectory = "$ToolsDir"
     }
   }
-  if (Test-Path variable:NativeToolsOnMachine) {
+  if ($env:NativeToolsOnMachine) {
    Write-Host "Variable NativeToolsOnMachine detected, enabling native tool path promotion..."
    $nativeArgs += @{ PathPromotion = $true }
  }
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index 17f0a36580..c110d0ed41 100644
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -312,7 +312,7 @@ function InitializeBuildTool {
   # return values
   _InitializeBuildTool="$_InitializeDotNetCli/dotnet"
   _InitializeBuildToolCommand="msbuild"
-  _InitializeBuildToolFramework="netcoreapp3.1"
+  _InitializeBuildToolFramework="net7.0"
 }
 
 # Set RestoreNoCache as a workaround for https://github.com/NuGet/Home/issues/3116
diff --git a/scripts/build/TestPlatform.Settings.targets b/scripts/build/TestPlatform.Settings.targets
index 44b6f2701f..017526dc65 100644
--- a/scripts/build/TestPlatform.Settings.targets
+++ b/scripts/build/TestPlatform.Settings.targets
@@ -5,7 +5,7 @@
-    17.3.1
+    17.3.2
     preview
     true
     preview
diff --git a/src/Microsoft.TestPlatform.PlatformAbstractions/common/System/ProcessHelper.cs b/src/Microsoft.TestPlatform.PlatformAbstractions/common/System/ProcessHelper.cs
index 79c69169a7..11ea44b8d3 100644
--- a/src/Microsoft.TestPlatform.PlatformAbstractions/common/System/ProcessHelper.cs
+++ b/src/Microsoft.TestPlatform.PlatformAbstractions/common/System/ProcessHelper.cs
@@ -25,6 +25,20 @@ public partial class ProcessHelper : IProcessHelper
     private static readonly string Arm = "arm";
 
     private readonly Process _currentProcess = Process.GetCurrentProcess();
+    private IEnvironment _environment;
+
+    /// <summary>
+    /// Default constructor.
+    /// </summary>
+    public ProcessHelper() : this(new PlatformEnvironment())
+    {
+    }
+
+    internal ProcessHelper(IEnvironment environment)
+    {
+        _environment = environment;
+    }
+
     /// <inheritdoc/>
     public object LaunchProcess(string processPath, string? arguments, string? workingDirectory, IDictionary<string, string?>? envVariables, Action<object?, string?>? errorCallback, Action<object?>? exitCallBack, Action<object?, string?>? outputCallBack)
     {
@@ -85,6 +99,8 @@ void InitializeAndStart()
         {
             process.Exited += async (sender, args) =>
             {
+                const int timeout = 500;
+
                 if (sender is Process p)
                 {
                     try
@@ -101,11 +117,47 @@ void InitializeAndStart()
                         //
                         // For older frameworks, the solution is more tricky but it seems we can get the expected
                         // behavior using the parameterless 'WaitForExit()' combined with an awaited Task.Run call.
-                        var cts = new CancellationTokenSource(500);
+                        var cts = new CancellationTokenSource(timeout);
 #if NET5_0_OR_GREATER
                         await p.WaitForExitAsync(cts.Token);
 #else
-                        await Task.Run(() => p.WaitForExit(), cts.Token);
+                        // NOTE: In case we run on Windows we must call 'WaitForExit(timeout)' instead of calling
+                        // the parameterless overload. The reason for this requirement stems from the behavior of
+                        // the Selenium WebDriver when debugging a test. If the debugger is detached, the default
+                        // action is to kill the testhost process that it was attached to, but for some reason we
+                        // end up with a zombie process that would make us wait indefinitely with a simple
+                        // 'WaitForExit()' call. This in turn causes the vstest.console to block waiting for the
+                        // test request to finish and this behavior will be visible to the user since TW will
+                        // show the Selenium test as still running. Only killing the Edge Driver process would help
+                        // unblock vstest.console, but this is not a reasonable ask to our users.
+                        //
+                        // TODO: This fix is not ideal, it's only a workaround to make Selenium tests usable again.
+                        // Ideally, we should spend some more time here in order to better understand what causes
+                        // the testhost to become a zombie process in the first place.
+                        if (_environment.OperatingSystem is PlatformOperatingSystem.Windows)
+                        {
+                            p.WaitForExit(timeout);
+                        }
+                        else
+                        {
+                            cts.Token.Register(() =>
+                            {
+                                try
+                                {
+                                    if (!p.HasExited)
+                                    {
+                                        p.Kill();
+                                    }
+                                }
+                                catch
+                                {
+                                    // Ignore all exceptions thrown when trying to kill a process that may be
+                                    // left hanging. This is a best effort to kill it, but should we fail for
+                                    // any reason we'd probably block on 'WaitForExit()' anyway.
+                                }
+                            });
+                            await Task.Run(() => p.WaitForExit(), cts.Token).ConfigureAwait(false);
+                        }
 #endif
                     }
                     catch
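
The non-Windows branch above pairs a timed CancellationTokenSource with a token callback that force-kills the child process, so the blocking 'WaitForExit()' can never hang on a zombie testhost. The standalone C# sketch below shows that same pattern in isolation; it is not part of the vstest change, and the type name BoundedExitWaitSketch, the helper WaitForExitOrKillAsync, and the use of 'ping' as a stand-in child process are illustrative assumptions only.

using System;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;

internal static class BoundedExitWaitSketch
{
    // Wait up to 'timeoutMs' for 'process' to exit; if it does not, try to kill it so the
    // blocking WaitForExit() call below is guaranteed to return. Returns true when the
    // process exited on its own within the timeout.
    public static async Task<bool> WaitForExitOrKillAsync(Process process, int timeoutMs)
    {
        using var cts = new CancellationTokenSource(timeoutMs);

        // Same idea as the cts.Token.Register(...) callback in the change above: when the
        // timeout fires, bring the child down so the wait cannot block forever.
        using CancellationTokenRegistration registration = cts.Token.Register(() =>
        {
            try
            {
                if (!process.HasExited)
                {
                    process.Kill();
                }
            }
            catch
            {
                // Best effort only: the process may already be gone or access may be denied.
            }
        });

        try
        {
            // On .NET 5+ this could simply be 'await process.WaitForExitAsync(cts.Token)'.
            await Task.Run(() => process.WaitForExit(), cts.Token).ConfigureAwait(false);
            return !cts.IsCancellationRequested;
        }
        catch (OperationCanceledException)
        {
            // The token was canceled before the wait started; the registration above
            // has already tried to kill the process.
            return false;
        }
    }

    private static async Task Main()
    {
        // 'ping' is just a stand-in for a long-running child such as a stuck testhost.
        using var child = Process.Start(new ProcessStartInfo("ping", "127.0.0.1") { UseShellExecute = false })!;
        bool exitedOnItsOwn = await WaitForExitOrKillAsync(child, timeoutMs: 500);
        Console.WriteLine(exitedOnItsOwn ? "Exited within the timeout." : "Timed out and was killed.");
    }
}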