From a3bf64057771889c9397d84d804fca732fb28e7f Mon Sep 17 00:00:00 2001
From: Anipik
Date: Fri, 22 Nov 2019 16:08:54 -0800
Subject: [PATCH 1/2] enabling ci

---
 eng/build-job.yml       | 107 ++++++++++++++++++++++++++++++++
 eng/job.yml             | 121 +++++++++++++++++++++++++++++++++++++
 eng/pipelines/pr.yml    |  19 ++++++
 eng/platform-matrix.yml |  62 +++++++++++++++++++
 eng/xplat-job.yml       | 131 ++++++++++++++++++++++++++++++++++++++++
 5 files changed, 440 insertions(+)
 create mode 100644 eng/build-job.yml
 create mode 100644 eng/job.yml
 create mode 100644 eng/pipelines/pr.yml
 create mode 100644 eng/platform-matrix.yml
 create mode 100644 eng/xplat-job.yml

diff --git a/eng/build-job.yml b/eng/build-job.yml
new file mode 100644
index 000000000000..6e3679e4f164
--- /dev/null
+++ b/eng/build-job.yml
@@ -0,0 +1,107 @@
+parameters:
+  buildConfig: ''
+  archType: ''
+  osGroup: ''
+  osIdentifier: ''
+  container: ''
+  crossrootfsDir: ''
+  timeoutInMinutes: ''
+
+### Product build
+jobs:
+- template: xplat-job.yml
+  parameters:
+    buildConfig: ${{ parameters.buildConfig }}
+    archType: ${{ parameters.archType }}
+    osGroup: ${{ parameters.osGroup }}
+    osIdentifier: ${{ parameters.osIdentifier }}
+    helixType: 'build/product/'
+    enableMicrobuild: true
+
+    # Compute job name from template parameters
+    name: ${{ format('build_{0}_{1}_{2}', parameters.osIdentifier, parameters.archType, parameters.buildConfig) }}
+    displayName: ${{ format('{0} {1} {2}', parameters.osIdentifier, parameters.archType, parameters.buildConfig) }}
+
+    # Run all steps in the container.
+    # Note that the containers are defined in platform-matrix.yml
+    container: ${{ parameters.container }}
+
+    timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+    crossrootfsDir: ${{ parameters.crossrootfsDir }}
+
+    gatherAssetManifests: true
+    variables:
+    - name: osIdentifier
+      value: ${{ parameters.osIdentifier }}
+    - name: stripSymbolsArg
+      value: ''
+    # Strip symbols only on the release build
+    - ${{ if eq(parameters.buildConfig, 'Release') }}:
+      - name: stripSymbolsArg
+        value: '-stripsymbols'
+    - name: portableBuildArg
+      value: ''
+    # Ensure that we produce os-specific packages for the following distros:
+    - ${{ if in(parameters.osIdentifier, 'Linux_rhel6') }}:
+      - name: portableBuildArg
+        value: '-portablebuild=false'
+    - name: clangArg
+      value: ''
+    # Our FreeBSD doesn't yet detect available clang versions, so pass it explicitly.
+    - ${{ if eq(parameters.osGroup, 'FreeBSD') }}:
+      - name: clangArg
+        value: '-clang6.0'
+    - ${{ if eq(parameters.archType, 'arm64') }}:
+      - name: clangArg
+        value: '-clang5.0'
+
+    steps:
+
+    # Install native dependencies
+    # Linux builds use docker images with dependencies preinstalled,
+    # and FreeBSD builds use a build agent with dependencies
+    # preinstalled, so we only need this step for OSX and Windows.
+    - ${{ if eq(parameters.osGroup, 'OSX') }}:
+      - script: sh eng/install-native-dependencies.sh $(osGroup)
+        displayName: Install native dependencies
+    - ${{ if eq(parameters.osGroup, 'Windows_NT') }}:
+      # Necessary to install python
+      - script: eng\common\init-tools-native.cmd -InstallDirectory $(Build.SourcesDirectory)\native-tools -Force
+        displayName: Install native dependencies
+
+    # Install internal tools on official builds
+    # Since our internal tools are behind an authenticated feed,
+    # we need to use the DotNetCli AzDO task to restore from the feed using a service connection.
+    # We can't do this from within the build, so we need to do this as a separate step.
+    - ${{ if and(and(eq(variables['System.TeamProject'], 'internal'), ne(variables['Build.Reason'], 'PullRequest')), eq(parameters.osGroup, 'Windows_NT')) }}:
+      - task: DotNetCoreInstaller@0
+        inputs:
+          packageType: 'sdk'
+          version: '2.1.503'
+      - task: DotNetCoreCLI@2
+        displayName: Restore internal tools
+        inputs:
+          command: restore
+          feedsToUse: config
+          projects: '$(Build.SourcesDirectory)/eng/common/internal/Tools.csproj'
+          nugetConfigPath: 'eng/internal/NuGet.config'
+          restoreDirectory: '$(Build.SourcesDirectory)\.packages'
+          verbosityRestore: 'normal'
+          externalFeedCredentials: 'dotnet-core-internal-tooling'
+
+    # Build
+    - ${{ if ne(parameters.osGroup, 'Windows_NT') }}:
+      - script: ./build.sh $(buildConfig) $(archType) $(crossArg) -skiptests -skipnuget $(clangArg) $(stripSymbolsArg) $(officialBuildIdArg) /p:ContinuousIntegrationBuild=true
+        displayName: Build product
+    - ${{ if eq(parameters.osGroup, 'Windows_NT') }}:
+      - script: set __TestIntermediateDir=int&&build.cmd $(buildConfig) $(archType) -skiptests -skipbuildpackages $(officialBuildIdArg) $(ibcOptimizeArg) $(enforcePgoArg) /p:ContinuousIntegrationBuild=true
+        displayName: Build product
+
+    # Build packages
+    - ${{ if ne(parameters.osGroup, 'Windows_NT') }}:
+      - script: ./build-packages.sh -BuildArch=$(archType) -BuildType=$(_BuildConfig) $(crossPackagesArg) $(officialBuildIdArg) $(portableBuildArg) -ci
+        displayName: Build packages
+    - ${{ if eq(parameters.osGroup, 'Windows_NT') }}:
+      - script: build-packages.cmd -BuildArch=$(archType) -BuildType=$(_BuildConfig) $(officialBuildIdArg) -ci
+        displayName: Build packages
\ No newline at end of file
diff --git a/eng/job.yml b/eng/job.yml
new file mode 100644
index 000000000000..a109f44baea2
--- /dev/null
+++ b/eng/job.yml
@@ -0,0 +1,121 @@
+parameters:
+# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+  cancelTimeoutInMinutes: ''
+
+  condition: ''
+
+  continueOnError: false
+
+  container: ''
+
+  dependsOn: ''
+
+  displayName: ''
+
+  steps: []
+
+  pool: ''
+
+  strategy: ''
+
+  timeoutInMinutes: ''
+
+  variables: []
+
+  workspace: ''
+
+  # Job base template specific parameters
+  # Optional: Enable installing Microbuild plugin
+  # if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
+  #   _TeamName - the name of your team
+  #   _SignType - 'test' or 'real'
+  enableMicrobuild: false
+
+  # Optional: Include PublishBuildArtifacts task
+  enablePublishBuildArtifacts: false
+
+  # Optional: Enable publishing to the build asset registry
+  enablePublishBuildAssets: false
+
+  # Optional: Prevent gather/push manifest from executing when using publishing pipelines
+  enablePublishUsingPipelines: false
+
+  # Optional: Include PublishTestResults task
+  enablePublishTestResults: false
+
+  # Optional: enable sending telemetry
+  enableTelemetry: false
+
+  # Optional: define the helix repo for telemetry (example: 'dotnet/arcade')
+  helixRepo: ''
+
+  # Optional: define the helix type for telemetry (example: 'build/product/')
+  helixType: ''
+
+  # Required: name of the job
+  name: ''
+
+  # Optional: should run as a public build even in the internal project
+  # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+  runAsPublic: false
+
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+jobs:
+- job: ${{ parameters.name }}
+
+  ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
+    cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
+
+  ${{ if ne(parameters.condition, '') }}:
+    condition: ${{ parameters.condition }}
+
+  ${{ if ne(parameters.container, '') }}:
+    container: ${{ parameters.container }}
+
+  ${{ if ne(parameters.continueOnError, '') }}:
+    continueOnError: ${{ parameters.continueOnError }}
+
+  ${{ if ne(parameters.dependsOn, '') }}:
+    dependsOn: ${{ parameters.dependsOn }}
+
+  ${{ if ne(parameters.displayName, '') }}:
+    displayName: ${{ parameters.displayName }}
+
+  ${{ if ne(parameters.pool, '') }}:
+    pool: ${{ parameters.pool }}
+
+  ${{ if ne(parameters.strategy, '') }}:
+    strategy: ${{ parameters.strategy }}
+
+  ${{ if ne(parameters.timeoutInMinutes, '') }}:
+    timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+  variables:
+  - ${{ if eq(parameters.enableTelemetry, 'true') }}:
+    - name: DOTNET_CLI_TELEMETRY_PROFILE
+      value: '$(Build.Repository.Uri)'
+  - ${{ each variable in parameters.variables }}:
+    # handle name-value variable syntax
+    # example:
+    # - name: [key]
+    #   value: [value]
+    - ${{ if ne(variable.name, '') }}:
+      - name: ${{ variable.name }}
+        value: ${{ variable.value }}
+
+    # handle variable groups
+    - ${{ if ne(variable.group, '') }}:
+      - group: ${{ variable.group }}
+
+    # handle key-value variable syntax.
+    # example:
+    # - [key]: [value]
+    - ${{ if and(eq(variable.name, ''), eq(variable.group, '')) }}:
+      - ${{ each pair in variable }}:
+        - name: ${{ pair.key }}
+          value: ${{ pair.value }}
+
+  ${{ if ne(parameters.workspace, '') }}:
+    workspace: ${{ parameters.workspace }}
\ No newline at end of file
diff --git a/eng/pipelines/pr.yml b/eng/pipelines/pr.yml
new file mode 100644
index 000000000000..bf39ca8a0bcc
--- /dev/null
+++ b/eng/pipelines/pr.yml
@@ -0,0 +1,19 @@
+trigger: none
+
+pr:
+- master
+- release/2.1
+- release/2.2
+
+jobs:
+#
+# Checked builds
+#
+- template: /eng/platform-matrix.yml
+  parameters:
+    jobTemplate: build-job.yml
+    buildConfig: checked
+    platforms:
+    - Linux_x64
+    - OSX_x64
+    - Windows_NT_x64
\ No newline at end of file
diff --git a/eng/platform-matrix.yml b/eng/platform-matrix.yml
new file mode 100644
index 000000000000..000bc2474a9c
--- /dev/null
+++ b/eng/platform-matrix.yml
@@ -0,0 +1,62 @@
+parameters:
+  jobTemplate: ''
+  buildConfig: ''
+  platforms: []
+  # platformGroup is a named collection of platforms. Allowed values:
+  # 'all' - all platforms
+  # 'gcstress' - platforms that support running under GCStress0x3 and GCStress0xC scenarios
+  platformGroup: ''
+  # helixQueueGroup is a named collection of Helix Queues. If specified, it determines which Helix queues are
+  # used, instead of the usual criteria. Allowed values:
+  # 'pr' - the queues used for a pull request for the platform. Typically a small set.
+  # 'ci' - the queues used for a CI (post-merge) test run.
+  # 'all' - the queues used for non-PR, non-CI test runs, e.g., Manual or Scheduled runs. Typically this is all available queues.
+  # 'corefx' - the queues used for a corefx test run.
+  helixQueueGroup: 'pr'
+  ignoreDependencyOnBuildJobs: false
+  jobParameters: {}
+
+jobs:
+
+# TODO: simplify osIdentifier by adding osGroup and osSubGroup. See
+# https://github.com/Microsoft/azure-pipelines-yaml/pull/46 for more information
+
+
+# Linux x64
+
+- ${{ if or(containsValue(parameters.platforms, 'Linux_x64'), in(parameters.platformGroup, 'all', 'gcstress')) }}:
+  - template: ${{ parameters.jobTemplate }}
+    parameters:
+      ignoreDependencyOnBuildJobs: ${{ parameters.ignoreDependencyOnBuildJobs }}
+      buildConfig: ${{ parameters.buildConfig }}
+      archType: x64
+      osGroup: Linux
+      osIdentifier: Linux
+      container:
+        image: centos-7-3e800f1-20190501005343
+        registry: mcr
+      ${{ insert }}: ${{ parameters.jobParameters }}
+
+# macOS x64
+
+- ${{ if or(containsValue(parameters.platforms, 'OSX_x64'), eq(parameters.platformGroup, 'all')) }}:
+  - template: ${{ parameters.jobTemplate }}
+    parameters:
+      ignoreDependencyOnBuildJobs: ${{ parameters.ignoreDependencyOnBuildJobs }}
+      buildConfig: ${{ parameters.buildConfig }}
+      archType: x64
+      osGroup: OSX
+      osIdentifier: OSX
+      ${{ insert }}: ${{ parameters.jobParameters }}
+
+# Windows x64
+
+- ${{ if or(containsValue(parameters.platforms, 'Windows_NT_x64'), in(parameters.platformGroup, 'all', 'gcstress')) }}:
+  - template: ${{ parameters.jobTemplate }}
+    parameters:
+      ignoreDependencyOnBuildJobs: ${{ parameters.ignoreDependencyOnBuildJobs }}
+      buildConfig: ${{ parameters.buildConfig }}
+      archType: x64
+      osGroup: Windows_NT
+      osIdentifier: Windows_NT
+      ${{ insert }}: ${{ parameters.jobParameters }}
diff --git a/eng/xplat-job.yml b/eng/xplat-job.yml
new file mode 100644
index 000000000000..910a94e5ac24
--- /dev/null
+++ b/eng/xplat-job.yml
@@ -0,0 +1,131 @@
+parameters:
+  buildConfig: ''
+  archType: ''
+  osGroup: ''
+  osIdentifier: ''
+  name: ''
+  helixType: '(unspecified)'
+  container: ''
+  crossrootfsDir: ''
+
+  # arcade-specific parameters
+  condition: ''
+  continueOnError: false
+  dependsOn: ''
+  displayName: ''
+  timeoutInMinutes: ''
+  enableMicrobuild: ''
+  gatherAssetManifests: false
+
+  variables: {} ## any extra variables to add to the defaults defined below
+
+jobs:
+- template: /eng/job.yml
+  parameters:
+
+    name: ${{ parameters.name }}
+    displayName: ${{ parameters.displayName }}
+
+    condition: ${{ parameters.condition }}
+    dependsOn: ${{ parameters.dependsOn }}
+    timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+    continueOnError: ${{ parameters.continueOnError }}
+
+    # Send telemetry for all builds
+    enableTelemetry: true
+    helixRepo: 'dotnet/coreclr'
+    helixType: ${{ parameters.helixType }}
+
+    enableMicrobuild: ${{ parameters.enableMicrobuild }}
+    enablePublishUsingPipelines: true
+
+    pool:
+
+      # Public Linux Build Pool
+      ${{ if and(eq(parameters.osGroup, 'Linux'), eq(variables['System.TeamProject'], 'public')) }}:
+        name: NetCorePublic-Pool
+        queue: BuildPool.Ubuntu.1604.Amd64.Open
+
+      # Public OSX Build Pool
+      ${{ if and(eq(parameters.osGroup, 'OSX'), ne(variables['System.TeamProject'], 'public')) }}:
+        name: Hosted Mac Internal
+
+      # Public Windows Build Pool
+      ${{ if and(eq(parameters.osGroup, 'Windows_NT'), eq(variables['System.TeamProject'], 'public')) }}:
+        name: NetCorePublic-Pool
+        queue: BuildPool.Windows.10.Amd64.VS2017.Open
+
+    workspace:
+      clean: all
+
+    ${{ if ne(parameters.container, '') }}:
+      ${{ if eq(parameters.container.registry, 'mcr') }}:
+        container: ${{ format('{0}:{1}', 'mcr.microsoft.com/dotnet-buildtools/prereqs', parameters.container.image) }}
+      ${{ if ne(parameters.container.registry, 'mcr') }}:
+        container: ${{ format('{0}:{1}', parameters.container.registry, parameters.container.image) }}
+
+    ${{ if eq(parameters.osGroup, 'Linux') }}:
+      agentOs: Ubuntu
+    ${{ if eq(parameters.osGroup, 'OSX') }}:
+      agentOs: MacOS
+    ${{ if eq(parameters.osGroup, 'Windows_NT') }}:
+      agentOs: Windows_NT
+
+    # Setting this results in the arcade job template including a step
+    # that gathers asset manifests and publishes them to pipeline
+    # storage. Only relevant for build jobs.
+    enablePublishBuildAssets: ${{ parameters.gatherAssetManifests }}
+
+    variables:
+    - name: buildConfig
+      value: ${{ parameters.buildConfig }}
+
+    - ${{ if eq(parameters.buildConfig, 'checked') }}:
+      - name: buildConfigUpper
+        value: 'Checked'
+
+    - ${{ if eq(parameters.buildConfig, 'debug') }}:
+      - name: buildConfigUpper
+        value: 'Debug'
+
+    - ${{ if eq(parameters.buildConfig, 'release') }}:
+      - name: buildConfigUpper
+        value: 'Release'
+
+    - name: _BuildConfig
+      value: $(buildConfigUpper)
+
+    - name: archType
+      value: ${{ parameters.archType }}
+
+    - name: osGroup
+      value: ${{ parameters.osGroup }}
+
+    - name: osIdentifier
+      value: ${{ parameters.osIdentifier }}
+
+    - ${{ if and(eq(variables['System.TeamProject'], 'public'), eq(variables['Build.Reason'], 'PullRequest')) }}:
+      - name: _HelixSource
+        value: pr/dotnet/coreclr/$(Build.SourceBranch)
+    - ${{ if and(eq(variables['System.TeamProject'], 'public'), ne(variables['Build.Reason'], 'PullRequest')) }}:
+      - name: _HelixSource
+        value: ci/dotnet/coreclr/$(Build.SourceBranch)
+
+    - ${{ if ne(parameters.crossrootfsDir, '') }}:
+      - name: crossArg
+        value: 'cross'
+      - name: crossPackagesArg
+        value: '-__DoCrossArchBuild=1'
+      # This is only required for cross builds.
+      - name: ROOTFS_DIR
+        value: ${{ parameters.crossrootfsDir }}
+    - ${{ if eq(parameters.crossrootfsDir, '') }}:
+      - name: crossArg
+        value: ''
+      - name: crossPackagesArg
+        value: ''
+
+    - ${{ each variable in parameters.variables }}:
+      - ${{insert}}: ${{ variable }}
+
+    steps: ${{ parameters.steps }}
\ No newline at end of file

From 045cfa427c0095e9397700f0558d69dd7a1f27f6 Mon Sep 17 00:00:00 2001
From: Anipik
Date: Mon, 25 Nov 2019 14:56:24 -0800
Subject: [PATCH 2/2] removing groovy files

---
 ...tNet-CoreClr-Trusted-Linux-Crossbuild.json |  621 ---
 .../DotNet-CoreClr-Trusted-Linux.json         |  572 ---
 buildpipeline/DotNet-CoreClr-Trusted-Mac.json |  360 --
 .../DotNet-CoreClr-Trusted-Windows-x86.json   |  486 ---
 .../DotNet-CoreClr-Trusted-Windows.json       |  479 ---
 .../DotNet-Trusted-Publish-Symbols.json       |  390 --
 buildpipeline/DotNet-Trusted-Publish.json     |  709 ----
 buildpipeline/README.md                       |    5 -
 buildpipeline/linux-musl.groovy               |   24 -
 buildpipeline/perf-pipeline.groovy            |  409 --
 buildpipeline/perf_pipelinejobs.groovy        |   35 -
 buildpipeline/pipelinejobs.groovy             |   55 -
 buildpipeline/pipelines.json                  |  259 --
 .../DotNet-CoreClr-Security-Windows.json      |  581 ---
 buildpipeline/security/pipeline.json          |   22 -
 .../Dotnet-CoreClr-Trusted-BuildTests.json    |  432 --
 buildpipeline/tests/test_pipelines.json       |  232 --
 netci.groovy                                  | 3584 -----
 perf.groovy                                   |  868 ----
 19 files changed, 10123 deletions(-)
 delete mode 100644 buildpipeline/DotNet-CoreClr-Trusted-Linux-Crossbuild.json
 delete mode 100644 buildpipeline/DotNet-CoreClr-Trusted-Linux.json
 delete mode 100644 buildpipeline/DotNet-CoreClr-Trusted-Mac.json
 delete mode 100644 buildpipeline/DotNet-CoreClr-Trusted-Windows-x86.json
 delete mode 100644 buildpipeline/DotNet-CoreClr-Trusted-Windows.json
 delete mode 100644 buildpipeline/DotNet-Trusted-Publish-Symbols.json
 delete mode 100644 buildpipeline/DotNet-Trusted-Publish.json
 delete mode 100644 buildpipeline/README.md
 delete mode 100644 buildpipeline/linux-musl.groovy
 delete mode 100644
buildpipeline/perf-pipeline.groovy delete mode 100644 buildpipeline/perf_pipelinejobs.groovy delete mode 100644 buildpipeline/pipelinejobs.groovy delete mode 100644 buildpipeline/pipelines.json delete mode 100644 buildpipeline/security/DotNet-CoreClr-Security-Windows.json delete mode 100644 buildpipeline/security/pipeline.json delete mode 100644 buildpipeline/tests/Dotnet-CoreClr-Trusted-BuildTests.json delete mode 100644 buildpipeline/tests/test_pipelines.json delete mode 100755 netci.groovy delete mode 100644 perf.groovy diff --git a/buildpipeline/DotNet-CoreClr-Trusted-Linux-Crossbuild.json b/buildpipeline/DotNet-CoreClr-Trusted-Linux-Crossbuild.json deleted file mode 100644 index ef671ffe68f7..000000000000 --- a/buildpipeline/DotNet-CoreClr-Trusted-Linux-Crossbuild.json +++ /dev/null @@ -1,621 +0,0 @@ -{ - "build": [ - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run AgentTools/Begin.sh", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\Begin.sh\") {\n \"Begin.sh script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\Begin.sh\n} else {\n \"Begin.sh script does not exist. Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Initialize tools", - "timeoutInMinutes": 0, - "refName": "Task1", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Build.SourcesDirectory)/init-tools.sh", - "arguments": "", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Initialize Docker", - "timeoutInMinutes": 0, - "refName": "Task2", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Build.SourcesDirectory)/Tools/scripts/docker/init-docker.sh", - "arguments": "$(DockerImageName)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Clone repository", - "timeoutInMinutes": 0, - "refName": "Task3", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) git clone $(VsoCoreClrGitUrl) $(GitHubDirectory)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run clean.sh", - "timeoutInMinutes": 0, - "refName": "Task4", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) ./clean.sh -all", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - 
"alwaysRun": false, - "displayName": "Check out the specified commit", - "timeoutInMinutes": 0, - "refName": "Task5", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) git checkout $(Build.SourceVersion)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run sync.sh", - "timeoutInMinutes": 0, - "refName": "Task6", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) ./sync.sh -- /p:BuildType=$(PB_BuildType)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": false, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run build-rootfs.sh", - "timeoutInMinutes": 0, - "refName": "Task7", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "sudo", - "arguments": "docker run --privileged --rm $(DockerCommonRunArgs) ./cross/build-rootfs.sh $(Architecture) $(CrossToolsetVersion) $(SkipUnmount)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run build.sh", - "timeoutInMinutes": 0, - "refName": "Task8", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm -e ROOTFS_DIR=$(ROOTFS_DIR) -e CAC_ROOTFS_DIR=$(CAC_ROOTFS_DIR) $(DockerCommonRunArgs) ./build.sh $(PB_BuildType) $(Architecture) skipnuget cross $(CrossArchBuildArgs) -skiprestore stripSymbols -OfficialBuildId=$(OfficialBuildId) -- /flp:\"v=diag\"", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run build-packages.sh", - "timeoutInMinutes": 0, - "refName": "Task9", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) ./build-packages.sh -BuildType=$(PB_BuildType) -BuildArch=$(Architecture) $(CrossArchBuildPackagesArgs) -- /p:OfficialBuildId=$(OfficialBuildId)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": false, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Cleanup RootFS", - "timeoutInMinutes": 0, - "refName": "Task10", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm --privileged $(DockerCommonRunArgs) git clean -xdf $(GitHubDirectory)/cross/", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish packages", - "timeoutInMinutes": 0, - "refName": "Task11", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", 
- "arguments": "run --rm $(DockerCommonRunArgs) ./publish-packages.sh -AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -BuildType=$(PB_BuildType) -BuildArch=$(Architecture) -Container=$(PB_ContainerName) -distroRid=$(Rid) -PublishPackages -- /p:RelativePath=$(PB_BlobNamePrefix)$(PB_BuildType)/pkg /p:PublishFlatContainer=$(PublishFlat) /p:OverwriteOnPublish=true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish symbol packages", - "timeoutInMinutes": 0, - "refName": "Task12", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) ./publish-packages.sh -AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -BuildType=$(PB_BuildType) -BuildArch=$(Architecture) -Container=$(PB_ContainerName) -distroRid=$(Rid) -PublishSymbols -- /p:RelativePath=$(PB_BlobNamePrefix)$(PB_BuildType)/symbolpkg /p:OverwriteOnPublish=true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish test native binaries", - "timeoutInMinutes": 0, - "refName": "Task13", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) ./publish-packages.sh -AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -BuildType=$(PB_BuildType) -BuildArch=$(Architecture) -Container=$(PB_ContainerName) -distroRid=$(Rid) -PublishTestNativeBins -- /p:RelativePath=$(PB_BlobNamePrefix)$(PB_BuildType)/TestNativeBins/$(Rid)-$(Architecture) /p:OverwriteOnPublish=true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Create Copy Container", - "timeoutInMinutes": 0, - "refName": "Task14", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run $(DockerCommonRunArgs) echo", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Expose Docker repo for publishing", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "refName": "Task15", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "cp $(DockerContainerName):$(GitHubDirectory) $(DockerCopyDest)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Copy Files to: $(Build.StagingDirectory)\\BuildLogs", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "refName": "CopyFiles1", - "task": { - "id": "5bfb729a-a7c8-4a78-a7c3-8d717bb7c13c", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "SourceFolder": "$(DockerCopyDest)", - "Contents": "**/*.log", - "TargetFolder": "$(Build.StagingDirectory)\\BuildLogs", - "CleanTargetFolder": "false", - 
"OverWrite": "false", - "flattenFolders": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Publish Artifact: BuildLogs", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "refName": "PublishBuildArtifacts2", - "task": { - "id": "2ff763a7-ce83-4e1f-bc89-0ae63477cebe", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "PathtoPublish": "$(Build.StagingDirectory)\\BuildLogs", - "ArtifactName": "BuildLogs", - "ArtifactType": "Container", - "TargetPath": "\\\\my\\share\\$(Build.DefinitionName)\\$(Build.BuildNumber)", - "Parallel": "false", - "ParallelCount": "8" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Cleanup Docker", - "timeoutInMinutes": 0, - "condition": "always()", - "refName": "Task16", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Build.SourcesDirectory)/Tools/scripts/docker/cleanup-docker.sh", - "arguments": "", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Run AgentTools/End.sh", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\End.sh\") {\n \"End.sh script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\End.sh\n} else {\n \"End.sh script does not exist. Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - } - ], - "options": [ - { - "enabled": false, - "definition": { - "id": "a9db38f9-9fdc-478c-b0f9-464221e58316" - }, - "inputs": { - "workItemType": "234347", - "assignToRequestor": "true", - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "57578776-4c22-4526-aeb0-86b6da17ee9c" - }, - "inputs": { - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "5d58cc01-7c75-450c-be18-a388ddb129ec" - }, - "inputs": { - "branchFilters": "[\"+refs/heads/*\"]", - "additionalFields": "{}" - } - } - ], - "variables": { - "system.debug": { - "value": "false", - "allowOverride": true - }, - "Platform": { - "value": "x64" - }, - "PB_BuildType": { - "value": "Release" - }, - "GitHubRepo": { - "value": "http://github.com/dotnet/coreclr.git" - }, - "GitHubDirectory": { - "value": "/root/coreclr" - }, - "DockerContainerName": { - "value": "coreclr-cross-$(Build.BuildId)" - }, - "DockerImageName": { - "value": "$(DockerRepository):$(DockerTag)" - }, - "DockerRepository": { - "value": "microsoft/dotnet-buildtools-prereqs" - }, - "DockerTag": { - "value": "ubuntu-14.04-cross-0cd4667-20170319080304", - "allowOverride": true - }, - "CloudDropAccountName": { - "value": "dotnetbuildoutput" - }, - "CloudDropAccessToken": { - "value": null, - "isSecret": true - }, - "OfficialBuildId": { - "value": "$(Build.BuildNumber)", - "allowOverride": true - }, - "Label": { - "value": "$(Build.BuildNumber)" - }, - "GitHubBranch": { - "value": "sni_plus_latestbuildtools" - }, - "Priority": { - "value": "0" - }, - "RepoAccessToken": { - "value": null, - "isSecret": true - }, - 
"Architecture": { - "value": "arm" - }, - "CrossArchBuildArgs": { - "value": "", - "allowOverride": true - }, - "CrossArchBuildPackagesArgs": { - "value": "", - "allowOverride": true - }, - "CommitToCheckout": { - "value": "HEAD", - "allowOverride": true - }, - "DockerCopyDest": { - "value": "$(Build.BinariesDirectory)/docker_repo" - }, - "ROOTFS_DIR": { - "value": "/crossrootfs/$(Architecture)" - }, - "CAC_ROOTFS_DIR": { - "value": "", - "allowOverride": true - }, - "DockerVolumeName": { - "value": "coreclr-cross-$(Build.BuildId)" - }, - "DockerCommonRunArgs": { - "value": "--name $(DockerContainerName) --ulimit core=-1 -v \"$(DockerVolumeName):$(GitHubDirectory)\" -w=\"$(GitHubDirectory)\" $(DockerImageName)" - }, - "PB_CleanAgent": { - "value": "true" - }, - "VsoAccountName": { - "value": "dn-bot" - }, - "VsoCoreClrGitUrl": { - "value": "https://$(VsoAccountName):$(VsoPassword)@devdiv.visualstudio.com/DevDiv/_git/$(VsoRepositoryName)/" - }, - "VsoPassword": { - "value": null, - "isSecret": true - }, - "VsoRepositoryName": { - "value": "DotNet-CoreCLR-Trusted" - } - }, - "demands": [ - "Agent.OS -equals linux" - ], - "retentionRules": [ - { - "branches": [ - "+refs/heads/*" - ], - "artifacts": [], - "artifactTypesToDelete": [ - "FilePath", - "SymbolStore" - ], - "daysToKeep": 10, - "minimumToKeep": 1, - "deleteBuildRecord": true, - "deleteTestResults": true - } - ], - "buildNumberFormat": "$(date:yyyyMMdd)$(rev:-rr)-$(DockerTag)", - "jobAuthorizationScope": "projectCollection", - "jobTimeoutInMinutes": 90, - "jobCancelTimeoutInMinutes": 5, - "repository": { - "properties": { - "labelSources": "0", - "reportBuildStatus": "true", - "fetchDepth": "0", - "gitLfsSupport": "false", - "skipSyncSource": "false", - "cleanOptions": "0", - "checkoutNestedSubmodules": "false", - "labelSourcesFormat": "$(build.buildNumber)" - }, - "id": "670e3783-ab4f-44fc-9786-d332007da311", - "type": "TfsGit", - "name": "DotNet-CoreCLR-Trusted", - "url": "https://devdiv.visualstudio.com/DevDiv/_git/DotNet-CoreCLR-Trusted", - "defaultBranch": "refs/heads/master", - "clean": "true", - "checkoutSubmodules": false - }, - "processParameters": {}, - "quality": "definition", - "drafts": [], - "queue": { - "id": 36, - "name": "DotNet-Build", - "pool": { - "id": 39, - "name": "DotNet-Build" - } - }, - "id": 5019, - "name": "DotNet-CoreClr-Trusted-Linux-Crossbuild", - "path": "\\", - "type": "build", - "queueStatus": "enabled", - "project": { - "id": "0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "name": "DevDiv", - "description": "Visual Studio and DevDiv team project for git source code repositories. Work items will be added for Adams, Dev14 work items are tracked in vstfdevdiv. 
", - "url": "https://devdiv.visualstudio.com/DefaultCollection/_apis/projects/0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "state": "wellFormed", - "revision": 418098432, - "visibility": "organization" - } -} diff --git a/buildpipeline/DotNet-CoreClr-Trusted-Linux.json b/buildpipeline/DotNet-CoreClr-Trusted-Linux.json deleted file mode 100644 index e60215750f3e..000000000000 --- a/buildpipeline/DotNet-CoreClr-Trusted-Linux.json +++ /dev/null @@ -1,572 +0,0 @@ -{ - "build": [ - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run AgentTools/Begin.sh", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\Begin.sh\") {\n \"Begin.sh script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\Begin.sh\n} else {\n \"Begin.sh script does not exist. Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Initialize tools", - "timeoutInMinutes": 0, - "refName": "Task1", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Build.SourcesDirectory)/init-tools.sh", - "arguments": "", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Initialize Docker", - "timeoutInMinutes": 0, - "refName": "Task2", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Build.SourcesDirectory)/Tools/scripts/docker/init-docker.sh", - "arguments": "$(DockerImageName)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Clone repository", - "timeoutInMinutes": 0, - "refName": "Task3", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) git clone $(VsoCoreClrGitUrl) $(GitHubDirectory)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run clean.sh", - "timeoutInMinutes": 0, - "refName": "Task4", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) ./clean.sh -all", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Check out the specified commit", - "timeoutInMinutes": 0, - "refName": "Task5", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) git checkout $(Build.SourceVersion)", - 
"workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run sync.sh", - "timeoutInMinutes": 0, - "refName": "Task6", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) ./sync.sh -- /p:BuildType=$(PB_BuildType)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run build.sh", - "timeoutInMinutes": 0, - "refName": "Task7", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) ./build.sh $(PB_BuildType) $(Architecture) skipnuget -skiprestore stripSymbols -OfficialBuildId=$(OfficialBuildId) $(PB_AdditionalBuildArgs) -- /flp:\"v=diag\"", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run build-packages.sh", - "timeoutInMinutes": 0, - "refName": "Task8", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) ./build-packages.sh -BuildType=$(PB_BuildType) -BuildArch=$(Architecture) $(PB_AdditionalBuildArgs) -- /p:OutputRID=$(PB_OutputRID) /p:OfficialBuildId=$(OfficialBuildId)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish packages", - "timeoutInMinutes": 0, - "refName": "Task9", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) ./publish-packages.sh -AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -BuildType=$(PB_BuildType) -BuildArch=$(Platform) -Container=$(PB_ContainerName) -PublishPackages -- /p:RelativePath=$(PB_BlobNamePrefix)$(PB_BuildType)/pkg /p:PublishFlatContainer=$(PublishFlat) /p:OverwriteOnPublish=true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish symbol packages", - "timeoutInMinutes": 0, - "refName": "Task10", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) ./publish-packages.sh -AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -BuildType=$(PB_BuildType) -BuildArch=$(Platform) -Container=$(PB_ContainerName) -PublishSymbols -- /p:RelativePath=$(PB_BlobNamePrefix)$(PB_BuildType)/symbolpkg /p:OverwriteOnPublish=true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish test native binaries", - "timeoutInMinutes": 0, - "refName": "Task11", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - 
"definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run --rm $(DockerCommonRunArgs) ./publish-packages.sh -AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -BuildType=$(PB_BuildType) -BuildArch=$(Platform) -Container=$(PB_ContainerName) -PublishTestNativeBins -- /p:RelativePath=$(PB_BlobNamePrefix)$(PB_BuildType)/TestNativeBins/$(Rid)-$(Architecture) /p:OverwriteOnPublish=true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Create Copy Container", - "timeoutInMinutes": 0, - "refName": "Task12", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "run $(DockerCommonRunArgs) echo", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Expose Docker repo for publishing", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "refName": "Task13", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "docker", - "arguments": "cp $(DockerContainerName):$(GitHubDirectory) $(DockerCopyDest)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Copy Files to: $(Build.StagingDirectory)\\BuildLogs", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "refName": "CopyFiles1", - "task": { - "id": "5bfb729a-a7c8-4a78-a7c3-8d717bb7c13c", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "SourceFolder": "$(DockerCopyDest)", - "Contents": "**/*.log", - "TargetFolder": "$(Build.StagingDirectory)\\BuildLogs", - "CleanTargetFolder": "false", - "OverWrite": "false", - "flattenFolders": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Publish Artifact: BuildLogs", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "refName": "PublishBuildArtifacts2", - "task": { - "id": "2ff763a7-ce83-4e1f-bc89-0ae63477cebe", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "PathtoPublish": "$(Build.StagingDirectory)\\BuildLogs", - "ArtifactName": "BuildLogs", - "ArtifactType": "Container", - "TargetPath": "\\\\my\\share\\$(Build.DefinitionName)\\$(Build.BuildNumber)", - "Parallel": "false", - "ParallelCount": "8" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Cleanup Docker", - "timeoutInMinutes": 0, - "condition": "always()", - "refName": "Task14", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Build.SourcesDirectory)/Tools/scripts/docker/cleanup-docker.sh", - "arguments": "", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Run AgentTools/End.sh", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - 
"targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\End.sh\") {\n \"End.sh script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\End.sh\n} else {\n \"End.sh script does not exist. Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - } - ], - "options": [ - { - "enabled": false, - "definition": { - "id": "a9db38f9-9fdc-478c-b0f9-464221e58316" - }, - "inputs": { - "workItemType": "234347", - "assignToRequestor": "true", - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "57578776-4c22-4526-aeb0-86b6da17ee9c" - }, - "inputs": { - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "5d58cc01-7c75-450c-be18-a388ddb129ec" - }, - "inputs": { - "branchFilters": "[\"+refs/heads/*\"]", - "additionalFields": "{}" - } - } - ], - "variables": { - "system.debug": { - "value": "false", - "allowOverride": true - }, - "Platform": { - "value": "x64" - }, - "PB_BuildType": { - "value": "Release" - }, - "GitHubRepo": { - "value": "http://github.com/dotnet/coreclr.git" - }, - "GitHubDirectory": { - "value": "/root/coreclr" - }, - "DockerContainerName": { - "value": "coreclr-$(Build.BuildId)" - }, - "DockerImageName": { - "value": "$(DockerRepository):$(DockerTag)" - }, - "DockerRepository": { - "value": "microsoft/dotnet-buildtools-prereqs" - }, - "DockerTag": { - "value": "centos-7-d485f41-20173404063424", - "allowOverride": true - }, - "CloudDropAccountName": { - "value": "dotnetbuildoutput" - }, - "CloudDropAccessToken": { - "value": null, - "isSecret": true - }, - "OfficialBuildId": { - "value": "$(Build.BuildNumber)", - "allowOverride": true - }, - "Label": { - "value": "$(Build.BuildNumber)" - }, - "GitHubBranch": { - "value": "sni_plus_latestbuildtools" - }, - "Priority": { - "value": "0" - }, - "RepoAccessToken": { - "value": null, - "isSecret": true - }, - "Architecture": { - "value": "x64" - }, - "CommitToCheckout": { - "value": "HEAD", - "allowOverride": true - }, - "DockerVolumeName": { - "value": "coreclr-$(Build.BuildId)" - }, - "DockerCommonRunArgs": { - "value": "--name $(DockerContainerName) --ulimit core=-1 -v \"$(DockerVolumeName):$(GitHubDirectory)\" -w=\"$(GitHubDirectory)\" $(DockerImageName)" - }, - "DockerCopyDest": { - "value": "$(Build.BinariesDirectory)/docker_repo" - }, - "PB_CleanAgent": { - "value": "true" - }, - "PB_AdditionalBuildArgs": { - "value": "" - }, - "VsoAccountName": { - "value": "dn-bot" - }, - "VsoCoreClrGitUrl": { - "value": "https://$(VsoAccountName):$(VsoPassword)@devdiv.visualstudio.com/DevDiv/_git/$(VsoRepositoryName)/" - }, - "VsoPassword": { - "value": null, - "isSecret": true - }, - "VsoRepositoryName": { - "value": "DotNet-CoreCLR-Trusted" - }, - "PB_OutputRID": { - "value": "" - } - }, - "demands": [ - "Agent.OS -equals linux" - ], - "retentionRules": [ - { - "branches": [ - "+refs/heads/*" - ], - "artifacts": [], - "artifactTypesToDelete": [ - "FilePath", - "SymbolStore" - ], - "daysToKeep": 10, - "minimumToKeep": 1, - "deleteBuildRecord": true, - "deleteTestResults": true - } - ], - "buildNumberFormat": "$(date:yyyyMMdd)$(rev:-rr)-$(DockerTag)", - "jobAuthorizationScope": "projectCollection", - "jobTimeoutInMinutes": 90, - "jobCancelTimeoutInMinutes": 5, - "repository": { - "properties": { - "labelSources": "0", - "reportBuildStatus": "true", - "fetchDepth": "0", - "gitLfsSupport": "false", - "skipSyncSource": "false", 
- "cleanOptions": "1", - "checkoutNestedSubmodules": "false", - "labelSourcesFormat": "$(build.buildNumber)" - }, - "id": "670e3783-ab4f-44fc-9786-d332007da311", - "type": "TfsGit", - "name": "DotNet-CoreCLR-Trusted", - "url": "https://devdiv.visualstudio.com/DevDiv/_git/DotNet-CoreCLR-Trusted", - "defaultBranch": "refs/heads/master", - "clean": "true", - "checkoutSubmodules": false - }, - "processParameters": {}, - "quality": "definition", - "drafts": [], - "queue": { - "id": 36, - "name": "DotNet-Build", - "pool": { - "id": 39, - "name": "DotNet-Build" - } - }, - "id": 1713, - "name": "DotNet-CoreClr-Trusted-Linux", - "path": "\\", - "type": "build", - "queueStatus": "enabled", - "project": { - "id": "0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "name": "DevDiv", - "description": "Visual Studio and DevDiv team project for git source code repositories. Work items will be added for Adams, Dev14 work items are tracked in vstfdevdiv. ", - "url": "https://devdiv.visualstudio.com/DefaultCollection/_apis/projects/0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "state": "wellFormed", - "revision": 418098432, - "visibility": "organization" - } -} diff --git a/buildpipeline/DotNet-CoreClr-Trusted-Mac.json b/buildpipeline/DotNet-CoreClr-Trusted-Mac.json deleted file mode 100644 index 34ffacdd3c12..000000000000 --- a/buildpipeline/DotNet-CoreClr-Trusted-Mac.json +++ /dev/null @@ -1,360 +0,0 @@ -{ - "build": [ - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run AgentTools/Begin.sh", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\Begin.sh\") {\n \"Begin.sh script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\Begin.sh\n} else {\n \"Begin.sh script does not exist. 
Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run $(Agent.BuildDirectory)/s/clean.sh", - "timeoutInMinutes": 0, - "refName": "Task1", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Agent.BuildDirectory)/s/clean.sh", - "arguments": "-all", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run $(Agent.BuildDirectory)/s/sync.sh", - "timeoutInMinutes": 0, - "refName": "Task2", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Agent.BuildDirectory)/s/sync.sh", - "arguments": " -- /p:BuildType=$(PB_BuildType)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run $(Agent.BuildDirectory)/s/build.sh", - "timeoutInMinutes": 0, - "refName": "Task3", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Agent.BuildDirectory)/s/build.sh", - "arguments": "$(PB_BuildType) $(Architecture) skipnuget -skiprestore stripSymbols -OfficialBuildId=$(OfficialBuildId)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run $(Agent.BuildDirectory)/s/build-packages.sh", - "timeoutInMinutes": 0, - "refName": "Task4", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Agent.BuildDirectory)/s/build-packages.sh", - "arguments": "-BuildType=$(PB_BuildType) -BuildArch=$(Architecture) -- /p:OfficialBuildId=$(OfficialBuildId)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish packages", - "timeoutInMinutes": 0, - "refName": "Task5", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Agent.BuildDirectory)/s/publish-packages.sh", - "arguments": "-AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -BuildType=$(PB_BuildType) -BuildArch=$(Architecture) -Container=$(PB_ContainerName) -distroRid=$(Rid) -PublishPackages -- /p:RelativePath=$(PB_BlobNamePrefix)$(PB_BuildType)/pkg /p:PublishFlatContainer=$(PublishFlat) /p:OverwriteOnPublish=true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish symbol packages", - "timeoutInMinutes": 0, - "refName": "Task6", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Agent.BuildDirectory)/s/publish-packages.sh", - "arguments": "-AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -BuildType=$(PB_BuildType) 
-BuildArch=$(Architecture) -Container=$(PB_ContainerName) -distroRid=$(Rid) -PublishSymbols -- /p:RelativePath=$(PB_BlobNamePrefix)$(PB_BuildType)/symbolpkg /p:OverwriteOnPublish=true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish test native binaries", - "timeoutInMinutes": 0, - "refName": "Task7", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Agent.BuildDirectory)/s/publish-packages.sh", - "arguments": "-AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -BuildType=$(PB_BuildType) -BuildArch=$(Architecture) -Container=$(PB_ContainerName) -distroRid=$(Rid) -PublishSymbols -PublishTestNativeBins -- /p:RelativePath=$(PB_BlobNamePrefix)$(PB_BuildType)/TestNativeBins/$(Rid)-$(Architecture) /p:OverwriteOnPublish=true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Copy Files to: $(Build.StagingDirectory)\\BuildLogs", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "refName": "CopyFiles1", - "task": { - "id": "5bfb729a-a7c8-4a78-a7c3-8d717bb7c13c", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "SourceFolder": "", - "Contents": "**/*.log", - "TargetFolder": "$(Build.StagingDirectory)\\BuildLogs", - "CleanTargetFolder": "false", - "OverWrite": "false", - "flattenFolders": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Publish Artifact: BuildLogs", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "refName": "PublishBuildArtifacts1", - "task": { - "id": "2ff763a7-ce83-4e1f-bc89-0ae63477cebe", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "PathtoPublish": "$(Build.StagingDirectory)\\BuildLogs", - "ArtifactName": "BuildLogs", - "ArtifactType": "Container", - "TargetPath": "\\\\my\\share\\$(Build.DefinitionName)\\$(Build.BuildNumber)", - "Parallel": "false", - "ParallelCount": "8" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Run AgentTools/End.sh", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\End.sh\") {\n \"End.sh script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\End.sh\n} else {\n \"End.sh script does not exist. 
Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - } - ], - "options": [ - { - "enabled": false, - "definition": { - "id": "a9db38f9-9fdc-478c-b0f9-464221e58316" - }, - "inputs": { - "workItemType": "234347", - "assignToRequestor": "true", - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "57578776-4c22-4526-aeb0-86b6da17ee9c" - }, - "inputs": { - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "5d58cc01-7c75-450c-be18-a388ddb129ec" - }, - "inputs": { - "branchFilters": "[\"+refs/heads/*\"]", - "additionalFields": "{}" - } - } - ], - "variables": { - "system.debug": { - "value": "false", - "allowOverride": true - }, - "PB_BuildType": { - "value": "Release" - }, - "Architecture": { - "value": "x64" - }, - "CloudDropAccountName": { - "value": "dotnetbuildoutput" - }, - "CloudDropAccessToken": { - "value": null, - "isSecret": true - }, - "OfficialBuildId": { - "value": "$(Build.BuildNumber)" - }, - "Label": { - "value": "$(Build.BuildNumber)" - } - }, - "demands": [], - "retentionRules": [ - { - "branches": [ - "+refs/heads/*" - ], - "artifacts": [], - "artifactTypesToDelete": [ - "FilePath", - "SymbolStore" - ], - "daysToKeep": 10, - "minimumToKeep": 1, - "deleteBuildRecord": true, - "deleteTestResults": true - } - ], - "buildNumberFormat": "$(date:yyyyMMdd)$(rev:-rr)", - "jobAuthorizationScope": "projectCollection", - "jobTimeoutInMinutes": 60, - "jobCancelTimeoutInMinutes": 5, - "repository": { - "properties": { - "labelSources": "0", - "reportBuildStatus": "true", - "fetchDepth": "0", - "gitLfsSupport": "false", - "skipSyncSource": "false", - "cleanOptions": "0", - "checkoutNestedSubmodules": "false", - "labelSourcesFormat": "$(build.buildNumber)" - }, - "id": "670e3783-ab4f-44fc-9786-d332007da311", - "type": "TfsGit", - "name": "DotNet-CoreCLR-Trusted", - "url": "https://devdiv.visualstudio.com/DevDiv/_git/DotNet-CoreCLR-Trusted", - "defaultBranch": "refs/heads/master", - "clean": "true", - "checkoutSubmodules": false - }, - "processParameters": {}, - "quality": "definition", - "drafts": [], - "queue": { - "id": 681, - "name": "VSEng-MicroBuildMacSierra", - "pool": { - "id": 120, - "name": "VSEng-MicroBuildMacSierra" - } - }, - "id": 1680, - "name": "DotNet-CoreClr-Trusted-Mac", - "path": "\\", - "type": "build", - "queueStatus": "enabled", - "project": { - "id": "0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "name": "DevDiv", - "description": "Visual Studio and DevDiv team project for git source code repositories. Work items will be added for Adams, Dev14 work items are tracked in vstfdevdiv. 
", - "url": "https://devdiv.visualstudio.com/DefaultCollection/_apis/projects/0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "state": "wellFormed", - "revision": 418098432, - "visibility": "organization" - } -} diff --git a/buildpipeline/DotNet-CoreClr-Trusted-Windows-x86.json b/buildpipeline/DotNet-CoreClr-Trusted-Windows-x86.json deleted file mode 100644 index 0c68d3b237e9..000000000000 --- a/buildpipeline/DotNet-CoreClr-Trusted-Windows-x86.json +++ /dev/null @@ -1,486 +0,0 @@ -{ - "build": [ - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run AgentTools/Begin.ps1", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\Begin.ps1\") {\n \"Begin.ps1 script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\Begin.ps1\n} else {\n \"Begin.ps1 script does not exist. Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Install Signing Plugin", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), in(variables.PB_SignType, 'real', 'test'))", - "refName": "Task1", - "task": { - "id": "30666190-6959-11e5-9f96-f56098202fef", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "signType": "real", - "zipSources": "false", - "version": "", - "feedSource": "https://devdiv.pkgs.visualstudio.com/DefaultCollection/_packaging/MicroBuildToolset/nuget/v3/index.json", - "legacySigning": "$(PB_UseLegacySigning)" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Setup vs dev env", - "timeoutInMinutes": 0, - "refName": "Task2", - "task": { - "id": "bfc8bf76-e7ac-4a8c-9a55-a944a9f632fd", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "setup_vs_tools.cmd", - "arguments": "", - "modifyEnvironment": "true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run clean.cmd", - "timeoutInMinutes": 0, - "refName": "Task3", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "clean.cmd", - "arguments": "-all", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run sync.cmd", - "timeoutInMinutes": 0, - "refName": "Task4", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "sync.cmd", - "arguments": "-p -- /p:BuildType=$(PB_BuildType)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run build.cmd", - "timeoutInMinutes": 0, - "refName": "Task5", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "build.cmd", - "arguments": 
"$(Architecture) $(PB_BuildType) skiptests skipbuildpackages $(PB_EnforcePGO) -OfficialBuildId=$(OfficialBuildId) -skiprestore -Priority=$(Priority) -- /p:SignType=$(PB_SignType)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Sign binaries", - "timeoutInMinutes": 0, - "refName": "Task6", - "task": { - "id": "c6c4c611-aa2e-4a33-b606-5eaba2196824", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "solution": "src\\sign.builds", - "msbuildLocationMethod": "version", - "msbuildVersion": "14.0", - "msbuildArchitecture": "x86", - "msbuildLocation": "", - "platform": "", - "configuration": "", - "msbuildArguments": "/p:SignType=$(PB_SignType) /p:BuildType=$(PB_BuildType) /p:BuildArch=$(Architecture)", - "clean": "false", - "maximumCpuCount": "false", - "restoreNugetPackages": "false", - "logProjectEvents": "false", - "createLogFile": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run build-packages.cmd", - "timeoutInMinutes": 0, - "refName": "Task7", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "build-packages.cmd", - "arguments": "-BuildArch=$(Architecture) -BuildType=$(PB_BuildType) -- /p:SignType=$(PB_SignType) /p:OfficialBuildId=$(OfficialBuildId)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish packages", - "timeoutInMinutes": 0, - "refName": "Task8", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "publish-packages.cmd", - "arguments": "-AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -BuildArch=$(Architecture) -BuildType=$(PB_BuildType) -Container=$(PB_ContainerName) -PublishPackages -- /p:RelativePath=$(PB_BlobNamePrefix)$(PB_BuildType)/pkg /p:PublishFlatContainer=$(PublishFlat) /p:OverwriteOnPublish=true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish symbol packages", - "timeoutInMinutes": 0, - "refName": "Task9", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "publish-packages.cmd", - "arguments": "-AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -BuildArch=$(Architecture) -BuildType=$(PB_BuildType) -Container=$(PB_ContainerName) -PublishSymbols -- /p:RelativePath=$(PB_BlobNamePrefix)$(PB_BuildType)/symbolpkg /p:OverwriteOnPublish=true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Perform Cleanup Tasks", - "timeoutInMinutes": 0, - "condition": "always()", - "refName": "Task10", - "task": { - "id": "521a94ea-9e68-468a-8167-6dcf361ea776", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": {} - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Copy Files to: $(Build.StagingDirectory)\\BuildLogs", - "timeoutInMinutes": 0, - "condition": 
"succeededOrFailed()", - "refName": "CopyFiles1", - "task": { - "id": "5bfb729a-a7c8-4a78-a7c3-8d717bb7c13c", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "SourceFolder": "", - "Contents": "**/*.log", - "TargetFolder": "$(Build.StagingDirectory)\\BuildLogs", - "CleanTargetFolder": "false", - "OverWrite": "false", - "flattenFolders": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Publish Artifact: BuildLogs", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "refName": "PublishBuildArtifacts1", - "task": { - "id": "2ff763a7-ce83-4e1f-bc89-0ae63477cebe", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "PathtoPublish": "$(Build.StagingDirectory)\\BuildLogs", - "ArtifactName": "BuildLogs", - "ArtifactType": "Container", - "TargetPath": "\\\\my\\share\\$(Build.DefinitionName)\\$(Build.BuildNumber)", - "Parallel": "false", - "ParallelCount": "8" - } - }, - { - "environment": {}, - "enabled": false, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Copy Publish Artifact: symbols", - "timeoutInMinutes": 0, - "refName": "PublishBuildArtifacts2", - "task": { - "id": "2ff763a7-ce83-4e1f-bc89-0ae63477cebe", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "PathtoPublish": "$(Build.StagingDirectory)\\symbols", - "ArtifactName": "symbols", - "ArtifactType": "FilePath", - "TargetPath": "\\\\cpvsbuild\\drops\\DotNetCore\\$(Build.DefinitionName)\\$(Build.BuildNumber)\\FullSymbols", - "Parallel": "false", - "ParallelCount": "8" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Run AgentTools/End.ps1", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\End.ps1\") {\n \"End.ps1 script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\End.ps1\n} else {\n \"End.ps1 script does not exist. 
Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - } - ], - "options": [ - { - "enabled": false, - "definition": { - "id": "a9db38f9-9fdc-478c-b0f9-464221e58316" - }, - "inputs": { - "workItemType": "234347", - "assignToRequestor": "true", - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "57578776-4c22-4526-aeb0-86b6da17ee9c" - }, - "inputs": { - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "5d58cc01-7c75-450c-be18-a388ddb129ec" - }, - "inputs": { - "branchFilters": "[\"+refs/heads/*\"]", - "additionalFields": "{}" - } - } - ], - "variables": { - "system.debug": { - "value": "false", - "allowOverride": true - }, - "PB_BuildType": { - "value": "Release" - }, - "Architecture": { - "value": "x86" - }, - "Priority": { - "value": "0" - }, - "CloudDropAccountName": { - "value": "dotnetbuildoutput" - }, - "CloudDropAccessToken": { - "value": null, - "isSecret": true - }, - "OfficialBuildId": { - "value": "$(Build.BuildNumber)" - }, - "Label": { - "value": "$(Build.BuildNumber)" - }, - "PB_SignType": { - "value": "test", - "allowOverride": true - }, - "PB_UseLegacySigning": { - "value": "false", - "allowOverride": true - }, - "TeamName": { - "value": "DotNetCore" - }, - "Jit32Repo": { - "value": "https://$(VsoAccountName):$(VsoPassword)@devdiv.visualstudio.com/DefaultCollection/DevDiv/_git/DotNet-JIT32-Internal" - }, - "Jit32Branch": { - "value": "master" - }, - "VsoAccountName": { - "value": "dn-bot" - }, - "VsoPassword": { - "value": null, - "isSecret": true - }, - "PB_EnforcePGO": { - "value": "", - "allowOverride": true - } - }, - "retentionRules": [ - { - "branches": [ - "+refs/heads/*" - ], - "artifacts": [], - "artifactTypesToDelete": [ - "FilePath", - "SymbolStore" - ], - "daysToKeep": 10, - "minimumToKeep": 1, - "deleteBuildRecord": true, - "deleteTestResults": true - } - ], - "buildNumberFormat": "$(date:yyyyMMdd)$(rev:-rr)", - "jobAuthorizationScope": "projectCollection", - "jobTimeoutInMinutes": 180, - "jobCancelTimeoutInMinutes": 5, - "repository": { - "properties": { - "labelSources": "0", - "reportBuildStatus": "true", - "fetchDepth": "0", - "gitLfsSupport": "false", - "skipSyncSource": "false", - "cleanOptions": "0", - "checkoutNestedSubmodules": "false", - "labelSourcesFormat": "$(build.buildNumber)" - }, - "id": "670e3783-ab4f-44fc-9786-d332007da311", - "type": "TfsGit", - "name": "DotNet-CoreCLR-Trusted", - "url": "https://devdiv.visualstudio.com/DevDiv/_git/DotNet-CoreCLR-Trusted", - "defaultBranch": "refs/heads/master", - "clean": "true", - "checkoutSubmodules": false - }, - "processParameters": {}, - "quality": "definition", - "drafts": [], - "queue": { - "_links": { - "self": { - "href": "https://devdiv.visualstudio.com/DefaultCollection/_apis/build/Queues/330" - } - }, - "id": 330, - "name": "DotNetCore-Build", - "url": "https://devdiv.visualstudio.com/DefaultCollection/_apis/build/Queues/330", - "pool": { - "id": 97, - "name": "DotNetCore-Build" - } - }, - "id": 1828, - "name": "DotNet-CoreClr-Trusted-Windows-x86", - "path": "\\", - "type": "build", - "queueStatus": "enabled", - "project": { - "id": "0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "name": "DevDiv", - "description": "Visual Studio and DevDiv team project for git source code repositories. Work items will be added for Adams, Dev14 work items are tracked in vstfdevdiv. 
", - "url": "https://devdiv.visualstudio.com/DefaultCollection/_apis/projects/0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "state": "wellFormed", - "revision": 418098432, - "visibility": "organization" - } -} diff --git a/buildpipeline/DotNet-CoreClr-Trusted-Windows.json b/buildpipeline/DotNet-CoreClr-Trusted-Windows.json deleted file mode 100644 index 1c9e727d0e0a..000000000000 --- a/buildpipeline/DotNet-CoreClr-Trusted-Windows.json +++ /dev/null @@ -1,479 +0,0 @@ -{ - "build": [ - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run AgentTools/Begin.ps1", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\Begin.ps1\") {\n \"Begin.ps1 script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\Begin.ps1\n} else {\n \"Begin.ps1 script does not exist. Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Install Signing Plugin", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), in(variables.PB_SignType, 'real', 'test'))", - "refName": "Task1", - "task": { - "id": "30666190-6959-11e5-9f96-f56098202fef", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "signType": "real", - "zipSources": "false", - "version": "", - "feedSource": "https://devdiv.pkgs.visualstudio.com/DefaultCollection/_packaging/MicroBuildToolset/nuget/v3/index.json", - "legacySigning": "$(PB_UseLegacySigning)" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Setup vs dev env", - "timeoutInMinutes": 0, - "refName": "Task2", - "task": { - "id": "bfc8bf76-e7ac-4a8c-9a55-a944a9f632fd", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "setup_vs_tools.cmd", - "arguments": "", - "modifyEnvironment": "true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run clean.cmd", - "timeoutInMinutes": 0, - "refName": "Task3", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "clean.cmd", - "arguments": "-all", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run sync.cmd", - "timeoutInMinutes": 0, - "refName": "Task4", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "sync.cmd", - "arguments": "-p -- /p:BuildType=$(PB_BuildType)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run build.cmd", - "timeoutInMinutes": 0, - "refName": "Task5", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "build.cmd", - "arguments": 
"$(Architecture) $(PB_BuildType) skiptests skipbuildpackages $(PB_EnforcePGO) $(ToolsetArgs) -OfficialBuildId=$(OfficialBuildId) -Priority=$(Priority) -skiprestore -- /p:SignType=$(PB_SignType) /flp:\"v=diag\"", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Sign binaries", - "timeoutInMinutes": 0, - "refName": "Task6", - "task": { - "id": "c6c4c611-aa2e-4a33-b606-5eaba2196824", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "solution": "src\\sign.builds", - "msbuildLocationMethod": "version", - "msbuildVersion": "14.0", - "msbuildArchitecture": "x86", - "msbuildLocation": "", - "platform": "", - "configuration": "", - "msbuildArguments": "/p:SignType=$(PB_SignType) /p:BuildType=$(PB_BuildType) /p:BuildArch=$(Architecture)", - "clean": "false", - "maximumCpuCount": "false", - "restoreNugetPackages": "false", - "logProjectEvents": "false", - "createLogFile": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run build-packages.cmd", - "timeoutInMinutes": 0, - "refName": "Task7", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "build-packages.cmd", - "arguments": "-BuildArch=$(Architecture) -BuildType=$(PB_BuildType) -- /p:SignType=$(PB_SignType) /p:OfficialBuildId=$(OfficialBuildId)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish packages", - "timeoutInMinutes": 0, - "refName": "Task8", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "publish-packages.cmd", - "arguments": "-AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -BuildArch=$(Architecture) -BuildType=$(PB_BuildType) -Container=$(PB_ContainerName) -PublishPackages -- /p:RelativePath=$(PB_BlobNamePrefix)$(PB_BuildType)/pkg /p:PublishFlatContainer=$(PublishFlat) /p:OverwriteOnPublish=true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish symbol packages", - "timeoutInMinutes": 0, - "refName": "Task9", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "publish-packages.cmd", - "arguments": "-AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -BuildArch=$(Architecture) -BuildType=$(PB_BuildType) -Container=$(PB_ContainerName) -PublishSymbols -- /p:RelativePath=$(PB_BlobNamePrefix)$(PB_BuildType)/symbolpkg /p:OverwriteOnPublish=true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Perform Cleanup Tasks", - "timeoutInMinutes": 0, - "condition": "always()", - "refName": "Task10", - "task": { - "id": "521a94ea-9e68-468a-8167-6dcf361ea776", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": {} - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Copy Files to: $(Build.StagingDirectory)\\BuildLogs", - 
"timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "refName": "CopyFiles1", - "task": { - "id": "5bfb729a-a7c8-4a78-a7c3-8d717bb7c13c", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "SourceFolder": "", - "Contents": "**/*.log", - "TargetFolder": "$(Build.StagingDirectory)\\BuildLogs", - "CleanTargetFolder": "false", - "OverWrite": "false", - "flattenFolders": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Publish Artifact: BuildLogs", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "refName": "PublishBuildArtifacts1", - "task": { - "id": "2ff763a7-ce83-4e1f-bc89-0ae63477cebe", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "PathtoPublish": "$(Build.StagingDirectory)\\BuildLogs", - "ArtifactName": "BuildLogs", - "ArtifactType": "Container", - "TargetPath": "\\\\my\\share\\$(Build.DefinitionName)\\$(Build.BuildNumber)", - "Parallel": "false", - "ParallelCount": "8" - } - }, - { - "environment": {}, - "enabled": false, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Copy Publish Artifact: symbols", - "timeoutInMinutes": 0, - "refName": "PublishBuildArtifacts2", - "task": { - "id": "2ff763a7-ce83-4e1f-bc89-0ae63477cebe", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "PathtoPublish": "$(Build.StagingDirectory)\\symbols", - "ArtifactName": "symbols", - "ArtifactType": "FilePath", - "TargetPath": "\\\\cpvsbuild\\drops\\DotNetCore\\$(Build.DefinitionName)\\$(Build.BuildNumber)\\FullSymbols", - "Parallel": "false", - "ParallelCount": "8" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Run AgentTools/End.ps1", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\End.ps1\") {\n \"End.ps1 script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\End.ps1\n} else {\n \"End.ps1 script does not exist. 
Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - } - ], - "options": [ - { - "enabled": false, - "definition": { - "id": "a9db38f9-9fdc-478c-b0f9-464221e58316" - }, - "inputs": { - "workItemType": "234347", - "assignToRequestor": "true", - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "57578776-4c22-4526-aeb0-86b6da17ee9c" - }, - "inputs": { - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "5d58cc01-7c75-450c-be18-a388ddb129ec" - }, - "inputs": { - "branchFilters": "[\"+refs/heads/*\"]", - "additionalFields": "{}" - } - } - ], - "variables": { - "system.debug": { - "value": "false", - "allowOverride": true - }, - "PB_BuildType": { - "value": "Release", - "allowOverride": true - }, - "Architecture": { - "value": "x64", - "allowOverride": true - }, - "ToolsetArgs": { - "value": "", - "allowOverride": true - }, - "Priority": { - "value": "0" - }, - "CloudDropAccountName": { - "value": "dotnetbuildoutput" - }, - "CloudDropAccessToken": { - "value": null, - "isSecret": true - }, - "OfficialBuildId": { - "value": "$(Build.BuildNumber)" - }, - "Label": { - "value": "$(Build.BuildNumber)" - }, - "PB_SignType": { - "value": "test", - "allowOverride": true - }, - "PB_UseLegacySigning": { - "value": "false", - "allowOverride": true - }, - "TeamName": { - "value": "DotNetCore" - }, - "PB_EnforcePGO": { - "value": "", - "allowOverride": true - } - }, - "retentionRules": [ - { - "branches": [ - "+refs/heads/*" - ], - "artifacts": [], - "artifactTypesToDelete": [ - "FilePath", - "SymbolStore" - ], - "daysToKeep": 10, - "minimumToKeep": 1, - "deleteBuildRecord": true, - "deleteTestResults": true - } - ], - "buildNumberFormat": "$(date:yyyyMMdd)$(rev:-rr)-$(Architecture)", - "jobAuthorizationScope": "projectCollection", - "jobTimeoutInMinutes": 180, - "jobCancelTimeoutInMinutes": 5, - "repository": { - "properties": { - "labelSources": "0", - "reportBuildStatus": "true", - "fetchDepth": "0", - "gitLfsSupport": "false", - "skipSyncSource": "false", - "cleanOptions": "0", - "checkoutNestedSubmodules": "false", - "labelSourcesFormat": "$(build.buildNumber)" - }, - "id": "670e3783-ab4f-44fc-9786-d332007da311", - "type": "TfsGit", - "name": "DotNet-CoreCLR-Trusted", - "url": "https://devdiv.visualstudio.com/DevDiv/_git/DotNet-CoreCLR-Trusted", - "defaultBranch": "refs/heads/master", - "clean": "false", - "checkoutSubmodules": false - }, - "processParameters": {}, - "quality": "definition", - "drafts": [], - "queue": { - "_links": { - "self": { - "href": "https://devdiv.visualstudio.com/DefaultCollection/_apis/build/Queues/330" - } - }, - "id": 330, - "name": "DotNetCore-Build", - "url": "https://devdiv.visualstudio.com/DefaultCollection/_apis/build/Queues/330", - "pool": { - "id": 97, - "name": "DotNetCore-Build" - } - }, - "id": 1676, - "name": "DotNet-CoreClr-Trusted-Windows", - "path": "\\", - "type": "build", - "queueStatus": "enabled", - "project": { - "id": "0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "name": "DevDiv", - "description": "Visual Studio and DevDiv team project for git source code repositories. Work items will be added for Adams, Dev14 work items are tracked in vstfdevdiv. 
", - "url": "https://devdiv.visualstudio.com/DefaultCollection/_apis/projects/0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "state": "wellFormed", - "revision": 418098432, - "visibility": "organization" - } -} diff --git a/buildpipeline/DotNet-Trusted-Publish-Symbols.json b/buildpipeline/DotNet-Trusted-Publish-Symbols.json deleted file mode 100644 index ad915ba7da1f..000000000000 --- a/buildpipeline/DotNet-Trusted-Publish-Symbols.json +++ /dev/null @@ -1,390 +0,0 @@ -{ - "build": [ - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run AgentTools/Begin.ps1", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\Begin.ps1\") {\n \"Begin.ps1 script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\Begin.ps1\n} else {\n \"Begin.ps1 script does not exist. Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - }, - { - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Set up pipeline-specific git repository", - "timeoutInMinutes": 0, - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "-gitUrl $(PB_VstsRepoGitUrl) -root $(Pipeline.SourcesDirectory)", - "workingFolder": "", - "inlineScript": "param($gitUrl, $root)\n\nif (Test-Path $root)\n{\n Remove-Item -Recurse -Force $root\n}\ngit clone --no-checkout $gitUrl $root 2>&1 | Write-Host\ncd $root\ngit checkout $env:SourceVersion 2>&1 | Write-Host\n\nWrite-Host (\"##vso[task.setvariable variable=Pipeline.SourcesDirectory;]$root\")", - "failOnStandardError": "true" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Setup vs dev env", - "timeoutInMinutes": 0, - "task": { - "id": "bfc8bf76-e7ac-4a8c-9a55-a944a9f632fd", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Pipeline.SourcesDirectory)\\setup_vs_tools.cmd", - "arguments": "", - "modifyEnvironment": "true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Sync symbol packages", - "timeoutInMinutes": 0, - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "sync.cmd", - "arguments": "-ab -AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -Container=$(PB_ContainerName) -BlobNamePrefix=$(PB_BlobNamePrefix)$(ConfigurationGroup)/symbolpkg/ -- /p:DownloadFlatFiles=true /p:BlobNameExtension=\".nupkg\" /p:DownloadDirectory=$(AzureContainerSymbolPackageDirectory)symbolpkg", - "workingFolder": "$(Pipeline.SourcesDirectory)", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Symbol Packages -> Blob Feed", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), contains(variables.PB_PublishType, 'blob'), eq(variables.ConfigurationGroup, 'Release'))", - "task": { - "id": 
"d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "msbuild", - "arguments": "src\\publish.proj /v:D /t:PublishSymbolPackages /p:__PublishSymbols=true $(FeedPublishArguments) /fileloggerparameters:Verbosity=diag;LogFile=publishsympkg.log", - "workingFolder": "$(Pipeline.SourcesDirectory)", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish symbols to msdl", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), contains(variables.PB_PublishType, 'msdl'), eq(variables.ConfigurationGroup, 'Release'))", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "msbuild", - "arguments": "src\\publish.proj /v:D /t:PublishAllSymbols $(FeedPublishArguments) /p:SymbolServerPath=$(PB_MsdlSymbolServerPath) /p:SymbolServerPAT=$(PB_MsdlSymbolServerPAT) /p:SymbolExpirationInDays=$(PB_SymbolExpirationInDays)", - "workingFolder": "$(Pipeline.SourcesDirectory)", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Publish symbols to symweb", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), contains(variables.PB_PublishType, 'symweb'), eq(variables.ConfigurationGroup, 'Release'))", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "msbuild", - "arguments": "src\\publish.proj /v:D /t:PublishAllSymbols $(FeedPublishArguments) /p:SymbolServerPath=$(PB_SymwebSymbolServerPath) /p:SymbolServerPAT=$(PB_SymwebSymbolServerPAT) /p:SymbolExpirationInDays=$(PB_SymbolExpirationInDays)", - "workingFolder": "$(Pipeline.SourcesDirectory)", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Run AgentTools/End.ps1", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\End.ps1\") {\n \"End.ps1 script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\End.ps1\n} else {\n \"End.ps1 script does not exist. 
Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - } - ], - "options": [ - { - "enabled": false, - "definition": { - "id": "5bc3cfb7-6b54-4a4b-b5d2-a3905949f8a6" - }, - "inputs": { - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "7c555368-ca64-4199-add6-9ebaf0b0137d" - }, - "inputs": { - "multipliers": "[]", - "parallel": "false", - "continueOnError": "true", - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "a9db38f9-9fdc-478c-b0f9-464221e58316" - }, - "inputs": { - "workItemType": "234347", - "assignToRequestor": "true", - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "57578776-4c22-4526-aeb0-86b6da17ee9c" - }, - "inputs": { - "additionalFields": "{}" - } - } - ], - "variables": { - "system.debug": { - "value": "false", - "allowOverride": true - }, - "ConfigurationGroup": { - "value": "$(PB_BuildType)", - "allowOverride": true - }, - "CloudDropAccountName": { - "value": "dotnetbuildoutput", - "allowOverride": true - }, - "CloudDropAccessToken": { - "value": null, - "allowOverride": true, - "isSecret": true - }, - "PB_ContainerName": { - "value": "$(Build.BuildNumber)", - "allowOverride": true - }, - "PB_BlobNamePrefix": { - "value": "$(PB_PipeBuildIdentifier)/", - "allowOverride": true - }, - "AzureBlobFeedAccountName": { - "value": "dotnetfeed", - "allowOverride": true - }, - "AzureBlobFeedAccessToken": { - "value": null, - "allowOverride": true, - "isSecret": true - }, - "AzureBlobFeedContainerName": { - "value": "dotnet-core", - "allowOverride": true - }, - "Pipeline.SourcesDirectory": { - "value": "$(Build.BinariesDirectory)\\pipelineRepository" - }, - "PB_VstsAccountName": { - "value": "dn-bot" - }, - "PB_VstsRepositoryName": { - "value": "DotNet-CoreCLR-Trusted", - "allowOverride": true - }, - "PB_VstsRepoGitUrl": { - "value": "https://$(PB_VstsAccountName):$(VstsRepoPat)@devdiv.visualstudio.com/DevDiv/_git/$(PB_VstsRepositoryName)/" - }, - "VstsRepoPat": { - "value": null, - "isSecret": true - }, - "OfficialBuildId": { - "value": "$(Build.BuildNumber)", - "allowOverride": true - }, - "SourceVersion": { - "value": "master", - "allowOverride": true - }, - "SourceBranch": { - "value": "master", - "allowOverride": true - }, - "AzureContainerSymbolPackageDirectory": { - "value": "$(Pipeline.SourcesDirectory)\\packages\\AzureTransfer\\$(ConfigurationGroup)\\", - "allowOverride": true - }, - "AzureContainerSymbolPackageGlob": { - "value": "symbolpkg\\*.nupkg", - }, - "FeedPublishArguments": { - "value": "$(PB_BuildOutputManifestArguments) /p:ExpectedFeedUrl=$(PB_PublishBlobFeedUrl) /p:CloudDropAccessToken=$(PB_PublishBlobFeedKey) /p:CloudDropAccountName=$(AzureBlobFeedAccountName) /p:ContainerName=$(AzureBlobFeedContainerName) /p:OverwriteOnPublish=true /p:PackagesPatternDir=$(AzureContainerSymbolPackageDirectory) /p:__BuildType=$(ConfigurationGroup) /p:OfficialPublish=true /p:PublishFlatContainer=false" - }, - "PB_PublishType": { - "value": "", - "allowOverride": true - }, - "PB_PublishBlobFeedUrl": { - "value": "", - "allowOverride": true - }, - "PB_PublishBlobFeedKey": { - "value": "", - "allowOverride": true - }, - "PB_BuildOutputManifestArguments": { - "value": "/p:ManifestBuildId=$(OfficialBuildId) /p:ManifestBranch=$(SourceBranch) /p:ManifestCommit=$(SourceVersion)" - }, - "PB_MsdlSymbolServerPath": { - "value": 
"https://microsoftpublicsymbols.artifacts.visualstudio.com/DefaultCollection" - }, - "PB_MsdlSymbolServerPAT": { - "value": null, - "isSecret": true - }, - "PB_SymwebSymbolServerPath": { - "value": "https://microsoft.artifacts.visualstudio.com/DefaultCollection" - }, - "PB_SymwebSymbolServerPAT": { - "value": null, - "isSecret": true - }, - "PB_SymbolExpirationInDays": { - "value": "30" - }, - }, - "retentionRules": [ - { - "branches": [ - "+refs/heads/*" - ], - "artifacts": [], - "artifactTypesToDelete": [ - "FilePath", - "SymbolStore" - ], - "daysToKeep": 10, - "minimumToKeep": 1, - "deleteBuildRecord": true, - "deleteTestResults": true - } - ], - "buildNumberFormat": "$(date:yyyyMMdd)$(rev:-rr)", - "jobAuthorizationScope": "projectCollection", - "jobTimeoutInMinutes": 180, - "jobCancelTimeoutInMinutes": 5, - "repository": { - "properties": { - "labelSources": "0", - "reportBuildStatus": "false", - "fetchDepth": "0", - "gitLfsSupport": "false", - "skipSyncSource": "false", - "cleanOptions": "0", - "labelSourcesFormat": "$(build.buildNumber)" - }, - "id": "0a2b2664-c1be-429c-9b40-8a24dee27a4a", - "type": "TfsGit", - "name": "DotNet-BuildPipeline", - "url": "https://devdiv.visualstudio.com/DevDiv/_git/DotNet-BuildPipeline", - "defaultBranch": "refs/heads/master", - "clean": "true", - "checkoutSubmodules": false - }, - "processParameters": {}, - "quality": "definition", - "queue": { - "_links": { - "self": { - "href": "https://devdiv.visualstudio.com/DefaultCollection/_apis/build/Queues/330" - } - }, - "id": 330, - "name": "DotNetCore-Build", - "url": "https://devdiv.visualstudio.com/DefaultCollection/_apis/build/Queues/330", - "pool": { - "id": 97, - "name": "DotNetCore-Build" - } - }, - "id": -1, - "name": "DotNet-Trusted-Publish-Symbols", - "path": "\\", - "type": "build", - "project": { - "id": "0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "name": "DevDiv", - "description": "Visual Studio and DevDiv team project for git source code repositories. Work items will be added for Adams, Dev14 work items are tracked in vstfdevdiv. ", - "url": "https://devdiv.visualstudio.com/DefaultCollection/_apis/projects/0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "state": "wellFormed", - "revision": 418097642 - } -} diff --git a/buildpipeline/DotNet-Trusted-Publish.json b/buildpipeline/DotNet-Trusted-Publish.json deleted file mode 100644 index ed8c3206fa2f..000000000000 --- a/buildpipeline/DotNet-Trusted-Publish.json +++ /dev/null @@ -1,709 +0,0 @@ -{ - "build": [ - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run AgentTools/Begin.ps1", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\Begin.ps1\") {\n \"Begin.ps1 script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\Begin.ps1\n} else {\n \"Begin.ps1 script does not exist. 
Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Install Signing Plugin", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), in(variables.PB_SignType, 'real', 'test'))", - "task": { - "id": "30666190-6959-11e5-9f96-f56098202fef", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "signType": "real", - "zipSources": "true", - "version": "", - "feedSource": "https://devdiv.pkgs.visualstudio.com/DefaultCollection/_packaging/MicroBuildToolset/nuget/v3/index.json", - "legacySigning": "$(PB_UseLegacySigning)" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Fetch custom tooling (NuGet)", - "timeoutInMinutes": 0, - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "filePath", - "scriptName": "scripts/DotNet-Trusted-Publish/Fetch-Tools.ps1", - "arguments": "$(Build.StagingDirectory)\\ToolingDownload", - "workingFolder": "", - "inlineScript": "# You can write your powershell scripts inline here. \n# You can also pass predefined and custom variables to this scripts using arguments\n\n Write-Host \"Hello World\"", - "failOnStandardError": "true" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Set up pipeline-specific git repository", - "timeoutInMinutes": 0, - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "-gitUrl $(VstsRepoGitUrl) -root $(Pipeline.SourcesDirectory)", - "workingFolder": "", - "inlineScript": "param($gitUrl, $root)\n\nif (Test-Path $root)\n{\n Remove-Item -Recurse -Force $root\n}\ngit clone $gitUrl $root 2>&1 | Write-Host\ncd $root\ngit checkout $env:SourceVersion 2>&1 | Write-Host\n\nWrite-Host (\"##vso[task.setvariable variable=Pipeline.SourcesDirectory;]$root\")", - "failOnStandardError": "true" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Setup vs dev env", - "timeoutInMinutes": 0, - "task": { - "id": "bfc8bf76-e7ac-4a8c-9a55-a944a9f632fd", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "$(Pipeline.SourcesDirectory)\\setup_vs_tools.cmd", - "arguments": "", - "modifyEnvironment": "true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Sync packages", - "timeoutInMinutes": 0, - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "sync.cmd", - "arguments": "-ab -AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -Container=$(PB_ContainerName) -BlobNamePrefix=$(PB_BlobNamePrefix)$(ConfigurationGroup)/pkg/flatcontainer/ -- /p:DownloadFlatFiles=true /p:BlobNameExtension=\".nupkg\" /p:DownloadDirectory=$(AzureContainerPackageDirectory)pkg", - "workingFolder": "$(Pipeline.SourcesDirectory)", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": 
false, - "displayName": "Generate Version Assets", - "timeoutInMinutes": 0, - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "", - "workingFolder": "$(Pipeline.SourcesDirectory)", - "inlineScript": "#if ($env:UseLegacyBuildScripts -eq \"true\")\n#{\n msbuild build.proj /t:CreateOrUpdateCurrentVersionFile /p:OfficialBuildId=$env:OfficialBuildId /p:BuildVersionFile=bin\\obj\\BuildVersion-$env:OfficialBuildId.props\n#}\n#else\n#{\n# .\\build-managed.cmd -GenerateVersion \"-OfficialBuildId=$env:OfficialBuildId\"\n#}", - "failOnStandardError": "true" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Log Native Version Assets Files", - "timeoutInMinutes": 0, - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "dir", - "arguments": "$(Pipeline.SourcesDirectory)\\bin\\obj\\BuildVersion*", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "packages -> dotnet.myget.org", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), contains(variables.PB_PublishType, 'myget'), eq(variables.ConfigurationGroup, 'Release'))", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "-ApiKey $(MyGetApiKey) -ConfigurationGroup $(ConfigurationGroup) -PackagesGlob $(AzureContainerPackageDirectory)$(AzureContainerPackageGlob) -MyGetFeedUrl $(MyGetFeedUrl)", - "workingFolder": "$(Pipeline.SourcesDirectory)", - "inlineScript": "param($ApiKey, $ConfigurationGroup, $PackagesGlob, $MyGetFeedUrl)\n\nif ($ConfigurationGroup.ToLower() -ne \"release\") { Write-host \"Chose not to publish\"; exit }\n\nmsbuild /t:NuGetPush /v:Normal `\n/p:NuGetExePath=$env:CustomNuGetPath `\n/p:NuGetApiKey=$ApiKey `\n/p:NuGetSource=$MyGetFeedUrl `\n/p:PackagesGlob=$PackagesGlob", - "failOnStandardError": "true" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Packages -> Blob Feed", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), contains(variables.PB_PublishType, 'blob'), eq(variables.ConfigurationGroup, 'Release'))", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "msbuild", - "arguments": "src\\publish.proj /t:PublishPackages /p:__PublishPackages=true $(FeedPublishArguments) /fileloggerparameters:Verbosity=diag;LogFile=publishpkg.log", - "workingFolder": "$(Pipeline.SourcesDirectory)", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": false, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Create VSTS auth NuGet.Config", - "timeoutInMinutes": 0, - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "$(VstsAuthedNuGetConfigPath) $(VstsFeedUrl) $(VstsPat)", - "workingFolder": "", - "inlineScript": "param($path, $url, $pat)\n\nSet-Content $path @\"\n\n\n \n \n \n \n \n \n \n \n \n\n\"@", - "failOnStandardError": 
"true" - } - }, - { - "environment": {}, - "enabled": false, - "continueOnError": false, - "alwaysRun": false, - "displayName": "packages -> VSTS", - "timeoutInMinutes": 0, - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "", - "workingFolder": "", - "inlineScript": "if ($env:ConfigurationGroup -ne \"Release\") { exit }\n& $env:CustomNuGetPath push $env:AzureContainerPackageDirectory$env:AzureContainerPackageGlob placeholderapikey -Source vsts-dotnet-core -ConfigFile $env:VstsAuthedNuGetConfigPath -Timeout 3600", - "failOnStandardError": "true" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Update versions repository", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), contains(variables.PB_PublishType, 'versions'), eq(variables.ConfigurationGroup, 'Release'))", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "-gitHubAuthToken $(UpdatePublishedVersions.AuthToken) -root $(Pipeline.SourcesDirectory) -configGroup $(ConfigurationGroup)", - "workingFolder": "", - "inlineScript": "param($gitHubAuthToken, $root, $configGroup)\nif ($configGroup -ne \"Release\" ) { exit }\ncd $root\n. $root\\UpdatePublishedVersions.ps1 `\n -gitHubUser dotnet-build-bot -gitHubEmail dotnet-build-bot@microsoft.com `\n -gitHubAuthToken $gitHubAuthToken `\n -versionsRepoOwner $env:VersionsRepoOwner -versionsRepo versions `\n -versionsRepoPath build-info/dotnet/$env:GitHubRepositoryName/$env:SourceBranch `\n -nupkgPath $env:AzureContainerPackageDirectory$env:AzureContainerPackageGlob", - "failOnStandardError": "true" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Get Build Number", - "timeoutInMinutes": 0, - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "$(OfficialBuildId) $(Pipeline.SourcesDirectory)", - "workingFolder": "", - "inlineScript": "param(\n [string]$OfficialBuildId,\n [string]$SourcesDir\n)\n$VersionPropsFile=$SourcesDir + \"\\bin\\obj\\BuildVersion-\" + $OfficialBuildId + \".props\"\n[xml]$versionXml=Get-Content $VersionPropsFile\n$env:BuildNumber=$versionXml.Project.PropertyGroup.BuildNumberMajor.InnerText + \".\" + $versionXml.Project.PropertyGroup.BuildNumberMinor.InnerText\nWrite-Host (\"##vso[task.setvariable variable=BuildNumber;]$env:BuildNumber\")", - "failOnStandardError": "true" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Publish to Artifact Services Drop (OfficialBuildId)", - "timeoutInMinutes": 0, - "task": { - "id": "f9d96d25-0c81-4e77-8282-1ad1f785cbb4", - "versionSpec": "0.*", - "definitionType": "task" - }, - "inputs": { - "dropServiceURI": "https://devdiv.artifacts.visualstudio.com/DefaultCollection", - "buildNumber": "dotnet/$(GitHubRepositoryName)/$(SourceBranch)/$(OfficialBuildId)/packages/$(ConfigurationGroup)", - "sourcePath": "$(AzureContainerPackageDirectory)", - "dropExePath": "", - "toLowerCase": "true", - "detailedLog": "false", - "usePat": "false", - "retentionDays": "", - "dropMetadataContainerName": 
"Drop-OfficialBuildId" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Publish to Artifact Services Drop (BuildNumber)", - "timeoutInMinutes": 0, - "task": { - "id": "f9d96d25-0c81-4e77-8282-1ad1f785cbb4", - "versionSpec": "0.*", - "definitionType": "task" - }, - "inputs": { - "dropServiceURI": "https://devdiv.artifacts.visualstudio.com/DefaultCollection", - "buildNumber": "dotnet/$(GitHubRepositoryName)/$(SourceBranch)/$(BuildNumber)/packages/$(ConfigurationGroup)", - "sourcePath": "$(AzureContainerPackageDirectory)", - "dropExePath": "", - "toLowerCase": "true", - "detailedLog": "false", - "usePat": "false", - "retentionDays": "", - "dropMetadataContainerName": "Drop-BuildNumber" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Copy Files to: $(Build.StagingDirectory)\\DebugLogs", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "5bfb729a-a7c8-4a78-a7c3-8d717bb7c13c", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "SourceFolder": "$(Pipeline.SourcesDirectory)", - "Contents": "**/*.log", - "TargetFolder": "$(Build.StagingDirectory)\\DebugLogs", - "CleanTargetFolder": "false", - "OverWrite": "false", - "flattenFolders": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Publish Artifact: DebugLogs", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "2ff763a7-ce83-4e1f-bc89-0ae63477cebe", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "PathtoPublish": "$(Build.StagingDirectory)\\DebugLogs", - "ArtifactName": "DebugLogs", - "ArtifactType": "Container", - "TargetPath": "\\\\my\\share\\$(Build.DefinitionName)\\$(Build.BuildNumber)", - "Parallel": "false", - "ParallelCount": "8" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Send Telemetry", - "timeoutInMinutes": 0, - "condition": "always()", - "task": { - "id": "521a94ea-9e68-468a-8167-6dcf361ea776", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": {} - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Run AgentTools/End.ps1", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\End.ps1\") {\n \"End.ps1 script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\End.ps1\n} else {\n \"End.ps1 script does not exist. 
Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - } - ], - "options": [ - { - "enabled": false, - "definition": { - "id": "a9db38f9-9fdc-478c-b0f9-464221e58316" - }, - "inputs": { - "workItemType": "234347", - "assignToRequestor": "true", - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "57578776-4c22-4526-aeb0-86b6da17ee9c" - }, - "inputs": { - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "5d58cc01-7c75-450c-be18-a388ddb129ec" - }, - "inputs": { - "branchFilters": "[\"+refs/heads/*\"]", - "additionalFields": "{}" - } - } - ], - "variables": { - "system.debug": { - "value": "false", - "allowOverride": true - }, - "ConfigurationGroup": { - "value": "$(PB_BuildType)", - "allowOverride": true - }, - "CloudDropAccountName": { - "value": "dotnetbuildoutput", - "allowOverride": true - }, - "CloudDropAccessToken": { - "value": null, - "allowOverride": true, - "isSecret": true - }, - "PB_ContainerName": { - "value": "$(Build.BuildNumber)", - "allowOverride": true - }, - "PB_BlobNamePrefix": { - "value": "$(PB_PipeBuildIdentifier)/", - "allowOverride": true - }, - "TeamName": { - "value": "DotNetCore" - }, - "AzureBlobFeedAccountName": { - "value": "dotnetfeed", - "allowOverride": true - }, - "AzureBlobFeedAccessToken": { - "value": null, - "allowOverride": true, - "isSecret": true - }, - "AzureBlobFeedContainerName": { - "value": "dotnet-core", - "allowOverride": true - }, - "OfficialBuildId": { - "value": "$(Build.BuildNumber)", - "allowOverride": true - }, - "MyGetFeedUrl": { - "value": "https://dotnet.myget.org/F/dotnet-core-test/api/v2/package", - "allowOverride": true - }, - "MyGetApiKey": { - "value": null, - "allowOverride": true, - "isSecret": true - }, - "VstsFeedUrl": { - "value": "https://devdiv.pkgs.visualstudio.com/DefaultCollection/_packaging/dotnet-core-test/nuget/v3/index.json", - "allowOverride": true - }, - "VstsPat": { - "value": null, - "allowOverride": true, - "isSecret": true - }, - "VstsAuthedNuGetConfigPath": { - "value": "$(Build.StagingDirectory)\\VstsAuthed.NuGet.Config" - }, - "UpdatePublishedVersions.AuthToken": { - "value": null, - "allowOverride": true, - "isSecret": true - }, - "VersionsRepoOwner": { - "value": "dagood", - "allowOverride": true - }, - "BuildConfiguration": { - "value": "Release" - }, - "BuildPlatform": { - "value": "any cpu" - }, - "ReleaseBaseUrl": { - "value": "https://devdiv.vsrm.visualstudio.com/DefaultCollection", - "allowOverride": true - }, - "ReleaseProjectName": { - "value": "DevDiv", - "allowOverride": true - }, - "ReleaseDefinitionId": { - "value": "40", - "allowOverride": true - }, - "ManualReleaseName": { - "value": "" - }, - "OfficialBuild": { - "value": "fake-test" - }, - "BranchGroup": { - "value": "" - }, - "DefinitionNames": { - "value": "Fake-Windows Fake-Windows-Native" - }, - "CreateRequestContacts": { - "value": "a b c" - }, - "VstsReleaseAccessToken": { - "value": null, - "allowOverride": true, - "isSecret": true - }, - "Pipeline.SourcesDirectory": { - "value": "$(Build.BinariesDirectory)\\pipelineRepository" - }, - "VstsAccountName": { - "value": "dagood", - "allowOverride": true - }, - "VstsRepositoryName": { - "value": "DotNet-CoreFX-Trusted", - "allowOverride": true - }, - "VstsRepoGitUrl": { - "value": "https://$(VstsAccountName):$(VstsRepoPat)@devdiv.visualstudio.com/DevDiv/_git/$(VstsRepositoryName)/" - }, - "VstsRepoPat": { - "value": null, - 
"allowOverride": true, - "isSecret": true - }, - "SourceVersion": { - "value": "master", - "allowOverride": true - }, - "SourceBranch": { - "value": "master", - "allowOverride": true - }, - "AzureContainerPackageDirectory": { - "value": "$(Pipeline.SourcesDirectory)\\packages\\AzureTransfer\\$(ConfigurationGroup)\\", - "allowOverride": true - }, - "AzureContainerPackageGlob": { - "value": "pkg\\*.nupkg", - }, - "GitHubRepositoryName": { - "value": "corefx", - "allowOverride": true - }, - "UseLegacyBuildScripts": { - "value": "false", - "allowOverride": true - }, - "FeedPublishArguments": { - "value": "$(PB_BuildOutputManifestArguments) /p:ExpectedFeedUrl=$(PB_PublishBlobFeedUrl) /p:CloudDropAccessToken=$(PB_PublishBlobFeedKey) /p:CloudDropAccountName=$(AzureBlobFeedAccountName) /p:ContainerName=$(AzureBlobFeedContainerName) /p:OverwriteOnPublish=true /p:PackagesPatternDir=$(AzureContainerPackageDirectory) /p:__BuildType=$(ConfigurationGroup) /p:OfficialPublish=true /p:PublishFlatContainer=false", - }, - "PB_PublishType": { - "value": "", - "allowOverride": true - }, - "PB_PublishBlobFeedUrl": { - "value": "", - "allowOverride": true - }, - "PB_PublishBlobFeedKey": { - "value": "", - "allowOverride": true - }, - "PB_BuildOutputManifestArguments": { - "value": "/p:ManifestBuildId=$(OfficialBuildId) /p:ManifestBranch=$(SourceBranch) /p:ManifestCommit=$(SourceVersion)" - }, - "PB_UseLegacySigning": { - "value": "false", - "allowOverride": true - } - }, - "retentionRules": [ - { - "branches": [ - "+refs/heads/*" - ], - "artifacts": [], - "artifactTypesToDelete": [ - "FilePath", - "SymbolStore" - ], - "daysToKeep": 10, - "minimumToKeep": 1, - "deleteBuildRecord": true, - "deleteTestResults": true - } - ], - "buildNumberFormat": "$(date:yyyyMMdd)$(rev:-rr)", - "jobAuthorizationScope": "projectCollection", - "jobTimeoutInMinutes": 180, - "jobCancelTimeoutInMinutes": 5, - "repository": { - "properties": { - "labelSources": "0", - "reportBuildStatus": "false", - "fetchDepth": "0", - "gitLfsSupport": "false", - "skipSyncSource": "false", - "cleanOptions": "0", - "checkoutNestedSubmodules": "false", - "labelSourcesFormat": "$(build.buildNumber)" - }, - "id": "0a2b2664-c1be-429c-9b40-8a24dee27a4a", - "type": "TfsGit", - "name": "DotNet-BuildPipeline", - "url": "https://devdiv.visualstudio.com/DevDiv/_git/DotNet-BuildPipeline", - "defaultBranch": "refs/heads/master", - "clean": "true", - "checkoutSubmodules": false - }, - "processParameters": {}, - "quality": "definition", - "drafts": [], - "queue": { - "_links": { - "self": { - "href": "https://devdiv.visualstudio.com/DefaultCollection/_apis/build/Queues/330" - } - }, - "id": 330, - "name": "DotNetCore-Build", - "url": "https://devdiv.visualstudio.com/DefaultCollection/_apis/build/Queues/330", - "pool": { - "id": 97, - "name": "DotNetCore-Build" - } - }, - "id": 2943, - "name": "DotNet-Trusted-Publish", - "path": "\\", - "type": "build", - "queueStatus": "enabled", - "project": { - "id": "0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "name": "DevDiv", - "description": "Visual Studio and DevDiv team project for git source code repositories. Work items will be added for Adams, Dev14 work items are tracked in vstfdevdiv. 
", - "url": "https://devdiv.visualstudio.com/DefaultCollection/_apis/projects/0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "state": "wellFormed", - "revision": 418098432, - "visibility": "organization" - } -} diff --git a/buildpipeline/README.md b/buildpipeline/README.md deleted file mode 100644 index ef575c8767ec..000000000000 --- a/buildpipeline/README.md +++ /dev/null @@ -1,5 +0,0 @@ -These are the checked in build definitions used by BuildPipeline. - -You may edit build steps, variables, and other artifacts of the definition directly to modify the BuildPipeline builds. - -If you want to make major changes to these definitions such as adding / deleting build steps, or other major rewrites, chcosta has tools to assist with those changes and can provide guidance. It is important that we know what kinds of changes are being made to the build definitions so that we can invest in improving those experiences. \ No newline at end of file diff --git a/buildpipeline/linux-musl.groovy b/buildpipeline/linux-musl.groovy deleted file mode 100644 index d82ae726c0f8..000000000000 --- a/buildpipeline/linux-musl.groovy +++ /dev/null @@ -1,24 +0,0 @@ -@Library('dotnet-ci') _ - -// Incoming parameters. Access with "params.". -// Note that the parameters will be set as env variables so we cannot use names that conflict -// with the engineering system parameter names. -// CGroup - Build configuration. -// TestOuter - If true, runs outerloop, if false runs just innerloop - -simpleDockerNode('microsoft/dotnet-buildtools-prereqs:alpine-3.6-3148f11-20171119021156') { - stage ('Checkout source') { - checkoutRepo() - } - - stage ('Initialize tools') { - // Init tools - sh './init-tools.sh' - } - stage ('Sync') { - sh "./sync.sh" - } - stage ('Build Product') { - sh "./build.sh -x64 -${params.CGroup} -skiprestore -stripSymbols -portablebuild=false" - } -} diff --git a/buildpipeline/perf-pipeline.groovy b/buildpipeline/perf-pipeline.groovy deleted file mode 100644 index f2043dc742e0..000000000000 --- a/buildpipeline/perf-pipeline.groovy +++ /dev/null @@ -1,409 +0,0 @@ -@Library('dotnet-ci') _ - -// Incoming parameters. Access with "params.". -// Note that the parameters will be set as env variables so we cannot use names that conflict -// with the engineering system parameter names. - -//--------------------- Windows Functions ----------------------------// - -def windowsBuild(String arch, String config, String pgo, boolean isBaseline) { - checkout scm - - String pgoBuildFlag = ((pgo == 'nopgo') ? '-nopgooptimize' : '-enforcepgo') - String baselineString = "" - - // For baseline builds, checkout the merge's parent - if (isBaseline) { - baselineString = "-baseline" - bat "git checkout HEAD^^1" - } - - bat "set __TestIntermediateDir=int&&.\\build.cmd -${config} -${arch} -skipbuildpackages ${pgoBuildFlag}" - bat "tests\\runtest.cmd ${config} ${arch} GenerateLayoutOnly" - bat "rd /s /q bin\\obj" - - // Stash build artifacts. 
Stash tests in an additional stash to be used by Linux test runs - stash name: "nt-${arch}-${pgo}${baselineString}-build-artifacts", includes: 'bin/**' - stash name: "nt-${arch}-${pgo}${baselineString}-test-artifacts", includes: 'bin/tests/**' -} - -def windowsPerf(String arch, String config, String uploadString, String runType, String opt_level, String jit, String pgo, String scenario, boolean isBaseline, boolean isProfileOn, int slice) { - withCredentials([string(credentialsId: 'CoreCLR Perf BenchView Sas', variable: 'BV_UPLOAD_SAS_TOKEN')]) { - checkout scm - String baselineString = "" - if (isBaseline) { - baselineString = "-baseline" - } - dir ('.') { - unstash "nt-${arch}-${pgo}${baselineString}-test-artifacts" - unstash "benchview-tools" - unstash "metadata" - } - - String pgoTestFlag = ((pgo == 'nopgo') ? '-nopgo' : '') - - // We want to use the baseline metadata for baseline runs. We expect to find the submission metadata in - // submission-metadata.py - if (isBaseline) { - bat "move /y submission-metadata-baseline.json submission-metadata.json" - } - - String testEnv = "" - - String failedOutputLogFilename = "run-xunit-perf-scenario.log" - - bat "py \".\\Microsoft.BenchView.JSONFormat\\tools\\machinedata.py\"" - bat ".\\init-tools.cmd" - - // We run run-xunit-perf differently for each of the different job types - - String profileArg = isProfileOn ? "BranchMispredictions+CacheMisses+InstructionRetired" : "stopwatch" - - String runXUnitCommonArgs = "-arch ${arch} -configuration ${config} -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} ${pgoTestFlag} -runtype ${runType} ${testEnv} -optLevel ${opt_level} -jitName ${jit} -outputdir \"%WORKSPACE%\\bin\\sandbox_logs\"" - if (scenario == 'perf') { - String runXUnitPerfCommonArgs = "${runXUnitCommonArgs} -stabilityPrefix \"START \\\"CORECLR_PERF_RUN\\\" /B /WAIT /HIGH /AFFINITY 0x2\"" - if (slice == -1) - { - String runXUnitPerflabArgs = "${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${arch}.${config}\\performance\\perflab\\Perflab -library" - - profileArg = isProfileOn ? 
"default+${profileArg}+gcapi" : profileArg - bat "py tests\\scripts\\run-xunit-perf.py ${runXUnitPerflabArgs} -collectionFlags ${profileArg}" - - String runXUnitCodeQualityArgs = "${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${arch}.${config}\\Jit\\Performance\\CodeQuality\\" - bat "py tests\\scripts\\run-xunit-perf.py ${runXUnitCodeQualityArgs} -collectionFlags ${profileArg}" - } - - else { - String runXUnitCodeQualityArgs = "${runXUnitPerfCommonArgs} -slice ${slice} -sliceConfigFile \"%WORKSPACE%\\tests\\scripts\\perf-slices.json\" -testBinLoc bin\\tests\\${os}.${arch}.${config}" - bat "py tests\\scripts\\run-xunit-perf.py ${runXUnitCodeQualityArgs} -collectionFlags ${profileArg}" - } - } - else if (scenario == 'jitbench') { - String runXUnitPerfCommonArgs = "${runXUnitCommonArgs} -stabilityPrefix \"START \"CORECLR_PERF_RUN\" /B /WAIT /HIGH\" -scenarioTest" - runXUnitPerfCommonArgs = "${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${arch}.${config}\\performance\\Scenario\\JitBench -group CoreCLR-Scenarios" - - if (!(opt_level == 'min_opt' && isProfileOn)) { - bat "py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -collectionFlags ${profileArgs}" - } - } - else if (scenario == 'illink') { - String runXUnitPerfCommonArgs = "${runXUnitCommonArgs} -scenarioTest" - bat "\"%VS140COMNTOOLS%\\..\\..\\VC\\vcvarsall.bat\" x86_amd64\n" + - "py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${arch}.${config}\\performance\\linkbench\\linkbench -group ILLink -nowarmup" - } - archiveArtifacts allowEmptyArchive: false, artifacts:'bin/sandbox_logs/**,machinedata.json', onlyIfSuccessful: false - } -} - -def windowsThroughput(String arch, String os, String config, String runType, String optLevel, String jit, String pgo, boolean isBaseline) { - withCredentials([string(credentialsId: 'CoreCLR Perf BenchView Sas', variable: 'BV_UPLOAD_SAS_TOKEN')]) { - checkout scm - - String baselineString = "" - if (isBaseline) { - baselineString = "-baseline" - } - - String pgoTestFlag = ((pgo == 'nopgo') ? '-nopgo' : '') - - dir ('.') { - unstash "nt-${arch}-${pgo}${baselineString}-build-artifacts" - unstash "benchview-tools" - unstash "throughput-benchmarks-${arch}" - unstash "metadata" - } - - // We want to use the baseline metadata for baseline runs. We expect to find the submission metadata in - // submission-metadata.py - if (isBaseline) { - bat "move /y submission-metadata-baseline.json submission-metadata.json" - } - - bat "py \".\\Microsoft.BenchView.JSONFormat\\tools\\machinedata.py\"" - bat ".\\init-tools.cmd" - bat "py -u tests\\scripts\\run-throughput-perf.py -arch ${arch} -os ${os} -configuration ${config} -opt_level ${optLevel} -jit_name ${jit} ${pgoTestFlag} -clr_root \"%WORKSPACE%\" -assembly_root \"%WORKSPACE%\\${arch}ThroughputBenchmarks\\lib\" -benchview_path \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" -run_type ${runType}" - archiveArtifacts allowEmptyArchive: false, artifacts:'throughput-*.csv,machinedata.json', onlyIfSuccessful: false - } -} - -//------------------------ Linux Functions ----------------------------// - -def linuxBuild(String arch, String config, String pgo, boolean isBaseline) { - checkout scm - - String pgoBuildFlag = ((pgo == 'nopgo') ? 
'-nopgooptimize' : '') - String baselineString = "" - - // For baseline runs, checkout the merge's parent - if (isBaseline) { - baselineString = "-baseline" - sh "git checkout HEAD^1" - } - - sh "./build.sh -verbose -${config} -${arch} ${pgoBuildFlag}" - stash name: "linux-${arch}-${pgo}${baselineString}-build-artifacts", includes: 'bin/**' -} - -def linuxPerf(String arch, String os, String config, String uploadString, String runType, String optLevel, String pgo, boolean isBaseline) { - withCredentials([string(credentialsId: 'CoreCLR Perf BenchView Sas', variable: 'BV_UPLOAD_SAS_TOKEN')]) { - checkout scm - - String baselineString = "" - if (isBaseline) { - baselineString = "-baseline" - } - - String pgoTestFlag = ((pgo == 'nopgo') ? '-nopgo' : '') - - dir ('.') { - unstash "linux-${arch}-${pgo}${baselineString}-build-artifacts" - unstash "nt-${arch}-${pgo}${baselineString}-test-artifacts" - unstash "metadata" - } - dir ('./tests/scripts') { - unstash "benchview-tools" - } - - // We want to use the baseline metadata for baseline runs. We expect to find the submission metadata in - // submission-metadata.py - if (isBaseline) { - sh "mv -f submission-metadata-baseline.json submission-metadata.json" - } - - sh "./tests/scripts/perf-prep.sh --nocorefx" - sh "./init-tools.sh" - sh "./build-test.sh release $arch generatelayoutonly" - - String runXUnitCommonArgs = "-arch ${arch} -os Ubuntu16.04 -configuration ${config} -stabilityPrefix \"taskset 0x00000002 nice --adjustment=-10\" -generateBenchviewData \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools\" ${uploadString} ${pgoTestFlag} -runtype ${runType} -optLevel ${optLevel} -outputdir \"\${WORKSPACE}/bin/sandbox_logs\"" - - sh "python3 ./tests/scripts/run-xunit-perf.py -testBinLoc bin/tests/Windows_NT.${arch}.${config}/JIT/Performance/CodeQuality ${runXUnitCommonArgs}" - archiveArtifacts allowEmptyArchive: false, artifacts:'bin/toArchive/**,machinedata.json', onlyIfSuccessful: false - } -} - -def linuxThroughput(String arch, String os, String config, String uploadString, String runType, String optLevel, String pgo, boolean isBaseline) { - withCredentials([string(credentialsId: 'CoreCLR Perf BenchView Sas', variable: 'BV_UPLOAD_SAS_TOKEN')]) { - checkout scm - - String baselineString = "" - if (isBaseline) { - baselineString = "-baseline" - } - - String pgoTestFlag = ((pgo == 'nopgo') ? '-nopgo' : '') - - dir ('.') { - unstash "linux-${arch}-${pgo}${baselineString}-build-artifacts" - unstash "throughput-benchmarks-${arch}" - unstash "metadata" - } - dir ('./tests/scripts') { - unstash "benchview-tools" - } - - // We want to use the baseline metadata for baseline runs. We expect to find the submission metadata in - // submission-metadata.py - if (isBaseline) { - sh "mv -f submission-metadata-baseline.json submission-metadata.json" - } - - sh "./tests/scripts/perf-prep.sh --throughput" - sh "./init-tools.sh" - sh "python3 ./tests/scripts/run-throughput-perf.py -arch \"${arch}\" -os \"${os}\" -configuration \"${config}\" -opt_level ${optLevel} ${pgoTestFlag} -clr_root \"\${WORKSPACE}\" -assembly_root \"\${WORKSPACE}/${arch}ThroughputBenchmarks/lib\" -run_type \"${runType}\" -benchview_path \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools\"" - archiveArtifacts allowEmptyArchive: false, artifacts:'throughput-*.csv,machinedata.json', onlyIfSuccessful: false - } -} - -//-------------------------- Job Definitions --------------------------// - -String config = "Release" -String runType = isPR() ? 
'private' : 'rolling' - -String uploadString = '-uploadToBenchview' - -stage ('Get Metadata and download Throughput Benchmarks') { - simpleNode('Windows_NT', '20170427-elevated') { - checkout scm - String commit = getCommit() - def benchViewName = isPR() ? "coreclr private %ghprbPullTitle%" : "coreclr rolling %GIT_BRANCH_WITHOUT_ORIGIN% ${commit}" - def benchViewUser = getUserEmail() - bat "mkdir tools\n" + - "powershell Invoke-WebRequest https://dist.nuget.org/win-x86-commandline/v4.1.0/nuget.exe -OutFile %WORKSPACE%\\tools\\nuget.exe" - bat "%WORKSPACE%\\tools\\nuget.exe install Microsoft.BenchView.JSONFormat -Source http://benchviewtestfeed.azurewebsites.net/nuget -Prerelease -ExcludeVersion" - bat "%WORKSPACE%\\tools\\nuget.exe install Microsoft.BenchView.ThroughputBenchmarks.x64.Windows_NT -Source https://dotnet.myget.org/F/dotnet-core -Prerelease -ExcludeVersion" - bat "%WORKSPACE%\\tools\\nuget.exe install Microsoft.BenchView.ThroughputBenchmarks.x86.Windows_NT -Source https://dotnet.myget.org/F/dotnet-core -Prerelease -ExcludeVersion" - bat "set \"GIT_BRANCH_WITHOUT_ORIGIN=%GitBranchOrCommit:*/=%\"\n" + - "py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\submission-metadata.py\" --name \"${benchViewName}\" --user-email \"${benchViewUser}\"\n" + - "py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\build.py\" git --branch %GIT_BRANCH_WITHOUT_ORIGIN% --type ${runType}\n" + - "py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\submission-metadata.py\" --name \"${benchViewName}-baseline\" --user-email \"${benchViewUser}\" -o submission-metadata-baseline.json\n" - - // TODO: revisit these moves. Originally, stash could not find the directories as currently named - bat "move Microsoft.BenchView.ThroughputBenchmarks.x64.Windows_NT x64ThroughputBenchmarks" - bat "move Microsoft.BenchView.ThroughputBenchmarks.x86.Windows_NT x86ThroughputBenchmarks" - - stash includes: 'Microsoft.BenchView.JSONFormat/**/*', name: 'benchview-tools' - stash name: "metadata", includes: "*.json" - stash name: "throughput-benchmarks-x64", includes: "x64ThroughputBenchmarks/**/*" - stash name: "throughput-benchmarks-x86", includes: "x86ThroughputBenchmarks/**/*" - } -} - -// TODO: use non-pgo builds for throughput? 
-def innerLoopBuilds = [
-    "windows x64 pgo build": {
-        simpleNode('Windows_NT','latest') {
-            windowsBuild('x64', config, 'pgo', false)
-        }
-    },
-    "windows x86 pgo build": {
-        simpleNode('Windows_NT','latest') {
-            windowsBuild('x86', config, 'pgo', false)
-        }
-    },
-    "linux x64 pgo build": {
-        simpleNode('RHEL7.2', 'latest-or-auto') {
-            linuxBuild('x64', config, 'pgo', false)
-        }
-    }
-]
-
-// Only run non-pgo builds on official builds
-def outerLoopBuilds = [:]
-
-if (!isPR()) {
-    outerLoopBuilds = [
-        "windows x64 nopgo build": {
-            simpleNode('Windows_NT','latest') {
-                windowsBuild('x64', config, 'nopgo', false)
-            }
-        },
-        "windows x86 nopgo build": {
-            simpleNode('Windows_NT','latest') {
-                windowsBuild('x86', config, 'nopgo', false)
-            }
-        },
-        "linux x64 nopgo build": {
-            simpleNode('RHEL7.2', 'latest-or-auto') {
-                linuxBuild('x64', config, 'nopgo', false)
-            }
-        }
-    ]
-}
-
-/*def baselineBuilds = [:]
-
-if (isPR()) {
-    baselineBuilds = [
-        "windows x64 pgo baseline build": {
-            simpleNode('Windows_NT','latest') {
-                windowsBuild('x64', config, 'pgo', true)
-            }
-        },
-        "windows x86 pgo baseline build": {
-            simpleNode('Windows_NT','latest') {
-                windowsBuild('x86', config, 'pgo', true)
-            }
-        }
-    ]
-}*/
-
-stage ('Build Product') {
-    parallel innerLoopBuilds //+ outerLoopBuilds //+ baselineBuilds
-}
-
-// Pipeline builds don't allow outside scripts (ie ArrayList.Add) if running from a script from SCM, so manually list these for now.
-// Run the main test mix on all runs (PR + official)
-
-def innerLoopTests = [:]
-
-['x64', 'x86'].each { arch ->
-    ['full_opt'].each { opt_level ->
-        [false].each { isBaseline ->
-            [0,1,2,3,4,5].each { slice ->
-                String baseline = ""
-                if (isBaseline) {
-                    baseline = " baseline"
-                }
-                if (isPR() || !isBaseline) {
-                    innerLoopTests["windows ${arch} ryujit ${opt_level} pgo ${slice}${baseline} perf"] = {
-                        simpleNode('windows_server_2016_clr_perf', 180) {
-                            windowsPerf(arch, config, uploadString, runType, opt_level, 'ryujit', 'pgo', 'perf', isBaseline, true, slice)
-                        }
-                    }
-
-                }
-            }
-
-            if (arch == 'x64') {
-                innerLoopTests["linux ${arch} ryujit ${opt_level} pgo perf"] = {
-                    simpleNode('ubuntu_1604_clr_perf', 180) {
-                        linuxPerf(arch, 'Ubuntu16.04', config, uploadString, runType, opt_level, 'pgo', false)
-                    }
-                }
-            }
-        }
-    }
-}
-
-// Run the full test mix only on commits, not PRs
-def outerLoopTests = [:]
-
-if (!isPR()) {
-    ['x64', 'x86'].each { arch ->
-        outerLoopTests["windows ${arch} ryujit full_opt pgo jitbench"] = {
-            simpleNode('windows_server_2016_clr_perf', 180) {
-                windowsPerf(arch, config, uploadString, runType, 'full_opt', 'ryujit', 'pgo', 'jitbench', false, false, -1)
-            }
-        }
-
-        outerLoopTests["windows ${arch} ryujit full_opt pgo illink"] = {
-            simpleNode('Windows_NT', '20170427-elevated') {
-                windowsPerf(arch, config, uploadString, runType, 'full_opt', 'ryujit', 'pgo', 'illink', false, false, -1)
-            }
-        }
-    }
-
-    ['x64', 'x86'].each { arch ->
-        ['min_opt', 'full_opt'].each { opt_level ->
-            ['ryujit'].each { jit ->
-                ['pgo', 'nopgo'].each { pgo_enabled ->
-                    [true, false].each { isProfileOn ->
-                        outerLoopTests["windows ${arch} ${jit} ${opt_level} ${pgo_enabled} perf"] = {
-                            simpleNode('windows_server_2016_clr_perf', 180) {
-                                windowsPerf(arch, config, uploadString, runType, opt_level, jit, pgo_enabled, 'perf', false, isProfileOn, -1)
-                            }
-                        }
-
-                        outerLoopTests["windows ${arch} ${jit} ${opt_level} ${pgo_enabled} throughput"] = {
-                            simpleNode('windows_server_2016_clr_perf', 180) {
-                                windowsThroughput(arch, 'Windows_NT', config, runType, opt_level, jit, 
pgo_enabled, false)
-                            }
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-    ['x64'].each { arch ->
-        ['min_opt', 'full_opt'].each { opt_level ->
-            ['pgo', 'nopgo'].each { pgo_enabled ->
-                outerLoopTests["linux ${arch} ryujit ${opt_level} ${pgo_enabled} perf"] = {
-                    simpleNode('ubuntu_1604_clr_perf', 180) {
-                        linuxPerf(arch, 'Ubuntu16.04', config, uploadString, runType, opt_level, pgo_enabled, false)
-                    }
-                }
-
-                outerLoopTests["linux ${arch} ryujit ${opt_level} ${pgo_enabled} throughput"] = {
-                    simpleNode('ubuntu_1604_clr_perf', 180) {
-                        linuxThroughput(arch, 'Ubuntu16.04', config, uploadString, runType, opt_level, pgo_enabled, false)
-                    }
-                }
-            }
-        }
-    }
-}
-
-stage ('Run testing') {
-    parallel innerLoopTests //+ outerLoopTests
-}
diff --git a/buildpipeline/perf_pipelinejobs.groovy b/buildpipeline/perf_pipelinejobs.groovy
deleted file mode 100644
index 26e118a9f449..000000000000
--- a/buildpipeline/perf_pipelinejobs.groovy
+++ /dev/null
@@ -1,35 +0,0 @@
-import jobs.generation.JobReport;
-import jobs.generation.Utilities;
-import org.dotnet.ci.pipelines.Pipeline
-
-// The input project name (e.g. dotnet/corefx)
-def project = GithubProject
-// The input branch name (e.g. master)
-def branch = GithubBranchName
-
-// **************************
-// Define innerloop testing. Any configuration in ForPR will run for every PR but all other configurations
-// will have a trigger that can be
-// **************************
-
-def perfPipeline = Pipeline.createPipelineForGithub(this, project, branch, 'buildpipeline/perf-pipeline.groovy')
-
-def triggerName = "Perf Build and Test"
-def pipeline = perfPipeline
-
-// If we were using parameters for the pipeline job, we would define an array of parameter pairs
-// and pass that array as a parameter to the trigger functions. Ie:
-// def params = ['CGroup':'Release',
-//               'AGroup':'x64',
-//               'OGroup':'Windows_NT']
-// pipeline.triggerPipelineOnGithubPRComment(triggerName, params)
-
-params = ['XUNIT_PERFORMANCE_MAX_ITERATION':'6',
-          'XUNIT_PERFORMANCE_MAX_ITERATION_INNER_SPECIFIED':'6']
-
-// Allow PR jobs on request.
-pipeline.triggerPipelineOnGithubPRComment(triggerName, params)
-
-// Disable automatic PR runs until throughput issues are addressed.
-// pipeline.triggerPipelineOnEveryGithubPR(triggerName, params)
-pipeline.triggerPipelinePeriodically('*/12 * * * *', params)
diff --git a/buildpipeline/pipelinejobs.groovy b/buildpipeline/pipelinejobs.groovy
deleted file mode 100644
index 75b26d100d67..000000000000
--- a/buildpipeline/pipelinejobs.groovy
+++ /dev/null
@@ -1,55 +0,0 @@
-// Import the utility functionality.
-
-import jobs.generation.JobReport;
-import jobs.generation.Utilities;
-import org.dotnet.ci.pipelines.Pipeline
-
-// The input project name (e.g. dotnet/coreclr)
-def project = GithubProject
-// The input branch name (e.g. master)
-def branch = GithubBranchName
-
-// **************************
-// Define innerloop testing. 
Any configuration in ForPR will run for every PR but all other configurations -// will have a trigger that can be -// ************************** - -def linmuslPipeline = Pipeline.createPipelineForGithub(this, project, branch, 'buildpipeline/linux-musl.groovy') - -def configurations = [ - ['TGroup':"netcoreapp", 'Pipeline':linmuslPipeline, 'Name':'Linux-musl' ,'ForPR':"Debug-x64", 'Arch':['x64']], -] - -configurations.each { config -> - ['Debug', 'Release'].each { configurationGroup -> - (config.Arch ?: ['x64', 'x86']).each { archGroup -> - def triggerName = "${config.Name} ${archGroup} ${configurationGroup} Build" - - def pipeline = config.Pipeline - def params = ['TGroup':config.TGroup, - 'CGroup':configurationGroup, - 'AGroup':archGroup, - 'TestOuter': false] - - // Add default PR triggers for particular configurations but manual triggers for all - if (config.ForPR.contains("${configurationGroup}-${archGroup}")) { - pipeline.triggerPipelineOnEveryGithubPR(triggerName, params) - } - else { - pipeline.triggerPipelineOnGithubPRComment(triggerName, params) - } - - // Add trigger for all configurations to run on merge - pipeline.triggerPipelineOnGithubPush(params) - - // Add optional PR trigger for Outerloop test runs - params.TestOuter = true - pipeline.triggerPipelineOnGithubPRComment("Outerloop ${triggerName}", params) -}}} - -JobReport.Report.generateJobReport(out) - -// Make the call to generate the help job -Utilities.createHelperJob(this, project, branch, - "Welcome to the ${project} Repository", // This is prepended to the help message - "Have a nice day!") // This is appended to the help message. You might put known issues here. diff --git a/buildpipeline/pipelines.json b/buildpipeline/pipelines.json deleted file mode 100644 index 66183a1163ab..000000000000 --- a/buildpipeline/pipelines.json +++ /dev/null @@ -1,259 +0,0 @@ -{ - "Repository": "coreclr", - "Definitions": { - "Path": ".", - "Type": "VSTS", - "BaseUrl": "https://devdiv.visualstudio.com/DefaultCollection", - "SkipBranchAndVersionOverrides": "false" - }, - "PrivateRun": { - "property-overrides": { - "PB_PublishType": "" - } - }, - "DefinitionGroups": [ - { - "Name": "Product-Build", - "Definitions": [ - { - "Name": "DotNet-CoreClr-Trusted-Linux", - "Parameters": { - "DockerTag": "centos-7-d485f41-20173404063424", - "Rid": "linux" - }, - "ReportingParameters": { - "OperatingSystem": "Linux", - "Type": "build/product/", - "Architecture": "x64", - "PB_BuildType": null - } - }, - { - "Name": "DotNet-CoreClr-Trusted-Linux", - "Parameters": { - "DockerTag": "centos-6-376e1a3-20174311014331", - "Rid": "rhel.6", - "PB_AdditionalBuildArgs": "-portablebuild=false" - }, - "ReportingParameters": { - "OperatingSystem": "RedHat6", - "Type": "build/product/", - "Architecture": "x64", - "PB_BuildType": null - } - }, - { - "Name": "DotNet-CoreClr-Trusted-Linux", - "Parameters": { - "DockerTag": "alpine-3.6-3148f11-20171119021156", - "Rid": "linux-musl", - "PB_OutputRID": "linux-musl-x64", - "PB_AdditionalBuildArgs": "-portablebuild=false" - }, - "ReportingParameters": { - "OperatingSystem": "Linux-musl", - "Type": "build/product/", - "Architecture": "x64", - "PB_BuildType": null - } - }, - { - "Name": "DotNet-CoreClr-Trusted-Mac", - "Parameters": { - "Rid": "osx" - }, - "ReportingParameters": { - "OperatingSystem": "OSX", - "Type": "build/product/", - "SubType": "PortableBuild", - "Architecture": "x64", - "PB_BuildType": null - } - }, - { - "Name": "DotNet-CoreClr-Trusted-Windows", - "Parameters": { - "Architecture": "x64" - }, - 
"ReportingParameters": { - "OperatingSystem": "Windows", - "Type": "build/product/", - "SubType" : "PortableBuild", - "Architecture": "x64", - "PB_BuildType": null - } - }, - { - "Name": "DotNet-CoreClr-Trusted-Windows", - "Parameters": { - "Architecture": "arm64", - "ToolsetArgs": "toolset_dir C:\\tools\\clr" - }, - "ReportingParameters": { - "OperatingSystem": "Windows", - "Type": "build/product/", - "SubType" : "PortableBuild", - "Architecture": "arm64", - "PB_BuildType": null - } - }, - { - "Name": "DotNet-CoreClr-Trusted-Windows", - "Parameters": { - "Architecture": "arm" - }, - "ReportingParameters": { - "OperatingSystem": "Windows", - "Type": "build/product/", - "SubType" : "PortableBuild", - "Architecture": "arm", - "PB_BuildType": null - } - }, - { - "Name": "DotNet-CoreClr-Trusted-Windows-x86", - "Parameters": { - "Architecture": "x86" - }, - "ReportingParameters": { - "OperatingSystem": "Windows", - "Type": "build/product/", - "SubType" : "PortableBuild", - "Architecture": "x86", - "PB_BuildType": null - } - } - ] - }, - { - "Name": "Linux-CrossBuild", - "Definitions": [ - { - "Name": "DotNet-CoreClr-Trusted-Linux-Crossbuild", - "Parameters": { - "DockerTag": "ubuntu-14.04-cross-e435274-20180426002420", - "Architecture": "arm", - "Rid": "linux", - "CrossArchitecture": "x86", - "CrossArchBuildArgs": "crosscomponent", - "CrossArchBuildPackagesArgs": "-__DoCrossArchBuild=1", - "CAC_ROOTFS_DIR": "/crossrootfs/$(CrossArchitecture)" - }, - "ReportingParameters": { - "OperatingSystem": "Linux", - "SubType": "PortableCrossBuild", - "Type": "build/product/", - "Architecture": "arm", - "PB_BuildType": null - } - }, - { - "Name": "DotNet-CoreClr-Trusted-Linux-Crossbuild", - "Parameters": { - "DockerTag": "ubuntu-16.04-cross-arm64-a3ae44b-20180315221921", - "Architecture": "arm64", - "Rid": "linux", - "CrossArchBuildArgs": "crosscomponent", - "CrossArchBuildPackagesArgs": "-__DoCrossArchBuild=1" - }, - "ReportingParameters": { - "OperatingSystem": "Linux", - "SubType": "PortableCrossBuild", - "Type": "build/product/", - "Architecture": "arm64", - "PB_BuildType": null - } - } - ] - } - ], - "Pipelines": [ - { - "Name": "Trusted-All-Release", - "Parameters": { - "TreatWarningsAsErrors": "false" - }, - "BuildParameters": { - "PB_BuildType": "Release", - "PublishFlat": "false", - "PB_EnforcePGO": "enforcepgo" - }, - "ReportingParameters": { - "PB_BuildType": "Release" - }, - "DefinitionGroupRefs": [ - "Product-Build" - ] - }, - { - "Name": "Trusted-Crossbuild-Release", - "Parameters": { - "TreatWarningsAsErrors": "false" - }, - "BuildParameters": { - "PB_BuildType": "Release", - "PublishFlat": "false" - }, - "ReportingParameters": { - "PB_BuildType": "Release" - }, - "DefinitionGroupRefs": [ - "Linux-CrossBuild" - ] - }, - { - "Name": "Publish Packages to Feeds - Release", - "Parameters": { - "TreatWarningsAsErrors": "false" - }, - "BuildParameters": { - "PB_BuildType": "Release" - }, - "Definitions": [ - { - "Name": "DotNet-Trusted-Publish", - "SkipBranchAndVersionOverrides": "true", - "Parameters": { - "VstsRepositoryName": "DotNet-CoreCLR-Trusted", - "GitHubRepositoryName": "coreclr" - }, - "ReportingParameters": { - "TaskName": "Publish", - "Type": "build/publish/", - "ConfigurationGroup": "Release" - } - } - ], - "DependsOn": [ - "Trusted-All-Release", - "Trusted-Crossbuild-Release" - ] - }, - { - "Name": "Publish Symbols - Release", - "Parameters": { - "TreatWarningsAsErrors": "false" - }, - "BuildParameters": { - "PB_BuildType": "Release" - }, - "Definitions": [ - { - "Name": 
"DotNet-Trusted-Publish-Symbols", - "SkipBranchAndVersionOverrides": "true", - "Parameters": { - }, - "ReportingParameters": { - "TaskName": "Symbol Publish", - "Type": "build/publish/", - "ConfigurationGroup": "Release" - } - } - ], - "DependsOn": [ - "Trusted-All-Release", - "Trusted-Crossbuild-Release" - ] - } - ] -} diff --git a/buildpipeline/security/DotNet-CoreClr-Security-Windows.json b/buildpipeline/security/DotNet-CoreClr-Security-Windows.json deleted file mode 100644 index 4a27f1c2fc6b..000000000000 --- a/buildpipeline/security/DotNet-CoreClr-Security-Windows.json +++ /dev/null @@ -1,581 +0,0 @@ -{ - "build": [ - { - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run clean.cmd", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "clean.cmd", - "arguments": "-all", - "workingFolder": "$(Build.SourcesDirectory)", - "failOnStandardError": "false" - } - }, - { - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run sync to download packages", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "$(PB_CloudDropAccountName) $(PB_CloudDropAccessToken) $(PB_CloudDropContainer)", - "workingFolder": "$(Build.SourcesDirectory)", - "inlineScript": "param($account, $token, $container)\n.\\sync.cmd -ab -- /p:CloudDropAccountName=$account /p:CloudDropAccessToken=$token /p:ContainerName=$container", - "failOnStandardError": "true" - } - }, - { - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Extract downloaded nupkgs", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "$(Build.SourcesDirectory)", - "workingFolder": "$(Build.SourcesDirectory)", - "inlineScript": "param($SrcDir)\n$secDir = Join-Path \"$SrcDir\" \"security\"\n$pkgDir = \"$SrcDir\\packages\\AzureTransfer\\Release\\symbolpkg\"\ngci \"$pkgDir\\*.nupkg\" | rename-item -newname { [io.path]::ChangeExtension($_.name, \"zip\") }\ngci \"$pkgDir\\*.zip\" | % {\n$dstDir = Join-Path \"$secDir\" $($_.BaseName)\nExpand-Archive -Path $($_.FullName) -DestinationPath \"$dstDir\" -Force\n}\n", - "failOnStandardError": "true" - } - }, - { - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "List all files", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "$(Build.SourcesDirectory)", - "workingFolder": "$(Build.SourcesDirectory)", - "inlineScript": "param($SrcDir)\n$fileCount = 0\ngci $SrcDir -recurse | % {\nWrite-Host $($_.FullName)\n$fileCount += 1\n}\nWrite-Host \"File Count: $fileCount\"\n", - "failOnStandardError": "true" - } - }, - { - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Remove api-*, ucrtbase*, and files other than DLLs, PDBs and TXT", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": 
"e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "$(Build.SourcesDirectory)", - "workingFolder": "$(Build.SourcesDirectory)", - "inlineScript": "param($SrcDir)\n$secDir = Join-Path \"$SrcDir\" \"security\"\n$extList = \".dll\", \".pdb\", \".txt\"\ngci $secDir -Recurse | where { !$_.PSIsContainer } | % {\nif ($extList -inotcontains $_.Extension -or $_.BaseName -like \"api-*\" -or $_.BaseName -like \"ucrtbase*\")\n{\n rm $_.FullName -Force\n Write-Host \"Removed $($_.FullName)\"\n}\n}\n\n", - "failOnStandardError": "true" - } - }, - { - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Workaround for long path - DELETE files with path length greater than or equal to 240 characters", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "$(Build.SourcesDirectory)", - "workingFolder": "$(Build.SourcesDirectory)", - "inlineScript": "param($SrcDir)\n$longPath = New-Object System.Collections.ArrayList\ngci \"$SrcDir\\*\" -recurse | where {!$_.PSIsContainer} | % {\nif ($($_.FullName.Length) -ge 240)\n{\n$longPath.Add($($_.Directory.FullName)) | Out-Null\n}\n}\n$longPath | % {\nStart-Process \"cmd\" -ArgumentList \"/c rd /S /Q $_\" -Wait\nWrite-Host \"DELETED $_\"\n}\n", - "failOnStandardError": "true" - } - }, - { - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "List all files - post delete", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "$(Build.SourcesDirectory)", - "workingFolder": "$(Build.SourcesDirectory)", - "inlineScript": "param($SrcDir)\n$fileCount = 0\ngci $SrcDir -recurse | % {\nWrite-Host $($_.FullName)\n$fileCount += 1\n}\nWrite-Host \"File Count: $fileCount\"\n", - "failOnStandardError": "true" - } - }, - { - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run BinSkim ", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "3056813a-40e9-4b2f-8f6b-612d1bc4e045", - "versionSpec": "3.*", - "definitionType": "task" - }, - "inputs": { - "InputType": "CommandLine", - "arguments": "analyze security\\*.dll --recurse --sympath security\\*.pdb --verbose --statistics", - "Function": "analyze", - "AnalyzeTarget": "$(Build.ArtifactStagingDirectory)", - "AnalyzeSymPath": "", - "AnalyzeConfigPath": "default", - "AnalyzePluginPath": "", - "AnalyzeRecurse": "true", - "AnalyzeVerbose": "true", - "AnalyzeHashes": "true", - "AnalyzeStatistics": "false", - "AnalyzeEnvironment": "false", - "ExportRulesOutputType": "SARIF", - "DumpTarget": "$(Build.ArtifactStagingDirectory)", - "DumpRecurse": "true", - "DumpVerbose": "true", - "toolVersion": "Latest" - } - }, - { - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run APIScan", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "9adea2b1-3752-438c-80c6-a6f0a812abdd", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "targetMode": "binarysym", - "softwareFolder": "$(Build.SourcesDirectory)\\security", - "mpdFolder": "", - 
"softwareName": "CoreCLR", - "softwareVersionNum": "$(PB_BuildNumber)", - "softwareBuildNum": "$(PB_BuildNumber)", - "modeType": "prerelease", - "noCopySymbols": "false", - "noCopyBinaries": "false", - "noDecompress": "true", - "exclusionList": "", - "email": "", - "symbolsFolder": "$(Build.SourcesDirectory)\\security", - "preBbtBinariesFolder": "", - "preBbtSymbolsFolder": "", - "isLargeApp": "false", - "analyzerTimeout": "00:00:00", - "preserveTempFiles": "false", - "toolVersion": "Latest" - } - }, - { - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "git checkout", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "scriptType": "inlineScript", - "scriptName": "", - "arguments": "$(Build.SourcesDirectory) \"$(PB_Git)\"", - "workingFolder": "$(Build.SourcesDirectory)", - "inlineScript": "param($SrcDir, $git)\n$pkgExtPath= Join-Path \"$SrcDir\" \"security\"\ngci \"$pkgExtPath\\version.txt\" -Recurse | % { \n$sha = gc $_\nWrite-Host \"$sha\"\nif (-not [string]::IsNullOrWhiteSpace($sha))\n{\nStart-Process \"$git\" -ArgumentList \"checkout -- .\" -Wait -Verbose -ErrorAction Stop\nStart-Process \"$git\" -ArgumentList \"checkout $sha\" -Wait -Verbose -ErrorAction Stop\nWrite-Host \"Checked out at $sha\"\nbreak\n}\n}", - "failOnStandardError": "true" - } - }, - { - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run CredScan", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "ea576cd4-c61f-48f8-97e7-a3cb07b90a6f", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "outputFormat": "pre", - "toolVersion": "Latest", - "scanFolder": "$(Build.SourcesDirectory)", - "searchersFileType": "Default", - "searchersFile": "", - "suppressionsFile": "", - "suppressAsError": "false", - "batchSize": "" - } - }, - { - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run PoliCheck", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "d785890c-0d0d-46bd-8167-8fa9d49990c7", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "inputType": "Basic", - "cmdLineArgs": "/F:$(Build.SourcesDirectory) /T:9 /O:PoliCheck.xml", - "targetType": "F", - "targetArgument": "$(Build.SourcesDirectory)", - "importEx": "0", - "termTypeT": "0029a9", - "termTypeTCustom": "9", - "termTypeK": "", - "termTypeL": "", - "EXGT": "false", - "result": "PoliCheck.xml", - "optionsFC": "1", - "optionsXS": "1", - "optionsCTGLEN": "", - "optionsSEV": "", - "optionsPE": "", - "optionsHMENABLE": "", - "optionsHPATH": "", - "optionsHVER": "", - "optionsRulesDBPath": "", - "optionsRule": "", - "optionsXCLASS": "", - "optionsTASKNAME": "", - "optionsWORKINGDIRECTORY": "", - "optionsFTPATH": "", - "optionsD": "", - "optionsB1": "", - "optionsB2": "", - "optionsB3": "", - "optionsOCDB": "", - "toolVersion": "Latest" - } - }, - { - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Post Analysis", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "f5679091-e6da-4974-a8dc-0eec03a8ea63", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "APIScan": "true", - "BinScope": "false", - "BinSkim": "true", - "BinSkimBreakOn": "Error", - "CredScan": "true", - "FortifySCA": "false", - "FxCop": "false", - "FxCopBreakOn": "ErrorAbove", - "ModernCop": "false", - 
"ModernCopBreakOn": "Error", - "PoliCheck": "true", - "PoliCheckBreakOn": "Severity1", - "SDLNativeRules": "false" - } - }, - { - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Publish Security Analysis Logs", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "4096c760-3a8a-435d-9689-88c0311bbc0e", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "ArtifactName": "CodeAnalysisLogs", - "ArtifactType": "Container", - "TargetPath": "\\\\my\\share\\$(Build.DefinitionName)\\$(Build.BuildNumber)", - "RvName": "", - "ProductComponentName": "", - "ProductVersionNumber": "", - "PlatformName": "", - "SDLToolName": "", - "SDLToolResultFile": "" - } - }, - { - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "TSA upload to Codebase: DotNet-CoreCLR-Trusted_$(CodeBase) Stamp: Azure", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "3da26988-bb64-4a23-8f06-45531d297dae", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "codebase": "NewOrUpdate", - "tsaStamp": "Azure", - "tsaWebApiUrl": "$(TSAStamp)", - "codeBaseName": "DotNet-CoreCLR-Trusted_$(CodeBase)", - "notificationAlias": "$(NotificationAlias)", - "codeBaseAdmins": "NORTHAMERICA\\raeda", - "instanceUrlAzure": "MSAZURE", - "instanceUrlBing": "", - "instanceUrlCarbon": "", - "instanceUrlDevDiv": "DEVDIV", - "instanceUrlSkype": "", - "instanceUrlTsa": "", - "instanceUrlPpe": "", - "projectNameDAIPVSTF": "", - "projectNameDYNAMICSCRM": "", - "projectNameMSAZURE": "One", - "projectNameMSDYENG": "", - "projectNameMSECG": "", - "projectNameVSTFRD": "", - "projectNameMSASG": "", - "projectNameMICROSOFTVSTS": "", - "projectNameMSDATA": "", - "projectNameMSENG": "", - "projectNameDEVDIV": "DevDiv", - "projectNameSKYPETEST2": "", - "projectNameONEDRIVE": "", - "projectNameSQLBUVSTS": "", - "projectNamePOWERBI": "", - "projectNameAZUREVSTFPPE": "", - "projectNameSKYPE": "", - "projectNameDOMOREEXP": "", - "projectNameSQLBUVSTSTEST": "", - "areaPath": "One\\DevDiv\\DotNetCore", - "iterationPath": "One", - "uploadAPIScan": "true", - "uploadBinScope": "false", - "uploadBinSkim": "true", - "uploadCredScan": "true", - "uploadFortifySCA": "false", - "uploadFxCop": "false", - "uploadModernCop": "false", - "uploadPoliCheck": "true", - "uploadPREfast": "false", - "validateToolOutput": "Warning", - "validateCompatibility": "Error", - "uploadAsync": "true" - } - }, - { - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run clean.cmd", - "timeoutInMinutes": 0, - "condition": "succeeded()", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "clean.cmd", - "arguments": "-all", - "workingFolder": "$(Build.SourcesDirectory)", - "failOnStandardError": "false" - } - } - ], - "options": [ - { - "enabled": false, - "definition": { - "id": "5bc3cfb7-6b54-4a4b-b5d2-a3905949f8a6" - }, - "inputs": {} - }, - { - "enabled": false, - "definition": { - "id": "7c555368-ca64-4199-add6-9ebaf0b0137d" - }, - "inputs": { - "multipliers": "[]", - "parallel": "false", - "continueOnError": "true", - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "a9db38f9-9fdc-478c-b0f9-464221e58316" - }, - "inputs": { - "workItemType": "234347", - "assignToRequestor": "true", - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": 
"57578776-4c22-4526-aeb0-86b6da17ee9c" - }, - "inputs": {} - }, - { - "enabled": false, - "definition": { - "id": "5d58cc01-7c75-450c-be18-a388ddb129ec" - }, - "inputs": { - "branchFilters": "[\"+refs/heads/*\"]", - "additionalFields": "{}" - } - } - ], - "variables": { - "system.debug": { - "value": "false" - } - }, - "demands": [ - "Agent.OS -equals windows_nt", - "msbuild" - ], - "retentionRules": [ - { - "branches": [ - "+refs/heads/*" - ], - "artifacts": [ - "build.SourceLabel" - ], - "artifactTypesToDelete": [], - "daysToKeep": 10, - "minimumToKeep": 1, - "deleteBuildRecord": true, - "deleteTestResults": true - } - ], - "_links": { - "self": { - "href": "https://devdiv.visualstudio.com/0bdbc590-a062-4c3f-b0f6-9383f67865ee/_apis/build/Definitions/6389" - }, - "web": { - "href": "https://devdiv.visualstudio.com/_permalink/_build/index?collectionId=011b8bdf-6d56-4f87-be0d-0092136884d9&projectId=0bdbc590-a062-4c3f-b0f6-9383f67865ee&definitionId=6389" - }, - "editor": { - "href": "https://devdiv.visualstudio.com/_permalink/_build/definitionEditor?collectionId=011b8bdf-6d56-4f87-be0d-0092136884d9&projectId=0bdbc590-a062-4c3f-b0f6-9383f67865ee&definitionId=6389" - }, - "badge": { - "href": "https://devdiv.visualstudio.com/_apis/public/build/definitions/0bdbc590-a062-4c3f-b0f6-9383f67865ee/6389/badge" - } - }, - "buildNumberFormat": "$(date:yyyyMMdd)$(rev:-rr)", - "jobAuthorizationScope": 1, - "jobTimeoutInMinutes": 600, - "jobCancelTimeoutInMinutes": 5, - "badgeEnabled": true, - "repository": { - "properties": { - "cleanOptions": "3", - "labelSources": "0", - "labelSourcesFormat": "$(build.buildNumber)", - "reportBuildStatus": "true", - "gitLfsSupport": "false", - "skipSyncSource": "false", - "checkoutNestedSubmodules": "false", - "fetchDepth": "0" - }, - "id": "670e3783-ab4f-44fc-9786-d332007da311", - "type": "TfsGit", - "name": "DotNet-CoreCLR-Trusted", - "url": "https://devdiv.visualstudio.com/DevDiv/_git/DotNet-CoreCLR-Trusted", - "defaultBranch": "refs/heads/master", - "clean": "false", - "checkoutSubmodules": false - }, - "processParameters": {}, - "quality": "definition", - "authoredBy": { - "id": "9d5fdf9f-36b6-4d0c-a12e-2737a673af94", - "displayName": "Ravi Eda", - "uniqueName": "raeda@microsoft.com", - "url": "https://app.vssps.visualstudio.com/Aa44b2c06-f247-425c-8464-4a0676af910a/_apis/Identities/9d5fdf9f-36b6-4d0c-a12e-2737a673af94", - "imageUrl": "https://devdiv.visualstudio.com/_api/_common/identityImage?id=9d5fdf9f-36b6-4d0c-a12e-2737a673af94" - }, - "queue": { - "id": 36, - "name": "DotNet-Build", - "pool": { - "id": 39, - "name": "DotNet-Build" - } - }, - "id": 6389, - "name": "DotNet-CoreCLR-Security-Windows", - "url": "https://devdiv.visualstudio.com/0bdbc590-a062-4c3f-b0f6-9383f67865ee/_apis/build/Definitions/6389", - "uri": "vstfs:///Build/Definition/6389", - "path": "\\", - "revision": 2, - "createdDate": "2017-06-13T19:06:48.730Z", - "project": { - "id": "0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "name": "DevDiv", - "description": "Visual Studio and DevDiv team project for git source code repositories. Work items will be added for Adams, Dev14 work items are tracked in vstfdevdiv. 
", - "url": "https://devdiv.visualstudio.com/_apis/projects/0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "state": "wellFormed", - "revision": 418097695, - "visibility": 0 - } -} diff --git a/buildpipeline/security/pipeline.json b/buildpipeline/security/pipeline.json deleted file mode 100644 index 95416f09ec38..000000000000 --- a/buildpipeline/security/pipeline.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "Repository": "coreclr", - "Definitions": { - "Path": ".", - "Type": "VSTS", - "BaseUrl": "https://devdiv.visualstudio.com/DefaultCollection", - "SkipBranchAndVersionOverrides": "false" - }, - "Pipelines": [ - { - "Name": "Security Build for Windows", - "Parameters": { - "TreatWarningsAsErrors": "false" - }, - "Definitions": [ - { - "Name": "DotNet-CoreCLR-Security-Windows" - } - ] - } - ] -} diff --git a/buildpipeline/tests/Dotnet-CoreClr-Trusted-BuildTests.json b/buildpipeline/tests/Dotnet-CoreClr-Trusted-BuildTests.json deleted file mode 100644 index 4b5cbd82de42..000000000000 --- a/buildpipeline/tests/Dotnet-CoreClr-Trusted-BuildTests.json +++ /dev/null @@ -1,432 +0,0 @@ -{ - "build": [ - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": false, - "displayName": "Run AgentTools/Begin.ps1", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\Begin.ps1\") {\n \"Begin.ps1 script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\Begin.ps1\n} else {\n \"Begin.ps1 script does not exist. Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Setup vs dev env", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), ne(variables['PB_SkipTests'], 'true'))", - "refName": "Task1", - "task": { - "id": "bfc8bf76-e7ac-4a8c-9a55-a944a9f632fd", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "setup_vs_tools.cmd", - "arguments": "", - "modifyEnvironment": "true", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run clean.cmd", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), ne(variables['PB_SkipTests'], 'true'))", - "refName": "Task2", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "clean.cmd", - "arguments": "-all", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run sync.cmd", - "timeoutInMinutes": 0, - "condition": "and(succeeded(),ne(variables['PB_SkipTests'], 'true'))", - "refName": "Task3", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "sync.cmd", - "arguments": "-ab -AzureAccount=$(CloudDropAccountName) -AzureToken=$(CloudDropAccessToken) -Container=$(ParentContainerName) -RuntimeId=$(Rid) -BlobNamePrefix=$(ParentBlobNamePrefix)$(ParentBuildType)/TestNativeBins/$(Rid) -- 
/p:DownloadDirectory=$(Build.SourcesDirectory)/packages/TestNativeBins/$(Rid)/$(ParentBuildType)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Generate version props file", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), ne(variables['PB_SkipTests'], 'true'))", - "refName": "Task4", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "msbuild", - "arguments": "createVersionFile.proj /p:OfficialBuildId=$(ParentOfficialBuildId)", - "workingFolder": "src", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Run build-test.cmd", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), ne(variables['PB_SkipTests'], 'true'))", - "refName": "Task5", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "build-test.cmd", - "arguments": "$(ParentBuildType) $(Architecture) buildagainstpackages runtimeid $(Rid) $(TargetsNonWindowsArg)$(CrossgenArg)-OfficialBuildId=$(ParentOfficialBuildId) -OverwriteCoreClrPackageVersion -Priority=$(Priority) -- /p:IntermediateAzureFeed=$(IntermediateAzureFeed)", - "workingFolder": "", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": false, - "alwaysRun": false, - "displayName": "Send job to Helix", - "timeoutInMinutes": 0, - "condition": "and(succeeded(), ne(variables['PB_SkipTests'], 'true'))", - "refName": "Task6", - "task": { - "id": "d9bafed4-0b18-4f58-968d-86655b4d2ce9", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "filename": "msbuild", - "arguments": "helixpublish.proj /p:CloudDropAccessToken=$(CloudDropAccessToken) /p:CloudDropAccountName=$(CloudDropAccountName) /p:ContainerName=$(ParentContainerName) /p:Platform=$(Architecture) /p:BuildType=$(ParentBuildType) /p:CloudResultsAccountName=$(CloudResultsAccountName) /p:CloudResultsAccessToken=$(CloudResultsAccessToken) /p:TargetsWindows=$(TargetsWindows) /p:OverwriteOnUpload=true /p:Rid=$(Rid) /p:TargetQueues=\"$(TargetQueues)\" /p:TestProduct=$(TestProduct) /p:Branch=$(SourceBranch) /p:HelixApiAccessKey=$(HelixApiAccessKey) /p:HelixApiEndpoint=$(HelixApiEndpoint) /p:FilterToOSGroup=$(FilterToOSGroup) /p:FilterToTestTFM=$(FilterToTestTFM) /p:TimeoutInSeconds=1800 /p:HelixJobType=$(HelixJobType) /fileloggerparameters:Verbosity=diag;LogFile=helix.log", - "workingFolder": "tests", - "failOnStandardError": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Copy Files to: $(Build.StagingDirectory)\\BuildLogs", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "refName": "CopyFiles1", - "task": { - "id": "5bfb729a-a7c8-4a78-a7c3-8d717bb7c13c", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "SourceFolder": "", - "Contents": "**/*.log", - "TargetFolder": "$(Build.StagingDirectory)\\BuildLogs", - "CleanTargetFolder": "false", - "OverWrite": "false", - "flattenFolders": "false" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Publish Artifact: BuildLogs", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "refName": 
"PublishBuildArtifacts1", - "task": { - "id": "2ff763a7-ce83-4e1f-bc89-0ae63477cebe", - "versionSpec": "1.*", - "definitionType": "task" - }, - "inputs": { - "PathtoPublish": "$(Build.StagingDirectory)\\BuildLogs", - "ArtifactName": "BuildLogs", - "ArtifactType": "Container", - "TargetPath": "\\\\my\\share\\$(Build.DefinitionName)\\$(Build.BuildNumber)", - "Parallel": "false", - "ParallelCount": "8" - } - }, - { - "environment": {}, - "enabled": true, - "continueOnError": true, - "alwaysRun": true, - "displayName": "Run AgentTools/End.ps1", - "timeoutInMinutes": 0, - "condition": "succeededOrFailed()", - "task": { - "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", - "versionSpec": "2.*", - "definitionType": "task" - }, - "inputs": { - "targetType": "inline", - "filePath": "", - "arguments": "", - "script": "if (Test-Path \"$(AgentToolsPath)\\End.ps1\") {\n \"End.ps1 script found. Executing...\"\n cd $(AgentToolsPath)\n & $(AgentToolsPath)\\End.ps1\n} else {\n \"End.ps1 script does not exist. Moving on...\"\n}", - "errorActionPreference": "continue", - "failOnStderr": "false", - "ignoreLASTEXITCODE": "true", - "workingDirectory": "" - } - } - ], - "options": [ - { - "enabled": false, - "definition": { - "id": "a9db38f9-9fdc-478c-b0f9-464221e58316" - }, - "inputs": { - "workItemType": "234347", - "assignToRequestor": "true", - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "57578776-4c22-4526-aeb0-86b6da17ee9c" - }, - "inputs": { - "additionalFields": "{}" - } - }, - { - "enabled": false, - "definition": { - "id": "5d58cc01-7c75-450c-be18-a388ddb129ec" - }, - "inputs": { - "branchFilters": "[\"+refs/heads/*\"]", - "additionalFields": "{}" - } - } - ], - "variables": { - "system.debug": { - "value": "false", - "allowOverride": true - }, - "PB_BuildType": { - "value": "Release", - "allowOverride": true - }, - "Architecture": { - "value": "x64", - "allowOverride": true - }, - "Priority": { - "value": "1" - }, - "CloudDropAccountName": { - "value": "dotnetbuildoutput" - }, - "CloudDropAccessToken": { - "value": null, - "isSecret": true - }, - "OfficialBuildId": { - "value": "$(Build.BuildNumber)", - "allowOverride": true - }, - "Label": { - "value": "$(Build.BuildNumber)", - "allowOverride": true - }, - "PB_SkipTests": { - "value": "false", - "allowOverride": true - }, - "CloudResultsAccountName": { - "value": "dotnetjobresults" - }, - "CloudResultsAccessToken": { - "value": null, - "isSecret": true - }, - "TargetsWindows": { - "value": "false" - }, - "Rid": { - "value": "linux-x64" - }, - "TargetQueues": { - "value": "Debian.8.Amd64,Fedora.27.Amd64,Redhat.7.amd64,Ubuntu.1404.Amd64,Ubuntu.1604.Amd64,Ubuntu.1804.Amd64,Opensuse.423.Amd64,Sles.12.Amd64" - }, - "TestProduct": { - "value": "coreclr" - }, - "SourceBranch": { - "value": "master" - }, - "HelixApiAccessKey": { - "value": null, - "isSecret": true - }, - "ParentContainerName": { - "value": "", - "allowOverride": true - }, - "ParentBlobNamePrefix": { - "value": "", - "allowOverride": true - }, - "ParentBuildType": { - "value": "", - "allowOverride": true - }, - "ParentOfficialBuildId": { - "value": "", - "allowOverride": true - }, - "HelixApiEndpoint": { - "value": "https://helix.dot.net/api/2016-09-12/jobs" - }, - "FilterToOSGroup": { - "value": "Dummy" - }, - "FilterToTestTFM": { - "value": "Dummy" - }, - "PB_ContainerName": { - "value": "$(Label)-$(TestContainerSuffix)-test" - }, - "TestContainerSuffix": { - "value": "linux" - }, - "IntermediateAzureFeed": { - "value": 
"https://dotnetbuildoutput.blob.core.windows.net/$(ParentContainerName)/$(ParentBlobNamePrefix)$(ParentBuildType)/pkg/index.json" - }, - "RuntimeIDArg": { - "value": "runtimeid linux-x64" - }, - "TargetsNonWindowsArg": { - "value": "" - }, - "CrossgenArg": { - "value": "" - } - }, - "retentionRules": [ - { - "branches": [ - "+refs/heads/*" - ], - "artifacts": [], - "artifactTypesToDelete": [ - "FilePath", - "SymbolStore" - ], - "daysToKeep": 10, - "minimumToKeep": 1, - "deleteBuildRecord": true, - "deleteTestResults": true - } - ], - "buildNumberFormat": "$(date:yyyyMMdd)$(rev:-rr)-$(Rid)", - "jobAuthorizationScope": "projectCollection", - "jobTimeoutInMinutes": 180, - "jobCancelTimeoutInMinutes": 5, - "repository": { - "properties": { - "labelSources": "0", - "reportBuildStatus": "true", - "fetchDepth": "0", - "gitLfsSupport": "false", - "skipSyncSource": "false", - "cleanOptions": "0", - "checkoutNestedSubmodules": "false", - "labelSourcesFormat": "$(build.buildNumber)" - }, - "id": "670e3783-ab4f-44fc-9786-d332007da311", - "type": "TfsGit", - "name": "DotNet-CoreCLR-Trusted", - "url": "https://devdiv.visualstudio.com/DevDiv/_git/DotNet-CoreCLR-Trusted", - "defaultBranch": "refs/heads/master", - "clean": "true", - "checkoutSubmodules": false - }, - "processParameters": {}, - "quality": "definition", - "drafts": [], - "queue": { - "_links": { - "self": { - "href": "https://devdiv.visualstudio.com/DefaultCollection/_apis/build/Queues/330" - } - }, - "id": 330, - "name": "DotNetCore-Build", - "url": "https://devdiv.visualstudio.com/DefaultCollection/_apis/build/Queues/330", - "pool": { - "id": 97, - "name": "DotNetCore-Build" - } - }, - "id": 5159, - "name": "Dotnet-CoreClr-Trusted-BuildTests", - "path": "\\", - "type": "build", - "queueStatus": "enabled", - "project": { - "id": "0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "name": "DevDiv", - "description": "Visual Studio and DevDiv team project for git source code repositories. Work items will be added for Adams, Dev14 work items are tracked in vstfdevdiv. 
", - "url": "https://devdiv.visualstudio.com/DefaultCollection/_apis/projects/0bdbc590-a062-4c3f-b0f6-9383f67865ee", - "state": "wellFormed", - "revision": 418098432, - "visibility": "organization" - } -} diff --git a/buildpipeline/tests/test_pipelines.json b/buildpipeline/tests/test_pipelines.json deleted file mode 100644 index 12ad04f8b1e0..000000000000 --- a/buildpipeline/tests/test_pipelines.json +++ /dev/null @@ -1,232 +0,0 @@ -{ - "Repository": "coreclr", - "Definitions": { - "Path": ".", - "Type": "VSTS", - "BaseUrl": "https://devdiv.visualstudio.com/DefaultCollection", - "SkipBranchAndVersionOverrides": "false" - }, - "Pipelines": [ - { - "Name": "Build And Run Tests - Release", - "Parameters": { - "TreatWarningsAsErrors": "false" - }, - "BuildParameters": { - "PB_BuildType": "Release" - }, - "Definitions": [ - { - "Name": "Dotnet-CoreClr-Trusted-BuildTests", - "Parameters": { - "HelixJobType": "test/functional/cli/", - "TargetsWindows": "true", - "Rid": "win-x64", - "TargetQueues": "Windows.10.Amd64,(Windows.Nano.1803.Amd64)windows.10.amd64.serverrs4@mcr.microsoft.com/dotnet-buildtools/prereqs:nanoserver-1803-helix-amd64-05227e1-20190509225944,Windows.10.Amd64.Core,Windows.7.Amd64,Windows.81.Amd64", - "TestContainerSuffix": "windows", - }, - "ReportingParameters": { - "OperatingSystem": "Windows", - "SubType": "Build-Tests", - "Type": "build/product/", - "PB_BuildType": "Release" - } - }, - { - "Name": "Dotnet-CoreClr-Trusted-BuildTests", - "Parameters": { - "HelixJobType": "test/functional/r2r/cli/", - "TargetsWindows": "true", - "Rid": "win-x64", - "TargetQueues": "Windows.10.Amd64,(Windows.Nano.1803.Amd64)windows.10.amd64.serverrs4@mcr.microsoft.com/dotnet-buildtools/prereqs:nanoserver-1803-helix-amd64-05227e1-20190509225944,Windows.10.Amd64.Core,Windows.7.Amd64,Windows.81.Amd64", - "TestContainerSuffix": "windows-r2r", - "CrossgenArg": "Crossgen " - }, - "ReportingParameters": { - "OperatingSystem": "Windows", - "SubType": "Build-Tests-R2R", - "Type": "build/product/", - "PB_BuildType": "Release" - } - }, - { - "Name": "Dotnet-CoreClr-Trusted-BuildTests", - "Parameters": { - "Architecture": "arm", - "HelixJobType": "test/functional/cli/", - "TargetsWindows": "true", - "Rid": "win-arm", - "TargetQueues": "Windows.10.Arm64", - "TestContainerSuffix": "windows", - }, - "ReportingParameters": { - "OperatingSystem": "Windows", - "Architecture": "arm64", - "SubType": "Build-Tests", - "Type": "build/product/", - "PB_BuildType": "Release" - } - }, - { - "Name": "Dotnet-CoreClr-Trusted-BuildTests", - "Parameters": { - "Architecture": "arm", - "HelixJobType": "test/functional/r2r/cli/", - "TargetsWindows": "true", - "Rid": "win-arm", - "TargetQueues": "Windows.10.Arm64", - "TestContainerSuffix": "windows-r2r", - "CrossgenArg": "Crossgen " - }, - "ReportingParameters": { - "OperatingSystem": "Windows", - "Architecture": "arm64", - "SubType": "Build-Tests-R2R", - "Type": "build/product/", - "PB_BuildType": "Release" - } - }, - { - "Name": "Dotnet-CoreClr-Trusted-BuildTests", - "Parameters": { - "HelixJobType": "test/functional/cli/", - "TargetsWindows": "false", - "Rid": "osx-x64", - "TargetQueues": "OSX.1012.Amd64,OSX.1013.Amd64", - "TestContainerSuffix": "osx", - "TargetsNonWindowsArg": "TargetsNonWindows " - }, - "ReportingParameters": { - "OperatingSystem": "OSX", - "SubType": "Build-Tests", - "Type": "build/product/", - "PB_BuildType": "Release" - } - }, - { - "Name": "Dotnet-CoreClr-Trusted-BuildTests", - "Parameters": { - "HelixJobType": "test/functional/r2r/cli/", - 
"TargetsWindows": "false", - "Rid": "osx-x64", - "TargetQueues": "OSX.1012.Amd64,OSX.1013.Amd64", - "TestContainerSuffix": "osx-r2r", - "CrossgenArg": "Crossgen ", - "TargetsNonWindowsArg": "TargetsNonWindows " - }, - "ReportingParameters": { - "OperatingSystem": "OSX", - "SubType": "Build-Tests-R2R", - "Type": "build/product/", - "PB_BuildType": "Release" - } - }, - { - "Name": "Dotnet-CoreClr-Trusted-BuildTests", - "Parameters": { - "HelixJobType": "test/functional/cli/", - "TargetsWindows": "false", - "Rid": "linux-x64", - "TargetQueues": "Debian.8.Amd64,(Fedora.28.Amd64)ubuntu.1604.amd64@mcr.microsoft.com/dotnet-buildtools/prereqs:fedora-28-helix-09ca40b-20190508143249,Redhat.7.Amd64,Ubuntu.1404.Amd64,Ubuntu.1604.Amd64,Ubuntu.1804.Amd64,Opensuse.423.Amd64,SLES.12.Amd64", - "TestContainerSuffix": "linux", - "TargetsNonWindowsArg": "TargetsNonWindows " - }, - "ReportingParameters": { - "OperatingSystem": "RedHat 7", - "SubType": "Build-Tests", - "Type": "build/product/", - "PB_BuildType": "Release" - } - }, - { - "Name": "Dotnet-CoreClr-Trusted-BuildTests", - "Parameters": { - "HelixJobType": "test/functional/r2r/cli/", - "TargetsWindows": "false", - "Rid": "linux-x64", - "TargetQueues": "Debian.8.Amd64,(Fedora.28.Amd64)ubuntu.1604.amd64@mcr.microsoft.com/dotnet-buildtools/prereqs:fedora-28-helix-09ca40b-20190508143249,Redhat.7.Amd64,Ubuntu.1404.Amd64,Ubuntu.1604.Amd64,Ubuntu.1804.Amd64,Opensuse.423.Amd64,SLES.12.Amd64", - "TestContainerSuffix": "linux-r2r", - "CrossgenArg": "Crossgen ", - "TargetsNonWindowsArg": "TargetsNonWindows " - }, - "ReportingParameters": { - "OperatingSystem": "RedHat 7", - "SubType": "Build-Tests-R2R", - "Type": "build/product/", - "PB_BuildType": "Release" - } - }, - { - "Name": "Dotnet-CoreClr-Trusted-BuildTests", - "Parameters": { - "HelixJobType": "test/functional/cli/", - "TargetsWindows": "false", - "Rid": "rhel.6-x64", - "TargetQueues": "Redhat.6.Amd64", - "TestContainerSuffix": "rhel6", - "TargetsNonWindowsArg": "TargetsNonWindows " - }, - "ReportingParameters": { - "OperatingSystem": "RedHat6", - "SubType": "Build-Tests", - "Type": "build/product/", - "PB_BuildType": "Release" - } - }, - { - "Name": "Dotnet-CoreClr-Trusted-BuildTests", - "Parameters": { - "HelixJobType": "test/functional/r2r/cli/", - "TargetsWindows": "false", - "Rid": "rhel.6-x64", - "TargetQueues": "Redhat.6.Amd64", - "TestContainerSuffix": "rhel6-r2r", - "CrossgenArg": "Crossgen ", - "TargetsNonWindowsArg": "TargetsNonWindows " - }, - "ReportingParameters": { - "OperatingSystem": "RedHat6", - "SubType": "Build-Tests-R2R", - "Type": "build/product/", - "PB_BuildType": "Release" - } - }, - { - "Name": "Dotnet-CoreClr-Trusted-BuildTests", - "Parameters": { - "HelixJobType": "test/functional/cli/", - "TargetsWindows": "false", - "Rid": "linux-musl-x64", - "TargetQueues": "(Alpine.310.Amd64)Ubuntu.1604.Amd64@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.10-helix-3043688-20190918214010", - "TestContainerSuffix": "alpine36", - "TargetsNonWindowsArg": "TargetsNonWindows " - }, - "ReportingParameters": { - "OperatingSystem": "Alpine3.9", - "SubType": "Build-Tests", - "Type": "build/product/", - "PB_BuildType": "Release" - } - }, - { - "Name": "Dotnet-CoreClr-Trusted-BuildTests", - "Parameters": { - "HelixJobType": "test/functional/r2r/cli/", - "TargetsWindows": "false", - "Rid": "linux-musl-x64", - "TargetQueues": "(Alpine.310.Amd64)Ubuntu.1604.Amd64@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.10-helix-3043688-20190918214010", - "TestContainerSuffix": "alpine36-r2r", - 
"CrossgenArg": "Crossgen ", - "TargetsNonWindowsArg": "TargetsNonWindows " - }, - "ReportingParameters": { - "OperatingSystem": "Alpine3.9", - "SubType": "Build-Tests-R2R", - "Type": "build/product/", - "PB_BuildType": "Release" - } - } - ], - } - ] -} diff --git a/netci.groovy b/netci.groovy deleted file mode 100755 index b15b2f82730b..000000000000 --- a/netci.groovy +++ /dev/null @@ -1,3584 +0,0 @@ -// Import the utility functionality. - -import jobs.generation.* - -// The input project name (e.g. dotnet/coreclr) -def project = GithubProject -// The input branch name (e.g. master) -def branch = GithubBranchName -def projectFolder = Utilities.getFolderName(project) + '/' + Utilities.getFolderName(branch) - -// Create a folder for JIT stress jobs and associated folder views -folder('jitstress') -Utilities.addStandardFolderView(this, 'jitstress', project) - -// Create a folder for testing via illink -folder('illink') -Utilities.addStandardFolderView(this, 'illink', project) - -def static getOSGroup(def os) { - def osGroupMap = ['Ubuntu':'Linux', - 'RHEL7.2': 'Linux', - 'Ubuntu16.04': 'Linux', - 'Ubuntu16.10': 'Linux', - 'Debian8.4':'Linux', - 'Fedora24':'Linux', - 'OSX10.12':'OSX', - 'Windows_NT':'Windows_NT', - 'CentOS7.1': 'Linux', - 'Tizen': 'Linux'] - def osGroup = osGroupMap.get(os, null) - assert osGroup != null : "Could not find os group for ${os}" - return osGroupMap[os] -} - -// We use this class (vs variables) so that the static functions can access data here. -class Constants { - - // We have very limited ARM64 hardware (used for ARM/ARMLB/ARM64 testing). So only allow certain branches to use it. - def static WindowsArm64Branches = [ - 'master'] - - // Innerloop build OS's - // The Windows_NT_BuildOnly OS is a way to speed up the Non-Windows builds by avoiding - // test execution in the build flow runs. It generates the exact same build - // as Windows_NT but without running the tests. - def static osList = [ - 'Ubuntu', - 'Debian8.4', - 'OSX10.12', - 'Windows_NT', - 'Windows_NT_BuildOnly', - 'CentOS7.1', - 'RHEL7.2', - 'Ubuntu16.04', - 'Ubuntu16.10', - 'Fedora24', - 'Tizen'] - - def static crossList = [ - 'Ubuntu', - 'Debian8.4', - 'OSX10.12', - 'Windows_NT', - 'CentOS7.1', - 'RHEL7.2'] - - // This is a set of JIT stress modes combined with the set of variables that - // need to be set to actually enable that stress mode. 
The key of the map is the stress mode and - // the values are the environment variables - def static jitStressModeScenarios = [ - 'minopts' : ['COMPlus_JITMinOpts' : '1'], - 'tieredcompilation' : ['COMPlus_EXPERIMENTAL_TieredCompilation' : '1'], - 'forcerelocs' : ['COMPlus_ForceRelocs' : '1'], - 'jitstress1' : ['COMPlus_JitStress' : '1'], - 'jitstress2' : ['COMPlus_JitStress' : '2'], - 'jitstressregs1' : ['COMPlus_JitStressRegs' : '1'], - 'jitstressregs2' : ['COMPlus_JitStressRegs' : '2'], - 'jitstressregs3' : ['COMPlus_JitStressRegs' : '3'], - 'jitstressregs4' : ['COMPlus_JitStressRegs' : '4'], - 'jitstressregs8' : ['COMPlus_JitStressRegs' : '8'], - 'jitstressregs0x10' : ['COMPlus_JitStressRegs' : '0x10'], - 'jitstressregs0x80' : ['COMPlus_JitStressRegs' : '0x80'], - 'jitstressregs0x1000' : ['COMPlus_JitStressRegs' : '0x1000'], - 'jitstress2_jitstressregs1' : ['COMPlus_JitStress' : '2', 'COMPlus_JitStressRegs' : '1'], - 'jitstress2_jitstressregs2' : ['COMPlus_JitStress' : '2', 'COMPlus_JitStressRegs' : '2'], - 'jitstress2_jitstressregs3' : ['COMPlus_JitStress' : '2', 'COMPlus_JitStressRegs' : '3'], - 'jitstress2_jitstressregs4' : ['COMPlus_JitStress' : '2', 'COMPlus_JitStressRegs' : '4'], - 'jitstress2_jitstressregs8' : ['COMPlus_JitStress' : '2', 'COMPlus_JitStressRegs' : '8'], - 'jitstress2_jitstressregs0x10' : ['COMPlus_JitStress' : '2', 'COMPlus_JitStressRegs' : '0x10'], - 'jitstress2_jitstressregs0x80' : ['COMPlus_JitStress' : '2', 'COMPlus_JitStressRegs' : '0x80'], - 'jitstress2_jitstressregs0x1000' : ['COMPlus_JitStress' : '2', 'COMPlus_JitStressRegs' : '0x1000'], - 'tailcallstress' : ['COMPlus_TailcallStress' : '1'], - 'jitsse2only' : ['COMPlus_EnableAVX' : '0', 'COMPlus_EnableSSE3_4' : '0'], - 'jitnosimd' : ['COMPlus_FeatureSIMD' : '0'], - 'jitincompletehwintrinsic' : ['COMPlus_EnableIncompleteISAClass' : '1'], - 'jitx86hwintrinsicnoavx' : ['COMPlus_EnableIncompleteISAClass' : '1', 'COMPlus_EnableAVX' : '0'], // testing the legacy SSE encoding - 'jitx86hwintrinsicnoavx2' : ['COMPlus_EnableIncompleteISAClass' : '1', 'COMPlus_EnableAVX2' : '0'], // testing SNB/IVB - 'jitx86hwintrinsicnosimd' : ['COMPlus_EnableIncompleteISAClass' : '1', 'COMPlus_FeatureSIMD' : '0'], // match "jitnosimd", may need to remove after decoupling HW intrinsic from FeatureSIMD - 'jitnox86hwintrinsic' : ['COMPlus_EnableIncompleteISAClass' : '1', 'COMPlus_EnableSSE' : '0' , 'COMPlus_EnableSSE2' : '0' , 'COMPlus_EnableSSE3' : '0' , 'COMPlus_EnableSSSE3' : '0' , 'COMPlus_EnableSSE41' : '0' , 'COMPlus_EnableSSE42' : '0' , 'COMPlus_EnableAVX' : '0' , 'COMPlus_EnableAVX2' : '0' , 'COMPlus_EnableAES' : '0' , 'COMPlus_EnableBMI1' : '0' , 'COMPlus_EnableBMI2' : '0' , 'COMPlus_EnableFMA' : '0' , 'COMPlus_EnableLZCNT' : '0' , 'COMPlus_EnablePCLMULQDQ' : '0' , 'COMPlus_EnablePOPCNT' : '0'], - 'corefx_baseline' : [ : ], // corefx baseline - 'corefx_minopts' : ['COMPlus_JITMinOpts' : '1'], - 'corefx_tieredcompilation' : ['COMPlus_EXPERIMENTAL_TieredCompilation' : '1'], - 'corefx_jitstress1' : ['COMPlus_JitStress' : '1'], - 'corefx_jitstress2' : ['COMPlus_JitStress' : '2'], - 'corefx_jitstressregs1' : ['COMPlus_JitStressRegs' : '1'], - 'corefx_jitstressregs2' : ['COMPlus_JitStressRegs' : '2'], - 'corefx_jitstressregs3' : ['COMPlus_JitStressRegs' : '3'], - 'corefx_jitstressregs4' : ['COMPlus_JitStressRegs' : '4'], - 'corefx_jitstressregs8' : ['COMPlus_JitStressRegs' : '8'], - 'corefx_jitstressregs0x10' : ['COMPlus_JitStressRegs' : '0x10'], - 'corefx_jitstressregs0x80' : ['COMPlus_JitStressRegs' : '0x80'], - 
'corefx_jitstressregs0x1000' : ['COMPlus_JitStressRegs' : '0x1000'], - 'gcstress0x3' : ['COMPlus_GCStress' : '0x3'], - 'gcstress0xc' : ['COMPlus_GCStress' : '0xC'], - 'zapdisable' : ['COMPlus_ZapDisable' : '1', 'COMPlus_ReadyToRun' : '0'], - 'heapverify1' : ['COMPlus_HeapVerify' : '1'], - 'gcstress0xc_zapdisable' : ['COMPlus_GCStress' : '0xC', 'COMPlus_ZapDisable' : '1', 'COMPlus_ReadyToRun' : '0'], - 'gcstress0xc_zapdisable_jitstress2' : ['COMPlus_GCStress' : '0xC', 'COMPlus_ZapDisable' : '1', 'COMPlus_ReadyToRun' : '0', 'COMPlus_JitStress' : '2'], - 'gcstress0xc_zapdisable_heapverify1' : ['COMPlus_GCStress' : '0xC', 'COMPlus_ZapDisable' : '1', 'COMPlus_ReadyToRun' : '0', 'COMPlus_HeapVerify' : '1'], - 'gcstress0xc_jitstress1' : ['COMPlus_GCStress' : '0xC', 'COMPlus_JitStress' : '1'], - 'gcstress0xc_jitstress2' : ['COMPlus_GCStress' : '0xC', 'COMPlus_JitStress' : '2'], - 'gcstress0xc_minopts_heapverify1' : ['COMPlus_GCStress' : '0xC', 'COMPlus_JITMinOpts' : '1', 'COMPlus_HeapVerify' : '1'] - ] - - // This is a set of ReadyToRun stress scenarios - def static r2rStressScenarios = [ - 'r2r_jitstress1' : ["COMPlus_JitStress": "1"], - 'r2r_jitstress2' : ["COMPlus_JitStress": "2"], - 'r2r_jitstressregs1' : ["COMPlus_JitStressRegs": "1"], - 'r2r_jitstressregs2' : ["COMPlus_JitStressRegs": "2"], - 'r2r_jitstressregs3' : ["COMPlus_JitStressRegs": "3"], - 'r2r_jitstressregs4' : ["COMPlus_JitStressRegs": "4"], - 'r2r_jitstressregs8' : ["COMPlus_JitStressRegs": "8"], - 'r2r_jitstressregs0x10' : ["COMPlus_JitStressRegs": "0x10"], - 'r2r_jitstressregs0x80' : ["COMPlus_JitStressRegs": "0x80"], - 'r2r_jitstressregs0x1000' : ["COMPlus_JitStressRegs": "0x1000"], - 'r2r_jitminopts' : ["COMPlus_JITMinOpts": "1"], - 'r2r_jitforcerelocs' : ["COMPlus_ForceRelocs": "1"], - 'r2r_gcstress15' : ["COMPlus_GCStress": "0xF"] - ] - - // This is the basic set of scenarios - def static basicScenarios = [ - 'innerloop', - 'normal', - 'ilrt', - 'r2r', - 'longgc', - 'formatting', - 'gcsimulator', - // 'jitdiff', // jitdiff is currently disabled, until someone spends the effort to make it fully work - 'standalone_gc', - 'gc_reliability_framework', - 'illink'] - - def static allScenarios = basicScenarios + r2rStressScenarios.keySet() + jitStressModeScenarios.keySet() - - // Valid PR trigger combinations. - def static prTriggeredValidInnerLoopCombos = [ - 'Windows_NT': [ - 'x64': [ - 'Checked' - ], - 'x86': [ - 'Checked', - 'Release' - ], - 'arm': [ - 'Checked', - ], - 'arm64': [ - 'Checked' - ], - 'armlb': [ - 'Checked' - ] - ], - 'Windows_NT_BuildOnly': [ - 'x64': [ - 'Checked', - 'Release' - ], - 'x86': [ - 'Checked', - 'Release' - ], - 'arm': [ - 'Checked' - ], - ], - 'Ubuntu': [ - 'x64': [ - 'Checked' - ], - 'arm64': [ - 'Debug' - ], - 'arm': [ - 'Checked' - ] - ], - 'CentOS7.1': [ - 'x64': [ - 'Debug', - 'Checked' - ] - ], - 'OSX10.12': [ - 'x64': [ - 'Checked' - ] - ], - 'Tizen': [ - 'armem': [ - 'Checked' - ] - ], - ] - - // A set of scenarios that are valid for arm/arm64/armlb tests run on hardware. This is a map from valid scenario name - // to Tests.lst file categories to exclude. - // - // This list should contain a subset of the scenarios from `allScenarios`. Please keep this in the same order as that, - // and with the same values, with some commented out, for easier maintenance. - // - // Note that some scenarios that are commented out should be enabled, but haven't yet been. 
- // - def static validArmWindowsScenarios = [ - 'innerloop': [], - 'normal': [], - // 'ilrt' - 'r2r': ["R2R_FAIL", "R2R_EXCLUDE"], - // 'longgc' - // 'formatting' - // 'gcsimulator' - // 'jitdiff' - // 'standalone_gc' - // 'gc_reliability_framework' - // 'illink' - 'r2r_jitstress1': ["R2R_FAIL", "R2R_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'r2r_jitstress2': ["R2R_FAIL", "R2R_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'r2r_jitstressregs1': ["R2R_FAIL", "R2R_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'r2r_jitstressregs2': ["R2R_FAIL", "R2R_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'r2r_jitstressregs3': ["R2R_FAIL", "R2R_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'r2r_jitstressregs4': ["R2R_FAIL", "R2R_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'r2r_jitstressregs8': ["R2R_FAIL", "R2R_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'r2r_jitstressregs0x10': ["R2R_FAIL", "R2R_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'r2r_jitstressregs0x80': ["R2R_FAIL", "R2R_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'r2r_jitstressregs0x1000': ["R2R_FAIL", "R2R_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'r2r_jitminopts': ["R2R_FAIL", "R2R_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE", "MINOPTS_FAIL", "MINOPTS_EXCLUDE"], - 'r2r_jitforcerelocs': ["R2R_FAIL", "R2R_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'r2r_gcstress15': ["R2R_FAIL", "R2R_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE", "GCSTRESS_FAIL", "GCSTRESS_EXCLUDE"], - 'minopts': ["MINOPTS_FAIL", "MINOPTS_EXCLUDE"], - 'tieredcompilation': [], - 'forcerelocs': [], - 'jitstress1': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstress2': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstressregs1': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstressregs2': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstressregs3': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstressregs4': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstressregs8': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstressregs0x10': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstressregs0x80': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstressregs0x1000': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstress2_jitstressregs1': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstress2_jitstressregs2': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstress2_jitstressregs3': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstress2_jitstressregs4': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstress2_jitstressregs8': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstress2_jitstressregs0x10': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstress2_jitstressregs0x80': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'jitstress2_jitstressregs0x1000': ["JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'tailcallstress': ["TAILCALLSTRESS_FAIL", "TAILCALLSTRESS_EXCLUDE"], - // 'jitsse2only' // Only relevant to xarch - 'jitnosimd': [], // Only interesting on platforms where SIMD support exists. 
- // 'jitincompletehwintrinsic' - // 'jitx86hwintrinsicnoavx' - // 'jitx86hwintrinsicnoavx2' - // 'jitx86hwintrinsicnosimd' - // 'jitnox86hwintrinsic' - 'corefx_baseline': [], // corefx tests don't use smarty - 'corefx_minopts': [], // corefx tests don't use smarty - 'corefx_tieredcompilation': [], // corefx tests don't use smarty - 'corefx_jitstress1': [], // corefx tests don't use smarty - 'corefx_jitstress2': [], // corefx tests don't use smarty - 'corefx_jitstressregs1': [], // corefx tests don't use smarty - 'corefx_jitstressregs2': [], // corefx tests don't use smarty - 'corefx_jitstressregs3': [], // corefx tests don't use smarty - 'corefx_jitstressregs4': [], // corefx tests don't use smarty - 'corefx_jitstressregs8': [], // corefx tests don't use smarty - 'corefx_jitstressregs0x10': [], // corefx tests don't use smarty - 'corefx_jitstressregs0x80': [], // corefx tests don't use smarty - 'corefx_jitstressregs0x1000': [], // corefx tests don't use smarty - 'gcstress0x3': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE"], - 'gcstress0xc': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE"], - 'zapdisable': ["ZAPDISABLE_FAIL", "ZAPDISABLE_EXCLUDE"], - 'heapverify1': [], - 'gcstress0xc_zapdisable': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "ZAPDISABLE_FAIL", "ZAPDISABLE_EXCLUDE"], - 'gcstress0xc_zapdisable_jitstress2': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "ZAPDISABLE_FAIL", "ZAPDISABLE_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0xc_zapdisable_heapverify1': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "ZAPDISABLE_FAIL", "ZAPDISABLE_EXCLUDE"], - 'gcstress0xc_jitstress1': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0xc_jitstress2': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0xc_minopts_heapverify1': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "MINOPTS_FAIL", "MINOPTS_EXCLUDE"], - - // - // NOTE: the following scenarios are not defined in the 'allScenarios' list! Is this a bug? 
- // - - 'minopts_zapdisable': ["ZAPDISABLE_FAIL", "ZAPDISABLE_EXCLUDE", "MINOPTS_FAIL", "MINOPTS_EXCLUDE"], - 'gcstress0x3_jitstress1': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0x3_jitstress2': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0x3_jitstressregs1': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0x3_jitstressregs2': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0x3_jitstressregs3': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0x3_jitstressregs4': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0x3_jitstressregs8': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0x3_jitstressregs0x10': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0x3_jitstressregs0x80': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0x3_jitstressregs0x1000': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0xc_jitstressregs1': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0xc_jitstressregs2': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0xc_jitstressregs3': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0xc_jitstressregs4': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0xc_jitstressregs8': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0xc_jitstressregs0x10': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0xc_jitstressregs0x80': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"], - 'gcstress0xc_jitstressregs0x1000': ["GCSTRESS_FAIL", "GCSTRESS_EXCLUDE", "JITSTRESS_FAIL", "JITSTRESS_EXCLUDE"] - ] - - def static validLinuxArm64Scenarios = [ - 'innerloop', - 'normal', - 'r2r', - 'gcstress0x3', - 'gcstress0xc' - ] - - def static validLinuxArmScenarios = [ - 'innerloop', - 'normal', - 'r2r', - 'r2r_jitstress1', - 'r2r_jitstress2', - 'r2r_jitstressregs1', - 'r2r_jitstressregs2', - 'r2r_jitstressregs3', - 'r2r_jitstressregs4', - 'r2r_jitstressregs8', - 'r2r_jitstressregs0x10', - 'r2r_jitstressregs0x80', - 'r2r_jitstressregs0x1000', - 'r2r_jitminopts', - 'r2r_jitforcerelocs', - 'r2r_gcstress15', - 'minopts', - 'forcerelocs', - 'jitstress1', - 'jitstress2', - 'jitstressregs1', - 'jitstressregs2', - 'jitstressregs3', - 'jitstressregs4', - 'jitstressregs8', - 'jitstressregs0x10', - 'jitstressregs0x80', - 'jitstressregs0x1000', - 'jitstress2_jitstressregs1', - 'jitstress2_jitstressregs2', - 'jitstress2_jitstressregs3', - 'jitstress2_jitstressregs4', - 'jitstress2_jitstressregs8', - 'jitstress2_jitstressregs0x10', - 'jitstress2_jitstressregs0x80', - 'jitstress2_jitstressregs0x1000', - 'tailcallstress', - 'gcstress0x3', - 'gcstress0xc', - 'zapdisable', - 'heapverify1', - 'gcstress0xc_zapdisable', - 'gcstress0xc_zapdisable_jitstress2', - 'gcstress0xc_zapdisable_heapverify1', - 'gcstress0xc_jitstress1', - 'gcstress0xc_jitstress2', - 'gcstress0xc_minopts_heapverify1' - ] - - def static configurationList = ['Debug', 'Checked', 'Release'] - - // This is the set of architectures - // Some of these are pseudo-architectures: - // armlb 
-- same as arm, but use the LEGACY_BACKEND JIT - // armem -- ARM builds/runs using an emulator. Used for Ubuntu/Ubuntu16.04/Tizen runs. - // x86_arm_altjit -- ARM runs on x86 using the ARM altjit - // x64_arm64_altjit -- ARM64 runs on x64 using the ARM64 altjit - def static architectureList = ['arm', 'armlb', 'armem', 'x86_arm_altjit', 'x64_arm64_altjit', 'arm64', 'x64', 'x86'] - - // This set of architectures that cross build on Windows and run on Windows ARM64 hardware. - def static armWindowsCrossArchitectureList = ['arm', 'armlb', 'arm64'] -} - -// ************************************************************** -// Create some specific views -// -// These aren't using the Utilities.addStandardFolderView() function, because that creates -// views based on a single regular expression. These views will be generated by adding a -// specific set of jobs to them. -// -// Utilities.addStandardFolderView() also creates a lot of additional stuff around the -// view, like "Build Statistics", "Job Statistics", "Unstable Jobs". Until it is determined -// those are required, don't add them (which simplifies the view pages, as well). -// ************************************************************** - -class Views { - def static MergeJobView = null - def static PeriodicJobView = null - def static ArchitectureViews = [:] - def static OSViews = [:] -} - -// MergeJobView: include all jobs that execute when a PR change is merged. -Views.MergeJobView = listView('Merge') { - recurse() - columns { - status() - weather() - name() - lastSuccess() - lastFailure() - lastDuration() - buildButton() - } -} - -// PeriodicJobView: include all jobs that execute on a schedule -Views.PeriodicJobView = listView('Periodic') { - recurse() - columns { - status() - weather() - name() - lastSuccess() - lastFailure() - lastDuration() - buildButton() - } -} - -// Create a view for non-PR jobs for each architecture. -Constants.architectureList.each { architecture -> - Views.ArchitectureViews[architecture] = listView(architecture) { - recurse() - columns { - status() - weather() - name() - lastSuccess() - lastFailure() - lastDuration() - buildButton() - } - } -} - -// Create a view for non-PR jobs for each OS. -Constants.osList.each { os -> - // Don't create one for the special 'Windows_NT_BuildOnly' - if (os == 'Windows_NT_BuildOnly') { - return - } - Views.OSViews[os] = listView(os) { - recurse() - columns { - status() - weather() - name() - lastSuccess() - lastFailure() - lastDuration() - buildButton() - } - } -} - -def static addToMergeView(def job) { - Views.MergeJobView.with { - jobs { - name(job.name) - } - } -} - -def static addToPeriodicView(def job) { - Views.PeriodicJobView.with { - jobs { - name(job.name) - } - } -} - -def static addToViews(def job, def isPR, def architecture, def os) { - if (isPR) { - // No views want PR jobs currently. - return - } - - // Add to architecture view. - Views.ArchitectureViews[architecture].with { - jobs { - name(job.name) - } - } - - // Add to OS view. 
- Views.OSViews[os].with { - jobs { - name(job.name) - } - } -} - -def static addPeriodicTriggerHelper(def job, String cronString, boolean alwaysRuns = false) { - addToPeriodicView(job) - Utilities.addPeriodicTrigger(job, cronString, alwaysRuns) -} - -def static addGithubPushTriggerHelper(def job) { - addToMergeView(job) - Utilities.addGithubPushTrigger(job) -} - - -def static setMachineAffinity(def job, def os, def architecture, def options = null) { - assert os instanceof String - assert architecture instanceof String - - def armArches = ['arm', 'armlb', 'armem', 'arm64'] - def supportedArmLinuxOs = ['Ubuntu', 'Ubuntu16.04', 'Tizen'] - - if (!(architecture in armArches)) { - assert options == null - Utilities.setMachineAffinity(job, os, 'latest-or-auto') - - return - } - - // This is an arm(64) job. - // - // There are several options. - // - // Windows_NT - // - // Arm32 (Build) -> latest-arm64 - // |-> os == "Windows_NT" && (architecture == "arm" || architecture == "armlb") && options['use_arm64_build_machine'] == true - // Arm32 (Test) -> arm64-windows_nt - // |-> os == "Windows_NT" && (architecture == "arm" || architecture == "armlb") && options['use_arm64_build_machine'] == false - // - // Arm64 (Build) -> latest-arm64 - // |-> os == "Windows_NT" && architecture == "arm64" && options['use_arm64_build_machine'] == true - // Arm64 (Test) -> arm64-windows_nt - // |-> os == "Windows_NT" && architecture == "arm64" && options['use_arm64_build_machine'] == false - // - // Ubuntu - // - // Arm32 emulator (Build, Test) -> arm-cross-latest - // |-> os in supportedArmLinuxOs && (architecture == "armem") - // - // Arm32 hardware (Flow) -> Ubuntu 16.04 latest-or-auto (don't use limited arm hardware) - // |-> os == "Ubuntu" && (architecture == "arm") && options['is_flow_job'] == true - // Arm32 hardware (Build) -> Ubuntu 16.04 latest-or-auto - // |-> os == "Ubuntu" && (architecture == "arm") && options['is_build_job'] == true - // Arm32 hardware (Test) -> ubuntu.1404.arm32.open - // |-> os == "Ubuntu" && (architecture == "arm") - // - // Arm64 (Build) -> arm64-cross-latest - // |-> os != "Windows_NT" && architecture == "arm64" && options['is_build_only'] == true - // Arm64 Small Page Size (Test) -> arm64-small-page-size - // |-> os != "Windows_NT" && architecture == "arm64" && options['large_pages'] == false - // Arm64 Large Page Size (Test) -> arm64-huge-page-size - // |-> os != "Windows_NT" && architecture == "arm64" && options['large_pages'] == true - - // This has to be a arm arch - assert architecture in armArches - if (os == "Windows_NT") { - // arm32/arm64 Windows jobs share the same machines for now - def isBuild = options['use_arm64_build_machine'] == true - - if (isBuild == true) { - Utilities.setMachineAffinity(job, os, 'latest-arm64') - } else { - Utilities.setMachineAffinity(job, os, 'arm64-windows_nt') - } - } else { - assert os != 'Windows_NT' - assert os in supportedArmLinuxOs - - if (architecture == 'arm64') { - if ((options != null) && (options['is_build_only'] == true)) { - // Arm64 Linux build machine - Utilities.setMachineAffinity(job, os, 'arm64-cross-latest') - } else { - // Arm64 Linux test machines - if ((options != null) && (options['large_pages'] == true)) { - Utilities.setMachineAffinity(job, os, 'arm64-huge-page-size') - } else { - Utilities.setMachineAffinity(job, os, 'arm64-small-page-size') - } - } - } - else if (architecture == 'armem') { - // arm emulator (Ubuntu/Ubuntu16.04/Tizen). Build and test on same machine, - // using Docker. 
- Utilities.setMachineAffinity(job, 'Ubuntu', 'arm-cross-latest') - } - else { - // arm Ubuntu on hardware. - assert (architecture == 'arm') && (os == 'Ubuntu') - def isFlow = (options != null) && (options['is_flow_job'] == true) - def isBuild = (options != null) && (options['is_build_job'] == true) - if (isFlow || isBuild) { - // arm Ubuntu build machine. Build uses docker, so the actual host OS is not - // very important. Therefore, use latest or auto. Flow jobs don't need to use - // arm hardware. - Utilities.setMachineAffinity(job, 'Ubuntu16.04', 'latest-or-auto') - } else { - // arm Ubuntu test machine - // There is no tag (like, e.g., "arm-latest") for this, so don't call - // Utilities.setMachineAffinity. Just add the machine affinity - // manually. We specify the Helix queue name here. - job.with { - label('ubuntu.1404.arm32.open') - } - } - } - } -} - -// setJobMachineAffinity: compute the machine affinity options for a job, -// then set the job with those affinity options. -def static setJobMachineAffinity(def architecture, def os, def isBuildJob, def isTestJob, def isFlowJob, def job) -{ - assert (isBuildJob && !isTestJob && !isFlowJob) || - (!isBuildJob && isTestJob && !isFlowJob) || - (!isBuildJob && !isTestJob && isFlowJob) - - def affinityOptions = null - def affinityArchitecture = architecture - - if (os == "Windows_NT") { - if (architecture in Constants.armWindowsCrossArchitectureList) { - if (isBuildJob) { - affinityOptions = [ "use_arm64_build_machine" : true ] - } else if (isTestJob) { - affinityOptions = [ "use_arm64_build_machine" : false ] - } else if (isFlowJob) { - // For the flow jobs set the machine affinity as x64 - affinityArchitecture = 'x64' - } - } - } - else { - if (architecture == 'arm64') { - if (isBuildJob) { - affinityOptions = ['is_build_only': true] - } else if (isTestJob) { - affinityOptions = [ "large_pages" : false ] - } - } - else if (architecture == 'arm') { - if (isBuildJob) { - affinityOptions = ['is_build_job': true] - } else if (isFlowJob) { - affinityOptions = ['is_flow_job': true] - } - } - } - - setMachineAffinity(job, os, affinityArchitecture, affinityOptions) -} - -def static isGCStressRelatedTesting(def scenario) { - // The 'r2r_gcstress15' scenario is a basic scenario. - // Detect it and make it a GCStress related. 
- if (scenario == 'r2r_gcstress15') - { - return true; - } - - def gcStressTestEnvVars = [ 'COMPlus_GCStress', 'COMPlus_ZapDisable', 'COMPlus_HeapVerify'] - def scenarioName = scenario.toLowerCase() - def isGCStressTesting = false - Constants.jitStressModeScenarios[scenario].each{ k, v -> - if (k in gcStressTestEnvVars) { - isGCStressTesting = true; - } - } - return isGCStressTesting -} - -def static isCoreFxScenario(def scenario) { - def corefx_prefix = 'corefx_' - if (scenario.length() < corefx_prefix.length()) { - return false - } - return scenario.substring(0,corefx_prefix.length()) == corefx_prefix -} - -def static isR2RBaselineScenario(def scenario) { - return (scenario == 'r2r') -} - -def static isR2RStressScenario(def scenario) { - return Constants.r2rStressScenarios.containsKey(scenario) -} - -def static isR2RScenario(def scenario) { - return isR2RBaselineScenario(scenario) || isR2RStressScenario(scenario) -} - -def static isJitStressScenario(def scenario) { - return Constants.jitStressModeScenarios.containsKey(scenario) -} - -def static isLongGc(def scenario) { - return (scenario == 'longgc' || scenario == 'gcsimulator') -} - -def static isJitDiff(def scenario) { - return (scenario == 'jitdiff') -} - -def static isGcReliabilityFramework(def scenario) { - return (scenario == 'gc_reliability_framework') -} - -def static isArmWindowsScenario(def scenario) { - return Constants.validArmWindowsScenarios.containsKey(scenario) -} - -def static isValidPrTriggeredInnerLoopJob(os, architecture, configuration, isBuildOnly) { - if (isBuildOnly == true) { - os = 'Windows_NT_BuildOnly' - } - - def validOsPrTriggerArchConfigs = Constants.prTriggeredValidInnerLoopCombos[os] - - if (validOsPrTriggerArchConfigs == null) { - return false - } - - if (validOsPrTriggerArchConfigs[architecture] != null) { - def validOsPrTriggerConfigs = validOsPrTriggerArchConfigs[architecture] - - if (!(configuration in validOsPrTriggerConfigs)) { - return false - } - } else { - return false - } - - return true -} - -def static setJobTimeout(newJob, isPR, architecture, configuration, scenario, isBuildOnly) { - // 2 hours (120 minutes) is the default timeout - def timeout = 120 - def innerLoop = (scenario == "innerloop") - - if (!innerLoop) { - // Pri-1 test builds take a long time. Default PR jobs are Pri-0; everything else is Pri-1 - // (see calculateBuildCommands()). So up the Pri-1 build jobs timeout. - timeout = 240 - } - - if (!isBuildOnly) { - // Note that these can only increase, never decrease, the Pri-1 timeout possibly set above. - if (isGCStressRelatedTesting(scenario)) { - timeout = 4320 - } - else if (isCoreFxScenario(scenario)) { - timeout = 360 - } - else if (isJitStressScenario(scenario)) { - timeout = 300 - } - else if (isR2RBaselineScenario(scenario)) { - timeout = 240 - } - else if (isLongGc(scenario)) { - timeout = 1440 - } - else if (isJitDiff(scenario)) { - timeout = 240 - } - else if (isGcReliabilityFramework(scenario)) { - timeout = 1440 - } - else if (architecture == 'armlb' || architecture == 'armem' || architecture == 'arm64') { - timeout = 240 - } - - if (architecture == 'arm') { - // ARM32 machines are particularly slow. - timeout += 120 - } - } - - if (configuration == 'Debug') { - // Debug runs can be very slow. Add an hour. - timeout += 60 - } - - if (architecture == 'x86_arm_altjit' || architecture == 'x64_arm64_altjit') { - // AltJit runs compile all methods twice. - timeout *= 2 - } - - // If we've changed the timeout from the default, set it in the job. 
- - if (timeout != 120) { - Utilities.setJobTimeout(newJob, timeout) - } -} - -def static getJobFolder(def scenario) { - if (isJitStressScenario(scenario) || isR2RStressScenario(scenario)) { - return 'jitstress' - } - if (scenario == 'illink') { - return 'illink' - } - return '' -} - -def static getStressModeDisplayName(def scenario) { - def displayStr = '' - Constants.jitStressModeScenarios[scenario].each{ k, v -> - def prefixLength = 'COMPlus_'.length() - if (k.length() >= prefixLength) { - def modeName = k.substring(prefixLength, k.length()) - displayStr += ' ' + modeName + '=' + v - } - } - - if (isCoreFxScenario(scenario)) { - displayStr = ('CoreFx ' + displayStr).trim() - } - - return displayStr -} - -def static getR2RDisplayName(def scenario) { - // Assume the scenario name is one from the r2rStressScenarios dict, and remove its "r2r_" prefix. - def displayStr = scenario - def prefixLength = 'r2r_'.length() - if (displayStr.length() >= prefixLength) { - displayStr = "R2R " + displayStr.substring(prefixLength, displayStr.length()) - } else if (scenario == 'r2r') { - displayStr = "R2R" - } - return displayStr -} - -// -// Functions to create an environment script. -// envScriptCreate -- initialize the script (call first) -// envScriptFinalize -- finalize the script (call last) -// envScriptSetStressModeVariables -- set stress mode variables in the env script -// envScriptAppendExistingScript -- append an existing script to the generated script -// -// Each script returns a string of commands. Concatenate all the strings together before -// adding them to the builds commands, to make sure they get executed as one Jenkins script. -// - -// Initialize the environment setting script. -def static envScriptCreate(def os, def stepScriptLocation) { - def stepScript = '' - if (os == 'Windows_NT') { - stepScript += "echo Creating TestEnv script\r\n" - stepScript += "if exist ${stepScriptLocation} del ${stepScriptLocation}\r\n" - - // Create at least an empty script. - stepScript += "echo. > ${stepScriptLocation}\r\n" - } - else { - stepScript += "echo Creating environment setting script\n" - stepScript += "echo \\#\\!/usr/bin/env bash > ${stepScriptLocation}\n" - } - - return stepScript -} - -// Generates the string for setting stress mode variables. -def static envScriptSetStressModeVariables(def os, def stressModeVars, def stepScriptLocation) { - def stepScript = '' - if (os == 'Windows_NT') { - stressModeVars.each{ k, v -> - // Write out what we are writing to the script file - stepScript += "echo Setting ${k}=${v}\r\n" - // Write out the set itself to the script file` - stepScript += "echo set ${k}=${v} >> ${stepScriptLocation}\r\n" - } - } - else { - stressModeVars.each{ k, v -> - // Write out what we are writing to the script file - stepScript += "echo Setting ${k}=${v}\n" - // Write out the set itself to the script file` - stepScript += "echo export ${k}=${v} >> ${stepScriptLocation}\n" - } - } - - return stepScript -} - -// Append an existing script to an environment script. -// Returns string of commands to do this. -def static envScriptAppendExistingScript(def os, def appendScript, def stepScriptLocation) { - assert (os == 'Windows_NT') - def stepScript = '' - - stepScript += "echo Appending ${appendScript} to ${stepScriptLocation}\r\n" - stepScript += "type ${appendScript} >> ${stepScriptLocation}\r\n" - - return stepScript -} - -// Finalize an environment setting script. -// Returns string of commands to do this. 
-def static envScriptFinalize(def os, def stepScriptLocation) { - def stepScript = '' - - if (os == 'Windows_NT') { - // Display the resulting script. This is useful when looking at the output log file. - stepScript += "echo Display the total script ${stepScriptLocation}\r\n" - stepScript += "type ${stepScriptLocation}\r\n" - } - else { - stepScript += "chmod +x ${stepScriptLocation}\n" - } - - return stepScript -} - -def static isNeedDocker(def architecture, def os, def isBuild) { - if (isBuild) { - if (architecture == 'x86' && os == 'Ubuntu') { - return true - } - else if (architecture == 'armem') { - return true - } - else if (architecture == 'arm') { - if (os == 'Ubuntu') { - return true - } - } - } - else { - if (architecture == 'x86' && os == 'Ubuntu') { - return true - } - } - return false -} - -def static getDockerImageName(def architecture, def os, def isBuild) { - // We must change some docker private images to official later - if (isBuild) { - if (architecture == 'x86' && os == 'Ubuntu') { - return "hseok82/dotnet-buildtools-prereqs:ubuntu-16.04-crossx86-ef0ac75-20175511035548" - } - else if (architecture == 'armem') { - if (os == 'Ubuntu') { - return "microsoft/dotnet-buildtools-prereqs:ubuntu-14.04-cross-e435274-20180405193556" - } - else if (os == 'Ubuntu16.04') { - return "microsoft/dotnet-buildtools-prereqs:ubuntu-16.04-cross-e435274-20180404203310" - } - else if (os == 'Tizen') { - return "tizendotnet/dotnet-buildtools-prereqs:ubuntu-16.04-cross-e435274-20180426002255-tizen-rootfs-5.0m1" - } - } - else if (architecture == 'arm') { - if (os == 'Ubuntu') { - return "microsoft/dotnet-buildtools-prereqs:ubuntu-14.04-cross-e435274-20180426002420" - } - } - } - else { - if (architecture == 'x86' && os == 'Ubuntu') { - return "hseok82/dotnet-buildtools-prereqs:ubuntu1604_x86_test" - } - } - println("Unknown architecture to use docker: ${architecture} ${os}"); - assert false -} - - -// We have a limited amount of some hardware. For these, scale back the periodic testing we do. -def static jobRequiresLimitedHardware(def architecture, def os) { - if (((architecture == 'arm64') || (architecture == 'arm') || (architecture == 'armlb')) && (os == 'Windows_NT')) { - // These test jobs require ARM64 hardware - return true - } - else if ((architecture == 'arm') && (os == 'Ubuntu')) { - // These test jobs require Linux/arm32 hardware - return true - } - else { - return false - } -} - -// Calculates the name of the build job based on some typical parameters. -// -def static getJobName(def configuration, def architecture, def os, def scenario, def isBuildOnly) { - // If the architecture is x64, do not add that info into the build name. - // Need to change around some systems and other builds to pick up the right builds - // to do that. - - def suffix = scenario != 'normal' ? "_${scenario}" : ''; - if (isBuildOnly) { - suffix += '_bld' - } - def baseName = '' - switch (architecture) { - case 'x64': - if (scenario == 'normal') { - // For now we leave x64 off of the name for compatibility with other jobs - baseName = configuration.toLowerCase() + '_' + os.toLowerCase() - } - else if (scenario == 'formatting') { - // we don't care about the configuration for the formatting job. 
It runs all configs - baseName = architecture.toLowerCase() + '_' + os.toLowerCase() - } - else { - baseName = architecture.toLowerCase() + '_' + configuration.toLowerCase() + '_' + os.toLowerCase() - } - break - case 'arm64': - if (os.toLowerCase() == "windows_nt") { - // These are cross builds - baseName = architecture.toLowerCase() + '_cross_' + configuration.toLowerCase() + '_' + os.toLowerCase() - } - else { - // Defaults to a small page size set of machines. - baseName = architecture.toLowerCase() + '_' + configuration.toLowerCase() + '_' + "small_page_size" - } - break - case 'armem': - // These are cross builds - if (os == 'Tizen') { - // ABI: softfp - baseName = 'armel_cross_' + configuration.toLowerCase() + '_' + os.toLowerCase() - } - else { - baseName = architecture.toLowerCase() + '_cross_' + configuration.toLowerCase() + '_' + os.toLowerCase() - } - break - case 'armlb': - case 'arm': - baseName = architecture.toLowerCase() + '_cross_' + configuration.toLowerCase() + '_' + os.toLowerCase() - break - case 'x86': - case 'x86_arm_altjit': - case 'x64_arm64_altjit': - baseName = architecture.toLowerCase() + '_' + configuration.toLowerCase() + '_' + os.toLowerCase() - break - default: - println("Unknown architecture: ${architecture}"); - assert false - break - } - - return baseName + suffix -} - -def static addNonPRTriggers(def job, def branch, def isPR, def architecture, def os, def configuration, def scenario, def isFlowJob, def isWindowsBuildOnlyJob, def bidailyCrossList) { - - // Don't run non-PR jobs in release/2.1 branch: it takes too many resources. - if (branch == 'release/2.1') { - return - } - - // Limited Windows ARM64 hardware is restricted for non-PR triggers to certain branches. - if (os == 'Windows_NT') { - if ((architecture == 'arm64') || (architecture == 'arm') || (architecture == 'armlb')) { - if (!(branch in Constants.WindowsArm64Branches)) { - return - } - } - } - - if ((architecture == 'arm') && (os != 'Windows_NT') && isGCStressRelatedTesting(scenario)) { - // Non-Windows Arm GCStress jobs currently don't get cron or push triggers (until they are functional). - // See https://github.com/dotnet/coreclr/issues/17241. - return - } - - // Check scenario. - switch (scenario) { - case 'innerloop': - break - case 'normal': - switch (architecture) { - case 'x64': - case 'x86': - if (isFlowJob && architecture == 'x86' && os == 'Ubuntu') { - addPeriodicTriggerHelper(job, '@daily') - } - else if (isFlowJob || os == 'Windows_NT' || !(os in Constants.crossList)) { - addGithubPushTriggerHelper(job) - } - break - case 'arm': - if (os == 'Windows_NT') { - addGithubPushTriggerHelper(job) - } - else { - // Currently no push triggers, with limited arm Linux hardware. - // TODO: If we have enough machine capacity, add some arm Linux push triggers. 
- assert os == 'Ubuntu' - if (isFlowJob) { - addPeriodicTriggerHelper(job, '@daily') - } - } - break - case 'armem': - case 'armlb': - case 'x86_arm_altjit': - case 'x64_arm64_altjit': - addGithubPushTriggerHelper(job) - break - case 'arm64': - // We would normally want a per-push trigger, but with limited hardware we can't keep up - addPeriodicTriggerHelper(job, "H H/4 * * *") - break - default: - println("Unknown architecture: ${architecture}"); - assert false - break - } - break - case 'r2r': - assert !(os in bidailyCrossList) - // r2r gets a push trigger for checked/release - if (configuration == 'Checked' || configuration == 'Release') { - assert (os == 'Windows_NT') || (os in Constants.crossList) - if (architecture == 'x64' && os != 'OSX10.12') { - //Flow jobs should be Windows, Ubuntu, OSX0.12, or CentOS - if (isFlowJob || os == 'Windows_NT') { - addGithubPushTriggerHelper(job) - } - // OSX10.12 r2r jobs should only run every 12 hours, not daily. - } else if (architecture == 'x64' && os == 'OSX10.12'){ - if (isFlowJob) { - addPeriodicTriggerHelper(job, 'H H/12 * * *') - } - } - // For x86, only add per-commit jobs for Windows - else if (architecture == 'x86') { - if (os == 'Windows_NT') { - addGithubPushTriggerHelper(job) - } - } - // arm64 r2r jobs should only run daily. - else if (architecture == 'arm64') { - if (os == 'Windows_NT') { - addPeriodicTriggerHelper(job, '@daily') - } - } - } - break - case 'r2r_jitstress1': - case 'r2r_jitstress2': - case 'r2r_jitstressregs1': - case 'r2r_jitstressregs2': - case 'r2r_jitstressregs3': - case 'r2r_jitstressregs4': - case 'r2r_jitstressregs8': - case 'r2r_jitstressregs0x10': - case 'r2r_jitstressregs0x80': - case 'r2r_jitstressregs0x1000': - case 'r2r_jitminopts': - case 'r2r_jitforcerelocs': - case 'r2r_gcstress15': - assert !(os in bidailyCrossList) - - // GCStress=C is currently not supported on OS X - if (os == 'OSX10.12' && isGCStressRelatedTesting(scenario)) { - break - } - - // GC Stress 15 r2r gets a push trigger for checked/release - if (configuration == 'Checked' || configuration == 'Release') { - assert (os == 'Windows_NT') || (os in Constants.crossList) - if (architecture == 'x64') { - //Flow jobs should be Windows, Ubuntu, OSX10.12, or CentOS - if (isFlowJob || os == 'Windows_NT') { - // Add a weekly periodic trigger - addPeriodicTriggerHelper(job, 'H H * * 3,6') // some time every Wednesday and Saturday - } - } - // For x86, only add per-commit jobs for Windows - else if (architecture == 'x86') { - if (os == 'Windows_NT') { - addPeriodicTriggerHelper(job, 'H H * * 3,6') // some time every Wednesday and Saturday - } - } - } - break - case 'longgc': - assert (os == 'Ubuntu' || os == 'Windows_NT' || os == 'OSX10.12') - assert configuration == 'Release' - assert architecture == 'x64' - addPeriodicTriggerHelper(job, '@daily') - // TODO: Add once external email sending is available again - // addEmailPublisher(job, 'dotnetgctests@microsoft.com') - break - case 'gcsimulator': - assert (os == 'Ubuntu' || os == 'Windows_NT' || os == 'OSX10.12') - assert configuration == 'Release' - assert architecture == 'x64' - addPeriodicTriggerHelper(job, 'H H * * 3,6') // some time every Wednesday and Saturday - // TODO: Add once external email sending is available again - // addEmailPublisher(job, 'dotnetgctests@microsoft.com') - break - case 'standalone_gc': - assert (os == 'Ubuntu' || os == 'Windows_NT' || os == 'OSX10.12') - assert (configuration == 'Release' || configuration == 'Checked') - // TODO: Add once external email sending is 
available again - // addEmailPublisher(job, 'dotnetgctests@microsoft.com') - addPeriodicTriggerHelper(job, '@daily') - break - case 'gc_reliability_framework': - assert (os == 'Ubuntu' || os == 'Windows_NT' || os == 'OSX10.12') - assert (configuration == 'Release' || configuration == 'Checked') - // Only triggered by phrase. - break - case 'ilrt': - assert !(os in bidailyCrossList) - // ILASM/ILDASM roundtrip one gets a daily build, and only for release - if (architecture == 'x64' && configuration == 'Release') { - // We don't expect to see a job generated except in these scenarios - assert (os == 'Windows_NT') || (os in Constants.crossList) - if (isFlowJob || os == 'Windows_NT') { - addPeriodicTriggerHelper(job, '@daily') - } - } - break - case 'jitdiff': - assert (os == 'Ubuntu' || os == 'Windows_NT' || os == 'OSX10.12') - assert configuration == 'Checked' - assert (architecture == 'x64' || architecture == 'x86') - addGithubPushTriggerHelper(job) - break - case 'formatting': - assert (os == 'Windows_NT' || os == "Ubuntu") - assert architecture == 'x64' - addGithubPushTriggerHelper(job) - break - case 'jitstressregs1': - case 'jitstressregs2': - case 'jitstressregs3': - case 'jitstressregs4': - case 'jitstressregs8': - case 'jitstressregs0x10': - case 'jitstressregs0x80': - case 'jitstressregs0x1000': - case 'minopts': - case 'forcerelocs': - case 'jitstress1': - case 'jitstress2': - case 'jitstress2_jitstressregs1': - case 'jitstress2_jitstressregs2': - case 'jitstress2_jitstressregs3': - case 'jitstress2_jitstressregs4': - case 'jitstress2_jitstressregs8': - case 'jitstress2_jitstressregs0x10': - case 'jitstress2_jitstressregs0x80': - case 'jitstress2_jitstressregs0x1000': - case 'tailcallstress': - case 'jitsse2only': - case 'jitnosimd': - case 'jitnox86hwintrinsic': - case 'jitincompletehwintrinsic': - case 'jitx86hwintrinsicnoavx': - case 'jitx86hwintrinsicnoavx2': - case 'jitx86hwintrinsicnosimd': - case 'corefx_baseline': - case 'corefx_minopts': - case 'corefx_jitstress1': - case 'corefx_jitstress2': - case 'corefx_jitstressregs1': - case 'corefx_jitstressregs2': - case 'corefx_jitstressregs3': - case 'corefx_jitstressregs4': - case 'corefx_jitstressregs8': - case 'corefx_jitstressregs0x10': - case 'corefx_jitstressregs0x80': - case 'corefx_jitstressregs0x1000': - case 'zapdisable': - if (os == 'CentOS7.1') { - break - } - if (os in bidailyCrossList) { - break - } - assert (os == 'Windows_NT') || (os in Constants.crossList) - if (jobRequiresLimitedHardware(architecture, os)) { - addPeriodicTriggerHelper(job, '@weekly') - } - else { - addPeriodicTriggerHelper(job, '@daily') - } - break - case 'heapverify1': - case 'gcstress0x3': - if (os == 'CentOS7.1') { - break - } - if (os in bidailyCrossList) { - break - } - if ((architecture == 'arm64') && (os != 'Windows_NT')) { - // TODO: should we have cron jobs for arm64 Linux GCStress? 
- break - } - assert (os == 'Windows_NT') || (os in Constants.crossList) - addPeriodicTriggerHelper(job, '@weekly') - break - case 'gcstress0xc': - case 'gcstress0xc_zapdisable': - case 'gcstress0xc_zapdisable_jitstress2': - case 'gcstress0xc_zapdisable_heapverify1': - case 'gcstress0xc_jitstress1': - case 'gcstress0xc_jitstress2': - case 'gcstress0xc_minopts_heapverify1': - if (os == 'CentOS7.1') { - break - } - if (os == 'OSX10.12') { - // GCStress=C is currently not supported on OS X - break - } - if (os in bidailyCrossList) { - break - } - if ((architecture == 'arm64') && (os != 'Windows_NT')) { - // TODO: should we have cron jobs for arm64 Linux GCStress? - break - } - assert (os == 'Windows_NT') || (os in Constants.crossList) - addPeriodicTriggerHelper(job, '@weekly') - break - - case 'illink': - // Testing on other operating systems TBD - assert (os == 'Windows_NT' || os == 'Ubuntu') - if (architecture == 'x64' || architecture == 'x86') { - if (configuration == 'Checked') { - addPeriodicTriggerHelper(job, '@daily') - } - } - break - - case 'tieredcompilation': - case 'corefx_tieredcompilation': - // No periodic jobs just yet, still testing - break - - default: - println("Unknown scenario: ${scenario}"); - assert false - break - } - return -} - -// ************************** -// Define the basic inner loop builds for PR and commit. This is basically just the set -// of coreclr builds over linux/osx 10.12/windows and debug/release/checked. In addition, the windows -// builds will do a couple extra steps. -// ************************** - -// Adds a trigger for the PR build if one is needed. If isFlowJob is true, then this is the -// flow job that rolls up the build and test for non-windows OS's. // If the job is a windows build only job, -// it's just used for internal builds -// If you add a job with a trigger phrase, please add that phrase to coreclr/Documentation/project-docs/ci-trigger-phrases.md -def static addTriggers(def job, def branch, def isPR, def architecture, def os, def configuration, def scenario, def isFlowJob, def isWindowsBuildOnlyJob) { - def isNormalOrInnerloop = (scenario == "normal" || scenario == "innerloop") - - if (isWindowsBuildOnlyJob) { - return - } - - def bidailyCrossList = ['RHEL7.2', 'Debian8.4'] - // Non pull request builds. - if (!isPR) { - addNonPRTriggers(job, branch, isPR, architecture, os, configuration, scenario, isFlowJob, isWindowsBuildOnlyJob, bidailyCrossList) - return - } - - def arm64Users = [ - 'AndyAyersMS', - 'briansull', - 'BruceForstall', - 'CarolEidt', - 'cmckinsey', - 'echesakovMSFT', - 'erozenfeld', - 'janvorli', - 'jashook', - 'JosephTremoulet', - 'pgodeq', - 'russellhadley', - 'RussKeldorph', - 'sandreenko', - 'sdmaclea', - 'swaroop-sridhar', - 'jkotas', - 'markwilkie', - 'weshaggard' - ] - - // Pull request builds. Generally these fall into two categories: default triggers and on-demand triggers - // We generally only have a distinct set of default triggers but a bunch of on-demand ones. 
- def osGroup = getOSGroup(os) - switch (architecture) { - case 'x64': // editor brace matching: { - if (scenario == 'formatting') { - assert configuration == 'Checked' - if (os == 'Windows_NT' || os == 'Ubuntu') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} Formatting") - } - - break - } - - switch (os) { - // OpenSUSE, Debian & RedHat get trigger phrases for pri 0 build, and pri 1 build & test - case 'Debian8.4': - case 'RHEL7.2': - if (scenario == 'innerloop') { - assert !isFlowJob - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Innerloop Build") - } - else if (scenario == 'normal') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Build", "(?i).*test\\W+${os}\\W+${architecture}.*") - } - break - - case 'Ubuntu16.04': - assert !isFlowJob - assert scenario != 'innerloop' - // Distinguish with the other architectures (arm and x86) - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Build", "(?i).*test\\W+${os}\\W+${architecture}.*") - break - - case 'Fedora24': - case 'Ubuntu16.10': - assert !isFlowJob - assert scenario != 'innerloop' - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Build", "(?i).*test\\W+${os}\\W+.*") - break - - case 'Ubuntu': - if (scenario == 'illink') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} via ILLink", "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - break - } - // fall through - - case 'OSX10.12': - // Triggers on the non-flow jobs aren't necessary here - // Corefx testing uses non-flow jobs. - if (!isFlowJob && !isCoreFxScenario(scenario)) { - break - } - switch (scenario) { - case 'innerloop': - // PR Triggered jobs. These jobs will run pri0 tests. 
- if (configuration == 'Checked') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Innerloop Build and Test") - } - break - - case 'normal': - // OSX uses checked for default PR tests - if (configuration == 'Checked') { - // Default trigger - assert !job.name.contains("centos") - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Build and Test", "(?i).*test\\W+${os}\\W+${architecture}\\W+Build and Test.*") - } - break - - case 'jitdiff': - if (configuration == 'Checked') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Jit Diff Build and Test", "(?i).*test\\W+${os}\\W+${scenario}.*") - } - break - - case 'ilrt': - if (configuration == 'Release') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} IL RoundTrip Build and Test", "(?i).*test\\W+${os}\\W+${scenario}.*") - } - break - - case 'longgc': - if (configuration == 'Release') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Long-Running GC Build & Test", "(?i).*test\\W+${os}\\W+${configuration}\\W+${scenario}.*") - } - break - - case 'gcsimulator': - if (configuration == 'Release') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} GC Simulator", "(?i).*test\\W+${os}\\W+${configuration}\\W+${scenario}.*") - } - break - - case 'standalone_gc': - if (configuration == 'Release' || configuration == 'Checked') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Standalone GC", "(?i).*test\\W+${os}\\W+${configuration}\\W+${scenario}.*") - } - break - - case 'gc_reliability_framework': - if (configuration == 'Release' || configuration == 'Checked') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} GC Reliability Framework", "(?i).*test\\W+${os}\\W+${configuration}\\W+${scenario}.*") - } - break - - default: - if (isJitStressScenario(scenario)) { - def displayStr = getStressModeDisplayName(scenario) - assert (os == 'Windows_NT') || (os in Constants.crossList) - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Build and Test (Jit - ${displayStr})", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - } - else if (isR2RScenario(scenario)) { - if (configuration == 'Release' || configuration == 'Checked') { - def displayStr = getR2RDisplayName(scenario) - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} ${displayStr} Build and Test", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - } - } - else { - println("Unknown scenario: ${scenario}"); - assert false - } - break - - } - break - - case 'CentOS7.1': - switch (scenario) { - case 'innerloop': - // CentOS uses checked for default PR tests while debug is build only - if (configuration == 'Debug') { - // Default trigger - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Innerloop Build") - } - - // Make sure this is a flow job to get build and test. - if (configuration == 'Checked' && isFlowJob) { - assert job.name.contains("flow") - // Default trigger - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Innerloop Build and Test") - } - break - - case 'normal': - // Make sure this is a flow job to get build and test. 
- if (configuration == 'Checked' && isFlowJob) { - assert job.name.contains("flow") - // Default trigger - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Build and Test", "(?i).*test\\W+${os}\\W+${architecture}\\W+Build and Test.*") - } - break - - default: - if (isR2RScenario(scenario)) { - if (configuration == 'Release' || configuration == 'Checked') { - def displayStr = getR2RDisplayName(scenario) - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} ${displayStr} Build & Test", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - } - } - break - - } - break - - case 'Windows_NT': - switch (scenario) { - case 'innerloop': - // Default trigger - if (configuration == 'Checked' || configuration == 'Release') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Innerloop Build and Test") - } - break - - case 'normal': - if (configuration == 'Checked') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Build and Test", "(?i).*test\\W+${os}\\W+${architecture}\\W+Build and Test.*") - } - break - - case 'jitdiff': - if (configuration == 'Checked') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Jit Diff Build and Test", "(?i).*test\\W+${os}\\W+${scenario}.*") - } - break - - case 'ilrt': - if (configuration == 'Release') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} IL RoundTrip Build and Test", "(?i).*test\\W+${os}\\W+${scenario}.*") - } - break - - case 'longgc': - if (configuration == 'Release') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Long-Running GC Build & Test", "(?i).*test\\W+${os}\\W+${configuration}\\W+${scenario}.*") - } - break - - case 'gcsimulator': - if (configuration == 'Release') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} GC Simulator", "(?i).*test\\W+${os}\\W+${configuration}\\W+${scenario}.*") - } - break - - case 'standalone_gc': - if (configuration == 'Release' || configuration == 'Checked') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Standalone GC", "(?i).*test\\W+${os}\\W+${configuration}\\W+${scenario}.*") - } - break - - case 'gc_reliability_framework': - if (configuration == 'Release' || configuration == 'Checked') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} GC Reliability Framework", "(?i).*test\\W+${os}\\W+${configuration}\\W+${scenario}.*") - } - break - - case 'illink': - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} via ILLink", "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - break - - default: - if (isJitStressScenario(scenario)) { - def displayStr = getStressModeDisplayName(scenario) - assert (os == 'Windows_NT') || (os in Constants.crossList) - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Build and Test (Jit - ${displayStr})", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - } - else if (isR2RScenario(scenario)) { - if (configuration == 'Release' || configuration == 'Checked') { - def displayStr = getR2RDisplayName(scenario) - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} 
${configuration} ${displayStr} Build & Test", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - } - } - else { - println("Unknown scenario: ${scenario}"); - assert false - } - break - - } - break - - default: - println("Unknown os: ${os}"); - assert false - break - - } - - break - // editor brace matching: } - - case 'armem': // editor brace matching: { - job.with { - publishers { - azureVMAgentPostBuildAction { - agentPostBuildAction('Delete agent if the build was not successful (when idle).') - } - } - } - - switch (os) { - case 'Ubuntu': - case 'Ubuntu16.04': - assert scenario != 'innerloop' - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} Cross ${configuration} Build", - "(?i).*test\\W+${os}\\W+${architecture}\\W+Cross\\W+${configuration}\\W+Build.*") - break - - case 'Tizen': - architecture = 'armel' - - if (scenario == 'innerloop') { - if (configuration == 'Checked') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} Cross ${configuration} Innerloop Build and Test") - } - } - else { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} Cross ${configuration} Build", - "(?i).*test\\W+${os}\\W+${architecture}\\W+Cross\\W+${configuration}\\W+Build.*") - } - break - } - - break - // editor brace matching: } - - case 'armlb': - case 'arm': // editor brace matching: { - - // Triggers on the non-flow jobs aren't necessary - if (!isFlowJob) { - break - } - - // Set up a private trigger - def contextString = "${os} ${architecture} Cross ${configuration}" - def triggerString = "(?i).*test\\W+${os}\\W+${architecture}\\W+Cross\\W+${configuration}" - if (scenario == 'innerloop') { - contextString += " Innerloop" - triggerString += "\\W+Innerloop" - } - else { - contextString += " ${scenario}" - triggerString += "\\W+${scenario}" - } - - if (configuration == 'Debug') { - contextString += " Build" - triggerString += "\\W+Build" - } else { - contextString += " Build and Test" - triggerString += "\\W+Build and Test" - } - - triggerString += ".*" - - switch (os) { - case 'Ubuntu': - if (architecture == 'armlb') { // No arm legacy backend testing for Ubuntu - break - } - - if (scenario == 'innerloop') { - if (configuration == 'Checked') { - Utilities.addGithubPRTriggerForBranch(job, branch, contextString) - } - } - else { - Utilities.addGithubPRTriggerForBranch(job, branch, contextString, triggerString) - } - break - - case 'Windows_NT': - if (architecture == "armlb") { - // Disable armlb windows jobs - break - } - switch (scenario) { - case 'innerloop': - // Only Checked is an innerloop trigger. - if (configuration == 'Checked') - { - Utilities.addPrivateGithubPRTriggerForBranch(job, branch, contextString, triggerString, null, arm64Users) - } - break - case 'normal': - Utilities.addPrivateGithubPRTriggerForBranch(job, branch, contextString, triggerString, null, arm64Users) - break - default: - // Stress jobs will use this code path. 
- if (isArmWindowsScenario(scenario)) { - Utilities.addPrivateGithubPRTriggerForBranch(job, branch, contextString, triggerString, null, arm64Users) - } - break - } - break - default: - println("NYI os: ${os}"); - assert false - break - } - break - // editor brace matching: } - case 'arm64': // editor brace matching: { - // Set up a private trigger - def contextString = "${os} ${architecture} Cross ${configuration}" - def triggerString = "(?i).*test\\W+${os}\\W+${architecture}\\W+Cross\\W+${configuration}" - - if (scenario == 'innerloop') { - contextString += " Innerloop" - triggerString += "\\W+Innerloop" - } - else { - contextString += " ${scenario}" - triggerString += "\\W+${scenario}" - } - - if (configuration == 'Debug') { - contextString += " Build" - triggerString += "\\W+Build" - } else { - contextString += " Build and Test" - triggerString += "\\W+Build and Test" - } - - triggerString += ".*" - - switch (os) { - case 'Ubuntu': - case 'Ubuntu16.04': - switch (scenario) { - case 'innerloop': - if (configuration == 'Debug' && !isFlowJob) { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} Cross ${configuration} Innerloop Build", triggerString) - } - - break - case 'normal': - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Build and Test", triggerString) - break - default: - if (isR2RScenario(scenario)) { - if (configuration == 'Checked' || configuration == 'Release') { - def displayStr = getR2RDisplayName(scenario) - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} ${displayStr} Build and Test", triggerString) - } - } - break - } - break - - case 'Windows_NT': - // Triggers on the non-flow jobs aren't necessary here - if (!isFlowJob) { - break - } - - assert isArmWindowsScenario(scenario) - switch (scenario) { - case 'innerloop': - if (configuration == 'Checked') { - Utilities.addPrivateGithubPRTriggerForBranch(job, branch, contextString, triggerString, null, arm64Users) - } - - break - case 'normal': - Utilities.addPrivateGithubPRTriggerForBranch(job, branch, contextString, triggerString, null, arm64Users) - break - default: - // Stress jobs will use this code path. 
- if (isArmWindowsScenario(scenario)) { - Utilities.addPrivateGithubPRTriggerForBranch(job, branch, contextString, triggerString, null, arm64Users) - } - break - } - break - default: - println("NYI os: ${os}"); - assert false - break - } - break - - // editor brace matching: } - case 'x86': // editor brace matching: { - assert ((os == 'Windows_NT') || ((os == 'Ubuntu') && isNormalOrInnerloop)) - if (os == 'Ubuntu') { - // Triggers on the non-flow jobs aren't necessary here - if (!isFlowJob) { - break - } - - // on-demand only for ubuntu x86 - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Build", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}.*") - break - - } - switch (scenario) { - case 'innerloop': - if (configuration == 'Checked' || configuration == 'Release') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Innerloop Build and Test") - } - break - - case 'normal': - if (configuration == 'Checked') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Build and Test", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+Build and Test.*") - } - break - - case 'ilrt': - if (configuration == 'Release') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} IL RoundTrip Build and Test", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - } - break - - case 'longgc': - if (configuration == 'Release') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Long-Running GC Build & Test", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - } - break - - case 'gcsimulator': - if (configuration == 'Release') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} GC Simulator", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - } - break - - case 'standalone_gc': - if (configuration == 'Release' || configuration == 'Checked') { - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Standalone GC", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - } - break - - case 'illink': - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} via ILLink", "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - break - - default: - if (isJitStressScenario(scenario)) { - def displayStr = getStressModeDisplayName(scenario) - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Build and Test (Jit - ${displayStr})", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - } - else if (isR2RScenario(scenario)) { - if (configuration == 'Release' || configuration == 'Checked') { - def displayStr = getR2RDisplayName(scenario) - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} ${displayStr} Build & Test", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - } - } - else { - println("Unknown scenario: ${os} ${architecture} ${scenario}"); - assert false - } - break - - } - break - - // editor brace matching: } - case 'x64_arm64_altjit': - case 'x86_arm_altjit': // editor brace matching: { - assert (os == 'Windows_NT') - switch (scenario) { - case 'normal': - 
Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} Build and Test", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+Build and Test.*") - break - default: - Utilities.addGithubPRTriggerForBranch(job, branch, "${os} ${architecture} ${configuration} ${scenario}", - "(?i).*test\\W+${os}\\W+${architecture}\\W+${configuration}\\W+${scenario}.*") - break - } - break - - // editor brace matching: } - default: - println("Unknown architecture: ${architecture}"); - assert false - break - } -} - -def static calculateBuildCommands(def newJob, def scenario, def branch, def isPR, def architecture, def configuration, def os, def isBuildOnly) { - def buildCommands = [] - def osGroup = getOSGroup(os) - def lowerConfiguration = configuration.toLowerCase() - - def priority = '1' - if (scenario == 'innerloop') { - priority = '0' - } - - def doCoreFxTesting = isCoreFxScenario(scenario) - - // Calculate the build steps, archival, and xunit results - switch (os) { - case 'Windows_NT': // editor brace matching: { - switch (architecture) { - case 'x64': - case 'x86': - case 'x86_arm_altjit': - case 'x64_arm64_altjit': - def arch = architecture - def buildOpts = '' - if (architecture == 'x86_arm_altjit') { - arch = 'x86' - } - else if (architecture == 'x64_arm64_altjit') { - arch = 'x64' - } - - if (scenario == 'formatting') { - buildCommands += "python -u tests\\scripts\\format.py -c %WORKSPACE% -o Windows_NT -a ${arch}" - Utilities.addArchival(newJob, "format.patch", "", true, false) - break - } - - if (scenario == 'illink') { - buildCommands += "tests\\scripts\\build_illink.cmd clone ${arch}" - } - - // If it is a release build for Windows, ensure PGO is used, else fail the build. - if ((lowerConfiguration == 'release') && - (scenario in Constants.basicScenarios) && - (architecture != 'x86_arm_altjit') && - (architecture != 'x64_arm64_altjit')) { - - buildOpts += ' -enforcepgo' - } - - if (doCoreFxTesting) { - buildOpts += ' skiptests'; - } else { - buildOpts += " -priority=${priority}" - } - - // Set __TestIntermediateDir to something short. If __TestIntermediateDir is already set, build-test.cmd will - // output test binaries to that directory. If it is not set, the binaries are sent to a default directory whose name is about - // 35 characters long. - - buildCommands += "set __TestIntermediateDir=int&&build.cmd ${lowerConfiguration} ${arch} ${buildOpts}" - - if (!isBuildOnly) { - def runtestArguments = '' - def testOpts = 'collectdumps' - - if (isR2RScenario(scenario)) { - - // If this is a ReadyToRun scenario, pass 'crossgen' or 'crossgenaltjit' - // to cause framework assemblies to be crossgen'ed. Pass 'runcrossgentests' - // to cause the tests to be crossgen'ed. - - if ((architecture == 'x86_arm_altjit') || (architecture == 'x64_arm64_altjit')) { - testOpts += ' crossgenaltjit protononjit.dll' - } else { - testOpts += ' crossgen' - } - - testOpts += ' runcrossgentests' - } - else if (scenario == 'jitdiff') { - testOpts += ' jitdisasm crossgen' - } - else if (scenario == 'ilrt') { - testOpts += ' ilasmroundtrip' - } - else if (isLongGc(scenario)) { - testOpts += " ${scenario} sequential" - } - else if (scenario == 'standalone_gc') { - testOpts += ' gcname clrgc.dll' - } - else if (scenario == 'illink') { - testOpts += " link %WORKSPACE%\\linker\\linker\\bin\\netcore_Release\\netcoreapp2.0\\win10-${arch}\\publish\\illink.exe" - } - - // Default per-test timeout is 10 minutes. 
For stress modes and Debug scenarios, increase this - // to 30 minutes (30 * 60 * 1000 = 1800000). The "timeout" argument to runtest.cmd sets this, by - // taking a timeout value in milliseconds. (Note that it sets the __TestTimeout environment variable, - // which is read by the xunit harness.) - if (isJitStressScenario(scenario) || isR2RStressScenario(scenario) || (lowerConfiguration == 'debug')) - { - def timeout = 1800000 - testOpts += " timeout ${timeout}" - } - - // If we are running a stress mode, we should write out the set of key - // value env pairs to a file at this point and then we'll pass that to runtest.cmd - - def envScriptPath = '' - if (isJitStressScenario(scenario) || isR2RStressScenario(scenario)) { - def buildCommandsStr = '' - envScriptPath = "%WORKSPACE%\\SetStressModes.bat" - buildCommandsStr += envScriptCreate(os, envScriptPath) - - if (isJitStressScenario(scenario)) { - buildCommandsStr += envScriptSetStressModeVariables(os, Constants.jitStressModeScenarios[scenario], envScriptPath) - } - else if (isR2RStressScenario(scenario)) { - buildCommandsStr += envScriptSetStressModeVariables(os, Constants.r2rStressScenarios[scenario], envScriptPath) - } - - if (architecture == 'x86_arm_altjit') { - buildCommandsStr += envScriptAppendExistingScript(os, "%WORKSPACE%\\tests\\x86_arm_altjit.cmd", envScriptPath) - } - else if (architecture == 'x64_arm64_altjit') { - buildCommandsStr += envScriptAppendExistingScript(os, "%WORKSPACE%\\tests\\x64_arm64_altjit.cmd", envScriptPath) - } - - envScriptFinalize(os, envScriptPath) - - // Note that buildCommands is an array of individually executed commands; we want all the commands used to - // create the SetStressModes.bat script to be executed together, hence we accumulate them as strings - // into a single script.
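// Editor's sketch (hypothetical values, not taken from Constants.jitStressModeScenarios): a stress
// scenario is just a map of environment-variable name/value pairs, and the generated SetStressModes.bat
// ends up as a series of plain "set name=value" lines built from that map, roughly like this:
def exampleStressVars = ['COMPlus_JitStress': '2', 'COMPlus_JitStressRegs': '0x10']
def exampleScriptBody = exampleStressVars.collect { name, value -> "set ${name}=${value}" }.join("\r\n")
assert exampleScriptBody == "set COMPlus_JitStress=2\r\nset COMPlus_JitStressRegs=0x10"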
- buildCommands += buildCommandsStr - } - else if (architecture == 'x86_arm_altjit') { - envScriptPath = "%WORKSPACE%\\tests\\x86_arm_altjit.cmd" - } - else if (architecture == 'x64_arm64_altjit') { - envScriptPath = "%WORKSPACE%\\tests\\x64_arm64_altjit.cmd" - } - if (envScriptPath != '') { - testOpts += " TestEnv ${envScriptPath}" - } - - runtestArguments = "${lowerConfiguration} ${arch} ${testOpts}" - - if (doCoreFxTesting) { - def workspaceRelativeFxRoot = "_/fx" - def absoluteFxRoot = "%WORKSPACE%\\_\\fx" - - buildCommands += "python -u %WORKSPACE%\\tests\\scripts\\run-corefx-tests.py -arch ${arch} -ci_arch ${architecture} -build_type ${configuration} -fx_root ${absoluteFxRoot} -fx_branch ${branch} -env_script ${envScriptPath}" - - // Archive and process (only) the test results - Utilities.addArchival(newJob, "${workspaceRelativeFxRoot}/bin/**/testResults.xml") - Utilities.addXUnitDotNETResults(newJob, "${workspaceRelativeFxRoot}/bin/**/testResults.xml") - - //Archive additional build stuff to diagnose why my attempt at fault injection isn't causing CI to fail - Utilities.addArchival(newJob, "SetStressModes.bat", "", true, false) - Utilities.addArchival(newJob, "${workspaceRelativeFxRoot}/bin/testhost/**", "", true, false) - } - else if (isGcReliabilityFramework(scenario)) { - buildCommands += "tests\\runtest.cmd ${runtestArguments} GenerateLayoutOnly" - buildCommands += "tests\\scripts\\run-gc-reliability-framework.cmd ${arch} ${configuration}" - } - else { - buildCommands += "tests\\runtest.cmd ${runtestArguments}" - } - } // end if (!isBuildOnly) - - if (!doCoreFxTesting) { - // Run the rest of the build - // Build the mscorlib for the other OS's - buildCommands += "build.cmd ${lowerConfiguration} ${arch} linuxmscorlib" - buildCommands += "build.cmd ${lowerConfiguration} ${arch} osxmscorlib" - - if (arch == 'x64') { - buildCommands += "build.cmd ${lowerConfiguration} arm64 linuxmscorlib" - } - - // Zip up the tests directory so that we don't use so much space/time copying - // 10s of thousands of files around. - buildCommands += "powershell -NoProfile -Command \"Add-Type -Assembly 'System.IO.Compression.FileSystem'; [System.IO.Compression.ZipFile]::CreateFromDirectory('.\\bin\\tests\\${osGroup}.${arch}.${configuration}', '.\\bin\\tests\\tests.zip')\""; - - if (!isJitStressScenario(scenario)) { - // For Windows, pull full test results and test drops for x86/x64. - // No need to pull for stress mode scenarios (downstream builds use the default scenario) - Utilities.addArchival(newJob, "bin/Product/**,bin/tests/tests.zip", "bin/Product/**/.nuget/**") - } - - if (scenario == 'jitdiff') { - // retrieve jit-dasm output for base commit, and run jit-diff - if (!isBuildOnly) { - // if this is a build only job, we want to keep the default (build) artifacts for the flow job - Utilities.addArchival(newJob, "bin/tests/${osGroup}.${arch}.${configuration}/dasm/**") - } - } - - if (!isBuildOnly) { - Utilities.addXUnitDotNETResults(newJob, 'bin/**/TestRun*.xml', true) - } - } - break - case 'armlb': - case 'arm': - assert isArmWindowsScenario(scenario) - - def buildArchitecture = 'arm' - - def buildOpts = '' - - // For 'armlb' (the JIT LEGACY_BACKEND architecture for arm), tell build.cmd to use legacy backend for crossgen compilation. - // Legacy backend is not the default JIT; it is an aljit. So, this is a special case. - if (architecture == 'armlb') { - buildOpts += ' -crossgenaltjit legacyjit.dll' - } - - if (doCoreFxTesting) { - // We shouldn't need to build the tests. 
However, run-corefx-tests.py currently depends on having the restored corefx - // package available, to determine the correct corefx version git commit hash, and we need to build the tests before - // running "tests\\runtest.cmd GenerateLayoutOnly". So build the pri-0 tests to make this happen. - // - // buildOpts += ' skiptests'; - buildOpts += " -priority=0" - } else { - buildOpts += " -priority=${priority}" - } - - // This is now a build only job. Do not run tests. Use the flow job. - buildCommands += "set __TestIntermediateDir=int&&build.cmd ${lowerConfiguration} ${buildArchitecture} ${buildOpts}" - - if (doCoreFxTesting) { - assert isBuildOnly - assert architecture == 'arm' - - // Generate the test layout because it restores the corefx package which allows run-corefx-tests.py - // to determine the correct matching corefx version git commit hash. - buildCommands += "tests\\runtest.cmd ${lowerConfiguration} ${architecture} GenerateLayoutOnly" - - // Set the stress mode variables; this is incorporated into the generated CoreFx RunTests.cmd files. - def envScriptPath = '' - def buildCommandsStr = '' - envScriptPath = "%WORKSPACE%\\SetStressModes.bat" - buildCommandsStr += envScriptCreate(os, envScriptPath) - buildCommandsStr += envScriptSetStressModeVariables(os, Constants.jitStressModeScenarios[scenario], envScriptPath) - envScriptFinalize(os, envScriptPath) - buildCommands += buildCommandsStr - - def workspaceRelativeFxRootLinux = "_/fx" - def workspaceRelativeFxRootWin = "_\\fx" - def absoluteFxRoot = "%WORKSPACE%\\_\\fx" - - buildCommands += "python -u %WORKSPACE%\\tests\\scripts\\run-corefx-tests.py -arch ${architecture} -ci_arch ${architecture} -build_type ${configuration} -fx_root ${absoluteFxRoot} -fx_branch ${branch} -env_script ${envScriptPath} -no_run_tests" - - // Zip up the CoreFx runtime and tests. We don't need the CoreCLR binaries; they have been copied to the CoreFX tree. - buildCommands += "powershell -NoProfile -Command \"Add-Type -Assembly 'System.IO.Compression.FileSystem'; [System.IO.Compression.ZipFile]::CreateFromDirectory('${workspaceRelativeFxRootWin}\\bin\\testhost\\netcoreapp-Windows_NT-Release-arm', '${workspaceRelativeFxRootWin}\\fxruntime.zip')\""; - buildCommands += "powershell -NoProfile -Command \"Add-Type -Assembly 'System.IO.Compression.FileSystem'; [System.IO.Compression.ZipFile]::CreateFromDirectory('${workspaceRelativeFxRootWin}\\bin\\tests', '${workspaceRelativeFxRootWin}\\fxtests.zip')\""; - - Utilities.addArchival(newJob, "${workspaceRelativeFxRootLinux}/fxruntime.zip") - Utilities.addArchival(newJob, "${workspaceRelativeFxRootLinux}/fxtests.zip") - } else { - // Zip up the tests directory so that we don't use so much space/time copying - // 10s of thousands of files around. - buildCommands += "powershell -NoProfile -Command \"Add-Type -Assembly 'System.IO.Compression.FileSystem'; [System.IO.Compression.ZipFile]::CreateFromDirectory('.\\bin\\tests\\${osGroup}.${buildArchitecture}.${configuration}', '.\\bin\\tests\\tests.zip')\""; - - // Add archival. - Utilities.addArchival(newJob, "bin/Product/**,bin/tests/tests.zip", "bin/Product/**/.nuget/**") - } - break - case 'arm64': - assert isArmWindowsScenario(scenario) - - // This is now a build only job. Do not run tests. Use the flow job. - buildCommands += "set __TestIntermediateDir=int&&build.cmd ${lowerConfiguration} ${architecture} toolset_dir C:\\ats2 -priority=${priority}" - - // Zip up the tests directory so that we don't use so much space/time copying - // 10s of thousands of files around. 
- buildCommands += "powershell -NoProfile -Command \"Add-Type -Assembly 'System.IO.Compression.FileSystem'; [System.IO.Compression.ZipFile]::CreateFromDirectory('.\\bin\\tests\\${osGroup}.${architecture}.${configuration}', '.\\bin\\tests\\tests.zip')\""; - - // Add archival. - Utilities.addArchival(newJob, "bin/Product/**,bin/tests/tests.zip", "bin/Product/**/.nuget/**") - break - default: - println("Unknown architecture: ${architecture}"); - assert false - break - } - break - // end case 'Windows_NT'; editor brace matching: } - case 'Ubuntu': - case 'Ubuntu16.04': - case 'Ubuntu16.10': - case 'Debian8.4': - case 'OSX10.12': - case 'CentOS7.1': - case 'RHEL7.2': - case 'Tizen': - case 'Fedora24': // editor brace matching: { - switch (architecture) { - case 'x64': - case 'x86': - if (architecture == 'x86' && os == 'Ubuntu') { - // build and PAL test - def dockerImage = getDockerImageName(architecture, os, true) - buildCommands += "docker run -i --rm -v \${WORKSPACE}:/opt/code -w /opt/code -e ROOTFS_DIR=/crossrootfs/x86 ${dockerImage} ./build.sh ${architecture} cross ${lowerConfiguration}" - dockerImage = getDockerImageName(architecture, os, false) - buildCommands += "docker run -i --rm -v \${WORKSPACE}:/opt/code -w /opt/code ${dockerImage} ./src/pal/tests/palsuite/runpaltests.sh /opt/code/bin/obj/${osGroup}.${architecture}.${configuration} /opt/code/bin/paltestout" - Utilities.addArchival(newJob, "bin/Product/**,bin/obj/*/tests/**/*.so", "bin/Product/**/.nuget/**") - Utilities.addXUnitDotNETResults(newJob, '**/pal_tests.xml') - break - } - - if (scenario == 'formatting') { - buildCommands += "python tests/scripts/format.py -c \${WORKSPACE} -o Linux -a ${architecture}" - Utilities.addArchival(newJob, "format.patch", "", true, false) - break - } - - if (scenario == 'illink') { - assert(os == 'Ubuntu') - buildCommands += "./tests/scripts/build_illink.sh --clone --arch=${architecture}" - } - - if (!doCoreFxTesting) { - // We run pal tests on all OS but generate mscorlib (and thus, nuget packages) - // only on supported OS platforms. - def bootstrapRid = Utilities.getBoostrapPublishRid(os) - def bootstrapRidEnv = bootstrapRid != null ? 
"__PUBLISH_RID=${bootstrapRid} " : '' - - buildCommands += "${bootstrapRidEnv}./build.sh verbose ${lowerConfiguration} ${architecture}" - buildCommands += "src/pal/tests/palsuite/runpaltests.sh \${WORKSPACE}/bin/obj/${osGroup}.${architecture}.${configuration} \${WORKSPACE}/bin/paltestout" - - // Basic archiving of the build - Utilities.addArchival(newJob, "bin/Product/**,bin/obj/*/tests/**/*.dylib,bin/obj/*/tests/**/*.so", "bin/Product/**/.nuget/**") - // And pal tests - Utilities.addXUnitDotNETResults(newJob, '**/pal_tests.xml') - } - else { - // Corefx stress testing - assert os == 'Ubuntu' - assert architecture == 'x64' - assert lowerConfiguration == 'checked' - assert isJitStressScenario(scenario) - - // Build coreclr - buildCommands += "./build.sh verbose ${lowerConfiguration} ${architecture}" - - def scriptFileName = "\$WORKSPACE/set_stress_test_env.sh" - - def envScriptCmds = envScriptCreate(os, scriptFileName) - envScriptCmds += envScriptSetStressModeVariables(os, Constants.jitStressModeScenarios[scenario], scriptFileName) - envScriptCmds += envScriptFinalize(os, scriptFileName) - buildCommands += envScriptCmds - - // Build and text corefx - def workspaceRelativeFxRoot = "_/fx" - def absoluteFxRoot = "\$WORKSPACE/${workspaceRelativeFxRoot}" - - buildCommands += "python -u \$WORKSPACE/tests/scripts/run-corefx-tests.py -arch ${architecture} -ci_arch ${architecture} -build_type ${configuration} -fx_root ${absoluteFxRoot} -fx_branch ${branch} -env_script ${scriptFileName}" - - // Archive and process (only) the test results - Utilities.addArchival(newJob, "${workspaceRelativeFxRoot}/bin/**/testResults.xml") - Utilities.addXUnitDotNETResults(newJob, "${workspaceRelativeFxRoot}/bin/**/testResults.xml") - } - break - case 'arm64': - if (!doCoreFxTesting) { - buildCommands += "ROOTFS_DIR=/opt/arm64-xenial-rootfs ./build.sh verbose ${lowerConfiguration} ${architecture} cross clang3.8" - - // HACK -- Arm64 does not have corefx jobs yet. - buildCommands += "git clone https://github.com/dotnet/corefx fx" - buildCommands += "ROOTFS_DIR=/opt/arm64-xenial-rootfs-corefx ./fx/build-native.sh -release -buildArch=arm64 -- verbose cross clang3.8" - buildCommands += "mkdir ./bin/Product/Linux.arm64.${configuration}/corefxNative" - buildCommands += "cp fx/bin/Linux.arm64.Release/native/* ./bin/Product/Linux.arm64.${configuration}/corefxNative" - - // Basic archiving of the build - Utilities.addArchival(newJob, "bin/Product/**,bin/obj/*/tests/**/*.dylib,bin/obj/*/tests/**/*.so", "bin/Product/**/.nuget/**") - } - break - case 'armem': - // Emulator cross builds for ARM runs on Ubuntu, Ubuntu16.04 and Tizen currently - assert (os == 'Ubuntu') || (os == 'Ubuntu16.04') || (os == 'Tizen') - - // default values for Ubuntu - def arm_abi = "arm" - def linuxCodeName = "trusty" - if (os == 'Ubuntu16.04') { - linuxCodeName = "xenial" - } - else if (os == 'Tizen') { - arm_abi = "armel" - linuxCodeName = "tizen" - } - - // Unzip the Windows test binaries first. Exit with 0 - buildCommands += "unzip -q -o ./bin/tests/tests.zip -d ./bin/tests/Windows_NT.x64.${configuration} || exit 0" - - // Unpack the corefx binaries - buildCommands += "mkdir ./bin/CoreFxBinDir" - buildCommands += "tar -xf ./bin/build.tar.gz -C ./bin/CoreFxBinDir" - if (os != 'Tizen') { - buildCommands += "chmod a+x ./bin/CoreFxBinDir/corerun" - } - // Test environment emulation using docker and qemu has some problem to use lttng library. - // We should remove libcoreclrtraceptprovider.so to avoid test hang. 
- if (os == 'Ubuntu') { - buildCommands += "rm -f -v ./bin/CoreFxBinDir/libcoreclrtraceptprovider.so" - } - - // Call the ARM CI script to cross build and test using docker - buildCommands += """./tests/scripts/arm32_ci_script.sh \\ - --mode=docker \\ - --${arm_abi} \\ - --linuxCodeName=${linuxCodeName} \\ - --buildConfig=${lowerConfiguration} \\ - --testRootDir=./bin/tests/Windows_NT.x64.${configuration} \\ - --coreFxBinDir=./bin/CoreFxBinDir \\ - --testDirFile=./tests/testsRunningInsideARM.txt""" - - // Basic archiving of the build, no pal tests - Utilities.addArchival(newJob, "bin/Product/**,bin/obj/*/tests/**/*.dylib,bin/obj/*/tests/**/*.so", "bin/Product/**/.nuget/**") - break - case 'arm': - // Non-Windows ARM cross builds on hardware run on Ubuntu only - assert (os == 'Ubuntu') - - // Add some useful information to the log file. Ignore return codes. - buildCommands += "uname -a || true" - - // Cross build the Ubuntu/arm product using docker with a docker image that contains the correct - // Ubuntu cross-compilation toolset (running on a Ubuntu x64 host). - - def dockerImage = getDockerImageName(architecture, os, true) - def dockerCmd = "docker run -i --rm -v \${WORKSPACE}:\${WORKSPACE} -w \${WORKSPACE} -e ROOTFS_DIR=/crossrootfs/arm -e CAC_ROOTFS_DIR=/crossrootfs/x86 ${dockerImage} " - - buildCommands += "${dockerCmd}\${WORKSPACE}/build.sh ${lowerConfiguration} ${architecture} cross crosscomponent" - - // Then, using the same docker image, generate the CORE_ROOT layout using build-test.sh to - // download the appropriate CoreFX packages. - // Note that docker should not be necessary here, for the "generatelayoutonly" case, but we use it - // just to be consistent with the "build.sh" case -- so both are run with the same environment. - - buildCommands += "${dockerCmd}\${WORKSPACE}/build-test.sh ${lowerConfiguration} ${architecture} cross generatelayoutonly" - - // ZIP up for the test job (created in the flow job code): - // (1) the built CORE_ROOT, /home/user/coreclr/bin/tests/Linux.arm.Checked/Tests/Core_Root, - // used by runtest.sh as the "--coreOverlayDir" argument. - // (2) the native parts of the test build: /home/user/coreclr/bin/obj/Linux.arm.Checked/tests, - // used by runtest.sh as the "--testNativeBinDir" argument. - - // These commands are assumed to be run from the root of the workspace. - buildCommands += "zip -r coreroot.${lowerConfiguration}.zip ./bin/tests/Linux.arm.${configuration}/Tests/Core_Root" - buildCommands += "zip -r testnativebin.${lowerConfiguration}.zip ./bin/obj/Linux.arm.${configuration}/tests" - - Utilities.addArchival(newJob, "coreroot.${lowerConfiguration}.zip,testnativebin.${lowerConfiguration}.zip", "") - - // We need to clean up the build machines; the docker build leaves newly built files with root permission, which - // the cleanup task in Jenkins can't remove. - newJob.with { - publishers { - azureVMAgentPostBuildAction { - agentPostBuildAction('Delete agent after build execution (when idle).') - } - } - } - - break - default: - println("Unknown architecture: ${architecture}"); - assert false - break - } - break - // editor brace matching: } - default: - println("Unknown os: ${os}"); - assert false - break - } // os - - return buildCommands -} - -// Determine if we should generate a job for the given parameters. This is for non-flow jobs: either build and test, or build only. -// Returns true if the job should be generated. 
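// Editor's note (added commentary): shouldGenerateJob is the filter applied to every non-flow
// (build, or build-and-test) point of the scenario/PR/architecture/configuration/OS matrix enumerated
// further below; returning false simply means no Jenkins job is emitted for that combination, so the
// per-architecture and per-scenario rules that follow are restrictions on the matrix rather than job
// definitions in their own right.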
-def static shouldGenerateJob(def scenario, def isPR, def architecture, def configuration, def os, def isBuildOnly) -{ - // The "innerloop" (Pri-0 testing) scenario is only available as PR triggered. - // All other scenarios do Pri-1 testing. - if (scenario == 'innerloop' && !isPR) { - return false - } - - // Tizen is only supported for armem architecture - if (os == 'Tizen' && architecture != 'armem') { - return false - } - - // Filter based on architecture. - - switch (architecture) { - case 'arm64': - case 'arm': - if ((os != 'Windows_NT') && (os != 'Ubuntu')) { - return false - } - break - case 'armem': - if ((os != 'Ubuntu') && (os != 'Ubuntu16.04') && (os != 'Tizen')) { - return false - } - break - case 'armlb': - // Do not create armlb jobs - return false - case 'x86_arm_altjit': - case 'x64_arm64_altjit': - if (os != 'Windows_NT') { - return false - } - break - case 'x86': - if ((os != 'Windows_NT') && (os != 'Ubuntu')) { - return false - } - break - case 'x64': - // Everything implemented - break - default: - println("Unknown architecture: ${architecture}") - assert false - break - } - - // Which (Windows) build only jobs are required? - - def isNormalOrInnerloop = (scenario == 'innerloop' || scenario == 'normal') - - if (isBuildOnly) { - switch (architecture) { - case 'arm': - // We use build only jobs for Windows arm cross-compilation corefx testing, so we need to generate builds for that. - if (!isCoreFxScenario(scenario)) { - return false - } - break - case 'x64': - case 'x86': - if (!isNormalOrInnerloop) { - return false - } - break - default: - return false - } - } - - // Filter based on scenario. - - if (isJitStressScenario(scenario)) { - if (configuration != 'Checked') { - return false - } - - def isEnabledOS = (os == 'Windows_NT') || (os == 'Ubuntu' && architecture == 'arm') || (os == 'Ubuntu' && isCoreFxScenario(scenario)) - if (!isEnabledOS) { - return false - } - - switch (architecture) { - case 'x64': - case 'x86_arm_altjit': - case 'x64_arm64_altjit': - break - - case 'x86': - // x86 ubuntu: no stress modes - if (os == 'Ubuntu') { - return false - } - break - - case 'arm': - // We use build only jobs for Windows arm cross-compilation corefx testing, so we need to generate builds for that. - if (! (isBuildOnly && isCoreFxScenario(scenario)) ) { - return false - } - break - - default: - // arm64, armlb: stress is handled through flow jobs. - // armem: no stress jobs for ARM emulator. - return false - } - } - else if (isR2RScenario(scenario)) { - if (os != 'Windows_NT') { - return false - } - // Stress scenarios only run with Checked builds, not Release (they would work with Debug, but be slow). - if ((configuration != 'Checked') && isR2RStressScenario(scenario)) { - return false - } - } - else { - // Skip scenarios - switch (scenario) { - case 'ilrt': - // The ilrt build isn't necessary except for Windows_NT2003. 
Non-Windows NT uses - // the default scenario build - if (os != 'Windows_NT') { - return false - } - // Only x64 for now - if (architecture != 'x64') { - return false - } - // Release only - if (configuration != 'Release') { - return false - } - break - case 'jitdiff': - if (os != 'Windows_NT' && os != 'Ubuntu' && os != 'OSX10.12') { - return false - } - if (architecture != 'x64') { - return false - } - if (configuration != 'Checked') { - return false - } - break - case 'longgc': - case 'gcsimulator': - if (os != 'Windows_NT' && os != 'Ubuntu' && os != 'OSX10.12') { - return false - } - if (architecture != 'x64') { - return false - } - if (configuration != 'Release') { - return false - } - break - case 'gc_reliability_framework': - case 'standalone_gc': - if (os != 'Windows_NT' && os != 'Ubuntu' && os != 'OSX10.12') { - return false - } - - if (architecture != 'x64') { - return false - } - - if (configuration != 'Release' && configuration != 'Checked') { - return false - } - break - // We only run Windows and Ubuntu x64 Checked for formatting right now - case 'formatting': - if (os != 'Windows_NT' && os != 'Ubuntu') { - return false - } - if (architecture != 'x64') { - return false - } - if (configuration != 'Checked') { - return false - } - break - case 'illink': - if (os != 'Windows_NT' && (os != 'Ubuntu' || architecture != 'x64')) { - return false - } - if (architecture != 'x64' && architecture != 'x86') { - return false - } - break - case 'normal': - // Nothing skipped - break - case 'innerloop': - if (!isValidPrTriggeredInnerLoopJob(os, architecture, configuration, isBuildOnly)) { - return false - } - break - default: - println("Unknown scenario: ${scenario}") - assert false - break - } - } - - // For altjit, don't do any scenarios that don't change compilation. That is, scenarios that only change - // runtime behavior, not compile-time behavior, are not interesting. - switch (architecture) { - case 'x86_arm_altjit': - case 'x64_arm64_altjit': - if (isGCStressRelatedTesting(scenario)) { - return false - } - break - default: - break - } - - // The job was not filtered out, so we should generate it! - return true -} - -Constants.allScenarios.each { scenario -> - [true, false].each { isPR -> - Constants.architectureList.each { architecture -> - Constants.configurationList.each { configuration -> - Constants.osList.each { os -> - // If the OS is Windows_NT_BuildOnly, set the isBuildOnly flag to true - // and reset the os to Windows_NT - def isBuildOnly = false - if (os == 'Windows_NT_BuildOnly') { - isBuildOnly = true - os = 'Windows_NT' - } - - if (!shouldGenerateJob(scenario, isPR, architecture, configuration, os, isBuildOnly)) { - return - } - - // Calculate names - def jobName = getJobName(configuration, architecture, os, scenario, isBuildOnly) - def folderName = getJobFolder(scenario) - - // Create the new job - def newJob = job(Utilities.getFullJobName(project, jobName, isPR, folderName)) {} - addToViews(newJob, isPR, architecture, os) - - setJobMachineAffinity(architecture, os, true, false, false, newJob) // isBuildJob = true, isTestJob = false, isFlowJob = false - - Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}") - addTriggers(newJob, branch, isPR, architecture, os, configuration, scenario, false, isBuildOnly) // isFlowJob==false - setJobTimeout(newJob, isPR, architecture, configuration, scenario, isBuildOnly) - - // Copy Windows build test binaries and corefx build artifacts for Linux cross build for armem. 
- // We don't use a flow job for this, but we do depend on there being existing builds with these - // artifacts produced. - if (architecture == 'armem' && (os == 'Ubuntu' || os == 'Ubuntu16.04' || os == 'Tizen')) { - // Define the Windows Tests and Corefx build job names - def lowerConfiguration = configuration.toLowerCase() - def WindowsTestsName = projectFolder + '/' + - Utilities.getFullJobName(project, - getJobName(lowerConfiguration, 'x64' , 'windows_nt', 'normal', true), - false) - def corefxFolder = Utilities.getFolderName('dotnet/corefx') + '/' + - Utilities.getFolderName(branch) - - def arm_abi = 'arm' - def corefx_os = 'linux' - if (os == 'Tizen') { - arm_abi = 'armel' - corefx_os = 'tizen' - } - - // Let's use release CoreFX to test checked CoreCLR, - // because we do not generate checked CoreFX in CoreFX CI yet. - def corefx_lowerConfiguration = lowerConfiguration - if (lowerConfiguration == 'checked') { - corefx_lowerConfiguration = 'release' - } - - // Copy the Windows test binaries and the Corefx build binaries - newJob.with { - steps { - copyArtifacts(WindowsTestsName) { - includePatterns('bin/tests/tests.zip') - buildSelector { - latestSuccessful(true) - } - } - copyArtifacts("${corefxFolder}/${corefx_os}_${arm_abi}_cross_${corefx_lowerConfiguration}") { - includePatterns('bin/build.tar.gz') - buildSelector { - latestSuccessful(true) - } - } - } // steps - } // newJob.with - } - - def buildCommands = calculateBuildCommands(newJob, scenario, branch, isPR, architecture, configuration, os, isBuildOnly) - - newJob.with { - steps { - if (os == 'Windows_NT') { - buildCommands.each { buildCommand -> - batchFile(buildCommand) - } - } - else { - buildCommands.each { buildCommand -> - shell(buildCommand) - } - } - } // steps - } // newJob.with - - } // os - } // configuration - } // architecture - } // isPR -} // scenario - -// Create a Windows ARM/ARMLB/ARM64 test job that will be used by a flow job. -// Returns the newly created job. -def static CreateWindowsArmTestJob(def dslFactory, def project, def architecture, def os, def configuration, def scenario, def isPR, def inputCoreCLRBuildName) -{ - def osGroup = getOSGroup(os) - def jobName = getJobName(configuration, architecture, os, scenario, false) + "_tst" - - def jobFolder = getJobFolder(scenario) - def newJob = dslFactory.job(Utilities.getFullJobName(project, jobName, isPR, jobFolder)) { - parameters { - stringParam('CORECLR_BUILD', '', "Build number to copy CoreCLR ${osGroup} binaries from") - } - - steps { - // Set up the copies - - // Coreclr build we are trying to test - // - // ** NOTE ** This will, correctly, overwrite the CORE_ROOT from the Windows test archive - - copyArtifacts(inputCoreCLRBuildName) { - excludePatterns('**/testResults.xml', '**/*.ni.dll') - buildSelector { - buildNumber('${CORECLR_BUILD}') - } - } - - if (isCoreFxScenario(scenario)) { - - // Only arm supported for corefx testing now. - assert architecture == 'arm' - - // Unzip CoreFx runtime - batchFile("powershell -NoProfile -Command \"Add-Type -Assembly 'System.IO.Compression.FileSystem'; [System.IO.Compression.ZipFile]::ExtractToDirectory('_\\fx\\fxruntime.zip', '_\\fx\\bin\\testhost\\netcoreapp-Windows_NT-Release-arm')") - - // Unzip CoreFx tests. 
- batchFile("powershell -NoProfile -Command \"Add-Type -Assembly 'System.IO.Compression.FileSystem'; [System.IO.Compression.ZipFile]::ExtractToDirectory('_\\fx\\fxtests.zip', '_\\fx\\bin\\tests')") - - // Add the script to run the corefx tests - def corefx_runtime_path = "%WORKSPACE%\\_\\fx\\bin\\testhost\\netcoreapp-Windows_NT-Release-arm" - def corefx_tests_dir = "%WORKSPACE%\\_\\fx\\bin\\tests" - def corefx_exclusion_file = "%WORKSPACE%\\tests\\arm\\corefx_test_exclusions.txt" - batchFile("call %WORKSPACE%\\tests\\scripts\\run-corefx-tests.bat ${corefx_runtime_path} ${corefx_tests_dir} ${corefx_exclusion_file}") - - } else { // !isCoreFxScenario(scenario) - - // Unzip tests. - batchFile("powershell -NoProfile -Command \"Add-Type -Assembly 'System.IO.Compression.FileSystem'; [System.IO.Compression.ZipFile]::ExtractToDirectory('bin\\tests\\tests.zip', 'bin\\tests\\${osGroup}.${architecture}.${configuration}')") - - def buildCommands = "" - - def coreRootLocation = "%WORKSPACE%\\bin\\tests\\Windows_NT.${architecture}.${configuration}\\Tests\\Core_Root" - def addEnvVariable = { variable, value -> buildCommands += "set ${variable}=${value}\r\n"} - def addCommand = { cmd -> buildCommands += "${cmd}\r\n"} - - // Make sure Command Extensions are enabled. Used so %ERRORLEVEL% is available. - addCommand("SETLOCAL ENABLEEXTENSIONS") - - // For all jobs - addEnvVariable("CORE_ROOT", coreRootLocation) - addEnvVariable("COMPlus_NoGuiOnAssert", "1") - addEnvVariable("COMPlus_ContinueOnAssert", "0") - - // ARM legacy backend; this is an altjit. - if (architecture == 'armlb') { - addEnvVariable("COMPlus_AltJit", "*") - addEnvVariable("COMPlus_AltJitNgen", "*") - addEnvVariable("COMPlus_AltJitName", "legacyjit.dll") - addEnvVariable("COMPlus_AltJitAssertOnNYI", "1") - } - - // If we are running a stress mode, we'll set those variables as well - if (isJitStressScenario(scenario) || isR2RStressScenario(scenario)) { - def stressValues = null - if (isJitStressScenario(scenario)) { - stressValues = Constants.jitStressModeScenarios[scenario] - } - else { - stressValues = Constants.r2rStressScenarios[scenario] - } - - stressValues.each { key, value -> - addEnvVariable(key, value) - } - } - - if (isR2RScenario(scenario)) { - // Crossgen the framework assemblies. - buildCommands += """ -@for %%F in (%CORE_ROOT%\\*.dll) do @call :PrecompileAssembly "%CORE_ROOT%" "%%F" %%~nxF -@goto skip_PrecompileAssembly - -:PrecompileAssembly -@REM Skip mscorlib since it is already precompiled. -@if /I "%3" == "mscorlib.dll" exit /b 0 -@if /I "%3" == "mscorlib.ni.dll" exit /b 0 - -"%CORE_ROOT%\\crossgen.exe" /Platform_Assemblies_Paths "%CORE_ROOT%" %2 >nul 2>nul -@if "%errorlevel%" == "-2146230517" ( - echo %2 is not a managed assembly. -) else if "%errorlevel%" == "-2146234344" ( - echo %2 is not a managed assembly. -) else if %errorlevel% neq 0 ( - echo Unable to precompile %2 -) else ( - echo Precompiled %2 -) -@exit /b 0 - -:skip_PrecompileAssembly -""" - - // Set RunCrossGen variable to cause test wrappers to invoke their logic to run - // crossgen on tests before running them. 
- addEnvVariable("RunCrossGen", "true") - } // isR2RScenario(scenario) - - // Create the smarty command - def smartyCommand = "C:\\Tools\\Smarty.exe /noecid /noie /workers 9 /inc EXPECTED_PASS " - def addSmartyFlag = { flag -> smartyCommand += flag + " "} - def addExclude = { exclude -> addSmartyFlag("/exc " + exclude)} - def addArchSpecificExclude = { architectureToExclude, exclude -> if (architectureToExclude == "armlb") { addExclude("LEGACYJIT_" + exclude) } else { addExclude(exclude) } } - - if (architecture == 'armlb') { - addExclude("LEGACYJIT_FAIL") - } - - // Exclude tests based on scenario. - Constants.validArmWindowsScenarios[scenario].each { excludeTag -> - addArchSpecificExclude(architecture, excludeTag) - } - - // Innerloop jobs run Pri-0 tests; everyone else runs Pri-1. - if (scenario == 'innerloop') { - addExclude("pri1") - } - - // Exclude any test marked LONG_RUNNING; these often exceed the standard timeout and fail as a result. - // TODO: We should create a "long running" job that runs these with a longer timeout. - addExclude("LONG_RUNNING") - - smartyCommand += "/lstFile Tests.lst" - - def testListArch = [ - 'arm64': 'arm64', - 'arm': 'arm', - 'armlb': 'arm' - ] - - def archLocation = testListArch[architecture] - - addCommand("copy %WORKSPACE%\\tests\\${archLocation}\\Tests.lst bin\\tests\\${osGroup}.${architecture}.${configuration}") - addCommand("pushd bin\\tests\\${osGroup}.${architecture}.${configuration}") - addCommand("${smartyCommand}") - - // Save the errorlevel from the smarty command to be used as the errorlevel of this batch file. - // However, we also need to remove all the variables that were set during this batch file, so we - // can run the ZIP powershell command (below) in a clean environment. (We can't run the powershell - // command with the COMPlus_AltJit variables set, for example.) To do that, we do ENDLOCAL as well - // as save the current errorlevel on the same line. This works because CMD evaluates the %errorlevel% - // variable expansion (or any variable expansion on the line) BEFORE it executes the ENDLOCAL command. - // Note that the ENDLOCAL also undoes the pushd command, but we add the popd here for clarity. - addCommand("popd & ENDLOCAL & set __save_smarty_errorlevel=%errorlevel%") - - // ZIP up the smarty output, no matter what the smarty result. - addCommand("powershell -NoProfile -Command \"Add-Type -Assembly 'System.IO.Compression.FileSystem'; [System.IO.Compression.ZipFile]::CreateFromDirectory('.\\bin\\tests\\${osGroup}.${architecture}.${configuration}\\Smarty.run.0', '.\\bin\\tests\\${osGroup}.${architecture}.${configuration}\\Smarty.run.0.zip')\"") - - addCommand("echo %errorlevel%") - addCommand("dir .\\bin\\tests\\${osGroup}.${architecture}.${configuration}") - - // Use the smarty errorlevel as the script errorlevel. - addCommand("exit /b %__save_smarty_errorlevel%") - - batchFile(buildCommands) - } // non-corefx testing - } // steps - } // job - - if (!isCoreFxScenario(scenario)) { - Utilities.addArchival(newJob, "bin/tests/${osGroup}.${architecture}.${configuration}/Smarty.run.0/*.smrt", '', true, false) - - // Archive a ZIP file of the entire Smarty.run.0 directory. This is possibly a little too much, - // but there is no easy way to only archive the HTML/TXT files of the failing tests, so we get - // all the passing test info as well. Not necessarily a bad thing, but possibly somewhat large. 
- Utilities.addArchival(newJob, "bin/tests/${osGroup}.${architecture}.${configuration}/Smarty.run.0.zip", '', true, false) - } - - return newJob -} - -// Create a test job not covered by the "Windows ARM" case that will be used by a flow job. -// E.g., non-Windows tests. -// Returns the newly created job. -def static CreateOtherTestJob(def dslFactory, def project, def branch, def architecture, def os, def configuration, def scenario, def isPR, def inputCoreCLRBuildName, def inputTestsBuildName) -{ - def isUbuntuArmJob = ((os == "Ubuntu") && (architecture == 'arm')) // ARM Ubuntu running on hardware (not emulator) - - def osGroup = getOSGroup(os) - def jobName = getJobName(configuration, architecture, os, scenario, false) + "_tst" - - def testOpts = '' - def useServerGC = false - - // Enable Server GC for Ubuntu PR builds - // REVIEW: why? Does this apply to all architectures? Why only PR? - if (os == 'Ubuntu' && isPR) { - testOpts += ' --useServerGC' - useServerGC = true - } - - if (isR2RScenario(scenario)) { - - testOpts += ' --crossgen --runcrossgentests' - - if (scenario == 'r2r_jitstress1') { - testOpts += ' --jitstress=1' - } - else if (scenario == 'r2r_jitstress2') { - testOpts += ' --jitstress=2' - } - else if (scenario == 'r2r_jitstressregs1') { - testOpts += ' --jitstressregs=1' - } - else if (scenario == 'r2r_jitstressregs2') { - testOpts += ' --jitstressregs=2' - } - else if (scenario == 'r2r_jitstressregs3') { - testOpts += ' --jitstressregs=3' - } - else if (scenario == 'r2r_jitstressregs4') { - testOpts += ' --jitstressregs=4' - } - else if (scenario == 'r2r_jitstressregs8') { - testOpts += ' --jitstressregs=8' - } - else if (scenario == 'r2r_jitstressregs0x10') { - testOpts += ' --jitstressregs=0x10' - } - else if (scenario == 'r2r_jitstressregs0x80') { - testOpts += ' --jitstressregs=0x80' - } - else if (scenario == 'r2r_jitstressregs0x1000') { - testOpts += ' --jitstressregs=0x1000' - } - else if (scenario == 'r2r_jitminopts') { - testOpts += ' --jitminopts' - } - else if (scenario == 'r2r_jitforcerelocs') { - testOpts += ' --jitforcerelocs' - } - else if (scenario == 'r2r_gcstress15') { - testOpts += ' --gcstresslevel=0xF' - } - } - else if (scenario == 'jitdiff') { - testOpts += ' --jitdisasm --crossgen' - } - else if (scenario == 'illink') { - testOpts += ' --link=\$WORKSPACE/linker/linker/bin/netcore_Release/netcoreapp2.0/ubuntu-x64/publish/illink' - } - else if (isLongGc(scenario)) { - // Long GC tests behave very poorly when they are not - // the only test running (many of them allocate until OOM). - testOpts += ' --sequential' - - // A note - runtest.sh does have "--long-gc" and "--gcsimulator" options - // for running long GC and GCSimulator tests, respectively. We don't use them - // here because using a playlist file produces much more readable output on the CI machines - // and reduces running time. - // - // The Long GC playlist contains all of the tests that are - // going to be run. The GCSimulator playlist contains all of - // the GC simulator tests. 
- if (scenario == 'longgc') { - testOpts += ' --long-gc --playlist=./tests/longRunningGcTests.txt' - } - else if (scenario == 'gcsimulator') { - testOpts += ' --gcsimulator --playlist=./tests/gcSimulatorTests.txt' - } - } - else if (isGcReliabilityFramework(scenario)) { - testOpts += ' --build-overlay-only' - } - else if (scenario == 'standalone_gc') { - if (osGroup == 'OSX') { - testOpts += ' --gcname=libclrgc.dylib' - } - else if (osGroup == 'Linux') { - testOpts += ' --gcname=libclrgc.so' - } - else { - println("Unexpected OS group: ${osGroup} for os ${os}") - assert false - } - } - - def jobFolder = getJobFolder(scenario) - def newJob = dslFactory.job(Utilities.getFullJobName(project, jobName, isPR, jobFolder)) { - parameters { - stringParam('CORECLR_WINDOWS_BUILD', '', 'Build number to copy CoreCLR Windows test binaries from') - stringParam('CORECLR_BUILD', '', "Build number to copy CoreCLR ${osGroup} binaries from") - } - - steps { - // Set up the copies - - // Coreclr build containing the tests and mscorlib - // pri1 jobs still need to copy windows_nt built tests - assert inputTestsBuildName != null - copyArtifacts(inputTestsBuildName) { - excludePatterns('**/testResults.xml', '**/*.ni.dll') - buildSelector { - buildNumber('${CORECLR_WINDOWS_BUILD}') - } - } - - // Coreclr build we are trying to test - // - // ** NOTE ** This will, correctly, overwrite the CORE_ROOT from the Windows test archive - - copyArtifacts(inputCoreCLRBuildName) { - excludePatterns('**/testResults.xml', '**/*.ni.dll') - buildSelector { - buildNumber('${CORECLR_BUILD}') - } - } - - if (isUbuntuArmJob) { - // Add some useful information to the log file. Ignore return codes. - shell("uname -a || true") - } - - if (architecture == 'arm64') { - shell("mkdir -p ./bin/CoreFxBinDir") - shell("cp ./bin/Product/Linux.arm64.${configuration}/corefxNative/* ./bin/CoreFxBinDir") - shell("chmod +x ./bin/Product/Linux.arm64.${configuration}/corerun") - } - else if (architecture == 'x86') { - shell("mkdir ./bin/CoreFxNative") - - def corefxFolder = Utilities.getFolderName('dotnet/corefx') + '/' + Utilities.getFolderName(branch) - - copyArtifacts("${corefxFolder}/ubuntu16.04_x86_release") { - includePatterns('bin/build.tar.gz') - targetDirectory('bin/CoreFxNative') - buildSelector { - latestSuccessful(true) - } - } - - shell("tar -xf ./bin/CoreFxNative/bin/build.tar.gz -C ./bin/CoreFxBinDir") - } - - // Unzip the tests first. Exit with 0 - shell("unzip -q -o ./bin/tests/tests.zip -d ./bin/tests/${osGroup}.${architecture}.${configuration} || exit 0") - shell("rm -r ./bin/tests/${osGroup}.${architecture}.${configuration}/Tests/Core_Root || exit 0") - - // For arm Ubuntu (on hardware), we do the "build-test" step on the build machine, not on the test - // machine. The arm Ubuntu test machines do no building -- they have no CLI, for example. - // We should probably do the "generatelayoutonly" step on the build machine for all architectures. - // However, it's believed that perhaps there's an issue with executable permission bits not getting - // copied correctly. 
- if (isUbuntuArmJob) { - def lowerConfiguration = configuration.toLowerCase() - shell("unzip -o ./coreroot.${lowerConfiguration}.zip || exit 0") // unzips to ./bin/tests/Linux.arm.${configuration}/Tests/Core_Root - shell("unzip -o ./testnativebin.${lowerConfiguration}.zip || exit 0") // unzips to ./bin/obj/Linux.arm.${configuration}/tests - } - else { - shell("./build-test.sh ${architecture} ${configuration} generatelayoutonly") - } - - // Execute the tests - def runDocker = isNeedDocker(architecture, os, false) - def dockerPrefix = "" - def dockerCmd = "" - if (runDocker) { - def dockerImage = getDockerImageName(architecture, os, false) - dockerPrefix = "docker run -i --rm -v \${WORKSPACE}:\${WORKSPACE} -w \${WORKSPACE} " - dockerCmd = dockerPrefix + "${dockerImage} " - } - - // If we are running a stress mode, we'll set those variables first - if (isJitStressScenario(scenario)) { - def scriptFileName = "\${WORKSPACE}/set_stress_test_env.sh" - def envScriptCmds = envScriptCreate(os, scriptFileName) - envScriptCmds += envScriptSetStressModeVariables(os, Constants.jitStressModeScenarios[scenario], scriptFileName) - envScriptCmds += envScriptFinalize(os, scriptFileName) - shell("${envScriptCmds}") - testOpts += " --test-env=${scriptFileName}" - } - - // setup-stress-dependencies.sh, invoked by runtest.sh to download the coredistools package, depends on the "dotnet" - // tool downloaded by the "init-tools.sh" script. However, it only invokes setup-stress-dependencies.sh for x64. The - // coredistools package is used by GCStress on x86 and x64 to disassemble code to determine instruction boundaries. - // On arm/arm64, it is not required as determining instruction boundaries is trivial. - if (isGCStressRelatedTesting(scenario)) { - if (architecture == 'x64') { - shell('./init-tools.sh') - } - } - - def runScript = "${dockerCmd}./tests/runtest.sh" - - shell("""\ -${runScript} \\ - --testRootDir=\"\${WORKSPACE}/bin/tests/${osGroup}.${architecture}.${configuration}\" \\ - --coreOverlayDir=\"\${WORKSPACE}/bin/tests/${osGroup}.${architecture}.${configuration}/Tests/Core_Root\" \\ - --testNativeBinDir=\"\${WORKSPACE}/bin/obj/${osGroup}.${architecture}.${configuration}/tests\" \\ - --copyNativeTestBin --limitedDumpGeneration ${testOpts}""") - - if (isGcReliabilityFramework(scenario)) { - // runtest.sh doesn't actually execute the reliability framework - do it here. - if (useServerGC) { - if (runDocker) { - dockerCmd = dockerPrefix + "-e COMPlus_gcServer=1 ${dockerImage} " - } - else { - shell("export COMPlus_gcServer=1") - } - } - - shell("${dockerCmd}./tests/scripts/run-gc-reliability-framework.sh ${architecture} ${configuration}") - } - } // steps - } // job - - // Experimental: If on Ubuntu 14.04, then attempt to pull in crash dump links - if (os in ['Ubuntu']) { - SummaryBuilder summaries = new SummaryBuilder() - summaries.addLinksSummaryFromFile('Crash dumps from this run:', 'dumplings.txt') - summaries.emit(newJob) - } - - Utilities.addArchival(newJob, "bin/tests/${osGroup}.${architecture}.${configuration}/coreclrtests.*.txt") - Utilities.addXUnitDotNETResults(newJob, '**/coreclrtests.xml') - - return newJob -} - -// Create a test job that will be used by a flow job. -// Returns the newly created job. 
-def static CreateTestJob(def dslFactory, def project, def branch, def architecture, def os, def configuration, def scenario, def isPR, def inputCoreCLRBuildName, def inputTestsBuildName) -{ - def windowsArmJob = ((os == "Windows_NT") && (architecture in Constants.armWindowsCrossArchitectureList)) - - def newJob = null - if (windowsArmJob) { - assert inputTestsBuildName == null - newJob = CreateWindowsArmTestJob(dslFactory, project, architecture, os, configuration, scenario, isPR, inputCoreCLRBuildName) - } else { - newJob = CreateOtherTestJob(dslFactory, project, branch, architecture, os, configuration, scenario, isPR, inputCoreCLRBuildName, inputTestsBuildName) - } - - setJobMachineAffinity(architecture, os, false, true, false, newJob) // isBuildJob = false, isTestJob = true, isFlowJob = false - - addToViews(newJob, isPR, architecture, os) - - if (scenario == 'jitdiff') { - def osGroup = getOSGroup(os) - Utilities.addArchival(newJob, "bin/tests/${osGroup}.${architecture}.${configuration}/dasm/**") - } - - Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}") - setJobTimeout(newJob, isPR, architecture, configuration, scenario, false) - - return newJob -} - -// Create a flow job to tie together a build job with the given test job. -// Returns the new flow job. -def static CreateFlowJob(def dslFactory, def project, def branch, def architecture, def os, def configuration, def scenario, def isPR, def fullTestJobName, def inputCoreCLRBuildName, def inputTestsBuildName) -{ - // Windows CoreCLR build and Linux CoreCLR build (in parallel) -> - // Linux CoreCLR test - def flowJobName = getJobName(configuration, architecture, os, scenario, false) + "_flow" - def jobFolder = getJobFolder(scenario) - - def newFlowJob = null - - def windowsArmJob = ((os == "Windows_NT") && (architecture in Constants.armWindowsCrossArchitectureList)) - if (windowsArmJob) { - - assert inputTestsBuildName == null - - // For Windows arm jobs there is no reason to build a parallel test job. - // The product build supports building and archiving the tests. 
- - newFlowJob = dslFactory.buildFlowJob(Utilities.getFullJobName(project, flowJobName, isPR, jobFolder)) { - buildFlow("""\ -coreclrBuildJob = build(params, '${inputCoreCLRBuildName}') - -// And then build the test build -build(params + [CORECLR_BUILD: coreclrBuildJob.build.number], '${fullTestJobName}') -""") - } - JobReport.Report.addReference(inputCoreCLRBuildName) - JobReport.Report.addReference(fullTestJobName) - } - else { - newFlowJob = dslFactory.buildFlowJob(Utilities.getFullJobName(project, flowJobName, isPR, jobFolder)) { - buildFlow("""\ -// Build the input jobs in parallel -parallel ( -{ coreclrBuildJob = build(params, '${inputCoreCLRBuildName}') }, -{ windowsBuildJob = build(params, '${inputTestsBuildName}') } -) - -// And then build the test build -build(params + [CORECLR_BUILD: coreclrBuildJob.build.number, - CORECLR_WINDOWS_BUILD: windowsBuildJob.build.number], '${fullTestJobName}') -""") - } - JobReport.Report.addReference(inputCoreCLRBuildName) - JobReport.Report.addReference(inputTestsBuildName) - JobReport.Report.addReference(fullTestJobName) - } - - addToViews(newFlowJob, isPR, architecture, os) - - setJobMachineAffinity(architecture, os, false, false, true, newFlowJob) // isBuildJob = false, isTestJob = false, isFlowJob = true - - Utilities.standardJobSetup(newFlowJob, project, isPR, "*/${branch}") - addTriggers(newFlowJob, branch, isPR, architecture, os, configuration, scenario, true, false) // isFlowJob==true, isWindowsBuildOnlyJob==false - - return newFlowJob -} - -// Determine if we should generate a flow job for the given parameters. -// Returns true if the job should be generated. -def static shouldGenerateFlowJob(def scenario, def isPR, def architecture, def configuration, def os) -{ - // The "innerloop" (Pri-0 testing) scenario is only available as PR triggered. - // All other scenarios do Pri-1 testing. - if (scenario == 'innerloop' && !isPR) { - return false - } - - // Filter based on OS and architecture. - - switch (architecture) { - case 'arm64': - if (os != "Ubuntu" && os != "Windows_NT") { - return false - } - break - case 'armlb': - if (os != 'Windows_NT') { - return false - } - // Do not create armlb windows jobs. - return false - case 'arm': - if (os != "Ubuntu" && os != "Windows_NT") { - return false - } - break - case 'x86': - if (os != "Ubuntu") { - return false - } - break - case 'x64': - if (!(os in Constants.crossList)) { - return false - } - if (os == "Windows_NT") { - return false - } - break - case 'armem': - case 'x86_arm_altjit': - case 'x64_arm64_altjit': - // No flow jobs - return false - default: - println("Unknown architecture: ${architecture}") - assert false - break - } - - def isNormalOrInnerloop = (scenario == 'innerloop' || scenario == 'normal') - - // Filter based on scenario in OS. - - if (os == 'Windows_NT') { - if (!isArmWindowsScenario(scenario)) { - return false - } - } - else { - // Non-Windows - if (architecture == 'arm64') { - if (!(scenario in Constants.validLinuxArm64Scenarios)) { - return false - } - } - else if (architecture == 'arm') { - if (!(scenario in Constants.validLinuxArmScenarios)) { - return false - } - } - else if (architecture == 'x86') { - // Linux/x86 only want innerloop and default test - if (!isNormalOrInnerloop) { - return false - } - } - } - - // For CentOS, we only want Checked/Release builds. 
- if (os == 'CentOS7.1') { - if (configuration != 'Checked' && configuration != 'Release') { - return false - } - if (!isNormalOrInnerloop && !isR2RScenario(scenario) && !isJitStressScenario(scenario)) { - return false - } - } - - // For RedHat and Debian, we only do Release builds. - else if (os == 'RHEL7.2' || os == 'Debian8.4') { - if (configuration != 'Release') { - return false - } - if (!isNormalOrInnerloop) { - return false - } - } - - // Next, filter based on scenario. - - if (isJitStressScenario(scenario)) { - if (configuration != 'Checked') { - return false - } - - // CoreFx JIT stress tests currently only implemented for Windows ARM. - if (isCoreFxScenario(scenario) && !( (architecture == 'arm') && (os == 'Windows_NT') )) { - return false - } - } - else if (isR2RBaselineScenario(scenario)) { - if (configuration != 'Checked' && configuration != 'Release') { - return false - } - } - else if (isR2RStressScenario(scenario)) { - if (configuration != 'Checked') { - return false - } - } - else { - // Skip scenarios - switch (scenario) { - case 'ilrt': - case 'longgc': - case 'gcsimulator': - // Long GC tests take a long time on non-Release builds - // ilrt is also Release only - if (configuration != 'Release') { - return false - } - break - - case 'jitdiff': - if (configuration != 'Checked') { - return false - } - break - - case 'gc_reliability_framework': - case 'standalone_gc': - if (configuration != 'Release' && configuration != 'Checked') { - return false - } - break - - case 'formatting': - return false - case 'illink': - if (os != 'Windows_NT' && os != 'Ubuntu') { - return false - } - break - - case 'normal': - // Nothing skipped - break - - case 'innerloop': - // Nothing skipped - if (!isValidPrTriggeredInnerLoopJob(os, architecture, configuration, false)) { - return false - } - break - - default: - println("Unknown scenario: ${scenario}") - assert false - break - } - } - - // The job was not filtered out, so we should generate it! - return true -} - -// Create jobs requiring flow jobs. This includes x64 non-Windows, arm/arm64 Ubuntu, and arm/arm64/armlb Windows. -// Note: no armlb non-Windows; we expect to deprecate/remove armlb soon, so don't want to add new testing for it. -Constants.allScenarios.each { scenario -> - [true, false].each { isPR -> - Constants.architectureList.each { architecture -> - Constants.configurationList.each { configuration -> - Constants.osList.each { os -> - - if (!shouldGenerateFlowJob(scenario, isPR, architecture, configuration, os)) { - return - } - - // Figure out the job name of the CoreCLR build the test will depend on. - - def inputCoreCLRBuildScenario = scenario == 'innerloop' ? 'innerloop' : 'normal' - def inputCoreCLRBuildIsBuildOnly = false - if (isCoreFxScenario(scenario)) { - // Every CoreFx test depends on its own unique build. - inputCoreCLRBuildScenario = scenario - inputCoreCLRBuildIsBuildOnly = true - } - def inputCoreCLRFolderName = getJobFolder(inputCoreCLRBuildScenario) - def inputCoreCLRBuildName = projectFolder + '/' + - Utilities.getFullJobName(project, getJobName(configuration, architecture, os, inputCoreCLRBuildScenario, inputCoreCLRBuildIsBuildOnly), isPR, inputCoreCLRFolderName) - - // Figure out the name of the build job that the test job will depend on. - // For Windows ARM tests, this is not used, as the CoreCLR build creates the tests. For other - // tests (e.g., Linux ARM), we depend on a Windows build to get the tests. 
- - def inputTestsBuildName = null - - def windowsArmJob = ((os == "Windows_NT") && (architecture in Constants.armWindowsCrossArchitectureList)) - if (!windowsArmJob) { - def testBuildScenario = scenario == 'innerloop' ? 'innerloop' : 'normal' - - def inputTestsBuildArch = architecture - if (architecture == "arm64") { - // Use the x64 test build for arm64 unix - inputTestsBuildArch = "x64" - } - else if (architecture == "arm") { - // Use the x86 test build for arm unix - inputTestsBuildArch = "x86" - } - - def inputTestsBuildIsBuildOnly = true - - inputTestsBuildName = projectFolder + '/' + - Utilities.getFullJobName(project, getJobName(configuration, inputTestsBuildArch, 'windows_nt', testBuildScenario, inputTestsBuildIsBuildOnly), isPR) - } - - // ============================================================================================= - // Create the test job - // ============================================================================================= - - def testJob = CreateTestJob(this, project, branch, architecture, os, configuration, scenario, isPR, inputCoreCLRBuildName, inputTestsBuildName) - - // ============================================================================================= - // Create a build flow to join together the build and tests required to run this test. - // ============================================================================================= - - if (os == 'RHEL7.2' || os == 'Debian8.4') { - // Do not create the flow job for RHEL jobs. - return - } - - def fullTestJobName = projectFolder + '/' + testJob.name - def flowJob = CreateFlowJob(this, project, branch, architecture, os, configuration, scenario, isPR, fullTestJobName, inputCoreCLRBuildName, inputTestsBuildName) - - } // os - } // configuration - } // architecture - } // isPR -} // scenario - -JobReport.Report.generateJobReport(out) - -// Make the call to generate the help job -Utilities.createHelperJob(this, project, branch, - "Welcome to the ${project} Repository", // This is prepended to the help message - "Have a nice day!") // This is appended to the help message. You might put known issues here. - -Utilities.addCROSSCheck(this, project, branch) diff --git a/perf.groovy b/perf.groovy deleted file mode 100644 index e406590826d4..000000000000 --- a/perf.groovy +++ /dev/null @@ -1,868 +0,0 @@ -// Import the utility functionality. - -import jobs.generation.*; - -def project = GithubProject -def branch = GithubBranchName -def projectName = Utilities.getFolderName(project) -def projectFolder = projectName + '/' + Utilities.getFolderName(branch) -def branchFolder = Utilities.getFolderName(branch) - -def static getOSGroup(def os) { - def osGroupMap = ['Ubuntu14.04':'Linux', - 'RHEL7.2': 'Linux', - 'Ubuntu16.04': 'Linux', - 'Debian8.4':'Linux', - 'Fedora24':'Linux', - 'OSX':'OSX', - 'Windows_NT':'Windows_NT', - 'FreeBSD':'FreeBSD', - 'CentOS7.1': 'Linux', - 'OpenSUSE13.2': 'Linux', - 'OpenSUSE42.1': 'Linux', - 'LinuxARMEmulator': 'Linux'] - def osGroup = osGroupMap.get(os, null) - assert osGroup != null : "Could not find os group for ${os}" - return osGroupMap[os] -} - -// Setup perflab tests runs -[true, false].each { isPR -> - ['Windows_NT'].each { os -> - ['x64', 'x86'].each { arch -> - [true, false].each { isSmoketest -> - ['ryujit'].each { jit -> - ['full_opt'].each { opt_level -> - - def architecture = arch - def jobName = isSmoketest ? 
"perf_perflab_${os}_${arch}_${opt_level}_${jit}_smoketest" : "perf_perflab_${os}_${arch}_${opt_level}_${jit}" - def testEnv = "" - def python = "C:\\Python35\\python.exe" - - def newJob = job(Utilities.getFullJobName(project, jobName, isPR)) { - // Set the label. - if (isSmoketest) { - label('Windows.10.Amd64.ClientRS4.DevEx.15.8.Open') - python = "C:\\python3.7.0\\python.exe" - } - else { - label('windows_server_2016_clr_perf') - } - wrappers { - credentialsBinding { - string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas') - } - } - - if (isPR) { - parameters { - stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that you will be used to build the full title of a run in Benchview. The final name will be of the form private BenchviewCommitName') - } - } - - if (isSmoketest) { - parameters { - stringParam('XUNIT_PERFORMANCE_MAX_ITERATION', '2', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enough to get a good sample') - stringParam('XUNIT_PERFORMANCE_MAX_ITERATION_INNER_SPECIFIED', '2', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enough to get a good sample') - } - } - else{ - parameters { - stringParam('XUNIT_PERFORMANCE_MAX_ITERATION', '21', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enough to get a good sample') - stringParam('XUNIT_PERFORMANCE_MAX_ITERATION_INNER_SPECIFIED', '21', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enough to get a good sample') - } - } - - def configuration = 'Release' - def runType = isPR ? 'private' : 'rolling' - def benchViewName = isPR ? 'coreclr private %BenchviewCommitName%' : 'coreclr rolling %GIT_BRANCH_WITHOUT_ORIGIN% %GIT_COMMIT%' - def uploadString = isSmoketest ? 
'' : '-uploadToBenchview' - - steps { - // Batch - - batchFile("powershell -NoProfile wget https://dist.nuget.org/win-x86-commandline/latest/nuget.exe -OutFile \"%WORKSPACE%\\nuget.exe\"") - batchFile("if exist \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\" rmdir /s /q \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\"") - batchFile("\"%WORKSPACE%\\nuget.exe\" install Microsoft.BenchView.JSONFormat -Source http://benchviewtestfeed.azurewebsites.net/nuget -OutputDirectory \"%WORKSPACE%\" -Prerelease -ExcludeVersion") - //Do this here to remove the origin but at the front of the branch name as this is a problem for BenchView - //we have to do it all as one statement because cmd is called each time and we lose the set environment variable - batchFile("if \"%GIT_BRANCH:~0,7%\" == \"origin/\" (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH:origin/=%\") else (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH%\")\n" + - "set \"BENCHVIEWNAME=${benchViewName}\"\n" + - "set \"BENCHVIEWNAME=%BENCHVIEWNAME:\"=\"\"%\"\n" + - "${python} \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\submission-metadata.py\" --name \"%BENCHVIEWNAME%\" --user-email \"dotnet-bot@microsoft.com\"\n" + - "${python} \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\build.py\" git --branch %GIT_BRANCH_WITHOUT_ORIGIN% --type ${runType}") - batchFile("${python} \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\machinedata.py\"") - batchFile("set __TestIntermediateDir=int&&build.cmd ${configuration} ${architecture}") - - batchFile("tests\\runtest.cmd ${configuration} ${architecture} GenerateLayoutOnly") - - def runXUnitPerfCommonArgs = "-arch ${arch} -configuration ${configuration} -os ${os} -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} ${testEnv} -optLevel ${opt_level} -jitName ${jit} -outputdir \"%WORKSPACE%\\bin\\sandbox_logs\" -stabilityPrefix \"START \\\"CORECLR_PERF_RUN\\\" /B /WAIT /HIGH /AFFINITY 0x2\"" - - // Run with just stopwatch: Profile=Off - batchFile("${python} tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library") - batchFile("${python} tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality") - - // Run with the full set of counters enabled: Profile=On - if (opt_level != 'min_opt') { - batchFile("${python} tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi") - batchFile("${python} tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi") - } - } - } - - def archiveSettings = new ArchivalSettings() - archiveSettings.addFiles('bin/sandbox_logs/**') - archiveSettings.addFiles('machinedata.json') - archiveSettings.setAlwaysArchive() - - Utilities.addArchival(newJob, archiveSettings) - Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}") - - newJob.with { - logRotator { - artifactDaysToKeep(30) - daysToKeep(30) - artifactNumToKeep(200) - numToKeep(200) - } - wrappers { - timeout { - absolute(240) - } - } - } - - if (isPR) { - TriggerBuilder builder = 
TriggerBuilder.triggerOnPullRequest() - if (isSmoketest) { - builder.setGithubContext("${os} ${arch} ${opt_level} ${jit} CoreCLR Perf Tests Correctness") - } - else { - builder.setGithubContext("${os} ${arch} ${opt_level} ${jit} CoreCLR Perf Tests") - - def opts = "" - if (opt_level == 'min_opt') { - opts = '\\W+min_opts' - } - def jitt = "" - if (jit != 'ryujit') { - jitt = "\\W+${jit}" - } - - builder.triggerOnlyOnComment() - builder.setCustomTriggerPhrase("(?i).*test\\W+${os}\\W+${arch}${opts}${jitt}\\W+perf.*") - } - - builder.triggerForBranch(branch) - builder.emitTrigger(newJob) - } - else if (opt_level == 'full_opt') { - // Set a push trigger - TriggerBuilder builder = TriggerBuilder.triggerOnCommit() - builder.emitTrigger(newJob) - } - else { - // Set periodic trigger - Utilities.addPeriodicTrigger(newJob, '@daily') - } - } - } - } - } - } -} - -// Setup throughput perflab tests runs -[true, false].each { isPR -> - ['Windows_NT'].each { os -> - ['x64', 'x86'].each { arch -> - ['ryujit'].each { jit -> - [true, false].each { pgo_optimized -> - ['full_opt'].each { opt_level -> - def architecture = arch - - def python = "C:\\Python35\\python.exe" - - pgo_build = "" - pgo_test = "" - pgo_string = "pgo" - if (!pgo_optimized) { - pgo_build = " -nopgooptimize" - pgo_test = " -nopgo" - pgo_string = "nopgo" - } - - def newJob = job(Utilities.getFullJobName(project, "perf_throughput_perflab_${os}_${arch}_${opt_level}_${jit}_${pgo_string}", isPR)) { - // Set the label. - label('windows_server_2016_clr_perf') - wrappers { - credentialsBinding { - string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas') - } - } - - if (isPR) { - parameters { - stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that will be used to build the full title of a run in Benchview.') - } - } - - def configuration = 'Release' - def runType = isPR ? 'private' : 'rolling' - def benchViewName = isPR ? 
'coreclr-throughput private %BenchviewCommitName%' : 'coreclr-throughput rolling %GIT_BRANCH_WITHOUT_ORIGIN% %GIT_COMMIT%' - - steps { - // Batch - batchFile("if exist \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\" rmdir /s /q \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\"") - batchFile("if exist \"%WORKSPACE%\\Microsoft.BenchView.ThroughputBenchmarks.${architecture}.${os}\" rmdir /s /q \"%WORKSPACE%\\Microsoft.BenchView.ThroughputBenchmarks.${architecture}.${os}\"") - batchFile("C:\\Tools\\nuget.exe install Microsoft.BenchView.JSONFormat -Source http://benchviewtestfeed.azurewebsites.net/nuget -OutputDirectory \"%WORKSPACE%\" -Prerelease -ExcludeVersion") - batchFile("C:\\Tools\\nuget.exe install Microsoft.BenchView.ThroughputBenchmarks.${architecture}.${os} -Source https://dotnet.myget.org/F/dotnet-core -OutputDirectory \"%WORKSPACE%\" -Prerelease -ExcludeVersion") - //Do this here to remove the origin but at the front of the branch name as this is a problem for BenchView - //we have to do it all as one statement because cmd is called each time and we lose the set environment variable - batchFile("if \"%GIT_BRANCH:~0,7%\" == \"origin/\" (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH:origin/=%\") else (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH%\")\n" + - "set \"BENCHVIEWNAME=${benchViewName}\"\n" + - "set \"BENCHVIEWNAME=%BENCHVIEWNAME:\"=\"\"%\"\n" + - "${python} \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\submission-metadata.py\" --name \"${benchViewName}\" --user-email \"dotnet-bot@microsoft.com\"\n" + - "${python} \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\build.py\" git --branch %GIT_BRANCH_WITHOUT_ORIGIN% --type ${runType}") - batchFile("${python} \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\machinedata.py\"") - batchFile("set __TestIntermediateDir=int&&build.cmd ${configuration} ${architecture}${pgo_build} skiptests") - batchFile("${python} -u tests\\scripts\\run-throughput-perf.py -arch ${arch} -os ${os} -configuration ${configuration} -opt_level ${opt_level} -jit_name ${jit}${pgo_test} -clr_root \"%WORKSPACE%\" -assembly_root \"%WORKSPACE%\\Microsoft.BenchView.ThroughputBenchmarks.${architecture}.${os}\\lib\" -benchview_path \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" -run_type ${runType}") - } - } - - // Save machinedata.json to /artifact/bin/ Jenkins dir - def archiveSettings = new ArchivalSettings() - archiveSettings.addFiles('throughput-*.csv') - archiveSettings.setAlwaysArchive() - Utilities.addArchival(newJob, archiveSettings) - - Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}") - - if (isPR) { - def opts = "" - if (opt_level == 'min_opt') { - opts = '\\W+min_opts' - } - - def jitt = "" - if (jit != 'ryujit') { - jitt = "\\W+${jit}" - } - - def pgo_trigger = "" - if (pgo_optimized) { - pgo_trigger = "\\W+nopgo" - } - - - TriggerBuilder builder = TriggerBuilder.triggerOnPullRequest() - builder.setGithubContext("${os} ${arch} ${opt_level} ${jit} ${pgo_string} CoreCLR Throughput Perf Tests") - builder.triggerOnlyOnComment() - builder.setCustomTriggerPhrase("(?i).*test\\W+${os}\\W+${arch}${opts}${jitt}${pgo_trigger}\\W+throughput.*") - builder.triggerForBranch(branch) - builder.emitTrigger(newJob) - } - else if (opt_level == 'full_opt' && pgo_optimized) { - // Set a push trigger - TriggerBuilder builder = TriggerBuilder.triggerOnCommit() - builder.emitTrigger(newJob) - } - else { - // Set periodic trigger - Utilities.addPeriodicTrigger(newJob, '@daily') - } - } - } - } - } - } -} - -def static getFullPerfJobName(def 
project, def os, def isPR) { - return Utilities.getFullJobName(project, "perf_${os}", isPR) -} - -// Create the Linux/OSX/CentOS coreclr test leg for debug and release and each scenario -[true, false].each { isPR -> - def fullBuildJobName = Utilities.getFullJobName(project, 'perf_linux_build', isPR) - def architecture = 'x64' - def configuration = 'Release' - - // Build has to happen on RHEL7.2 (that's where we produce the bits we ship) - ['RHEL7.2'].each { os -> - def newBuildJob = job(fullBuildJobName) { - steps { - shell("./build.sh verbose ${architecture} ${configuration}") - shell("./build-test.sh generatelayoutonly ${architecture} ${configuration}") - } - } - Utilities.setMachineAffinity(newBuildJob, os, 'latest-or-auto') - Utilities.standardJobSetup(newBuildJob, project, isPR, "*/${branch}") - Utilities.addArchival(newBuildJob, "bin/Product/**,bin/obj/*/tests/**/*.dylib,bin/obj/*/tests/**/*.so,bin/tests/**", "bin/Product/**/.nuget/**") - } - - - // Actual perf testing on the following OSes - def perfOSList = ['Ubuntu16.04'] - perfOSList.each { os -> - def newJob = job(getFullPerfJobName(project, os, isPR)) { - - label('ubuntu_1604_clr_perf') - wrappers { - credentialsBinding { - string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas') - } - } - - if (isPR) { - parameters { - stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that you will be used to build the full title of a run in Benchview. The final name will be of the form private BenchviewCommitName') - } - } - - parameters { - // Cap the maximum number of iterations to 21. - stringParam('XUNIT_PERFORMANCE_MAX_ITERATION', '21', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enough to get a good sample') - stringParam('XUNIT_PERFORMANCE_MAX_ITERATION_INNER_SPECIFIED', '21', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enough to get a good sample') - stringParam('PRODUCT_BUILD', '', 'Build number from which to copy down the CoreCLR Product binaries built for Linux') - } - - def osGroup = getOSGroup(os) - def runType = isPR ? 'private' : 'rolling' - def benchViewName = isPR ? 
'coreclr private \$BenchviewCommitName' : 'coreclr rolling \$GIT_BRANCH_WITHOUT_ORIGIN \$GIT_COMMIT' - def uploadString = '-uploadToBenchview' - - def runXUnitCommonArgs = "-arch ${architecture} -os ${os} -configuration ${configuration} -stabilityPrefix \"taskset 0x00000002 nice --adjustment=-10\" -generateBenchviewData \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools\" ${uploadString} -runtype ${runType} -outputdir \"\${WORKSPACE}/bin/sandbox_logs\"" - - steps { - shell("./tests/scripts/perf-prep.sh --nocorefx --branch=${branchFolder}") - shell("./init-tools.sh") - copyArtifacts(fullBuildJobName) { - includePatterns("bin/**") - buildSelector { - buildNumber('\${PRODUCT_BUILD}') - } - } - shell("GIT_BRANCH_WITHOUT_ORIGIN=\$(echo \$GIT_BRANCH | sed \"s/[^/]*\\/\\(.*\\)/\\1 /\")\n" + - "python3.5 \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools/submission-metadata.py\" --name \" ${benchViewName} \" --user-email \"dotnet-bot@microsoft.com\"\n" + - "python3.5 \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools/build.py\" git --branch \$GIT_BRANCH_WITHOUT_ORIGIN --type ${runType}") - shell("""python3 ./tests/scripts/run-xunit-perf.py -testBinLoc bin/tests/Windows_NT.${architecture}.${configuration}/JIT/Performance/CodeQuality ${runXUnitCommonArgs}""") - } - } - - def archiveSettings = new ArchivalSettings() - archiveSettings.addFiles('bin/sandbox_logs/**') - archiveSettings.addFiles('machinedata.json') - archiveSettings.setAlwaysArchive() - - Utilities.addArchival(newJob, archiveSettings) - Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}") - - // For perf, we need to keep the run results longer - newJob.with { - // Enable the log rotator - logRotator { - artifactDaysToKeep(30) - daysToKeep(30) - artifactNumToKeep(200) - numToKeep(200) - } - wrappers { - timeout { - absolute(240) - } - } - } - } // os - - def flowJobPerfRunList = perfOSList.collect { os -> - "{ build(params + [PRODUCT_BUILD: b.build.number], '${getFullPerfJobName(project, os, isPR)}') }" - } - def newFlowJob = buildFlowJob(Utilities.getFullJobName(project, "perf_linux_flow", isPR, '')) { - if (isPR) { - parameters { - stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that you will be used to build the full title of a run in Benchview. 
The final name will be of the form private BenchviewCommitName') - } - } - buildFlow(""" -// First, build the bits on RHEL7.2 -b = build(params, '${fullBuildJobName}') - -// Then, run the perf tests -parallel( - ${flowJobPerfRunList.join(",\n ")} -) -""") - } - - Utilities.setMachineAffinity(newFlowJob, 'Windows_NT', 'latest-or-auto') - Utilities.standardJobSetup(newFlowJob, project, isPR, "*/${branch}") - - if (isPR) { - TriggerBuilder builder = TriggerBuilder.triggerOnPullRequest() - builder.setGithubContext("Linux Perf Test Flow") - builder.triggerOnlyOnComment() - builder.setCustomTriggerPhrase("(?i).*test\\W+linux\\W+perf\\W+flow.*") - builder.triggerForBranch(branch) - builder.emitTrigger(newFlowJob) - } - else { - // Set a push trigger - TriggerBuilder builder = TriggerBuilder.triggerOnCommit() - builder.emitTrigger(newFlowJob) - } - -} // isPR - -def static getFullThroughputJobName(def project, def os, def isPR) { - return Utilities.getFullJobName(project, "perf_throughput_${os}", isPR) -} - -// Create the Linux/OSX/CentOS coreclr test leg for debug and release and each scenario -[true, false].each { isPR -> - def fullBuildJobName = Utilities.getFullJobName(project, 'perf_throughput_linux_build', isPR) - def architecture = 'x64' - def configuration = 'Release' - - // Build has to happen on RHEL7.2 (that's where we produce the bits we ship) - ['RHEL7.2'].each { os -> - def newBuildJob = job(fullBuildJobName) { - steps { - shell("./build.sh verbose ${architecture} ${configuration}") - } - } - Utilities.setMachineAffinity(newBuildJob, os, 'latest-or-auto') - Utilities.standardJobSetup(newBuildJob, project, isPR, "*/${branch}") - Utilities.addArchival(newBuildJob, "bin/Product/**") - } - - // Actual perf testing on the following OSes - def throughputOSList = ['Ubuntu16.04'] - def throughputOptLevelList = ['full_opt'] - - def throughputOSOptLevelList = [] - - throughputOSList.each { os -> - throughputOptLevelList.each { opt_level -> - throughputOSOptLevelList.add("${os}_${opt_level}") - } - } - - throughputOSList.each { os -> - throughputOptLevelList.each { opt_level -> - def newJob = job(getFullThroughputJobName(project, "${os}_${opt_level}", isPR)) { - - label('ubuntu_1604_clr_perf') - wrappers { - credentialsBinding { - string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas') - } - } - - if (isPR) { - parameters { - stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that will be used to build the full title of a run in Benchview.') - } - } - - parameters { - stringParam('PRODUCT_BUILD', '', 'Build number from which to copy down the CoreCLR Product binaries built for Linux') - } - - def osGroup = getOSGroup(os) - def runType = isPR ? 'private' : 'rolling' - def benchViewName = isPR ? 
'coreclr-throughput private \$BenchviewCommitName' : 'coreclr-throughput rolling \$GIT_BRANCH_WITHOUT_ORIGIN \$GIT_COMMIT' - - steps { - shell("bash ./tests/scripts/perf-prep.sh --throughput --branch=${branchFolder}") - shell("./init-tools.sh") - copyArtifacts(fullBuildJobName) { - includePatterns("bin/Product/**") - buildSelector { - buildNumber('\${PRODUCT_BUILD}') - } - } - shell("GIT_BRANCH_WITHOUT_ORIGIN=\$(echo \$GIT_BRANCH | sed \"s/[^/]*\\/\\(.*\\)/\\1 /\")\n" + - "python3.5 \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools/submission-metadata.py\" --name \" ${benchViewName} \" --user-email \"dotnet-bot@microsoft.com\"\n" + - "python3.5 \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools/build.py\" git --branch \$GIT_BRANCH_WITHOUT_ORIGIN --type ${runType}") - shell("""python3.5 ./tests/scripts/run-throughput-perf.py \\ - -arch \"${architecture}\" \\ - -os \"${os}\" \\ - -configuration \"${configuration}\" \\ - -opt_level \"${opt_level}\" \\ - -clr_root \"\${WORKSPACE}\" \\ - -assembly_root \"\${WORKSPACE}/Microsoft.Benchview.ThroughputBenchmarks.${architecture}.Windows_NT/lib\" \\ - -run_type \"${runType}\" \\ - -benchview_path \"\${WORKSPACE}/tests/scripts/Microsoft.BenchView.JSONFormat/tools\"""") - } - } - - // Save machinedata.json to /artifact/bin/ Jenkins dir - def archiveSettings = new ArchivalSettings() - archiveSettings.addFiles('throughput-*.csv') - archiveSettings.addFiles('machinedata.json') - archiveSettings.setAlwaysArchive() - Utilities.addArchival(newJob, archiveSettings) - - Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}") - - // For perf, we need to keep the run results longer - newJob.with { - // Enable the log rotator - logRotator { - artifactDaysToKeep(7) - daysToKeep(300) - artifactNumToKeep(25) - numToKeep(1000) - } - } - } // opt_level - } // os - - def flowJobTPRunList = throughputOSOptLevelList.collect { os -> - "{ build(params + [PRODUCT_BUILD: b.build.number], '${getFullThroughputJobName(project, os, isPR)}') }" - } - def newFlowJob = buildFlowJob(Utilities.getFullJobName(project, "perf_throughput_linux_flow", isPR, '')) { - if (isPR) { - parameters { - stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that you will be used to build the full title of a run in Benchview. 
The final name will be of the form private BenchviewCommitName') - } - } - buildFlow(""" -// First, build the bits on RHEL7.2 -b = build(params, '${fullBuildJobName}') - -// Then, run the perf tests -parallel( - ${flowJobTPRunList.join(",\n ")} -) -""") - } - - Utilities.setMachineAffinity(newFlowJob, 'Windows_NT', 'latest-or-auto') - Utilities.standardJobSetup(newFlowJob, project, isPR, "*/${branch}") - - if (isPR) { - TriggerBuilder builder = TriggerBuilder.triggerOnPullRequest() - builder.setGithubContext("Linux Throughput Perf Test Flow") - builder.triggerOnlyOnComment() - builder.setCustomTriggerPhrase("(?i).*test\\W+linux\\W+throughput\\W+flow.*") - builder.triggerForBranch(branch) - builder.emitTrigger(newFlowJob) - } - else { - // Set a push trigger - TriggerBuilder builder = TriggerBuilder.triggerOnCommit() - builder.emitTrigger(newFlowJob) - } - -} // isPR - -// Setup CoreCLR-Scenarios tests -[true, false].each { isPR -> - ['Windows_NT'].each { os -> - ['x64', 'x86'].each { arch -> - ['ryujit'].each { jit -> - ['full_opt'].each { opt_level -> - def architecture = arch - def newJob = job(Utilities.getFullJobName(project, "perf_scenarios_${os}_${arch}_${opt_level}_${jit}", isPR)) { - - def testEnv = "" - def python = "C:\\Python35\\python.exe" - - // Set the label. - label('windows_server_2016_clr_perf') - wrappers { - credentialsBinding { - string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas') - } - } - - if (isPR) { - parameters { - stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that you will be used to build the full title of a run in Benchview. The final name will be of the form private BenchviewCommitName') - } - } - - parameters { - stringParam('XUNIT_PERFORMANCE_MAX_ITERATION', '1', 'Size test, one iteration is sufficient') - stringParam('XUNIT_PERFORMANCE_MAX_ITERATION_INNER_SPECIFIED', '1', 'Size test, one iteration is sufficient') - } - - def configuration = 'Release' - def runType = isPR ? 'private' : 'rolling' - def benchViewName = isPR ? 
'CoreCLR-Scenarios private %BenchviewCommitName%' : 'CoreCLR-Scenarios rolling %GIT_BRANCH_WITHOUT_ORIGIN% %GIT_COMMIT%' - def uploadString = '-uploadToBenchview' - - steps { - // Batch - batchFile("powershell -NoProfile wget https://dist.nuget.org/win-x86-commandline/latest/nuget.exe -OutFile \"%WORKSPACE%\\nuget.exe\"") - batchFile("if exist \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\" rmdir /s /q \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\"") - batchFile("\"%WORKSPACE%\\nuget.exe\" install Microsoft.BenchView.JSONFormat -Source http://benchviewtestfeed.azurewebsites.net/nuget -OutputDirectory \"%WORKSPACE%\" -Prerelease -ExcludeVersion") - - //Do this here to remove the origin but at the front of the branch name as this is a problem for BenchView - //we have to do it all as one statement because cmd is called each time and we lose the set environment variable - batchFile("if \"%GIT_BRANCH:~0,7%\" == \"origin/\" (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH:origin/=%\") else (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH%\")\n" + - "set \"BENCHVIEWNAME=${benchViewName}\"\n" + - "set \"BENCHVIEWNAME=%BENCHVIEWNAME:\"=\"\"%\"\n" + - "${python} \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\submission-metadata.py\" --name \"%BENCHVIEWNAME%\" --user-email \"dotnet-bot@microsoft.com\"\n" + - "${python} \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\build.py\" git --branch %GIT_BRANCH_WITHOUT_ORIGIN% --type ${runType}") - batchFile("${python} \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\machinedata.py\"") - batchFile("set __TestIntermediateDir=int&&build.cmd ${configuration} ${architecture}") - - batchFile("tests\\runtest.cmd ${configuration} ${architecture} GenerateLayoutOnly") - - def runXUnitPerfCommonArgs = "-arch ${arch} -configuration ${configuration} -os ${os} -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} ${testEnv} -optLevel ${opt_level} -jitName ${jit} -outputdir \"%WORKSPACE%\\bin\\sandbox_logs\" -stabilityPrefix \"START \\\"CORECLR_PERF_RUN\\\" /B /WAIT /HIGH\" -scenarioTest" - - // Profile=Off - batchFile("${python} tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\Scenario\\JitBench -group CoreCLR-Scenarios") - - // Profile=On - if (opt_level != 'min_opt') { - batchFile("${python} tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\Scenario\\JitBench -group CoreCLR-Scenarios -collectionFlags BranchMispredictions+CacheMisses+InstructionRetired") - } - } - } - - def archiveSettings = new ArchivalSettings() - archiveSettings.addFiles('bin/sandbox_logs/**') - archiveSettings.addFiles('machinedata.json') - archiveSettings.setAlwaysArchive() - - Utilities.addArchival(newJob, archiveSettings) - Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}") - - newJob.with { - logRotator { - artifactDaysToKeep(30) - daysToKeep(30) - artifactNumToKeep(200) - numToKeep(200) - } - wrappers { - timeout { - absolute(240) - } - } - } - - if (isPR) { - def opts = "" - if (opt_level == 'min_opt') { - opts = '\\W+min_opts' - } - def jitt = "" - if (jit != 'ryujit') { - jitt = "\\W+${jit}" - } - - TriggerBuilder builder = TriggerBuilder.triggerOnPullRequest() - builder.setGithubContext("${os} ${arch} ${opt_level} ${jit} Performance Scenarios Tests") - builder.triggerOnlyOnComment() - 
builder.setCustomTriggerPhrase("(?i).*test\\W+${os}\\W+${arch}${opts}${jitt}\\W+perf\\W+scenarios.*") - builder.triggerForBranch(branch) - builder.emitTrigger(newJob) - } - else if (opt_level == 'full_opt') { - // Set a push trigger - TriggerBuilder builder = TriggerBuilder.triggerOnCommit() - builder.emitTrigger(newJob) - } - else { - // Set periodic trigger - Utilities.addPeriodicTrigger(newJob, '@daily') - } - } - } - } - } -} - -// Setup size-on-disk test -['Windows_NT'].each { os -> - ['x64', 'x86'].each { arch -> - def architecture = arch - def newJob = job(Utilities.getFullJobName(project, "sizeondisk_${arch}", false)) { - label('Windows.10.Amd64.ClientRS4.DevEx.15.8.Open') - - wrappers { - credentialsBinding { - string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas') - } - } - - def channel = 'master' - def configuration = 'Release' - def runType = 'rolling' - def benchViewName = 'Dotnet Size on Disk %DATE% %TIME%' - def testBin = "%WORKSPACE%\\bin\\tests\\${os}.${architecture}.${configuration}" - def coreRoot = "${testBin}\\Tests\\Core_Root" - def benchViewTools = "%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools" - def python = "C:\\python3.7.0\\python.exe" - - steps { - // Install nuget and get BenchView tools - batchFile("powershell -NoProfile wget https://dist.nuget.org/win-x86-commandline/latest/nuget.exe -OutFile \"%WORKSPACE%\\nuget.exe\"") - batchFile("if exist \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\" rmdir /s /q \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\"") - batchFile("\"%WORKSPACE%\\nuget.exe\" install Microsoft.BenchView.JSONFormat -Source http://benchviewtestfeed.azurewebsites.net/nuget -OutputDirectory \"%WORKSPACE%\" -Prerelease -ExcludeVersion") - - // Generate submission metadata for BenchView - // Do this here to remove the origin but at the front of the branch name as this is a problem for BenchView - // we have to do it all as one statement because cmd is called each time and we lose the set environment variable - batchFile("if \"%GIT_BRANCH:~0,7%\" == \"origin/\" (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH:origin/=%\") else (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH%\")\n" + - "set \"BENCHVIEWNAME=${benchViewName}\"\n" + - "set \"BENCHVIEWNAME=%BENCHVIEWNAME:\"=\"\"%\"\n" + - "${python} \"${benchViewTools}\\submission-metadata.py\" --name \"%BENCHVIEWNAME%\" --user-email \"dotnet-bot@microsoft.com\"\n" + - "${python} \"${benchViewTools}\\build.py\" git --branch %GIT_BRANCH_WITHOUT_ORIGIN% --type ${runType}") - - // Generate machine data from BenchView - batchFile("${python} \"${benchViewTools}\\machinedata.py\"") - - // Build CoreCLR and gnerate test layout - batchFile("set __TestIntermediateDir=int&&build.cmd ${configuration} ${architecture}") - batchFile("tests\\runtest.cmd ${configuration} ${architecture} GenerateLayoutOnly") - - // Run the size on disk benchmark - batchFile("\"${coreRoot}\\CoreRun.exe\" \"${testBin}\\sizeondisk\\sodbench\\SoDBench\\SoDBench.exe\" -o \"%WORKSPACE%\\sodbench.csv\" --architecture ${arch} --channel ${channel}") - - // From sodbench.csv, create measurment.json, then submission.json - batchFile("${python} \"${benchViewTools}\\measurement.py\" csv \"%WORKSPACE%\\sodbench.csv\" --metric \"Size on Disk\" --unit \"bytes\" --better \"desc\"") - batchFile("${python} \"${benchViewTools}\\submission.py\" measurement.json --build build.json --machine-data machinedata.json --metadata submission-metadata.json --group \"Dotnet Size on Disk\" --type ${runType} --config-name ${configuration} --architecture ${arch} 
--machinepool VM --config Channel ${channel}") - - // If this is a PR, upload submission.json - batchFile("${python} \"${benchViewTools}\\upload.py\" submission.json --container coreclr") - } - } - - def archiveSettings = new ArchivalSettings() - archiveSettings.addFiles('bin/toArchive/**') - archiveSettings.addFiles('machinedata.json') - archiveSettings.setAlwaysArchive() - - Utilities.addArchival(newJob, archiveSettings) - Utilities.standardJobSetup(newJob, project, false, "*/${branch}") - - // Set the cron job here. We run nightly on each flavor, regardless of code changes - Utilities.addPeriodicTrigger(newJob, "@daily", true /*always run*/) - - newJob.with { - logRotator { - artifactDaysToKeep(30) - daysToKeep(30) - artifactNumToKeep(200) - numToKeep(200) - } - wrappers { - timeout { - absolute(240) - } - } - } - } -} - -// Setup IlLink tests -[true, false].each { isPR -> - ['Windows_NT'].each { os -> - ['x64'].each { arch -> - ['ryujit'].each { jit -> - ['full_opt'].each { opt_level -> - def architecture = arch - def newJob = job(Utilities.getFullJobName(project, "perf_illink_${os}_${arch}_${opt_level}_${jit}", isPR)) { - label('Windows.10.Amd64.ClientRS4.DevEx.15.8.Open') - - def testEnv = "" - def python = "C:\\python3.7.0\\python.exe" - wrappers { - credentialsBinding { - string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas') - } - } - - if (isPR) { - parameters { - stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that you will be used to build the full title of a run in Benchview. The final name will be of the form private BenchviewCommitName') - } - } - - parameters { - stringParam('XUNIT_PERFORMANCE_MAX_ITERATION', '1', 'Size test, one iteration is sufficient') - stringParam('XUNIT_PERFORMANCE_MAX_ITERATION_INNER_SPECIFIED', '1', 'Size test, one iteration is sufficient') - } - - def configuration = 'Release' - def runType = isPR ? 'private' : 'rolling' - def benchViewName = isPR ? 
'CoreCLR-Scenarios private %BenchviewCommitName%' : 'CoreCLR-Scenarios rolling %GIT_BRANCH_WITHOUT_ORIGIN% %GIT_COMMIT%' - def uploadString = '-uploadToBenchview' - - steps { - // Batch - batchFile("powershell -NoProfile wget https://dist.nuget.org/win-x86-commandline/latest/nuget.exe -OutFile \"%WORKSPACE%\\nuget.exe\"") - batchFile("if exist \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\" rmdir /s /q \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\"") - batchFile("\"%WORKSPACE%\\nuget.exe\" install Microsoft.BenchView.JSONFormat -Source http://benchviewtestfeed.azurewebsites.net/nuget -OutputDirectory \"%WORKSPACE%\" -Prerelease -ExcludeVersion") - - //Do this here to remove the origin but at the front of the branch name as this is a problem for BenchView - //we have to do it all as one statement because cmd is called each time and we lose the set environment variable - batchFile("if \"%GIT_BRANCH:~0,7%\" == \"origin/\" (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH:origin/=%\") else (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH%\")\n" + - "set \"BENCHVIEWNAME=${benchViewName}\"\n" + - "set \"BENCHVIEWNAME=%BENCHVIEWNAME:\"=\"\"%\"\n" + - "${python} \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\submission-metadata.py\" --name \"%BENCHVIEWNAME%\" --user-email \"dotnet-bot@microsoft.com\"\n" + - "${python} \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\build.py\" git --branch %GIT_BRANCH_WITHOUT_ORIGIN% --type ${runType}") - batchFile("${python} \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\machinedata.py\"") - batchFile("set __TestIntermediateDir=int&&build.cmd ${configuration} ${architecture}") - - batchFile("tests\\runtest.cmd ${configuration} ${architecture} GenerateLayoutOnly") - - def runXUnitPerfCommonArgs = "-arch ${arch} -configuration ${configuration} -os ${os} -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} ${testEnv} -optLevel ${opt_level} -jitName ${jit} -outputdir \"%WORKSPACE%\\bin\\sandbox_logs\" -scenarioTest" - - // Scenario: ILLink - batchFile("\"%VS140COMNTOOLS%\\..\\..\\VC\\vcvarsall.bat\" x86_amd64 && " + - "${python} tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\linkbench\\linkbench -group ILLink -nowarmup") - } - } - - def archiveSettings = new ArchivalSettings() - archiveSettings.addFiles('bin/sandbox_logs/**') - archiveSettings.addFiles('machinedata.json') - archiveSettings.setAlwaysArchive() - - // Set the label (currently we are only measuring size, therefore we are running on VM). - Utilities.addArchival(newJob, archiveSettings) - Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}") - - newJob.with { - logRotator { - artifactDaysToKeep(30) - daysToKeep(30) - artifactNumToKeep(200) - numToKeep(200) - } - wrappers { - timeout { - absolute(240) - } - } - } - - if (isPR) { - TriggerBuilder builder = TriggerBuilder.triggerOnPullRequest() - builder.setGithubContext("${os} ${arch} ${opt_level} ${jit} IlLink Tests") - builder.triggerOnlyOnComment() - builder.setCustomTriggerPhrase("(?i).*test\\W+${os}\\W+${arch}\\W+illink.*") - builder.triggerForBranch(branch) - builder.emitTrigger(newJob) - } - else { - // Set a push trigger - TriggerBuilder builder = TriggerBuilder.triggerOnCommit() - builder.emitTrigger(newJob) - } - } - } - } - } -} - -Utilities.createHelperJob(this, project, branch, - "Welcome to the ${project} Perf help", - "Have a nice day!")
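
For reference, the ReadyToRun scenario handling deleted from CreateOtherTestJob above maps each r2r_* scenario name onto an extra runtest.sh flag through a long if/else chain, always on top of the common '--crossgen --runcrossgentests' options. A minimal Groovy sketch of that same mapping as a lookup table follows; the helper name r2rScenarioOptions is illustrative only and does not exist in the repository, and the flag values are copied verbatim from the deleted code.

    // Sketch (not part of this patch): the scenario-to-flag mapping from the
    // deleted CreateOtherTestJob, expressed as a table instead of an if/else chain.
    def static r2rScenarioOptions(String scenario) {
        def extraOpts = [
            'r2r_jitstress1'         : ' --jitstress=1',
            'r2r_jitstress2'         : ' --jitstress=2',
            'r2r_jitstressregs1'     : ' --jitstressregs=1',
            'r2r_jitstressregs2'     : ' --jitstressregs=2',
            'r2r_jitstressregs3'     : ' --jitstressregs=3',
            'r2r_jitstressregs4'     : ' --jitstressregs=4',
            'r2r_jitstressregs8'     : ' --jitstressregs=8',
            'r2r_jitstressregs0x10'  : ' --jitstressregs=0x10',
            'r2r_jitstressregs0x80'  : ' --jitstressregs=0x80',
            'r2r_jitstressregs0x1000': ' --jitstressregs=0x1000',
            'r2r_jitminopts'         : ' --jitminopts',
            'r2r_jitforcerelocs'     : ' --jitforcerelocs',
            'r2r_gcstress15'         : ' --gcstresslevel=0xF'
        ]
        // Every R2R scenario runs crossgen tests; stress variants add one extra flag.
        return ' --crossgen --runcrossgentests' + extraOpts.get(scenario, '')
    }

Under that assumption, the chain in the deleted code reduces to a single 'testOpts += r2rScenarioOptions(scenario)' once isR2RScenario(scenario) has already been checked.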