diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index 5a584ea6..c558dd80 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -3,9 +3,9 @@
-
+
https://github.com/dotnet/arcade
- 65e09c040143048211dcf6b2bd69336cbf27eec6
+ 1f7eece09d5c6fc2a1319d04f6ae4b7d18455e2d
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
index 8cfee107..18397a60 100644
--- a/eng/common/build.ps1
+++ b/eng/common/build.ps1
@@ -6,6 +6,7 @@ Param(
[string][Alias('v')]$verbosity = "minimal",
[string] $msbuildEngine = $null,
[bool] $warnAsError = $true,
+ [string] $warnNotAsError = '',
[bool] $nodeReuse = $true,
[switch] $buildCheck = $false,
[switch][Alias('r')]$restore,
@@ -70,6 +71,7 @@ function Print-Usage() {
Write-Host " -excludeCIBinarylog Don't output binary log (short: -nobl)"
Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+ Write-Host " -warnNotAsError Sets a semi-colon delimited list of warning codes that should not be treated as errors"
Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio"
Write-Host " -nativeToolsOnMachine Sets the native tools on machine environment variable (indicating that the script should use native tools on machine)"
diff --git a/eng/common/build.sh b/eng/common/build.sh
index ec3e80d1..5883e53b 100755
--- a/eng/common/build.sh
+++ b/eng/common/build.sh
@@ -42,6 +42,7 @@ usage()
echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')"
echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+ echo " --warnNotAsError Sets a semi-colon delimited list of warning codes that should not be treated as errors"
echo " --buildCheck Sets /check msbuild parameter"
echo " --fromVMR Set when building from within the VMR"
echo ""
@@ -78,6 +79,7 @@ ci=false
clean=false
warn_as_error=true
+warn_not_as_error=''
node_reuse=true
build_check=false
binary_log=false
@@ -176,6 +178,10 @@ while [[ $# -gt 0 ]]; do
warn_as_error=$2
shift
;;
+ -warnnotaserror)
+ warn_not_as_error=$2
+ shift
+ ;;
-nodereuse)
node_reuse=$2
shift
diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml
index 748c4f07..66c7988f 100644
--- a/eng/common/core-templates/job/job.yml
+++ b/eng/common/core-templates/job/job.yml
@@ -26,12 +26,12 @@ parameters:
enablePublishBuildArtifacts: false
enablePublishBuildAssets: false
enablePublishTestResults: false
+ enablePublishing: false
enableBuildRetry: false
mergeTestResults: false
testRunTitle: ''
testResultsFormat: ''
name: ''
- componentGovernanceSteps: []
preSteps: []
artifactPublishSteps: []
runAsPublic: false
@@ -152,9 +152,6 @@ jobs:
- ${{ each step in parameters.steps }}:
- ${{ step }}
- - ${{ each step in parameters.componentGovernanceSteps }}:
- - ${{ step }}
-
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- template: /eng/common/core-templates/steps/cleanup-microbuild.yml
parameters:
diff --git a/eng/common/core-templates/job/onelocbuild.yml b/eng/common/core-templates/job/onelocbuild.yml
index c5788829..eefed3b6 100644
--- a/eng/common/core-templates/job/onelocbuild.yml
+++ b/eng/common/core-templates/job/onelocbuild.yml
@@ -52,13 +52,13 @@ jobs:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
- image: 1ESPT-Windows2022
+ image: 1ESPT-Windows2025
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
+ image: windows.vs2026.amd64
os: windows
steps:
diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml
index 8b5c635f..700f7711 100644
--- a/eng/common/core-templates/job/publish-build-assets.yml
+++ b/eng/common/core-templates/job/publish-build-assets.yml
@@ -74,13 +74,13 @@ jobs:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
- image: 1ESPT-Windows2022
+ image: 1ESPT-Windows2025
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: NetCore1ESPool-Publishing-Internal
- image: windows.vs2019.amd64
+ image: windows.vs2026.amd64
os: windows
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
@@ -172,17 +172,18 @@ jobs:
targetPath: '$(Build.ArtifactStagingDirectory)/MergedManifest.xml'
artifactName: AssetManifests
displayName: 'Publish Merged Manifest'
- retryCountOnTaskFailure: 10 # for any logs being locked
- sbomEnabled: false # we don't need SBOM for logs
+ retryCountOnTaskFailure: 10 # for any files being locked
+ isProduction: false # just metadata for publishing
- - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
displayName: Publish ReleaseConfigs Artifact
- pathToPublish: '$(Build.StagingDirectory)/ReleaseConfigs'
- publishLocation: Container
+ targetPath: '$(Build.StagingDirectory)/ReleaseConfigs'
artifactName: ReleaseConfigs
+ retryCountOnTaskFailure: 10 # for any files being locked
+ isProduction: false # just metadata for publishing
- ${{ if or(eq(parameters.publishAssetsImmediately, 'true'), eq(parameters.isAssetlessBuild, 'true')) }}:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
@@ -218,4 +219,5 @@ jobs:
- template: /eng/common/core-templates/steps/publish-logs.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ StageLabel: 'BuildAssetRegistry'
JobLabel: 'Publish_Artifacts_Logs'
diff --git a/eng/common/core-templates/job/renovate.yml b/eng/common/core-templates/job/renovate.yml
new file mode 100644
index 00000000..ff86c80b
--- /dev/null
+++ b/eng/common/core-templates/job/renovate.yml
@@ -0,0 +1,196 @@
+# --------------------------------------------------------------------------------------
+# Renovate Bot Job Template
+# --------------------------------------------------------------------------------------
+# This Azure DevOps pipeline job template runs Renovate (https://docs.renovatebot.com/)
+# to automatically update dependencies in a GitHub repository.
+#
+# Renovate scans the repository for dependency files and creates pull requests to update
+# outdated dependencies based on the configuration specified in the renovateConfigPath
+# parameter.
+#
+# Usage:
+# For each product repo wanting to make use of Renovate, this template is called from
+# an internal Azure DevOps pipeline, typically with a schedule trigger, to check for
+# and propose dependency updates.
+#
+# For more info, see https://github.com/dotnet/arcade/blob/main/Documentation/Renovate.md
+# --------------------------------------------------------------------------------------
+
+parameters:
+
+# Path to the Renovate configuration file within the repository.
+- name: renovateConfigPath
+ type: string
+ default: 'eng/renovate.json'
+
+# GitHub repository to run Renovate against, in the format 'owner/repo'.
+# This could technically be any repo but convention is to target the same
+# repo that contains the calling pipeline. The Renovate config file would
+# be co-located with the pipeline's repo and, in most cases, the config
+# file is specific to the repo being targeted.
+- name: gitHubRepo
+ type: string
+
+# List of base branches to target for Renovate PRs.
+# NOTE: The Renovate configuration file is always read from the branch where the
+# pipeline is run, NOT from the target branches specified here. If you need different
+# configurations for different branches, run the pipeline from each branch separately.
+- name: baseBranches
+ type: object
+ default:
+ - main
+
+# When true, Renovate will run in dry run mode, which previews changes without creating PRs.
+# See the 'Run Renovate' step log output for details of what would have been changed.
+- name: dryRun
+ type: boolean
+ default: false
+
+# By default, Renovate will not recreate a PR for a given dependency/version pair that was
+# previously closed. This allows opting in to always recreating PRs even if they were
+# previously closed.
+- name: forceRecreatePR
+ type: boolean
+ default: false
+
+# Name of the arcade repository resource in the pipeline.
+# This allows repos which haven't been onboarded to Arcade to still use this
+# template by checking out the repo as a resource with a custom name and pointing
+# this parameter to it.
+- name: arcadeRepoResource
+ type: string
+ default: self
+
+# Directory name for the self repo under $(Build.SourcesDirectory) in multi-checkout.
+# In multi-checkout (when arcadeRepoResource != 'self'), Azure DevOps checks out the
+# self repo to $(Build.SourcesDirectory)/&lt;repo name&gt;. Set this to match the auto-generated
+# directory name. Using the auto-generated name is necessary rather than explicitly
+# defining a checkout path because container jobs expect repos to live under the agent's
+# workspace ($(Pipeline.Workspace)). On some self-hosted setups the host path
+# (e.g., /mnt/vss/_work) differs from the container path (e.g., /__w), and a custom checkout
+# path can fail validation. Using the default checkout location keeps the paths consistent
+# and avoids this issue.
+- name: selfRepoName
+ type: string
+ default: ''
+- name: arcadeRepoName
+ type: string
+ default: ''
+
+# Pool configuration for the job.
+- name: pool
+ type: object
+ default:
+ name: NetCore1ESPool-Internal
+ image: build.azurelinux.3.amd64
+ os: linux
+
+jobs:
+- job: Renovate
+ displayName: Run Renovate
+ container: RenovateContainer
+ variables:
+ - group: dotnet-renovate-bot
+ # The Renovate version is automatically updated by https://github.com/dotnet/arcade/blob/main/azure-pipelines-renovate.yml.
+ # Changing the variable name here would require updating the name in https://github.com/dotnet/arcade/blob/main/eng/renovate.json as well.
+ - name: renovateVersion
+ value: '42'
+ readonly: true
+ - name: renovateLogFilePath
+ value: '$(Build.ArtifactStagingDirectory)/renovate.json'
+ readonly: true
+ - name: dryRunArg
+ readonly: true
+ ${{ if eq(parameters.dryRun, true) }}:
+ value: 'full'
+ ${{ else }}:
+ value: ''
+ - name: recreateWhenArg
+ readonly: true
+ ${{ if eq(parameters.forceRecreatePR, true) }}:
+ value: 'always'
+ ${{ else }}:
+ value: ''
+ # In multi-checkout (without custom paths), Azure DevOps places each repo under
+  # $(Build.SourcesDirectory)/&lt;repo name&gt;. selfRepoName must be provided in that case.
+ - name: selfRepoPath
+ readonly: true
+ ${{ if eq(parameters.arcadeRepoResource, 'self') }}:
+ value: '$(Build.SourcesDirectory)'
+ ${{ else }}:
+ value: '$(Build.SourcesDirectory)/${{ parameters.selfRepoName }}'
+ - name: arcadeRepoPath
+ readonly: true
+ ${{ if eq(parameters.arcadeRepoResource, 'self') }}:
+ value: '$(Build.SourcesDirectory)'
+ ${{ else }}:
+ value: '$(Build.SourcesDirectory)/${{ parameters.arcadeRepoName }}'
+ pool: ${{ parameters.pool }}
+
+ templateContext:
+ outputParentDirectory: $(Build.ArtifactStagingDirectory)
+ outputs:
+ - output: pipelineArtifact
+ displayName: Publish Renovate Log
+ condition: succeededOrFailed()
+ targetPath: $(Build.ArtifactStagingDirectory)
+ artifactName: $(Agent.JobName)_Logs_Attempt$(System.JobAttempt)
+ isProduction: false # logs are non-production artifacts
+
+ steps:
+ - checkout: self
+ fetchDepth: 1
+
+ - ${{ if ne(parameters.arcadeRepoResource, 'self') }}:
+ - checkout: ${{ parameters.arcadeRepoResource }}
+ fetchDepth: 1
+
+ - script: |
+ renovate-config-validator $(selfRepoPath)/${{parameters.renovateConfigPath}} 2>&1 | tee /tmp/renovate-config-validator.out
+ validatorExit=${PIPESTATUS[0]}
+ if grep -q '^ WARN:' /tmp/renovate-config-validator.out; then
+ echo "##vso[task.logissue type=warning]Renovate config validator produced warnings."
+ echo "##vso[task.complete result=SucceededWithIssues]"
+ fi
+ exit $validatorExit
+ displayName: Validate Renovate config
+ env:
+ LOG_LEVEL: info
+ LOG_FILE_LEVEL: debug
+ LOG_FILE: $(Build.ArtifactStagingDirectory)/renovate-config-validator.json
+
+ - script: |
+ . $(arcadeRepoPath)/eng/common/renovate.env
+ renovate 2>&1 | tee /tmp/renovate.out
+ renovateExit=${PIPESTATUS[0]}
+ if grep -q '^ WARN:' /tmp/renovate.out; then
+ echo "##vso[task.logissue type=warning]Renovate produced warnings."
+ echo "##vso[task.complete result=SucceededWithIssues]"
+ fi
+ exit $renovateExit
+ displayName: Run Renovate
+ env:
+ RENOVATE_FORK_TOKEN: $(BotAccount-dotnet-renovate-bot-PAT)
+ RENOVATE_TOKEN: $(BotAccount-dotnet-renovate-bot-PAT)
+ RENOVATE_REPOSITORIES: ${{parameters.gitHubRepo}}
+ RENOVATE_BASE_BRANCHES: ${{ convertToJson(parameters.baseBranches) }}
+ RENOVATE_DRY_RUN: $(dryRunArg)
+ RENOVATE_RECREATE_WHEN: $(recreateWhenArg)
+ LOG_LEVEL: info
+ LOG_FILE_LEVEL: debug
+ LOG_FILE: $(renovateLogFilePath)
+ RENOVATE_CONFIG_FILE: $(selfRepoPath)/${{parameters.renovateConfigPath}}
+
+ - script: |
+ echo "PRs created by Renovate:"
+ if [ -s "$(renovateLogFilePath)" ]; then
+ if ! jq -r 'select(.msg == "PR created" and .pr != null) | "https://github.com/\(.repository)/pull/\(.pr)"' "$(renovateLogFilePath)" | sort -u; then
+ echo "##vso[task.logissue type=warning]Failed to parse Renovate log file with jq."
+ echo "##vso[task.complete result=SucceededWithIssues]"
+ fi
+ else
+ echo "##vso[task.logissue type=warning]No Renovate log file found or file is empty."
+ echo "##vso[task.complete result=SucceededWithIssues]"
+ fi
+ displayName: List created PRs
+ condition: and(succeededOrFailed(), eq('${{ parameters.dryRun }}', false))
diff --git a/eng/common/core-templates/job/source-build.yml b/eng/common/core-templates/job/source-build.yml
index 9d820f97..1997c2ae 100644
--- a/eng/common/core-templates/job/source-build.yml
+++ b/eng/common/core-templates/job/source-build.yml
@@ -60,19 +60,19 @@ jobs:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
- demands: ImageOverride -equals build.ubuntu.2204.amd64
+ demands: ImageOverride -equals build.azurelinux.3.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
- image: 1es-azurelinux-3
+ image: build.azurelinux.3.amd64
os: linux
${{ else }}:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
- demands: ImageOverride -equals Build.Ubuntu.2204.Amd64.Open
+ demands: ImageOverride -equals build.azurelinux.3.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
- demands: ImageOverride -equals Build.Ubuntu.2204.Amd64
+ demands: ImageOverride -equals build.azurelinux.3.amd64
${{ if ne(parameters.platform.pool, '') }}:
pool: ${{ parameters.platform.pool }}
diff --git a/eng/common/core-templates/job/source-index-stage1.yml b/eng/common/core-templates/job/source-index-stage1.yml
index 76baf5c2..bac6ac5f 100644
--- a/eng/common/core-templates/job/source-index-stage1.yml
+++ b/eng/common/core-templates/job/source-index-stage1.yml
@@ -15,6 +15,8 @@ jobs:
variables:
- name: BinlogPath
value: ${{ parameters.binlogPath }}
+ - name: skipComponentGovernanceDetection
+ value: true
- template: /eng/common/core-templates/variables/pool-providers.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
@@ -25,10 +27,10 @@ jobs:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $(DncEngPublicBuildPool)
- image: windows.vs2026preview.scout.amd64.open
+ image: windows.vs2026.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $(DncEngInternalBuildPool)
- image: windows.vs2026preview.scout.amd64
+ image: windows.vs2026.amd64
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
diff --git a/eng/common/core-templates/jobs/codeql-build.yml b/eng/common/core-templates/jobs/codeql-build.yml
deleted file mode 100644
index dbc14ac5..00000000
--- a/eng/common/core-templates/jobs/codeql-build.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
- is1ESPipeline: ''
-
-jobs:
-- template: /eng/common/core-templates/jobs/jobs.yml
- parameters:
- is1ESPipeline: ${{ parameters.is1ESPipeline }}
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishTestResults: false
- enablePublishBuildAssets: false
- enableTelemetry: true
-
- variables:
- - group: Publish-Build-Assets
- # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
- # sync with the packages.config file.
- - name: DefaultGuardianVersion
- value: 0.109.0
- - name: GuardianPackagesConfigFile
- value: $(System.DefaultWorkingDirectory)\eng\common\sdl\packages.config
- - name: GuardianVersion
- value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
-
- jobs: ${{ parameters.jobs }}
-
diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml
index 01ada747..cc8cce45 100644
--- a/eng/common/core-templates/jobs/jobs.yml
+++ b/eng/common/core-templates/jobs/jobs.yml
@@ -43,6 +43,10 @@ parameters:
artifacts: {}
is1ESPipeline: ''
+
+ # Publishing version w/default.
+ publishingVersion: 3
+
repositoryAlias: self
officialBuildId: ''
@@ -102,6 +106,7 @@ jobs:
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
continueOnError: ${{ parameters.continueOnError }}
+ publishingVersion: ${{ parameters.publishingVersion }}
dependsOn:
- ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.publishBuildAssetsDependsOn }}:
diff --git a/eng/common/core-templates/post-build/common-variables.yml b/eng/common/core-templates/post-build/common-variables.yml
index d5627a99..db298ae1 100644
--- a/eng/common/core-templates/post-build/common-variables.yml
+++ b/eng/common/core-templates/post-build/common-variables.yml
@@ -11,8 +11,6 @@ variables:
- name: MaestroApiVersion
value: "2020-02-20"
- - name: SourceLinkCLIVersion
- value: 3.0.0
- name: SymbolToolVersion
value: 1.0.1
- name: BinlogToolVersion
diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml
index 06864cd1..fcf40d1d 100644
--- a/eng/common/core-templates/post-build/post-build.yml
+++ b/eng/common/core-templates/post-build/post-build.yml
@@ -9,6 +9,7 @@ parameters:
default: 3
values:
- 3
+ - 4
- name: BARBuildId
displayName: BAR Build Id
@@ -50,16 +51,6 @@ parameters:
type: boolean
default: false
-- name: SDLValidationParameters
- type: object
- default:
- enable: false
- publishGdn: false
- continueOnError: false
- params: ''
- artifactNames: ''
- downloadArtifacts: true
-
- name: isAssetlessBuild
type: boolean
displayName: Is Assetless Build
@@ -103,7 +94,7 @@ parameters:
default: false
stages:
-- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true')) }}:
- stage: Validate
dependsOn: ${{ parameters.validateDependsOn }}
displayName: Validate Build Assets
@@ -120,18 +111,18 @@ stages:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
- image: 1ESPT-Windows2022
+ image: 1ESPT-Windows2025
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ else }}:
${{ if eq(parameters.is1ESPipeline, true) }}:
name: $(DncEngInternalBuildPool)
- image: windows.vs2026preview.scout.amd64
+ image: windows.vs2026.amd64
os: windows
${{ else }}:
name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2026preview.scout.amd64
+ demands: ImageOverride -equals windows.vs2026.amd64
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
@@ -140,16 +131,30 @@ stages:
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
is1ESPipeline: ${{ parameters.is1ESPipeline }}
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
+ - ${{ if ne(parameters.publishingInfraVersion, 4) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+ - ${{ if eq(parameters.publishingInfraVersion, 4) }}:
+ - task: DownloadPipelineArtifact@2
+ displayName: Download Pipeline Artifacts (V4)
+ inputs:
+ itemPattern: '*/packages/**/*.nupkg'
+ targetPath: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
+ - task: CopyFiles@2
+ displayName: Flatten packages to PackageArtifacts
+ inputs:
+ SourceFolder: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
+ Contents: '**/*.nupkg'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ flattenFolders: true
- task: PowerShell@2
displayName: Validate
@@ -164,18 +169,18 @@ stages:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
- image: 1ESPT-Windows2022
+ image: 1ESPT-Windows2025
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ else }}:
${{ if eq(parameters.is1ESPipeline, true) }}:
name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
+ image: windows.vs2026.amd64
os: windows
${{ else }}:
name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2026preview.scout.amd64
+ demands: ImageOverride -equals windows.vs2026.amd64
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
@@ -183,16 +188,30 @@ stages:
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
is1ESPipeline: ${{ parameters.is1ESPipeline }}
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
+ - ${{ if ne(parameters.publishingInfraVersion, 4) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+ - ${{ if eq(parameters.publishingInfraVersion, 4) }}:
+ - task: DownloadPipelineArtifact@2
+ displayName: Download Pipeline Artifacts (V4)
+ inputs:
+ itemPattern: '*/packages/**/*.nupkg'
+ targetPath: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
+ - task: CopyFiles@2
+ displayName: Flatten packages to PackageArtifacts
+ inputs:
+ SourceFolder: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
+ Contents: '**/*.nupkg'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ flattenFolders: true
# This is necessary whenever we want to publish/restore to an AzDO private feed
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
@@ -206,7 +225,7 @@ stages:
displayName: Validate
inputs:
filePath: eng\common\sdk-task.ps1
- arguments: -task SigningValidation -restore -msbuildEngine vs
+ arguments: -task SigningValidation -restore
/p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
/p:SignCheckExclusionsFile='$(System.DefaultWorkingDirectory)/eng/SignCheckExclusionsFile.txt'
${{ parameters.signingValidationAdditionalParameters }}
@@ -218,57 +237,24 @@ stages:
JobLabel: 'Signing'
BinlogToolVersion: $(BinlogToolVersion)
- - job:
- displayName: SourceLink Validation
- condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- ${{ if eq(parameters.is1ESPipeline, true) }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2026preview.scout.amd64
- steps:
- - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- is1ESPipeline: ${{ parameters.is1ESPipeline }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: BlobArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/sourcelink-validation.ps1
- arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
- -ExtractPath $(Agent.BuildDirectory)/Extract/
- -GHRepoName $(Build.Repository.Name)
- -GHCommit $(Build.SourceVersion)
- -SourcelinkCliVersion $(SourceLinkCLIVersion)
- continueOnError: true
+ # SourceLink validation has been removed — the underlying CLI tool
+ # (targeting netcoreapp2.1) has not functioned for years.
+ # The enableSourceLinkValidation parameter is kept but ignored so
+ # existing pipelines that pass it are not broken.
+ # See https://github.com/dotnet/arcade/issues/16647
+ - ${{ if eq(parameters.enableSourceLinkValidation, 'true') }}:
+ - job:
+ displayName: 'SourceLink Validation Removed - please remove enableSourceLinkValidation from your pipeline'
+ pool: server
+ steps:
+ - task: Delay@1
+ displayName: 'Warning: SourceLink validation removed (see https://github.com/dotnet/arcade/issues/16647)'
+ inputs:
+ delayForMinutes: '0'
- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- stage: publish_using_darc
- ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+ ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true')) }}:
dependsOn: ${{ parameters.publishDependsOn }}
${{ else }}:
dependsOn: ${{ parameters.validateDependsOn }}
@@ -286,18 +272,18 @@ stages:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
- image: 1ESPT-Windows2022
+ image: 1ESPT-Windows2025
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ else }}:
${{ if eq(parameters.is1ESPipeline, true) }}:
name: NetCore1ESPool-Publishing-Internal
- image: windows.vs2019.amd64
+ image: windows.vs2026.amd64
os: windows
${{ else }}:
name: NetCore1ESPool-Publishing-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
+ demands: ImageOverride -equals windows.vs2026.amd64
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
@@ -327,7 +313,7 @@ stages:
scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: >
-BuildId $(BARBuildId)
- -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
+ -PublishingInfraVersion 3
-AzdoToken '$(System.AccessToken)'
-WaitPublishingFinish true
-RequireDefaultChannels ${{ parameters.requireDefaultChannels }}
diff --git a/eng/common/core-templates/post-build/setup-maestro-vars.yml b/eng/common/core-templates/post-build/setup-maestro-vars.yml
index a7abd58c..6dfa99ec 100644
--- a/eng/common/core-templates/post-build/setup-maestro-vars.yml
+++ b/eng/common/core-templates/post-build/setup-maestro-vars.yml
@@ -8,12 +8,11 @@ steps:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
- - task: DownloadBuildArtifacts@0
+ - task: DownloadPipelineArtifact@2
displayName: Download Release Configs
inputs:
- buildType: current
artifactName: ReleaseConfigs
- checkDownloadedFiles: true
+ targetPath: '$(Build.StagingDirectory)/ReleaseConfigs'
- task: AzureCLI@2
name: setReleaseVars
diff --git a/eng/common/core-templates/stages/renovate.yml b/eng/common/core-templates/stages/renovate.yml
new file mode 100644
index 00000000..edab2818
--- /dev/null
+++ b/eng/common/core-templates/stages/renovate.yml
@@ -0,0 +1,111 @@
+# --------------------------------------------------------------------------------------
+# Renovate Pipeline Template
+# --------------------------------------------------------------------------------------
+# This template provides a complete reusable pipeline definition for running Renovate
+# in a 1ES Official pipeline. Pipelines can extend from this template and only need
+# to pass the Renovate job parameters.
+#
+# For more info, see https://github.com/dotnet/arcade/blob/main/Documentation/Renovate.md
+# --------------------------------------------------------------------------------------
+
+parameters:
+
+# Path to the Renovate configuration file within the repository.
+- name: renovateConfigPath
+ type: string
+ default: 'eng/renovate.json'
+
+# GitHub repository to run Renovate against, in the format 'owner/repo'.
+- name: gitHubRepo
+ type: string
+
+# List of base branches to target for Renovate PRs.
+- name: baseBranches
+ type: object
+ default:
+ - main
+
+# When true, Renovate will run in dry run mode.
+- name: dryRun
+ type: boolean
+ default: false
+
+# When true, Renovate will recreate PRs even if they were previously closed.
+- name: forceRecreatePR
+ type: boolean
+ default: false
+
+# Name of the arcade repository resource in the pipeline.
+# This allows repos which haven't been onboarded to Arcade to still use this
+# template by checking out the repo as a resource with a custom name and pointing
+# this parameter to it.
+- name: arcadeRepoResource
+ type: string
+ default: 'self'
+
+- name: selfRepoName
+ type: string
+ default: ''
+- name: arcadeRepoName
+ type: string
+ default: ''
+
+# Pool configuration for the pipeline.
+- name: pool
+ type: object
+ default:
+ name: NetCore1ESPool-Internal
+ image: build.azurelinux.3.amd64
+ os: linux
+
+# Renovate version used in the container image tag.
+- name: renovateVersion
+ default: 43
+ type: number
+
+# Pool configuration for SDL analysis.
+- name: sdlPool
+ type: object
+ default:
+ name: NetCore1ESPool-Internal
+ image: windows.vs2026.amd64
+ os: windows
+
+resources:
+ repositories:
+ - repository: 1ESPipelineTemplates
+ type: git
+ name: 1ESPipelineTemplates/1ESPipelineTemplates
+ ref: refs/tags/release
+
+extends:
+ template: v1/1ES.Official.PipelineTemplate.yml@1ESPipelineTemplates
+ parameters:
+ pool: ${{ parameters.pool }}
+ sdl:
+ sourceAnalysisPool: ${{ parameters.sdlPool }}
+ # When repos that aren't onboarded to Arcade use this template, they set the
+ # arcadeRepoResource parameter to point to their Arcade repo resource. In that case,
+      # Arcade will be excluded from SDL analysis.
+ ${{ if ne(parameters.arcadeRepoResource, 'self') }}:
+ sourceRepositoriesToScan:
+ exclude:
+ - repository: ${{ parameters.arcadeRepoResource }}
+ containers:
+ RenovateContainer:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-renovate-${{ parameters.renovateVersion }}-amd64
+ stages:
+ - stage: Renovate
+ displayName: Run Renovate
+ jobs:
+ - template: /eng/common/core-templates/job/renovate.yml@${{ parameters.arcadeRepoResource }}
+ parameters:
+ renovateConfigPath: ${{ parameters.renovateConfigPath }}
+ gitHubRepo: ${{ parameters.gitHubRepo }}
+ baseBranches: ${{ parameters.baseBranches }}
+ dryRun: ${{ parameters.dryRun }}
+ forceRecreatePR: ${{ parameters.forceRecreatePR }}
+ pool: ${{ parameters.pool }}
+ arcadeRepoResource: ${{ parameters.arcadeRepoResource }}
+ selfRepoName: ${{ parameters.selfRepoName }}
+ arcadeRepoName: ${{ parameters.arcadeRepoName }}
diff --git a/eng/common/core-templates/steps/component-governance.yml b/eng/common/core-templates/steps/component-governance.yml
deleted file mode 100644
index cf0649aa..00000000
--- a/eng/common/core-templates/steps/component-governance.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-parameters:
- disableComponentGovernance: false
- componentGovernanceIgnoreDirectories: ''
- is1ESPipeline: false
- displayName: 'Component Detection'
-
-steps:
-- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
- displayName: Set skipComponentGovernanceDetection variable
-- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- - task: ComponentGovernanceComponentDetection@0
- continueOnError: true
- displayName: ${{ parameters.displayName }}
- inputs:
- ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml
index 003f7eae..aad0a8ae 100644
--- a/eng/common/core-templates/steps/generate-sbom.yml
+++ b/eng/common/core-templates/steps/generate-sbom.yml
@@ -1,54 +1,14 @@
-# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
-# PackageName - The name of the package this SBOM represents.
-# PackageVersion - The version of the package this SBOM represents.
-# ManifestDirPath - The path of the directory where the generated manifest files will be placed
-# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
-
parameters:
- PackageVersion: 11.0.0
- BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts'
- PackageName: '.NET'
- ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
- IgnoreDirectories: ''
- sbomContinueOnError: true
- is1ESPipeline: false
- # disable publishArtifacts if some other step is publishing the artifacts (like job.yml).
- publishArtifacts: true
+ PackageVersion: unused
+ BuildDropPath: unused
+ PackageName: unused
+ ManifestDirPath: unused
+ IgnoreDirectories: unused
+ sbomContinueOnError: unused
+ is1ESPipeline: unused
+ publishArtifacts: unused
steps:
-- task: PowerShell@2
- displayName: Prep for SBOM generation in (Non-linux)
- condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
- inputs:
- filePath: ./eng/common/generate-sbom-prep.ps1
- arguments: ${{parameters.manifestDirPath}}
-
-# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
- script: |
- chmod +x ./eng/common/generate-sbom-prep.sh
- ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
- displayName: Prep for SBOM generation in (Linux)
- condition: eq(variables['Agent.Os'], 'Linux')
- continueOnError: ${{ parameters.sbomContinueOnError }}
-
-- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
- displayName: 'Generate SBOM manifest'
- continueOnError: ${{ parameters.sbomContinueOnError }}
- inputs:
- PackageName: ${{ parameters.packageName }}
- BuildDropPath: ${{ parameters.buildDropPath }}
- PackageVersion: ${{ parameters.packageVersion }}
- ManifestDirPath: ${{ parameters.manifestDirPath }}/$(ARTIFACT_NAME)
- ${{ if ne(parameters.IgnoreDirectories, '') }}:
- AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
-
-- ${{ if eq(parameters.publishArtifacts, 'true')}}:
- - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
- parameters:
- is1ESPipeline: ${{ parameters.is1ESPipeline }}
- args:
- displayName: Publish SBOM manifest
- continueOnError: ${{parameters.sbomContinueOnError}}
- targetPath: '${{ parameters.manifestDirPath }}'
- artifactName: $(ARTIFACT_NAME)
-
+- script: echo "##vso[task.logissue type=warning]Including generate-sbom.yml is deprecated, SBOM generation is handled 1ES PT now. Remove this include."
+  displayName: Issue generate-sbom.yml deprecation warning
diff --git a/eng/common/core-templates/steps/install-microbuild-impl.yml b/eng/common/core-templates/steps/install-microbuild-impl.yml
index b9e0143e..da22beb3 100644
--- a/eng/common/core-templates/steps/install-microbuild-impl.yml
+++ b/eng/common/core-templates/steps/install-microbuild-impl.yml
@@ -18,7 +18,7 @@ parameters:
type: boolean
steps:
-- ${{ if eq(parameters.enablePreviewMicrobuild, 'true') }}:
+- ${{ if eq(parameters.enablePreviewMicrobuild, true) }}:
- task: MicroBuildSigningPluginPreview@4
displayName: Install Preview MicroBuild plugin
inputs: ${{ parameters.microbuildTaskInputs }}
diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml
index 4f4b56ed..76a54e15 100644
--- a/eng/common/core-templates/steps/install-microbuild.yml
+++ b/eng/common/core-templates/steps/install-microbuild.yml
@@ -73,7 +73,7 @@ steps:
# YAML expansion, and Windows vs. Linux/Mac uses different service connections. However,
# we can avoid including the MB install step if not enabled at all. This avoids a bunch of
# extra pipeline authorizations, since most pipelines do not sign on non-Windows.
- - template: /eng/common/core-templates/steps/install-microbuild-impl.yml@self
+ - template: /eng/common/core-templates/steps/install-microbuild-impl.yml
parameters:
enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }}
microbuildTaskInputs:
@@ -95,7 +95,7 @@ steps:
condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test'))
- ${{ if eq(parameters.enableMicrobuildForMacAndLinux, true) }}:
- - template: /eng/common/core-templates/steps/install-microbuild-impl.yml@self
+ - template: /eng/common/core-templates/steps/install-microbuild-impl.yml
parameters:
enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }}
microbuildTaskInputs:
diff --git a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml
index 5a927b4c..84a1922c 100644
--- a/eng/common/core-templates/steps/publish-logs.yml
+++ b/eng/common/core-templates/steps/publish-logs.yml
@@ -31,7 +31,6 @@ steps:
-runtimeSourceFeed https://ci.dot.net/internal
-runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
'$(publishing-dnceng-devdiv-code-r-build-re)'
- '$(MaestroAccessToken)'
'$(dn-bot-all-orgs-artifact-feeds-rw)'
'$(akams-client-id)'
'$(microsoft-symbol-server-pat)'
@@ -51,13 +50,15 @@ steps:
TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
condition: always()
-- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
displayName: Publish Logs
- pathToPublish: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
- publishLocation: Container
- artifactName: PostBuildLogs
+ targetPath: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
+ artifactName: PostBuildLogs_${{ parameters.StageLabel }}_${{ parameters.JobLabel }}_Attempt$(System.JobAttempt)
continueOnError: true
condition: always()
+ retryCountOnTaskFailure: 10 # for any files being locked
+ isProduction: false # logs are non-production artifacts
+
diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml
index acf16ed3..b75f59c4 100644
--- a/eng/common/core-templates/steps/source-build.yml
+++ b/eng/common/core-templates/steps/source-build.yml
@@ -62,4 +62,4 @@ steps:
artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
continueOnError: true
condition: succeededOrFailed()
- sbomEnabled: false # we don't need SBOM for logs
+ isProduction: false # logs are non-production artifacts
diff --git a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml
index ac019e2d..3ad83b8c 100644
--- a/eng/common/core-templates/steps/source-index-stage1-publish.yml
+++ b/eng/common/core-templates/steps/source-index-stage1-publish.yml
@@ -14,8 +14,8 @@ steps:
workingDirectory: $(Agent.TempDirectory)
- script: |
- $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
- $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
+ $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --source ${{parameters.sourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
+ $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --source ${{parameters.sourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
displayName: "Source Index: Download netsourceindex Tools"
# Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
workingDirectory: $(Agent.TempDirectory)
diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
index 8abfb71f..314c93c5 100755
--- a/eng/common/cross/build-rootfs.sh
+++ b/eng/common/cross/build-rootfs.sh
@@ -9,6 +9,7 @@ usage()
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
echo " for alpine can be specified with version: alpineX.YY or alpineedge"
echo " for FreeBSD can be: freebsd13, freebsd14"
+ echo " for OpenBSD can be: openbsd"
echo " for illumos can be: illumos"
echo " for Haiku can be: haiku."
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
@@ -27,6 +28,8 @@ __BuildArch=arm
__AlpineArch=armv7
__FreeBSDArch=arm
__FreeBSDMachineArch=armv7
+__OpenBSDArch=arm
+__OpenBSDMachineArch=armv7
__IllumosArch=arm7
__HaikuArch=arm
__QEMUArch=arm
@@ -72,7 +75,7 @@ __AlpinePackages+=" krb5-dev"
__AlpinePackages+=" openssl-dev"
__AlpinePackages+=" zlib-dev"
-__FreeBSDBase="13.4-RELEASE"
+__FreeBSDBase="13.5-RELEASE"
__FreeBSDPkg="1.21.3"
__FreeBSDABI="13"
__FreeBSDPackages="libunwind"
@@ -82,6 +85,12 @@ __FreeBSDPackages+=" openssl"
__FreeBSDPackages+=" krb5"
__FreeBSDPackages+=" terminfo-db"
+__OpenBSDVersion="7.8"
+__OpenBSDPackages="heimdal-libs"
+__OpenBSDPackages+=" icu4c"
+__OpenBSDPackages+=" inotify-tools"
+__OpenBSDPackages+=" openssl"
+
__IllumosPackages="icu"
__IllumosPackages+=" mit-krb5"
__IllumosPackages+=" openssl"
@@ -160,6 +169,8 @@ while :; do
__QEMUArch=aarch64
__FreeBSDArch=arm64
__FreeBSDMachineArch=aarch64
+ __OpenBSDArch=arm64
+ __OpenBSDMachineArch=aarch64
;;
armel)
__BuildArch=armel
@@ -235,6 +246,8 @@ while :; do
__UbuntuArch=amd64
__FreeBSDArch=amd64
__FreeBSDMachineArch=amd64
+ __OpenBSDArch=amd64
+ __OpenBSDMachineArch=amd64
__illumosArch=x86_64
__HaikuArch=x86_64
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
@@ -295,9 +308,7 @@ while :; do
;;
noble) # Ubuntu 24.04
__CodeName=noble
- if [[ -z "$__LLDB_Package" ]]; then
- __LLDB_Package="liblldb-19-dev"
- fi
+ __LLDB_Package="liblldb-19-dev"
;;
stretch) # Debian 9
__CodeName=stretch
@@ -383,10 +394,14 @@ while :; do
;;
freebsd14)
__CodeName=freebsd
- __FreeBSDBase="14.2-RELEASE"
+ __FreeBSDBase="14.3-RELEASE"
__FreeBSDABI="14"
__SkipUnmount=1
;;
+ openbsd)
+ __CodeName=openbsd
+ __SkipUnmount=1
+ ;;
illumos)
__CodeName=illumos
__SkipUnmount=1
@@ -595,6 +610,62 @@ elif [[ "$__CodeName" == "freebsd" ]]; then
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update
# shellcheck disable=SC2086
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
+elif [[ "$__CodeName" == "openbsd" ]]; then
+ # determine mirrors
+ OPENBSD_MIRROR="https://cdn.openbsd.org/pub/OpenBSD/$__OpenBSDVersion/$__OpenBSDMachineArch"
+
+ # download base system sets
+ ensureDownloadTool
+
+ BASE_SETS=(base comp)
+ for set in "${BASE_SETS[@]}"; do
+ FILE="${set}${__OpenBSDVersion//./}.tgz"
+ echo "Downloading $FILE..."
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- "$OPENBSD_MIRROR/$FILE" | tar -C "$__RootfsDir" -xzpf -
+ else
+ curl -SL "$OPENBSD_MIRROR/$FILE" | tar -C "$__RootfsDir" -xzpf -
+ fi
+ done
+
+ PKG_MIRROR="https://cdn.openbsd.org/pub/OpenBSD/${__OpenBSDVersion}/packages/${__OpenBSDMachineArch}"
+
+ echo "Installing packages into sysroot..."
+
+ # Fetch package index once
+ if [[ "$__hasWget" == 1 ]]; then
+ PKG_INDEX=$(wget -qO- "$PKG_MIRROR/")
+ else
+ PKG_INDEX=$(curl -s "$PKG_MIRROR/")
+ fi
+
+ for pkg in $__OpenBSDPackages; do
+ PKG_FILE=$(echo "$PKG_INDEX" | grep -Po ">\K${pkg}-[0-9][^\" ]*\.tgz" \
+ | sort -V | tail -n1)
+
+ echo "Resolved package filename for $pkg: $PKG_FILE"
+
+ [[ -z "$PKG_FILE" ]] && { echo "ERROR: Package $pkg not found"; exit 1; }
+
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- "$PKG_MIRROR/$PKG_FILE" | tar -C "$__RootfsDir" -xzpf -
+ else
+ curl -SL "$PKG_MIRROR/$PKG_FILE" | tar -C "$__RootfsDir" -xzpf -
+ fi
+ done
+
+ echo "Creating versionless symlinks for shared libraries..."
+ # Find all versioned .so files and create the base .so symlink
+ for lib in "$__RootfsDir/usr/lib/libc++.so."* "$__RootfsDir/usr/lib/libc++abi.so."* "$__RootfsDir/usr/lib/libpthread.so."*; do
+ if [ -f "$lib" ]; then
+ # Extract the filename (e.g., libc++.so.12.0)
+ VERSIONED_NAME=$(basename "$lib")
+ # Remove the trailing version numbers (e.g., libc++.so)
+ BASE_NAME=${VERSIONED_NAME%.so.*}.so
+ # Create the symlink in the same directory
+ ln -sf "$VERSIONED_NAME" "$__RootfsDir/usr/lib/$BASE_NAME"
+ fi
+ done
elif [[ "$__CodeName" == "illumos" ]]; then
mkdir "$__RootfsDir/tmp"
pushd "$__RootfsDir/tmp"
diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake
index 0ff85cf0..ff2dfdb4 100644
--- a/eng/common/cross/toolchain.cmake
+++ b/eng/common/cross/toolchain.cmake
@@ -3,15 +3,22 @@ set(CROSS_ROOTFS $ENV{ROOTFS_DIR})
# reset platform variables (e.g. cmake 3.25 sets LINUX=1)
unset(LINUX)
unset(FREEBSD)
+unset(OPENBSD)
unset(ILLUMOS)
unset(ANDROID)
unset(TIZEN)
unset(HAIKU)
set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH})
+
+file(GLOB OPENBSD_PROBE "${CROSS_ROOTFS}/etc/signify/openbsd-*.pub")
+
if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version)
set(CMAKE_SYSTEM_NAME FreeBSD)
set(FREEBSD 1)
+elseif(OPENBSD_PROBE)
+ set(CMAKE_SYSTEM_NAME OpenBSD)
+ set(OPENBSD 1)
elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc)
set(CMAKE_SYSTEM_NAME SunOS)
set(ILLUMOS 1)
@@ -53,6 +60,8 @@ elseif(TARGET_ARCH_NAME STREQUAL "arm64")
endif()
elseif(FREEBSD)
set(triple "aarch64-unknown-freebsd12")
+ elseif(OPENBSD)
+ set(triple "aarch64-unknown-openbsd")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "armel")
set(CMAKE_SYSTEM_PROCESSOR armv7l)
@@ -109,6 +118,8 @@ elseif(TARGET_ARCH_NAME STREQUAL "x64")
endif()
elseif(FREEBSD)
set(triple "x86_64-unknown-freebsd12")
+ elseif(OPENBSD)
+ set(triple "x86_64-unknown-openbsd")
elseif(ILLUMOS)
set(TOOLCHAIN "x86_64-illumos")
elseif(HAIKU)
@@ -193,7 +204,7 @@ if(ANDROID)
# include official NDK toolchain script
include(${CROSS_ROOTFS}/../build/cmake/android.toolchain.cmake)
-elseif(FREEBSD)
+elseif(FREEBSD OR OPENBSD)
# we cross-compile by instructing clang
set(CMAKE_C_COMPILER_TARGET ${triple})
set(CMAKE_CXX_COMPILER_TARGET ${triple})
@@ -291,7 +302,7 @@ endif()
# Specify compile options
-if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|loongarch64|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU)
+if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|loongarch64|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD AND NOT OPENBSD) OR ILLUMOS OR HAIKU)
set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
diff --git a/eng/common/darc-init.ps1 b/eng/common/darc-init.ps1
index e3374310..a5be41db 100644
--- a/eng/common/darc-init.ps1
+++ b/eng/common/darc-init.ps1
@@ -29,11 +29,11 @@ function InstallDarcCli ($darcVersion, $toolpath) {
Write-Host "Installing Darc CLI version $darcVersion..."
Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
if (-not $toolpath) {
- Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity -g"
- & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g
+ Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --source '$arcadeServicesSource' -v $verbosity -g"
+ & "$dotnet" tool install $darcCliPackageName --version $darcVersion --source "$arcadeServicesSource" -v $verbosity -g
}else {
- Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity --tool-path '$toolpath'"
- & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath"
+ Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --source '$arcadeServicesSource' -v $verbosity --tool-path '$toolpath'"
+ & "$dotnet" tool install $darcCliPackageName --version $darcVersion --source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath"
}
}
diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh
index 9f5ad6b7..b56d40e5 100755
--- a/eng/common/darc-init.sh
+++ b/eng/common/darc-init.sh
@@ -73,9 +73,9 @@ function InstallDarcCli {
echo "Installing Darc CLI version $darcVersion..."
echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
if [ -z "$toolpath" ]; then
- echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g)
+ echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --source "$arcadeServicesSource" -v $verbosity -g)
else
- echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath")
+ echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath")
fi
}
diff --git a/eng/common/generate-sbom-prep.ps1 b/eng/common/generate-sbom-prep.ps1
deleted file mode 100644
index a0c7d792..00000000
--- a/eng/common/generate-sbom-prep.ps1
+++ /dev/null
@@ -1,29 +0,0 @@
-Param(
- [Parameter(Mandatory=$true)][string] $ManifestDirPath # Manifest directory where sbom will be placed
-)
-
-. $PSScriptRoot\pipeline-logging-functions.ps1
-
-# Normally - we'd listen to the manifest path given, but 1ES templates will overwrite if this level gets uploaded directly
-# with their own overwriting ours. So we create it as a sub directory of the requested manifest path.
-$ArtifactName = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM"
-$SafeArtifactName = $ArtifactName -replace '["/:<>\\|?@*"() ]', '_'
-$SbomGenerationDir = Join-Path $ManifestDirPath $SafeArtifactName
-
-Write-Host "Artifact name before : $ArtifactName"
-Write-Host "Artifact name after : $SafeArtifactName"
-
-Write-Host "Creating dir $ManifestDirPath"
-
-# create directory for sbom manifest to be placed
-if (!(Test-Path -path $SbomGenerationDir))
-{
- New-Item -ItemType Directory -path $SbomGenerationDir
- Write-Host "Successfully created directory $SbomGenerationDir"
-}
-else{
- Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder."
-}
-
-Write-Host "Updating artifact name"
-Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$SafeArtifactName"
diff --git a/eng/common/generate-sbom-prep.sh b/eng/common/generate-sbom-prep.sh
deleted file mode 100644
index b8ecca72..00000000
--- a/eng/common/generate-sbom-prep.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env bash
-
-source="${BASH_SOURCE[0]}"
-
-# resolve $SOURCE until the file is no longer a symlink
-while [[ -h $source ]]; do
- scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
- source="$(readlink "$source")"
-
- # if $source was a relative symlink, we need to resolve it relative to the path where the
- # symlink file was located
- [[ $source != /* ]] && source="$scriptroot/$source"
-done
-scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
-. $scriptroot/pipeline-logging-functions.sh
-
-
-# replace all special characters with _, some builds use special characters like : in Agent.Jobname, that is not a permissible name while uploading artifacts.
-artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM"
-safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}"
-manifest_dir=$1
-
-# Normally - we'd listen to the manifest path given, but 1ES templates will overwrite if this level gets uploaded directly
-# with their own overwriting ours. So we create it as a sub directory of the requested manifest path.
-sbom_generation_dir="$manifest_dir/$safe_artifact_name"
-
-if [ ! -d "$sbom_generation_dir" ] ; then
- mkdir -p "$sbom_generation_dir"
- echo "Sbom directory created." $sbom_generation_dir
-else
- Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder."
-fi
-
-echo "Artifact name before : "$artifact_name
-echo "Artifact name after : "$safe_artifact_name
-export ARTIFACT_NAME=$safe_artifact_name
-echo "##vso[task.setvariable variable=ARTIFACT_NAME]$safe_artifact_name"
-
-exit 0
diff --git a/eng/common/internal-feed-operations.ps1 b/eng/common/internal-feed-operations.ps1
index 92b77347..c282d3ae 100644
--- a/eng/common/internal-feed-operations.ps1
+++ b/eng/common/internal-feed-operations.ps1
@@ -26,7 +26,7 @@ function SetupCredProvider {
$url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1'
Write-Host "Writing the contents of 'installcredprovider.ps1' locally..."
- Invoke-WebRequest $url -OutFile installcredprovider.ps1
+ Invoke-WebRequest $url -UseBasicParsing -OutFile installcredprovider.ps1
Write-Host 'Installing plugin...'
.\installcredprovider.ps1 -Force
diff --git a/eng/common/native/init-distro-rid.sh b/eng/common/native/init-distro-rid.sh
index 83ea7aab..8fc6d2fe 100644
--- a/eng/common/native/init-distro-rid.sh
+++ b/eng/common/native/init-distro-rid.sh
@@ -39,6 +39,8 @@ getNonPortableDistroRid()
# $rootfsDir can be empty. freebsd-version is a shell script and should always work.
__freebsd_major_version=$("$rootfsDir"/bin/freebsd-version | cut -d'.' -f1)
nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}"
+ elif [ "$targetOs" = "openbsd" ]; then
+ nonPortableRid="openbsd.$(uname -r)-${targetArch}"
elif command -v getprop >/dev/null && getprop ro.product.system.model | grep -qi android; then
__android_sdk_version=$(getprop ro.build.version.sdk)
nonPortableRid="android.$__android_sdk_version-${targetArch}"
diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh
index 64b87d0b..4742177a 100644
--- a/eng/common/native/install-dependencies.sh
+++ b/eng/common/native/install-dependencies.sh
@@ -24,16 +24,16 @@ case "$os" in
apt update
apt install -y build-essential gettext locales cmake llvm clang lld lldb liblldb-dev libunwind8-dev libicu-dev liblttng-ust-dev \
- libssl-dev libkrb5-dev pigz cpio
+ libssl-dev libkrb5-dev pigz cpio ninja-build
localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
- elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ] || [ "$ID" = "centos"]; then
+ elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ] || [ "$ID" = "centos" ]; then
pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)"
- $pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio
+ $pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio ninja-build
elif [ "$ID" = "amzn" ]; then
- dnf install -y cmake llvm lld lldb clang python libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio
+ dnf install -y cmake llvm lld lldb clang python libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio ninja-build
elif [ "$ID" = "alpine" ]; then
- apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio
+ apk add build-base cmake bash curl clang llvm llvm-dev lld lldb-dev krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio ninja
else
echo "Unsupported distro. distro: $ID"
exit 1
@@ -54,6 +54,7 @@ brew "openssl@3"
brew "pkgconf"
brew "python3"
brew "pigz"
+brew "ninja"
EOF
;;
diff --git a/eng/common/post-build/nuget-verification.ps1 b/eng/common/post-build/nuget-verification.ps1
index ac5c69ff..eea88e65 100644
--- a/eng/common/post-build/nuget-verification.ps1
+++ b/eng/common/post-build/nuget-verification.ps1
@@ -65,7 +65,7 @@ if ($NuGetExePath) {
Write-Host "Downloading nuget.exe from $nugetExeUrl..."
$ProgressPreference = 'SilentlyContinue'
try {
- Invoke-WebRequest $nugetExeUrl -OutFile $downloadedNuGetExe
+ Invoke-WebRequest $nugetExeUrl -UseBasicParsing -OutFile $downloadedNuGetExe
$ProgressPreference = 'Continue'
} catch {
$ProgressPreference = 'Continue'
diff --git a/eng/common/post-build/redact-logs.ps1 b/eng/common/post-build/redact-logs.ps1
index fc0218a0..672f4e26 100644
--- a/eng/common/post-build/redact-logs.ps1
+++ b/eng/common/post-build/redact-logs.ps1
@@ -49,8 +49,8 @@ try {
Write-Host "Installing Binlog redactor CLI..."
Write-Host "'$dotnet' new tool-manifest"
& "$dotnet" new tool-manifest
- Write-Host "'$dotnet' tool install $packageName --local --add-source '$PackageFeed' -v $verbosity --version $BinlogToolVersion"
- & "$dotnet" tool install $packageName --local --add-source "$PackageFeed" -v $verbosity --version $BinlogToolVersion
+ Write-Host "'$dotnet' tool install $packageName --local --source '$PackageFeed' -v $verbosity --version $BinlogToolVersion"
+ & "$dotnet" tool install $packageName --local --source "$PackageFeed" -v $verbosity --version $BinlogToolVersion
if (Test-Path $TokensFilePath) {
Write-Host "Adding additional sensitive data for redaction from file: " $TokensFilePath
diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1
deleted file mode 100644
index 1976ef70..00000000
--- a/eng/common/post-build/sourcelink-validation.ps1
+++ /dev/null
@@ -1,327 +0,0 @@
-param(
- [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored
- [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
- [Parameter(Mandatory=$false)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade
- [Parameter(Mandatory=$false)][string] $GHCommit, # GitHub commit SHA used to build the packages
- [Parameter(Mandatory=$true)][string] $SourcelinkCliVersion # Version of SourceLink CLI to use
-)
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-
-# `tools.ps1` checks $ci to perform some actions. Since the post-build
-# scripts don't necessarily execute in the same agent that run the
-# build.ps1/sh script this variable isn't automatically set.
-$ci = $true
-$disableConfigureToolsetImport = $true
-. $PSScriptRoot\..\tools.ps1
-
-# Cache/HashMap (File -> Exist flag) used to consult whether a file exist
-# in the repository at a specific commit point. This is populated by inserting
-# all files present in the repo at a specific commit point.
-$global:RepoFiles = @{}
-
-# Maximum number of jobs to run in parallel
-$MaxParallelJobs = 16
-
-$MaxRetries = 5
-$RetryWaitTimeInSeconds = 30
-
-# Wait time between check for system load
-$SecondsBetweenLoadChecks = 10
-
-if (!$InputPath -or !(Test-Path $InputPath)){
- Write-Host "No files to validate."
- ExitWithExitCode 0
-}
-
-$ValidatePackage = {
- param(
- [string] $PackagePath # Full path to a Symbols.NuGet package
- )
-
- . $using:PSScriptRoot\..\tools.ps1
-
- # Ensure input file exist
- if (!(Test-Path $PackagePath)) {
- Write-Host "Input file does not exist: $PackagePath"
- return [pscustomobject]@{
- result = 1
- packagePath = $PackagePath
- }
- }
-
- # Extensions for which we'll look for SourceLink information
- # For now we'll only care about Portable & Embedded PDBs
- $RelevantExtensions = @('.dll', '.exe', '.pdb')
-
- Write-Host -NoNewLine 'Validating ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
-
- $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
- $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
- $FailedFiles = 0
-
- Add-Type -AssemblyName System.IO.Compression.FileSystem
-
- [System.IO.Directory]::CreateDirectory($ExtractPath) | Out-Null
-
- try {
- $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
-
- $zip.Entries |
- Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
- ForEach-Object {
- $FileName = $_.FullName
- $Extension = [System.IO.Path]::GetExtension($_.Name)
- $FakeName = -Join((New-Guid), $Extension)
- $TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName
-
- # We ignore resource DLLs
- if ($FileName.EndsWith('.resources.dll')) {
- return [pscustomobject]@{
- result = 0
- packagePath = $PackagePath
- }
- }
-
- [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
-
- $ValidateFile = {
- param(
- [string] $FullPath, # Full path to the module that has to be checked
- [string] $RealPath,
- [ref] $FailedFiles
- )
-
- $sourcelinkExe = "$env:USERPROFILE\.dotnet\tools"
- $sourcelinkExe = Resolve-Path "$sourcelinkExe\sourcelink.exe"
- $SourceLinkInfos = & $sourcelinkExe print-urls $FullPath | Out-String
-
- if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) {
- $NumFailedLinks = 0
-
- # We only care about Http addresses
- $Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches
-
- if ($Matches.Count -ne 0) {
- $Matches.Value |
- ForEach-Object {
- $Link = $_
- $CommitUrl = "https://raw.githubusercontent.com/${using:GHRepoName}/${using:GHCommit}/"
-
- $FilePath = $Link.Replace($CommitUrl, "")
- $Status = 200
- $Cache = $using:RepoFiles
-
- $attempts = 0
-
- while ($attempts -lt $using:MaxRetries) {
- if ( !($Cache.ContainsKey($FilePath)) ) {
- try {
- $Uri = $Link -as [System.URI]
-
- if ($Link -match "submodules") {
- # Skip submodule links until sourcelink properly handles submodules
- $Status = 200
- }
- elseif ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
- # Only GitHub links are valid
- $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
- }
- else {
- # If it's not a github link, we want to break out of the loop and not retry.
- $Status = 0
- $attempts = $using:MaxRetries
- }
- }
- catch {
- Write-Host $_
- $Status = 0
- }
- }
-
- if ($Status -ne 200) {
- $attempts++
-
- if ($attempts -lt $using:MaxRetries)
- {
- $attemptsLeft = $using:MaxRetries - $attempts
- Write-Warning "Download failed, $attemptsLeft attempts remaining, will retry in $using:RetryWaitTimeInSeconds seconds"
- Start-Sleep -Seconds $using:RetryWaitTimeInSeconds
- }
- else {
- if ($NumFailedLinks -eq 0) {
- if ($FailedFiles.Value -eq 0) {
- Write-Host
- }
-
- Write-Host "`tFile $RealPath has broken links:"
- }
-
- Write-Host "`t`tFailed to retrieve $Link"
-
- $NumFailedLinks++
- }
- }
- else {
- break
- }
- }
- }
- }
-
- if ($NumFailedLinks -ne 0) {
- $FailedFiles.value++
- $global:LASTEXITCODE = 1
- }
- }
- }
-
- &$ValidateFile $TargetFile $FileName ([ref]$FailedFiles)
- }
- }
- catch {
- Write-Host $_
- }
- finally {
- $zip.Dispose()
- }
-
- if ($FailedFiles -eq 0) {
- Write-Host 'Passed.'
- return [pscustomobject]@{
- result = 0
- packagePath = $PackagePath
- }
- }
- else {
- Write-PipelineTelemetryError -Category 'SourceLink' -Message "$PackagePath has broken SourceLink links."
- return [pscustomobject]@{
- result = 1
- packagePath = $PackagePath
- }
- }
-}
-
-function CheckJobResult(
- $result,
- $packagePath,
- [ref]$ValidationFailures,
- [switch]$logErrors) {
- if ($result -ne '0') {
- if ($logErrors) {
- Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links."
- }
- $ValidationFailures.Value++
- }
-}
-
-function ValidateSourceLinkLinks {
- if ($GHRepoName -ne '' -and !($GHRepoName -Match '^[^\s\/]+/[^\s\/]+$')) {
- if (!($GHRepoName -Match '^[^\s-]+-[^\s]+$')) {
- Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHRepoName should be in the format / or -. '$GHRepoName'"
- ExitWithExitCode 1
- }
- else {
- $GHRepoName = $GHRepoName -replace '^([^\s-]+)-([^\s]+)$', '$1/$2';
- }
- }
-
- if ($GHCommit -ne '' -and !($GHCommit -Match '^[0-9a-fA-F]{40}$')) {
- Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'"
- ExitWithExitCode 1
- }
-
- if ($GHRepoName -ne '' -and $GHCommit -ne '') {
- $RepoTreeURL = -Join('http://api.github.com/repos/', $GHRepoName, '/git/trees/', $GHCommit, '?recursive=1')
- $CodeExtensions = @('.cs', '.vb', '.fs', '.fsi', '.fsx', '.fsscript')
-
- try {
- # Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash
- $Data = Invoke-WebRequest $RepoTreeURL -UseBasicParsing | ConvertFrom-Json | Select-Object -ExpandProperty tree
-
- foreach ($file in $Data) {
- $Extension = [System.IO.Path]::GetExtension($file.path)
-
- if ($CodeExtensions.Contains($Extension)) {
- $RepoFiles[$file.path] = 1
- }
- }
- }
- catch {
- Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL . Execution will proceed without caching."
- }
- }
- elseif ($GHRepoName -ne '' -or $GHCommit -ne '') {
- Write-Host 'For using the http caching mechanism both GHRepoName and GHCommit should be informed.'
- }
-
- if (Test-Path $ExtractPath) {
- Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
- }
-
- $ValidationFailures = 0
-
- # Process each NuGet package in parallel
- Get-ChildItem "$InputPath\*.symbols.nupkg" |
- ForEach-Object {
- Write-Host "Starting $($_.FullName)"
- Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null
- $NumJobs = @(Get-Job -State 'Running').Count
-
- while ($NumJobs -ge $MaxParallelJobs) {
- Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again."
- sleep $SecondsBetweenLoadChecks
- $NumJobs = @(Get-Job -State 'Running').Count
- }
-
- foreach ($Job in @(Get-Job -State 'Completed')) {
- $jobResult = Wait-Job -Id $Job.Id | Receive-Job
- CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) -LogErrors
- Remove-Job -Id $Job.Id
- }
- }
-
- foreach ($Job in @(Get-Job)) {
- $jobResult = Wait-Job -Id $Job.Id | Receive-Job
- CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures)
- Remove-Job -Id $Job.Id
- }
- if ($ValidationFailures -gt 0) {
- Write-PipelineTelemetryError -Category 'SourceLink' -Message "$ValidationFailures package(s) failed validation."
- ExitWithExitCode 1
- }
-}
-
-function InstallSourcelinkCli {
- $sourcelinkCliPackageName = 'sourcelink'
-
- $dotnetRoot = InitializeDotNetCli -install:$true
- $dotnet = "$dotnetRoot\dotnet.exe"
- $toolList = & "$dotnet" tool list --global
-
- if (($toolList -like "*$sourcelinkCliPackageName*") -and ($toolList -like "*$sourcelinkCliVersion*")) {
- Write-Host "SourceLink CLI version $sourcelinkCliVersion is already installed."
- }
- else {
- Write-Host "Installing SourceLink CLI version $sourcelinkCliVersion..."
- Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
- & "$dotnet" tool install $sourcelinkCliPackageName --version $sourcelinkCliVersion --verbosity "minimal" --global
- }
-}
-
-try {
- InstallSourcelinkCli
-
- foreach ($Job in @(Get-Job)) {
- Remove-Job -Id $Job.Id
- }
-
- ValidateSourceLinkLinks
-}
-catch {
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Category 'SourceLink' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/renovate.env b/eng/common/renovate.env
new file mode 100644
index 00000000..17ecc05d
--- /dev/null
+++ b/eng/common/renovate.env
@@ -0,0 +1,42 @@
+# Renovate Global Configuration
+# https://docs.renovatebot.com/self-hosted-configuration/
+#
+# NOTE: This file uses bash/shell format and is sourced via `. renovate.env`.
+# Values containing spaces or special characters must be quoted.
+
+# Author to use for git commits made by Renovate
+# https://docs.renovatebot.com/configuration-options/#gitauthor
+export RENOVATE_GIT_AUTHOR='.NET Renovate '
+
+# Disable rate limiting for PR creation (0 = unlimited)
+# https://docs.renovatebot.com/presets-default/#prhourlylimitnone
+# https://docs.renovatebot.com/presets-default/#prconcurrentlimitnone
+export RENOVATE_PR_HOURLY_LIMIT=0
+export RENOVATE_PR_CONCURRENT_LIMIT=0
+
+# Skip the onboarding PR that Renovate normally creates for new repos
+# https://docs.renovatebot.com/config-overview/#onboarding
+export RENOVATE_ONBOARDING=false
+
+# Any Renovate config file in the cloned repository is ignored. Only
+# the Renovate config file from the repo where the pipeline is running
+# is used (yes, those are the same repo but the sources may be different).
+# https://docs.renovatebot.com/self-hosted-configuration/#requireconfig
+export RENOVATE_REQUIRE_CONFIG=ignored
+
+# Customize the PR body content. This removes some of the default
+# sections that aren't relevant in a self-hosted config.
+# https://docs.renovatebot.com/configuration-options/#prheader
+# https://docs.renovatebot.com/configuration-options/#prbodynotes
+# https://docs.renovatebot.com/configuration-options/#prbodytemplate
+export RENOVATE_PR_HEADER='## Automated Dependency Update'
+export RENOVATE_PR_BODY_NOTES='["This PR has been created automatically by the [.NET Renovate Bot](https://github.com/dotnet/arcade/blob/main/Documentation/Renovate.md) to update one or more dependencies in your repo. Please review the changes and merge the PR if everything looks good."]'
+export RENOVATE_PR_BODY_TEMPLATE='{{{header}}}{{{table}}}{{{warnings}}}{{{notes}}}{{{changelogs}}}'
+
+# Extend the global config with additional presets
+# https://docs.renovatebot.com/self-hosted-configuration/#globalextends
+# Disable the Dependency Dashboard issue that tracks all updates
+export RENOVATE_GLOBAL_EXTENDS='[":disableDependencyDashboard"]'
+
+# Allow all commands for post-upgrade commands.
+export RENOVATE_ALLOWED_COMMANDS='[".*"]'
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
index b64b66a6..64fd2f8a 100644
--- a/eng/common/sdk-task.ps1
+++ b/eng/common/sdk-task.ps1
@@ -66,20 +66,7 @@ try {
if( $msbuildEngine -eq "vs") {
# Ensure desktop MSBuild is available for sdk tasks.
- if( -not ($GlobalJson.tools.PSObject.Properties.Name -contains "vs" )) {
- $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
- }
- if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
- $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "18.0.0" -MemberType NoteProperty
- }
- if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
- $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
- }
- if ($xcopyMSBuildToolsFolder -eq $null) {
- throw 'Unable to get xcopy downloadable version of msbuild'
- }
-
- $global:_MSBuildExe = "$($xcopyMSBuildToolsFolder)\MSBuild\Current\Bin\MSBuild.exe"
+ $global:_MSBuildExe = InitializeVisualStudioMSBuild
}
$taskProject = GetSdkTaskProject $task
diff --git a/eng/common/sdl/NuGet.config b/eng/common/sdl/NuGet.config
deleted file mode 100644
index 3849bdb3..00000000
--- a/eng/common/sdl/NuGet.config
+++ /dev/null
@@ -1,18 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/eng/common/sdl/configure-sdl-tool.ps1 b/eng/common/sdl/configure-sdl-tool.ps1
deleted file mode 100644
index 27f5a411..00000000
--- a/eng/common/sdl/configure-sdl-tool.ps1
+++ /dev/null
@@ -1,130 +0,0 @@
-Param(
- [string] $GuardianCliLocation,
- [string] $WorkingDirectory,
- [string] $TargetDirectory,
- [string] $GdnFolder,
- # The list of Guardian tools to configure. For each object in the array:
- # - If the item is a [hashtable], it must contain these entries:
- # - Name = The tool name as Guardian knows it.
- # - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique
- # among all tool entries with the same Name.
- # - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")'
- # - If the item is a [string] $v, it is treated as '@{ Name="$v" }'
- [object[]] $ToolsList,
- [string] $GuardianLoggerLevel='Standard',
- # Optional: Additional params to add to any tool using CredScan.
- [string[]] $CrScanAdditionalRunConfigParams,
- # Optional: Additional params to add to any tool using PoliCheck.
- [string[]] $PoliCheckAdditionalRunConfigParams,
- # Optional: Additional params to add to any tool using CodeQL/Semmle.
- [string[]] $CodeQLAdditionalRunConfigParams,
- # Optional: Additional params to add to any tool using Binskim.
- [string[]] $BinskimAdditionalRunConfigParams
-)
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-$disableConfigureToolsetImport = $true
-$global:LASTEXITCODE = 0
-
-try {
- # `tools.ps1` checks $ci to perform some actions. Since the SDL
- # scripts don't necessarily execute in the same agent that run the
- # build.ps1/sh script this variable isn't automatically set.
- $ci = $true
- . $PSScriptRoot\..\tools.ps1
-
- # Normalize tools list: all in [hashtable] form with defined values for each key.
- $ToolsList = $ToolsList |
- ForEach-Object {
- if ($_ -is [string]) {
- $_ = @{ Name = $_ }
- }
-
- if (-not ($_['Scenario'])) { $_.Scenario = "" }
- if (-not ($_['Args'])) { $_.Args = @() }
- $_
- }
-
- Write-Host "List of tools to configure:"
- $ToolsList | ForEach-Object { $_ | Out-String | Write-Host }
-
- # We store config files in the r directory of .gdn
- $gdnConfigPath = Join-Path $GdnFolder 'r'
- $ValidPath = Test-Path $GuardianCliLocation
-
- if ($ValidPath -eq $False)
- {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
- ExitWithExitCode 1
- }
-
- foreach ($tool in $ToolsList) {
- # Put together the name and scenario to make a unique key.
- $toolConfigName = $tool.Name
- if ($tool.Scenario) {
- $toolConfigName += "_" + $tool.Scenario
- }
-
- Write-Host "=== Configuring $toolConfigName..."
-
- $gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig"
-
- # For some tools, add default and automatic args.
- switch -Exact ($tool.Name) {
- 'credscan' {
- if ($targetDirectory) {
- $tool.Args += "`"TargetDirectory < $TargetDirectory`""
- }
- $tool.Args += "`"OutputType < pre`""
- $tool.Args += $CrScanAdditionalRunConfigParams
- }
- 'policheck' {
- if ($targetDirectory) {
- $tool.Args += "`"Target < $TargetDirectory`""
- }
- $tool.Args += $PoliCheckAdditionalRunConfigParams
- }
- {$_ -in 'semmle', 'codeql'} {
- if ($targetDirectory) {
- $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`""
- }
- $tool.Args += $CodeQLAdditionalRunConfigParams
- }
- 'binskim' {
- if ($targetDirectory) {
- # Binskim crashes due to specific PDBs. GitHub issue: https://github.com/microsoft/binskim/issues/924.
- # We are excluding all `_.pdb` files from the scan.
- $tool.Args += "`"Target < $TargetDirectory\**;-:file|$TargetDirectory\**\_.pdb`""
- }
- $tool.Args += $BinskimAdditionalRunConfigParams
- }
- }
-
- # Create variable pointing to the args array directly so we can use splat syntax later.
- $toolArgs = $tool.Args
-
- # Configure the tool. If args array is provided or the current tool has some default arguments
- # defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}",
- # one per parameter. Doc page for "guardian configure":
- # https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure
- Exec-BlockVerbosely {
- & $GuardianCliLocation configure `
- --working-directory $WorkingDirectory `
- --tool $tool.Name `
- --output-path $gdnConfigFile `
- --logger-level $GuardianLoggerLevel `
- --noninteractive `
- --force `
- $(if ($toolArgs) { "--args" }) @toolArgs
- Exit-IfNZEC "Sdl"
- }
-
- Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile"
- }
-}
-catch {
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1
deleted file mode 100644
index 4715d75e..00000000
--- a/eng/common/sdl/execute-all-sdl-tools.ps1
+++ /dev/null
@@ -1,167 +0,0 @@
-Param(
- [string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified)
- [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified)
- [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified
- [string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade)
- [string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master
- [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located
- [string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located
- [string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault
-
- # Optional: list of SDL tools to run on source code. See 'configure-sdl-tool.ps1' for tools list
- # format.
- [object[]] $SourceToolsList,
- # Optional: list of SDL tools to run on built artifacts. See 'configure-sdl-tool.ps1' for tools
- # list format.
- [object[]] $ArtifactToolsList,
- # Optional: list of SDL tools to run without automatically specifying a target directory. See
- # 'configure-sdl-tool.ps1' for tools list format.
- [object[]] $CustomToolsList,
-
- [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
- [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
- [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber)
- [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed
- [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs.
- [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs.
- [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
- [string] $GuardianLoggerLevel='Standard', # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error
- [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
- [string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
- [string[]] $CodeQLAdditionalRunConfigParams, # Optional: Additional Params to custom build a Semmle/CodeQL run config in the format @("xyz < abc","sdf < 1")
- [string[]] $BinskimAdditionalRunConfigParams, # Optional: Additional Params to custom build a Binskim run config in the format @("xyz < abc","sdf < 1")
- [bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run
-)
-
-try {
- $ErrorActionPreference = 'Stop'
- Set-StrictMode -Version 2.0
- $disableConfigureToolsetImport = $true
- $global:LASTEXITCODE = 0
-
- # `tools.ps1` checks $ci to perform some actions. Since the SDL
- # scripts don't necessarily execute in the same agent that run the
- # build.ps1/sh script this variable isn't automatically set.
- $ci = $true
- . $PSScriptRoot\..\tools.ps1
-
- #Replace repo names to the format of org/repo
- if (!($Repository.contains('/'))) {
- $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2';
- }
- else{
- $RepoName = $Repository;
- }
-
- if ($GuardianPackageName) {
- $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path 'tools' 'guardian.cmd'))
- } else {
- $guardianCliLocation = $GuardianCliLocation
- }
-
- $workingDirectory = (Split-Path $SourceDirectory -Parent)
- $ValidPath = Test-Path $guardianCliLocation
-
- if ($ValidPath -eq $False)
- {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Invalid Guardian CLI Location.'
- ExitWithExitCode 1
- }
-
- Exec-BlockVerbosely {
- & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
- }
- $gdnFolder = Join-Path $workingDirectory '.gdn'
-
- if ($TsaOnboard) {
- if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
- Exec-BlockVerbosely {
- & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
- }
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
- } else {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not onboard to TSA -- not all required values ($TsaCodebaseName, $TsaNotificationEmail, $TsaCodebaseAdmin, $TsaBugAreaPath) were specified.'
- ExitWithExitCode 1
- }
- }
-
- # Configure a list of tools with a default target directory. Populates the ".gdn/r" directory.
- function Configure-ToolsList([object[]] $tools, [string] $targetDirectory) {
- if ($tools -and $tools.Count -gt 0) {
- Exec-BlockVerbosely {
- & $(Join-Path $PSScriptRoot 'configure-sdl-tool.ps1') `
- -GuardianCliLocation $guardianCliLocation `
- -WorkingDirectory $workingDirectory `
- -TargetDirectory $targetDirectory `
- -GdnFolder $gdnFolder `
- -ToolsList $tools `
- -AzureDevOpsAccessToken $AzureDevOpsAccessToken `
- -GuardianLoggerLevel $GuardianLoggerLevel `
- -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams `
- -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams `
- -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams `
- -BinskimAdditionalRunConfigParams $BinskimAdditionalRunConfigParams
- if ($BreakOnFailure) {
- Exit-IfNZEC "Sdl"
- }
- }
- }
- }
-
- # Configure Artifact and Source tools with default Target directories.
- Configure-ToolsList $ArtifactToolsList $ArtifactsDirectory
- Configure-ToolsList $SourceToolsList $SourceDirectory
- # Configure custom tools with no default Target directory.
- Configure-ToolsList $CustomToolsList $null
-
- # At this point, all tools are configured in the ".gdn" directory. Run them all in a single call.
- # (If we used "run" multiple times, each run would overwrite data from earlier runs.)
- Exec-BlockVerbosely {
- & $(Join-Path $PSScriptRoot 'run-sdl.ps1') `
- -GuardianCliLocation $guardianCliLocation `
- -WorkingDirectory $SourceDirectory `
- -UpdateBaseline $UpdateBaseline `
- -GdnFolder $gdnFolder
- }
-
- if ($TsaPublish) {
- if ($TsaBranchName -and $BuildNumber) {
- if (-not $TsaRepositoryName) {
- $TsaRepositoryName = "$($Repository)-$($BranchName)"
- }
- Exec-BlockVerbosely {
- & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
- }
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
- } else {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not publish to TSA -- not all required values ($TsaBranchName, $BuildNumber) were specified.'
- ExitWithExitCode 1
- }
- }
-
- if ($BreakOnFailure) {
- Write-Host "Failing the build in case of breaking results..."
- Exec-BlockVerbosely {
- & $guardianCliLocation break --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
- }
- } else {
- Write-Host "Letting the build pass even if there were breaking results..."
- }
-}
-catch {
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- exit 1
-}
diff --git a/eng/common/sdl/extract-artifact-archives.ps1 b/eng/common/sdl/extract-artifact-archives.ps1
deleted file mode 100644
index 68da4fbf..00000000
--- a/eng/common/sdl/extract-artifact-archives.ps1
+++ /dev/null
@@ -1,63 +0,0 @@
-# This script looks for each archive file in a directory and extracts it into the target directory.
-# For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**".
-# Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip.
-param(
- # Full path to directory where archives are stored.
- [Parameter(Mandatory=$true)][string] $InputPath,
- # Full path to directory to extract archives into. May be the same as $InputPath.
- [Parameter(Mandatory=$true)][string] $ExtractPath
-)
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-
-$disableConfigureToolsetImport = $true
-
-try {
- # `tools.ps1` checks $ci to perform some actions. Since the SDL
- # scripts don't necessarily execute in the same agent that run the
- # build.ps1/sh script this variable isn't automatically set.
- $ci = $true
- . $PSScriptRoot\..\tools.ps1
-
- Measure-Command {
- $jobs = @()
-
- # Find archive files for non-Windows and Windows builds.
- $archiveFiles = @(
- Get-ChildItem (Join-Path $InputPath "*.tar.gz")
- Get-ChildItem (Join-Path $InputPath "*.zip")
- )
-
- foreach ($targzFile in $archiveFiles) {
- $jobs += Start-Job -ScriptBlock {
- $file = $using:targzFile
- $fileName = [System.IO.Path]::GetFileName($file)
- $extractDir = Join-Path $using:ExtractPath "$fileName.extracted"
-
- New-Item $extractDir -ItemType Directory -Force | Out-Null
-
- Write-Host "Extracting '$file' to '$extractDir'..."
-
- # Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early.
- # This type of quit skips the catch, so we wouldn't be able to tell which file triggered the
- # error. Save output so it can be stored in the exception string along with context.
- $output = tar -xf $file -C $extractDir 2>&1
- # Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we
- # don't have access to the outer scope.
- if ($LASTEXITCODE -ne 0) {
- throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'"
- }
-
- Write-Host "Extracted to $extractDir"
- }
- }
-
- Receive-Job $jobs -Wait
- }
-}
-catch {
- Write-Host $_
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/sdl/extract-artifact-packages.ps1 b/eng/common/sdl/extract-artifact-packages.ps1
deleted file mode 100644
index f031ed5b..00000000
--- a/eng/common/sdl/extract-artifact-packages.ps1
+++ /dev/null
@@ -1,82 +0,0 @@
-param(
- [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where artifact packages are stored
- [Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted
-)
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-
-$disableConfigureToolsetImport = $true
-
-function ExtractArtifacts {
- if (!(Test-Path $InputPath)) {
- Write-Host "Input Path does not exist: $InputPath"
- ExitWithExitCode 0
- }
- $Jobs = @()
- Get-ChildItem "$InputPath\*.nupkg" |
- ForEach-Object {
- $Jobs += Start-Job -ScriptBlock $ExtractPackage -ArgumentList $_.FullName
- }
-
- foreach ($Job in $Jobs) {
- Wait-Job -Id $Job.Id | Receive-Job
- }
-}
-
-try {
- # `tools.ps1` checks $ci to perform some actions. Since the SDL
- # scripts don't necessarily execute in the same agent that run the
- # build.ps1/sh script this variable isn't automatically set.
- $ci = $true
- . $PSScriptRoot\..\tools.ps1
-
- $ExtractPackage = {
- param(
- [string] $PackagePath # Full path to a NuGet package
- )
-
- if (!(Test-Path $PackagePath)) {
- Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath"
- ExitWithExitCode 1
- }
-
- $RelevantExtensions = @('.dll', '.exe', '.pdb')
- Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
-
- $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
- $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
-
- Add-Type -AssemblyName System.IO.Compression.FileSystem
-
- [System.IO.Directory]::CreateDirectory($ExtractPath);
-
- try {
- $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
-
- $zip.Entries |
- Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
- ForEach-Object {
- $TargetPath = Join-Path -Path $ExtractPath -ChildPath (Split-Path -Path $_.FullName)
- [System.IO.Directory]::CreateDirectory($TargetPath);
-
- $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.FullName
- [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile)
- }
- }
- catch {
- Write-Host $_
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
- }
- finally {
- $zip.Dispose()
- }
- }
- Measure-Command { ExtractArtifacts }
-}
-catch {
- Write-Host $_
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1
deleted file mode 100644
index 3ac1d92b..00000000
--- a/eng/common/sdl/init-sdl.ps1
+++ /dev/null
@@ -1,55 +0,0 @@
-Param(
- [string] $GuardianCliLocation,
- [string] $Repository,
- [string] $BranchName='master',
- [string] $WorkingDirectory,
- [string] $AzureDevOpsAccessToken,
- [string] $GuardianLoggerLevel='Standard'
-)
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-$disableConfigureToolsetImport = $true
-$global:LASTEXITCODE = 0
-
-# `tools.ps1` checks $ci to perform some actions. Since the SDL
-# scripts don't necessarily execute in the same agent that run the
-# build.ps1/sh script this variable isn't automatically set.
-$ci = $true
-. $PSScriptRoot\..\tools.ps1
-
-# Don't display the console progress UI - it's a huge perf hit
-$ProgressPreference = 'SilentlyContinue'
-
-# Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file
-$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken"))
-$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn")
-$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0"
-$zipFile = "$WorkingDirectory/gdn.zip"
-
-Add-Type -AssemblyName System.IO.Compression.FileSystem
-$gdnFolder = (Join-Path $WorkingDirectory '.gdn')
-
-try {
- # if the folder does not exist, we'll do a guardian init and push it to the remote repository
- Write-Host 'Initializing Guardian...'
- Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel"
- & $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian init failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
- # We create the mainbaseline so it can be edited later
- Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline"
- & $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
- ExitWithExitCode 0
-}
-catch {
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config
deleted file mode 100644
index e5f543ea..00000000
--- a/eng/common/sdl/packages.config
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-
-
diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1
deleted file mode 100644
index 2eac8c78..00000000
--- a/eng/common/sdl/run-sdl.ps1
+++ /dev/null
@@ -1,49 +0,0 @@
-Param(
- [string] $GuardianCliLocation,
- [string] $WorkingDirectory,
- [string] $GdnFolder,
- [string] $UpdateBaseline,
- [string] $GuardianLoggerLevel='Standard'
-)
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-$disableConfigureToolsetImport = $true
-$global:LASTEXITCODE = 0
-
-try {
- # `tools.ps1` checks $ci to perform some actions. Since the SDL
- # scripts don't necessarily execute in the same agent that run the
- # build.ps1/sh script this variable isn't automatically set.
- $ci = $true
- . $PSScriptRoot\..\tools.ps1
-
- # We store config files in the r directory of .gdn
- $gdnConfigPath = Join-Path $GdnFolder 'r'
- $ValidPath = Test-Path $GuardianCliLocation
-
- if ($ValidPath -eq $False)
- {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
- ExitWithExitCode 1
- }
-
- $gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig'
- Write-Host "Discovered Guardian config files:"
- $gdnConfigFiles | Out-String | Write-Host
-
- Exec-BlockVerbosely {
- & $GuardianCliLocation run `
- --working-directory $WorkingDirectory `
- --baseline mainbaseline `
- --update-baseline $UpdateBaseline `
- --logger-level $GuardianLoggerLevel `
- --config @gdnConfigFiles
- Exit-IfNZEC "Sdl"
- }
-}
-catch {
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/sdl/sdl.ps1 b/eng/common/sdl/sdl.ps1
deleted file mode 100644
index 648c5068..00000000
--- a/eng/common/sdl/sdl.ps1
+++ /dev/null
@@ -1,38 +0,0 @@
-
-function Install-Gdn {
- param(
- [Parameter(Mandatory=$true)]
- [string]$Path,
-
- # If omitted, install the latest version of Guardian, otherwise install that specific version.
- [string]$Version
- )
-
- $ErrorActionPreference = 'Stop'
- Set-StrictMode -Version 2.0
- $disableConfigureToolsetImport = $true
- $global:LASTEXITCODE = 0
-
- # `tools.ps1` checks $ci to perform some actions. Since the SDL
- # scripts don't necessarily execute in the same agent that run the
- # build.ps1/sh script this variable isn't automatically set.
- $ci = $true
- . $PSScriptRoot\..\tools.ps1
-
- $argumentList = @("install", "Microsoft.Guardian.Cli", "-Source https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json", "-OutputDirectory $Path", "-NonInteractive", "-NoCache")
-
- if ($Version) {
- $argumentList += "-Version $Version"
- }
-
- Start-Process nuget -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
-
- $gdnCliPath = Get-ChildItem -Filter guardian.cmd -Recurse -Path $Path
-
- if (!$gdnCliPath)
- {
- Write-PipelineTelemetryError -Category 'Sdl' -Message 'Failure installing Guardian'
- }
-
- return $gdnCliPath.FullName
-}
\ No newline at end of file
diff --git a/eng/common/sdl/trim-assets-version.ps1 b/eng/common/sdl/trim-assets-version.ps1
deleted file mode 100644
index 0daa2a9e..00000000
--- a/eng/common/sdl/trim-assets-version.ps1
+++ /dev/null
@@ -1,75 +0,0 @@
-<#
-.SYNOPSIS
-Install and run the 'Microsoft.DotNet.VersionTools.Cli' tool with the 'trim-artifacts-version' command to trim the version from the NuGet assets file name.
-
-.PARAMETER InputPath
-Full path to directory where artifact packages are stored
-
-.PARAMETER Recursive
-Search for NuGet packages recursively
-
-#>
-
-Param(
- [string] $InputPath,
- [bool] $Recursive = $true
-)
-
-$CliToolName = "Microsoft.DotNet.VersionTools.Cli"
-
-function Install-VersionTools-Cli {
- param(
- [Parameter(Mandatory=$true)][string]$Version
- )
-
- Write-Host "Installing the package '$CliToolName' with a version of '$version' ..."
- $feed = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"
-
- $argumentList = @("tool", "install", "--local", "$CliToolName", "--add-source $feed", "--no-cache", "--version $Version", "--create-manifest-if-needed")
- Start-Process "$dotnet" -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
-}
-
-# -------------------------------------------------------------------
-
-if (!(Test-Path $InputPath)) {
- Write-Host "Input Path '$InputPath' does not exist"
- ExitWithExitCode 1
-}
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-
-$disableConfigureToolsetImport = $true
-$global:LASTEXITCODE = 0
-
-# `tools.ps1` checks $ci to perform some actions. Since the SDL
-# scripts don't necessarily execute in the same agent that run the
-# build.ps1/sh script this variable isn't automatically set.
-$ci = $true
-. $PSScriptRoot\..\tools.ps1
-
-try {
- $dotnetRoot = InitializeDotNetCli -install:$true
- $dotnet = "$dotnetRoot\dotnet.exe"
-
- $toolsetVersion = Read-ArcadeSdkVersion
- Install-VersionTools-Cli -Version $toolsetVersion
-
- $cliToolFound = (& "$dotnet" tool list --local | Where-Object {$_.Split(' ')[0] -eq $CliToolName})
- if ($null -eq $cliToolFound) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "The '$CliToolName' tool is not installed."
- ExitWithExitCode 1
- }
-
- Exec-BlockVerbosely {
- & "$dotnet" $CliToolName trim-assets-version `
- --assets-path $InputPath `
- --recursive $Recursive
- Exit-IfNZEC "Sdl"
- }
-}
-catch {
- Write-Host $_
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/template-guidance.md b/eng/common/template-guidance.md
index 4bf4cf41..f772aa3d 100644
--- a/eng/common/template-guidance.md
+++ b/eng/common/template-guidance.md
@@ -71,7 +71,6 @@ eng\common\
source-build.yml (shim)
source-index-stage1.yml (shim)
jobs\
- codeql-build.yml (shim)
jobs.yml (shim)
source-build.yml (shim)
post-build\
@@ -82,14 +81,12 @@ eng\common\
publish-build-artifacts.yml (logic)
publish-pipeline-artifacts.yml (logic)
component-governance.yml (shim)
- generate-sbom.yml (shim)
publish-logs.yml (shim)
retain-build.yml (shim)
send-to-helix.yml (shim)
source-build.yml (shim)
variables\
pool-providers.yml (logic + redirect) # templates/variables/pool-providers.yml will redirect to templates-official/variables/pool-providers.yml if you are running in the internal project
- sdl-variables.yml (logic)
core-templates\
job\
job.yml (logic)
@@ -98,7 +95,6 @@ eng\common\
source-build.yml (logic)
source-index-stage1.yml (logic)
jobs\
- codeql-build.yml (logic)
jobs.yml (logic)
source-build.yml (logic)
post-build\
@@ -107,7 +103,6 @@ eng\common\
setup-maestro-vars.yml (logic)
steps\
component-governance.yml (logic)
- generate-sbom.yml (logic)
publish-build-artifacts.yml (redirect)
publish-logs.yml (logic)
publish-pipeline-artifacts.yml (redirect)
diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml
index 92a0664f..d68e9fbc 100644
--- a/eng/common/templates-official/job/job.yml
+++ b/eng/common/templates-official/job/job.yml
@@ -1,24 +1,15 @@
parameters:
-# Sbom related params
- enableSbom: true
runAsPublic: false
- PackageVersion: 9.0.0
- BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts'
+# Sbom related params, unused now and can eventually be removed
+ enableSbom: unused
+ PackageVersion: unused
+ BuildDropPath: unused
jobs:
- template: /eng/common/core-templates/job/job.yml
parameters:
is1ESPipeline: true
- componentGovernanceSteps:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- - template: /eng/common/templates/steps/generate-sbom.yml
- parameters:
- PackageVersion: ${{ parameters.packageVersion }}
- BuildDropPath: ${{ parameters.buildDropPath }}
- ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
- publishArtifacts: false
-
# publish artifacts
# for 1ES managed templates, use the templateContext.output to handle multiple outputs.
templateContext:
@@ -26,12 +17,19 @@ jobs:
outputs:
- ${{ if ne(parameters.artifacts.publish, '') }}:
- ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- - output: buildArtifacts
+ - output: pipelineArtifact
displayName: Publish pipeline artifacts
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
- ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
- condition: always()
- retryCountOnTaskFailure: 10 # for any logs being locked
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ condition: succeeded()
+ retryCountOnTaskFailure: 10 # for any files being locked
+ continueOnError: true
+ - output: pipelineArtifact
+ displayName: Publish pipeline artifacts
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}_Attempt$(System.JobAttempt)
+ condition: not(succeeded())
+ retryCountOnTaskFailure: 10 # for any files being locked
continueOnError: true
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- output: pipelineArtifact
@@ -40,18 +38,18 @@ jobs:
displayName: 'Publish logs'
continueOnError: true
condition: always()
- retryCountOnTaskFailure: 10 # for any logs being locked
- sbomEnabled: false # we don't need SBOM for logs
+ retryCountOnTaskFailure: 10 # for any files being locked
+ isProduction: false # logs are non-production artifacts
- ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}:
- - output: buildArtifacts
+ - output: pipelineArtifact
displayName: Publish Logs
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
- publishLocation: Container
- ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
continueOnError: true
condition: always()
- sbomEnabled: false # we don't need SBOM for logs
+ retryCountOnTaskFailure: 10 # for any files being locked
+ isProduction: false # logs are non-production artifacts
- ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- output: pipelineArtifact
@@ -59,14 +57,20 @@ jobs:
artifactName: 'BuildConfiguration'
displayName: 'Publish build retry configuration'
continueOnError: true
- sbomEnabled: false # we don't need SBOM for BuildConfiguration
+ retryCountOnTaskFailure: 10 # for any files being locked
+ isProduction: false # BuildConfiguration is a non-production artifact
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
+ # V4 publishing: automatically publish staged artifacts as a pipeline artifact.
+ # The artifact name matches the SDK's FutureArtifactName ($(System.PhaseName)_Artifacts),
+ # which is encoded in the asset manifest for downstream publishing to discover.
+ # Jobs can opt in by setting enablePublishing: true.
+ - ${{ if and(eq(parameters.publishingVersion, 4), eq(parameters.enablePublishing, 'true')) }}:
- output: pipelineArtifact
- displayName: Publish SBOM manifest
+ displayName: 'Publish V4 pipeline artifacts'
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
+ artifactName: '$(System.PhaseName)_Artifacts'
continueOnError: true
- targetPath: $(Build.ArtifactStagingDirectory)/sbom
- artifactName: $(ARTIFACT_NAME)
+ retryCountOnTaskFailure: 10 # for any files being locked
# add any outputs provided via root yaml
- ${{ if ne(parameters.templateContext.outputs, '') }}:
diff --git a/eng/common/templates-official/jobs/codeql-build.yml b/eng/common/templates-official/jobs/codeql-build.yml
deleted file mode 100644
index a726322e..00000000
--- a/eng/common/templates-official/jobs/codeql-build.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-jobs:
-- template: /eng/common/core-templates/jobs/codeql-build.yml
- parameters:
- is1ESPipeline: true
-
- ${{ each parameter in parameters }}:
- ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/component-governance.yml b/eng/common/templates-official/steps/component-governance.yml
deleted file mode 100644
index 30bb3985..00000000
--- a/eng/common/templates-official/steps/component-governance.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-steps:
-- template: /eng/common/core-templates/steps/component-governance.yml
- parameters:
- is1ESPipeline: true
-
- ${{ each parameter in parameters }}:
- ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/publish-pipeline-artifacts.yml b/eng/common/templates-official/steps/publish-pipeline-artifacts.yml
index 172f9f0f..9e598136 100644
--- a/eng/common/templates-official/steps/publish-pipeline-artifacts.yml
+++ b/eng/common/templates-official/steps/publish-pipeline-artifacts.yml
@@ -24,5 +24,7 @@ steps:
artifactName: ${{ parameters.args.artifactName }}
${{ if parameters.args.properties }}:
properties: ${{ parameters.args.properties }}
- ${{ if parameters.args.sbomEnabled }}:
+ ${{ if ne(parameters.args.sbomEnabled, '') }}:
sbomEnabled: ${{ parameters.args.sbomEnabled }}
+ ${{ if ne(parameters.args.isProduction, '') }}:
+ isProduction: ${{ parameters.args.isProduction }}
diff --git a/eng/common/templates-official/variables/pool-providers.yml b/eng/common/templates-official/variables/pool-providers.yml
index 1f308b24..2cc3ae30 100644
--- a/eng/common/templates-official/variables/pool-providers.yml
+++ b/eng/common/templates-official/variables/pool-providers.yml
@@ -23,7 +23,7 @@
#
# pool:
# name: $(DncEngInternalBuildPool)
-# image: 1es-windows-2022
+# image: windows.vs2026.amd64
variables:
# Coalesce the target and source branches so we know when a PR targets a release branch
diff --git a/eng/common/templates-official/variables/sdl-variables.yml b/eng/common/templates-official/variables/sdl-variables.yml
deleted file mode 100644
index f1311bbb..00000000
--- a/eng/common/templates-official/variables/sdl-variables.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-variables:
-# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
-# sync with the packages.config file.
-- name: DefaultGuardianVersion
- value: 0.109.0
-- name: GuardianPackagesConfigFile
- value: $(System.DefaultWorkingDirectory)\eng\common\sdl\packages.config
\ No newline at end of file
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index 238fa081..5e261f34 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -1,12 +1,12 @@
parameters:
enablePublishBuildArtifacts: false
- disableComponentGovernance: ''
- componentGovernanceIgnoreDirectories: ''
-# Sbom related params
- enableSbom: true
runAsPublic: false
- PackageVersion: 9.0.0
- BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts'
+# CG related params, unused now and can eventually be removed
+ disableComponentGovernance: unused
+# Sbom related params, unused now and can eventually be removed
+ enableSbom: unused
+ PackageVersion: unused
+ BuildDropPath: unused
jobs:
- template: /eng/common/core-templates/job/job.yml
@@ -21,32 +21,34 @@ jobs:
- ${{ each step in parameters.steps }}:
- ${{ step }}
- componentGovernanceSteps:
- - template: /eng/common/templates/steps/component-governance.yml
- parameters:
- ${{ if eq(parameters.disableComponentGovernance, '') }}:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
- disableComponentGovernance: false
- ${{ else }}:
- disableComponentGovernance: true
- ${{ else }}:
- disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
+ # we don't run CG in public
+ - ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
+ displayName: Set skipComponentGovernanceDetection variable
artifactPublishSteps:
- ${{ if ne(parameters.artifacts.publish, '') }}:
- ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: false
args:
displayName: Publish pipeline artifacts
- pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
- publishLocation: Container
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
continueOnError: true
- condition: always()
- retryCountOnTaskFailure: 10 # for any logs being locked
+ condition: succeeded()
+ retryCountOnTaskFailure: 10 # for any files being locked
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ displayName: Publish pipeline artifacts
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}_Attempt$(System.JobAttempt)
+ continueOnError: true
+ condition: not(succeeded())
+ retryCountOnTaskFailure: 10 # for any files being locked
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
@@ -57,20 +59,19 @@ jobs:
displayName: 'Publish logs'
continueOnError: true
condition: always()
- retryCountOnTaskFailure: 10 # for any logs being locked
- sbomEnabled: false # we don't need SBOM for logs
+ retryCountOnTaskFailure: 10 # for any files being locked
- ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: false
args:
displayName: Publish Logs
- pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
- publishLocation: Container
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
continueOnError: true
condition: always()
+ retryCountOnTaskFailure: 10 # for any files being locked
- ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
@@ -81,4 +82,4 @@ jobs:
artifactName: 'BuildConfiguration'
displayName: 'Publish build retry configuration'
continueOnError: true
- sbomEnabled: false # we don't need SBOM for BuildConfiguration
+ retryCountOnTaskFailure: 10 # for any files being locked
diff --git a/eng/common/templates/jobs/codeql-build.yml b/eng/common/templates/jobs/codeql-build.yml
deleted file mode 100644
index 517f24d6..00000000
--- a/eng/common/templates/jobs/codeql-build.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-jobs:
-- template: /eng/common/core-templates/jobs/codeql-build.yml
- parameters:
- is1ESPipeline: false
-
- ${{ each parameter in parameters }}:
- ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/component-governance.yml b/eng/common/templates/steps/component-governance.yml
deleted file mode 100644
index c12a5f8d..00000000
--- a/eng/common/templates/steps/component-governance.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-steps:
-- template: /eng/common/core-templates/steps/component-governance.yml
- parameters:
- is1ESPipeline: false
-
- ${{ each parameter in parameters }}:
- ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/vmr-sync.yml b/eng/common/templates/steps/vmr-sync.yml
index 599afb61..eb619c50 100644
--- a/eng/common/templates/steps/vmr-sync.yml
+++ b/eng/common/templates/steps/vmr-sync.yml
@@ -38,27 +38,6 @@ steps:
displayName: Label PR commit
workingDirectory: $(Agent.BuildDirectory)/repo
-- script: |
- vmr_sha=$(grep -oP '(?<=Sha=")[^"]*' $(Agent.BuildDirectory)/repo/eng/Version.Details.xml)
- echo "##vso[task.setvariable variable=vmr_sha]$vmr_sha"
- displayName: Obtain the vmr sha from Version.Details.xml (Unix)
- condition: ne(variables['Agent.OS'], 'Windows_NT')
- workingDirectory: $(Agent.BuildDirectory)/repo
-
-- powershell: |
- [xml]$xml = Get-Content -Path $(Agent.BuildDirectory)/repo/eng/Version.Details.xml
- $vmr_sha = $xml.SelectSingleNode("//Source").Sha
- Write-Output "##vso[task.setvariable variable=vmr_sha]$vmr_sha"
- displayName: Obtain the vmr sha from Version.Details.xml (Windows)
- condition: eq(variables['Agent.OS'], 'Windows_NT')
- workingDirectory: $(Agent.BuildDirectory)/repo
-
-- script: |
- git fetch --all
- git checkout $(vmr_sha)
- displayName: Checkout VMR at correct sha for repo flow
- workingDirectory: ${{ parameters.vmrPath }}
-
- script: |
git config --global user.name "dotnet-maestro[bot]"
git config --global user.email "dotnet-maestro[bot]@users.noreply.github.com"
diff --git a/eng/common/templates/variables/pool-providers.yml b/eng/common/templates/variables/pool-providers.yml
index e0b19c14..587770f0 100644
--- a/eng/common/templates/variables/pool-providers.yml
+++ b/eng/common/templates/variables/pool-providers.yml
@@ -23,7 +23,7 @@
#
# pool:
# name: $(DncEngInternalBuildPool)
-# demands: ImageOverride -equals windows.vs2019.amd64
+# demands: ImageOverride -equals windows.vs2026.amd64
variables:
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- template: /eng/common/templates-official/variables/pool-providers.yml
diff --git a/eng/common/templates/vmr-build-pr.yml b/eng/common/templates/vmr-build-pr.yml
index ce3c29a6..2f3694fa 100644
--- a/eng/common/templates/vmr-build-pr.yml
+++ b/eng/common/templates/vmr-build-pr.yml
@@ -34,6 +34,7 @@ resources:
type: github
name: dotnet/dotnet
endpoint: dotnet
+ ref: refs/heads/main # Set to whatever VMR branch the PR build should insert into
stages:
- template: /eng/pipelines/templates/stages/vmr-build.yml@vmr
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index 1556562c..b6787991 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -34,6 +34,9 @@
# Configures warning treatment in msbuild.
[bool]$warnAsError = if (Test-Path variable:warnAsError) { $warnAsError } else { $true }
+# Specifies semi-colon delimited list of warning codes that should not be treated as errors.
+[string]$warnNotAsError = if (Test-Path variable:warnNotAsError) { $warnNotAsError } else { '' }
+
# Specifies which msbuild engine to use for build: 'vs', 'dotnet' or unspecified (determined based on presence of tools.vs in global.json).
[string]$msbuildEngine = if (Test-Path variable:msbuildEngine) { $msbuildEngine } else { $null }
@@ -273,7 +276,7 @@ function GetDotNetInstallScript([string] $dotnetRoot) {
Retry({
Write-Host "GET $uri"
- Invoke-WebRequest $uri -OutFile $installScript
+ Invoke-WebRequest $uri -UseBasicParsing -OutFile $installScript
})
}
@@ -295,6 +298,8 @@ function InstallDotNet([string] $dotnetRoot,
$dotnetVersionLabel = "'sdk v$version'"
+ # For performance this check is duplicated in src/Microsoft.DotNet.Arcade.Sdk/src/InstallDotNetCore.cs
+ # if you are making changes here, consider if you need to make changes there as well.
if ($runtime -ne '' -and $runtime -ne 'sdk') {
$runtimePath = $dotnetRoot
$runtimePath = $runtimePath + "\shared"
@@ -370,12 +375,11 @@ function InstallDotNet([string] $dotnetRoot,
#
# 1. MSBuild from an active VS command prompt
# 2. MSBuild from a compatible VS installation
-# 3. MSBuild from the xcopy tool package
#
# Returns full path to msbuild.exe.
# Throws on failure.
#
-function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = $null) {
+function InitializeVisualStudioMSBuild([object]$vsRequirements = $null) {
if (-not (IsWindowsPlatform)) {
throw "Cannot initialize Visual Studio on non-Windows"
}
@@ -385,13 +389,7 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
}
# Minimum VS version to require.
- $vsMinVersionReqdStr = '17.7'
- $vsMinVersionReqd = [Version]::new($vsMinVersionReqdStr)
-
- # If the version of msbuild is going to be xcopied,
- # use this version. Version matches a package here:
- # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/18.0.0
- $defaultXCopyMSBuildVersion = '18.0.0'
+ $vsMinVersionReqdStr = '18.0'
if (!$vsRequirements) {
if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
@@ -421,46 +419,16 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
}
}
- # Locate Visual Studio installation or download x-copy msbuild.
+ # Locate Visual Studio installation.
$vsInfo = LocateVisualStudio $vsRequirements
- if ($vsInfo -ne $null -and $env:ForceUseXCopyMSBuild -eq $null) {
+ if ($vsInfo -ne $null) {
# Ensure vsInstallDir has a trailing slash
$vsInstallDir = Join-Path $vsInfo.installationPath "\"
$vsMajorVersion = $vsInfo.installationVersion.Split('.')[0]
InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion
} else {
- if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') {
- $xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild'
- $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
- } else {
- #if vs version provided in global.json is incompatible (too low) then use the default version for xcopy msbuild download
- if($vsMinVersion -lt $vsMinVersionReqd){
- Write-Host "Using xcopy-msbuild version of $defaultXCopyMSBuildVersion since VS version $vsMinVersionStr provided in global.json is not compatible"
- $xcopyMSBuildVersion = $defaultXCopyMSBuildVersion
- $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
- }
- else{
- # If the VS version IS compatible, look for an xcopy msbuild package
- # with a version matching VS.
- # Note: If this version does not exist, then an explicit version of xcopy msbuild
- # can be specified in global.json. This will be required for pre-release versions of msbuild.
- $vsMajorVersion = $vsMinVersion.Major
- $vsMinorVersion = $vsMinVersion.Minor
- $xcopyMSBuildVersion = "$vsMajorVersion.$vsMinorVersion.0"
- }
- }
-
- $vsInstallDir = $null
- if ($xcopyMSBuildVersion.Trim() -ine "none") {
- $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
- if ($vsInstallDir -eq $null) {
- throw "Could not xcopy msbuild. Please check that package 'Microsoft.DotNet.Arcade.MSBuild.Xcopy @ $xcopyMSBuildVersion' exists on feed 'dotnet-eng'."
- }
- }
- if ($vsInstallDir -eq $null) {
- throw 'Unable to find Visual Studio that has required version and components installed'
- }
+ throw 'Unable to find Visual Studio that has required version and components installed'
}
$msbuildVersionDir = if ([int]$vsMajorVersion -lt 16) { "$vsMajorVersion.0" } else { "Current" }
@@ -487,38 +455,6 @@ function InitializeVisualStudioEnvironmentVariables([string] $vsInstallDir, [str
}
}
-function InstallXCopyMSBuild([string]$packageVersion) {
- return InitializeXCopyMSBuild $packageVersion -install $true
-}
-
-function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
- $packageName = 'Microsoft.DotNet.Arcade.MSBuild.Xcopy'
- $packageDir = Join-Path $ToolsDir "msbuild\$packageVersion"
- $packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg"
-
- if (!(Test-Path $packageDir)) {
- if (!$install) {
- return $null
- }
-
- Create-Directory $packageDir
-
- Write-Host "Downloading $packageName $packageVersion"
- $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
- Retry({
- Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath
- })
-
- if (!(Test-Path $packagePath)) {
- Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "See https://dev.azure.com/dnceng/internal/_wiki/wikis/DNCEng%20Services%20Wiki/1074/Updating-Microsoft.DotNet.Arcade.MSBuild.Xcopy-WAS-RoslynTools.MSBuild-(xcopy-msbuild)-generation?anchor=troubleshooting for help troubleshooting issues with XCopy MSBuild"
- throw
- }
- Unzip $packagePath $packageDir
- }
-
- return Join-Path $packageDir 'tools'
-}
-
#
# Locates Visual Studio instance that meets the minimal requirements specified by tools.vs object in global.json.
#
@@ -552,23 +488,30 @@ function LocateVisualStudio([object]$vsRequirements = $null){
Write-Host "Downloading vswhere $vswhereVersion"
$ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
Retry({
- Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
+ Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -UseBasicParsing -OutFile $vswhereExe
})
}
- if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
+ if (!$vsRequirements) {
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'vs' -ErrorAction SilentlyContinue) {
+ $vsRequirements = $GlobalJson.tools.vs
+ } else {
+ $vsRequirements = $null
+ }
+ }
+
$args = @('-latest', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*')
if (!$excludePrereleaseVS) {
$args += '-prerelease'
}
- if (Get-Member -InputObject $vsRequirements -Name 'version') {
+ if ($vsRequirements -and (Get-Member -InputObject $vsRequirements -Name 'version' -ErrorAction SilentlyContinue)) {
$args += '-version'
$args += $vsRequirements.version
}
- if (Get-Member -InputObject $vsRequirements -Name 'components') {
+ if ($vsRequirements -and (Get-Member -InputObject $vsRequirements -Name 'components' -ErrorAction SilentlyContinue)) {
foreach ($component in $vsRequirements.components) {
$args += '-requires'
$args += $component
@@ -581,6 +524,11 @@ function LocateVisualStudio([object]$vsRequirements = $null){
return $null
}
+ if ($null -eq $vsInfo -or $vsInfo.Count -eq 0) {
+ throw "No instance of Visual Studio meeting the requirements specified was found. Requirements: $($args -join ' ')"
+ return $null
+ }
+
# use first matching instance
return $vsInfo[0]
}
@@ -616,7 +564,7 @@ function InitializeBuildTool() {
$buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net' }
} elseif ($msbuildEngine -eq "vs") {
try {
- $msbuildPath = InitializeVisualStudioMSBuild -install:$restore
+ $msbuildPath = InitializeVisualStudioMSBuild
} catch {
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
ExitWithExitCode 1
@@ -813,6 +761,11 @@ function MSBuild-Core() {
$cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"
+ # Add -mt flag for MSBuild multithreaded mode if enabled via environment variable
+ if ($env:MSBUILD_MT_ENABLED -eq "1") {
+ $cmdArgs += ' -mt'
+ }
+
if ($warnAsError) {
$cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true'
}
@@ -820,6 +773,10 @@ function MSBuild-Core() {
$cmdArgs += ' /p:TreatWarningsAsErrors=false'
}
+ if ($warnNotAsError) {
+ $cmdArgs += " /warnnotaserror:$warnNotAsError /p:AdditionalWarningsNotAsErrors=$warnNotAsError"
+ }
+
foreach ($arg in $args) {
if ($null -ne $arg -and $arg.Trim() -ne "") {
if ($arg.EndsWith('\')) {
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index 6c121300..a6e0ed59 100755
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -52,6 +52,9 @@ fi
# Configures warning treatment in msbuild.
warn_as_error=${warn_as_error:-true}
+# Specifies semi-colon delimited list of warning codes that should not be treated as errors.
+warn_not_as_error=${warn_not_as_error:-''}
+
# True to attempt using .NET Core already that meets requirements specified in global.json
# installed on the machine instead of downloading one.
use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
@@ -184,6 +187,8 @@ function InstallDotNet {
local version=$2
local runtime=$4
+ # For performance this check is duplicated in src/Microsoft.DotNet.Arcade.Sdk/src/InstallDotNetCore.cs
+ # if you are making changes here, consider if you need to make changes there as well.
local dotnetVersionLabel="'$runtime v$version'"
if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then
runtimePath="$root"
@@ -522,7 +527,18 @@ function MSBuild-Core {
}
}
- RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@"
+ # Add -mt flag for MSBuild multithreaded mode if enabled via environment variable
+ local mt_switch=""
+ if [[ "${MSBUILD_MT_ENABLED:-}" == "1" ]]; then
+ mt_switch="-mt"
+ fi
+
+ local warnnotaserror_switch=""
+ if [[ -n "$warn_not_as_error" ]]; then
+ warnnotaserror_switch="/warnnotaserror:$warn_not_as_error /p:AdditionalWarningsNotAsErrors=$warn_not_as_error"
+ fi
+
+ RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch $mt_switch $warnnotaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@"
}
function GetDarc {
diff --git a/eng/common/vmr-sync.ps1 b/eng/common/vmr-sync.ps1
index 97302f32..b37992d9 100644
--- a/eng/common/vmr-sync.ps1
+++ b/eng/common/vmr-sync.ps1
@@ -103,12 +103,20 @@ Set-StrictMode -Version Latest
Highlight 'Installing .NET, preparing the tooling..'
. .\eng\common\tools.ps1
$dotnetRoot = InitializeDotNetCli -install:$true
+$env:DOTNET_ROOT = $dotnetRoot
$darc = Get-Darc
-$dotnet = "$dotnetRoot\dotnet.exe"
Highlight "Starting the synchronization of VMR.."
# Synchronize the VMR
+$versionDetailsPath = Resolve-Path (Join-Path $PSScriptRoot '..\Version.Details.xml') | Select-Object -ExpandProperty Path
+[xml]$versionDetails = Get-Content -Path $versionDetailsPath
+$repoName = $versionDetails.SelectSingleNode('//Source').Mapping
+if (-not $repoName) {
+ Fail "Failed to resolve repo mapping from $versionDetailsPath"
+ exit 1
+}
+
$darcArgs = (
"vmr", "forwardflow",
"--tmp", $tmpDir,
@@ -130,9 +138,27 @@ if ($LASTEXITCODE -eq 0) {
Highlight "Synchronization succeeded"
}
else {
- Fail "Synchronization of repo to VMR failed!"
- Fail "'$vmrDir' is left in its last state (re-run of this script will reset it)."
- Fail "Please inspect the logs which contain path to the failing patch file (use -debugOutput to get all the details)."
- Fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script."
- exit 1
+ Highlight "Failed to flow code into the local VMR. Falling back to resetting the VMR to match repo contents..."
+ git -C $vmrDir reset --hard
+
+ $resetArgs = (
+ "vmr", "reset",
+ "${repoName}:HEAD",
+ "--vmr", $vmrDir,
+ "--tmp", $tmpDir,
+ "--additional-remotes", "${repoName}:${repoRoot}"
+ )
+
+ & "$darc" $resetArgs
+
+ if ($LASTEXITCODE -eq 0) {
+ Highlight "Successfully reset the VMR using 'darc vmr reset'"
+ }
+ else {
+ Fail "Synchronization of repo to VMR failed!"
+ Fail "'$vmrDir' is left in its last state (re-run of this script will reset it)."
+ Fail "Please inspect the logs which contain path to the failing patch file (use -debugOutput to get all the details)."
+ Fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script."
+ exit 1
+ }
}
diff --git a/eng/common/vmr-sync.sh b/eng/common/vmr-sync.sh
index 44239e33..198caec5 100644
--- a/eng/common/vmr-sync.sh
+++ b/eng/common/vmr-sync.sh
@@ -186,6 +186,13 @@ fi
# Synchronize the VMR
+version_details_path=$(cd "$scriptroot/.."; pwd -P)/Version.Details.xml
+repo_name=$(grep -m 1 '