├── .config
│   ├── CredScanSuppressions.json
│   └── tsaoptions.json
├── .github
│   └── workflows
│       └── placeholder.yml
├── eng
│   ├── common
│   │   ├── BuildConfiguration
│   │   │   └── build-configuration.json
│   │   ├── dotnet-install.cmd
│   │   ├── build.cmd
│   │   ├── sdl
│   │   │   ├── packages.config
│   │   │   ├── NuGet.config
│   │   │   ├── sdl.ps1
│   │   │   ├── run-sdl.ps1
│   │   │   ├── trim-assets-version.ps1
│   │   │   ├── extract-artifact-archives.ps1
│   │   │   ├── init-sdl.ps1
│   │   │   ├── extract-artifact-packages.ps1
│   │   │   └── configure-sdl-tool.ps1
│   │   ├── CIBuild.cmd
│   │   ├── init-tools-native.cmd
│   │   ├── templates
│   │   │   ├── jobs
│   │   │   │   ├── jobs.yml
│   │   │   │   ├── source-build.yml
│   │   │   │   └── codeql-build.yml
│   │   │   ├── job
│   │   │   │   ├── onelocbuild.yml
│   │   │   │   ├── source-build.yml
│   │   │   │   ├── publish-build-assets.yml
│   │   │   │   ├── source-index-stage1.yml
│   │   │   │   └── job.yml
│   │   │   ├── steps
│   │   │   │   ├── publish-logs.yml
│   │   │   │   ├── retain-build.yml
│   │   │   │   ├── source-build.yml
│   │   │   │   ├── generate-sbom.yml
│   │   │   │   ├── send-to-helix.yml
│   │   │   │   ├── component-governance.yml
│   │   │   │   ├── get-delegation-sas.yml
│   │   │   │   ├── enable-internal-sources.yml
│   │   │   │   ├── get-federated-access-token.yml
│   │   │   │   ├── source-index-stage1-publish.yml
│   │   │   │   ├── enable-internal-runtimes.yml
│   │   │   │   ├── publish-build-artifacts.yml
│   │   │   │   └── publish-pipeline-artifacts.yml
│   │   │   ├── post-build
│   │   │   │   ├── post-build.yml
│   │   │   │   ├── common-variables.yml
│   │   │   │   └── setup-maestro-vars.yml
│   │   │   ├── vmr-build-pr.yml
│   │   │   └── variables
│   │   │       └── pool-providers.yml
│   │   ├── templates-official
│   │   │   ├── jobs
│   │   │   │   ├── jobs.yml
│   │   │   │   ├── source-build.yml
│   │   │   │   └── codeql-build.yml
│   │   │   ├── job
│   │   │   │   ├── onelocbuild.yml
│   │   │   │   ├── source-build.yml
│   │   │   │   ├── publish-build-assets.yml
│   │   │   │   ├── source-index-stage1.yml
│   │   │   │   └── job.yml
│   │   │   ├── steps
│   │   │   │   ├── generate-sbom.yml
│   │   │   │   ├── publish-logs.yml
│   │   │   │   ├── retain-build.yml
│   │   │   │   ├── send-to-helix.yml
│   │   │   │   ├── source-build.yml
│   │   │   │   ├── get-delegation-sas.yml
│   │   │   │   ├── component-governance.yml
│   │   │   │   ├── enable-internal-sources.yml
│   │   │   │   ├── get-federated-access-token.yml
│   │   │   │   ├── source-index-stage1-publish.yml
│   │   │   │   ├── enable-internal-runtimes.yml
│   │   │   │   ├── publish-pipeline-artifacts.yml
│   │   │   │   └── publish-build-artifacts.yml
│   │   │   ├── post-build
│   │   │   │   ├── post-build.yml
│   │   │   │   ├── common-variables.yml
│   │   │   │   └── setup-maestro-vars.yml
│   │   │   └── variables
│   │   │       ├── sdl-variables.yml
│   │   │       └── pool-providers.yml
│   │   ├── dotnet.cmd
│   │   ├── core-templates
│   │   │   ├── variables
│   │   │   │   └── pool-providers.yml
│   │   │   ├── steps
│   │   │   │   ├── publish-pipeline-artifacts.yml
│   │   │   │   ├── component-governance.yml
│   │   │   │   ├── publish-build-artifacts.yml
│   │   │   │   ├── cleanup-microbuild.yml
│   │   │   │   ├── enable-internal-runtimes.yml
│   │   │   │   ├── retain-build.yml
│   │   │   │   ├── get-federated-access-token.yml
│   │   │   │   ├── get-delegation-sas.yml
│   │   │   │   ├── source-index-stage1-publish.yml
│   │   │   │   ├── enable-internal-sources.yml
│   │   │   │   ├── generate-sbom.yml
│   │   │   │   ├── publish-logs.yml
│   │   │   │   └── source-build.yml
│   │   │   ├── post-build
│   │   │   │   ├── common-variables.yml
│   │   │   │   └── setup-maestro-vars.yml
│   │   │   ├── jobs
│   │   │   │   ├── codeql-build.yml
│   │   │   │   └── source-build.yml
│   │   │   └── job
│   │   │       ├── source-index-stage1.yml
│   │   │       └── onelocbuild.yml
│   │   ├── dotnet.ps1
│   │   ├── internal
│   │   │   ├── NuGet.config
│   │   │   ├── Directory.Build.props
│   │   │   └── Tools.csproj
│   │   ├── cross
│   │   │   ├── x86
│   │   │   │   └── tizen
│   │   │   │       └── tizen.patch
│   │   │   ├── armel
│   │   │   │   └── tizen
│   │   │   │       └── tizen.patch
│   │   │   ├── arm
│   │   │   │   └── tizen
│   │   │   │       └── tizen.patch
│   │   │   ├── x64
│   │   │   │   └── tizen
│   │   │   │       └── tizen.patch
│   │   │   ├── arm64
│   │   │   │   └── tizen
│   │   │   │       └── tizen.patch
│   │   │   ├── riscv64
│   │   │   │   └── tizen
│   │   │   │       └── tizen.patch
│   │   │   └── tizen-build-rootfs.sh
│   │   ├── PSScriptAnalyzerSettings.psd1
│   │   ├── cibuild.sh
│   │   ├── enable-cross-org-publishing.ps1
│   │   ├── msbuild.ps1
│   │   ├── post-build
│   │   │   ├── nuget-validation.ps1
│   │   │   ├── check-channel-consistency.ps1
│   │   │   ├── publish-using-darc.ps1
│   │   │   ├── redact-logs.ps1
│   │   │   └── nuget-verification.ps1
│   │   ├── dotnet-install.ps1
│   │   ├── dotnet.sh
│   │   ├── helixpublish.proj
│   │   ├── generate-sbom-prep.ps1
│   │   ├── msbuild.sh
│   │   ├── README.md
│   │   ├── retain-build.ps1
│   │   ├── generate-sbom-prep.sh
│   │   ├── loc
│   │   │   └── P22DotNetHtmlLocalization.lss
│   │   ├── darc-init.ps1
│   │   ├── native
│   │   │   ├── install-dependencies.sh
│   │   │   ├── init-os-and-arch.sh
│   │   │   ├── install-cmake.sh
│   │   │   ├── install-cmake-test.sh
│   │   │   ├── init-distro-rid.sh
│   │   │   ├── install-tool.ps1
│   │   │   ├── common-library.sh
│   │   │   └── init-compiler.sh
│   │   ├── dotnet-install.sh
│   │   ├── darc-init.sh
│   │   ├── sdk-task.sh
│   │   ├── sdk-task.ps1
│   │   ├── vmr-sync.ps1
│   │   ├── pipeline-logging-functions.sh
│   │   ├── internal-feed-operations.sh
│   │   └── internal-feed-operations.ps1
│   ├── pipelines
│   │   └── public.yml
│   └── Version.Details.xml
├── .azuredevops
│   └── dependabot.yml
├── CODE-OF-CONDUCT.md
├── global.json
├── LICENSE.md
├── .gitignore
├── README.md
├── NuGet.config
└── doc
    └── test-workload-updates.md
/.config/CredScanSuppressions.json:
--------------------------------------------------------------------------------
1 | {
2 | "tool": "Credential Scanner",
3 | "suppressions": []
4 | }
5 |
--------------------------------------------------------------------------------
/.github/workflows/placeholder.yml:
--------------------------------------------------------------------------------
1 | # This is a placeholder file to satisfy the policy for allowing GitHub Actions in the repo.
--------------------------------------------------------------------------------
/eng/common/BuildConfiguration/build-configuration.json:
--------------------------------------------------------------------------------
1 | {
2 | "RetryCountLimit": 1,
3 | "RetryByAnyError": false
4 | }
5 |
--------------------------------------------------------------------------------
/eng/common/dotnet-install.cmd:
--------------------------------------------------------------------------------
1 | @echo off
2 | powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet-install.ps1""" %*"
--------------------------------------------------------------------------------
/eng/common/build.cmd:
--------------------------------------------------------------------------------
1 | @echo off
2 | powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0build.ps1""" %*"
3 | exit /b %ErrorLevel%
4 |
--------------------------------------------------------------------------------
/eng/common/sdl/packages.config:
--------------------------------------------------------------------------------
[XML content not captured in this extract. Per templates-official/variables/sdl-variables.yml and sdl/sdl.ps1, this file pins the Microsoft.Guardian.Cli package at the DefaultGuardianVersion (0.109.0).]
--------------------------------------------------------------------------------
/eng/common/CIBuild.cmd:
--------------------------------------------------------------------------------
1 | @echo off
2 | powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"
3 |
--------------------------------------------------------------------------------
/eng/common/init-tools-native.cmd:
--------------------------------------------------------------------------------
1 | @echo off
2 | powershell -NoProfile -NoLogo -ExecutionPolicy ByPass -command "& """%~dp0init-tools-native.ps1""" %*"
3 | exit /b %ErrorLevel%
--------------------------------------------------------------------------------
/.azuredevops/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | # Disabling dependabot on Azure DevOps as this is a mirrored repo. Updates should go through github.
4 | enable-campaigned-updates: false
5 | enable-security-updates: false
6 |
--------------------------------------------------------------------------------
/eng/common/templates/jobs/jobs.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/jobs/jobs.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
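
Every file under eng/common/templates and eng/common/templates-official follows this same wrapper pattern: it hard-codes is1ESPipeline (false for the public flavor, true for the 1ES flavor) and forwards every caller-supplied parameter to the matching template under eng/common/core-templates. A minimal sketch of how a repository pipeline might consume the public wrapper; the job name, pool image, and the shape of the jobs parameter are illustrative assumptions (the core jobs.yml is not shown in this extract):

stages:
- stage: Build
  jobs:
  - template: /eng/common/templates/jobs/jobs.yml
    parameters:
      # 'jobs' is assumed to be the list the core template expands, as in Arcade
      jobs:
      - job: Windows_Build            # illustrative job
        pool:
          vmImage: windows-latest     # illustrative pool
        steps:
        - script: eng\common\CIBuild.cmd
          displayName: Restore, build, test, sign, pack, publish
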
/eng/common/templates-official/jobs/jobs.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/jobs/jobs.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/job/onelocbuild.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/job/onelocbuild.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/job/source-build.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/job/source-build.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/jobs/source-build.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/jobs/source-build.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
--------------------------------------------------------------------------------
/eng/common/templates-official/job/onelocbuild.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/job/onelocbuild.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates-official/jobs/source-build.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/jobs/source-build.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
--------------------------------------------------------------------------------
/eng/common/templates/jobs/codeql-build.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/jobs/codeql-build.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/publish-logs.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/publish-logs.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/retain-build.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/retain-build.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/source-build.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/source-build.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/dotnet.cmd:
--------------------------------------------------------------------------------
1 | @echo off
2 |
3 | :: This script is used to install the .NET SDK.
4 | :: It will also invoke the SDK with any provided arguments.
5 |
6 | powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet.ps1""" %*"
7 | exit /b %ErrorLevel%
8 |
--------------------------------------------------------------------------------
/eng/common/templates-official/job/source-build.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/job/source-build.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates-official/jobs/codeql-build.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/jobs/codeql-build.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/generate-sbom.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/generate-sbom.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/send-to-helix.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/send-to-helix.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates-official/steps/generate-sbom.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/generate-sbom.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates-official/steps/publish-logs.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/publish-logs.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates-official/steps/retain-build.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/retain-build.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates-official/steps/send-to-helix.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/send-to-helix.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates-official/steps/source-build.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/source-build.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/job/publish-build-assets.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/job/publish-build-assets.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/job/source-index-stage1.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/job/source-index-stage1.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/component-governance.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/component-governance.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/get-delegation-sas.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/get-delegation-sas.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates-official/job/publish-build-assets.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/job/publish-build-assets.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates-official/job/source-index-stage1.yml:
--------------------------------------------------------------------------------
1 | jobs:
2 | - template: /eng/common/core-templates/job/source-index-stage1.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates-official/steps/get-delegation-sas.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/get-delegation-sas.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/enable-internal-sources.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/enable-internal-sources.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
--------------------------------------------------------------------------------
/eng/common/templates-official/steps/component-governance.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/component-governance.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates-official/steps/enable-internal-sources.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/enable-internal-sources.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
--------------------------------------------------------------------------------
/eng/common/templates/steps/get-federated-access-token.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/get-federated-access-token.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
--------------------------------------------------------------------------------
/CODE-OF-CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Code of Conduct
2 |
3 | This project has adopted the code of conduct defined by the Contributor Covenant
4 | to clarify expected behavior in our community.
5 |
6 | For more information, see the [.NET Foundation Code of Conduct](https://dotnetfoundation.org/code-of-conduct).
7 |
--------------------------------------------------------------------------------
/eng/common/templates-official/steps/get-federated-access-token.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/get-federated-access-token.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
--------------------------------------------------------------------------------
/eng/common/templates/steps/source-index-stage1-publish.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
3 | parameters:
4 | is1ESPipeline: false
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates-official/steps/source-index-stage1-publish.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
3 | parameters:
4 | is1ESPipeline: true
5 |
6 | ${{ each parameter in parameters }}:
7 | ${{ parameter.key }}: ${{ parameter.value }}
8 |
--------------------------------------------------------------------------------
/eng/common/templates/post-build/post-build.yml:
--------------------------------------------------------------------------------
1 | stages:
2 | - template: /eng/common/core-templates/post-build/post-build.yml
3 | parameters:
4 | # Specifies whether to use 1ES
5 | is1ESPipeline: false
6 |
7 | ${{ each parameter in parameters }}:
8 | ${{ parameter.key }}: ${{ parameter.value }}
--------------------------------------------------------------------------------
/eng/common/templates-official/post-build/post-build.yml:
--------------------------------------------------------------------------------
1 | stages:
2 | - template: /eng/common/core-templates/post-build/post-build.yml
3 | parameters:
4 | # Specifies whether to use 1ES
5 | is1ESPipeline: true
6 |
7 | ${{ each parameter in parameters }}:
8 | ${{ parameter.key }}: ${{ parameter.value }}
9 |
--------------------------------------------------------------------------------
/eng/common/templates/post-build/common-variables.yml:
--------------------------------------------------------------------------------
1 | variables:
2 | - template: /eng/common/core-templates/post-build/common-variables.yml
3 | parameters:
4 | # Specifies whether to use 1ES
5 | is1ESPipeline: false
6 |
7 | ${{ each parameter in parameters }}:
8 | ${{ parameter.key }}: ${{ parameter.value }}
--------------------------------------------------------------------------------
/eng/common/templates/post-build/setup-maestro-vars.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
3 | parameters:
4 | # Specifies whether to use 1ES
5 | is1ESPipeline: false
6 |
7 | ${{ each parameter in parameters }}:
8 | ${{ parameter.key }}: ${{ parameter.value }}
--------------------------------------------------------------------------------
/eng/common/templates-official/post-build/common-variables.yml:
--------------------------------------------------------------------------------
1 | variables:
2 | - template: /eng/common/core-templates/post-build/common-variables.yml
3 | parameters:
4 | # Specifies whether to use 1ES
5 | is1ESPipeline: true
6 |
7 | ${{ each parameter in parameters }}:
8 | ${{ parameter.key }}: ${{ parameter.value }}
--------------------------------------------------------------------------------
/eng/common/templates-official/post-build/setup-maestro-vars.yml:
--------------------------------------------------------------------------------
1 | steps:
2 | - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
3 | parameters:
4 | # Specifies whether to use 1ES
5 | is1ESPipeline: true
6 |
7 | ${{ each parameter in parameters }}:
8 | ${{ parameter.key }}: ${{ parameter.value }}
--------------------------------------------------------------------------------
/eng/common/core-templates/variables/pool-providers.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | is1ESPipeline: false
3 |
4 | variables:
5 | - ${{ if eq(parameters.is1ESPipeline, 'true') }}:
6 | - template: /eng/common/templates-official/variables/pool-providers.yml
7 | - ${{ else }}:
8 | - template: /eng/common/templates/variables/pool-providers.yml
--------------------------------------------------------------------------------
/eng/common/templates-official/variables/sdl-variables.yml:
--------------------------------------------------------------------------------
1 | variables:
2 | # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
3 | # sync with the packages.config file.
4 | - name: DefaultGuardianVersion
5 | value: 0.109.0
6 | - name: GuardianPackagesConfigFile
7 | value: $(System.DefaultWorkingDirectory)\eng\common\sdl\packages.config
--------------------------------------------------------------------------------
/eng/common/dotnet.ps1:
--------------------------------------------------------------------------------
1 | # This script is used to install the .NET SDK.
2 | # It will also invoke the SDK with any provided arguments.
3 |
4 | . $PSScriptRoot\tools.ps1
5 | $dotnetRoot = InitializeDotNetCli -install:$true
6 |
7 | # Invoke acquired SDK with args if they are provided
8 | if ($args.count -gt 0) {
9 | $env:DOTNET_NOLOGO=1
10 | & "$dotnetRoot\dotnet.exe" $args
11 | }
12 |
--------------------------------------------------------------------------------
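
dotnet.cmd and dotnet.ps1 give any pipeline or local shell a one-line way to acquire the repo-pinned SDK and immediately run it. A minimal sketch of a pipeline step using the wrapper; '--info' is just an example argument forwarded to the acquired dotnet.exe:

steps:
- script: eng\common\dotnet.cmd --info
  displayName: Install the repo-pinned .NET SDK and print its info
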
/.config/tsaoptions.json:
--------------------------------------------------------------------------------
1 | {
2 | "instanceUrl": "https://devdiv.visualstudio.com/",
3 | "template": "TFSDEVDIV",
4 | "projectName": "DEVDIV",
5 | "areaPath": "DevDiv\\NET Tools\\SDK",
6 | "iterationPath": "DevDiv",
7 | "notificationAliases": [ "dotnetdevexcli@microsoft.com" ],
8 | "repositoryName": "workload-versions",
9 | "codebaseName": "workload-versions"
10 | }
11 |
--------------------------------------------------------------------------------
/eng/common/internal/NuGet.config:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/global.json:
--------------------------------------------------------------------------------
1 | {
2 | "sdk": {
3 | "version": "10.0.100",
4 | "allowPrerelease": true,
5 | "rollForward": "major"
6 | },
7 | "tools": {
8 | "dotnet": "10.0.100"
9 | },
10 | "msbuild-sdks": {
11 | "Microsoft.Build.NoTargets": "3.7.0",
12 | "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25612.5",
13 | "Microsoft.VisualStudio.Internal.MicroBuild.Vsman": "2.0.174"
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/eng/common/templates-official/steps/enable-internal-runtimes.yml:
--------------------------------------------------------------------------------
1 | # Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64'
2 | # variable with the base64-encoded SAS token, by default
3 | steps:
4 | - template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
5 | parameters:
6 | is1ESPipeline: true
7 |
8 | ${{ each parameter in parameters }}:
9 | ${{ parameter.key }}: ${{ parameter.value }}
10 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/enable-internal-runtimes.yml:
--------------------------------------------------------------------------------
1 | # Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64'
2 | # variable with the base64-encoded SAS token, by default
3 |
4 | steps:
5 | - template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
6 | parameters:
7 | is1ESPipeline: false
8 |
9 | ${{ each parameter in parameters }}:
10 | ${{ parameter.key }}: ${{ parameter.value }}
11 |
--------------------------------------------------------------------------------
/eng/common/internal/Directory.Build.props:
--------------------------------------------------------------------------------
[XML content not captured in this extract; only two property values, both 'false', survive.]
--------------------------------------------------------------------------------
/eng/common/cross/x86/tizen/tizen.patch:
--------------------------------------------------------------------------------
1 | diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
2 | --- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
3 | +++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
4 | @@ -2,4 +2,4 @@
5 | Use the shared library, but some functions are only in
6 | the static library, so try that secondarily. */
7 | OUTPUT_FORMAT(elf32-i386)
8 | -GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.2 ) )
9 | +GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.2 ) )
10 |
--------------------------------------------------------------------------------
/eng/common/cross/armel/tizen/tizen.patch:
--------------------------------------------------------------------------------
1 | diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
2 | --- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
3 | +++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
4 | @@ -2,4 +2,4 @@
5 | Use the shared library, but some functions are only in
6 | the static library, so try that secondarily. */
7 | OUTPUT_FORMAT(elf32-littlearm)
8 | -GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) )
9 | +GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) )
10 |
--------------------------------------------------------------------------------
/eng/common/PSScriptAnalyzerSettings.psd1:
--------------------------------------------------------------------------------
1 | @{
2 | IncludeRules=@('PSAvoidUsingCmdletAliases',
3 | 'PSAvoidUsingWMICmdlet',
4 | 'PSAvoidUsingPositionalParameters',
5 | 'PSAvoidUsingInvokeExpression',
6 | 'PSUseDeclaredVarsMoreThanAssignments',
7 | 'PSUseCmdletCorrectly',
8 | 'PSStandardDSCFunctionsInResource',
9 | 'PSUseIdenticalMandatoryParametersForDSC',
10 | 'PSUseIdenticalParametersForDSC')
11 | }
--------------------------------------------------------------------------------
/eng/common/cross/arm/tizen/tizen.patch:
--------------------------------------------------------------------------------
1 | diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
2 | --- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
3 | +++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
4 | @@ -2,4 +2,4 @@
5 | Use the shared library, but some functions are only in
6 | the static library, so try that secondarily. */
7 | OUTPUT_FORMAT(elf32-littlearm)
8 | -GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-armhf.so.3 ) )
9 | +GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-armhf.so.3 ) )
10 |
--------------------------------------------------------------------------------
/eng/common/cross/x64/tizen/tizen.patch:
--------------------------------------------------------------------------------
1 | diff -u -r a/usr/lib64/libc.so b/usr/lib64/libc.so
2 | --- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900
3 | +++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900
4 | @@ -2,4 +2,4 @@
5 | Use the shared library, but some functions are only in
6 | the static library, so try that secondarily. */
7 | OUTPUT_FORMAT(elf64-x86-64)
8 | -GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-x86-64.so.2 ) )
9 | +GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-x86-64.so.2 ) )
10 |
--------------------------------------------------------------------------------
/eng/common/cross/arm64/tizen/tizen.patch:
--------------------------------------------------------------------------------
1 | diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
2 | --- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900
3 | +++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900
4 | @@ -2,4 +2,4 @@
5 | Use the shared library, but some functions are only in
6 | the static library, so try that secondarily. */
7 | OUTPUT_FORMAT(elf64-littleaarch64)
8 | -GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-aarch64.so.1 ) )
9 | +GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) )
10 |
--------------------------------------------------------------------------------
/eng/common/cross/riscv64/tizen/tizen.patch:
--------------------------------------------------------------------------------
1 | diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
2 | --- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900
3 | +++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900
4 | @@ -2,4 +2,4 @@
5 | Use the shared library, but some functions are only in
6 | the static library, so try that secondarily. */
7 | OUTPUT_FORMAT(elf64-littleriscv)
8 | -GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-riscv64-lp64d.so.1 ) )
9 | +GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-riscv64-lp64d.so.1 ) )
10 |
--------------------------------------------------------------------------------
/eng/common/cibuild.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source="${BASH_SOURCE[0]}"
4 |
5 | # resolve $SOURCE until the file is no longer a symlink
6 | while [[ -h $source ]]; do
7 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
8 | source="$(readlink "$source")"
9 |
10 | # if $source was a relative symlink, we need to resolve it relative to the path where
11 | # the symlink file was located
12 | [[ $source != /* ]] && source="$scriptroot/$source"
13 | done
14 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
15 |
16 | . "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@
17 |
--------------------------------------------------------------------------------
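
cibuild.sh resolves its own location through any symlinks and then runs build.sh with the standard CI switches (--restore --build --test --pack --publish --ci), appending whatever extra arguments the caller supplies via $@. A minimal sketch of a Linux/macOS CI step; the --configuration value is an illustrative extra argument assumed to be understood by build.sh:

steps:
- script: ./eng/common/cibuild.sh --configuration Release
  displayName: CI build (restore, build, test, pack, publish)
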
/eng/common/sdl/NuGet.config:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/eng/common/enable-cross-org-publishing.ps1:
--------------------------------------------------------------------------------
1 | param(
2 | [string] $token
3 | )
4 |
5 |
6 | . $PSScriptRoot\pipeline-logging-functions.ps1
7 |
8 | # Write-PipelineSetVariable will no-op if a variable named $ci is not defined
9 | # Since this script is only ever called in AzDO builds, just universally set it
10 | $ci = $true
11 |
12 | Write-PipelineSetVariable -Name 'VSS_NUGET_ACCESSTOKEN' -Value $token -IsMultiJobVariable $false
13 | Write-PipelineSetVariable -Name 'VSS_NUGET_URI_PREFIXES' -Value 'https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/' -IsMultiJobVariable $false
14 |
--------------------------------------------------------------------------------
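
The script publishes VSS_NUGET_ACCESSTOKEN and VSS_NUGET_URI_PREFIXES as job-scoped variables so the NuGet credential provider can authenticate against the dnceng and devdiv organizations. A minimal sketch of calling it from a pipeline; $(CrossOrgPublishToken) is a hypothetical secret variable name:

steps:
- powershell: eng\common\enable-cross-org-publishing.ps1 -token $(CrossOrgPublishToken)   # token variable name is illustrative
  displayName: Enable cross-org NuGet publishing
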
/eng/common/core-templates/steps/publish-pipeline-artifacts.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | - name: is1ESPipeline
3 | type: boolean
4 | default: false
5 |
6 | - name: args
7 | type: object
8 | default: {}
9 |
10 | steps:
11 | - ${{ if ne(parameters.is1ESPipeline, true) }}:
12 | - template: /eng/common/templates/steps/publish-pipeline-artifacts.yml
13 | parameters:
14 | ${{ each parameter in parameters }}:
15 | ${{ parameter.key }}: ${{ parameter.value }}
16 | - ${{ else }}:
17 | - template: /eng/common/templates-official/steps/publish-pipeline-artifacts.yml
18 | parameters:
19 | ${{ each parameter in parameters }}:
20 | ${{ parameter.key }}: ${{ parameter.value }}
21 |
--------------------------------------------------------------------------------
/eng/common/msbuild.ps1:
--------------------------------------------------------------------------------
1 | [CmdletBinding(PositionalBinding=$false)]
2 | Param(
3 | [string] $verbosity = 'minimal',
4 | [bool] $warnAsError = $true,
5 | [bool] $nodeReuse = $true,
6 | [switch] $ci,
7 | [switch] $prepareMachine,
8 | [switch] $excludePrereleaseVS,
9 | [string] $msbuildEngine = $null,
10 | [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs
11 | )
12 |
13 | . $PSScriptRoot\tools.ps1
14 |
15 | try {
16 | if ($ci) {
17 | $nodeReuse = $false
18 | }
19 |
20 | MSBuild @extraArgs
21 | }
22 | catch {
23 | Write-Host $_.ScriptStackTrace
24 | Write-PipelineTelemetryError -Category 'Build' -Message $_
25 | ExitWithExitCode 1
26 | }
27 |
28 | ExitWithExitCode 0
--------------------------------------------------------------------------------
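
msbuild.ps1 disables MSBuild node reuse on CI and forwards every remaining argument to the MSBuild helper in tools.ps1. A minimal sketch of an invocation; the project file and target are illustrative:

steps:
- powershell: eng\common\msbuild.ps1 -ci -verbosity minimal $(Build.SourcesDirectory)\build.proj /t:Build   # project and target are illustrative
  displayName: Build via the common MSBuild wrapper
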
/eng/common/core-templates/post-build/common-variables.yml:
--------------------------------------------------------------------------------
1 | variables:
2 | - group: Publish-Build-Assets
3 |
4 | # Whether the build is internal or not
5 | - name: IsInternalBuild
6 | value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
7 |
8 | # Default Maestro++ API Endpoint and API Version
9 | - name: MaestroApiEndPoint
10 | value: "https://maestro.dot.net"
11 | - name: MaestroApiVersion
12 | value: "2020-02-20"
13 |
14 | - name: SourceLinkCLIVersion
15 | value: 3.0.0
16 | - name: SymbolToolVersion
17 | value: 1.0.1
18 | - name: BinlogToolVersion
19 | value: 1.0.11
20 |
21 | - name: runCodesignValidationInjection
22 | value: false
23 |
--------------------------------------------------------------------------------
/eng/common/core-templates/steps/component-governance.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | disableComponentGovernance: false
3 | componentGovernanceIgnoreDirectories: ''
4 | is1ESPipeline: false
5 | displayName: 'Component Detection'
6 |
7 | steps:
8 | - ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
9 | - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
10 | displayName: Set skipComponentGovernanceDetection variable
11 | - ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
12 | - task: ComponentGovernanceComponentDetection@0
13 | continueOnError: true
14 | displayName: ${{ parameters.displayName }}
15 | inputs:
16 | ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
17 |
--------------------------------------------------------------------------------
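
Repositories normally reach this step through the templates/ or templates-official/ wrappers, but the core template can also be referenced directly when is1ESPipeline is passed explicitly. A minimal sketch; the ignore-directory value is illustrative:

steps:
- template: /eng/common/core-templates/steps/component-governance.yml
  parameters:
    is1ESPipeline: false
    componentGovernanceIgnoreDirectories: $(Build.SourcesDirectory)/.packages   # illustrative path
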
/eng/common/core-templates/steps/publish-build-artifacts.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | - name: is1ESPipeline
3 | type: boolean
4 | default: false
5 | - name: args
6 | type: object
7 | default: {}
8 | steps:
9 | - ${{ if ne(parameters.is1ESPipeline, true) }}:
10 | - template: /eng/common/templates/steps/publish-build-artifacts.yml
11 | parameters:
12 | is1ESPipeline: ${{ parameters.is1ESPipeline }}
13 | ${{ each parameter in parameters.args }}:
14 | ${{ parameter.key }}: ${{ parameter.value }}
15 | - ${{ else }}:
16 | - template: /eng/common/templates-official/steps/publish-build-artifacts.yml
17 | parameters:
18 | is1ESPipeline: ${{ parameters.is1ESPipeline }}
19 | ${{ each parameter in parameters.args }}:
20 | ${{ parameter.key }}: ${{ parameter.value }}
--------------------------------------------------------------------------------
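
Unlike most core templates, this one nests the task inputs in an 'args' object and forwards only that object to the flavor-specific template, alongside is1ESPipeline. A minimal sketch of publishing a log folder; the path and artifact name are illustrative:

steps:
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
  parameters:
    is1ESPipeline: false
    args:
      displayName: Publish build logs
      pathToPublish: $(Build.ArtifactStagingDirectory)/logs    # illustrative path
      artifactName: Logs_$(Agent.JobName)                      # illustrative name
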
/eng/pipelines/public.yml:
--------------------------------------------------------------------------------
1 | # Pipeline: https://dev.azure.com/dnceng-public/public/_build?definitionId=264
2 |
3 | pr:
4 | branches:
5 | include:
6 | - main
7 | - release/*
8 | - eng
9 |
10 | trigger: none
11 |
12 | resources:
13 | repositories:
14 | - repository: eng
15 | type: github
16 | name: dotnet/workload-versions
17 | ref: refs/heads/eng
18 | # Service connection: https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e
19 | endpoint: public
20 |
21 | variables:
22 | - template: /eng/pipelines/templates/variables/workload-public.yml@eng
23 | # Variables used: DncEngPublicBuildPool
24 | - template: /eng/common/templates/variables/pool-providers.yml@self
25 |
26 | stages:
27 | - template: /eng/pipelines/templates/stages/workload-public-build.yml@eng
--------------------------------------------------------------------------------
/eng/common/post-build/nuget-validation.ps1:
--------------------------------------------------------------------------------
1 | # This script validates NuGet package metadata information using this
2 | # tool: https://github.com/NuGet/NuGetGallery/tree/jver-verify/src/VerifyMicrosoftPackage
3 |
4 | param(
5 | [Parameter(Mandatory=$true)][string] $PackagesPath # Path to where the packages to be validated are
6 | )
7 |
8 | # `tools.ps1` checks $ci to perform some actions. Since the post-build
9 | # scripts don't necessarily execute in the same agent that run the
10 | # build.ps1/sh script this variable isn't automatically set.
11 | $ci = $true
12 | $disableConfigureToolsetImport = $true
13 | . $PSScriptRoot\..\tools.ps1
14 |
15 | try {
16 | & $PSScriptRoot\nuget-verification.ps1 ${PackagesPath}\*.nupkg
17 | }
18 | catch {
19 | Write-Host $_.ScriptStackTrace
20 | Write-PipelineTelemetryError -Category 'NuGetValidation' -Message $_
21 | ExitWithExitCode 1
22 | }
23 |
--------------------------------------------------------------------------------
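
A minimal sketch of running the validation against a staged package folder from a post-build pipeline step; the path is illustrative:

steps:
- powershell: eng\common\post-build\nuget-validation.ps1 -PackagesPath $(Build.ArtifactStagingDirectory)\packages   # illustrative path
  displayName: Validate NuGet package metadata
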
/eng/common/dotnet-install.ps1:
--------------------------------------------------------------------------------
1 | [CmdletBinding(PositionalBinding=$false)]
2 | Param(
3 | [string] $verbosity = 'minimal',
4 | [string] $architecture = '',
5 | [string] $version = 'Latest',
6 | [string] $runtime = 'dotnet',
7 | [string] $RuntimeSourceFeed = '',
8 | [string] $RuntimeSourceFeedKey = ''
9 | )
10 |
11 | . $PSScriptRoot\tools.ps1
12 |
13 | $dotnetRoot = Join-Path $RepoRoot '.dotnet'
14 |
15 | $installdir = $dotnetRoot
16 | try {
17 | if ($architecture -and $architecture.Trim() -eq 'x86') {
18 | $installdir = Join-Path $installdir 'x86'
19 | }
20 | InstallDotNet $installdir $version $architecture $runtime $true -RuntimeSourceFeed $RuntimeSourceFeed -RuntimeSourceFeedKey $RuntimeSourceFeedKey
21 | }
22 | catch {
23 | Write-Host $_.ScriptStackTrace
24 | Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
25 | ExitWithExitCode 1
26 | }
27 |
28 | ExitWithExitCode 0
29 |
--------------------------------------------------------------------------------
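
A minimal sketch of installing an additional runtime into the repo-local .dotnet folder; the architecture and runtime values map onto the parameters declared above and are otherwise illustrative:

steps:
- powershell: eng\common\dotnet-install.ps1 -architecture x64 -runtime dotnet -version Latest
  displayName: Install the latest x64 .NET runtime into .dotnet
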
/eng/common/dotnet.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script is used to install the .NET SDK.
4 | # It will also invoke the SDK with any provided arguments.
5 |
6 | source="${BASH_SOURCE[0]}"
7 | # resolve $SOURCE until the file is no longer a symlink
8 | while [[ -h $source ]]; do
9 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
10 | source="$(readlink "$source")"
11 |
12 | # if $source was a relative symlink, we need to resolve it relative to the path where the
13 | # symlink file was located
14 | [[ $source != /* ]] && source="$scriptroot/$source"
15 | done
16 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
17 |
18 | source $scriptroot/tools.sh
19 | InitializeDotNetCli true # install
20 |
21 | # Invoke acquired SDK with args if they are provided
22 | if [[ $# > 0 ]]; then
23 | __dotnetDir=${_InitializeDotNetCli}
24 | dotnetPath=${__dotnetDir}/dotnet
25 | ${dotnetPath} "$@"
26 | fi
27 |
--------------------------------------------------------------------------------
/eng/common/helixpublish.proj:
--------------------------------------------------------------------------------
[XML project content not captured in this extract. Surviving values reference an msbuild language setting, %(Identity), $(WorkItemDirectory), $(WorkItemCommand), and $(WorkItemTimeout), consistent with a Helix work-item publishing project.]
--------------------------------------------------------------------------------
/eng/common/core-templates/steps/cleanup-microbuild.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | # Enable cleanup tasks for MicroBuild
3 | enableMicrobuild: false
4 | # Enable cleanup tasks for MicroBuild on Mac and Linux
5 | # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT'
6 | enableMicrobuildForMacAndLinux: false
7 | continueOnError: false
8 |
9 | steps:
10 | - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
11 | - task: MicroBuildCleanup@1
12 | displayName: Execute Microbuild cleanup tasks
13 | condition: and(
14 | always(),
15 | or(
16 | and(
17 | eq(variables['Agent.Os'], 'Windows_NT'),
18 | in(variables['_SignType'], 'real', 'test')
19 | ),
20 | and(
21 | ${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }},
22 | ne(variables['Agent.Os'], 'Windows_NT'),
23 | eq(variables['_SignType'], 'real')
24 | )
25 | ))
26 | continueOnError: ${{ parameters.continueOnError }}
27 | env:
28 | TeamName: $(_TeamName)
29 |
--------------------------------------------------------------------------------
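
A minimal sketch of enabling the cleanup step at the end of a signing job; the switches correspond to the parameters declared above:

steps:
- template: /eng/common/core-templates/steps/cleanup-microbuild.yml
  parameters:
    enableMicrobuild: true
    enableMicrobuildForMacAndLinux: true
    continueOnError: true
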
/eng/common/templates-official/steps/publish-pipeline-artifacts.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | - name: is1ESPipeline
3 | type: boolean
4 | default: true
5 |
6 | - name: args
7 | type: object
8 | default: {}
9 |
10 | steps:
11 | - ${{ if ne(parameters.is1ESPipeline, true) }}:
12 | - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
13 | - task: 1ES.PublishPipelineArtifact@1
14 | displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
15 | ${{ if parameters.args.condition }}:
16 | condition: ${{ parameters.args.condition }}
17 | ${{ else }}:
18 | condition: succeeded()
19 | ${{ if parameters.args.continueOnError }}:
20 | continueOnError: ${{ parameters.args.continueOnError }}
21 | inputs:
22 | targetPath: ${{ parameters.args.targetPath }}
23 | ${{ if parameters.args.artifactName }}:
24 | artifactName: ${{ parameters.args.artifactName }}
25 | ${{ if parameters.args.properties }}:
26 | properties: ${{ parameters.args.properties }}
27 | ${{ if parameters.args.sbomEnabled }}:
28 | sbomEnabled: ${{ parameters.args.sbomEnabled }}
29 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) .NET Foundation and Contributors
4 |
5 | All rights reserved.
6 |
7 | Permission is hereby granted, free of charge, to any person obtaining a copy
8 | of this software and associated documentation files (the "Software"), to deal
9 | in the Software without restriction, including without limitation the rights
10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | copies of the Software, and to permit persons to whom the Software is
12 | furnished to do so, subject to the following conditions:
13 |
14 | The above copyright notice and this permission notice shall be included in all
15 | copies or substantial portions of the Software.
16 |
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
23 | SOFTWARE.
24 |
--------------------------------------------------------------------------------
/eng/common/core-templates/steps/enable-internal-runtimes.yml:
--------------------------------------------------------------------------------
1 | # Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64'
2 | # variable with the base64-encoded SAS token, by default
3 |
4 | parameters:
5 | - name: federatedServiceConnection
6 | type: string
7 | default: 'dotnetbuilds-internal-read'
8 | - name: outputVariableName
9 | type: string
10 | default: 'dotnetbuilds-internal-container-read-token-base64'
11 | - name: expiryInHours
12 | type: number
13 | default: 1
14 | - name: base64Encode
15 | type: boolean
16 | default: true
17 | - name: is1ESPipeline
18 | type: boolean
19 | default: false
20 |
21 | steps:
22 | - ${{ if ne(variables['System.TeamProject'], 'public') }}:
23 | - template: /eng/common/core-templates/steps/get-delegation-sas.yml
24 | parameters:
25 | federatedServiceConnection: ${{ parameters.federatedServiceConnection }}
26 | outputVariableName: ${{ parameters.outputVariableName }}
27 | expiryInHours: ${{ parameters.expiryInHours }}
28 | base64Encode: ${{ parameters.base64Encode }}
29 | storageAccount: dotnetbuilds
30 | container: internal
31 | permissions: rl
32 | is1ESPipeline: ${{ parameters.is1ESPipeline }}
--------------------------------------------------------------------------------
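
A minimal sketch of requesting a longer-lived token under a custom output variable name; values other than the declared parameter names are illustrative:

steps:
- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
  parameters:
    is1ESPipeline: true
    expiryInHours: 4                                        # illustrative
    outputVariableName: internal-runtime-sas-base64         # illustrative
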
/eng/common/generate-sbom-prep.ps1:
--------------------------------------------------------------------------------
1 | Param(
2 | [Parameter(Mandatory=$true)][string] $ManifestDirPath # Manifest directory where sbom will be placed
3 | )
4 |
5 | . $PSScriptRoot\pipeline-logging-functions.ps1
6 |
7 | # Normally - we'd listen to the manifest path given, but 1ES templates will overwrite if this level gets uploaded directly
8 | # with their own overwriting ours. So we create it as a sub directory of the requested manifest path.
9 | $ArtifactName = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM"
10 | $SafeArtifactName = $ArtifactName -replace '["/:<>\\|?@*"() ]', '_'
11 | $SbomGenerationDir = Join-Path $ManifestDirPath $SafeArtifactName
12 |
13 | Write-Host "Artifact name before : $ArtifactName"
14 | Write-Host "Artifact name after : $SafeArtifactName"
15 |
16 | Write-Host "Creating dir $ManifestDirPath"
17 |
18 | # create directory for sbom manifest to be placed
19 | if (!(Test-Path -path $SbomGenerationDir))
20 | {
21 | New-Item -ItemType Directory -path $SbomGenerationDir
22 | Write-Host "Successfully created directory $SbomGenerationDir"
23 | }
24 | else{
25 | Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder."
26 | }
27 |
28 | Write-Host "Updating artifact name"
29 | Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$SafeArtifactName"
30 |
--------------------------------------------------------------------------------
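
A minimal sketch of preparing the SBOM output folder before manifest generation; the manifest path is illustrative:

steps:
- powershell: eng\common\generate-sbom-prep.ps1 -ManifestDirPath $(Build.ArtifactStagingDirectory)/sbom   # illustrative path
  displayName: Prepare SBOM manifest directory
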
/.gitignore:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # Build Artifacts
3 | ###############################################################################
4 | artifacts/
5 | bin/
6 | obj/
7 |
8 | ###############################################################################
9 | # Visual Studio
10 | ###############################################################################
11 | .vs/
12 | .vscode/
13 | .dotnet/
14 | .tools/
15 | .packages/
16 |
17 | # OS X Device Services Store
18 | .DS_Store
19 |
20 | *.binlog
21 |
22 | # User specific files
23 | *.user
24 |
25 | # Debian and python stuff
26 | *.dsc
27 | *.tar.gz
28 | *.build
29 | *.changes
30 | *.deb
31 | *.pyc
32 | *.pyo
33 |
34 | # OSX Packaging temp files
35 | *.pkg
36 |
37 | # CMake generated files
38 | cmake/
39 |
40 | # Helix payload
41 | .dotnet.payload
42 |
43 | # MSBuild Logs
44 | **/MSBuild_Logs/MSBuild_pid-*.failure.txt
45 |
46 | ###############################################################################
47 | # Eng branch specific files
48 | ###############################################################################
49 | eng/
50 | !eng/common/
51 | !eng/pipelines/public.yml
52 | !eng/Version.Details.xml
53 | !eng/Versions.props
54 | src/
55 | tools/
56 | Directory.Build.props
57 | Directory.Build.targets
58 | workload-versions.sln
--------------------------------------------------------------------------------
/eng/common/core-templates/steps/retain-build.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | # Optional azure devops PAT with build execute permissions for the build's organization,
3 | # only needed if the build that should be retained ran on a different organization than
4 | # the pipeline where this template is executing from
5 | Token: ''
6 | # Optional BuildId to retain, defaults to the current running build
7 | BuildId: ''
8 | # Azure devops Organization URI for the build in the https://dev.azure.com/ format.
9 | # Defaults to the organization the current pipeline is running on
10 | AzdoOrgUri: '$(System.CollectionUri)'
11 | # Azure devops project for the build. Defaults to the project the current pipeline is running on
12 | AzdoProject: '$(System.TeamProject)'
13 |
14 | steps:
15 | - task: powershell@2
16 | inputs:
17 | targetType: 'filePath'
18 | filePath: eng/common/retain-build.ps1
19 | pwsh: true
20 | arguments: >
21 | -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
22 | -AzdoProject ${{parameters.AzdoProject}}
23 | -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
24 | -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
25 | displayName: Enable permanent build retention
26 | env:
27 | SYSTEM_ACCESSTOKEN: $(System.AccessToken)
28 | BUILD_ID: $(Build.BuildId)
--------------------------------------------------------------------------------
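
A minimal sketch of retaining the current build with the defaults, where organization, project, and build id come from the predefined pipeline variables; Token and AzdoOrgUri are only needed when retaining a build in a different organization:

steps:
- template: /eng/common/core-templates/steps/retain-build.yml
  parameters:
    BuildId: $(Build.BuildId)
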
/eng/common/templates/steps/publish-build-artifacts.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | - name: is1ESPipeline
3 | type: boolean
4 | default: false
5 |
6 | - name: displayName
7 | type: string
8 | default: 'Publish to Build Artifact'
9 |
10 | - name: condition
11 | type: string
12 | default: succeeded()
13 |
14 | - name: artifactName
15 | type: string
16 |
17 | - name: pathToPublish
18 | type: string
19 |
20 | - name: continueOnError
21 | type: boolean
22 | default: false
23 |
24 | - name: publishLocation
25 | type: string
26 | default: 'Container'
27 |
28 | - name: retryCountOnTaskFailure
29 | type: string
30 | default: 10
31 |
32 | steps:
33 | - ${{ if eq(parameters.is1ESPipeline, true) }}:
34 | - 'eng/common/templates cannot be referenced from a 1ES managed template': error
35 | - task: PublishBuildArtifacts@1
36 | displayName: ${{ parameters.displayName }}
37 | condition: ${{ parameters.condition }}
38 | ${{ if parameters.continueOnError }}:
39 | continueOnError: ${{ parameters.continueOnError }}
40 | inputs:
41 | PublishLocation: ${{ parameters.publishLocation }}
42 | PathtoPublish: ${{ parameters.pathToPublish }}
43 | ${{ if parameters.artifactName }}:
44 | ArtifactName: ${{ parameters.artifactName }}
45 | ${{ if parameters.retryCountOnTaskFailure }}:
46 | retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }}
47 |
--------------------------------------------------------------------------------
/eng/common/sdl/sdl.ps1:
--------------------------------------------------------------------------------
1 |
2 | function Install-Gdn {
3 | param(
4 | [Parameter(Mandatory=$true)]
5 | [string]$Path,
6 |
7 | # If omitted, install the latest version of Guardian, otherwise install that specific version.
8 | [string]$Version
9 | )
10 |
11 | $ErrorActionPreference = 'Stop'
12 | Set-StrictMode -Version 2.0
13 | $disableConfigureToolsetImport = $true
14 | $global:LASTEXITCODE = 0
15 |
16 | # `tools.ps1` checks $ci to perform some actions. Since the SDL
17 | # scripts don't necessarily execute in the same agent that runs the
18 | # build.ps1/sh script, this variable isn't automatically set.
19 | $ci = $true
20 | . $PSScriptRoot\..\tools.ps1
21 |
22 | $argumentList = @("install", "Microsoft.Guardian.Cli", "-Source https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json", "-OutputDirectory $Path", "-NonInteractive", "-NoCache")
23 |
24 | if ($Version) {
25 | $argumentList += "-Version $Version"
26 | }
27 |
28 | Start-Process nuget -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
29 |
30 | $gdnCliPath = Get-ChildItem -Filter guardian.cmd -Recurse -Path $Path
31 |
32 | if (!$gdnCliPath)
33 | {
34 | Write-PipelineTelemetryError -Category 'Sdl' -Message 'Failure installing Guardian'
35 | }
36 |
37 | return $gdnCliPath.FullName
38 | }
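
A hedged usage sketch of Install-Gdn; the install path is a placeholder, and the version shown mirrors the DefaultGuardianVersion pinned in eng/common/core-templates/jobs/codeql-build.yml:

    # Install the Guardian CLI into a scratch folder (placeholder path) and capture the path to guardian.cmd.
    $guardianCli = Install-Gdn -Path 'D:\scratch\gdn' -Version '0.109.0'
    Write-Host "Guardian CLI installed at: $guardianCli"
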
--------------------------------------------------------------------------------
/eng/common/templates-official/steps/publish-build-artifacts.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | - name: displayName
3 | type: string
4 | default: 'Publish to Build Artifact'
5 |
6 | - name: condition
7 | type: string
8 | default: succeeded()
9 |
10 | - name: artifactName
11 | type: string
12 |
13 | - name: pathToPublish
14 | type: string
15 |
16 | - name: continueOnError
17 | type: boolean
18 | default: false
19 |
20 | - name: publishLocation
21 | type: string
22 | default: 'Container'
23 |
24 | - name: is1ESPipeline
25 | type: boolean
26 | default: true
27 |
28 | - name: retryCountOnTaskFailure
29 | type: string
30 | default: 10
31 |
32 | steps:
33 | - ${{ if ne(parameters.is1ESPipeline, true) }}:
34 | - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
35 | - task: 1ES.PublishBuildArtifacts@1
36 | displayName: ${{ parameters.displayName }}
37 | condition: ${{ parameters.condition }}
38 | ${{ if parameters.continueOnError }}:
39 | continueOnError: ${{ parameters.continueOnError }}
40 | inputs:
41 | PublishLocation: ${{ parameters.publishLocation }}
42 | PathtoPublish: ${{ parameters.pathToPublish }}
43 | ${{ if parameters.artifactName }}:
44 | ArtifactName: ${{ parameters.artifactName }}
45 | ${{ if parameters.retryCountOnTaskFailure }}:
46 | retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }}
47 |
--------------------------------------------------------------------------------
/eng/common/core-templates/jobs/codeql-build.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
3 | continueOnError: false
4 | # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
5 | jobs: []
6 | # Optional: if specified, restore and use this version of Guardian instead of the default.
7 | overrideGuardianVersion: ''
8 | is1ESPipeline: ''
9 |
10 | jobs:
11 | - template: /eng/common/core-templates/jobs/jobs.yml
12 | parameters:
13 | is1ESPipeline: ${{ parameters.is1ESPipeline }}
14 | enableMicrobuild: false
15 | enablePublishBuildArtifacts: false
16 | enablePublishTestResults: false
17 | enablePublishBuildAssets: false
18 | enableTelemetry: true
19 |
20 | variables:
21 | - group: Publish-Build-Assets
22 | # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
23 | # sync with the packages.config file.
24 | - name: DefaultGuardianVersion
25 | value: 0.109.0
26 | - name: GuardianPackagesConfigFile
27 | value: $(System.DefaultWorkingDirectory)\eng\common\sdl\packages.config
28 | - name: GuardianVersion
29 | value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
30 |
31 | jobs: ${{ parameters.jobs }}
32 |
33 |
--------------------------------------------------------------------------------
/eng/common/msbuild.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source="${BASH_SOURCE[0]}"
4 |
5 | # resolve $source until the file is no longer a symlink
6 | while [[ -h "$source" ]]; do
7 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
8 | source="$(readlink "$source")"
9 | # if $source was a relative symlink, we need to resolve it relative to the path where the
10 | # symlink file was located
11 | [[ $source != /* ]] && source="$scriptroot/$source"
12 | done
13 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
14 |
15 | verbosity='minimal'
16 | warn_as_error=true
17 | node_reuse=true
18 | prepare_machine=false
19 | extra_args=''
20 |
21 | while (($# > 0)); do
22 | lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
23 | case $lowerI in
24 | --verbosity)
25 | verbosity=$2
26 | shift 2
27 | ;;
28 | --warnaserror)
29 | warn_as_error=$2
30 | shift 2
31 | ;;
32 | --nodereuse)
33 | node_reuse=$2
34 | shift 2
35 | ;;
36 | --ci)
37 | ci=true
38 | shift 1
39 | ;;
40 | --preparemachine)
41 | prepare_machine=true
42 | shift 1
43 | ;;
44 | *)
45 | extra_args="$extra_args $1"
46 | shift 1
47 | ;;
48 | esac
49 | done
50 |
51 | . "$scriptroot/tools.sh"
52 |
53 | if [[ "$ci" == true ]]; then
54 | node_reuse=false
55 | fi
56 |
57 | MSBuild $extra_args
58 | ExitWithExitCode 0
59 |
--------------------------------------------------------------------------------
/eng/common/README.md:
--------------------------------------------------------------------------------
1 | # Don't touch this folder
2 |
3 | uuuuuuuuuuuuuuuuuuuu
4 | u" uuuuuuuuuuuuuuuuuu "u
5 | u" u$$$$$$$$$$$$$$$$$$$$u "u
6 | u" u$$$$$$$$$$$$$$$$$$$$$$$$u "u
7 | u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
8 | u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
9 | u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
10 | $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
11 | $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
12 | $ $$$" ... "$... ...$" ... "$$$ ... "$$$ $
13 | $ $$$u `"$$$$$$$ $$$ $$$$$ $$ $$$ $$$ $
14 | $ $$$$$$uu "$$$$ $$$ $$$$$ $$ """ u$$$ $
15 | $ $$$""$$$ $$$$ $$$u "$$$" u$$ $$$$$$$$ $
16 | $ $$$$....,$$$$$..$$$$$....,$$$$..$$$$$$$$ $
17 | $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
18 | "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
19 | "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
20 | "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
21 | "u "$$$$$$$$$$$$$$$$$$$$$$$$" u"
22 | "u "$$$$$$$$$$$$$$$$$$$$" u"
23 | "u """""""""""""""""" u"
24 | """"""""""""""""""""
25 |
26 | !!! Changes made in this directory are subject to being overwritten by automation !!!
27 |
28 | The files in this directory are shared by all Arcade repos and managed by automation. If you need to make changes to these files, open an issue or submit a pull request to https://github.com/dotnet/arcade first.
29 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/publish-pipeline-artifacts.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | - name: is1ESPipeline
3 | type: boolean
4 | default: false
5 |
6 | - name: args
7 | type: object
8 | default: {}
9 |
10 | steps:
11 | - ${{ if eq(parameters.is1ESPipeline, true) }}:
12 | - 'eng/common/templates cannot be referenced from a 1ES managed template': error
13 | - task: PublishPipelineArtifact@1
14 | displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
15 | ${{ if parameters.args.condition }}:
16 | condition: ${{ parameters.args.condition }}
17 | ${{ else }}:
18 | condition: succeeded()
19 | ${{ if parameters.args.continueOnError }}:
20 | continueOnError: ${{ parameters.args.continueOnError }}
21 | inputs:
22 | targetPath: ${{ parameters.args.targetPath }}
23 | ${{ if parameters.args.artifactName }}:
24 | artifactName: ${{ parameters.args.artifactName }}
25 | ${{ if parameters.args.publishLocation }}:
26 | publishLocation: ${{ parameters.args.publishLocation }}
27 | ${{ if parameters.args.fileSharePath }}:
28 | fileSharePath: ${{ parameters.args.fileSharePath }}
29 | ${{ if parameters.args.Parallel }}:
30 | parallel: ${{ parameters.args.Parallel }}
31 | ${{ if parameters.args.parallelCount }}:
32 | parallelCount: ${{ parameters.args.parallelCount }}
33 | ${{ if parameters.args.properties }}:
34 | properties: ${{ parameters.args.properties }}
--------------------------------------------------------------------------------
/eng/common/templates/vmr-build-pr.yml:
--------------------------------------------------------------------------------
1 | # This pipeline is used for running the VMR verification of the PR changes in repo-level PRs.
2 | #
3 | # It will run a full set of verification jobs defined in:
4 | # https://github.com/dotnet/dotnet/blob/10060d128e3f470e77265f8490f5e4f72dae738e/eng/pipelines/templates/stages/vmr-build.yml#L27-L38
5 | #
6 | # For repos that do not need to run the full set, you would do the following:
7 | #
8 | # 1. Copy this YML file to a repo-specific location, i.e. outside of eng/common.
9 | #
10 | # 2. Add `verifications` parameter to VMR template reference
11 | #
12 | # Examples:
13 | # - For source-build stage 1 verification, add the following:
14 | # verifications: [ "source-build-stage1" ]
15 | #
16 | # - For Windows only verifications, add the following:
17 | # verifications: [ "unified-build-windows-x64", "unified-build-windows-x86" ]
18 |
19 | trigger: none
20 | pr: none
21 |
22 | variables:
23 | - template: /eng/common/templates/variables/pool-providers.yml@self
24 |
25 | - name: skipComponentGovernanceDetection # we run CG on internal builds only
26 | value: true
27 |
28 | - name: Codeql.Enabled # we run CodeQL on internal builds only
29 | value: false
30 |
31 | resources:
32 | repositories:
33 | - repository: vmr
34 | type: github
35 | name: dotnet/dotnet
36 | endpoint: dotnet
37 |
38 | stages:
39 | - template: /eng/pipelines/templates/stages/vmr-build.yml@vmr
40 | parameters:
41 | isBuiltFromVmr: false
42 | scope: lite
43 |
--------------------------------------------------------------------------------
/eng/common/retain-build.ps1:
--------------------------------------------------------------------------------
1 |
2 | Param(
3 | [Parameter(Mandatory=$true)][int] $buildId,
4 | [Parameter(Mandatory=$true)][string] $azdoOrgUri,
5 | [Parameter(Mandatory=$true)][string] $azdoProject,
6 | [Parameter(Mandatory=$true)][string] $token
7 | )
8 |
9 | $ErrorActionPreference = 'Stop'
10 | Set-StrictMode -Version 2.0
11 |
12 | function Get-AzDOHeaders(
13 | [string] $token)
14 | {
15 | $base64AuthInfo = [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes(":${token}"))
16 | $headers = @{"Authorization"="Basic $base64AuthInfo"}
17 | return $headers
18 | }
19 |
20 | function Update-BuildRetention(
21 | [string] $azdoOrgUri,
22 | [string] $azdoProject,
23 | [int] $buildId,
24 | [string] $token)
25 | {
26 | $headers = Get-AzDOHeaders -token $token
27 | $requestBody = "{
28 | `"keepForever`": `"true`"
29 | }"
30 |
31 | $requestUri = "${azdoOrgUri}/${azdoProject}/_apis/build/builds/${buildId}?api-version=6.0"
32 | write-Host "Attempting to retain build using the following URI: ${requestUri} ..."
33 |
34 | try {
35 | Invoke-RestMethod -Uri $requestUri -Method Patch -Body $requestBody -Header $headers -contentType "application/json"
36 | Write-Host "Updated retention settings for build ${buildId}."
37 | }
38 | catch {
39 | Write-Error "Failed to update retention settings for build: $($_.Exception.Response.StatusDescription)"
40 | exit 1
41 | }
42 | }
43 |
44 | Update-BuildRetention -azdoOrgUri $azdoOrgUri -azdoProject $azdoProject -buildId $buildId -token $token
45 | exit 0
46 |
--------------------------------------------------------------------------------
/eng/common/sdl/run-sdl.ps1:
--------------------------------------------------------------------------------
1 | Param(
2 | [string] $GuardianCliLocation,
3 | [string] $WorkingDirectory,
4 | [string] $GdnFolder,
5 | [string] $UpdateBaseline,
6 | [string] $GuardianLoggerLevel='Standard'
7 | )
8 |
9 | $ErrorActionPreference = 'Stop'
10 | Set-StrictMode -Version 2.0
11 | $disableConfigureToolsetImport = $true
12 | $global:LASTEXITCODE = 0
13 |
14 | try {
15 | # `tools.ps1` checks $ci to perform some actions. Since the SDL
16 | # scripts don't necessarily execute in the same agent that runs the
17 | # build.ps1/sh script, this variable isn't automatically set.
18 | $ci = $true
19 | . $PSScriptRoot\..\tools.ps1
20 |
21 | # We store config files in the r directory of .gdn
22 | $gdnConfigPath = Join-Path $GdnFolder 'r'
23 | $ValidPath = Test-Path $GuardianCliLocation
24 |
25 | if ($ValidPath -eq $False)
26 | {
27 | Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
28 | ExitWithExitCode 1
29 | }
30 |
31 | $gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig'
32 | Write-Host "Discovered Guardian config files:"
33 | $gdnConfigFiles | Out-String | Write-Host
34 |
35 | Exec-BlockVerbosely {
36 | & $GuardianCliLocation run `
37 | --working-directory $WorkingDirectory `
38 | --baseline mainbaseline `
39 | --update-baseline $UpdateBaseline `
40 | --logger-level $GuardianLoggerLevel `
41 | --config @gdnConfigFiles
42 | Exit-IfNZEC "Sdl"
43 | }
44 | }
45 | catch {
46 | Write-Host $_.ScriptStackTrace
47 | Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
48 | ExitWithExitCode 1
49 | }
50 |
--------------------------------------------------------------------------------
/eng/common/internal/Tools.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | net472
6 | false
7 | false
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/eng/common/core-templates/steps/get-federated-access-token.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | - name: federatedServiceConnection
3 | type: string
4 | - name: outputVariableName
5 | type: string
6 | - name: is1ESPipeline
7 | type: boolean
8 | - name: stepName
9 | type: string
10 | default: 'getFederatedAccessToken'
11 | - name: condition
12 | type: string
13 | default: ''
14 | # Resource to get a token for. Common values include:
15 | # - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps
16 | # - 'https://storage.azure.com/' for storage
17 | # Defaults to Azure DevOps
18 | - name: resource
19 | type: string
20 | default: '499b84ac-1321-427f-aa17-267ca6975798'
21 | - name: isStepOutputVariable
22 | type: boolean
23 | default: false
24 |
25 | steps:
26 | - task: AzureCLI@2
27 | displayName: 'Getting federated access token for feeds'
28 | name: ${{ parameters.stepName }}
29 | ${{ if ne(parameters.condition, '') }}:
30 | condition: ${{ parameters.condition }}
31 | inputs:
32 | azureSubscription: ${{ parameters.federatedServiceConnection }}
33 | scriptType: 'pscore'
34 | scriptLocation: 'inlineScript'
35 | inlineScript: |
36 | $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv
37 | if ($LASTEXITCODE -ne 0) {
38 | Write-Error "Failed to get access token for resource '${{ parameters.resource }}'"
39 | exit 1
40 | }
41 | Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
42 | Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true;isOutput=${{ parameters.isStepOutputVariable }}]$accessToken"
--------------------------------------------------------------------------------
/eng/common/generate-sbom-prep.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source="${BASH_SOURCE[0]}"
4 |
5 | # resolve $SOURCE until the file is no longer a symlink
6 | while [[ -h $source ]]; do
7 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
8 | source="$(readlink "$source")"
9 |
10 | # if $source was a relative symlink, we need to resolve it relative to the path where the
11 | # symlink file was located
12 | [[ $source != /* ]] && source="$scriptroot/$source"
13 | done
14 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
15 | . $scriptroot/pipeline-logging-functions.sh
16 |
17 |
18 | # Replace all special characters with _. Some builds use special characters like ':' in Agent.JobName, which is not permissible in an artifact name when uploading artifacts.
19 | artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM"
20 | safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}"
21 | manifest_dir=$1
22 |
23 | # Normally we'd use the manifest path as given, but if this level gets uploaded directly the 1ES templates
24 | # will overwrite it with their own manifest, clobbering ours. So we create it as a sub directory of the requested manifest path.
25 | sbom_generation_dir="$manifest_dir/$safe_artifact_name"
26 |
27 | if [ ! -d "$sbom_generation_dir" ] ; then
28 | mkdir -p "$sbom_generation_dir"
29 | echo "Sbom directory created." $sbom_generation_dir
30 | else
31 | Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder."
32 | fi
33 |
34 | echo "Artifact name before : "$artifact_name
35 | echo "Artifact name after : "$safe_artifact_name
36 | export ARTIFACT_NAME=$safe_artifact_name
37 | echo "##vso[task.setvariable variable=ARTIFACT_NAME]$safe_artifact_name"
38 |
39 | exit 0
40 |
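
As an illustration of the name sanitization above, a rough PowerShell analogue (the stage/job names are hypothetical); ':' and spaces in Agent.JobName become '_':

    $artifactName     = 'Stage1_Build Windows:x64_SBOM'   # hypothetical $SYSTEM_STAGENAME/$AGENT_JOBNAME values
    $safeArtifactName = $artifactName -replace '["/:<>\\|?@*$ ]', '_'
    $safeArtifactName                                     # -> Stage1_Build_Windows_x64_SBOM
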
--------------------------------------------------------------------------------
/eng/common/core-templates/steps/get-delegation-sas.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | - name: federatedServiceConnection
3 | type: string
4 | - name: outputVariableName
5 | type: string
6 | - name: expiryInHours
7 | type: number
8 | default: 1
9 | - name: base64Encode
10 | type: boolean
11 | default: false
12 | - name: storageAccount
13 | type: string
14 | - name: container
15 | type: string
16 | - name: permissions
17 | type: string
18 | default: 'rl'
19 | - name: is1ESPipeline
20 | type: boolean
21 | default: false
22 |
23 | steps:
24 | - task: AzureCLI@2
25 | displayName: 'Generate delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}'
26 | inputs:
27 | azureSubscription: ${{ parameters.federatedServiceConnection }}
28 | scriptType: 'pscore'
29 | scriptLocation: 'inlineScript'
30 | inlineScript: |
31 | # Calculate the expiration of the SAS token and convert to UTC
32 | $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
33 |
34 | $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
35 |
36 | if ($LASTEXITCODE -ne 0) {
37 | Write-Error "Failed to generate SAS token."
38 | exit 1
39 | }
40 |
41 | if ('${{ parameters.base64Encode }}' -eq 'true') {
42 | $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas))
43 | }
44 |
45 | Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
46 | Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas"
47 |
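
If base64Encode is true, a downstream PowerShell step has to reverse the encoding before using the token. A minimal sketch, assuming the secret output variable was explicitly mapped into that step's environment as DELEGATION_SAS (a hypothetical name):

    # Decode the base64-encoded SAS back to its original string form.
    $sas = [System.Text.Encoding]::UTF8.GetString([Convert]::FromBase64String($env:DELEGATION_SAS))
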
--------------------------------------------------------------------------------
/eng/common/core-templates/job/source-index-stage1.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | runAsPublic: false
3 | sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
4 | preSteps: []
5 | binlogPath: artifacts/log/Debug/Build.binlog
6 | condition: eq(variables['Build.SourceBranch'], 'refs/heads/main')
7 | dependsOn: ''
8 | pool: ''
9 | is1ESPipeline: ''
10 |
11 | jobs:
12 | - job: SourceIndexStage1
13 | dependsOn: ${{ parameters.dependsOn }}
14 | condition: ${{ parameters.condition }}
15 | variables:
16 | - name: BinlogPath
17 | value: ${{ parameters.binlogPath }}
18 | - template: /eng/common/core-templates/variables/pool-providers.yml
19 | parameters:
20 | is1ESPipeline: ${{ parameters.is1ESPipeline }}
21 |
22 | ${{ if ne(parameters.pool, '') }}:
23 | pool: ${{ parameters.pool }}
24 | ${{ if eq(parameters.pool, '') }}:
25 | pool:
26 | ${{ if eq(variables['System.TeamProject'], 'public') }}:
27 | name: $(DncEngPublicBuildPool)
28 | image: windows.vs2026preview.scout.amd64.open
29 | ${{ if eq(variables['System.TeamProject'], 'internal') }}:
30 | name: $(DncEngInternalBuildPool)
31 | image: windows.vs2026preview.scout.amd64
32 |
33 | steps:
34 | - ${{ if eq(parameters.is1ESPipeline, '') }}:
35 | - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
36 |
37 | - ${{ each preStep in parameters.preSteps }}:
38 | - ${{ preStep }}
39 | - script: ${{ parameters.sourceIndexBuildCommand }}
40 | displayName: Build Repository
41 |
42 | - template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
43 | parameters:
44 | binLogPath: ${{ parameters.binlogPath }}
45 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## Welcome to the .NET SDK Workload Versions repo
2 |
3 | This repository contains the version information for .NET SDK Workloads.
4 |
5 | ### Pre-requisites for local VS insertion build
6 |
7 | 1. Install the latest [Visual Studio](https://visualstudio.microsoft.com/downloads/) with the .NET Desktop workload
8 | - Make sure to restart your PC after the installation is complete.
9 | 2. [Install Azure CLI](https://learn.microsoft.com/cli/azure/install-azure-cli-windows#install-or-update)
10 | 3. Run `az login` to authenticate
11 | - When it asks for a subscription to select, just press Enter. The default subscription selection does not affect DARC.
12 | 4. [Install DARC](https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#setting-up-your-darc-client)
13 | 5. [Add GitHub auth for DARC](https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#step-3-set-additional-pats-for-azure-devops-and-github-operations)
14 | - Use the [darc authenticate](https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#authenticate) command.
15 | - Generate the GitHub PAT [here](https://github.com/settings/tokens?type=beta). Create a fine-grained PAT instead of the classic PAT.
16 | - **Do not** create an AzDO PAT. Leave that entry blank in the darc-authenticate file for it to use local machine auth.
17 | 6. Request access to the [.NET Daily Internal Build Access](https://coreidentity.microsoft.com/manage/Entitlement/entitlement/netdailyinte-q2ql) entitlement
18 | - This allows the local AzDO machine auth to gather internal assets from AzDO.
19 | - **Send a message** to one of the primary owners on the entitlement page for approval after requesting access to the entitlement.
20 | - It should take about 20 mins for the entitlement process to complete (it will appear on your [entitlements list](https://coreidentity.microsoft.com/manage/entitlement)) and another 30 mins for the access to propagate to DARC. Basically, after approval, wait an hour before you actually attempt to build.
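
A condensed sketch of the authentication commands from steps 2-5 above (run from any shell; both prompts are interactive):

    az login             # press Enter at the subscription prompt; the default selection is fine
    darc authenticate    # paste the fine-grained GitHub PAT; leave the AzDO PAT entry blank
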
--------------------------------------------------------------------------------
/eng/common/loc/P22DotNetHtmlLocalization.lss:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
22 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/eng/common/core-templates/jobs/source-build.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | # This template adds arcade-powered source-build to CI. A job is created for each platform, as
3 | # well as an optional server job that completes when all platform jobs complete.
4 |
5 | # See /eng/common/core-templates/job/source-build.yml
6 | jobNamePrefix: 'Source_Build'
7 |
8 | # This is the default platform provided by Arcade, intended for use by a managed-only repo.
9 | defaultManagedPlatform:
10 | name: 'Managed'
11 | container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream-10-amd64'
12 |
13 | # Defines the platforms on which to run build jobs. One job is created for each platform, and the
14 | # object in this array is sent to the job template as 'platform'. If no platforms are specified,
15 | # one job runs on 'defaultManagedPlatform'.
16 | platforms: []
17 |
18 | is1ESPipeline: ''
19 |
20 | # If set to true and running on a non-public project,
21 | # Internal nuget and blob storage locations will be enabled.
22 | # This is not enabled by default because many repositories do not need internal sources
23 | # and do not need to have the required service connections approved in the pipeline.
24 | enableInternalSources: false
25 |
26 | jobs:
27 |
28 | - ${{ each platform in parameters.platforms }}:
29 | - template: /eng/common/core-templates/job/source-build.yml
30 | parameters:
31 | is1ESPipeline: ${{ parameters.is1ESPipeline }}
32 | jobNamePrefix: ${{ parameters.jobNamePrefix }}
33 | platform: ${{ platform }}
34 | enableInternalSources: ${{ parameters.enableInternalSources }}
35 |
36 | - ${{ if eq(length(parameters.platforms), 0) }}:
37 | - template: /eng/common/core-templates/job/source-build.yml
38 | parameters:
39 | is1ESPipeline: ${{ parameters.is1ESPipeline }}
40 | jobNamePrefix: ${{ parameters.jobNamePrefix }}
41 | platform: ${{ parameters.defaultManagedPlatform }}
42 | enableInternalSources: ${{ parameters.enableInternalSources }}
43 |
--------------------------------------------------------------------------------
/eng/common/darc-init.ps1:
--------------------------------------------------------------------------------
1 | param (
2 | $darcVersion = $null,
3 | $versionEndpoint = 'https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20',
4 | $verbosity = 'minimal',
5 | $toolpath = $null
6 | )
7 |
8 | . $PSScriptRoot\tools.ps1
9 |
10 | function InstallDarcCli ($darcVersion, $toolpath) {
11 | $darcCliPackageName = 'microsoft.dotnet.darc'
12 |
13 | $dotnetRoot = InitializeDotNetCli -install:$true
14 | $dotnet = "$dotnetRoot\dotnet.exe"
15 | $toolList = & "$dotnet" tool list -g
16 |
17 | if ($toolList -like "*$darcCliPackageName*") {
18 | & "$dotnet" tool uninstall $darcCliPackageName -g
19 | }
20 |
21 | # If the user didn't explicitly specify the darc version,
22 | # query the Maestro API for the correct version of darc to install.
23 | if (-not $darcVersion) {
24 | $darcVersion = $(Invoke-WebRequest -Uri $versionEndpoint -UseBasicParsing).Content
25 | }
26 |
27 | $arcadeServicesSource = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
28 |
29 | Write-Host "Installing Darc CLI version $darcVersion..."
30 | Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
31 | if (-not $toolpath) {
32 | Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity -g"
33 | & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g
34 | }else {
35 | Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity --tool-path '$toolpath'"
36 | & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath"
37 | }
38 | }
39 |
40 | try {
41 | InstallDarcCli $darcVersion $toolpath
42 | }
43 | catch {
44 | Write-Host $_.ScriptStackTrace
45 | Write-PipelineTelemetryError -Category 'Darc' -Message $_
46 | ExitWithExitCode 1
47 | }
--------------------------------------------------------------------------------
/eng/common/native/install-dependencies.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | set -e
4 |
5 | # This is a simple script primarily used for CI to install necessary dependencies
6 | #
7 | # Usage:
8 | #
9 | # ./install-dependencies.sh
10 |
11 | os="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
12 |
13 | if [ -z "$os" ]; then
14 | . "$(dirname "$0")"/init-os-and-arch.sh
15 | fi
16 |
17 | case "$os" in
18 | linux)
19 | if [ -e /etc/os-release ]; then
20 | . /etc/os-release
21 | fi
22 |
23 | if [ "$ID" = "debian" ] || [ "$ID_LIKE" = "debian" ]; then
24 | apt update
25 |
26 | apt install -y build-essential gettext locales cmake llvm clang lld lldb liblldb-dev libunwind8-dev libicu-dev liblttng-ust-dev \
27 | libssl-dev libkrb5-dev pigz cpio
28 |
29 | localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
30 | elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ]; then
31 | pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)"
32 | $pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio
33 | elif [ "$ID" = "alpine" ]; then
34 | apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio
35 | else
36 | echo "Unsupported distro. distro: $ID"
37 | exit 1
38 | fi
39 | ;;
40 |
41 | osx|maccatalyst|ios|iossimulator|tvos|tvossimulator)
42 | echo "Installed xcode version: $(xcode-select -p)"
43 |
44 | export HOMEBREW_NO_INSTALL_CLEANUP=1
45 | export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1
46 | # Skip brew update for now, see https://github.com/actions/setup-python/issues/577
47 | # brew update --preinstall
48 | brew bundle --no-upgrade --file=- <<EOF
[remainder of install-dependencies.sh truncated in source: Brewfile package list, closing EOF, and final esac]
--------------------------------------------------------------------------------
/eng/common/native/init-os-and-arch.sh:
--------------------------------------------------------------------------------
[lines 1-5 truncated in source: shebang and uname-based OS-name detection into OSName]
6 | if command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then
7 | OSName="android"
8 | fi
9 |
10 | case "$OSName" in
11 | freebsd|linux|netbsd|openbsd|sunos|android|haiku)
12 | os="$OSName" ;;
13 | darwin)
14 | os=osx ;;
15 | *)
16 | echo "Unsupported OS $OSName detected!"
17 | exit 1 ;;
18 | esac
19 |
20 | # On Solaris, `uname -m` is discouraged, see https://docs.oracle.com/cd/E36784_01/html/E36870/uname-1.html
21 | # and `uname -p` returns processor type (e.g. i386 on amd64).
22 | # The appropriate tool to determine CPU is isainfo(1) https://docs.oracle.com/cd/E36784_01/html/E36870/isainfo-1.html.
23 | if [ "$os" = "sunos" ]; then
24 | if uname -o 2>&1 | grep -q illumos; then
25 | os="illumos"
26 | else
27 | os="solaris"
28 | fi
29 | CPUName=$(isainfo -n)
30 | else
31 | # For the rest of the operating systems, use uname(1) to determine what the CPU is.
32 | CPUName=$(uname -m)
33 | fi
34 |
35 | case "$CPUName" in
36 | arm64|aarch64)
37 | arch=arm64
38 | if [ "$(getconf LONG_BIT)" -lt 64 ]; then
39 | # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
40 | arch=arm
41 | fi
42 | ;;
43 |
44 | loongarch64)
45 | arch=loongarch64
46 | ;;
47 |
48 | riscv64)
49 | arch=riscv64
50 | ;;
51 |
52 | amd64|x86_64)
53 | arch=x64
54 | ;;
55 |
56 | armv7l|armv8l)
57 | # shellcheck disable=SC1091
58 | if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then
59 | arch=armel
60 | else
61 | arch=arm
62 | fi
63 | ;;
64 |
65 | armv6l)
66 | arch=armv6
67 | ;;
68 |
69 | i[3-6]86)
70 | echo "Unsupported CPU $CPUName detected, build might not succeed!"
71 | arch=x86
72 | ;;
73 |
74 | s390x)
75 | arch=s390x
76 | ;;
77 |
78 | ppc64le)
79 | arch=ppc64le
80 | ;;
81 | *)
82 | echo "Unknown CPU $CPUName detected!"
83 | exit 1
84 | ;;
85 | esac
86 |
--------------------------------------------------------------------------------
/eng/common/post-build/check-channel-consistency.ps1:
--------------------------------------------------------------------------------
1 | param(
2 | [Parameter(Mandatory=$true)][string] $PromoteToChannels, # List of channels that the build should be promoted to
3 | [Parameter(Mandatory=$true)][array] $AvailableChannelIds # List of channel IDs available in the YAML implementation
4 | )
5 |
6 | try {
7 | $ErrorActionPreference = 'Stop'
8 | Set-StrictMode -Version 2.0
9 |
10 | # `tools.ps1` checks $ci to perform some actions. Since the post-build
11 | # scripts don't necessarily execute in the same agent that runs the
12 | # build.ps1/sh script, this variable isn't automatically set.
13 | $ci = $true
14 | $disableConfigureToolsetImport = $true
15 | . $PSScriptRoot\..\tools.ps1
16 |
17 | if ($PromoteToChannels -eq "") {
18 | Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
19 | ExitWithExitCode 0
20 | }
21 |
22 | # Check that every channel that Maestro told to promote the build to
23 | # is available in YAML
24 | $PromoteToChannelsIds = $PromoteToChannels -split "\D" | Where-Object { $_ }
25 |
26 | $hasErrors = $false
27 |
28 | foreach ($id in $PromoteToChannelsIds) {
29 | if (($id -ne 0) -and ($id -notin $AvailableChannelIds)) {
30 | Write-PipelineTaskError -Message "Channel $id is not present in the post-build YAML configuration! This is an error scenario. Please contact @dnceng."
31 | $hasErrors = $true
32 | }
33 | }
34 |
35 | # The `Write-PipelineTaskError` doesn't error the script and we might report several errors
36 | # in the previous lines. The check below makes sure that we return an error state from the
37 | # script if we reported any validation error
38 | if ($hasErrors) {
39 | ExitWithExitCode 1
40 | }
41 |
42 | Write-Host 'done.'
43 | }
44 | catch {
45 | Write-Host $_
46 | Write-PipelineTelemetryError -Category 'CheckChannelConsistency' -Message "There was an error while trying to check consistency of Maestro default channels for the build and post-build YAML configuration."
47 | ExitWithExitCode 1
48 | }
49 |
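
A worked example of the channel-id parsing above, with a hypothetical input value:

    $PromoteToChannels    = '1296;5678'                               # hypothetical Maestro channel list
    $PromoteToChannelsIds = $PromoteToChannels -split "\D" | Where-Object { $_ }
    $PromoteToChannelsIds                                             # -> 1296, 5678
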
--------------------------------------------------------------------------------
/eng/common/templates-official/variables/pool-providers.yml:
--------------------------------------------------------------------------------
1 | # Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool,
2 | # otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches.
3 |
4 | # Motivation:
5 | # Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS
6 | # (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing
7 | # (allowing release builds and main PR builds to not interfere with each other) and billing (required for COGS).
8 | # Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services
9 | # team needs to move resources around and create new and potentially differently-named pools. Using this template
10 | # file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming.
11 |
12 | # How to use:
13 | # This yaml assumes your shipped product branches use the naming convention "release/..." (which many do).
14 | # If we find alternate naming conventions in broad usage they can be added to the condition below.
15 | #
16 | # First, import the template in an arcade-ified repo to pick up the variables, e.g.:
17 | #
18 | # variables:
19 | # - template: /eng/common/templates-official/variables/pool-providers.yml
20 | #
21 | # ... then anywhere specifying the pool provider use the runtime variables,
22 | # $(DncEngInternalBuildPool)
23 | #
24 | # pool:
25 | # name: $(DncEngInternalBuildPool)
26 | # image: 1es-windows-2022
27 |
28 | variables:
29 | # Coalesce the target and source branches so we know when a PR targets a release branch
30 | # If these variables are somehow missing, fall back to main (tends to have more capacity)
31 |
32 | # Any new -Svc alternative pools should have variables added here to allow for splitting work
33 |
34 | - name: DncEngInternalBuildPool
35 | value: $[
36 | replace(
37 | replace(
38 | eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
39 | True,
40 | 'NetCore1ESPool-Svc-Internal'
41 | ),
42 | False,
43 | 'NetCore1ESPool-Internal'
44 | )
45 | ]
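
A rough PowerShell analogue of the runtime expression above, showing how the branch name selects the pool (the branch names are hypothetical; the real expression is evaluated by Azure DevOps):

    function Get-DncEngInternalBuildPool([string]$TargetBranch, [string]$SourceBranch) {
        # Coalesce: first non-empty of PR target branch, source branch, then 'refs/heads/main'.
        $branch = @($TargetBranch, $SourceBranch, 'refs/heads/main') | Where-Object { $_ } | Select-Object -First 1
        if ($branch -like '*release*') { 'NetCore1ESPool-Svc-Internal' } else { 'NetCore1ESPool-Internal' }
    }
    Get-DncEngInternalBuildPool -SourceBranch 'refs/heads/release/9.0.1xx'   # -> NetCore1ESPool-Svc-Internal
    Get-DncEngInternalBuildPool -SourceBranch 'refs/heads/main'              # -> NetCore1ESPool-Internal
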
--------------------------------------------------------------------------------
/eng/common/cross/tizen-build-rootfs.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -e
3 |
4 | ARCH=$1
5 | LINK_ARCH=$ARCH
6 |
7 | case "$ARCH" in
8 | arm)
9 | TIZEN_ARCH="armv7hl"
10 | ;;
11 | armel)
12 | TIZEN_ARCH="armv7l"
13 | LINK_ARCH="arm"
14 | ;;
15 | arm64)
16 | TIZEN_ARCH="aarch64"
17 | ;;
18 | x86)
19 | TIZEN_ARCH="i686"
20 | ;;
21 | x64)
22 | TIZEN_ARCH="x86_64"
23 | LINK_ARCH="x86"
24 | ;;
25 | riscv64)
26 | TIZEN_ARCH="riscv64"
27 | LINK_ARCH="riscv"
28 | ;;
29 | *)
30 | echo "Unsupported architecture for tizen: $ARCH"
31 | exit 1
32 | esac
33 |
34 | __CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
35 | __TIZEN_CROSSDIR="$__CrossDir/${ARCH}/tizen"
36 |
37 | if [[ -z "$ROOTFS_DIR" ]]; then
38 | echo "ROOTFS_DIR is not defined."
39 | exit 1;
40 | fi
41 |
42 | TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
43 | mkdir -p $TIZEN_TMP_DIR
44 |
45 | # Download files
46 | echo ">>Start downloading files"
47 | VERBOSE=1 $__CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR $TIZEN_ARCH
48 | echo "<>Start constructing Tizen rootfs"
51 | TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
52 | cd $ROOTFS_DIR
53 | for f in $TIZEN_RPM_FILES; do
54 | rpm2cpio $f | cpio -idm --quiet
55 | done
56 | echo "<>Start configuring Tizen rootfs"
63 | ln -sfn asm-${LINK_ARCH} ./usr/include/asm
64 | patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
65 | if [[ "$TIZEN_ARCH" == "riscv64" ]]; then
66 | echo "Fixing broken symlinks in $PWD"
67 | rm ./usr/lib64/libresolv.so
68 | ln -s ../../lib64/libresolv.so.2 ./usr/lib64/libresolv.so
69 | rm ./usr/lib64/libpthread.so
70 | ln -s ../../lib64/libpthread.so.0 ./usr/lib64/libpthread.so
71 | rm ./usr/lib64/libdl.so
72 | ln -s ../../lib64/libdl.so.2 ./usr/lib64/libdl.so
73 | rm ./usr/lib64/libutil.so
74 | ln -s ../../lib64/libutil.so.1 ./usr/lib64/libutil.so
75 | rm ./usr/lib64/libm.so
76 | ln -s ../../lib64/libm.so.6 ./usr/lib64/libm.so
77 | rm ./usr/lib64/librt.so
78 | ln -s ../../lib64/librt.so.1 ./usr/lib64/librt.so
79 | rm ./lib/ld-linux-riscv64-lp64d.so.1
80 | ln -s ../lib64/ld-linux-riscv64-lp64d.so.1 ./lib/ld-linux-riscv64-lp64d.so.1
81 | fi
82 | echo "<
12 |
13 | Param(
14 | [string] $InputPath,
15 | [bool] $Recursive = $true
16 | )
17 |
18 | $CliToolName = "Microsoft.DotNet.VersionTools.Cli"
19 |
20 | function Install-VersionTools-Cli {
21 | param(
22 | [Parameter(Mandatory=$true)][string]$Version
23 | )
24 |
25 | Write-Host "Installing the package '$CliToolName' with a version of '$version' ..."
26 | $feed = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"
27 |
28 | $argumentList = @("tool", "install", "--local", "$CliToolName", "--add-source $feed", "--no-cache", "--version $Version", "--create-manifest-if-needed")
29 | Start-Process "$dotnet" -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
30 | }
31 |
32 | # -------------------------------------------------------------------
33 |
34 | if (!(Test-Path $InputPath)) {
35 | Write-Host "Input Path '$InputPath' does not exist"
36 | ExitWithExitCode 1
37 | }
38 |
39 | $ErrorActionPreference = 'Stop'
40 | Set-StrictMode -Version 2.0
41 |
42 | $disableConfigureToolsetImport = $true
43 | $global:LASTEXITCODE = 0
44 |
45 | # `tools.ps1` checks $ci to perform some actions. Since the SDL
46 | # scripts don't necessarily execute in the same agent that runs the
47 | # build.ps1/sh script, this variable isn't automatically set.
48 | $ci = $true
49 | . $PSScriptRoot\..\tools.ps1
50 |
51 | try {
52 | $dotnetRoot = InitializeDotNetCli -install:$true
53 | $dotnet = "$dotnetRoot\dotnet.exe"
54 |
55 | $toolsetVersion = Read-ArcadeSdkVersion
56 | Install-VersionTools-Cli -Version $toolsetVersion
57 |
58 | $cliToolFound = (& "$dotnet" tool list --local | Where-Object {$_.Split(' ')[0] -eq $CliToolName})
59 | if ($null -eq $cliToolFound) {
60 | Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "The '$CliToolName' tool is not installed."
61 | ExitWithExitCode 1
62 | }
63 |
64 | Exec-BlockVerbosely {
65 | & "$dotnet" $CliToolName trim-assets-version `
66 | --assets-path $InputPath `
67 | --recursive $Recursive
68 | Exit-IfNZEC "Sdl"
69 | }
70 | }
71 | catch {
72 | Write-Host $_
73 | Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
74 | ExitWithExitCode 1
75 | }
76 |
--------------------------------------------------------------------------------
/eng/common/sdl/extract-artifact-archives.ps1:
--------------------------------------------------------------------------------
1 | # This script looks for each archive file in a directory and extracts it into the target directory.
2 | # For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**".
3 | # Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip.
4 | param(
5 | # Full path to directory where archives are stored.
6 | [Parameter(Mandatory=$true)][string] $InputPath,
7 | # Full path to directory to extract archives into. May be the same as $InputPath.
8 | [Parameter(Mandatory=$true)][string] $ExtractPath
9 | )
10 |
11 | $ErrorActionPreference = 'Stop'
12 | Set-StrictMode -Version 2.0
13 |
14 | $disableConfigureToolsetImport = $true
15 |
16 | try {
17 | # `tools.ps1` checks $ci to perform some actions. Since the SDL
18 | # scripts don't necessarily execute in the same agent that runs the
19 | # build.ps1/sh script, this variable isn't automatically set.
20 | $ci = $true
21 | . $PSScriptRoot\..\tools.ps1
22 |
23 | Measure-Command {
24 | $jobs = @()
25 |
26 | # Find archive files for non-Windows and Windows builds.
27 | $archiveFiles = @(
28 | Get-ChildItem (Join-Path $InputPath "*.tar.gz")
29 | Get-ChildItem (Join-Path $InputPath "*.zip")
30 | )
31 |
32 | foreach ($targzFile in $archiveFiles) {
33 | $jobs += Start-Job -ScriptBlock {
34 | $file = $using:targzFile
35 | $fileName = [System.IO.Path]::GetFileName($file)
36 | $extractDir = Join-Path $using:ExtractPath "$fileName.extracted"
37 |
38 | New-Item $extractDir -ItemType Directory -Force | Out-Null
39 |
40 | Write-Host "Extracting '$file' to '$extractDir'..."
41 |
42 | # Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early.
43 | # This type of quit skips the catch, so we wouldn't be able to tell which file triggered the
44 | # error. Save output so it can be stored in the exception string along with context.
45 | $output = tar -xf $file -C $extractDir 2>&1
46 | # Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we
47 | # don't have access to the outer scope.
48 | if ($LASTEXITCODE -ne 0) {
49 | throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'"
50 | }
51 |
52 | Write-Host "Extracted to $extractDir"
53 | }
54 | }
55 |
56 | Receive-Job $jobs -Wait
57 | }
58 | }
59 | catch {
60 | Write-Host $_
61 | Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
62 | ExitWithExitCode 1
63 | }
64 |
--------------------------------------------------------------------------------
/eng/common/dotnet-install.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source="${BASH_SOURCE[0]}"
4 | # resolve $source until the file is no longer a symlink
5 | while [[ -h "$source" ]]; do
6 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
7 | source="$(readlink "$source")"
8 | # if $source was a relative symlink, we need to resolve it relative to the path where the
9 | # symlink file was located
10 | [[ $source != /* ]] && source="$scriptroot/$source"
11 | done
12 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
13 |
14 | . "$scriptroot/tools.sh"
15 |
16 | version='Latest'
17 | architecture=''
18 | runtime='dotnet'
19 | runtimeSourceFeed=''
20 | runtimeSourceFeedKey=''
21 | while [[ $# > 0 ]]; do
22 | opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
23 | case "$opt" in
24 | -version|-v)
25 | shift
26 | version="$1"
27 | ;;
28 | -architecture|-a)
29 | shift
30 | architecture="$1"
31 | ;;
32 | -runtime|-r)
33 | shift
34 | runtime="$1"
35 | ;;
36 | -runtimesourcefeed)
37 | shift
38 | runtimeSourceFeed="$1"
39 | ;;
40 | -runtimesourcefeedkey)
41 | shift
42 | runtimeSourceFeedKey="$1"
43 | ;;
44 | *)
45 | Write-PipelineTelemetryError -Category 'Build' -Message "Invalid argument: $1"
46 | exit 1
47 | ;;
48 | esac
49 | shift
50 | done
51 |
52 | # Use uname to determine what the CPU is, see https://en.wikipedia.org/wiki/Uname#Examples
53 | cpuname=$(uname -m)
54 | case $cpuname in
55 | arm64|aarch64)
56 | buildarch=arm64
57 | if [ "$(getconf LONG_BIT)" -lt 64 ]; then
58 | # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
59 | buildarch=arm
60 | fi
61 | ;;
62 | loongarch64)
63 | buildarch=loongarch64
64 | ;;
65 | amd64|x86_64)
66 | buildarch=x64
67 | ;;
68 | armv*l)
69 | buildarch=arm
70 | ;;
71 | i[3-6]86)
72 | buildarch=x86
73 | ;;
74 | riscv64)
75 | buildarch=riscv64
76 | ;;
77 | *)
78 | echo "Unknown CPU $cpuname detected, treating it as x64"
79 | buildarch=x64
80 | ;;
81 | esac
82 |
83 | dotnetRoot="${repo_root}.dotnet"
84 | if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then
85 | dotnetRoot="$dotnetRoot/$architecture"
86 | fi
87 |
88 | InstallDotNet "$dotnetRoot" $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || {
89 | exit_code=$?
90 | Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2
91 | ExitWithExitCode $exit_code
92 | }
93 |
94 | ExitWithExitCode 0
95 |
--------------------------------------------------------------------------------
/eng/Version.Details.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | https://dev.azure.com/dnceng/internal/_git/dotnet-dotnet
6 | 44525024595742ebe09023abe709df51de65009b
7 |
8 |
9 |
10 | https://dev.azure.com/dnceng/internal/_git/dotnet-dotnet
11 | 44525024595742ebe09023abe709df51de65009b
12 |
13 |
14 | https://github.com/dotnet/android
15 | 01024bb616e7b80417a2c6d320885bfdb956f20a
16 |
17 |
18 | https://github.com/dotnet/macios
19 | 29d3d81f416abddb44209f488673afe56077cd27
20 |
21 |
22 | https://github.com/dotnet/macios
23 | 29d3d81f416abddb44209f488673afe56077cd27
24 |
25 |
26 | https://github.com/dotnet/macios
27 | 29d3d81f416abddb44209f488673afe56077cd27
28 |
29 |
30 | https://github.com/dotnet/macios
31 | 29d3d81f416abddb44209f488673afe56077cd27
32 |
33 |
34 | https://github.com/dotnet/maui
35 | c1c069ac0957d8241f5714c4700855728eeb6216
36 |
37 |
38 | https://dev.azure.com/dnceng/internal/_git/dotnet-dotnet
39 | 44525024595742ebe09023abe709df51de65009b
40 |
41 |
42 |
43 |
44 | https://github.com/dotnet/arcade
45 | 55631983c8583162122687fddeac13424c1e40a8
46 |
47 |
48 |
49 |
50 |
--------------------------------------------------------------------------------
/eng/common/sdl/init-sdl.ps1:
--------------------------------------------------------------------------------
1 | Param(
2 | [string] $GuardianCliLocation,
3 | [string] $Repository,
4 | [string] $BranchName='master',
5 | [string] $WorkingDirectory,
6 | [string] $AzureDevOpsAccessToken,
7 | [string] $GuardianLoggerLevel='Standard'
8 | )
9 |
10 | $ErrorActionPreference = 'Stop'
11 | Set-StrictMode -Version 2.0
12 | $disableConfigureToolsetImport = $true
13 | $global:LASTEXITCODE = 0
14 |
15 | # `tools.ps1` checks $ci to perform some actions. Since the SDL
16 | # scripts don't necessarily execute in the same agent that runs the
17 | # build.ps1/sh script, this variable isn't automatically set.
18 | $ci = $true
19 | . $PSScriptRoot\..\tools.ps1
20 |
21 | # Don't display the console progress UI - it's a huge perf hit
22 | $ProgressPreference = 'SilentlyContinue'
23 |
24 | # Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file
25 | $encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken"))
26 | $escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn")
27 | $uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0"
28 | $zipFile = "$WorkingDirectory/gdn.zip"
29 |
30 | Add-Type -AssemblyName System.IO.Compression.FileSystem
31 | $gdnFolder = (Join-Path $WorkingDirectory '.gdn')
32 |
33 | try {
34 | # if the folder does not exist, we'll do a guardian init and push it to the remote repository
35 | Write-Host 'Initializing Guardian...'
36 | Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel"
37 | & $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel
38 | if ($LASTEXITCODE -ne 0) {
39 | Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian init failed with exit code $LASTEXITCODE."
40 | ExitWithExitCode $LASTEXITCODE
41 | }
42 | # We create the mainbaseline so it can be edited later
43 | Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline"
44 | & $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline
45 | if ($LASTEXITCODE -ne 0) {
46 | Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE."
47 | ExitWithExitCode $LASTEXITCODE
48 | }
49 | ExitWithExitCode 0
50 | }
51 | catch {
52 | Write-Host $_.ScriptStackTrace
53 | Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
54 | ExitWithExitCode 1
55 | }
56 |
--------------------------------------------------------------------------------
/eng/common/core-templates/steps/generate-sbom.yml:
--------------------------------------------------------------------------------
1 | # BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
2 | # PackageName - The name of the package this SBOM represents.
3 | # PackageVersion - The version of the package this SBOM represents.
4 | # ManifestDirPath - The path of the directory where the generated manifest files will be placed
5 | # IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
6 |
7 | parameters:
8 | PackageVersion: 10.0.0
9 | BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts'
10 | PackageName: '.NET'
11 | ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
12 | IgnoreDirectories: ''
13 | sbomContinueOnError: true
14 | is1ESPipeline: false
15 | # disable publishArtifacts if some other step is publishing the artifacts (like job.yml).
16 | publishArtifacts: true
17 |
18 | steps:
19 | - task: PowerShell@2
20 | displayName: Prep for SBOM generation in (Non-linux)
21 | condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
22 | inputs:
23 | filePath: ./eng/common/generate-sbom-prep.ps1
24 | arguments: ${{parameters.manifestDirPath}}
25 |
26 | # Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
27 | - script: |
28 | chmod +x ./eng/common/generate-sbom-prep.sh
29 | ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
30 | displayName: Prep for SBOM generation in (Linux)
31 | condition: eq(variables['Agent.Os'], 'Linux')
32 | continueOnError: ${{ parameters.sbomContinueOnError }}
33 |
34 | - task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
35 | displayName: 'Generate SBOM manifest'
36 | continueOnError: ${{ parameters.sbomContinueOnError }}
37 | inputs:
38 | PackageName: ${{ parameters.packageName }}
39 | BuildDropPath: ${{ parameters.buildDropPath }}
40 | PackageVersion: ${{ parameters.packageVersion }}
41 | ManifestDirPath: ${{ parameters.manifestDirPath }}/$(ARTIFACT_NAME)
42 | ${{ if ne(parameters.IgnoreDirectories, '') }}:
43 | AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
44 |
45 | - ${{ if eq(parameters.publishArtifacts, 'true')}}:
46 | - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
47 | parameters:
48 | is1ESPipeline: ${{ parameters.is1ESPipeline }}
49 | args:
50 | displayName: Publish SBOM manifest
51 | continueOnError: ${{parameters.sbomContinueOnError}}
52 | targetPath: '${{ parameters.manifestDirPath }}'
53 | artifactName: $(ARTIFACT_NAME)
54 |
55 |
--------------------------------------------------------------------------------
/eng/common/darc-init.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source="${BASH_SOURCE[0]}"
4 | darcVersion=''
5 | versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20'
6 | verbosity='minimal'
7 |
8 | while [[ $# > 0 ]]; do
9 | opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
10 | case "$opt" in
11 | --darcversion)
12 | darcVersion=$2
13 | shift
14 | ;;
15 | --versionendpoint)
16 | versionEndpoint=$2
17 | shift
18 | ;;
19 | --verbosity)
20 | verbosity=$2
21 | shift
22 | ;;
23 | --toolpath)
24 | toolpath=$2
25 | shift
26 | ;;
27 | *)
28 | echo "Invalid argument: $1"
29 | usage
30 | exit 1
31 | ;;
32 | esac
33 |
34 | shift
35 | done
36 |
37 | # resolve $source until the file is no longer a symlink
38 | while [[ -h "$source" ]]; do
39 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
40 | source="$(readlink "$source")"
41 | # if $source was a relative symlink, we need to resolve it relative to the path where the
42 | # symlink file was located
43 | [[ $source != /* ]] && source="$scriptroot/$source"
44 | done
45 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
46 |
47 | . "$scriptroot/tools.sh"
48 |
49 | if [ -z "$darcVersion" ]; then
50 | darcVersion=$(curl -X GET "$versionEndpoint" -H "accept: text/plain")
51 | fi
52 |
53 | function InstallDarcCli {
54 | local darc_cli_package_name="microsoft.dotnet.darc"
55 |
56 | InitializeDotNetCli true
57 | local dotnet_root=$_InitializeDotNetCli
58 |
59 | if [ -z "$toolpath" ]; then
60 | local tool_list=$($dotnet_root/dotnet tool list -g)
61 | if [[ $tool_list = *$darc_cli_package_name* ]]; then
62 | echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name -g)
63 | fi
64 | else
65 | local tool_list=$($dotnet_root/dotnet tool list --tool-path "$toolpath")
66 | if [[ $tool_list = *$darc_cli_package_name* ]]; then
67 | echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name --tool-path "$toolpath")
68 | fi
69 | fi
70 |
71 | local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"
72 |
73 | echo "Installing Darc CLI version $darcVersion..."
74 | echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
75 | if [ -z "$toolpath" ]; then
76 | echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g)
77 | else
78 | echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath")
79 | fi
80 | }
81 |
82 | InstallDarcCli
83 |
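A minimal usage sketch of the script above, assuming it is run from the repository root; the tool path is illustrative, and omitting `--darcversion` lets the script query the Maestro version endpoint for the version to install.

```bash
# Install the darc CLI into a repo-local tool path instead of the global dotnet tool location.
# The path below is an example only.
./eng/common/darc-init.sh --toolpath "$HOME/.dotnet-darc" --verbosity minimal
```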
--------------------------------------------------------------------------------
/eng/common/post-build/publish-using-darc.ps1:
--------------------------------------------------------------------------------
1 | param(
2 | [Parameter(Mandatory=$true)][int] $BuildId,
3 | [Parameter(Mandatory=$true)][int] $PublishingInfraVersion,
4 | [Parameter(Mandatory=$true)][string] $AzdoToken,
5 | [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
6 | [Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
7 | [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
8 | [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters,
9 | [Parameter(Mandatory=$false)][string] $RequireDefaultChannels,
10 | [Parameter(Mandatory=$false)][string] $SkipAssetsPublishing,
11 | [Parameter(Mandatory=$false)][string] $runtimeSourceFeed,
12 | [Parameter(Mandatory=$false)][string] $runtimeSourceFeedKey
13 | )
14 |
15 | try {
16 | # `tools.ps1` checks $ci to perform some actions. Since the post-build
17 | # scripts don't necessarily execute in the same agent that ran the
18 | # build.ps1/sh script, this variable isn't automatically set.
19 | $ci = $true
20 | $disableConfigureToolsetImport = $true
21 | . $PSScriptRoot\..\tools.ps1
22 |
23 | $darc = Get-Darc
24 |
25 | $optionalParams = [System.Collections.ArrayList]::new()
26 |
27 | if ("" -ne $ArtifactsPublishingAdditionalParameters) {
28 | $optionalParams.Add("--artifact-publishing-parameters") | Out-Null
29 | $optionalParams.Add($ArtifactsPublishingAdditionalParameters) | Out-Null
30 | }
31 |
32 | if ("" -ne $SymbolPublishingAdditionalParameters) {
33 | $optionalParams.Add("--symbol-publishing-parameters") | Out-Null
34 | $optionalParams.Add($SymbolPublishingAdditionalParameters) | Out-Null
35 | }
36 |
37 | if ("false" -eq $WaitPublishingFinish) {
38 | $optionalParams.Add("--no-wait") | Out-Null
39 | }
40 |
41 | if ("true" -eq $RequireDefaultChannels) {
42 | $optionalParams.Add("--default-channels-required") | Out-Null
43 | }
44 |
45 | if ("true" -eq $SkipAssetsPublishing) {
46 | $optionalParams.Add("--skip-assets-publishing") | Out-Null
47 | }
48 |
49 | & $darc add-build-to-channel `
50 | --id $buildId `
51 | --publishing-infra-version $PublishingInfraVersion `
52 | --default-channels `
53 | --source-branch main `
54 | --azdev-pat "$AzdoToken" `
55 | --bar-uri "$MaestroApiEndPoint" `
56 | --ci `
57 | --verbose `
58 | @optionalParams
59 |
60 | if ($LastExitCode -ne 0) {
61 | Write-Host "Problems using Darc to promote build ${buildId} to default channels. Stopping execution..."
62 | exit 1
63 | }
64 |
65 | Write-Host 'done.'
66 | }
67 | catch {
68 | Write-Host $_
69 | Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels."
70 | ExitWithExitCode 1
71 | }
72 |
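As a rough sketch of how the parameters above fit together when the script is invoked with PowerShell Core; the build id and token values are placeholders, not real inputs.

```bash
# Promote a BAR build to its default channels; all values below are placeholders, and the
# AZDO_TOKEN environment variable is assumed to hold a valid Azure DevOps PAT.
pwsh ./eng/common/post-build/publish-using-darc.ps1 \
  -BuildId 123456 \
  -PublishingInfraVersion 3 \
  -AzdoToken "$AZDO_TOKEN" \
  -WaitPublishingFinish true
```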
--------------------------------------------------------------------------------
/eng/common/core-templates/steps/publish-logs.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | StageLabel: ''
3 | JobLabel: ''
4 | CustomSensitiveDataList: ''
5 | # A default - in case value from eng/common/core-templates/post-build/common-variables.yml is not passed
6 | BinlogToolVersion: '1.0.11'
7 | is1ESPipeline: false
8 |
9 | steps:
10 | - task: Powershell@2
11 | displayName: Prepare Binlogs to Upload
12 | inputs:
13 | targetType: inline
14 | script: |
15 | New-Item -ItemType Directory $(System.DefaultWorkingDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
16 | Move-Item -Path $(System.DefaultWorkingDirectory)/artifacts/log/Debug/* $(System.DefaultWorkingDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
17 | continueOnError: true
18 | condition: always()
19 |
20 | - task: PowerShell@2
21 | displayName: Redact Logs
22 | inputs:
23 | filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/redact-logs.ps1
24 | # For now this needs to have an explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
25 | # Sensitive data can also be added to $(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt
26 | # If the file exists - sensitive data for redaction will be sourced from it
27 | # (single entry per line, lines starting with '# ' are considered comments and skipped)
28 | arguments: -InputPath '$(System.DefaultWorkingDirectory)/PostBuildLogs'
29 | -BinlogToolVersion '${{parameters.BinlogToolVersion}}'
30 | -TokensFilePath '$(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt'
31 | -runtimeSourceFeed https://ci.dot.net/internal
32 | -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
33 | '$(publishing-dnceng-devdiv-code-r-build-re)'
34 | '$(MaestroAccessToken)'
35 | '$(dn-bot-all-orgs-artifact-feeds-rw)'
36 | '$(akams-client-id)'
37 | '$(microsoft-symbol-server-pat)'
38 | '$(symweb-symbol-server-pat)'
39 | '$(dnceng-symbol-server-pat)'
40 | '$(dn-bot-all-orgs-build-rw-code-rw)'
41 | '$(System.AccessToken)'
42 | ${{parameters.CustomSensitiveDataList}}
43 | continueOnError: true
44 | condition: always()
45 |
46 | - task: CopyFiles@2
47 | displayName: Gather post build logs
48 | inputs:
49 | SourceFolder: '$(System.DefaultWorkingDirectory)/PostBuildLogs'
50 | Contents: '**'
51 | TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
52 | condition: always()
53 |
54 | - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
55 | parameters:
56 | is1ESPipeline: ${{ parameters.is1ESPipeline }}
57 | args:
58 | displayName: Publish Logs
59 | pathToPublish: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
60 | publishLocation: Container
61 | artifactName: PostBuildLogs
62 | continueOnError: true
63 | condition: always()
64 |
--------------------------------------------------------------------------------
/eng/common/sdl/extract-artifact-packages.ps1:
--------------------------------------------------------------------------------
1 | param(
2 | [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where artifact packages are stored
3 | [Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted
4 | )
5 |
6 | $ErrorActionPreference = 'Stop'
7 | Set-StrictMode -Version 2.0
8 |
9 | $disableConfigureToolsetImport = $true
10 |
11 | function ExtractArtifacts {
12 | if (!(Test-Path $InputPath)) {
13 | Write-Host "Input Path does not exist: $InputPath"
14 | ExitWithExitCode 0
15 | }
16 | $Jobs = @()
17 | Get-ChildItem "$InputPath\*.nupkg" |
18 | ForEach-Object {
19 | $Jobs += Start-Job -ScriptBlock $ExtractPackage -ArgumentList $_.FullName
20 | }
21 |
22 | foreach ($Job in $Jobs) {
23 | Wait-Job -Id $Job.Id | Receive-Job
24 | }
25 | }
26 |
27 | try {
28 | # `tools.ps1` checks $ci to perform some actions. Since the SDL
29 | # scripts don't necessarily execute in the same agent that ran the
30 | # build.ps1/sh script, this variable isn't automatically set.
31 | $ci = $true
32 | . $PSScriptRoot\..\tools.ps1
33 |
34 | $ExtractPackage = {
35 | param(
36 | [string] $PackagePath # Full path to a NuGet package
37 | )
38 |
39 | if (!(Test-Path $PackagePath)) {
40 | Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath"
41 | ExitWithExitCode 1
42 | }
43 |
44 | $RelevantExtensions = @('.dll', '.exe', '.pdb')
45 | Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
46 |
47 | $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
48 | $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
49 |
50 | Add-Type -AssemblyName System.IO.Compression.FileSystem
51 |
52 | [System.IO.Directory]::CreateDirectory($ExtractPath);
53 |
54 | try {
55 | $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
56 |
57 | $zip.Entries |
58 | Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
59 | ForEach-Object {
60 | $TargetPath = Join-Path -Path $ExtractPath -ChildPath (Split-Path -Path $_.FullName)
61 | [System.IO.Directory]::CreateDirectory($TargetPath);
62 |
63 | $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.FullName
64 | [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile)
65 | }
66 | }
67 | catch {
68 | Write-Host $_
69 | Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
70 | ExitWithExitCode 1
71 | }
72 | finally {
73 | $zip.Dispose()
74 | }
75 | }
76 | Measure-Command { ExtractArtifacts }
77 | }
78 | catch {
79 | Write-Host $_
80 | Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
81 | ExitWithExitCode 1
82 | }
83 |
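A hedged invocation sketch for the script above; the paths are examples, and the script is normally run on a Windows build agent since it globs packages with a backslash path separator.

```bash
# Extract .dll/.exe/.pdb entries from every .nupkg under the input folder (example paths).
pwsh ./eng/common/sdl/extract-artifact-packages.ps1 \
  -InputPath 'C:\artifacts\packages' \
  -ExtractPath 'C:\artifacts\extracted'
```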
--------------------------------------------------------------------------------
/eng/common/templates/variables/pool-providers.yml:
--------------------------------------------------------------------------------
1 | # Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool,
2 | # otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches.
3 |
4 | # Motivation:
5 | # Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS
6 | # (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing
7 | # (allowing release builds and main PR builds to not interfere with each other) and billing (required for COGS).
8 | # Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services
9 | # team needs to move resources around and create new and potentially differently-named pools. Using this template
10 | # file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming.
11 |
12 | # How to use:
13 | # This yaml assumes your shipped product branches use the naming convention "release/..." (which many do).
14 | # If we find alternate naming conventions in broad usage, they can be added to the condition below.
15 | #
16 | # First, import the template in an arcade-ified repo to pick up the variables, e.g.:
17 | #
18 | # variables:
19 | # - template: /eng/common/templates/variables/pool-providers.yml
20 | #
21 | # ... then anywhere specifying the pool provider use the runtime variables,
22 | # $(DncEngInternalBuildPool) and $(DncEngPublicBuildPool), e.g.:
23 | #
24 | # pool:
25 | # name: $(DncEngInternalBuildPool)
26 | # demands: ImageOverride -equals windows.vs2019.amd64
27 | variables:
28 | - ${{ if eq(variables['System.TeamProject'], 'internal') }}:
29 | - template: /eng/common/templates-official/variables/pool-providers.yml
30 | - ${{ else }}:
31 | # Coalesce the target and source branches so we know when a PR targets a release branch
32 | # If these variables are somehow missing, fall back to main (tends to have more capacity)
33 |
34 | # Any new -Svc alternative pools should have variables added here to allow for splitting work
35 | - name: DncEngPublicBuildPool
36 | value: $[
37 | replace(
38 | replace(
39 | eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
40 | True,
41 | 'NetCore-Svc-Public'
42 | ),
43 | False,
44 | 'NetCore-Public'
45 | )
46 | ]
47 |
48 | - name: DncEngInternalBuildPool
49 | value: $[
50 | replace(
51 | replace(
52 | eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
53 | True,
54 | 'NetCore1ESPool-Svc-Internal'
55 | ),
56 | False,
57 | 'NetCore1ESPool-Internal'
58 | )
59 | ]
60 |
--------------------------------------------------------------------------------
/eng/common/core-templates/post-build/setup-maestro-vars.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | BARBuildId: ''
3 | PromoteToChannelIds: ''
4 | is1ESPipeline: ''
5 |
6 | steps:
7 | - ${{ if eq(parameters.is1ESPipeline, '') }}:
8 | - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
9 |
10 | - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
11 | - task: DownloadBuildArtifacts@0
12 | displayName: Download Release Configs
13 | inputs:
14 | buildType: current
15 | artifactName: ReleaseConfigs
16 | checkDownloadedFiles: true
17 |
18 | - task: AzureCLI@2
19 | name: setReleaseVars
20 | displayName: Set Release Configs Vars
21 | inputs:
22 | azureSubscription: "Darc: Maestro Production"
23 | scriptType: pscore
24 | scriptLocation: inlineScript
25 | inlineScript: |
26 | try {
27 | if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
28 | $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
29 |
30 | $BarId = $Content | Select -Index 0
31 | $Channels = $Content | Select -Index 1
32 | $IsStableBuild = $Content | Select -Index 2
33 |
34 | $AzureDevOpsProject = $Env:System_TeamProject
35 | $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
36 | $AzureDevOpsBuildId = $Env:Build_BuildId
37 | }
38 | else {
39 | . $(System.DefaultWorkingDirectory)\eng\common\tools.ps1
40 | $darc = Get-Darc
41 | $buildInfo = & $darc get-build `
42 | --id ${{ parameters.BARBuildId }} `
43 | --extended `
44 | --output-format json `
45 | --ci `
46 | | convertFrom-Json
47 |
48 | $BarId = ${{ parameters.BARBuildId }}
49 | $Channels = $Env:PromoteToMaestroChannels -split ","
50 | $Channels = $Channels -join "]["
51 | $Channels = "[$Channels]"
52 |
53 | $IsStableBuild = $buildInfo.stable
54 | $AzureDevOpsProject = $buildInfo.azureDevOpsProject
55 | $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
56 | $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
57 | }
58 |
59 | Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
60 | Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
61 | Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
62 |
63 | Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
64 | Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
65 | Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
66 | }
67 | catch {
68 | Write-Host $_
69 | Write-Host $_.Exception
70 | Write-Host $_.ScriptStackTrace
71 | exit 1
72 | }
73 | env:
74 | PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
75 |
--------------------------------------------------------------------------------
/NuGet.config:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/eng/common/core-templates/steps/source-build.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | # This template adds arcade-powered source-build to CI.
3 |
4 | # This is a 'steps' template, and is intended for advanced scenarios where the existing build
5 | # infra has a careful build methodology that must be followed. For example, a repo
6 | # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
7 | # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
8 | # GitHub. Using this steps template leaves room for that infra to be included.
9 |
10 | # Defines the platform on which to run the steps. See 'eng/common/core-templates/job/source-build.yml'
11 | # for details. The entire object is described in the 'job' template for simplicity, even though
12 | # the usage of the properties on this object is split between the 'job' and 'steps' templates.
13 | platform: {}
14 | is1ESPipeline: false
15 |
16 | steps:
17 | # Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
18 | - script: |
19 | set -x
20 | df -h
21 |
22 | # If building on the internal project, the internal storage variable may be available (usually only if needed)
23 | # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
24 | # in the default public locations.
25 | internalRuntimeDownloadArgs=
26 | if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
27 | internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://ci.dot.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://ci.dot.net/internal --runtimesourcefeedkey '$(dotnetbuilds-internal-container-read-token-base64)''
28 | fi
29 |
30 | buildConfig=Release
31 | # Check if AzDO substitutes in a build config from a variable, and use it if so.
32 | if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
33 | buildConfig='$(_BuildConfig)'
34 | fi
35 |
36 | targetRidArgs=
37 | if [ '${{ parameters.platform.targetRID }}' != '' ]; then
38 | targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
39 | fi
40 |
41 | portableBuildArgs=
42 | if [ '${{ parameters.platform.portableBuild }}' != '' ]; then
43 | portableBuildArgs='/p:PortableBuild=${{ parameters.platform.portableBuild }}'
44 | fi
45 |
46 | ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
47 | --configuration $buildConfig \
48 | --restore --build --pack -bl \
49 | --source-build \
50 | ${{ parameters.platform.buildArguments }} \
51 | $internalRuntimeDownloadArgs \
52 | $targetRidArgs \
53 | $portableBuildArgs \
54 | displayName: Build
55 |
56 | - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
57 | parameters:
58 | is1ESPipeline: ${{ parameters.is1ESPipeline }}
59 | args:
60 | displayName: Publish BuildLogs
61 | targetPath: artifacts/log/${{ coalesce(variables._BuildConfig, 'Release') }}
62 | artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
63 | continueOnError: true
64 | condition: succeededOrFailed()
65 | sbomEnabled: false # we don't need SBOM for logs
66 |
--------------------------------------------------------------------------------
/eng/common/post-build/redact-logs.ps1:
--------------------------------------------------------------------------------
1 | [CmdletBinding(PositionalBinding=$False)]
2 | param(
3 | [Parameter(Mandatory=$true, Position=0)][string] $InputPath,
4 | [Parameter(Mandatory=$true)][string] $BinlogToolVersion,
5 | [Parameter(Mandatory=$false)][string] $DotnetPath,
6 | [Parameter(Mandatory=$false)][string] $PackageFeed = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json',
7 | # File with strings to redact - separated by newlines.
8 | # For comments start the line with '# ' - such lines are ignored
9 | [Parameter(Mandatory=$false)][string] $TokensFilePath,
10 | [Parameter(ValueFromRemainingArguments=$true)][String[]]$TokensToRedact,
11 | [Parameter(Mandatory=$false)][string] $runtimeSourceFeed,
12 | [Parameter(Mandatory=$false)][string] $runtimeSourceFeedKey)
13 |
14 | try {
15 | $ErrorActionPreference = 'Stop'
16 | Set-StrictMode -Version 2.0
17 |
18 | # `tools.ps1` checks $ci to perform some actions. Since the post-build
19 | # scripts don't necessarily execute in the same agent that ran the
20 | # build.ps1/sh script, this variable isn't automatically set.
21 | $ci = $true
22 | $disableConfigureToolsetImport = $true
23 | . $PSScriptRoot\..\tools.ps1
24 |
25 | $packageName = 'binlogtool'
26 |
27 | $dotnet = $DotnetPath
28 |
29 | if (!$dotnet) {
30 | $dotnetRoot = InitializeDotNetCli -install:$true
31 | $dotnet = "$dotnetRoot\dotnet.exe"
32 | }
33 |
34 | $toolList = & "$dotnet" tool list -g
35 |
36 | if ($toolList -like "*$packageName*") {
37 | & "$dotnet" tool uninstall $packageName -g
38 | }
39 |
40 | $toolPath = "$PSScriptRoot\..\..\..\.tools"
41 | $verbosity = 'minimal'
42 |
43 | New-Item -ItemType Directory -Force -Path $toolPath
44 |
45 | Push-Location -Path $toolPath
46 |
47 | try {
48 | Write-Host "Installing Binlog redactor CLI..."
49 | Write-Host "'$dotnet' new tool-manifest"
50 | & "$dotnet" new tool-manifest
51 | Write-Host "'$dotnet' tool install $packageName --local --add-source '$PackageFeed' -v $verbosity --version $BinlogToolVersion"
52 | & "$dotnet" tool install $packageName --local --add-source "$PackageFeed" -v $verbosity --version $BinlogToolVersion
53 |
54 | if (Test-Path $TokensFilePath) {
55 | Write-Host "Adding additional sensitive data for redaction from file: " $TokensFilePath
56 | $TokensToRedact += Get-Content -Path $TokensFilePath | Foreach {$_.Trim()} | Where { $_ -notmatch "^# " }
57 | }
58 |
59 | $optionalParams = [System.Collections.ArrayList]::new()
60 |
61 | Foreach ($p in $TokensToRedact)
62 | {
63 | if($p -match '^\$\(.*\)$')
64 | {
65 | Write-Host ("Ignoring token {0} as it is probably an unexpanded AzDO variable" -f $p)
66 | }
67 | elseif($p)
68 | {
69 | $optionalParams.Add("-p:" + $p) | Out-Null
70 | }
71 | }
72 |
73 | & $dotnet binlogtool redact --input:$InputPath --recurse --in-place `
74 | @optionalParams
75 |
76 | if ($LastExitCode -ne 0) {
77 | Write-PipelineTelemetryError -Category 'Redactor' -Type 'warning' -Message "Problems using Redactor tool (exit code: $LastExitCode). But ignoring them now."
78 | }
79 | }
80 | finally {
81 | Pop-Location
82 | }
83 |
84 | Write-Host 'done.'
85 | }
86 | catch {
87 | Write-Host $_
88 | Write-PipelineTelemetryError -Category 'Redactor' -Message "There was an error while trying to redact logs. Error: $_"
89 | ExitWithExitCode 1
90 | }
91 |
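For reference, a local invocation sketch that mirrors how publish-logs.yml calls this script; the binlogtool version matches the template's default, and the trailing token values are illustrative placeholders passed via the remaining-arguments parameter.

```bash
# Redact example secrets from every binlog under ./PostBuildLogs (token values are placeholders).
pwsh ./eng/common/post-build/redact-logs.ps1 \
  -InputPath ./PostBuildLogs \
  -BinlogToolVersion 1.0.11 \
  -TokensFilePath ./eng/BinlogSecretsRedactionFile.txt \
  'example-secret-1' 'example-secret-2'
```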
--------------------------------------------------------------------------------
/eng/common/sdk-task.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | show_usage() {
4 | echo "Common settings:"
5 | echo " --task Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
6 | echo " --restore Restore dependencies"
7 | echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
8 | echo " --help Print help and exit"
9 | echo ""
10 |
11 | echo "Advanced settings:"
12 | echo " --excludeCIBinarylog Don't output binary log (short: -nobl)"
13 | echo " --noWarnAsError Do not warn as error"
14 | echo ""
15 | echo "Command line arguments not listed above are passed through to msbuild."
16 | }
17 |
18 | source="${BASH_SOURCE[0]}"
19 |
20 | # resolve $source until the file is no longer a symlink
21 | while [[ -h "$source" ]]; do
22 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
23 | source="$(readlink "$source")"
24 | # if $source was a relative symlink, we need to resolve it relative to the path where the
25 | # symlink file was located
26 | [[ $source != /* ]] && source="$scriptroot/$source"
27 | done
28 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
29 |
30 | Build() {
31 | local target=$1
32 | local log_suffix=""
33 | [[ "$target" != "Execute" ]] && log_suffix=".$target"
34 | local log="$log_dir/$task$log_suffix.binlog"
35 | local binaryLogArg=""
36 | [[ $binary_log == true ]] && binaryLogArg="/bl:$log"
37 | local output_path="$toolset_dir/$task/"
38 |
39 | MSBuild "$taskProject" \
40 | $binaryLogArg \
41 | /t:"$target" \
42 | /p:Configuration="$configuration" \
43 | /p:RepoRoot="$repo_root" \
44 | /p:BaseIntermediateOutputPath="$output_path" \
45 | /v:"$verbosity" \
46 | $properties
47 | }
48 |
49 | binary_log=true
50 | configuration="Debug"
51 | verbosity="minimal"
52 | exclude_ci_binary_log=false
53 | restore=false
54 | help=false
55 | properties=''
56 | warnAsError=true
57 |
58 | while (($# > 0)); do
59 | lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
60 | case $lowerI in
61 | --task)
62 | task=$2
63 | shift 2
64 | ;;
65 | --restore)
66 | restore=true
67 | shift 1
68 | ;;
69 | --verbosity)
70 | verbosity=$2
71 | shift 2
72 | ;;
73 | --excludecibinarylog|--nobl)
74 | binary_log=false
75 | exclude_ci_binary_log=true
76 | shift 1
77 | ;;
78 | --noWarnAsError)
79 | warnAsError=false
80 | shift 1
81 | ;;
82 | --help)
83 | help=true
84 | shift 1
85 | ;;
86 | *)
87 | properties="$properties $1"
88 | shift 1
89 | ;;
90 | esac
91 | done
92 |
93 | ci=true
94 |
95 | if $help; then
96 | show_usage
97 | exit 0
98 | fi
99 |
100 | . "$scriptroot/tools.sh"
101 | InitializeToolset
102 |
103 | if [[ -z "$task" ]]; then
104 | Write-PipelineTelemetryError -Category 'Task' -Name 'MissingTask' -Message "Missing required parameter '-task <value>'"
105 | ExitWithExitCode 1
106 | fi
107 |
108 | taskProject=$(GetSdkTaskProject "$task")
109 | if [[ ! -e "$taskProject" ]]; then
110 | Write-PipelineTelemetryError -Category 'Task' -Name 'UnknownTask' -Message "Unknown task: $task"
111 | ExitWithExitCode 1
112 | fi
113 |
114 | if $restore; then
115 | Build "Restore"
116 | fi
117 |
118 | Build "Execute"
119 |
120 |
121 | ExitWithExitCode 0
122 |
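A short usage sketch matching the help text above; `MyTask` is a placeholder for a project under the SdkTasks directory of the Arcade SDK package.

```bash
# Restore and then execute a named Arcade SDK task; the task name is hypothetical.
./eng/common/sdk-task.sh --restore --task MyTask --verbosity minimal
```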
--------------------------------------------------------------------------------
/eng/common/native/install-cmake.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source="${BASH_SOURCE[0]}"
4 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
5 |
6 | . $scriptroot/common-library.sh
7 |
8 | base_uri=
9 | install_path=
10 | version=
11 | clean=false
12 | force=false
13 | download_retries=5
14 | retry_wait_time_seconds=30
15 |
16 | while (($# > 0)); do
17 | lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
18 | case $lowerI in
19 | --baseuri)
20 | base_uri=$2
21 | shift 2
22 | ;;
23 | --installpath)
24 | install_path=$2
25 | shift 2
26 | ;;
27 | --version)
28 | version=$2
29 | shift 2
30 | ;;
31 | --clean)
32 | clean=true
33 | shift 1
34 | ;;
35 | --force)
36 | force=true
37 | shift 1
38 | ;;
39 | --downloadretries)
40 | download_retries=$2
41 | shift 2
42 | ;;
43 | --retrywaittimeseconds)
44 | retry_wait_time_seconds=$2
45 | shift 2
46 | ;;
47 | --help)
48 | echo "Common settings:"
49 | echo " --baseuri Base file directory or URL from which to acquire tool archives"
50 | echo " --installpath Base directory to install native tool to"
51 | echo " --clean Don't install the tool, just clean up the current install of the tool"
52 | echo " --force Force install of tools even if they previously exist"
53 | echo " --help Print help and exit"
54 | echo ""
55 | echo "Advanced settings:"
56 | echo " --downloadretries Total number of retry attempts"
57 | echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
58 | echo ""
59 | exit 0
60 | ;;
61 | esac
62 | done
63 |
64 | tool_name="cmake"
65 | tool_os=$(GetCurrentOS)
66 | tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")"
67 | tool_arch="x86_64"
68 | tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
69 | tool_install_directory="$install_path/$tool_name/$version"
70 | tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
71 | shim_path="$install_path/$tool_name.sh"
72 | uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"
73 |
74 | # Clean up tool and installers
75 | if [[ $clean = true ]]; then
76 | echo "Cleaning $tool_install_directory"
77 | if [[ -d $tool_install_directory ]]; then
78 | rm -rf $tool_install_directory
79 | fi
80 |
81 | echo "Cleaning $shim_path"
82 | if [[ -f $shim_path ]]; then
83 | rm -rf $shim_path
84 | fi
85 |
86 | tool_temp_path=$(GetTempPathFileName $uri)
87 | echo "Cleaning $tool_temp_path"
88 | if [[ -f $tool_temp_path ]]; then
89 | rm -rf $tool_temp_path
90 | fi
91 |
92 | exit 0
93 | fi
94 |
95 | # Install tool
96 | if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
97 | echo "$tool_name ($version) already exists, skipping install"
98 | exit 0
99 | fi
100 |
101 | DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds
102 |
103 | if [[ $? != 0 ]]; then
104 | Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
105 | exit 1
106 | fi
107 |
108 | # Generate Shim
109 | # Always rewrite shims so that we are referencing the expected version
110 | NewScriptShim $shim_path $tool_file_path true
111 |
112 | if [[ $? != 0 ]]; then
113 | Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
114 | exit 1
115 | fi
116 |
117 | exit 0
118 |
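An illustrative invocation of the installer above; the base URI, install path, and version are assumptions for the sketch, not values the script prescribes.

```bash
# Download and shim a cmake archive from an example asset location.
./eng/common/native/install-cmake.sh \
  --baseuri "https://example.blob.core.windows.net/nativeassets" \
  --installpath "$HOME/.netcoreeng/native" \
  --version 3.21.0
```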
--------------------------------------------------------------------------------
/eng/common/native/install-cmake-test.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source="${BASH_SOURCE[0]}"
4 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
5 |
6 | . $scriptroot/common-library.sh
7 |
8 | base_uri=
9 | install_path=
10 | version=
11 | clean=false
12 | force=false
13 | download_retries=5
14 | retry_wait_time_seconds=30
15 |
16 | while (($# > 0)); do
17 | lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
18 | case $lowerI in
19 | --baseuri)
20 | base_uri=$2
21 | shift 2
22 | ;;
23 | --installpath)
24 | install_path=$2
25 | shift 2
26 | ;;
27 | --version)
28 | version=$2
29 | shift 2
30 | ;;
31 | --clean)
32 | clean=true
33 | shift 1
34 | ;;
35 | --force)
36 | force=true
37 | shift 1
38 | ;;
39 | --downloadretries)
40 | download_retries=$2
41 | shift 2
42 | ;;
43 | --retrywaittimeseconds)
44 | retry_wait_time_seconds=$2
45 | shift 2
46 | ;;
47 | --help)
48 | echo "Common settings:"
49 | echo " --baseuri Base file directory or URL from which to acquire tool archives"
50 | echo " --installpath Base directory to install native tool to"
51 | echo " --clean Don't install the tool, just clean up the current install of the tool"
52 | echo " --force Force install of tools even if they previously exist"
53 | echo " --help Print help and exit"
54 | echo ""
55 | echo "Advanced settings:"
56 | echo " --downloadretries Total number of retry attempts"
57 | echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
58 | echo ""
59 | exit 0
60 | ;;
61 | esac
62 | done
63 |
64 | tool_name="cmake-test"
65 | tool_os=$(GetCurrentOS)
66 | tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")"
67 | tool_arch="x86_64"
68 | tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
69 | tool_install_directory="$install_path/$tool_name/$version"
70 | tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
71 | shim_path="$install_path/$tool_name.sh"
72 | uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"
73 |
74 | # Clean up tool and installers
75 | if [[ $clean = true ]]; then
76 | echo "Cleaning $tool_install_directory"
77 | if [[ -d $tool_install_directory ]]; then
78 | rm -rf $tool_install_directory
79 | fi
80 |
81 | echo "Cleaning $shim_path"
82 | if [[ -f $shim_path ]]; then
83 | rm -rf $shim_path
84 | fi
85 |
86 | tool_temp_path=$(GetTempPathFileName $uri)
87 | echo "Cleaning $tool_temp_path"
88 | if [[ -f $tool_temp_path ]]; then
89 | rm -rf $tool_temp_path
90 | fi
91 |
92 | exit 0
93 | fi
94 |
95 | # Install tool
96 | if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
97 | echo "$tool_name ($version) already exists, skipping install"
98 | exit 0
99 | fi
100 |
101 | DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds
102 |
103 | if [[ $? != 0 ]]; then
104 | Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
105 | exit 1
106 | fi
107 |
108 | # Generate Shim
109 | # Always rewrite shims so that we are referencing the expected version
110 | NewScriptShim $shim_path $tool_file_path true
111 |
112 | if [[ $? != 0 ]]; then
113 | Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
114 | exit 1
115 | fi
116 |
117 | exit 0
118 |
--------------------------------------------------------------------------------
/eng/common/sdk-task.ps1:
--------------------------------------------------------------------------------
1 | [CmdletBinding(PositionalBinding=$false)]
2 | Param(
3 | [string] $configuration = 'Debug',
4 | [string] $task,
5 | [string] $verbosity = 'minimal',
6 | [string] $msbuildEngine = $null,
7 | [switch] $restore,
8 | [switch] $prepareMachine,
9 | [switch][Alias('nobl')]$excludeCIBinaryLog,
10 | [switch]$noWarnAsError,
11 | [switch] $help,
12 | [string] $runtimeSourceFeed = '',
13 | [string] $runtimeSourceFeedKey = '',
14 | [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
15 | )
16 |
17 | $ci = $true
18 | $binaryLog = if ($excludeCIBinaryLog) { $false } else { $true }
19 | $warnAsError = if ($noWarnAsError) { $false } else { $true }
20 |
21 | . $PSScriptRoot\tools.ps1
22 |
23 | function Print-Usage() {
24 | Write-Host "Common settings:"
25 | Write-Host " -task Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
26 | Write-Host " -restore Restore dependencies"
27 | Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
28 | Write-Host " -help Print help and exit"
29 | Write-Host ""
30 |
31 | Write-Host "Advanced settings:"
32 | Write-Host " -prepareMachine Prepare machine for CI run"
33 | Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
34 | Write-Host " -excludeCIBinaryLog When running on CI, allow no binary log (short: -nobl)"
35 | Write-Host ""
36 | Write-Host "Command line arguments not listed above are passed through to msbuild."
37 | }
38 |
39 | function Build([string]$target) {
40 | $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
41 | $log = Join-Path $LogDir "$task$logSuffix.binlog"
42 | $binaryLogArg = if ($binaryLog) { "/bl:$log" } else { "" }
43 | $outputPath = Join-Path $ToolsetDir "$task\"
44 |
45 | MSBuild $taskProject `
46 | $binaryLogArg `
47 | /t:$target `
48 | /p:Configuration=$configuration `
49 | /p:RepoRoot=$RepoRoot `
50 | /p:BaseIntermediateOutputPath=$outputPath `
51 | /v:$verbosity `
52 | @properties
53 | }
54 |
55 | try {
56 | if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
57 | Print-Usage
58 | exit 0
59 | }
60 |
61 | if ($task -eq "") {
62 | Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task <value>'"
63 | Print-Usage
64 | ExitWithExitCode 1
65 | }
66 |
67 | if( $msbuildEngine -eq "vs") {
68 | # Ensure desktop MSBuild is available for sdk tasks.
69 | if( -not ($GlobalJson.tools.PSObject.Properties.Name -contains "vs" )) {
70 | $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
71 | }
72 | if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
73 | $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "18.0.0" -MemberType NoteProperty
74 | }
75 | if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
76 | $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
77 | }
78 | if ($xcopyMSBuildToolsFolder -eq $null) {
79 | throw 'Unable to get xcopy downloadable version of msbuild'
80 | }
81 |
82 | $global:_MSBuildExe = "$($xcopyMSBuildToolsFolder)\MSBuild\Current\Bin\MSBuild.exe"
83 | }
84 |
85 | $taskProject = GetSdkTaskProject $task
86 | if (!(Test-Path $taskProject)) {
87 | Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task"
88 | ExitWithExitCode 1
89 | }
90 |
91 | if ($restore) {
92 | Build 'Restore'
93 | }
94 |
95 | Build 'Execute'
96 | }
97 | catch {
98 | Write-Host $_.ScriptStackTrace
99 | Write-PipelineTelemetryError -Category 'Build' -Message $_
100 | ExitWithExitCode 1
101 | }
102 |
103 | ExitWithExitCode 0
104 |
--------------------------------------------------------------------------------
/doc/test-workload-updates.md:
--------------------------------------------------------------------------------
1 | # Test Workload Updates Process
2 |
3 | This document provides instructions for the MAUI, Android, and iOS teams on how to create test workload sets for validation and testing purposes.
4 |
5 | ## Overview
6 |
7 | Test workload sets allow teams to validate workload updates before they are included in official releases. This process supports two main scenarios:
8 | 1. Creating test workload sets for general validation
9 | 2. Creating test workload sets with Visual Studio insertion for integration testing
10 |
11 | ## Prerequisites
12 |
13 | - Access to the workload-versions repository
14 | - Permissions to create branches and pull requests
15 | - Access to Azure DevOps internal builds
16 |
17 | ## Process Steps
18 |
19 | ### 1. Create a Test Branch
20 |
21 | 1. **Branch from a release branch**: Start by creating a test branch from one of the existing release branches (see the git sketch at the end of this section)
22 | ```
23 | Example branch name: release/9.0.3xx-mauitest
24 | ```
25 |
26 | **Branch naming convention:**
27 | - `release/{version}/mauitest{mauiversion}` - for MAUI team tests
28 | - `release/{version}/androidtest{androidversion}` - for Android team tests
29 | - `release/{version}/iostest{iosversion}` - for iOS team tests
30 |
31 | 2. **Make your changes**: Update the necessary workload configurations in your test branch
32 |
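A minimal git sketch of the step above, reusing the example branch name; the base branch `release/9.0.3xx` is assumed for illustration.

```bash
# Create and publish a MAUI test branch off an existing release branch (names are examples).
git fetch origin
git checkout -b release/9.0.3xx-mauitest origin/release/9.0.3xx
git push -u origin release/9.0.3xx-mauitest
```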
33 | ### 2. Create and Merge Pull Request
34 |
35 | 1. **Publish your test build to a Darc channel**: After making changes, publish the build artifacts to a dedicated Darc channel for test workloads.
36 | 2. **Use `darc update-dependencies`**: Run `darc update-dependencies` to update your test branch with the new build information from the Darc channel. This ensures your branch references the correct test workload versions (see the example darc command at the end of this section).
37 | 3. **Get approval** from the appropriate reviewers
38 | 4. **Merge the PR** once approved
39 |
40 | > **Note**: Once the test pipeline is established, PRs will not be required and teams can push directly to the internal Azure DevOps repository.
41 |
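A hedged sketch of the darc command from step 2; the channel name is hypothetical and should be replaced with the dedicated test channel your team publishes to.

```bash
# Pull the latest dependency versions from a (hypothetical) test Darc channel into the branch.
darc update-dependencies --channel ".NET Workload Testing"
```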
42 | ### 3. Queue Internal Build
43 |
44 | After merging your PR, queue an internal build using the Azure DevOps pipeline.
45 | https://dev.azure.com/dnceng/internal/_build?definitionId=1298
46 |
47 | ## Build Configuration Options
48 |
49 | When queuing the pipeline, you have several configuration options depending on your testing needs:
50 |
51 | ### For Test Workload Sets Only
52 |
53 | If you only need to create a test workload set:
54 |
55 | 1. ✅ **Select pipeline version by branch/tag** set to **eng**
56 | 2. ✅ **🚩 Source branch 🚩** set to your branch
57 | 3. ✅ **Select "Publish to AzDO"** when queuing the pipeline
58 | 4. ✅ **AzDO publish feed** is the feed the workload set will be published to. The default should be _dotnet-workloads_.
59 | 5. ✅ **⭐ Create a test workload set** enabled
60 | - This will generate the test workload set used for validation
61 | - The workload set will be published to Azure DevOps for testing
62 |
63 | ### For Visual Studio Insertion Testing
64 |
65 | If you need to test a Visual Studio insertion:
66 |
67 | 1. ✅ **Update the workload drop names** for the workloads you want to update in VS
68 | 2. ✅ **Update the primary VS insertion branch** configuration
69 | 3. ✅ **Check the box for "Create VS insertion"**
70 |
71 | This configuration will:
72 | - Create the test workload set
73 | - Prepare the workloads for VS insertion
74 | - Initiate the insertion process into the specified VS branch
75 |
76 | ## Important Notes
77 |
78 | - **Branch Management**: Keep test branches organized and clean up after testing is complete
79 | - **Communication**: Coordinate with other teams if multiple test branches are being used simultaneously
80 |
81 | ## Troubleshooting
82 |
83 | If you encounter issues during the process:
84 |
85 | 1. **Build Failures**: Check the build logs in Azure DevOps for specific error messages
86 | 2. **Permission Issues**: Ensure you have the necessary access rights for the repositories and pipelines
87 | 3. **Workload Set Versioning**: The workload set created will match the SDK band specified in the `Versions.props` file.
88 |
89 | ## Support
90 |
91 | For additional support or questions about this process, please reach out to the .NET SDK team on our Teams chat.
92 |
--------------------------------------------------------------------------------
/eng/common/native/init-distro-rid.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | # getNonPortableDistroRid
4 | #
5 | # Input:
6 | # targetOs: (str)
7 | # targetArch: (str)
8 | # rootfsDir: (str)
9 | #
10 | # Return:
11 | # non-portable rid
12 | getNonPortableDistroRid()
13 | {
14 | targetOs="$1"
15 | targetArch="$2"
16 | rootfsDir="$3"
17 | nonPortableRid=""
18 |
19 | if [ "$targetOs" = "linux" ]; then
20 | # shellcheck disable=SC1091
21 | if [ -e "${rootfsDir}/etc/os-release" ]; then
22 | . "${rootfsDir}/etc/os-release"
23 | if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then
24 | nonPortableRid="${ID}.${VERSION_ID}-${targetArch}"
25 | else
26 | # Rolling release distros either do not set VERSION_ID, set it as blank or
27 | # set it to non-version looking string (such as TEMPLATE_VERSION_ID on ArchLinux);
28 | # so omit it here to be consistent with everything else.
29 | nonPortableRid="${ID}-${targetArch}"
30 | fi
31 | elif [ -e "${rootfsDir}/android_platform" ]; then
32 | # shellcheck disable=SC1091
33 | . "${rootfsDir}/android_platform"
34 | nonPortableRid="$RID"
35 | fi
36 | fi
37 |
38 | if [ "$targetOs" = "freebsd" ]; then
39 | # $rootfsDir can be empty. freebsd-version is a shell script and should always work.
40 | __freebsd_major_version=$("$rootfsDir"/bin/freebsd-version | cut -d'.' -f1)
41 | nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}"
42 | elif command -v getprop >/dev/null && getprop ro.product.system.model | grep -qi android; then
43 | __android_sdk_version=$(getprop ro.build.version.sdk)
44 | nonPortableRid="android.$__android_sdk_version-${targetArch}"
45 | elif [ "$targetOs" = "illumos" ]; then
46 | __uname_version=$(uname -v)
47 | nonPortableRid="illumos-${targetArch}"
48 | elif [ "$targetOs" = "solaris" ]; then
49 | __uname_version=$(uname -v)
50 | __solaris_major_version=$(echo "$__uname_version" | cut -d'.' -f1)
51 | nonPortableRid="solaris.$__solaris_major_version-${targetArch}"
52 | elif [ "$targetOs" = "haiku" ]; then
53 | __uname_release="$(uname -r)"
54 | nonPortableRid=haiku.r"$__uname_release"-"$targetArch"
55 | fi
56 |
57 | echo "$nonPortableRid" | tr '[:upper:]' '[:lower:]'
58 | }
59 |
60 | # initDistroRidGlobal
61 | #
62 | # Input:
63 | # os: (str)
64 | # arch: (str)
65 | # rootfsDir?: (nullable:string)
66 | #
67 | # Return:
68 | # None
69 | #
70 | # Notes:
71 | # It is important to note that the function does not return anything, but it
72 | # exports the following variables on success:
73 | # __DistroRid : Non-portable rid of the target platform.
74 | # __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform.
75 | initDistroRidGlobal()
76 | {
77 | targetOs="$1"
78 | targetArch="$2"
79 | rootfsDir=""
80 | if [ $# -ge 3 ]; then
81 | rootfsDir="$3"
82 | fi
83 |
84 | if [ -n "${rootfsDir}" ]; then
85 | # We may have a cross build. Check for the existence of the rootfsDir
86 | if [ ! -e "${rootfsDir}" ]; then
87 | echo "Error: rootfsDir has been passed, but the location is not valid."
88 | exit 1
89 | fi
90 | fi
91 |
92 | __DistroRid=$(getNonPortableDistroRid "${targetOs}" "${targetArch}" "${rootfsDir}")
93 |
94 | if [ -z "${__PortableTargetOS:-}" ]; then
95 | __PortableTargetOS="$targetOs"
96 |
97 | STRINGS="$(command -v strings || true)"
98 | if [ -z "$STRINGS" ]; then
99 | STRINGS="$(command -v llvm-strings || true)"
100 | fi
101 |
102 | # Check for musl-based distros (e.g. Alpine Linux, Void Linux).
103 | if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl ||
104 | ( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then
105 | __PortableTargetOS="linux-musl"
106 | fi
107 | fi
108 |
109 | export __DistroRid __PortableTargetOS
110 | }
111 |
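A small sketch of consuming the helper above from another shell script on a Linux host; the os/arch arguments are examples, and an empty rootfsDir means a non-cross build.

```bash
# Source the helper and compute the RID globals for a non-cross linux-x64 build.
. ./eng/common/native/init-distro-rid.sh
initDistroRidGlobal "linux" "x64" ""
echo "__DistroRid=$__DistroRid __PortableTargetOS=$__PortableTargetOS"
```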
--------------------------------------------------------------------------------
/eng/common/vmr-sync.ps1:
--------------------------------------------------------------------------------
1 | <#
2 | .SYNOPSIS
3 |
4 | This script is used for synchronizing the current repository into a local VMR.
5 | It pulls the current repository's code into the specified VMR directory for local testing or
6 | Source-Build validation.
7 |
8 | .DESCRIPTION
9 |
10 | The tooling used for synchronization will clone the VMR repository into a temporary folder if
11 | it does not already exist. These clones can be reused in future synchronizations, so it is
12 | recommended to dedicate a folder for this to speed up re-runs.
13 |
14 | .EXAMPLE
15 | Synchronize current repository into a local VMR:
16 | ./vmr-sync.ps1 -vmrDir "$HOME/repos/dotnet" -tmpDir "$HOME/repos/tmp"
17 |
18 | .PARAMETER tmpDir
19 | Required. Path to the temporary folder where repositories will be cloned
20 |
21 | .PARAMETER vmrBranch
22 | Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch
23 |
24 | .PARAMETER azdevPat
25 | Optional. Azure DevOps PAT to use for cloning private repositories.
26 |
27 | .PARAMETER vmrDir
28 | Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder
29 |
30 | .PARAMETER debugOutput
31 | Optional. Enables debug logging in the darc vmr command.
32 |
33 | .PARAMETER ci
34 | Optional. Denotes that the script is running in a CI environment.
35 | #>
36 | param (
37 | [Parameter(Mandatory=$true, HelpMessage="Path to the temporary folder where repositories will be cloned")]
38 | [string][Alias('t', 'tmp')]$tmpDir,
39 | [string][Alias('b', 'branch')]$vmrBranch,
40 | [string]$remote,
41 | [string]$azdevPat,
42 | [string][Alias('v', 'vmr')]$vmrDir,
43 | [switch]$ci,
44 | [switch]$debugOutput
45 | )
46 |
47 | function Fail {
48 | Write-Host "> $($args[0])" -ForegroundColor 'Red'
49 | }
50 |
51 | function Highlight {
52 | Write-Host "> $($args[0])" -ForegroundColor 'Cyan'
53 | }
54 |
55 | $verbosity = 'verbose'
56 | if ($debugOutput) {
57 | $verbosity = 'debug'
58 | }
59 | # Validation
60 |
61 | if (-not $tmpDir) {
62 | Fail "Missing -tmpDir argument. Please specify the path to the temporary folder where the repositories will be cloned"
63 | exit 1
64 | }
65 |
66 | # Sanitize the input
67 |
68 | if (-not $vmrDir) {
69 | $vmrDir = Join-Path $tmpDir 'dotnet'
70 | }
71 |
72 | if (-not (Test-Path -Path $tmpDir -PathType Container)) {
73 | New-Item -ItemType Directory -Path $tmpDir | Out-Null
74 | }
75 |
76 | # Prepare the VMR
77 |
78 | if (-not (Test-Path -Path $vmrDir -PathType Container)) {
79 | Highlight "Cloning 'dotnet/dotnet' into $vmrDir.."
80 | git clone https://github.com/dotnet/dotnet $vmrDir
81 |
82 | if ($vmrBranch) {
83 | git -C $vmrDir switch -c $vmrBranch
84 | }
85 | }
86 | else {
87 | if ((git -C $vmrDir diff --quiet) -eq $false) {
88 | Fail "There are changes in the working tree of $vmrDir. Please commit or stash your changes"
89 | exit 1
90 | }
91 |
92 | if ($vmrBranch) {
93 | Highlight "Preparing $vmrDir"
94 | git -C $vmrDir checkout $vmrBranch
95 | git -C $vmrDir pull
96 | }
97 | }
98 |
99 | Set-StrictMode -Version Latest
100 |
101 | # Prepare darc
102 |
103 | Highlight 'Installing .NET, preparing the tooling..'
104 | . .\eng\common\tools.ps1
105 | $dotnetRoot = InitializeDotNetCli -install:$true
106 | $darc = Get-Darc
107 | $dotnet = "$dotnetRoot\dotnet.exe"
108 |
109 | Highlight "Starting the synchronization of VMR.."
110 |
111 | # Synchronize the VMR
112 | $darcArgs = (
113 | "vmr", "forwardflow",
114 | "--tmp", $tmpDir,
115 | "--$verbosity",
116 | $vmrDir
117 | )
118 |
119 | if ($ci) {
120 | $darcArgs += ("--ci")
121 | }
122 |
123 | if ($azdevPat) {
124 | $darcArgs += ("--azdev-pat", $azdevPat)
125 | }
126 |
127 | & "$darc" $darcArgs
128 |
129 | if ($LASTEXITCODE -eq 0) {
130 | Highlight "Synchronization succeeded"
131 | }
132 | else {
133 | Fail "Synchronization of repo to VMR failed!"
134 | Fail "'$vmrDir' is left in its last state (re-run of this script will reset it)."
135 | Fail "Please inspect the logs which contain path to the failing patch file (use -debugOutput to get all the details)."
136 | Fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script."
137 | exit 1
138 | }
139 |
--------------------------------------------------------------------------------
/eng/common/templates/job/job.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | enablePublishBuildArtifacts: false
3 | disableComponentGovernance: ''
4 | componentGovernanceIgnoreDirectories: ''
5 | # Sbom related params
6 | enableSbom: true
7 | runAsPublic: false
8 | PackageVersion: 9.0.0
9 | BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts'
10 |
11 | jobs:
12 | - template: /eng/common/core-templates/job/job.yml
13 | parameters:
14 | is1ESPipeline: false
15 |
16 | ${{ each parameter in parameters }}:
17 | ${{ if and(ne(parameter.key, 'steps'), ne(parameter.key, 'is1ESPipeline')) }}:
18 | ${{ parameter.key }}: ${{ parameter.value }}
19 |
20 | steps:
21 | - ${{ each step in parameters.steps }}:
22 | - ${{ step }}
23 |
24 | componentGovernanceSteps:
25 | - template: /eng/common/templates/steps/component-governance.yml
26 | parameters:
27 | ${{ if eq(parameters.disableComponentGovernance, '') }}:
28 | ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
29 | disableComponentGovernance: false
30 | ${{ else }}:
31 | disableComponentGovernance: true
32 | ${{ else }}:
33 | disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
34 | componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
35 |
36 | artifactPublishSteps:
37 | - ${{ if ne(parameters.artifacts.publish, '') }}:
38 | - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
39 | - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
40 | parameters:
41 | is1ESPipeline: false
42 | args:
43 | displayName: Publish pipeline artifacts
44 | pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
45 | publishLocation: Container
46 | artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
47 | continueOnError: true
48 | condition: always()
49 | retryCountOnTaskFailure: 10 # for any logs being locked
50 | - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
51 | - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
52 | parameters:
53 | is1ESPipeline: false
54 | args:
55 | targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
56 | artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
57 | displayName: 'Publish logs'
58 | continueOnError: true
59 | condition: always()
60 | retryCountOnTaskFailure: 10 # for any logs being locked
61 | sbomEnabled: false # we don't need SBOM for logs
62 |
63 | - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
64 | - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
65 | parameters:
66 | is1ESPipeline: false
67 | args:
68 | displayName: Publish Logs
69 | pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
70 | publishLocation: Container
71 | artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
72 | continueOnError: true
73 | condition: always()
74 |
75 | - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
76 | - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
77 | parameters:
78 | is1ESPipeline: false
79 | args:
80 | targetPath: '$(System.DefaultWorkingDirectory)\eng\common\BuildConfiguration'
81 | artifactName: 'BuildConfiguration'
82 | displayName: 'Publish build retry configuration'
83 | continueOnError: true
84 | sbomEnabled: false # we don't need SBOM for BuildConfiguration
85 |
--------------------------------------------------------------------------------
/eng/common/templates-official/job/job.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | # Sbom related params
3 | enableSbom: true
4 | runAsPublic: false
5 | PackageVersion: 9.0.0
6 | BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts'
7 |
8 | jobs:
9 | - template: /eng/common/core-templates/job/job.yml
10 | parameters:
11 | is1ESPipeline: true
12 |
13 | componentGovernanceSteps:
14 | - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
15 | - template: /eng/common/templates/steps/generate-sbom.yml
16 | parameters:
17 | PackageVersion: ${{ parameters.packageVersion }}
18 | BuildDropPath: ${{ parameters.buildDropPath }}
19 | ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
20 | publishArtifacts: false
21 |
22 | # publish artifacts
23 | # for 1ES managed templates, use the templateContext.output to handle multiple outputs.
24 | templateContext:
25 | outputParentDirectory: $(Build.ArtifactStagingDirectory)
26 | outputs:
27 | - ${{ if ne(parameters.artifacts.publish, '') }}:
28 | - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
29 | - output: buildArtifacts
30 | displayName: Publish pipeline artifacts
31 | PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
32 | ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
33 | condition: always()
34 | retryCountOnTaskFailure: 10 # for any logs being locked
35 | continueOnError: true
36 | - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
37 | - output: pipelineArtifact
38 | targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
39 | artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)_Attempt$(System.JobAttempt)') }}
40 | displayName: 'Publish logs'
41 | continueOnError: true
42 | condition: always()
43 | retryCountOnTaskFailure: 10 # for any logs being locked
44 | sbomEnabled: false # we don't need SBOM for logs
45 |
46 | - ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}:
47 | - output: buildArtifacts
48 | displayName: Publish Logs
49 | PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
50 | publishLocation: Container
51 | ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
52 | continueOnError: true
53 | condition: always()
54 | sbomEnabled: false # we don't need SBOM for logs
55 |
56 | - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
57 | - output: pipelineArtifact
58 | targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/eng/common/BuildConfiguration'
59 | artifactName: 'BuildConfiguration'
60 | displayName: 'Publish build retry configuration'
61 | continueOnError: true
62 | sbomEnabled: false # we don't need SBOM for BuildConfiguration
63 |
64 | - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
65 | - output: pipelineArtifact
66 | displayName: Publish SBOM manifest
67 | continueOnError: true
68 | targetPath: $(Build.ArtifactStagingDirectory)/sbom
69 | artifactName: $(ARTIFACT_NAME)
70 |
71 | # add any outputs provided via root yaml
72 | - ${{ if ne(parameters.templateContext.outputs, '') }}:
73 | - ${{ each output in parameters.templateContext.outputs }}:
74 | - ${{ output }}
75 |
76 | # add any remaining templateContext properties
77 | ${{ each context in parameters.templateContext }}:
78 | ${{ if and(ne(context.key, 'outputParentDirectory'), ne(context.key, 'outputs')) }}:
79 | ${{ context.key }}: ${{ context.value }}
80 |
81 | ${{ each parameter in parameters }}:
82 | ${{ if and(ne(parameter.key, 'templateContext'), ne(parameter.key, 'is1ESPipeline')) }}:
83 | ${{ parameter.key }}: ${{ parameter.value }}
84 |
--------------------------------------------------------------------------------
/eng/common/post-build/nuget-verification.ps1:
--------------------------------------------------------------------------------
1 | <#
2 | .SYNOPSIS
3 | Verifies that Microsoft NuGet packages have proper metadata.
4 | .DESCRIPTION
5 | Downloads a verification tool and runs metadata validation on the provided NuGet packages. This script writes an
6 | error if any of the provided packages fail validation. All arguments provided to this PowerShell script that do not
7 | match PowerShell parameters are passed on to the verification tool downloaded during the execution of this script.
8 | .PARAMETER NuGetExePath
9 | The path to the nuget.exe binary to use. If not provided, nuget.exe will be downloaded into the -DownloadPath
10 | directory.
11 | .PARAMETER PackageSource
12 | The package source to use to download the verification tool. If not provided, nuget.org will be used.
13 | .PARAMETER DownloadPath
14 | The directory path to download the verification tool and nuget.exe to. If not provided,
15 | %TEMP%\NuGet.VerifyNuGetPackage will be used.
16 | .PARAMETER args
17 | Arguments that will be passed to the verification tool.
18 | .EXAMPLE
19 | PS> .\verify.ps1 *.nupkg
20 |         Verifies the metadata of all .nupkg files in the current working directory.
21 | .EXAMPLE
22 | PS> .\verify.ps1 --help
23 |         Displays the help text of the downloaded verification tool.
24 | .LINK
25 | https://github.com/NuGet/NuGetGallery/blob/master/src/VerifyMicrosoftPackage/README.md
26 | #>
27 |
28 | # This script was copied from https://github.com/NuGet/NuGetGallery/blob/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1
29 |
30 | [CmdletBinding(PositionalBinding = $false)]
31 | param(
32 | [string]$NuGetExePath,
33 | [string]$PackageSource = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json",
34 | [string]$DownloadPath,
35 | [Parameter(ValueFromRemainingArguments = $true)]
36 | [string[]]$args
37 | )
38 |
39 | # The URL to download nuget.exe.
40 | $nugetExeUrl = "https://dist.nuget.org/win-x86-commandline/v4.9.4/nuget.exe"
41 |
42 | # The package ID of the verification tool.
43 | $packageId = "NuGet.VerifyMicrosoftPackage"
44 |
45 | # The location that nuget.exe and the verification tool will be downloaded to.
46 | if (!$DownloadPath) {
47 | $DownloadPath = (Join-Path $env:TEMP "NuGet.VerifyMicrosoftPackage")
48 | }
49 |
50 | $fence = New-Object -TypeName string -ArgumentList '=', 80
51 |
52 | # Create the download directory, if it doesn't already exist.
53 | if (!(Test-Path $DownloadPath)) {
54 | New-Item -ItemType Directory $DownloadPath | Out-Null
55 | }
56 | Write-Host "Using download path: $DownloadPath"
57 |
58 | if ($NuGetExePath) {
59 | $nuget = $NuGetExePath
60 | } else {
61 | $downloadedNuGetExe = Join-Path $DownloadPath "nuget.exe"
62 |
63 | # Download nuget.exe, if it doesn't already exist.
64 | if (!(Test-Path $downloadedNuGetExe)) {
65 | Write-Host "Downloading nuget.exe from $nugetExeUrl..."
66 | $ProgressPreference = 'SilentlyContinue'
67 | try {
68 | Invoke-WebRequest $nugetExeUrl -UseBasicParsing -OutFile $downloadedNuGetExe
69 | $ProgressPreference = 'Continue'
70 | } catch {
71 | $ProgressPreference = 'Continue'
72 | Write-Error $_
73 | Write-Error "nuget.exe failed to download."
74 | exit
75 | }
76 | }
77 |
78 | $nuget = $downloadedNuGetExe
79 | }
80 |
81 | Write-Host "Using nuget.exe path: $nuget"
82 | Write-Host " "
83 |
84 | # Download the latest version of the verification tool.
85 | Write-Host "Downloading the latest version of $packageId from $packageSource..."
86 | Write-Host $fence
87 | & $nuget install $packageId `
88 | -Prerelease `
89 | -OutputDirectory $DownloadPath `
90 | -Source $PackageSource
91 | Write-Host $fence
92 | Write-Host " "
93 |
94 | if ($LASTEXITCODE -ne 0) {
95 | Write-Error "nuget.exe failed to fetch the verify tool."
96 | exit
97 | }
98 |
99 | # Find the most recently downloaded tool
100 | Write-Host "Finding the most recently downloaded verification tool."
101 | $verifyProbePath = Join-Path $DownloadPath "$packageId.*"
102 | $verifyPath = Get-ChildItem -Path $verifyProbePath -Directory `
103 | | Sort-Object -Property LastWriteTime -Descending `
104 | | Select-Object -First 1
105 | $verify = Join-Path $verifyPath "tools\NuGet.VerifyMicrosoftPackage.exe"
106 | Write-Host "Using verification tool: $verify"
107 | Write-Host " "
108 |
109 | # Execute the verification tool.
110 | Write-Host "Executing the verify tool..."
111 | Write-Host $fence
112 | & $verify $args
113 | Write-Host $fence
114 | Write-Host " "
115 |
116 | # Respond to the exit code.
117 | if ($LASTEXITCODE -ne 0) {
118 | Write-Error "The verify tool found some problems."
119 | } else {
120 | Write-Output "The verify tool succeeded."
121 | }
122 |
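
As a usage sketch only (not part of the repository): the script can be pointed at locally built packages, and any arguments it does not recognize are forwarded to the downloaded NuGet.VerifyMicrosoftPackage tool, as the help above describes. The package glob and cache directory below are hypothetical.

    # Hypothetical invocation; unrecognized arguments pass through to the verification tool.
    pwsh ./eng/common/post-build/nuget-verification.ps1 \
        -DownloadPath "$HOME/.cache/nuget-verify" \
        artifacts/packages/Release/Shipping/*.nupkg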
--------------------------------------------------------------------------------
/eng/common/native/install-tool.ps1:
--------------------------------------------------------------------------------
1 | <#
2 | .SYNOPSIS
3 | Install native tool
4 |
5 | .DESCRIPTION
6 | Install cmake native tool from Azure blob storage
7 |
8 | .PARAMETER InstallPath
9 | Base directory to install native tool to
10 |
11 | .PARAMETER BaseUri
12 | Base file directory or Url from which to acquire tool archives
13 |
14 | .PARAMETER CommonLibraryDirectory
15 | Path to folder containing common library modules
16 |
17 | .PARAMETER Force
18 | Force install of tools even if they previously exist
19 |
20 | .PARAMETER Clean
21 | Don't install the tool, just clean up the current install of the tool
22 |
23 | .PARAMETER DownloadRetries
24 | Total number of retry attempts
25 |
26 | .PARAMETER RetryWaitTimeInSeconds
27 | Wait time between retry attempts in seconds
28 |
29 | .NOTES
30 | Returns 0 if install succeeds, 1 otherwise
31 | #>
32 | [CmdletBinding(PositionalBinding=$false)]
33 | Param (
34 | [Parameter(Mandatory=$True)]
35 | [string] $ToolName,
36 | [Parameter(Mandatory=$True)]
37 | [string] $InstallPath,
38 | [Parameter(Mandatory=$True)]
39 | [string] $BaseUri,
40 | [Parameter(Mandatory=$True)]
41 | [string] $Version,
42 | [string] $CommonLibraryDirectory = $PSScriptRoot,
43 | [switch] $Force = $False,
44 | [switch] $Clean = $False,
45 | [int] $DownloadRetries = 5,
46 | [int] $RetryWaitTimeInSeconds = 30
47 | )
48 |
49 | . $PSScriptRoot\..\pipeline-logging-functions.ps1
50 |
51 | # Import common library modules
52 | Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1")
53 |
54 | try {
55 | # Define verbose switch if undefined
56 | $Verbose = $VerbosePreference -Eq "Continue"
57 |
58 | $Arch = CommonLibrary\Get-MachineArchitecture
59 | $ToolOs = "win64"
60 | if($Arch -Eq "x32") {
61 | $ToolOs = "win32"
62 | }
63 | $ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch"
64 | $ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\"
65 | $Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip"
66 | $ShimPath = Join-Path $InstallPath "$ToolName.exe"
67 |
68 | if ($Clean) {
69 | Write-Host "Cleaning $ToolInstallDirectory"
70 | if (Test-Path $ToolInstallDirectory) {
71 | Remove-Item $ToolInstallDirectory -Force -Recurse
72 | }
73 | Write-Host "Cleaning $ShimPath"
74 | if (Test-Path $ShimPath) {
75 | Remove-Item $ShimPath -Force
76 | }
77 | $ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri
78 | Write-Host "Cleaning $ToolTempPath"
79 | if (Test-Path $ToolTempPath) {
80 | Remove-Item $ToolTempPath -Force
81 | }
82 | exit 0
83 | }
84 |
85 | # Install tool
86 | if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) {
87 | Write-Verbose "$ToolName ($Version) already exists, skipping install"
88 | }
89 | else {
90 | $InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri `
91 | -InstallDirectory $ToolInstallDirectory `
92 | -Force:$Force `
93 | -DownloadRetries $DownloadRetries `
94 | -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
95 | -Verbose:$Verbose
96 |
97 | if ($InstallStatus -Eq $False) {
98 | Write-PipelineTelemetryError "Installation failed" -Category "NativeToolsetBootstrapping"
99 | exit 1
100 | }
101 | }
102 |
103 | $ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName }
104 | if (@($ToolFilePath).Length -Gt 1) {
105 | Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))"
106 | exit 1
107 | } elseif (@($ToolFilePath).Length -Lt 1) {
108 | Write-Host "$ToolName was not found in $ToolInstallDirectory."
109 | exit 1
110 | }
111 |
112 | # Generate shim
113 | # Always rewrite shims so that we are referencing the expected version
114 | $GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName `
115 | -ShimDirectory $InstallPath `
116 | -ToolFilePath "$ToolFilePath" `
117 | -BaseUri $BaseUri `
118 | -Force:$Force `
119 | -Verbose:$Verbose
120 |
121 | if ($GenerateShimStatus -Eq $False) {
122 | Write-PipelineTelemetryError "Generate shim failed" -Category "NativeToolsetBootstrapping"
123 | return 1
124 | }
125 |
126 | exit 0
127 | }
128 | catch {
129 | Write-Host $_.ScriptStackTrace
130 | Write-PipelineTelemetryError -Category "NativeToolsetBootstrapping" -Message $_
131 | exit 1
132 | }
133 |
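
A rough sketch, with placeholder values not taken from this repository: the bootstrapper is typically driven by the init-tools-native scripts, but it can be exercised directly by supplying the mandatory parameters. On success it installs under InstallPath\ToolName\Version and writes a ToolName.exe shim in InstallPath.

    # Hypothetical invocation on a Windows agent (e.g. from Git Bash);
    # ToolName, InstallPath, BaseUri and Version are placeholders.
    pwsh ./eng/common/native/install-tool.ps1 \
        -ToolName cmake \
        -InstallPath "C:/tools/native" \
        -BaseUri "https://example.blob.core.windows.net/native-tools" \
        -Version 3.21.0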
--------------------------------------------------------------------------------
/eng/common/pipeline-logging-functions.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function Write-PipelineTelemetryError {
4 | local telemetry_category=''
5 | local force=false
6 | local function_args=()
7 | local message=''
8 | while [[ $# -gt 0 ]]; do
9 | opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
10 | case "$opt" in
11 | -category|-c)
12 | telemetry_category=$2
13 | shift
14 | ;;
15 | -force|-f)
16 | force=true
17 | ;;
18 | -*)
19 | function_args+=("$1 $2")
20 | shift
21 | ;;
22 | *)
23 | message=$*
24 | ;;
25 | esac
26 | shift
27 | done
28 |
29 | if [[ $force != true ]] && [[ "$ci" != true ]]; then
30 | echo "$message" >&2
31 | return
32 | fi
33 |
34 | if [[ $force == true ]]; then
35 | function_args+=("-force")
36 | fi
37 | message="(NETCORE_ENGINEERING_TELEMETRY=$telemetry_category) $message"
38 | function_args+=("$message")
39 | Write-PipelineTaskError ${function_args[@]}
40 | }
41 |
42 | function Write-PipelineTaskError {
43 | local message_type="error"
44 | local sourcepath=''
45 | local linenumber=''
46 | local columnnumber=''
47 | local error_code=''
48 | local force=false
49 |
50 | while [[ $# -gt 0 ]]; do
51 | opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
52 | case "$opt" in
53 | -type|-t)
54 | message_type=$2
55 | shift
56 | ;;
57 | -sourcepath|-s)
58 | sourcepath=$2
59 | shift
60 | ;;
61 | -linenumber|-ln)
62 | linenumber=$2
63 | shift
64 | ;;
65 | -columnnumber|-cn)
66 | columnnumber=$2
67 | shift
68 | ;;
69 | -errcode|-e)
70 | error_code=$2
71 | shift
72 | ;;
73 | -force|-f)
74 | force=true
75 | ;;
76 | *)
77 | break
78 | ;;
79 | esac
80 |
81 | shift
82 | done
83 |
84 | if [[ $force != true ]] && [[ "$ci" != true ]]; then
85 | echo "$@" >&2
86 | return
87 | fi
88 |
89 | local message="##vso[task.logissue"
90 |
91 | message="$message type=$message_type"
92 |
93 | if [ -n "$sourcepath" ]; then
94 | message="$message;sourcepath=$sourcepath"
95 | fi
96 |
97 | if [ -n "$linenumber" ]; then
98 | message="$message;linenumber=$linenumber"
99 | fi
100 |
101 | if [ -n "$columnnumber" ]; then
102 | message="$message;columnnumber=$columnnumber"
103 | fi
104 |
105 | if [ -n "$error_code" ]; then
106 | message="$message;code=$error_code"
107 | fi
108 |
109 | message="$message]$*"
110 | echo "$message"
111 | }
112 |
113 | function Write-PipelineSetVariable {
114 | if [[ "$ci" != true ]]; then
115 | return
116 | fi
117 |
118 | local name=''
119 | local value=''
120 | local secret=false
121 | local as_output=false
122 | local is_multi_job_variable=true
123 |
124 | while [[ $# -gt 0 ]]; do
125 | opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
126 | case "$opt" in
127 | -name|-n)
128 | name=$2
129 | shift
130 | ;;
131 | -value|-v)
132 | value=$2
133 | shift
134 | ;;
135 | -secret|-s)
136 | secret=true
137 | ;;
138 | -as_output|-a)
139 | as_output=true
140 | ;;
141 | -is_multi_job_variable|-i)
142 | is_multi_job_variable=$2
143 | shift
144 | ;;
145 | esac
146 | shift
147 | done
148 |
149 | value=${value/;/%3B}
150 | value=${value/\\r/%0D}
151 | value=${value/\\n/%0A}
152 | value=${value/]/%5D}
153 |
154 | local message="##vso[task.setvariable variable=$name;isSecret=$secret;isOutput=$is_multi_job_variable]$value"
155 |
156 | if [[ "$as_output" == true ]]; then
157 | $message
158 | else
159 | echo "$message"
160 | fi
161 | }
162 |
163 | function Write-PipelinePrependPath {
164 | local prepend_path=''
165 |
166 | while [[ $# -gt 0 ]]; do
167 | opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
168 | case "$opt" in
169 | -path|-p)
170 | prepend_path=$2
171 | shift
172 | ;;
173 | esac
174 | shift
175 | done
176 |
177 | export PATH="$prepend_path:$PATH"
178 |
179 | if [[ "$ci" == true ]]; then
180 | echo "##vso[task.prependpath]$prepend_path"
181 | fi
182 | }
183 |
184 | function Write-PipelineSetResult {
185 | local result=''
186 | local message=''
187 |
188 | while [[ $# -gt 0 ]]; do
189 | opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
190 | case "$opt" in
191 | -result|-r)
192 | result=$2
193 | shift
194 | ;;
195 | -message|-m)
196 | message=$2
197 | shift
198 | ;;
199 | esac
200 | shift
201 | done
202 |
203 | if [[ "$ci" == true ]]; then
204 | echo "##vso[task.complete result=$result;]$message"
205 | fi
206 | }
207 |
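
A small usage sketch, assuming the file is sourced from another build script: the helpers only emit ##vso logging commands when ci=true; otherwise they print to stderr or are a no-op, as implemented above.

    # Hypothetical usage of the logging helpers.
    ci=true
    . ./eng/common/pipeline-logging-functions.sh

    Write-PipelineTelemetryError -category 'Build' 'Restore failed'
    #   emits: ##vso[task.logissue type=error](NETCORE_ENGINEERING_TELEMETRY=Build) Restore failed
    Write-PipelineSetVariable -name 'MyVariable' -value 'some value'
    Write-PipelinePrependPath -path "$HOME/.dotnet"
    Write-PipelineSetResult -result 'Succeeded' -message 'build completed'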
--------------------------------------------------------------------------------
/eng/common/native/common-library.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function GetNativeInstallDirectory {
4 | local install_dir
5 |
6 | if [[ -z $NETCOREENG_INSTALL_DIRECTORY ]]; then
7 | install_dir=$HOME/.netcoreeng/native/
8 | else
9 | install_dir=$NETCOREENG_INSTALL_DIRECTORY
10 | fi
11 |
12 | echo $install_dir
13 | return 0
14 | }
15 |
16 | function GetTempDirectory {
17 |
18 | echo $(GetNativeInstallDirectory)temp/
19 | return 0
20 | }
21 |
22 | function ExpandZip {
23 | local zip_path=$1
24 | local output_directory=$2
25 | local force=${3:-false}
26 |
27 | echo "Extracting $zip_path to $output_directory"
28 | if [[ -d $output_directory ]] && [[ $force = false ]]; then
29 | echo "Directory '$output_directory' already exists, skipping extract"
30 | return 0
31 | fi
32 |
33 | if [[ -d $output_directory ]]; then
34 |     echo "Force flag enabled, but '$output_directory' exists. Removing directory"
35 | rm -rf $output_directory
36 | if [[ $? != 0 ]]; then
37 | Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to remove '$output_directory'"
38 | return 1
39 | fi
40 | fi
41 |
42 | echo "Creating directory: '$output_directory'"
43 | mkdir -p $output_directory
44 |
45 | echo "Extracting archive"
46 | tar -xf $zip_path -C $output_directory
47 | if [[ $? != 0 ]]; then
48 | Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to extract '$zip_path'"
49 | return 1
50 | fi
51 |
52 | return 0
53 | }
54 |
55 | function GetCurrentOS {
56 | local unameOut="$(uname -s)"
57 | case $unameOut in
58 | Linux*) echo "Linux";;
59 | Darwin*) echo "MacOS";;
60 | esac
61 | return 0
62 | }
63 |
64 | function GetFile {
65 | local uri=$1
66 | local path=$2
67 | local force=${3:-false}
68 | local download_retries=${4:-5}
69 | local retry_wait_time_seconds=${5:-30}
70 |
71 | if [[ -f $path ]]; then
72 | if [[ $force = false ]]; then
73 | echo "File '$path' already exists. Skipping download"
74 | return 0
75 | else
76 | rm -rf $path
77 | fi
78 | fi
79 |
80 | if [[ -f $uri ]]; then
81 | echo "'$uri' is a file path, copying file to '$path'"
82 | cp $uri $path
83 | return $?
84 | fi
85 |
86 | echo "Downloading $uri"
87 | # Use curl if available, otherwise use wget
88 | if command -v curl > /dev/null; then
89 | curl "$uri" -sSL --retry $download_retries --retry-delay $retry_wait_time_seconds --create-dirs -o "$path" --fail
90 | else
91 | wget -q -O "$path" "$uri" --tries="$download_retries"
92 | fi
93 |
94 | return $?
95 | }
96 |
97 | function GetTempPathFileName {
98 | local path=$1
99 |
100 | local temp_dir=$(GetTempDirectory)
101 | local temp_file_name=$(basename $path)
102 | echo $temp_dir$temp_file_name
103 | return 0
104 | }
105 |
106 | function DownloadAndExtract {
107 | local uri=$1
108 | local installDir=$2
109 | local force=${3:-false}
110 | local download_retries=${4:-5}
111 | local retry_wait_time_seconds=${5:-30}
112 |
113 | local temp_tool_path=$(GetTempPathFileName $uri)
114 |
115 | echo "downloading to: $temp_tool_path"
116 |
117 | # Download file
118 | GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds
119 | if [[ $? != 0 ]]; then
120 | Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to download '$uri' to '$temp_tool_path'."
121 | return 1
122 | fi
123 |
124 | # Extract File
125 | echo "extracting from $temp_tool_path to $installDir"
126 | ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds
127 | if [[ $? != 0 ]]; then
128 | Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to extract '$temp_tool_path' to '$installDir'."
129 | return 1
130 | fi
131 |
132 | return 0
133 | }
134 |
135 | function NewScriptShim {
136 | local shimpath=$1
137 | local tool_file_path=$2
138 | local force=${3:-false}
139 |
140 | echo "Generating '$shimpath' shim"
141 | if [[ -f $shimpath ]]; then
142 | if [[ $force = false ]]; then
143 | echo "File '$shimpath' already exists." >&2
144 | return 1
145 | else
146 | rm -rf $shimpath
147 | fi
148 | fi
149 |
150 | if [[ ! -f $tool_file_path ]]; then
151 | # try to see if the path is lower cased
152 | tool_file_path="$(echo $tool_file_path | tr "[:upper:]" "[:lower:]")"
153 | if [[ ! -f $tool_file_path ]]; then
154 | Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist"
155 | return 1
156 | fi
157 | fi
158 |
159 | local shim_contents=$'#!/usr/bin/env bash\n'
160 | shim_contents+="SHIMARGS="$'$1\n'
161 | shim_contents+="$tool_file_path"$' $SHIMARGS\n'
162 |
163 | # Write shim file
164 | echo "$shim_contents" > $shimpath
165 |
166 | chmod +x $shimpath
167 |
168 | echo "Finished generating shim '$shimpath'"
169 |
170 | return $?
171 | }
172 |
173 |
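
A minimal sketch of how these helpers compose (the URL and paths are hypothetical): the failure paths call Write-PipelineTelemetryError, so pipeline-logging-functions.sh is expected to be sourced first.

    # Hypothetical usage: download an archive, extract it, and shim the extracted binary.
    . ./eng/common/pipeline-logging-functions.sh
    . ./eng/common/native/common-library.sh

    install_dir="$(GetNativeInstallDirectory)cmake/3.21.0"
    DownloadAndExtract "https://example.com/cmake-3.21.0-linux-x64.tar.gz" "$install_dir" \
      && NewScriptShim "$(GetNativeInstallDirectory)cmake" "$install_dir/bin/cmake"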
--------------------------------------------------------------------------------
/eng/common/internal-feed-operations.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | # Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
6 | # in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
7 | # https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables.
8 | # This should ONLY be called from identified internal builds
9 | function SetupCredProvider {
10 | local authToken=$1
11 |
12 | # Install the Cred Provider NuGet plugin
13 |   echo "Setting up Cred Provider NuGet plugin in the agent..."
14 |   echo "Getting 'installcredprovider.sh' from 'https://github.com/microsoft/artifacts-credprovider'..."
15 |
16 | local url="https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.sh"
17 |
18 |   echo "Writing the contents of 'installcredprovider.sh' locally..."
19 | local installcredproviderPath="installcredprovider.sh"
20 | if command -v curl > /dev/null; then
21 | curl $url > "$installcredproviderPath"
22 | else
23 | wget -q -O "$installcredproviderPath" "$url"
24 | fi
25 |
26 | echo "Installing plugin..."
27 | . "$installcredproviderPath"
28 |
29 | echo "Deleting local copy of 'installcredprovider.sh'..."
30 | rm installcredprovider.sh
31 |
32 | if [ ! -d "$HOME/.nuget/plugins" ]; then
33 | Write-PipelineTelemetryError -category 'Build' 'CredProvider plugin was not installed correctly!'
34 | ExitWithExitCode 1
35 | else
36 | echo "CredProvider plugin was installed correctly!"
37 | fi
38 |
39 | # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
40 | # feeds successfully
41 |
42 |   local nugetConfigPath="${repo_root}NuGet.config"
43 |
44 |   if [ ! -f "$nugetConfigPath" ]; then
45 | Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!"
46 | ExitWithExitCode 1
47 | fi
48 |
49 | local endpoints='['
50 | local nugetConfigPackageValues=`cat "$nugetConfigPath" | grep "key=\"darc-int-"`
51 | local pattern="value=\"(.*)\""
52 |
53 | for value in $nugetConfigPackageValues
54 | do
55 | if [[ $value =~ $pattern ]]; then
56 | local endpoint="${BASH_REMATCH[1]}"
57 | endpoints+="{\"endpoint\": \"$endpoint\", \"password\": \"$authToken\"},"
58 | fi
59 | done
60 |
61 | endpoints=${endpoints%?}
62 | endpoints+=']'
63 |
64 | if [ ${#endpoints} -gt 2 ]; then
65 | local endpointCredentials="{\"endpointCredentials\": "$endpoints"}"
66 |
67 | echo "##vso[task.setvariable variable=VSS_NUGET_EXTERNAL_FEED_ENDPOINTS]$endpointCredentials"
68 | echo "##vso[task.setvariable variable=NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED]False"
69 | else
70 | echo "No internal endpoints found in NuGet.config"
71 | fi
72 | }
73 |
74 | # Workaround for https://github.com/microsoft/msbuild/issues/4430
75 | function InstallDotNetSdkAndRestoreArcade {
76 | local dotnetTempDir="$repo_root/dotnet"
77 | local dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
78 | local restoreProjPath="$repo_root/eng/common/restore.proj"
79 |
80 | echo "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..."
81 | echo "" > "$restoreProjPath"
82 |
83 | InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion"
84 |
85 | local res=`$dotnetTempDir/dotnet restore $restoreProjPath`
86 | echo "Arcade SDK restored!"
87 |
88 | # Cleanup
89 |   if [ -f "$restoreProjPath" ]; then
90 |     rm "$restoreProjPath"
91 |   fi
92 | 
93 |   if [ -d "$dotnetTempDir" ]; then
94 |     rm -r "$dotnetTempDir"
95 | fi
96 | }
97 |
98 | source="${BASH_SOURCE[0]}"
99 | operation=''
100 | authToken=''
101 | repoName=''
102 |
103 | while [[ $# > 0 ]]; do
104 | opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
105 | case "$opt" in
106 | --operation)
107 | operation=$2
108 | shift
109 | ;;
110 | --authtoken)
111 | authToken=$2
112 | shift
113 | ;;
114 | *)
115 | echo "Invalid argument: $1"
116 | usage
117 | exit 1
118 | ;;
119 | esac
120 |
121 | shift
122 | done
123 |
124 | while [[ -h "$source" ]]; do
125 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
126 | source="$(readlink "$source")"
127 | # if $source was a relative symlink, we need to resolve it relative to the path where the
128 | # symlink file was located
129 | [[ $source != /* ]] && source="$scriptroot/$source"
130 | done
131 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
132 |
133 | . "$scriptroot/tools.sh"
134 |
135 | if [ "$operation" = "setup" ]; then
136 | SetupCredProvider $authToken
137 | elif [ "$operation" = "install-restore" ]; then
138 | InstallDotNetSdkAndRestoreArcade
139 | else
140 | echo "Unknown operation '$operation'!"
141 | fi
142 |
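
For orientation, a hedged sketch of how an internal build step might call this script; the token variable is a placeholder for whatever secret the pipeline supplies, and the JSON shape shown is the one SetupCredProvider assembles above.

    # Hypothetical invocation from an identified internal build.
    ./eng/common/internal-feed-operations.sh --operation setup --authtoken "$INTERNAL_FEED_PAT"
    # SetupCredProvider sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS to a value of the form:
    #   {"endpointCredentials": [{"endpoint": "<darc-int feed URL>", "password": "<token>"}]}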
--------------------------------------------------------------------------------
/eng/common/core-templates/job/onelocbuild.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | # Optional: dependencies of the job
3 | dependsOn: ''
4 |
5 | # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
6 | pool: ''
7 |
8 | CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
9 | GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
10 |
11 | SourcesDirectory: $(System.DefaultWorkingDirectory)
12 | CreatePr: true
13 | AutoCompletePr: false
14 | ReusePr: true
15 | UseLfLineEndings: true
16 | UseCheckedInLocProjectJson: false
17 | SkipLocProjectJsonGeneration: false
18 | LanguageSet: VS_Main_Languages
19 | LclSource: lclFilesInRepo
20 | LclPackageId: ''
21 | RepoType: gitHub
22 | GitHubOrg: dotnet
23 | MirrorRepo: ''
24 | MirrorBranch: main
25 | condition: ''
26 | JobNameSuffix: ''
27 | is1ESPipeline: ''
28 | jobs:
29 | - job: OneLocBuild${{ parameters.JobNameSuffix }}
30 |
31 | dependsOn: ${{ parameters.dependsOn }}
32 |
33 | displayName: OneLocBuild${{ parameters.JobNameSuffix }}
34 |
35 | variables:
36 | - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
37 | - name: _GenerateLocProjectArguments
38 | value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
39 | -LanguageSet "${{ parameters.LanguageSet }}"
40 | -CreateNeutralXlfs
41 | - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
42 | - name: _GenerateLocProjectArguments
43 | value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
44 | - template: /eng/common/core-templates/variables/pool-providers.yml
45 | parameters:
46 | is1ESPipeline: ${{ parameters.is1ESPipeline }}
47 |
48 | ${{ if ne(parameters.pool, '') }}:
49 | pool: ${{ parameters.pool }}
50 | ${{ if eq(parameters.pool, '') }}:
51 | pool:
52 | # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
53 | ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
54 | name: AzurePipelines-EO
55 | image: 1ESPT-Windows2022
56 | demands: Cmd
57 | os: windows
58 | # If it's not devdiv, it's dnceng
59 | ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
60 | name: $(DncEngInternalBuildPool)
61 | image: 1es-windows-2022
62 | os: windows
63 |
64 | steps:
65 | - ${{ if eq(parameters.is1ESPipeline, '') }}:
66 | - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
67 |
68 | - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
69 | - task: Powershell@2
70 | inputs:
71 | filePath: $(System.DefaultWorkingDirectory)/eng/common/generate-locproject.ps1
72 | arguments: $(_GenerateLocProjectArguments)
73 | displayName: Generate LocProject.json
74 | condition: ${{ parameters.condition }}
75 |
76 | - task: OneLocBuild@2
77 | displayName: OneLocBuild
78 | env:
79 | SYSTEM_ACCESSTOKEN: $(System.AccessToken)
80 | inputs:
81 | locProj: eng/Localize/LocProject.json
82 | outDir: $(Build.ArtifactStagingDirectory)
83 | lclSource: ${{ parameters.LclSource }}
84 | lclPackageId: ${{ parameters.LclPackageId }}
85 | isCreatePrSelected: ${{ parameters.CreatePr }}
86 | isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
87 | ${{ if eq(parameters.CreatePr, true) }}:
88 | isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
89 | isShouldReusePrSelected: ${{ parameters.ReusePr }}
90 | packageSourceAuth: patAuth
91 | patVariable: ${{ parameters.CeapexPat }}
92 | ${{ if eq(parameters.RepoType, 'gitHub') }}:
93 | repoType: ${{ parameters.RepoType }}
94 | gitHubPatVariable: "${{ parameters.GithubPat }}"
95 | ${{ if ne(parameters.MirrorRepo, '') }}:
96 | isMirrorRepoSelected: true
97 | gitHubOrganization: ${{ parameters.GitHubOrg }}
98 | mirrorRepo: ${{ parameters.MirrorRepo }}
99 | mirrorBranch: ${{ parameters.MirrorBranch }}
100 | condition: ${{ parameters.condition }}
101 |
102 | # Copy the locProject.json to the root of the Loc directory, then publish a pipeline artifact
103 | - task: CopyFiles@2
104 | displayName: Copy LocProject.json
105 | inputs:
106 | SourceFolder: '$(System.DefaultWorkingDirectory)/eng/Localize/'
107 | Contents: 'LocProject.json'
108 | TargetFolder: '$(Build.ArtifactStagingDirectory)/loc'
109 | condition: ${{ parameters.condition }}
110 |
111 | - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
112 | parameters:
113 | is1ESPipeline: ${{ parameters.is1ESPipeline }}
114 | args:
115 | targetPath: '$(Build.ArtifactStagingDirectory)/loc'
116 | artifactName: 'Loc'
117 | displayName: 'Publish Localization Files'
118 | condition: ${{ parameters.condition }}
119 |
--------------------------------------------------------------------------------
/eng/common/internal-feed-operations.ps1:
--------------------------------------------------------------------------------
1 | param(
2 | [Parameter(Mandatory=$true)][string] $Operation,
3 | [string] $AuthToken,
4 | [string] $CommitSha,
5 | [string] $RepoName,
6 | [switch] $IsFeedPrivate
7 | )
8 |
9 | $ErrorActionPreference = 'Stop'
10 | Set-StrictMode -Version 2.0
11 | . $PSScriptRoot\tools.ps1
12 |
13 | # Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
14 | # in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
15 | # https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables. This should ONLY be called from identified
16 | # internal builds
17 | function SetupCredProvider {
18 | param(
19 | [string] $AuthToken
20 | )
21 |
22 | # Install the Cred Provider NuGet plugin
23 | Write-Host 'Setting up Cred Provider NuGet plugin in the agent...'
24 | Write-Host "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..."
25 |
26 | $url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1'
27 |
28 | Write-Host "Writing the contents of 'installcredprovider.ps1' locally..."
29 | Invoke-WebRequest $url -UseBasicParsing -OutFile installcredprovider.ps1
30 |
31 | Write-Host 'Installing plugin...'
32 | .\installcredprovider.ps1 -Force
33 |
34 | Write-Host "Deleting local copy of 'installcredprovider.ps1'..."
35 | Remove-Item .\installcredprovider.ps1
36 |
37 |   if (-Not (Test-Path "$env:USERPROFILE\.nuget\plugins\netcore")) {
38 | Write-PipelineTelemetryError -Category 'Arcade' -Message 'CredProvider plugin was not installed correctly!'
39 | ExitWithExitCode 1
40 | }
41 | else {
42 | Write-Host 'CredProvider plugin was installed correctly!'
43 | }
44 |
45 | # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
46 | # feeds successfully
47 |
48 | $nugetConfigPath = Join-Path $RepoRoot "NuGet.config"
49 |
50 | if (-Not (Test-Path -Path $nugetConfigPath)) {
51 | Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!'
52 | ExitWithExitCode 1
53 | }
54 |
55 | $endpoints = New-Object System.Collections.ArrayList
56 | $nugetConfigPackageSources = Select-Xml -Path $nugetConfigPath -XPath "//packageSources/add[contains(@key, 'darc-int-')]/@value" | foreach{$_.Node.Value}
57 |
58 | if (($nugetConfigPackageSources | Measure-Object).Count -gt 0 ) {
59 | foreach ($stableRestoreResource in $nugetConfigPackageSources) {
60 | $trimmedResource = ([string]$stableRestoreResource).Trim()
61 | [void]$endpoints.Add(@{endpoint="$trimmedResource"; password="$AuthToken"})
62 | }
63 | }
64 |
65 | if (($endpoints | Measure-Object).Count -gt 0) {
66 | $endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress
67 |
68 | # Create the environment variables the AzDo way
69 | Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $endpointCredentials -Properties @{
70 | 'variable' = 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS'
71 | 'issecret' = 'false'
72 | }
73 |
74 | # We don't want sessions cached since we will be updating the endpoints quite frequently
75 | Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data 'False' -Properties @{
76 | 'variable' = 'NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED'
77 | 'issecret' = 'false'
78 | }
79 | }
80 | else
81 | {
82 | Write-Host 'No internal endpoints found in NuGet.config'
83 | }
84 | }
85 |
86 | #Workaround for https://github.com/microsoft/msbuild/issues/4430
87 | function InstallDotNetSdkAndRestoreArcade {
88 | $dotnetTempDir = Join-Path $RepoRoot "dotnet"
89 | $dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
90 | $dotnet = "$dotnetTempDir\dotnet.exe"
91 | $restoreProjPath = "$PSScriptRoot\restore.proj"
92 |
93 | Write-Host "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..."
94 | InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion"
95 |
96 | '' | Out-File "$restoreProjPath"
97 |
98 | & $dotnet restore $restoreProjPath
99 |
100 | Write-Host 'Arcade SDK restored!'
101 |
102 | if (Test-Path -Path $restoreProjPath) {
103 | Remove-Item $restoreProjPath
104 | }
105 |
106 | if (Test-Path -Path $dotnetTempDir) {
107 | Remove-Item $dotnetTempDir -Recurse
108 | }
109 | }
110 |
111 | try {
112 | Push-Location $PSScriptRoot
113 |
114 | if ($Operation -like 'setup') {
115 | SetupCredProvider $AuthToken
116 | }
117 | elseif ($Operation -like 'install-restore') {
118 | InstallDotNetSdkAndRestoreArcade
119 | }
120 | else {
121 | Write-PipelineTelemetryError -Category 'Arcade' -Message "Unknown operation '$Operation'!"
122 | ExitWithExitCode 1
123 | }
124 | }
125 | catch {
126 | Write-Host $_.ScriptStackTrace
127 | Write-PipelineTelemetryError -Category 'Arcade' -Message $_
128 | ExitWithExitCode 1
129 | }
130 | finally {
131 | Pop-Location
132 | }
133 |
--------------------------------------------------------------------------------
/eng/common/native/init-compiler.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | #
3 | # This file detects the C/C++ compiler and exports it to the CC/CXX environment variables
4 | #
5 | # NOTE: some scripts source this file and rely on stdout being empty, make sure
6 | # to not output *anything* here, unless it is an error message that fails the
7 | # build.
8 |
9 | if [ -z "$build_arch" ] || [ -z "$compiler" ]; then
10 | echo "Usage..."
11 | echo "build_arch= compiler= init-compiler.sh"
12 | echo "Specify the target architecture."
13 |   echo "Specify the name of the compiler (clang or gcc)."
14 | exit 1
15 | fi
16 |
17 | case "$compiler" in
18 | clang*|-clang*|--clang*)
19 | # clangx.y or clang-x.y
20 | version="$(echo "$compiler" | tr -d '[:alpha:]-=')"
21 | majorVersion="${version%%.*}"
22 |
23 | # LLVM based on v18 released in early 2024, with two releases per year
24 | maxVersion="$((18 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 6)))"
25 | compiler=clang
26 | ;;
27 |
28 | gcc*|-gcc*|--gcc*)
29 | # gccx.y or gcc-x.y
30 | version="$(echo "$compiler" | tr -d '[:alpha:]-=')"
31 | majorVersion="${version%%.*}"
32 |
33 | # GCC based on v14 released in early 2024, with one release per year
34 | maxVersion="$((14 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 12)))"
35 | compiler=gcc
36 | ;;
37 | esac
38 |
39 | cxxCompiler="$compiler++"
40 |
41 | # clear the existing CC and CXX from environment
42 | CC=
43 | CXX=
44 | LDFLAGS=
45 |
46 | if [ "$compiler" = "gcc" ]; then cxxCompiler="g++"; fi
47 |
48 | check_version_exists() {
49 | desired_version=-1
50 |
51 | # Set up the environment to be used for building with the desired compiler.
52 | if command -v "$compiler-$1" > /dev/null; then
53 | desired_version="-$1"
54 | elif command -v "$compiler$1" > /dev/null; then
55 | desired_version="$1"
56 | fi
57 |
58 | echo "$desired_version"
59 | }
60 |
61 | __baseOS="$(uname)"
62 | set_compiler_version_from_CC() {
63 | if [ "$__baseOS" = "Darwin" ]; then
64 | # On Darwin, the versions from -version/-dumpversion refer to Xcode
65 | # versions, not llvm versions, so we can't rely on them.
66 | return
67 | fi
68 |
69 | version="$("$CC" -dumpversion)"
70 | if [ -z "$version" ]; then
71 | echo "Error: $CC -dumpversion didn't provide a version"
72 | exit 1
73 | fi
74 |
75 | # gcc and clang often display 3 part versions. However, gcc can show only 1 part in some environments.
76 |   IFS=. read -r majorVersion _ <<EOF
77 | $version
78 | EOF
79 | }
80 | 
81 | if [ -z "$CLR_CC" ]; then
82 | 
83 |   # Set default versions
84 |   if [ -z "$majorVersion" ]; then
85 |     # Probe for the newest installed major version of $compiler, walking down from maxVersion.
86 |     minVersion=8 # assumed lower bound for the probe
87 |     i="$maxVersion"
88 |     while [ "$i" -ge "$minVersion" ]; do
89 |       desired_version="$(check_version_exists "$i")"
90 |       if [ "$desired_version" != "-1" ]; then majorVersion="$i"; break; fi
91 |       i=$((i - 1))
92 |     done
93 | 
94 |     if [ -z "$majorVersion" ]; then
95 |       if ! command -v "$compiler" > /dev/null; then
96 | echo "Error: No compatible version of $compiler was found within the range of $minVersion to $maxVersion. Please upgrade your toolchain or specify the compiler explicitly using CLR_CC and CLR_CXX environment variables."
97 | exit 1
98 | fi
99 |
100 | CC="$(command -v "$compiler" 2> /dev/null)"
101 | CXX="$(command -v "$cxxCompiler" 2> /dev/null)"
102 | set_compiler_version_from_CC
103 | fi
104 | else
105 | desired_version="$(check_version_exists "$majorVersion")"
106 | if [ "$desired_version" = "-1" ]; then
107 | echo "Error: Could not find specific version of $compiler: $majorVersion."
108 | exit 1
109 | fi
110 | fi
111 |
112 | if [ -z "$CC" ]; then
113 | CC="$(command -v "$compiler$desired_version" 2> /dev/null)"
114 | CXX="$(command -v "$cxxCompiler$desired_version" 2> /dev/null)"
115 | if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler" 2> /dev/null)"; fi
116 | set_compiler_version_from_CC
117 | fi
118 | else
119 | if [ ! -f "$CLR_CC" ]; then
120 | echo "Error: CLR_CC is set but path '$CLR_CC' does not exist"
121 | exit 1
122 | fi
123 | CC="$CLR_CC"
124 | CXX="$CLR_CXX"
125 | set_compiler_version_from_CC
126 | fi
127 |
128 | if [ -z "$CC" ]; then
129 | echo "Error: Unable to find $compiler."
130 | exit 1
131 | fi
132 |
133 | if [ "$__baseOS" != "Darwin" ]; then
134 | # On Darwin, we always want to use the Apple linker.
135 |
136 | # Only lld version >= 9 can be considered stable. lld supports s390x starting from 18.0.
137 | if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && { [ "$build_arch" != "s390x" ] || [ "$majorVersion" -ge 18 ]; }; then
138 | if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then
139 | LDFLAGS="-fuse-ld=lld"
140 | fi
141 | fi
142 | fi
143 |
144 | SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version" 2> /dev/null)"
145 |
146 | export CC CXX LDFLAGS SCAN_BUILD_COMMAND
147 |
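
A short sketch of the intended usage, assuming a POSIX shell: the file is sourced (keeping stdout clean, per its header comment) after setting build_arch and compiler, and the caller then picks up CC, CXX, LDFLAGS and SCAN_BUILD_COMMAND from the environment. The architecture and compiler values below are examples.

    # Hypothetical usage from a build script.
    build_arch=x64
    compiler=clang
    . ./eng/common/native/init-compiler.sh
    "$CC" --version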
--------------------------------------------------------------------------------
/eng/common/sdl/configure-sdl-tool.ps1:
--------------------------------------------------------------------------------
1 | Param(
2 | [string] $GuardianCliLocation,
3 | [string] $WorkingDirectory,
4 | [string] $TargetDirectory,
5 | [string] $GdnFolder,
6 | # The list of Guardian tools to configure. For each object in the array:
7 | # - If the item is a [hashtable], it must contain these entries:
8 | # - Name = The tool name as Guardian knows it.
9 | # - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique
10 | # among all tool entries with the same Name.
11 | # - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")'
12 | # - If the item is a [string] $v, it is treated as '@{ Name="$v" }'
13 | [object[]] $ToolsList,
14 | [string] $GuardianLoggerLevel='Standard',
15 | # Optional: Additional params to add to any tool using CredScan.
16 | [string[]] $CrScanAdditionalRunConfigParams,
17 | # Optional: Additional params to add to any tool using PoliCheck.
18 | [string[]] $PoliCheckAdditionalRunConfigParams,
19 | # Optional: Additional params to add to any tool using CodeQL/Semmle.
20 | [string[]] $CodeQLAdditionalRunConfigParams,
21 | # Optional: Additional params to add to any tool using Binskim.
22 | [string[]] $BinskimAdditionalRunConfigParams
23 | )
24 |
25 | $ErrorActionPreference = 'Stop'
26 | Set-StrictMode -Version 2.0
27 | $disableConfigureToolsetImport = $true
28 | $global:LASTEXITCODE = 0
29 |
30 | try {
31 | # `tools.ps1` checks $ci to perform some actions. Since the SDL
32 | # scripts don't necessarily execute in the same agent that run the
33 | # build.ps1/sh script this variable isn't automatically set.
34 | $ci = $true
35 | . $PSScriptRoot\..\tools.ps1
36 |
37 | # Normalize tools list: all in [hashtable] form with defined values for each key.
38 | $ToolsList = $ToolsList |
39 | ForEach-Object {
40 | if ($_ -is [string]) {
41 | $_ = @{ Name = $_ }
42 | }
43 |
44 | if (-not ($_['Scenario'])) { $_.Scenario = "" }
45 | if (-not ($_['Args'])) { $_.Args = @() }
46 | $_
47 | }
48 |
49 | Write-Host "List of tools to configure:"
50 | $ToolsList | ForEach-Object { $_ | Out-String | Write-Host }
51 |
52 | # We store config files in the r directory of .gdn
53 | $gdnConfigPath = Join-Path $GdnFolder 'r'
54 | $ValidPath = Test-Path $GuardianCliLocation
55 |
56 | if ($ValidPath -eq $False)
57 | {
58 | Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
59 | ExitWithExitCode 1
60 | }
61 |
62 | foreach ($tool in $ToolsList) {
63 | # Put together the name and scenario to make a unique key.
64 | $toolConfigName = $tool.Name
65 | if ($tool.Scenario) {
66 | $toolConfigName += "_" + $tool.Scenario
67 | }
68 |
69 | Write-Host "=== Configuring $toolConfigName..."
70 |
71 | $gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig"
72 |
73 | # For some tools, add default and automatic args.
74 | switch -Exact ($tool.Name) {
75 | 'credscan' {
76 | if ($targetDirectory) {
77 | $tool.Args += "`"TargetDirectory < $TargetDirectory`""
78 | }
79 | $tool.Args += "`"OutputType < pre`""
80 | $tool.Args += $CrScanAdditionalRunConfigParams
81 | }
82 | 'policheck' {
83 | if ($targetDirectory) {
84 | $tool.Args += "`"Target < $TargetDirectory`""
85 | }
86 | $tool.Args += $PoliCheckAdditionalRunConfigParams
87 | }
88 | {$_ -in 'semmle', 'codeql'} {
89 | if ($targetDirectory) {
90 | $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`""
91 | }
92 | $tool.Args += $CodeQLAdditionalRunConfigParams
93 | }
94 | 'binskim' {
95 | if ($targetDirectory) {
96 | # Binskim crashes due to specific PDBs. GitHub issue: https://github.com/microsoft/binskim/issues/924.
97 | # We are excluding all `_.pdb` files from the scan.
98 | $tool.Args += "`"Target < $TargetDirectory\**;-:file|$TargetDirectory\**\_.pdb`""
99 | }
100 | $tool.Args += $BinskimAdditionalRunConfigParams
101 | }
102 | }
103 |
104 | # Create variable pointing to the args array directly so we can use splat syntax later.
105 | $toolArgs = $tool.Args
106 |
107 | # Configure the tool. If args array is provided or the current tool has some default arguments
108 | # defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}",
109 | # one per parameter. Doc page for "guardian configure":
110 | # https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure
111 | Exec-BlockVerbosely {
112 | & $GuardianCliLocation configure `
113 | --working-directory $WorkingDirectory `
114 | --tool $tool.Name `
115 | --output-path $gdnConfigFile `
116 | --logger-level $GuardianLoggerLevel `
117 | --noninteractive `
118 | --force `
119 | $(if ($toolArgs) { "--args" }) @toolArgs
120 | Exit-IfNZEC "Sdl"
121 | }
122 |
123 | Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile"
124 | }
125 | }
126 | catch {
127 | Write-Host $_.ScriptStackTrace
128 | Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
129 | ExitWithExitCode 1
130 | }
131 |
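
As a hedged sketch only (every path below is a placeholder): this script is normally driven by the other SDL scripts in this folder rather than run by hand, but passing a plain tool name exercises the string form of -ToolsList described in the parameter comments above.

    # Hypothetical manual invocation; Guardian CLI location and directories are placeholders.
    pwsh ./eng/common/sdl/configure-sdl-tool.ps1 \
        -GuardianCliLocation "$GUARDIAN_CLI" \
        -WorkingDirectory "$STAGING_DIR/sdl" \
        -TargetDirectory "$STAGING_DIR/artifacts" \
        -GdnFolder "$STAGING_DIR/sdl/.gdn" \
        -ToolsList credscan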
--------------------------------------------------------------------------------