├── icon.png
├── eng
├── common
│ ├── dotnet-install.cmd
│ ├── CIBuild.cmd
│ ├── sdl
│ │ ├── packages.config
│ │ ├── NuGet.config
│ │ ├── run-sdl.ps1
│ │ ├── extract-artifact-archives.ps1
│ │ ├── init-sdl.ps1
│ │ ├── extract-artifact-packages.ps1
│ │ └── configure-sdl-tool.ps1
│ ├── init-tools-native.cmd
│ ├── templates
│ │ ├── steps
│ │ │ ├── run-on-unix.yml
│ │ │ ├── run-on-windows.yml
│ │ │ ├── add-build-to-channel.yml
│ │ │ ├── build-reason.yml
│ │ │ ├── publish-logs.yml
│ │ │ ├── run-script-ifequalelse.yml
│ │ │ ├── retain-build.yml
│ │ │ ├── generate-sbom.yml
│ │ │ ├── execute-codeql.yml
│ │ │ ├── execute-sdl.yml
│ │ │ ├── telemetry-end.yml
│ │ │ └── source-build.yml
│ │ ├── variables
│ │ │ └── sdl-variables.yml
│ │ ├── post-build
│ │ │ ├── trigger-subscription.yml
│ │ │ ├── common-variables.yml
│ │ │ └── setup-maestro-vars.yml
│ │ ├── jobs
│ │ │ ├── codeql-build.yml
│ │ │ ├── source-build.yml
│ │ │ └── jobs.yml
│ │ └── job
│ │ │ ├── source-build.yml
│ │ │ ├── source-index-stage1.yml
│ │ │ ├── onelocbuild.yml
│ │ │ └── publish-build-assets.yml
│ ├── cross
│ │ ├── arm
│ │ │ ├── sources.list.jessie
│ │ │ ├── tizen
│ │ │ │ └── tizen.patch
│ │ │ ├── sources.list.zesty
│ │ │ ├── sources.list.xenial
│ │ │ ├── sources.list.bionic
│ │ │ ├── tizen-build-rootfs.sh
│ │ │ └── tizen-fetch.sh
│ │ ├── armel
│ │ │ ├── sources.list.jessie
│ │ │ ├── tizen
│ │ │ │ ├── tizen.patch
│ │ │ │ └── tizen-dotnet.ks
│ │ │ ├── tizen-build-rootfs.sh
│ │ │ ├── armel.jessie.patch
│ │ │ └── tizen-fetch.sh
│ │ ├── armv6
│ │ │ └── sources.list.buster
│ │ ├── x86
│ │ │ ├── tizen
│ │ │ │ └── tizen.patch
│ │ │ ├── sources.list.bionic
│ │ │ ├── sources.list.xenial
│ │ │ ├── tizen-build-rootfs.sh
│ │ │ └── tizen-fetch.sh
│ │ ├── arm64
│ │ │ ├── sources.list.buster
│ │ │ ├── tizen
│ │ │ │ └── tizen.patch
│ │ │ ├── sources.list.stretch
│ │ │ ├── sources.list.zesty
│ │ │ ├── sources.list.xenial
│ │ │ ├── sources.list.bionic
│ │ │ ├── tizen-build-rootfs.sh
│ │ │ └── tizen-fetch.sh
│ │ ├── s390x
│ │ │ └── sources.list.bionic
│ │ ├── ppc64le
│ │ │ └── sources.list.bionic
│ │ └── build-android-rootfs.sh
│ ├── internal
│ │ ├── Directory.Build.props
│ │ ├── NuGet.config
│ │ └── Tools.csproj
│ ├── PSScriptAnalyzerSettings.psd1
│ ├── cibuild.sh
│ ├── enable-cross-org-publishing.ps1
│ ├── msbuild.ps1
│ ├── generate-sbom-prep.ps1
│ ├── generate-sbom-prep.sh
│ ├── helixpublish.proj
│ ├── dotnet-install.ps1
│ ├── post-build
│ │ ├── nuget-validation.ps1
│ │ ├── add-build-to-channel.ps1
│ │ ├── check-channel-consistency.ps1
│ │ ├── publish-using-darc.ps1
│ │ ├── trigger-subscriptions.ps1
│ │ └── post-build-utils.ps1
│ ├── msbuild.sh
│ ├── README.md
│ ├── retain-build.ps1
│ ├── darc-init.ps1
│ ├── dotnet-install.sh
│ ├── darc-init.sh
│ ├── sdk-task.ps1
│ ├── native
│ │ ├── install-cmake.sh
│ │ ├── install-cmake-test.sh
│ │ ├── install-tool.ps1
│ │ └── common-library.sh
│ ├── pipeline-logging-functions.sh
│ ├── internal-feed-operations.sh
│ └── internal-feed-operations.ps1
├── Versions.props
└── Version.Details.xml
├── Build.cmd
├── global.json
├── Directory.Build.targets
├── PackageSourceMapper
├── Common
│ ├── Utils.cs
│ ├── Request.cs
│ ├── PackageData.cs
│ └── LocalizedResourceManager.cs
├── App.config
├── Program.cs
├── Logging
│ └── Logger.cs
├── NuGet.PackageSourceMapper.csproj
└── Print.cs
├── .github
└── workflows
│ └── CODEOWNERS
├── .config
├── TSAConfig.gdntsa
└── TSAOptions.json
├── nuget.config
├── Directory.Build.props
├── LICENSE.txt
├── PackageSourceMapper.sln
├── .gitattributes
├── README.md
├── .gitignore
└── azure-pipelines-official.yml
/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NuGet/PackageSourceMapper/HEAD/icon.png
--------------------------------------------------------------------------------
/eng/common/dotnet-install.cmd:
--------------------------------------------------------------------------------
1 | @echo off
2 | :: Thin wrapper: invokes dotnet-install.ps1 next to this script, forwarding all arguments.
3 | powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet-install.ps1""" %*"
4 | :: Explicitly propagate the PowerShell exit code to callers (consistent with
5 | :: init-tools-native.cmd and Build.cmd in this repo).
6 | exit /b %ErrorLevel%
--------------------------------------------------------------------------------
/Build.cmd:
--------------------------------------------------------------------------------
1 | @echo off
2 | :: Thin wrapper: restore + build + pack via eng\common\build.ps1, forwarding all
3 | :: extra arguments, then propagate the PowerShell exit code.
4 | powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0eng\common\build.ps1""" %*"
5 | exit /b %ErrorLevel%
6 | 
--------------------------------------------------------------------------------
/eng/common/CIBuild.cmd:
--------------------------------------------------------------------------------
1 | @echo off
2 | :: CI entry point: full pipeline (restore/build/test/sign/pack/publish) via Build.ps1.
3 | powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"
4 | :: Explicitly propagate the PowerShell exit code so CI fails when the build fails
5 | :: (consistent with init-tools-native.cmd and Build.cmd in this repo).
6 | exit /b %ErrorLevel%
--------------------------------------------------------------------------------
/eng/common/sdl/packages.config:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/eng/common/init-tools-native.cmd:
--------------------------------------------------------------------------------
1 | @echo off
2 | :: Thin wrapper: invokes init-tools-native.ps1 next to this script, forwarding all
3 | :: arguments, then propagates the PowerShell exit code.
4 | powershell -NoProfile -NoLogo -ExecutionPolicy ByPass -command "& """%~dp0init-tools-native.ps1""" %*"
5 | exit /b %ErrorLevel%
--------------------------------------------------------------------------------
/eng/common/templates/steps/run-on-unix.yml:
--------------------------------------------------------------------------------
1 | # run-on-unix.yml
2 | # Description: expands the wrapped steps only when agentOs is NOT 'Windows_NT'.
3 | parameters:
4 | agentOs: ''
5 | steps: []
6 | 
7 | steps:
8 | - ${{ if ne(parameters.agentOs, 'Windows_NT') }}:
9 | - ${{ parameters.steps }}
10 | 
--------------------------------------------------------------------------------
/eng/common/templates/steps/run-on-windows.yml:
--------------------------------------------------------------------------------
1 | # run-on-windows.yml
2 | # Description: expands the wrapped steps only when agentOs IS 'Windows_NT'.
3 | parameters:
4 | agentOs: ''
5 | steps: []
6 | 
7 | steps:
8 | - ${{ if eq(parameters.agentOs, 'Windows_NT') }}:
9 | - ${{ parameters.steps }}
10 | 
--------------------------------------------------------------------------------
/eng/common/cross/arm/sources.list.jessie:
--------------------------------------------------------------------------------
1 | # Debian (sid) # UNSTABLE
2 | deb http://ftp.debian.org/debian/ sid main contrib non-free
3 | deb-src http://ftp.debian.org/debian/ sid main contrib non-free
4 |
--------------------------------------------------------------------------------
/eng/common/cross/armel/sources.list.jessie:
--------------------------------------------------------------------------------
1 | # Debian (jessie) # Stable
2 | deb http://ftp.debian.org/debian/ jessie main contrib non-free
3 | deb-src http://ftp.debian.org/debian/ jessie main contrib non-free
4 |
--------------------------------------------------------------------------------
/eng/common/cross/armv6/sources.list.buster:
--------------------------------------------------------------------------------
1 | deb http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi
2 | deb-src http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi
3 |
--------------------------------------------------------------------------------
/global.json:
--------------------------------------------------------------------------------
1 | {
2 | "tools": {
3 | "dotnet": "6.0.100"
4 | },
5 | "msbuild-sdks": {
6 | "Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.22157.6",
7 | "Microsoft.DotNet.Helix.Sdk": "7.0.0-beta.22157.6"
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/Directory.Build.targets:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/PackageSourceMapper/Common/Utils.cs:
--------------------------------------------------------------------------------
1 | namespace NuGet.PackageSourceMapper
2 | {
3 |     // Process exit codes returned by the tool. Declared const (was mutable
4 |     // 'public static int') so the values cannot be reassigned at runtime;
5 |     // reads like ReturnCode.Ok are unaffected.
6 |     internal class ReturnCode
7 |     {
8 |         public const int Ok = 0;
9 |         public const int ArgumentError = 1;
10 |         public const int UnknownError = 2;
11 |     }
12 | }
--------------------------------------------------------------------------------
/eng/common/internal/Directory.Build.props:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/.github/workflows/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # These owners will be the default owners for everything in
2 | # the repo. Unless a later match takes precedence,
3 | # they will be requested for review when someone opens a pull request.
4 | # For more on how to customize the CODEOWNERS file - https://help.github.com/en/articles/about-code-owners
5 | * @NuGet/nuget-client
6 |
--------------------------------------------------------------------------------
/eng/common/internal/NuGet.config:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.config/TSAConfig.gdntsa:
--------------------------------------------------------------------------------
1 | {
2 | "codebaseName": "NuGet.PackageSourceMapper_dev",
3 | "instanceUrl": "https://devdiv.visualstudio.com",
4 | "projectName": "DevDiv",
5 | "areaPath": "DevDiv\\NuGet\\NuGet Clients",
6 | "iterationPath": "DevDiv",
7 | "tools": [
8 | "CredScan",
9 | "PoliCheck"
10 | ]
11 | }
--------------------------------------------------------------------------------
/PackageSourceMapper/App.config:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/eng/common/templates/variables/sdl-variables.yml:
--------------------------------------------------------------------------------
1 | variables:
2 | # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
3 | # sync with the packages.config file.
4 | - name: DefaultGuardianVersion
5 | value: 0.109.0
6 | - name: GuardianPackagesConfigFile
7 | value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
--------------------------------------------------------------------------------
/PackageSourceMapper/Common/Request.cs:
--------------------------------------------------------------------------------
1 | using NuGet.Configuration;
2 | 
3 | namespace NuGet.PackageSourceMapper
4 | {
5 |     // Options bag for a single mapping-generation run (consumer not visible in this chunk).
6 |     internal class Request
7 |     {
8 |         public string GlobalPackagesFolder { get; set; } // presumably the NuGet global packages folder to scan — confirm against caller
9 |         public ISettings Settings { get; set; } // loaded NuGet.Configuration settings (source of package sources)
10 |         public bool IdPatternOnlyOption { get; set; } // NOTE(review): looks like "emit id patterns only" — confirm semantics in handler
11 |         public bool RemoveUnusedSourcesOption { get; set; } // NOTE(review): looks like "drop sources with no mapped packages" — confirm
12 |     }
13 | }
14 | 
--------------------------------------------------------------------------------
/eng/common/sdl/NuGet.config:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/.config/TSAOptions.json:
--------------------------------------------------------------------------------
1 |
2 | {
3 | "tsaVersion": "TsaV2",
4 | "codebase": "NewOrUpdate",
5 | "codebaseName": "NuGet.PackageSourceMapper_dev",
6 | "tsaStamp": "DevDiv",
7 | "tsaEnvironment": "PROD",
8 | "instanceUrl": "https://devdiv.visualstudio.com",
9 | "projectName": "DevDiv",
10 | "areaPath": "DevDiv\\NuGet\\NuGet Clients",
11 | "iterationPath": "DevDiv",
12 | "allTools": true,
13 | "repositoryName": "NuGet.PackageSourceMapper"
14 | }
15 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/add-build-to-channel.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | ChannelId: 0
3 |
4 | steps:
5 | - task: PowerShell@2
6 | displayName: Add Build to Channel
7 | inputs:
8 | filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
9 | arguments: -BuildId $(BARBuildId)
10 | -ChannelId ${{ parameters.ChannelId }}
11 | -MaestroApiAccessToken $(MaestroApiAccessToken)
12 | -MaestroApiEndPoint $(MaestroApiEndPoint)
13 | -MaestroApiVersion $(MaestroApiVersion)
14 |
--------------------------------------------------------------------------------
/eng/common/cross/x86/tizen/tizen.patch:
--------------------------------------------------------------------------------
1 | diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
2 | --- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
3 | +++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
4 | @@ -2,4 +2,4 @@
5 | Use the shared library, but some functions are only in
6 | the static library, so try that secondarily. */
7 | OUTPUT_FORMAT(elf32-i386)
8 | -GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.2 ) )
9 | +GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.2 ) )
10 |
--------------------------------------------------------------------------------
/eng/common/templates/post-build/trigger-subscription.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | ChannelId: 0
3 |
4 | steps:
5 | - task: PowerShell@2
6 | displayName: Triggering subscriptions
7 | inputs:
8 | filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
9 | arguments: -SourceRepo $(Build.Repository.Uri)
10 | -ChannelId ${{ parameters.ChannelId }}
11 | -MaestroApiAccessToken $(MaestroAccessToken)
12 | -MaestroApiEndPoint $(MaestroApiEndPoint)
13 | -MaestroApiVersion $(MaestroApiVersion)
14 |
--------------------------------------------------------------------------------
/eng/common/cross/armel/tizen/tizen.patch:
--------------------------------------------------------------------------------
1 | diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
2 | --- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
3 | +++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
4 | @@ -2,4 +2,4 @@
5 | Use the shared library, but some functions are only in
6 | the static library, so try that secondarily. */
7 | OUTPUT_FORMAT(elf32-littlearm)
8 | -GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) )
9 | +GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) )
10 |
--------------------------------------------------------------------------------
/eng/common/PSScriptAnalyzerSettings.psd1:
--------------------------------------------------------------------------------
1 | @{
2 | IncludeRules=@('PSAvoidUsingCmdletAliases',
3 | 'PSAvoidUsingWMICmdlet',
4 | 'PSAvoidUsingPositionalParameters',
5 | 'PSAvoidUsingInvokeExpression',
6 | 'PSUseDeclaredVarsMoreThanAssignments',
7 | 'PSUseCmdletCorrectly',
8 | 'PSStandardDSCFunctionsInResource',
9 | 'PSUseIdenticalMandatoryParametersForDSC',
10 | 'PSUseIdenticalParametersForDSC')
11 | }
--------------------------------------------------------------------------------
/eng/common/cross/arm/tizen/tizen.patch:
--------------------------------------------------------------------------------
1 | diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
2 | --- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
3 | +++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
4 | @@ -2,4 +2,4 @@
5 | Use the shared library, but some functions are only in
6 | the static library, so try that secondarily. */
7 | OUTPUT_FORMAT(elf32-littlearm)
8 | -GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-armhf.so.3 ) )
9 | +GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-armhf.so.3 ) )
10 |
--------------------------------------------------------------------------------
/eng/common/cross/arm64/sources.list.buster:
--------------------------------------------------------------------------------
1 | deb http://deb.debian.org/debian buster main
2 | deb-src http://deb.debian.org/debian buster main
3 |
4 | deb http://deb.debian.org/debian-security/ buster/updates main
5 | deb-src http://deb.debian.org/debian-security/ buster/updates main
6 |
7 | deb http://deb.debian.org/debian buster-updates main
8 | deb-src http://deb.debian.org/debian buster-updates main
9 |
10 | deb http://deb.debian.org/debian buster-backports main contrib non-free
11 | deb-src http://deb.debian.org/debian buster-backports main contrib non-free
12 |
--------------------------------------------------------------------------------
/eng/common/cross/arm64/tizen/tizen.patch:
--------------------------------------------------------------------------------
1 | diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
2 | --- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900
3 | +++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900
4 | @@ -2,4 +2,4 @@
5 | Use the shared library, but some functions are only in
6 | the static library, so try that secondarily. */
7 | OUTPUT_FORMAT(elf64-littleaarch64)
8 | -GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-aarch64.so.1 ) )
9 | +GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) )
10 |
--------------------------------------------------------------------------------
/nuget.config:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/eng/common/cross/arm64/sources.list.stretch:
--------------------------------------------------------------------------------
1 | deb http://deb.debian.org/debian stretch main
2 | deb-src http://deb.debian.org/debian stretch main
3 |
4 | deb http://deb.debian.org/debian-security/ stretch/updates main
5 | deb-src http://deb.debian.org/debian-security/ stretch/updates main
6 |
7 | deb http://deb.debian.org/debian stretch-updates main
8 | deb-src http://deb.debian.org/debian stretch-updates main
9 |
10 | deb http://deb.debian.org/debian stretch-backports main contrib non-free
11 | deb-src http://deb.debian.org/debian stretch-backports main contrib non-free
12 |
13 |
--------------------------------------------------------------------------------
/PackageSourceMapper/Program.cs:
--------------------------------------------------------------------------------
1 | using System.CommandLine.Builder;
2 | using System.CommandLine.Parsing;
3 | using System.Threading.Tasks;
4 | 
5 | namespace NuGet.PackageSourceMapper
6 | {
7 |     internal partial class Program
8 |     {
9 |         public static Task Main(string[] args) // CLI entry point: build parser, dispatch args
10 |         {
11 |             var parser = new CommandLineBuilder()
12 |                 .AddCommand(GenerateCommandHandler.GenerateCommand()) // 'generate' command; handler defined elsewhere in the project
13 |                 .UseDefaults() // standard middleware: help, version, error reporting
14 |                 .Build();
15 |             return parser.InvokeAsync(args);
16 |         }
17 |     }
18 | }
19 | 
--------------------------------------------------------------------------------
/eng/common/cibuild.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | 
3 | # CI entry point: locate this script's real directory (following symlinks),
4 | # then hand off to the shared Arcade build driver.
5 | source="${BASH_SOURCE[0]}"
6 | 
7 | # resolve $source until the file is no longer a symlink
8 | while [[ -h $source ]]; do
9 |   scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
10 |   source="$(readlink "$source")"
11 | 
12 |   # if $source was a relative symlink, we need to resolve it relative to the
13 |   # path where the symlink file was located
14 |   [[ $source != /* ]] && source="$scriptroot/$source"
15 | done
16 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
17 | 
18 | # Fixed: forward "$@" quoted (was bare $@) so arguments containing spaces or
19 | # glob characters reach build.sh intact.
20 | . "$scriptroot/build.sh" --restore --build --test --pack --publish --ci "$@"
--------------------------------------------------------------------------------
/eng/common/templates/steps/build-reason.yml:
--------------------------------------------------------------------------------
1 | # build-reason.yml
2 | # Description: runs steps if build.reason condition is valid. conditions is a string of valid build reasons
3 | # to include steps (',' separated).
4 | parameters:
5 | conditions: ''
6 | steps: []
7 |
8 | steps:
9 | - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
10 | - ${{ parameters.steps }}
11 | - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
12 | - ${{ parameters.steps }}
13 |
--------------------------------------------------------------------------------
/eng/common/enable-cross-org-publishing.ps1:
--------------------------------------------------------------------------------
1 | param(
2 | [string] $token  # token published as VSS_NUGET_ACCESSTOKEN below (presumably a PAT valid for the listed feeds — confirm)
3 | )
4 | 
5 | 
6 | . $PSScriptRoot\pipeline-logging-functions.ps1
7 | 
8 | # Write-PipelineSetVariable will no-op if a variable named $ci is not defined
9 | # Since this script is only ever called in AzDO builds, just universally set it
10 | $ci = $true
11 | 
12 | Write-PipelineSetVariable -Name 'VSS_NUGET_ACCESSTOKEN' -Value $token -IsMultiJobVariable $false
13 | Write-PipelineSetVariable -Name 'VSS_NUGET_URI_PREFIXES' -Value 'https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/' -IsMultiJobVariable $false
14 | 
--------------------------------------------------------------------------------
/eng/common/cross/x86/sources.list.bionic:
--------------------------------------------------------------------------------
1 | deb http://archive.ubuntu.com/ubuntu/ bionic main restricted universe
2 | deb-src http://archive.ubuntu.com/ubuntu/ bionic main restricted universe
3 |
4 | deb http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe
5 | deb-src http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe
6 |
7 | deb http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted
8 | deb-src http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted
9 |
10 | deb http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse
11 | deb-src http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse
12 |
--------------------------------------------------------------------------------
/eng/common/cross/x86/sources.list.xenial:
--------------------------------------------------------------------------------
1 | deb http://archive.ubuntu.com/ubuntu/ xenial main restricted universe
2 | deb-src http://archive.ubuntu.com/ubuntu/ xenial main restricted universe
3 |
4 | deb http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe
5 | deb-src http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe
6 |
7 | deb http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted
8 | deb-src http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted
9 |
10 | deb http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse
11 | deb-src http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse
12 |
--------------------------------------------------------------------------------
/eng/common/msbuild.ps1:
--------------------------------------------------------------------------------
1 | [CmdletBinding(PositionalBinding=$false)]
2 | Param(
3 | [string] $verbosity = 'minimal',
4 | [bool] $warnAsError = $true,
5 | [bool] $nodeReuse = $true,
6 | [switch] $ci,
7 | [switch] $prepareMachine,
8 | [switch] $excludePrereleaseVS,
9 | [string] $msbuildEngine = $null,
10 | [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs
11 | )
12 | 
13 | . $PSScriptRoot\tools.ps1 # dot-sources shared helpers (MSBuild, ExitWithExitCode used below)
14 | 
15 | try {
16 | if ($ci) {
17 | $nodeReuse = $false # CI: never reuse MSBuild nodes
18 | }
19 | 
20 | MSBuild @extraArgs
21 | }
22 | catch {
23 | Write-Host $_.ScriptStackTrace
24 | Write-PipelineTelemetryError -Category 'Build' -Message $_
25 | ExitWithExitCode 1
26 | }
27 | 
28 | ExitWithExitCode 0
--------------------------------------------------------------------------------
/eng/common/cross/arm/sources.list.zesty:
--------------------------------------------------------------------------------
1 | deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
2 | deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
3 |
4 | deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
5 | deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
6 |
7 | deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
8 | deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
9 |
10 | deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
11 | deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
12 |
--------------------------------------------------------------------------------
/eng/common/cross/arm64/sources.list.zesty:
--------------------------------------------------------------------------------
1 | deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
2 | deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
3 |
4 | deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
5 | deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
6 |
7 | deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
8 | deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
9 |
10 | deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
11 | deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
12 |
--------------------------------------------------------------------------------
/eng/common/cross/arm/sources.list.xenial:
--------------------------------------------------------------------------------
1 | deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
2 | deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
3 |
4 | deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
5 | deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
6 |
7 | deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
8 | deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
9 |
10 | deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
11 | deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
--------------------------------------------------------------------------------
/eng/common/cross/arm64/sources.list.xenial:
--------------------------------------------------------------------------------
1 | deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
2 | deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
3 |
4 | deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
5 | deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
6 |
7 | deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
8 | deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
9 |
10 | deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
11 | deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
--------------------------------------------------------------------------------
/eng/common/cross/arm/sources.list.bionic:
--------------------------------------------------------------------------------
1 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
2 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
3 |
4 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
5 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
6 |
7 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
8 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
9 |
10 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
11 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
12 |
--------------------------------------------------------------------------------
/eng/common/cross/arm64/sources.list.bionic:
--------------------------------------------------------------------------------
1 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
2 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
3 |
4 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
5 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
6 |
7 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
8 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
9 |
10 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
11 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
12 |
--------------------------------------------------------------------------------
/eng/common/cross/s390x/sources.list.bionic:
--------------------------------------------------------------------------------
1 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
2 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
3 |
4 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
5 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
6 |
7 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
8 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
9 |
10 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
11 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
12 |
--------------------------------------------------------------------------------
/eng/common/cross/ppc64le/sources.list.bionic:
--------------------------------------------------------------------------------
1 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
2 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
3 |
4 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
5 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
6 |
7 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
8 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
9 |
10 | deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
11 | deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
12 |
--------------------------------------------------------------------------------
/eng/Versions.props:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | 1.1.0
6 | preview
7 |
8 | false
9 |
10 | 1.2.0-beta-22165-02
11 |
12 | $(MicrosoftSourceLinkGitHubVersion)
13 | 7.0.100-preview.2.22075.3
14 |
15 |
16 |
--------------------------------------------------------------------------------
/eng/common/generate-sbom-prep.ps1:
--------------------------------------------------------------------------------
1 | Param(
2 | [Parameter(Mandatory=$true)][string] $ManifestDirPath # Manifest directory where sbom will be placed
3 | )
4 |
5 | Write-Host "Creating dir $ManifestDirPath"
6 | # create directory for sbom manifest to be placed
7 | if (!(Test-Path -path $ManifestDirPath))
8 | {
9 | New-Item -ItemType Directory -path $ManifestDirPath
10 | Write-Host "Successfully created directory $ManifestDirPath"
11 | }
12 | else{
13 | Write-PipelineTelemetryError -category 'Build' "Sbom folder already exists: $ManifestDirPath"
14 | }
15 |
16 | Write-Host "Updating artifact name"
17 | $artifact_name = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM" -replace '["/:<>\\|?@*"() ]', '_'
18 | Write-Host "Artifact name $artifact_name"
19 | Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$artifact_name"
20 |
--------------------------------------------------------------------------------
/eng/common/generate-sbom-prep.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source="${BASH_SOURCE[0]}"
4 |
5 | manifest_dir=$1
6 |
7 | if [ ! -d "$manifest_dir" ] ; then
8 | mkdir -p "$manifest_dir"
9 | echo "Sbom directory created." $manifest_dir
10 | else
11 | echo "Sbom directory already exists: $manifest_dir"
12 | fi
13 |
14 | artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM"
15 | echo "Artifact name before : "$artifact_name
16 | # replace all special characters with _, some builds use special characters like : in Agent.Jobname, that is not a permissible name while uploading artifacts.
17 | safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}"
18 | echo "Artifact name after : "$safe_artifact_name
19 | export ARTIFACT_NAME=$safe_artifact_name
20 | echo "##vso[task.setvariable variable=ARTIFACT_NAME]$safe_artifact_name"
21 |
22 | exit 0
23 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/publish-logs.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | StageLabel: ''
3 | JobLabel: ''
4 |
5 | steps:
6 | - task: Powershell@2
7 | displayName: Prepare Binlogs to Upload
8 | inputs:
9 | targetType: inline
10 | script: |
11 | New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
12 | Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
13 | continueOnError: true
14 | condition: always()
15 |
16 | - task: PublishBuildArtifacts@1
17 | displayName: Publish Logs
18 | inputs:
19 | PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
20 | PublishLocation: Container
21 | ArtifactName: PostBuildLogs
22 | continueOnError: true
23 | condition: always()
24 |
--------------------------------------------------------------------------------
/eng/common/helixpublish.proj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | msbuild
5 |
6 |
7 |
8 |
9 | %(Identity)
10 |
11 |
12 |
13 |
14 |
15 | $(WorkItemDirectory)
16 | $(WorkItemCommand)
17 | $(WorkItemTimeout)
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/eng/common/dotnet-install.ps1:
--------------------------------------------------------------------------------
1 | [CmdletBinding(PositionalBinding=$false)]
2 | Param(
3 | [string] $verbosity = 'minimal',
4 | [string] $architecture = '',
5 | [string] $version = 'Latest',
6 | [string] $runtime = 'dotnet',
7 | [string] $RuntimeSourceFeed = '',
8 | [string] $RuntimeSourceFeedKey = ''
9 | )
10 |
11 | . $PSScriptRoot\tools.ps1
12 |
13 | $dotnetRoot = Join-Path $RepoRoot '.dotnet'
14 |
15 | $installdir = $dotnetRoot
16 | try {
17 | if ($architecture -and $architecture.Trim() -eq 'x86') {
18 | $installdir = Join-Path $installdir 'x86'
19 | }
20 | InstallDotNet $installdir $version $architecture $runtime $true -RuntimeSourceFeed $RuntimeSourceFeed -RuntimeSourceFeedKey $RuntimeSourceFeedKey
21 | }
22 | catch {
23 | Write-Host $_.ScriptStackTrace
24 | Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
25 | ExitWithExitCode 1
26 | }
27 |
28 | ExitWithExitCode 0
29 |
--------------------------------------------------------------------------------
/eng/common/templates/post-build/common-variables.yml:
--------------------------------------------------------------------------------
1 | variables:
2 | - group: AzureDevOps-Artifact-Feeds-Pats
3 | - group: DotNet-Blob-Feed
4 | - group: DotNet-DotNetCli-Storage
5 | - group: DotNet-MSRC-Storage
6 | - group: Publish-Build-Assets
7 |
8 | # Whether the build is internal or not
9 | - name: IsInternalBuild
10 | value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
11 |
12 | # Default Maestro++ API Endpoint and API Version
13 | - name: MaestroApiEndPoint
14 | value: "https://maestro-prod.westus2.cloudapp.azure.com"
15 | - name: MaestroApiAccessToken
16 | value: $(MaestroAccessToken)
17 | - name: MaestroApiVersion
18 | value: "2020-02-20"
19 |
20 | - name: SourceLinkCLIVersion
21 | value: 3.0.0
22 | - name: SymbolToolVersion
23 | value: 1.0.1
24 |
25 | - name: runCodesignValidationInjection
26 | value: false
27 |
--------------------------------------------------------------------------------
/PackageSourceMapper/Common/PackageData.cs:
--------------------------------------------------------------------------------
1 | using NuGet.Packaging.Core;
2 |
3 | namespace NuGet.PackageSourceMapper
4 | {
5 | internal static partial class GenerateCommandHandler
6 | {
7 | private class PackageData
8 | {
9 | public PackageIdentity PackageIdentity { get; set; }
10 | public string PackageContentHash { get; set; }
11 | public string PackageRemoteHash { get; set; }
12 | public string OriginalSource { get; set; }
13 |
14 | private PackageData()
15 | {
16 | }
17 |
18 | public PackageData(PackageIdentity packageIdentity, string packageContentHash, string packageRemoteHash, string originalSource)
19 | {
20 | PackageIdentity = packageIdentity;
21 | PackageContentHash = packageContentHash;
22 | PackageRemoteHash = packageRemoteHash;
23 | OriginalSource = originalSource;
24 | }
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/eng/common/cross/arm64/tizen-build-rootfs.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -e
3 |
4 | __CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
5 | __TIZEN_CROSSDIR="$__CrossDir/tizen"
6 |
7 | if [[ -z "$ROOTFS_DIR" ]]; then
8 | echo "ROOTFS_DIR is not defined."
9 | exit 1;
10 | fi
11 |
12 | TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
13 | mkdir -p $TIZEN_TMP_DIR
14 |
15 | # Download files
16 | echo ">>Start downloading files"
17 | VERBOSE=1 $__CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
18 | echo "<>Start constructing Tizen rootfs"
21 | TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
22 | cd $ROOTFS_DIR
23 | for f in $TIZEN_RPM_FILES; do
24 | rpm2cpio $f | cpio -idm --quiet
25 | done
26 | echo "<>Start configuring Tizen rootfs"
33 | ln -sfn asm-arm64 ./usr/include/asm
34 | patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
35 | echo "<>Start downloading files"
17 | VERBOSE=1 $__X86_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
18 | echo "<>Start constructing Tizen rootfs"
21 | TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
22 | cd $ROOTFS_DIR
23 | for f in $TIZEN_RPM_FILES; do
24 | rpm2cpio $f | cpio -idm --quiet
25 | done
26 | echo "<>Start configuring Tizen rootfs"
33 | ln -sfn asm-x86 ./usr/include/asm
34 | patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
35 | echo "<>Start downloading files"
17 | VERBOSE=1 $__ARM_HARDFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
18 | echo "<>Start constructing Tizen rootfs"
21 | TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
22 | cd $ROOTFS_DIR
23 | for f in $TIZEN_RPM_FILES; do
24 | rpm2cpio $f | cpio -idm --quiet
25 | done
26 | echo "<>Start configuring Tizen rootfs"
33 | ln -sfn asm-arm ./usr/include/asm
34 | patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
35 | echo "<>Start downloading files"
17 | VERBOSE=1 $__ARM_SOFTFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
18 | echo "<>Start constructing Tizen rootfs"
21 | TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
22 | cd $ROOTFS_DIR
23 | for f in $TIZEN_RPM_FILES; do
24 | rpm2cpio $f | cpio -idm --quiet
25 | done
26 | echo "<>Start configuring Tizen rootfs"
33 | ln -sfn asm-arm ./usr/include/asm
34 | patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
35 | echo "<
2 |
3 |
4 |
5 |
6 | $(CopyrightNetFoundation)
7 | True
8 | embedded
9 | true
10 | Latest
11 | true
12 |
15 | true
16 |
17 |
18 |
19 |
24 | $(MicrosoftCodeAnalysisCSharpVersion)
25 |
26 |
27 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) .NET Foundation and Contributors
4 |
5 | All rights reserved.
6 |
7 | Permission is hereby granted, free of charge, to any person obtaining a copy
8 | of this software and associated documentation files (the "Software"), to deal
9 | in the Software without restriction, including without limitation the rights
10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | copies of the Software, and to permit persons to whom the Software is
12 | furnished to do so, subject to the following conditions:
13 |
14 | The above copyright notice and this permission notice shall be included in all
15 | copies or substantial portions of the Software.
16 |
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
23 | SOFTWARE.
--------------------------------------------------------------------------------
/PackageSourceMapper.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio Version 17
4 | VisualStudioVersion = 17.0.31710.8
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NuGet.PackageSourceMapper", "PackageSourceMapper\NuGet.PackageSourceMapper.csproj", "{51F4617B-CF30-4125-BD9B-C7A65EC66BA3}"
7 | EndProject
8 | Global
9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
10 | Debug|Any CPU = Debug|Any CPU
11 | Release|Any CPU = Release|Any CPU
12 | EndGlobalSection
13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
14 | {51F4617B-CF30-4125-BD9B-C7A65EC66BA3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
15 | {51F4617B-CF30-4125-BD9B-C7A65EC66BA3}.Debug|Any CPU.Build.0 = Debug|Any CPU
16 | {51F4617B-CF30-4125-BD9B-C7A65EC66BA3}.Release|Any CPU.ActiveCfg = Release|Any CPU
17 | {51F4617B-CF30-4125-BD9B-C7A65EC66BA3}.Release|Any CPU.Build.0 = Release|Any CPU
18 | EndGlobalSection
19 | GlobalSection(SolutionProperties) = preSolution
20 | HideSolutionNode = FALSE
21 | EndGlobalSection
22 | GlobalSection(ExtensibilityGlobals) = postSolution
23 | SolutionGuid = {C6B13A97-0BA0-46CE-ABAF-8A536952BE71}
24 | EndGlobalSection
25 | EndGlobal
26 |
--------------------------------------------------------------------------------
/PackageSourceMapper/Common/LocalizedResourceManager.cs:
--------------------------------------------------------------------------------
1 | using System.Globalization;
2 | using System.Resources;
3 | using System.Threading;
4 |
5 | namespace NuGet.PackageSourceMapper.Common
6 | {
7 | internal static class LocalizedResourceManager
8 | {
9 | private static readonly ResourceManager _resourceManager = new ResourceManager("NuGet.PackageSourceMapper.Properties.Resources", typeof(LocalizedResourceManager).Assembly);
10 |
11 | public static string GetString(string resourceName)
12 | {
13 | var culture = GetLanguageName();
14 | return _resourceManager.GetString(resourceName + '_' + culture, CultureInfo.InvariantCulture) ??
15 | _resourceManager.GetString(resourceName, CultureInfo.InvariantCulture);
16 | }
17 | public static string GetLanguageName()
18 | {
19 | var culture = Thread.CurrentThread.CurrentUICulture;
20 | while (!culture.IsNeutralCulture)
21 | {
22 | if (culture.Parent == culture)
23 | {
24 | break;
25 | }
26 |
27 | culture = culture.Parent;
28 | }
29 |
30 | return culture.ThreeLetterWindowsLanguageName.ToLowerInvariant();
31 | }
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/eng/common/templates/jobs/codeql-build.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
3 | continueOnError: false
4 | # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
5 | jobs: []
6 | # Optional: if specified, restore and use this version of Guardian instead of the default.
7 | overrideGuardianVersion: ''
8 |
9 | jobs:
10 | - template: /eng/common/templates/jobs/jobs.yml
11 | parameters:
12 | enableMicrobuild: false
13 | enablePublishBuildArtifacts: false
14 | enablePublishTestResults: false
15 | enablePublishBuildAssets: false
16 | enablePublishUsingPipelines: false
17 | enableTelemetry: true
18 |
19 | variables:
20 | - group: Publish-Build-Assets
21 | # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
22 | # sync with the packages.config file.
23 | - name: DefaultGuardianVersion
24 | value: 0.109.0
25 | - name: GuardianPackagesConfigFile
26 | value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
27 | - name: GuardianVersion
28 | value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
29 |
30 | jobs: ${{ parameters.jobs }}
31 |
32 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/retain-build.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | # Optional azure devops PAT with build execute permissions for the build's organization,
3 | # only needed if the build that should be retained ran on a different organization than
4 | # the pipeline where this template is executing from
5 | Token: ''
6 | # Optional BuildId to retain, defaults to the current running build
7 | BuildId: ''
8 | # Azure devops Organization URI for the build in the https://dev.azure.com/ format.
9 | # Defaults to the organization the current pipeline is running on
10 | AzdoOrgUri: '$(System.CollectionUri)'
11 | # Azure devops project for the build. Defaults to the project the current pipeline is running on
12 | AzdoProject: '$(System.TeamProject)'
13 |
14 | steps:
15 | - task: powershell@2
16 | inputs:
17 | targetType: 'filePath'
18 | filePath: eng/common/retain-build.ps1
19 | pwsh: true
20 | arguments: >
21 | -AzdoOrgUri ${{parameters.AzdoOrgUri}}
22 | -AzdoProject ${{parameters.AzdoProject}}
23 | -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
24 | -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
25 | displayName: Enable permanent build retention
26 | env:
27 | SYSTEM_ACCESSTOKEN: $(System.AccessToken)
28 | BUILD_ID: $(Build.BuildId)
--------------------------------------------------------------------------------
/eng/common/msbuild.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | source="${BASH_SOURCE[0]}"
4 |
5 | # resolve $source until the file is no longer a symlink
6 | while [[ -h "$source" ]]; do
7 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
8 | source="$(readlink "$source")"
9 | # if $source was a relative symlink, we need to resolve it relative to the path where the
10 | # symlink file was located
11 | [[ $source != /* ]] && source="$scriptroot/$source"
12 | done
13 | scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
14 |
15 | verbosity='minimal'
16 | warn_as_error=true
17 | node_reuse=true
18 | prepare_machine=false
19 | extra_args=''
20 |
21 | while (($# > 0)); do
22 | lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
23 | case $lowerI in
24 | --verbosity)
25 | verbosity=$2
26 | shift 2
27 | ;;
28 | --warnaserror)
29 | warn_as_error=$2
30 | shift 2
31 | ;;
32 | --nodereuse)
33 | node_reuse=$2
34 | shift 2
35 | ;;
36 | --ci)
37 | ci=true
38 | shift 1
39 | ;;
40 | --preparemachine)
41 | prepare_machine=true
42 | shift 1
43 | ;;
44 | *)
45 | extra_args="$extra_args $1"
46 | shift 1
47 | ;;
48 | esac
49 | done
50 |
51 | . "$scriptroot/tools.sh"
52 |
53 | if [[ "$ci" == true ]]; then
54 | node_reuse=false
55 | fi
56 |
57 | MSBuild $extra_args
58 | ExitWithExitCode 0
59 |
--------------------------------------------------------------------------------
/eng/common/README.md:
--------------------------------------------------------------------------------
1 | # Don't touch this folder
2 |
3 | uuuuuuuuuuuuuuuuuuuu
4 | u" uuuuuuuuuuuuuuuuuu "u
5 | u" u$$$$$$$$$$$$$$$$$$$$u "u
6 | u" u$$$$$$$$$$$$$$$$$$$$$$$$u "u
7 | u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
8 | u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
9 | u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
10 | $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
11 | $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
12 | $ $$$" ... "$... ...$" ... "$$$ ... "$$$ $
13 | $ $$$u `"$$$$$$$ $$$ $$$$$ $$ $$$ $$$ $
14 | $ $$$$$$uu "$$$$ $$$ $$$$$ $$ """ u$$$ $
15 | $ $$$""$$$ $$$$ $$$u "$$$" u$$ $$$$$$$$ $
16 | $ $$$$....,$$$$$..$$$$$....,$$$$..$$$$$$$$ $
17 | $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
18 | "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
19 | "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
20 | "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
21 | "u "$$$$$$$$$$$$$$$$$$$$$$$$" u"
22 | "u "$$$$$$$$$$$$$$$$$$$$" u"
23 | "u """""""""""""""""" u"
24 | """"""""""""""""""""
25 |
26 | !!! Changes made in this directory are subject to being overwritten by automation !!!
27 |
28 | The files in this directory are shared by all Arcade repos and managed by automation. If you need to make changes to these files, open an issue or submit a pull request to https://github.com/dotnet/arcade first.
29 |
--------------------------------------------------------------------------------
/eng/common/cross/armel/armel.jessie.patch:
--------------------------------------------------------------------------------
1 | diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
2 | --- a/usr/include/urcu/uatomic/generic.h 2014-10-22 15:00:58.000000000 -0700
3 | +++ b/usr/include/urcu/uatomic/generic.h 2020-10-30 21:38:28.550000000 -0700
4 | @@ -69,10 +69,10 @@
5 | #endif
6 | #ifdef UATOMIC_HAS_ATOMIC_SHORT
7 | case 2:
8 | - return __sync_val_compare_and_swap_2(addr, old, _new);
9 | + return __sync_val_compare_and_swap_2((uint16_t*) addr, old, _new);
10 | #endif
11 | case 4:
12 | - return __sync_val_compare_and_swap_4(addr, old, _new);
13 | + return __sync_val_compare_and_swap_4((uint32_t*) addr, old, _new);
14 | #if (CAA_BITS_PER_LONG == 64)
15 | case 8:
16 | return __sync_val_compare_and_swap_8(addr, old, _new);
17 | @@ -109,7 +109,7 @@
18 | return;
19 | #endif
20 | case 4:
21 | - __sync_and_and_fetch_4(addr, val);
22 | + __sync_and_and_fetch_4((uint32_t*) addr, val);
23 | return;
24 | #if (CAA_BITS_PER_LONG == 64)
25 | case 8:
26 | @@ -148,7 +148,7 @@
27 | return;
28 | #endif
29 | case 4:
30 | - __sync_or_and_fetch_4(addr, val);
31 | + __sync_or_and_fetch_4((uint32_t*) addr, val);
32 | return;
33 | #if (CAA_BITS_PER_LONG == 64)
34 | case 8:
35 | @@ -187,7 +187,7 @@
36 | return __sync_add_and_fetch_2(addr, val);
37 | #endif
38 | case 4:
39 | - return __sync_add_and_fetch_4(addr, val);
40 | + return __sync_add_and_fetch_4((uint32_t*) addr, val);
41 | #if (CAA_BITS_PER_LONG == 64)
42 | case 8:
43 | return __sync_add_and_fetch_8(addr, val);
44 |
--------------------------------------------------------------------------------
/eng/common/retain-build.ps1:
--------------------------------------------------------------------------------
1 |
2 | Param(
3 | [Parameter(Mandatory=$true)][int] $buildId,
4 | [Parameter(Mandatory=$true)][string] $azdoOrgUri,
5 | [Parameter(Mandatory=$true)][string] $azdoProject,
6 | [Parameter(Mandatory=$true)][string] $token
7 | )
8 |
9 | $ErrorActionPreference = 'Stop'
10 | Set-StrictMode -Version 2.0
11 |
12 | function Get-AzDOHeaders(
13 | [string] $token)
14 | {
15 | $base64AuthInfo = [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes(":${token}"))
16 | $headers = @{"Authorization"="Basic $base64AuthInfo"}
17 | return $headers
18 | }
19 |
20 | function Update-BuildRetention(
21 | [string] $azdoOrgUri,
22 | [string] $azdoProject,
23 | [int] $buildId,
24 | [string] $token)
25 | {
26 | $headers = Get-AzDOHeaders -token $token
27 | $requestBody = "{
28 | `"keepForever`": `"true`"
29 | }"
30 |
31 | $requestUri = "${azdoOrgUri}/${azdoProject}/_apis/build/builds/${buildId}?api-version=6.0"
32 | write-Host "Attempting to retain build using the following URI: ${requestUri} ..."
33 |
34 | try {
35 | Invoke-RestMethod -Uri $requestUri -Method Patch -Body $requestBody -Header $headers -contentType "application/json"
36 | Write-Host "Updated retention settings for build ${buildId}."
37 | }
38 | catch {
39 | Write-Error "Failed to update retention settings for build: $($_.Exception.Response.StatusDescription)"
40 | exit 1
41 | }
42 | }
43 |
44 | Update-BuildRetention -azdoOrgUri $azdoOrgUri -azdoProject $azdoProject -buildId $buildId -token $token
45 | exit 0
46 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
4 | .gitattributes text
5 | .gitignore text
6 |
7 | project.json text
8 |
9 | *.doc diff=astextplain
10 | *.DOC diff=astextplain
11 | *.docx diff=astextplain
12 | *.DOCX diff=astextplain
13 | *.dot diff=astextplain
14 | *.DOT diff=astextplain
15 | *.pdf diff=astextplain
16 | *.PDF diff=astextplain
17 | *.rtf diff=astextplain
18 | *.RTF diff=astextplain
19 |
20 | *.asm text
21 | *.c text
22 | *.config text
23 | *.cpp text
24 | *.clj text
25 | *.cs text diff=csharp
26 | *.css text
27 | *.cxx text
28 | *.erl text
29 | *.fs text
30 | *.fsx text
31 | *.h text
32 | *.hs text
33 | *.htm text diff=html
34 | *.html text diff=html
35 | *.hxx text
36 | *.java text
37 | *.js text
38 | *.json text
39 | *.less text
40 | *.lisp text
41 | *.lua text
42 | *.m text
43 | *.md text
44 | *.php text
45 | *.ps1 text
46 | *.py text
47 | *.rb text
48 | *.resx text
49 | *.sass text
50 | *.scss text
51 | *.sql text
52 | *.vb text
53 |
54 | # Unix artifacts
55 | *.sh text eol=lf
56 |
57 | # Visual Studio artifacts
58 | *.csproj text eol=crlf
59 | *.dbproj text eol=crlf
60 | *.filters text eol=crlf
61 | *.fsproj text eol=crlf
62 | *.props text eol=crlf
63 | *.sln text eol=crlf
64 | *.targets text eol=crlf
65 | *.vbproj text eol=crlf
66 | *.vcxitems text eol=crlf
67 | *.vcxproj text eol=crlf
68 |
69 | # Denote all files that are truly binary and should not be modified.
70 | *.jpg binary
71 | *.png binary
72 | *.gif binary
73 | *.zip binary
74 |
--------------------------------------------------------------------------------
/eng/common/sdl/run-sdl.ps1:
--------------------------------------------------------------------------------
1 | Param(
2 | [string] $GuardianCliLocation,
3 | [string] $WorkingDirectory,
4 | [string] $GdnFolder,
5 | [string] $UpdateBaseline,
6 | [string] $GuardianLoggerLevel='Standard'
7 | )
8 |
9 | $ErrorActionPreference = 'Stop'
10 | Set-StrictMode -Version 2.0
11 | $disableConfigureToolsetImport = $true
12 | $global:LASTEXITCODE = 0
13 |
14 | try {
15 | # `tools.ps1` checks $ci to perform some actions. Since the SDL
16 | # scripts don't necessarily execute on the same agent that ran the
17 | # build.ps1/sh script, this variable isn't automatically set.
18 | $ci = $true
19 | . $PSScriptRoot\..\tools.ps1
20 |
21 | # We store config files in the r directory of .gdn
22 | $gdnConfigPath = Join-Path $GdnFolder 'r'
23 | $ValidPath = Test-Path $GuardianCliLocation
24 |
25 | if ($ValidPath -eq $False)
26 | {
27 | Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
28 | ExitWithExitCode 1
29 | }
30 |
31 | $gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig'
32 | Write-Host "Discovered Guardian config files:"
33 | $gdnConfigFiles | Out-String | Write-Host
34 |
35 | Exec-BlockVerbosely {
36 | & $GuardianCliLocation run `
37 | --working-directory $WorkingDirectory `
38 | --baseline mainbaseline `
39 | --update-baseline $UpdateBaseline `
40 | --logger-level $GuardianLoggerLevel `
41 | --config @gdnConfigFiles
42 | Exit-IfNZEC "Sdl"
43 | }
44 | }
45 | catch {
46 | Write-Host $_.ScriptStackTrace
47 | Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
48 | ExitWithExitCode 1
49 | }
50 |
--------------------------------------------------------------------------------
/eng/common/internal/Tools.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | net472
5 | false
6 | false
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 | https://devdiv.pkgs.visualstudio.com/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json;
18 |
19 |
20 | $(RestoreSources);
21 | https://devdiv.pkgs.visualstudio.com/_packaging/VS/nuget/v3/index.json;
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/eng/common/post-build/add-build-to-channel.ps1:
--------------------------------------------------------------------------------
1 | param(
2 | [Parameter(Mandatory=$true)][int] $BuildId,
3 | [Parameter(Mandatory=$true)][int] $ChannelId,
4 | [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
5 | [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
6 | [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
7 | )
8 |
9 | try {
10 | . $PSScriptRoot\post-build-utils.ps1
11 |
12 | # Check that the channel we are going to promote the build to exists
13 | $channelInfo = Get-MaestroChannel -ChannelId $ChannelId
14 |
15 | if (!$channelInfo) {
16 | Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Channel with BAR ID $ChannelId was not found in BAR!"
17 | ExitWithExitCode 1
18 | }
19 |
20 | # Get info about which channel(s) the build has already been promoted to
21 | $buildInfo = Get-MaestroBuild -BuildId $BuildId
22 |
23 | if (!$buildInfo) {
24 | Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Build with BAR ID $BuildId was not found in BAR!"
25 | ExitWithExitCode 1
26 | }
27 |
28 | # Find whether the build is already assigned to the channel or not
29 | if ($buildInfo.channels) {
30 | foreach ($channel in $buildInfo.channels) {
31 | if ($channel.Id -eq $ChannelId) {
32 | Write-Host "The build with BAR ID $BuildId is already on channel $ChannelId!"
33 | ExitWithExitCode 0
34 | }
35 | }
36 | }
37 |
38 | Write-Host "Promoting build '$BuildId' to channel '$ChannelId'."
39 |
40 | Assign-BuildToChannel -BuildId $BuildId -ChannelId $ChannelId
41 |
42 | Write-Host 'done.'
43 | }
44 | catch {
45 | Write-Host $_
46 | Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to promote build '$BuildId' to channel '$ChannelId'"
47 | ExitWithExitCode 1
48 | }
49 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/generate-sbom.yml:
--------------------------------------------------------------------------------
1 | # BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
2 | # PackageName - The name of the package this SBOM represents.
3 | # PackageVersion - The version of the package this SBOM represents.
4 | # ManifestDirPath - The path of the directory where the generated manifest files will be placed
5 |
6 | parameters:
7 | PackageVersion: 7.0.0
8 | BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
9 | PackageName: '.NET'
10 | ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
11 | sbomContinueOnError: true
12 |
13 | steps:
14 | - task: PowerShell@2
15 | displayName: Prep for SBOM generation in (Non-linux)
16 | condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
17 | inputs:
18 | filePath: ./eng/common/generate-sbom-prep.ps1
19 | arguments: ${{parameters.manifestDirPath}}
20 |
21 | # Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
22 | - script: |
23 | chmod +x ./eng/common/generate-sbom-prep.sh
24 | ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
25 | displayName: Prep for SBOM generation in (Linux)
26 | condition: eq(variables['Agent.Os'], 'Linux')
27 | continueOnError: ${{ parameters.sbomContinueOnError }}
28 |
29 | - task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
30 | displayName: 'Generate SBOM manifest'
31 | continueOnError: ${{ parameters.sbomContinueOnError }}
32 | inputs:
33 | PackageName: ${{ parameters.packageName }}
34 | BuildDropPath: ${{ parameters.buildDropPath }}
35 | PackageVersion: ${{ parameters.packageVersion }}
36 | ManifestDirPath: ${{ parameters.manifestDirPath }}
37 |
38 | - task: PublishPipelineArtifact@1
39 | displayName: Publish SBOM manifest
40 | continueOnError: ${{parameters.sbomContinueOnError}}
41 | inputs:
42 | targetPath: '${{parameters.manifestDirPath}}'
43 | artifactName: $(ARTIFACT_NAME)
44 |
45 |
--------------------------------------------------------------------------------
/eng/common/post-build/check-channel-consistency.ps1:
--------------------------------------------------------------------------------
param(
  [Parameter(Mandatory=$true)][string] $PromoteToChannels, # List of channels that the build should be promoted to
  [Parameter(Mandatory=$true)][array] $AvailableChannelIds # List of channel IDs available in the YAML implementation
)

try {
  . $PSScriptRoot\post-build-utils.ps1

  # No channel configured at all: warn and exit successfully (publishing is simply skipped).
  if ($PromoteToChannels -eq "") {
    Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
    ExitWithExitCode 0
  }

  # Split the channel list on runs of non-digits, keeping only the numeric IDs,
  # then check that every channel Maestro wants to promote the build to is
  # present in the post-build YAML configuration.
  $requestedChannelIds = $PromoteToChannels -split "\D" | Where-Object { $_ }

  $errorCount = 0

  foreach ($id in $requestedChannelIds) {
    if (($id -ne 0) -and ($id -notin $AvailableChannelIds)) {
      Write-PipelineTaskError -Message "Channel $id is not present in the post-build YAML configuration! This is an error scenario. Please contact @dnceng."
      $errorCount++
    }
  }

  # `Write-PipelineTaskError` doesn't terminate the script and several errors may
  # have been reported above, so surface a failing exit code here if any were seen.
  if ($errorCount -gt 0) {
    ExitWithExitCode 1
  }

  Write-Host 'done.'
}
catch {
  Write-Host $_
  Write-PipelineTelemetryError -Category 'CheckChannelConsistency' -Message "There was an error while trying to check consistency of Maestro default channels for the build and post-build YAML configuration."
  ExitWithExitCode 1
}
41 |
--------------------------------------------------------------------------------
/eng/common/darc-init.ps1:
--------------------------------------------------------------------------------
param (
  $darcVersion = $null,
  $versionEndpoint = 'https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16',
  $verbosity = 'minimal',
  $toolpath = $null
)

. $PSScriptRoot\tools.ps1

# Installs the darc CLI as a dotnet global tool (or into $toolpath when supplied),
# uninstalling any pre-existing global copy first so the requested version wins.
function InstallDarcCli ($darcVersion, $toolpath) {
  $darcCliPackageName = 'microsoft.dotnet.darc'

  $dotnetRoot = InitializeDotNetCli -install:$true
  $dotnet = "$dotnetRoot\dotnet.exe"

  # Remove any existing global install before (re)installing.
  $installedTools = & "$dotnet" tool list -g
  if ($installedTools -like "*$darcCliPackageName*") {
    & "$dotnet" tool uninstall $darcCliPackageName -g
  }

  # If the user didn't explicitly specify the darc version,
  # query the Maestro API for the correct version of darc to install.
  if (-not $darcVersion) {
    $darcVersion = $(Invoke-WebRequest -Uri $versionEndpoint -UseBasicParsing).Content
  }

  $arcadeServicesSource = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'

  Write-Host "Installing Darc CLI version $darcVersion..."
  Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
  if ($toolpath) {
    Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity --tool-path '$toolpath'"
    & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath"
  }
  else {
    Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity -g"
    & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g
  }
}

try {
  InstallDarcCli $darcVersion $toolpath
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Category 'Darc' -Message $_
  ExitWithExitCode 1
}
--------------------------------------------------------------------------------
/eng/common/post-build/publish-using-darc.ps1:
--------------------------------------------------------------------------------
param(
  [Parameter(Mandatory=$true)][int] $BuildId,
  [Parameter(Mandatory=$true)][int] $PublishingInfraVersion,
  [Parameter(Mandatory=$true)][string] $AzdoToken,
  [Parameter(Mandatory=$true)][string] $MaestroToken,
  [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
  [Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
  [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
  [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters
)

try {
  . $PSScriptRoot\post-build-utils.ps1

  $darc = Get-Darc

  # Accumulate the optional darc switches based on which parameters were supplied.
  # ($null = ... discards ArrayList.Add's returned index.)
  $extraArgs = [System.Collections.ArrayList]::new()

  if ($ArtifactsPublishingAdditionalParameters -ne "") {
    $null = $extraArgs.Add("--artifact-publishing-parameters")
    $null = $extraArgs.Add($ArtifactsPublishingAdditionalParameters)
  }

  if ($SymbolPublishingAdditionalParameters -ne "") {
    $null = $extraArgs.Add("--symbol-publishing-parameters")
    $null = $extraArgs.Add($SymbolPublishingAdditionalParameters)
  }

  if ($WaitPublishingFinish -eq "false") {
    $null = $extraArgs.Add("--no-wait")
  }

  # Promote the build to its default channels via darc; @extraArgs splats the
  # optional switches collected above.
  & $darc add-build-to-channel `
    --id $BuildId `
    --publishing-infra-version $PublishingInfraVersion `
    --default-channels `
    --source-branch main `
    --azdev-pat $AzdoToken `
    --bar-uri $MaestroApiEndPoint `
    --password $MaestroToken `
    @extraArgs

  if ($LastExitCode -ne 0) {
    Write-Host "Problems using Darc to promote build ${buildId} to default channels. Stopping execution..."
    exit 1
  }

  Write-Host 'done.'
}
catch {
  Write-Host $_
  Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels."
  ExitWithExitCode 1
}
55 |
--------------------------------------------------------------------------------
/PackageSourceMapper/Logging/Logger.cs:
--------------------------------------------------------------------------------
using NuGet.Common;
using System;
using System.Threading.Tasks;

namespace PackageSourceMapper.Logging
{
    /// <summary>
    /// Console-backed implementation of NuGet's <see cref="ILogger"/> that filters
    /// messages by a configurable verbosity level.
    /// </summary>
    internal class Logger : ILogger
    {
        /// <summary>Minimum level a message must have to be written to the console.</summary>
        public LogLevel VerbosityLevel { get; set; } = LogLevel.Debug;

        public void Log(LogLevel level, string data)
        {
            if (DisplayMessage(level))
            {
                Console.WriteLine(data);
            }
        }

        /// <summary>
        /// True if the message meets the verbosity level.
        /// </summary>
        public virtual bool DisplayMessage(LogLevel messageLevel)
        {
            return (messageLevel >= VerbosityLevel);
        }

        public virtual void LogDebug(string data)
        {
            Log(LogLevel.Debug, data);
        }

        public virtual void LogError(string data)
        {
            Log(LogLevel.Error, data);
        }

        public virtual void LogInformation(string data)
        {
            Log(LogLevel.Information, data);
        }

        // Summary output is treated the same as regular informational output.
        public virtual void LogInformationSummary(string data)
        {
            Log(LogLevel.Information, data);
        }

        public virtual void LogMinimal(string data)
        {
            Log(LogLevel.Minimal, data);
        }

        public virtual void LogVerbose(string data)
        {
            Log(LogLevel.Verbose, data);
        }

        public virtual void LogWarning(string data)
        {
            Log(LogLevel.Warning, data);
        }

        // The async and ILogMessage-based members previously threw
        // NotImplementedException, which would crash any NuGet API that logs
        // through those overloads. Delegate to the synchronous path instead.
        public Task LogAsync(LogLevel level, string data)
        {
            Log(level, data);
            return Task.CompletedTask;
        }

        public void Log(ILogMessage message)
        {
            Log(message.Level, message.Message);
        }

        public Task LogAsync(ILogMessage message)
        {
            Log(message);
            return Task.CompletedTask;
        }
    }
}
78 |
--------------------------------------------------------------------------------
/eng/common/templates/jobs/source-build.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | # This template adds arcade-powered source-build to CI. A job is created for each platform, as
3 | # well as an optional server job that completes when all platform jobs complete.
4 |
  # The name of the "join" job for all source-build platforms. If set to empty string, the job is
  # not included. Existing repo pipelines can use this job to depend on all source-build jobs
  # completing, without maintaining a separate list of every single job ID: just depend on this one
  # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
9 | allCompletedJobId: ''
10 |
11 | # See /eng/common/templates/job/source-build.yml
12 | jobNamePrefix: 'Source_Build'
13 |
14 | # This is the default platform provided by Arcade, intended for use by a managed-only repo.
15 | defaultManagedPlatform:
16 | name: 'Managed'
17 | container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-3e800f1-20190501005343'
18 |
19 | # Defines the platforms on which to run build jobs. One job is created for each platform, and the
20 | # object in this array is sent to the job template as 'platform'. If no platforms are specified,
21 | # one job runs on 'defaultManagedPlatform'.
22 | platforms: []
23 |
24 | jobs:
25 |
26 | - ${{ if ne(parameters.allCompletedJobId, '') }}:
27 | - job: ${{ parameters.allCompletedJobId }}
28 | displayName: Source-Build Complete
29 | pool: server
30 | dependsOn:
31 | - ${{ each platform in parameters.platforms }}:
32 | - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
33 | - ${{ if eq(length(parameters.platforms), 0) }}:
34 | - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
35 |
36 | - ${{ each platform in parameters.platforms }}:
37 | - template: /eng/common/templates/job/source-build.yml
38 | parameters:
39 | jobNamePrefix: ${{ parameters.jobNamePrefix }}
40 | platform: ${{ platform }}
41 |
42 | - ${{ if eq(length(parameters.platforms), 0) }}:
43 | - template: /eng/common/templates/job/source-build.yml
44 | parameters:
45 | jobNamePrefix: ${{ parameters.jobNamePrefix }}
46 | platform: ${{ parameters.defaultManagedPlatform }}
47 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/execute-codeql.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | # Language that should be analyzed. Defaults to csharp
3 | language: csharp
4 | # Build Commands
5 | buildCommands: ''
6 | overrideParameters: '' # Optional: to override values for parameters.
7 | additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
8 | # Optional: if specified, restore and use this version of Guardian instead of the default.
9 | overrideGuardianVersion: ''
10 | # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
11 | # diagnosis of problems with specific tool configurations.
12 | publishGuardianDirectoryToPipeline: false
13 | # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
14 | # parameters rather than relying on YAML. It may be better to use a local script, because you can
15 | # reproduce results locally without piecing together a command based on the YAML.
16 | executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
17 | # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
18 | # 'continueOnError', the parameter value is not correctly picked up.
19 | # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
20 | # optional: determines whether to continue the build if the step errors;
21 | sdlContinueOnError: false
22 |
23 | steps:
24 | - template: /eng/common/templates/steps/execute-sdl.yml
25 | parameters:
26 | overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
27 | executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
28 | overrideParameters: ${{ parameters.overrideParameters }}
29 | additionalParameters: '${{ parameters.additionalParameters }}
30 | -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")'
31 | publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
32 | sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
--------------------------------------------------------------------------------
/eng/common/dotnet-install.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Installs the .NET toolset for this repo via Arcade's InstallDotNet helper
# (defined in tools.sh). Options (case-insensitive):
#   -version/-v, -architecture/-a, -runtime/-r,
#   -runtimesourcefeed, -runtimesourcefeedkey

source="${BASH_SOURCE[0]}"
# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"
  # if $source was a relative symlink, we need to resolve it relative to the path where the
  # symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. "$scriptroot/tools.sh"

version='Latest'
architecture=''
runtime='dotnet'
runtimeSourceFeed=''
runtimeSourceFeedKey=''
# NOTE: '-gt' (numeric) replaces '>' which, inside [[ ]], is a *lexical* string
# comparison and only coincidentally worked for small argument counts.
while [[ $# -gt 0 ]]; do
  opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
  case "$opt" in
    -version|-v)
      shift
      version="$1"
      ;;
    -architecture|-a)
      shift
      architecture="$1"
      ;;
    -runtime|-r)
      shift
      runtime="$1"
      ;;
    -runtimesourcefeed)
      shift
      runtimeSourceFeed="$1"
      ;;
    -runtimesourcefeedkey)
      shift
      runtimeSourceFeedKey="$1"
      ;;
    *)
      Write-PipelineTelemetryError -Category 'Build' -Message "Invalid argument: $1"
      exit 1
      ;;
  esac
  shift
done

# Use uname to determine what the CPU is, see https://en.wikipedia.org/wiki/Uname#Examples
cpuname=$(uname -m)
case $cpuname in
  aarch64)
    buildarch=arm64
    ;;
  loongarch64)
    buildarch=loongarch64
    ;;
  amd64|x86_64)
    buildarch=x64
    ;;
  armv*l)
    buildarch=arm
    ;;
  i686)
    buildarch=x86
    ;;
  *)
    echo "Unknown CPU $cpuname detected, treating it as x64"
    buildarch=x64
    ;;
esac

# Cross-architecture installs go into an architecture-specific subdirectory so
# they don't clobber the native install. ($buildarch is quoted so it's compared
# literally, not as a glob pattern.)
dotnetRoot="${repo_root}.dotnet"
if [[ $architecture != "" ]] && [[ $architecture != "$buildarch" ]]; then
  dotnetRoot="$dotnetRoot/$architecture"
fi

InstallDotNet "$dotnetRoot" "$version" "$architecture" "$runtime" true "$runtimeSourceFeed" "$runtimeSourceFeedKey" || {
  # BUGFIX: this was 'local exit_code=$?'. 'local' is only valid inside a
  # function; at top level bash errors out ("local: can only be used in a
  # function") and the real exit code was lost.
  exit_code=$?
  Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2
  ExitWithExitCode $exit_code
}

ExitWithExitCode 0
88 |
--------------------------------------------------------------------------------
/PackageSourceMapper/NuGet.PackageSourceMapper.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | net6.0
6 | 9.0
7 |
8 |
9 |
10 | true
11 | packagesourcemapper
12 | Package source mapping onboarding tool
13 | Package Source Mapping;Audit;NuGet Config
14 | Microsoft
15 | ./nupkg
16 | Readme.md
17 | © Microsoft Corporation. All rights reserved.
18 | https://github.com/NuGet/PackageSourceMapper
19 | MIT
20 | images/icon.png
21 | True
22 | true
23 | true
24 | true
25 | NuGet.PackageSourceMapper
26 |
27 |
28 |
29 |
30 | True
31 | \
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 | true
43 |
44 |
45 |
46 |
47 | True
48 | True
49 | Resources.resx
50 |
51 |
52 |
53 |
54 |
55 | ResXFileCodeGenerator
56 | Resources.Designer.cs
57 | Never
58 |
59 |
60 |
61 |
--------------------------------------------------------------------------------
/eng/common/sdl/extract-artifact-archives.ps1:
--------------------------------------------------------------------------------
# This script looks for each archive file in a directory and extracts it into the target directory.
# For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**".
# Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip.
param(
  # Full path to directory where archives are stored.
  [Parameter(Mandatory=$true)][string] $InputPath,
  # Full path to directory to extract archives into. May be the same as $InputPath.
  [Parameter(Mandatory=$true)][string] $ExtractPath
)

$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0

# Tell tools.ps1 not to import the repo's configure-toolset customizations.
$disableConfigureToolsetImport = $true

try {
  # `tools.ps1` checks $ci to perform some actions. Since the SDL
  # scripts don't necessarily execute in the same agent that run the
  # build.ps1/sh script this variable isn't automatically set.
  $ci = $true
  . $PSScriptRoot\..\tools.ps1

  # Time the whole extraction for diagnostics. Each archive is extracted in its
  # own background job so the archives are processed in parallel.
  Measure-Command {
    $jobs = @()

    # Find archive files for non-Windows and Windows builds.
    $archiveFiles = @(
      Get-ChildItem (Join-Path $InputPath "*.tar.gz")
      Get-ChildItem (Join-Path $InputPath "*.zip")
    )

    foreach ($targzFile in $archiveFiles) {
      $jobs += Start-Job -ScriptBlock {
        # $using: copies the outer variables into the job's scope by value.
        $file = $using:targzFile
        $fileName = [System.IO.Path]::GetFileName($file)
        $extractDir = Join-Path $using:ExtractPath "$fileName.extracted"

        New-Item $extractDir -ItemType Directory -Force | Out-Null

        Write-Host "Extracting '$file' to '$extractDir'..."

        # Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early.
        # This type of quit skips the catch, so we wouldn't be able to tell which file triggered the
        # error. Save output so it can be stored in the exception string along with context.
        $output = tar -xf $file -C $extractDir 2>&1
        # Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we
        # don't have access to the outer scope.
        if ($LASTEXITCODE -ne 0) {
          throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'"
        }

        Write-Host "Extracted to $extractDir"
      }
    }

    # Wait for every job and relay its output; a throw inside a job surfaces
    # here and is handled by the outer catch below.
    Receive-Job $jobs -Wait
  }
}
catch {
  Write-Host $_
  Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
  ExitWithExitCode 1
}
64 |
--------------------------------------------------------------------------------
/eng/common/post-build/trigger-subscriptions.ps1:
--------------------------------------------------------------------------------
param(
  [Parameter(Mandatory=$true)][string] $SourceRepo,
  [Parameter(Mandatory=$true)][int] $ChannelId,
  [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
  [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
  [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
)

try {
  . $PSScriptRoot\post-build-utils.ps1

  # Get all the $SourceRepo subscriptions
  $normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '')
  $subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId

  if (!$subscriptions) {
    Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'"
    ExitWithExitCode 0
  }

  $subscriptionsToTrigger = New-Object System.Collections.Generic.List[string]
  $failedTriggeredSubscription = $false

  # Get all enabled subscriptions that need dependency flow on 'everyBuild'
  foreach ($subscription in $subscriptions) {
    if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) {
      # BUGFIX: this string used "${$subscription.id}", which PowerShell reads as a
      # variable literally named '$subscription.id' (nonexistent, rendered empty).
      # "$($subscription.id)" is the correct subexpression interpolation.
      Write-Host "Should trigger this subscription: $($subscription.id)"
      [void]$subscriptionsToTrigger.Add($subscription.id)
    }
  }

  # Trigger each matching subscription, recording (but not aborting on) per-item failures.
  foreach ($subscriptionToTrigger in $subscriptionsToTrigger) {
    try {
      Write-Host "Triggering subscription '$subscriptionToTrigger'."

      Trigger-Subscription -SubscriptionId $subscriptionToTrigger

      Write-Host 'done.'
    }
    catch
    {
      Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'"
      Write-Host $_
      Write-Host $_.ScriptStackTrace
      $failedTriggeredSubscription = $true
    }
  }

  if ($subscriptionsToTrigger.Count -eq 0) {
    Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'."
  }
  elseif ($failedTriggeredSubscription) {
    Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message 'At least one subscription failed to be triggered...'
    ExitWithExitCode 1
  }
  else {
    Write-Host 'All subscriptions were triggered successfully!'
  }
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message $_
  ExitWithExitCode 1
}
65 |
--------------------------------------------------------------------------------
/eng/common/sdl/init-sdl.ps1:
--------------------------------------------------------------------------------
Param(
  [string] $GuardianCliLocation,          # Path to the Guardian CLI executable
  [string] $Repository,                   # Repository whose .gdn configuration is addressed below
  [string] $BranchName='master',          # Branch of the sdl-tool-cfg repo to read config from
  [string] $WorkingDirectory,             # Directory in which Guardian is initialized
  [string] $AzureDevOpsAccessToken,       # PAT used to build the basic-auth header below
  [string] $GuardianLoggerLevel='Standard'
)

$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
# Tell tools.ps1 not to import the repo's configure-toolset customizations.
$disableConfigureToolsetImport = $true
$global:LASTEXITCODE = 0

# `tools.ps1` checks $ci to perform some actions. Since the SDL
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
. $PSScriptRoot\..\tools.ps1

# Don't display the console progress UI - it's a huge perf hit
$ProgressPreference = 'SilentlyContinue'

# Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file
# NOTE(review): $encodedPat, $uri, $zipFile and $gdnFolder are computed below but never
# used in the visible code — presumably leftovers from a removed "download existing
# .gdn config" path. Confirm before deleting.
$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken"))
$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn")
$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0"
$zipFile = "$WorkingDirectory/gdn.zip"

Add-Type -AssemblyName System.IO.Compression.FileSystem
$gdnFolder = (Join-Path $WorkingDirectory '.gdn')

try {
  # if the folder does not exist, we'll do a guardian init and push it to the remote repository
  Write-Host 'Initializing Guardian...'
  Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel"
  & $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel
  if ($LASTEXITCODE -ne 0) {
    Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian init failed with exit code $LASTEXITCODE."
    ExitWithExitCode $LASTEXITCODE
  }
  # We create the mainbaseline so it can be edited later
  Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline"
  & $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline
  if ($LASTEXITCODE -ne 0) {
    Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE."
    ExitWithExitCode $LASTEXITCODE
  }
  ExitWithExitCode 0
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
  ExitWithExitCode 1
}
--------------------------------------------------------------------------------
/eng/common/darc-init.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

source="${BASH_SOURCE[0]}"
darcVersion=''
versionEndpoint='https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16'
verbosity='minimal'

# Print usage to stderr.
# BUGFIX: 'usage' was invoked for unknown arguments below but never defined,
# producing "usage: command not found" instead of help text.
usage() {
  echo "Usage: $0 [--darcversion <version>] [--versionendpoint <url>] [--verbosity <level>] [--toolpath <path>]" >&2
}

# Options are matched case-insensitively. '-gt' replaces '>' which, inside
# [[ ]], performs lexical string comparison rather than numeric comparison.
while [[ $# -gt 0 ]]; do
  opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
  case "$opt" in
    --darcversion)
      darcVersion=$2
      shift
      ;;
    --versionendpoint)
      versionEndpoint=$2
      shift
      ;;
    --verbosity)
      verbosity=$2
      shift
      ;;
    --toolpath)
      toolpath=$2
      shift
      ;;
    *)
      echo "Invalid argument: $1"
      usage
      exit 1
      ;;
  esac

  shift
done

# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"
  # if $source was a relative symlink, we need to resolve it relative to the path where the
  # symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. "$scriptroot/tools.sh"

# If no explicit darc version was requested, ask the Maestro API which to use.
if [ -z "$darcVersion" ]; then
  darcVersion=$(curl -X GET "$versionEndpoint" -H "accept: text/plain")
fi
52 |
# Installs the darc CLI as a dotnet global tool, or into $toolpath when set,
# uninstalling any existing copy first so the requested $darcVersion wins.
# BUGFIX/idiom: commands are now invoked directly instead of via `echo $(...)`,
# which word-split/mangled the tools' output and discarded their exit codes.
function InstallDarcCli {
  local darc_cli_package_name="microsoft.dotnet.darc"

  InitializeDotNetCli true
  local dotnet_root=$_InitializeDotNetCli

  local tool_list
  if [ -z "$toolpath" ]; then
    tool_list=$("$dotnet_root/dotnet" tool list -g)
    if [[ $tool_list = *$darc_cli_package_name* ]]; then
      "$dotnet_root/dotnet" tool uninstall $darc_cli_package_name -g
    fi
  else
    tool_list=$("$dotnet_root/dotnet" tool list --tool-path "$toolpath")
    if [[ $tool_list = *$darc_cli_package_name* ]]; then
      "$dotnet_root/dotnet" tool uninstall $darc_cli_package_name --tool-path "$toolpath"
    fi
  fi

  local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json"

  echo "Installing Darc CLI version $darcVersion..."
  echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
  if [ -z "$toolpath" ]; then
    "$dotnet_root/dotnet" tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g
  else
    "$dotnet_root/dotnet" tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath"
  fi
}

InstallDarcCli
83 |
--------------------------------------------------------------------------------
/eng/common/sdl/extract-artifact-packages.ps1:
--------------------------------------------------------------------------------
param(
  [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where artifact packages are stored
  [Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted
)

$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0

# Tell tools.ps1 not to import the repo's configure-toolset customizations.
$disableConfigureToolsetImport = $true

# Starts one background job per .nupkg found in $InputPath, using the
# $ExtractPackage scriptblock defined inside the try-block below, then waits
# for every job and relays its output.
function ExtractArtifacts {
  if (!(Test-Path $InputPath)) {
    Write-Host "Input Path does not exist: $InputPath"
    ExitWithExitCode 0
  }
  $Jobs = @()
  Get-ChildItem "$InputPath\*.nupkg" |
  ForEach-Object {
    $Jobs += Start-Job -ScriptBlock $ExtractPackage -ArgumentList $_.FullName
  }

  foreach ($Job in $Jobs) {
    Wait-Job -Id $Job.Id | Receive-Job
  }
}

try {
  # `tools.ps1` checks $ci to perform some actions. Since the SDL
  # scripts don't necessarily execute in the same agent that run the
  # build.ps1/sh script this variable isn't automatically set.
  $ci = $true
  . $PSScriptRoot\..\tools.ps1

  # Scriptblock run inside each background job; receives the package path as its
  # only argument. $using:ExtractPath copies the outer value into the job scope.
  $ExtractPackage = {
    param(
      [string] $PackagePath # Full path to a NuGet package
    )

    if (!(Test-Path $PackagePath)) {
      Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath"
      ExitWithExitCode 1
    }

    # Only binaries and symbols are of interest to the SDL tools.
    $RelevantExtensions = @('.dll', '.exe', '.pdb')
    Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...'

    $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
    $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId

    Add-Type -AssemblyName System.IO.Compression.FileSystem

    [System.IO.Directory]::CreateDirectory($ExtractPath);

    try {
      $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)

      # Flatten matching entries directly into $ExtractPath (the nupkg's internal
      # folder structure is not preserved), overwriting any existing file.
      $zip.Entries |
      Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
      ForEach-Object {
        $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.Name

        [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
      }
    }
    catch {
      Write-Host $_
      Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
      ExitWithExitCode 1
    }
    finally {
      $zip.Dispose()
    }
  }
  # Time the overall extraction for build diagnostics.
  Measure-Command { ExtractArtifacts }
}
catch {
  Write-Host $_
  Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
  ExitWithExitCode 1
}
81 |
--------------------------------------------------------------------------------
/eng/common/templates/job/source-build.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | # This template adds arcade-powered source-build to CI. The template produces a server job with a
3 | # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
4 |
5 | # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
6 | jobNamePrefix: 'Source_Build'
7 |
8 | # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
9 | # managed-only repositories. This is an object with these properties:
10 | #
11 | # name: ''
12 | # The name of the job. This is included in the job ID.
13 | # targetRID: ''
14 | # The name of the target RID to use, instead of the one auto-detected by Arcade.
15 | # nonPortable: false
16 | # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
17 | # linux-x64), and compiling against distro-provided packages rather than portable ones.
18 | # skipPublishValidation: false
19 | # Disables publishing validation. By default, a check is performed to ensure no packages are
20 | # published by source-build.
21 | # container: ''
22 | # A container to use. Runs in docker.
23 | # pool: {}
24 | # A pool to use. Runs directly on an agent.
25 | # buildScript: ''
26 | # Specifies the build script to invoke to perform the build in the repo. The default
27 | # './build.sh' should work for typical Arcade repositories, but this is customizable for
28 | # difficult situations.
29 | # jobProperties: {}
30 | # A list of job properties to inject at the top level, for potential extensibility beyond
31 | # container and pool.
32 | platform: {}
33 |
34 | jobs:
35 | - job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
36 | displayName: Source-Build (${{ parameters.platform.name }})
37 |
38 | ${{ each property in parameters.platform.jobProperties }}:
39 | ${{ property.key }}: ${{ property.value }}
40 |
41 | ${{ if ne(parameters.platform.container, '') }}:
42 | container: ${{ parameters.platform.container }}
43 |
44 | ${{ if eq(parameters.platform.pool, '') }}:
45 | # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
46 | # source-build builds run in Docker, including the default managed platform.
47 | pool:
48 | ${{ if eq(variables['System.TeamProject'], 'public') }}:
49 | name: NetCore1ESPool-Public
50 | demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open
51 | ${{ if eq(variables['System.TeamProject'], 'internal') }}:
52 | name: NetCore1ESPool-Internal
53 | demands: ImageOverride -equals Build.Ubuntu.1804.Amd64
54 | ${{ if ne(parameters.platform.pool, '') }}:
55 | pool: ${{ parameters.platform.pool }}
56 |
57 | workspace:
58 | clean: all
59 |
60 | steps:
61 | - template: /eng/common/templates/steps/source-build.yml
62 | parameters:
63 | platform: ${{ parameters.platform }}
64 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/execute-sdl.yml:
--------------------------------------------------------------------------------
# Steps template that installs the Guardian CLI via NuGet and runs the
# configured SDL tools script, optionally publishing the resulting .gdn
# directory as a pipeline artifact for diagnosis.
parameters:
  # Overrides the Guardian version pinned in eng/common/sdl/packages.config.
  overrideGuardianVersion: ''
  # Path to the script that executes all SDL tools (supplied by the caller).
  executeAllSdlToolsScript: ''
  # When non-empty, replaces the default argument set passed to the script.
  overrideParameters: ''
  # Extra arguments appended to the default argument set.
  additionalParameters: ''
  # When not 'false', publishes the .gdn directory as a pipeline artifact.
  publishGuardianDirectoryToPipeline: false
  # Passed through to the SDL execution steps' continueOnError.
  sdlContinueOnError: false
  # Optional condition applied to the SDL execution steps.
  condition: ''

steps:
- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
  # Rewrite packages.config so the NuGet restore below picks up the overridden
  # Guardian version instead of the default one.
  - powershell: |
      $content = Get-Content $(GuardianPackagesConfigFile)

      Write-Host "packages.config content was:`n$content"

      $content = $content.Replace('$(DefaultGuardianVersion)', '$(GuardianVersion)')
      $content | Set-Content $(GuardianPackagesConfigFile)

      Write-Host "packages.config content updated to:`n$content"
    displayName: Use overridden Guardian version ${{ parameters.overrideGuardianVersion }}

- task: NuGetToolInstaller@1
  displayName: 'Install NuGet.exe'

# Restores the Guardian CLI package listed in eng/common/sdl/packages.config.
- task: NuGetCommand@2
  displayName: 'Install Guardian'
  inputs:
    restoreSolution: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
    feedsToUse: config
    nugetConfigPath: $(Build.SourcesDirectory)\eng\common\sdl\NuGet.config
    externalFeedCredentials: GuardianConnect
    restoreDirectory: $(Build.SourcesDirectory)\.packages

# Caller supplied a full replacement argument list: run the script with it as-is.
- ${{ if ne(parameters.overrideParameters, '') }}:
  - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
    displayName: Execute SDL
    continueOnError: ${{ parameters.sdlContinueOnError }}
    condition: ${{ parameters.condition }}

# Default invocation: standard arguments plus any additionalParameters.
- ${{ if eq(parameters.overrideParameters, '') }}:
  - powershell: ${{ parameters.executeAllSdlToolsScript }}
      -GuardianPackageName Microsoft.Guardian.Cli.$(GuardianVersion)
      -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
      -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
      ${{ parameters.additionalParameters }}
    displayName: Execute SDL
    continueOnError: ${{ parameters.sdlContinueOnError }}
    condition: ${{ parameters.condition }}

- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}:
  # We want to publish the Guardian results and configuration for easy diagnosis. However, the
  # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default
  # tooling files. Some of these files are large and aren't useful during an investigation, so
  # exclude them by simply deleting them before publishing. (As of writing, there is no documented
  # way to selectively exclude a dir from the pipeline artifact publish task.)
  - task: DeleteFiles@1
    displayName: Delete Guardian dependencies to avoid uploading
    inputs:
      SourceFolder: $(Agent.BuildDirectory)/.gdn
      Contents: |
        c
        i
    condition: succeededOrFailed()
  # Publish even on failure so a broken SDL run can still be investigated.
  - publish: $(Agent.BuildDirectory)/.gdn
    artifact: GuardianConfiguration
    displayName: Publish GuardianConfiguration
    condition: succeededOrFailed()
--------------------------------------------------------------------------------
/eng/common/templates/post-build/setup-maestro-vars.yml:
--------------------------------------------------------------------------------
# Steps template that populates the pipeline variables BARBuildId,
# TargetChannels, IsStableBuild, AzDOProjectName, AzDOPipelineId and
# AzDOBuildId — either from the build's own ReleaseConfigs artifact or from
# the Maestro build record when explicit channel IDs are passed in.
parameters:
  BARBuildId: ''
  PromoteToChannelIds: ''

steps:
  # No explicit channel IDs: the ReleaseConfigs artifact produced by the build
  # itself is the source of truth, so download it first.
  - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
    - task: DownloadBuildArtifacts@0
      displayName: Download Release Configs
      inputs:
        buildType: current
        artifactName: ReleaseConfigs
        checkDownloadedFiles: true

  - task: PowerShell@2
    name: setReleaseVars
    displayName: Set Release Configs Vars
    inputs:
      targetType: inline
      pwsh: true
      script: |
        try {
          if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
            # ReleaseConfigs.txt layout (written by the build): line 1 = BAR
            # build id, line 2 = channel list, line 3 = stable-build flag.
            $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt

            $BarId = $Content | Select -Index 0
            $Channels = $Content | Select -Index 1
            $IsStableBuild = $Content | Select -Index 2

            $AzureDevOpsProject = $Env:System_TeamProject
            $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
            $AzureDevOpsBuildId = $Env:Build_BuildId
          }
          else {
            # Explicit promotion: look the build up in Maestro instead.
            $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"

            $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
            $apiHeaders.Add('Accept', 'application/json')
            $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")

            # NOTE(review): a failed request only logs and leaves $buildInfo
            # null; the property reads below then produce empty variables.
            $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }

            $BarId = $Env:BARBuildId
            # Re-format the comma-separated channel list as "[id1][id2]...".
            $Channels = $Env:PromoteToMaestroChannels -split ","
            $Channels = $Channels -join "]["
            $Channels = "[$Channels]"

            $IsStableBuild = $buildInfo.stable
            $AzureDevOpsProject = $buildInfo.azureDevOpsProject
            $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
            $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
          }

          Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
          Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
          Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"

          Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
          Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
          Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
        }
        catch {
          Write-Host $_
          Write-Host $_.Exception
          Write-Host $_.ScriptStackTrace
          exit 1
        }
    env:
      MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
      BARBuildId: ${{ parameters.BARBuildId }}
      PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
--------------------------------------------------------------------------------
/eng/common/templates/job/source-index-stage1.yml:
--------------------------------------------------------------------------------
# Job template that builds the repository, converts the resulting MSBuild
# binlog into an indexable solution, and (for internal non-PR builds) uploads
# the output to the source index service.
parameters:
  runAsPublic: false
  # Version and feed of the BinLogToSln / UploadIndexStage1 dotnet tools.
  sourceIndexPackageVersion: 1.0.1-20210614.1
  sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
  # Command that produces the binlog consumed by BinLogToSln below.
  sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
  preSteps: []
  binlogPath: artifacts/log/Debug/Build.binlog
  condition: ''
  dependsOn: ''

jobs:
- job: SourceIndexStage1
  dependsOn: ${{ parameters.dependsOn }}
  condition: ${{ parameters.condition }}
  variables:
  - name: SourceIndexPackageVersion
    value: ${{ parameters.sourceIndexPackageVersion }}
  - name: SourceIndexPackageSource
    value: ${{ parameters.sourceIndexPackageSource }}
  - name: BinlogPath
    value: ${{ parameters.binlogPath }}
  # Upload credentials are only pulled in for internal, non-PR builds.
  - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
    - group: source-dot-net stage1 variables

  pool:
    ${{ if eq(variables['System.TeamProject'], 'public') }}:
      name: NetCore1ESPool-Public
      demands: ImageOverride -equals Build.Server.Amd64.VS2019.Open
    ${{ if eq(variables['System.TeamProject'], 'internal') }}:
      name: NetCore1ESPool-Internal
      demands: ImageOverride -equals Build.Server.Amd64.VS2019
  steps:
  # Caller-provided steps that must run before the build (e.g. checkout tweaks).
  - ${{ each preStep in parameters.preSteps }}:
    - ${{ preStep }}

  - task: UseDotNet@2
    displayName: Use .NET Core sdk 3.1
    inputs:
      packageType: sdk
      version: 3.1.x
      installationPath: $(Agent.TempDirectory)/dotnet
      workingDirectory: $(Agent.TempDirectory)

  - script: |
      $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
      $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
    displayName: Download Tools
    # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
    workingDirectory: $(Agent.TempDirectory)

  - script: ${{ parameters.sourceIndexBuildCommand }}
    displayName: Build Repository

  - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
    displayName: Process Binlog into indexable sln

  # Only internal non-PR builds have BLOB_CONTAINER_URL (from the variable
  # group above), so the upload step is gated the same way.
  - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
    - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
      displayName: Upload stage1 artifacts to source index
      env:
        BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # NuGet Package Source Mapper tool
2 |
3 | ## Description
4 |
5 | This tool helps onboarding to [package source mapping](https://devblogs.microsoft.com/nuget/introducing-package-source-mapping) feature.
6 | It can generate package source mapping section for you from nuget.config file and restored `global packages folder`.
7 | Here are the steps to use the tool. It works for both PackageReference and packages.config type projects. Please note the tool doesn't map packages in the fallback folder since they don't get copied to the `global packages folder`.
8 |
9 | 1. Declare a new [global packages folder for your solution](https://docs.microsoft.com/en-us/nuget/reference/nuget-config-file#config-section) in nuget.config file.
10 |
11 | ```xml
12 |
13 |
14 |
15 | ```
16 |
17 | 1. Preparation
18 |
19 | * Do solution restore
20 | * If you have any restore/build script then please run before running this tool. It applies to any test or any other sub solutions. If you happen to have packages restored in different folder due to sub project settings then please copy them to above `global packages folder`.
21 |
22 | 1. Run this tool with options suitable for you. See examples below.
23 |
24 | 1. Copy the generated `nugetPackageSourceMapping.config` file content into your nuget.config file. Please make any adjustments most suitable for your use case.
25 |
26 | 1. Clear all local cache one more time to start on clean slate `dotnet nuget locals all --clear`
27 |
28 | 1. Repeat restore step above and make sure everything still works.
29 |
30 | ## Synopsis:
31 |
32 | ```dotnetcli
33 | packagesourcemapper generate <CONFIGPATH> [-h|--help] [--verbosity <LEVEL>] [--fully-specified]
34 | ```
35 |
36 | ### Commands
37 |
38 | If no command is specified, the command will default to `help`.
39 |
40 | #### `generate`
41 | Generates packageSourceMapping section for nuget.config file.
42 |
43 | ### Arguments:
44 |
45 | #### `CONFIGPATH`
46 |
47 | Specify path to `nuget.config` used for packagesourcemapper. This is positional argument so just value after `generate` command.
48 |
49 | ### Options:
50 |
51 | #### `-h|--help`
52 |
53 | Show help information
54 |
55 | #### `--verbosity <LEVEL>`
56 |
57 | Sets the verbosity level of the command. Allowed values are `q[uiet]`, `m[inimal]`, `n[ormal]`, `d[etailed]`, and `diag[nostic]`. The default is `minimal`. For more information, see [LoggerVerbosity](https://docs.microsoft.com/en-us/dotnet/api/microsoft.build.framework.loggerverbosity?view=msbuild-16-netcore).
58 |
59 | #### `--fully-specified`
60 |
61 | Specify this option to generate fully specified package names instead of prefix patterns. Currently only packages starting with `Microsoft, System, Runtime, Xunit` are prefixed by default.
62 |
63 | #### `--remove-unused-sources`
64 |
65 | Specify this option if the packagesourcemapper should attempt to reduce the number of sources used in nuget.config by consolidating them.
66 |
67 | ### Examples
68 |
69 | Generate packageSourceMapping section:
70 |
71 | `PackageSourceMapper.exe generate C:\NuGetProj\NuGet.Client\NuGet.Config`
72 |
73 | `PackageSourceMapper.exe generate C:\NuGetProj\NuGet.Client\NuGet.Config --verbosity diag`
74 |
75 | Generate packageSourceMapping section without any prefixing:
76 |
77 | `PackageSourceMapper.exe generate C:\NuGetProj\NuGet.Client\NuGet.Config --verbosity m --fully-specified`
78 |
79 | ## Feedback
80 |
81 | File NuGet.Client bugs in the [NuGet/PackageSourceMapper](https://github.com/NuGet/PackageSourceMapper/issues)
82 |
--------------------------------------------------------------------------------
/eng/common/sdk-task.ps1:
--------------------------------------------------------------------------------
# Entry point for running a single Arcade SDK task project
# (SdkTasks/<task>.proj) via MSBuild. Mirrors the conventions of build.ps1.
[CmdletBinding(PositionalBinding=$false)]
Param(
  [string] $configuration = 'Debug',
  # Name of the SdkTasks project to run (required; validated below).
  [string] $task,
  [string] $verbosity = 'minimal',
  # 'dotnet', 'vs', or unspecified (let tools.ps1 decide).
  [string] $msbuildEngine = $null,
  [switch] $restore,
  [switch] $prepareMachine,
  [switch] $help,
  # Any unrecognized arguments are forwarded to MSBuild as-is.
  [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
)

# Always run with CI behaviors; tools.ps1 reads these script-scope globals.
$ci = $true
$binaryLog = $true
$warnAsError = $true

. $PSScriptRoot\tools.ps1
18 |
# Prints the command-line help for this script to the host.
function Print-Usage() {
  Write-Host "Common settings:"
  Write-Host "  -task Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
  Write-Host "  -restore Restore dependencies"
  Write-Host "  -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
  Write-Host "  -help Print help and exit"
  Write-Host ""

  Write-Host "Advanced settings:"
  Write-Host "  -prepareMachine Prepare machine for CI run"
  Write-Host "  -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
  Write-Host ""
  Write-Host "Command line arguments not listed above are passed thru to msbuild."
}
33 |
# Runs MSBuild on the resolved task project for the given target ('Restore' or
# 'Execute'), logging to a per-target binlog under $LogDir.
# Relies on $taskProject (set by the caller), $LogDir/$ToolsetDir/$RepoRoot and
# the MSBuild helper from tools.ps1, plus the script-level parameters.
function Build([string]$target) {
  # 'Execute' logs to "<task>.binlog"; other targets get a ".<target>" suffix.
  $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
  $log = Join-Path $LogDir "$task$logSuffix.binlog"
  $outputPath = Join-Path $ToolsetDir "$task\"

  MSBuild $taskProject `
    /bl:$log `
    /t:$target `
    /p:Configuration=$configuration `
    /p:RepoRoot=$RepoRoot `
    /p:BaseIntermediateOutputPath=$outputPath `
    /v:$verbosity `
    @properties
}
48 |
try {
  # /help or /? anywhere in the pass-through args also shows usage.
  if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
    Print-Usage
    exit 0
  }

  # -task is required: it names the SdkTasks project to run.
  if ($task -eq "") {
    Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task '"
    Print-Usage
    ExitWithExitCode 1
  }

  if( $msbuildEngine -eq "vs") {
    # Ensure desktop MSBuild is available for sdk tasks.
    if( -not ($GlobalJson.tools.PSObject.Properties.Name -contains "vs" )) {
      $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
    }
    if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
      $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "16.10.0-preview2" -MemberType NoteProperty
    }
    if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
      $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
    }
    # NOTE(review): when xcopy-msbuild is "none" the install above is skipped,
    # $xcopyMSBuildToolsFolder stays $null, and this throws — confirm "none"
    # is meant to be unsupported in combination with -msbuildEngine vs.
    if ($xcopyMSBuildToolsFolder -eq $null) {
      throw 'Unable to get xcopy downloadable version of msbuild'
    }

    $global:_MSBuildExe = "$($xcopyMSBuildToolsFolder)\MSBuild\Current\Bin\MSBuild.exe"
  }

  # Resolve the task name to its SdkTasks project file (helper from tools.ps1).
  $taskProject = GetSdkTaskProject $task
  if (!(Test-Path $taskProject)) {
    Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task"
    ExitWithExitCode 1
  }

  if ($restore) {
    Build 'Restore'
  }

  Build 'Execute'
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Category 'Build' -Message $_
  ExitWithExitCode 1
}

ExitWithExitCode 0
--------------------------------------------------------------------------------
/eng/common/native/install-cmake.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Installs (or, with --clean, removes) a pinned CMake build under
# <installpath>/cmake/<version> and writes a shim script next to it.
# Helper functions (GetCurrentOS, GetTempPathFileName, DownloadAndExtract,
# NewScriptShim, Write-PipelineTelemetryError) come from common-library.sh.

source="${BASH_SOURCE[0]}"
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. "$scriptroot/common-library.sh"

base_uri=
install_path=
version=
clean=false
force=false
download_retries=5
retry_wait_time_seconds=30

while (($# > 0)); do
  lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
  case "$lowerI" in
    --baseuri)
      base_uri=$2
      shift 2
      ;;
    --installpath)
      install_path=$2
      shift 2
      ;;
    --version)
      version=$2
      shift 2
      ;;
    --clean)
      clean=true
      shift 1
      ;;
    --force)
      force=true
      shift 1
      ;;
    --downloadretries)
      download_retries=$2
      shift 2
      ;;
    --retrywaittimeseconds)
      retry_wait_time_seconds=$2
      shift 2
      ;;
    --help)
      echo "Common settings:"
      echo " --baseuri Base file directory or Url from which to acquire tool archives"
      echo " --installpath Base directory to install native tool to"
      echo " --clean Don't install the tool, just clean up the current install of the tool"
      echo " --force Force install of tools even if they previously exist"
      echo " --help Print help and exit"
      echo ""
      echo "Advanced settings:"
      echo " --downloadretries Total number of retry attempts"
      echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
      echo ""
      exit 0
      ;;
    *)
      # Fix: the original case statement had no default arm, so an
      # unrecognized argument was never shifted and the loop spun forever.
      echo "Unknown argument: $1" >&2
      exit 1
      ;;
  esac
done

tool_name="cmake"
tool_os=$(GetCurrentOS)
tool_folder="$(echo "$tool_os" | tr "[:upper:]" "[:lower:]")"
tool_arch="x86_64"
tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
tool_install_directory="$install_path/$tool_name/$version"
tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
shim_path="$install_path/$tool_name.sh"
uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"

# Clean up tool and installers
if [[ "$clean" = true ]]; then
  echo "Cleaning $tool_install_directory"
  if [[ -d "$tool_install_directory" ]]; then
    rm -rf "$tool_install_directory"
  fi

  echo "Cleaning $shim_path"
  if [[ -f "$shim_path" ]]; then
    rm -f "$shim_path"
  fi

  tool_temp_path=$(GetTempPathFileName "$uri")
  echo "Cleaning $tool_temp_path"
  if [[ -f "$tool_temp_path" ]]; then
    rm -f "$tool_temp_path"
  fi

  exit 0
fi

# Install tool
if [[ -f "$tool_file_path" ]] && [[ "$force" = false ]]; then
  echo "$tool_name ($version) already exists, skipping install"
  exit 0
fi

# Download and unpack the archive; DownloadAndExtract retries internally.
if ! DownloadAndExtract "$uri" "$tool_install_directory" "$force" "$download_retries" "$retry_wait_time_seconds"; then
  Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
  exit 1
fi

# Generate Shim
# Always rewrite shims so that we are referencing the expected version
if ! NewScriptShim "$shim_path" "$tool_file_path" true; then
  Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
  exit 1
fi

exit 0
--------------------------------------------------------------------------------
/eng/common/native/install-cmake-test.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Installs (or, with --clean, removes) a pinned cmake-test build under
# <installpath>/cmake-test/<version> and writes a shim script next to it.
# Helper functions (GetCurrentOS, GetTempPathFileName, DownloadAndExtract,
# NewScriptShim, Write-PipelineTelemetryError) come from common-library.sh.

source="${BASH_SOURCE[0]}"
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. "$scriptroot/common-library.sh"

base_uri=
install_path=
version=
clean=false
force=false
download_retries=5
retry_wait_time_seconds=30

while (($# > 0)); do
  lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
  case "$lowerI" in
    --baseuri)
      base_uri=$2
      shift 2
      ;;
    --installpath)
      install_path=$2
      shift 2
      ;;
    --version)
      version=$2
      shift 2
      ;;
    --clean)
      clean=true
      shift 1
      ;;
    --force)
      force=true
      shift 1
      ;;
    --downloadretries)
      download_retries=$2
      shift 2
      ;;
    --retrywaittimeseconds)
      retry_wait_time_seconds=$2
      shift 2
      ;;
    --help)
      echo "Common settings:"
      echo " --baseuri Base file directory or Url from which to acquire tool archives"
      echo " --installpath Base directory to install native tool to"
      echo " --clean Don't install the tool, just clean up the current install of the tool"
      echo " --force Force install of tools even if they previously exist"
      echo " --help Print help and exit"
      echo ""
      echo "Advanced settings:"
      echo " --downloadretries Total number of retry attempts"
      echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
      echo ""
      exit 0
      ;;
    *)
      # Fix: the original case statement had no default arm, so an
      # unrecognized argument was never shifted and the loop spun forever.
      echo "Unknown argument: $1" >&2
      exit 1
      ;;
  esac
done

tool_name="cmake-test"
tool_os=$(GetCurrentOS)
tool_folder="$(echo "$tool_os" | tr "[:upper:]" "[:lower:]")"
tool_arch="x86_64"
tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
tool_install_directory="$install_path/$tool_name/$version"
tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
shim_path="$install_path/$tool_name.sh"
uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"

# Clean up tool and installers
if [[ "$clean" = true ]]; then
  echo "Cleaning $tool_install_directory"
  if [[ -d "$tool_install_directory" ]]; then
    rm -rf "$tool_install_directory"
  fi

  echo "Cleaning $shim_path"
  if [[ -f "$shim_path" ]]; then
    rm -f "$shim_path"
  fi

  tool_temp_path=$(GetTempPathFileName "$uri")
  echo "Cleaning $tool_temp_path"
  if [[ -f "$tool_temp_path" ]]; then
    rm -f "$tool_temp_path"
  fi

  exit 0
fi

# Install tool
if [[ -f "$tool_file_path" ]] && [[ "$force" = false ]]; then
  echo "$tool_name ($version) already exists, skipping install"
  exit 0
fi

# Download and unpack the archive; DownloadAndExtract retries internally.
if ! DownloadAndExtract "$uri" "$tool_install_directory" "$force" "$download_retries" "$retry_wait_time_seconds"; then
  Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
  exit 1
fi

# Generate Shim
# Always rewrite shims so that we are referencing the expected version
if ! NewScriptShim "$shim_path" "$tool_file_path" true; then
  Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
  exit 1
fi

exit 0
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # NuGet Files
2 | *.nuget.targets
3 | *.nuget.props
4 |
5 | # Project K
6 | project.lock.json
7 | artifacts/
8 |
9 | # Tests
10 | .test/
11 |
13 | # Visual Studio and VS Code working directories
13 | **/.vs/
14 | **/.vscode/
15 |
16 | cli/
17 | cli1.0.4/
18 | **/nupkgs/*.nupkg
19 | packages/
20 | .nuget/nuget.exe
21 | */.nuget/
22 |
23 | # ignore nuget.exe for e2e tests
24 | test/EndToEnd/nuget.exe
25 |
26 | # remove ilmerge log
27 | mergelog.txt
28 |
29 | # MSBuild binlog
30 | *.binlog
31 |
32 | # User-specific files
33 | *.suo
34 | *.user
35 | *.sln.docstates
36 |
37 | # Build results
38 | [Aa]rtifacts/
39 | [Dd]ebug/
40 | [Rr]elease/
41 | !eng/[Rr]elease
42 | [Bb]in/
43 | [Oo]bj/
44 | [Pp]ackages/
45 | x64/
46 | .dotnet/
47 | .dotnet-test/
48 | .packages/
49 | .tools/
50 | .vscode/
51 |
52 | # Roslyn cache directories
53 | **/*.ide/
54 |
55 | [Dd]ebug/
56 | [Rr]elease/
57 | x64/
58 | [Bb]in/
59 | [Oo]bj/
60 |
61 | # Enable "build/" folder in the NuGet Packages folder since NuGet packages use it for MSBuild targets
62 | !packages/*/build/
63 |
64 | # MSTest test Results
65 | [Tt]est[Rr]esult*/
66 | [Bb]uild[Ll]og.*
67 |
68 | *_i.c
69 | *_p.c
70 | *.ilk
71 | *.meta
72 | *.obj
73 | *.pch
74 | *.pdb
75 | *.pgc
76 | *.pgd
77 | *.rsp
78 | *.sbr
79 | *.tlb
80 | *.tli
81 | *.tlh
82 | *.tmp
83 | *.tmp_proj
84 | *.log
85 | *.vspscc
86 | *.vssscc
87 | .builds
88 | *.pidb
89 | *.log
90 | *.scc
91 | *.g.cs
92 |
93 | # Visual C++ cache files
94 | ipch/
95 | *.aps
96 | *.ncb
97 | *.opensdf
98 | *.sdf
99 | *.cachefile
100 |
101 | # Visual Studio profiler
102 | *.psess
103 | *.vsp
104 | *.vspx
105 |
106 | # Guidance Automation Toolkit
107 | *.gpState
108 |
109 | # ReSharper is a .NET coding add-in
110 | _ReSharper*/
111 | *.[Rr]e[Ss]harper
112 | *.DotSettings
113 |
114 | # TeamCity is a build add-in
115 | _TeamCity*
116 |
117 | # DotCover is a Code Coverage Tool
118 | *.dotCover
119 |
120 | # NCrunch
121 | *.ncrunch*
122 | .*crunch*.local.xml
123 |
124 | # Installshield output folder
125 | [Ee]xpress/
126 |
127 | # DocProject is a documentation generator add-in
128 | DocProject/buildhelp/
129 | DocProject/Help/*.HxT
130 | DocProject/Help/*.HxC
131 | DocProject/Help/*.hhc
132 | DocProject/Help/*.hhk
133 | DocProject/Help/*.hhp
134 | DocProject/Help/Html2
135 | DocProject/Help/html
136 |
137 | # Click-Once directory
138 | publish/
139 |
140 | # Publish Web Output
141 | *.Publish.xml
142 |
143 | # NuGet Packages Directory
144 | /packages/
145 | !packages/repositories.config
146 |
147 | # Windows Azure Build Output
148 | csx
149 | *.build.csdef
150 |
151 | # Windows Store app package directory
152 | AppPackages/
153 |
154 | # Others
155 | sql/
156 | *.Cache
157 | ClientBin/
158 | [Ss]tyle[Cc]op.*
159 | ~$*
160 | *~
161 | *.dbmdl
162 | *.[Pp]ublish.xml
163 | *.pfx
164 | *.publishsettings
165 |
166 | # RIA/Silverlight projects
167 | Generated_Code/
168 |
169 | # Backup & report files from converting an old project file to a newer
170 | # Visual Studio version. Backup files are not needed, because we have git ;-)
171 | _UpgradeReport_Files/
172 | Backup*/
173 | UpgradeLog*.XML
174 | UpgradeLog*.htm
175 |
176 | # SQL Server files
177 | App_Data/*.mdf
178 | App_Data/*.ldf
179 |
180 |
181 | #LightSwitch generated files
182 | GeneratedArtifacts/
183 | _Pvt_Extensions/
184 | ModelManifest.xml
185 |
186 | # =========================
187 | # Windows detritus
188 | # =========================
189 |
190 | # Windows image file caches
191 | Thumbs.db
192 | ehthumbs.db
193 |
194 | # Folder config file
195 | Desktop.ini
196 |
197 | # Recycle Bin used on file shares
198 | $RECYCLE.BIN/
199 |
200 | # Mac desktop service store files
201 | .DS_Store
202 |
203 | # =========================
204 | # project.json detritus
205 | # =========================
206 |
207 | artifacts
208 | launchSettings.json
209 |
210 | # include the test packages
211 | !test/EndToEnd/Packages/**/*
212 | !test/EndToEnd/ProjectTemplates/**/*.pfx
213 |
214 | .idea
215 | .ionide
216 |
217 | # Image manifests are generated at build-time for VSIX
218 | *.imagemanifest
219 |
--------------------------------------------------------------------------------
/eng/common/post-build/post-build-utils.ps1:
--------------------------------------------------------------------------------
1 | # Most of the functions in this file require the variables `MaestroApiEndPoint`,
2 | # `MaestroApiVersion` and `MaestroApiAccessToken` to be globally available.
3 |
4 | $ErrorActionPreference = 'Stop'
5 | Set-StrictMode -Version 2.0
6 |
7 | # `tools.ps1` checks $ci to perform some actions. Since the post-build
8 | # scripts don't necessarily execute in the same agent that run the
9 | # build.ps1/sh script this variable isn't automatically set.
10 | $ci = $true
11 | $disableConfigureToolsetImport = $true
12 | . $PSScriptRoot\..\tools.ps1
13 |
# Builds the common header dictionary (Accept + Bearer Authorization) for
# Maestro API calls. $AuthToken defaults to the global $MaestroApiAccessToken;
# declaring it as a parameter makes the existing `-AuthToken` call sites in
# this file bind to a real parameter instead of being silently discarded.
# Backward compatible: $ContentType remains the first positional parameter.
function Create-MaestroApiRequestHeaders([string]$ContentType = 'application/json', [string]$AuthToken = $MaestroApiAccessToken) {
  Validate-MaestroVars

  $headers = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
  $headers.Add('Accept', $ContentType)
  $headers.Add('Authorization',"Bearer $AuthToken")
  return $headers
}
22 |
# Fetches channel metadata for $ChannelId from the Maestro API.
# Returns the parsed JSON object, or $null on failure (the error is written
# to the host by the catch block).
function Get-MaestroChannel([int]$ChannelId) {
  Validate-MaestroVars

  $requestHeaders = Create-MaestroApiRequestHeaders
  $requestUri = "$MaestroApiEndPoint/api/channels/${ChannelId}?api-version=$MaestroApiVersion"

  $response = try {
    Invoke-WebRequest -Method Get -Uri $requestUri -Headers $requestHeaders | ConvertFrom-Json
  }
  catch {
    Write-Host "Error: $_"
  }
  return $response
}
32 |
# Fetches build metadata for $BuildId from the Maestro API.
# Returns the parsed JSON object, or $null on failure (the error is written
# to the host).
function Get-MaestroBuild([int]$BuildId) {
  Validate-MaestroVars

  # Fix: dropped the '-AuthToken' argument — Create-MaestroApiRequestHeaders
  # declares no such parameter, so it was never bound as an auth token; the
  # helper already reads $MaestroApiAccessToken itself.
  $apiHeaders = Create-MaestroApiRequestHeaders
  $apiEndpoint = "$MaestroApiEndPoint/api/builds/${BuildId}?api-version=$MaestroApiVersion"

  # Fix: removed the stray 'return' inside the try-expression, which made the
  # trailing 'return $result' unreachable and diverged from the sibling Get-*
  # helpers; the returned value is unchanged.
  $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
  return $result
}
42 |
# Lists Maestro subscriptions filtered by source repository URL and channel id.
# Returns the parsed JSON object, or $null on failure (the error is written
# to the host).
function Get-MaestroSubscriptions([string]$SourceRepository, [int]$ChannelId) {
  Validate-MaestroVars

  # URL-encode the repository URL so it is safe as a query-string value.
  $SourceRepository = [System.Web.HttpUtility]::UrlEncode($SourceRepository)
  # Fix: dropped the spurious '-AuthToken' argument — no such parameter exists
  # on Create-MaestroApiRequestHeaders, so it had no effect.
  $apiHeaders = Create-MaestroApiRequestHeaders
  $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions?sourceRepository=$SourceRepository&channelId=$ChannelId&api-version=$MaestroApiVersion"

  $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
  return $result
}
53 |
# Adds build $BuildId to channel $ChannelId via the Maestro API.
# The response body is discarded; a failed request surfaces as an error from
# Invoke-WebRequest.
function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) {
  Validate-MaestroVars

  # Fix: dropped the spurious '-AuthToken' argument — no such parameter exists
  # on Create-MaestroApiRequestHeaders, so it had no effect.
  $apiHeaders = Create-MaestroApiRequestHeaders
  $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion"
  Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null
}
61 |
# Triggers the Maestro subscription identified by $SubscriptionId.
# The response body is discarded; a failed request surfaces as an error from
# Invoke-WebRequest.
function Trigger-Subscription([string]$SubscriptionId) {
  Validate-MaestroVars

  # Fix: dropped the spurious '-AuthToken' argument — no such parameter exists
  # on Create-MaestroApiRequestHeaders, so it had no effect.
  $apiHeaders = Create-MaestroApiRequestHeaders
  $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion"
  Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null
}
69 |
# Ensures the globals $MaestroApiEndPoint, $MaestroApiVersion and
# $MaestroApiAccessToken exist and have plausible values; exits the script via
# ExitWithExitCode otherwise. Get-Variable throws when a variable is missing,
# which is what routes into the catch block.
function Validate-MaestroVars {
  try {
    Get-Variable MaestroApiEndPoint | Out-Null
    Get-Variable MaestroApiVersion | Out-Null
    Get-Variable MaestroApiAccessToken | Out-Null

    # NOTE(review): the '.' characters here are unescaped regex
    # metacharacters, so this match is slightly looser than a literal host
    # comparison — confirm that is acceptable.
    if (!($MaestroApiEndPoint -Match '^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$')) {
      Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'"
      ExitWithExitCode 1
    }

    # API versions are date-formatted, e.g. 2020-02-20.
    if (!($MaestroApiVersion -Match '^[0-9]{4}-[0-9]{2}-[0-9]{2}$')) {
      Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'"
      ExitWithExitCode 1
    }
  }
  catch {
    Write-PipelineTelemetryError -Category 'MaestroVars' -Message 'Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script.'
    Write-Host $_
    ExitWithExitCode 1
  }
}
92 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/telemetry-end.yml:
--------------------------------------------------------------------------------
# Sends the "build finished" telemetry event to Helix at the end of a job.
# This block covers the parameters and the non-Windows (bash) step; the
# Windows (powershell) counterpart follows below.
parameters:
  maxRetries: 5
  retryDelay: 10 # in seconds

steps:
- bash: |
    # Derive the error count from the job status exposed by the agent.
    if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then
      errorCount=0
    else
      errorCount=1
    fi
    warningCount=0

    curlStatus=1
    retryCount=0
    # retry loop to harden against spotty telemetry connections
    # we don't retry successes and 4xx client errors
    until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
    do
      if [ $retryCount -gt 0 ]; then
        echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
        sleep $RetryDelay
      fi

      # create a temporary file for curl output
      res=`mktemp`

      curlResult=`
        curl --verbose --output $res --write-out "%{http_code}"\
        -H 'Content-Type: application/json' \
        -H "X-Helix-Job-Token: $Helix_JobToken" \
        -H 'Content-Length: 0' \
        -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \
        --data-urlencode "errorCount=$errorCount" \
        --data-urlencode "warningCount=$warningCount"`
      curlStatus=$?

      # curl itself succeeded: treat any non-2xx HTTP code as a failure so the
      # retry loop condition above can act on it.
      if [ $curlStatus -eq 0 ]; then
        if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
          curlStatus=$curlResult
        fi
      fi

      let retryCount++
    done

    if [ $curlStatus -ne 0 ]; then
      echo "Failed to Send Build Finish information after $retryCount retries"
      vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus"
      echo "##$vstsLogOutput"
      exit 1
    fi
  displayName: Send Unix Build End Telemetry
  env:
    # defined via VSTS variables in start-job.sh
    Helix_JobToken: $(Helix_JobToken)
    Helix_WorkItemId: $(Helix_WorkItemId)
    MaxRetries: ${{ parameters.maxRetries }}
    RetryDelay: ${{ parameters.retryDelay }}
  condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
61 | - powershell: |
62 | if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) {
63 | $ErrorCount = 0
64 | } else {
65 | $ErrorCount = 1
66 | }
67 | $WarningCount = 0
68 |
69 | # Basic retry loop to harden against server flakiness
70 | $retryCount = 0
71 | while ($retryCount -lt $env:MaxRetries) {
72 | try {
73 | Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" `
74 | -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
75 | break
76 | }
77 | catch {
78 | $statusCode = $_.Exception.Response.StatusCode.value__
79 | if ($statusCode -ge 400 -and $statusCode -le 499) {
80 | Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
81 | Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
82 | exit 1
83 | }
84 | Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
85 | $retryCount++
86 | sleep $env:RetryDelay
87 | continue
88 | }
89 | }
90 |
91 | if ($retryCount -ge $env:MaxRetries) {
92 | Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
93 | exit 1
94 | }
95 | displayName: Send Windows Build End Telemetry
96 | env:
97 | # defined via VSTS variables in start-job.ps1
98 | Helix_JobToken: $(Helix_JobToken)
99 | Helix_WorkItemId: $(Helix_WorkItemId)
100 | MaxRetries: ${{ parameters.maxRetries }}
101 | RetryDelay: ${{ parameters.retryDelay }}
102 | condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT'))
103 |
--------------------------------------------------------------------------------
/eng/common/templates/job/onelocbuild.yml:
--------------------------------------------------------------------------------
# Template: OneLocBuild localization job. Generates eng/Localize/LocProject.json,
# runs the OneLocBuild@2 task against it, and publishes the resulting
# localization files and LocProject.json as build artifacts.
parameters:
  # Optional: dependencies of the job
  dependsOn: ''

  # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
  pool: ''

  CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
  GithubPat: $(BotAccount-dotnet-bot-repo-PAT)

  SourcesDirectory: $(Build.SourcesDirectory)
  CreatePr: true
  AutoCompletePr: false
  ReusePr: true
  UseLfLineEndings: true
  UseCheckedInLocProjectJson: false
  LanguageSet: VS_Main_Languages
  LclSource: lclFilesInRepo
  LclPackageId: ''
  RepoType: gitHub
  GitHubOrg: dotnet
  MirrorRepo: ''
  MirrorBranch: main
  condition: ''

jobs:
- job: OneLocBuild

  dependsOn: ${{ parameters.dependsOn }}

  displayName: OneLocBuild

  ${{ if ne(parameters.pool, '') }}:
    pool: ${{ parameters.pool }}
  ${{ if eq(parameters.pool, '') }}:
    pool:
      # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
      ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
        name: VSEngSS-MicroBuild2022-1ES
        demands: Cmd
      # If it's not devdiv, it's dnceng
      ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
        name: NetCore1ESPool-Internal
        demands: ImageOverride -equals Build.Server.Amd64.VS2019

  variables:
  - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
  - name: _GenerateLocProjectArguments
    value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
      -LanguageSet "${{ parameters.LanguageSet }}"
      -CreateNeutralXlfs
  - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
    - name: _GenerateLocProjectArguments
      value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson


  steps:
  - task: Powershell@2
    inputs:
      filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
      arguments: $(_GenerateLocProjectArguments)
    displayName: Generate LocProject.json
    condition: ${{ parameters.condition }}

  - task: OneLocBuild@2
    displayName: OneLocBuild
    env:
      SYSTEM_ACCESSTOKEN: $(System.AccessToken)
    inputs:
      locProj: eng/Localize/LocProject.json
      outDir: $(Build.ArtifactStagingDirectory)
      lclSource: ${{ parameters.LclSource }}
      lclPackageId: ${{ parameters.LclPackageId }}
      isCreatePrSelected: ${{ parameters.CreatePr }}
      # PR-related inputs only apply when a PR is actually being created.
      ${{ if eq(parameters.CreatePr, true) }}:
        isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
        isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
        ${{ if eq(parameters.RepoType, 'gitHub') }}:
          isShouldReusePrSelected: ${{ parameters.ReusePr }}
      packageSourceAuth: patAuth
      patVariable: ${{ parameters.CeapexPat }}
      ${{ if eq(parameters.RepoType, 'gitHub') }}:
        repoType: ${{ parameters.RepoType }}
        gitHubPatVariable: "${{ parameters.GithubPat }}"
      ${{ if ne(parameters.MirrorRepo, '') }}:
        isMirrorRepoSelected: true
        gitHubOrganization: ${{ parameters.GitHubOrg }}
        mirrorRepo: ${{ parameters.MirrorRepo }}
        mirrorBranch: ${{ parameters.MirrorBranch }}
    condition: ${{ parameters.condition }}

  - task: PublishBuildArtifacts@1
    displayName: Publish Localization Files
    inputs:
      PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
      PublishLocation: Container
      ArtifactName: Loc
    condition: ${{ parameters.condition }}

  - task: PublishBuildArtifacts@1
    displayName: Publish LocProject.json
    inputs:
      PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
      PublishLocation: Container
      ArtifactName: Loc
    condition: ${{ parameters.condition }}
--------------------------------------------------------------------------------
/eng/Version.Details.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | https://dev.azure.com/dnceng/internal/_git/dotnet-symuploader
6 | 62ceb439e80bf0814d0ffa17f022d4624ea4aa6c
7 |
8 |
9 | https://dev.azure.com/dnceng/internal/_git/dotnet-symuploader
10 | 62ceb439e80bf0814d0ffa17f022d4624ea4aa6c
11 |
12 |
13 | https://github.com/dotnet/templating
14 |
15 |
16 |
17 |
18 |
19 | https://github.com/dotnet/arcade
20 | 81001b45bd54f9223905bf55f6ed0125273580fa
21 |
22 |
23 | https://github.com/dotnet/arcade
24 | 81001b45bd54f9223905bf55f6ed0125273580fa
25 |
26 |
27 | https://github.com/dotnet/arcade
28 | 81001b45bd54f9223905bf55f6ed0125273580fa
29 |
30 |
31 | https://github.com/dotnet/arcade
32 | 81001b45bd54f9223905bf55f6ed0125273580fa
33 |
34 |
35 | https://github.com/dotnet/arcade
36 | 81001b45bd54f9223905bf55f6ed0125273580fa
37 |
38 |
39 | https://github.com/dotnet/arcade-services
40 | 354627f34e567c924dc4cc927d1c70a627aeb9f8
41 |
42 |
43 | https://github.com/dotnet/arcade-services
44 | a5f3ed9d5f560555ff6d26b286acdcfbb7ce3b14
45 |
46 |
47 | https://github.com/dotnet/xharness
48 | 17956df760439a8eedf6acc2ef510a6fe185124a
49 |
50 |
51 | https://github.com/dotnet/roslyn
52 | 27d9215906a62c0a550ba19cd275a2dc94ad88b3
53 |
54 |
55 | https://github.com/dotnet/linker
56 | 3efd231da430baa0fd670e278f6b5c3e62834bde
57 |
58 |
59 | https://github.com/dotnet/sourcelink
60 | 5dc8e5c5649d6e9f2c10401e89a2762dee67c73e
61 |
62 |
63 |
64 | https://github.com/dotnet/sourcelink
65 | 5dc8e5c5649d6e9f2c10401e89a2762dee67c73e
66 |
67 |
68 | https://github.com/dotnet/symreader-converter
69 | c5ba7c88f92e2dde156c324a8c8edc04d9fa4fe0
70 |
71 |
72 | https://github.com/dotnet/symreader-converter
73 | c5ba7c88f92e2dde156c324a8c8edc04d9fa4fe0
74 |
75 |
76 | https://github.com/dotnet/xliff-tasks
77 | bece141801e549fda0fda5c0ab96f733b1093515
78 |
79 |
80 |
81 |
--------------------------------------------------------------------------------
/PackageSourceMapper/Print.cs:
--------------------------------------------------------------------------------
1 | using NuGet.Common;
2 | using NuGet.Packaging.Core;
3 | using System;
4 | using System.Collections.Concurrent;
5 | using System.Collections.Generic;
6 | using System.Linq;
7 | using System.Text;
8 |
9 | namespace NuGet.PackageSourceMapper
10 | {
11 | internal static partial class GenerateCommandHandler
12 | {
13 | private static void PrintUndefinedSources(ConcurrentDictionary> sources, HashSet undefinedSources, ILogger logger)
14 | {
15 | if (undefinedSources.Count > 0)
16 | {
17 | var undefinedSourcesLog = new StringBuilder();
18 |
19 | foreach (string undefinedSource in undefinedSources)
20 | {
21 | if (undefinedSource == NuGetOrgApi || (undefinedSource == PACKAGES__WITHOUT__SOURCES))
22 | {
23 | continue;
24 | }
25 | else
26 | {
27 | if (sources.ContainsKey(undefinedSource))
28 | {
29 | undefinedSourcesLog.AppendLine($" Source : {undefinedSource} with {sources[undefinedSource].Distinct().Count()} packages.");
30 |
31 | foreach (PackageData packageContent in sources[undefinedSource].Distinct().OrderBy(p => p.PackageIdentity.Id))
32 | {
33 | undefinedSourcesLog.AppendLine($" - {packageContent.PackageIdentity.Id} {packageContent.PackageIdentity.Version}");
34 | }
35 | }
36 | else
37 | {
38 | undefinedSourcesLog.AppendLine($" Source : {undefinedSource} with unknown number of packages.");
39 | continue;
40 | }
41 |
42 | }
43 | }
44 |
45 | if (undefinedSources.Contains(PACKAGES__WITHOUT__SOURCES) && sources.ContainsKey(PACKAGES__WITHOUT__SOURCES))
46 | {
47 | undefinedSourcesLog.AppendLine($" Source : Packages don't have source meta data: {sources[PACKAGES__WITHOUT__SOURCES].Distinct().Count()} packages.");
48 |
49 | foreach (PackageData packageContent in sources[PACKAGES__WITHOUT__SOURCES].Distinct().OrderBy(p => p.PackageIdentity.Id))
50 | {
51 | undefinedSourcesLog.AppendLine($" - {packageContent.PackageIdentity.Id} {packageContent.PackageIdentity.Version}");
52 | }
53 | }
54 |
55 | if (undefinedSourcesLog.Length > 0)
56 | {
57 | logger.LogMinimal($"{Environment.NewLine} Following sources detected from packages sources but not found in nuget.config:");
58 | logger.LogMinimal(undefinedSourcesLog.ToString());
59 | }
60 | }
61 | }
62 |
63 | private static void PrintStatistics(ConcurrentDictionary> sources, ILogger logger)
64 | {
65 | List uniquePackages = sources.Values.SelectMany(s => s).Select(s => s.PackageIdentity.Id).Distinct().OrderBy(s => s).ToList();
66 | List uniquePackageVersions = sources.Values.SelectMany(s => s).Distinct().OrderBy(s => s.PackageIdentity).ToList();
67 |
68 | logger.LogMinimal($" Total source count: {sources.Count}, Unique packages {uniquePackages.Count}, PackageVersion count: {uniquePackageVersions.Count}");
69 |
70 | foreach (KeyValuePair> source in sources.OrderBy(s => s.Key))
71 | {
72 | List sourceUniquePackages = source.Value.Select(s => s.PackageIdentity).Distinct().ToList();
73 |
74 | logger.LogMinimal($" Source : {source.Key}, Unique packages {sourceUniquePackages.Count}, PackageVersion count: {source.Value.Count}");
75 |
76 | foreach (PackageIdentity package in sourceUniquePackages.OrderBy(p => p.Id))
77 | {
78 | logger.LogVerbose($" - {package}");
79 | }
80 |
81 | logger.LogMinimal(string.Empty);
82 | }
83 |
84 | logger.LogMinimal(string.Empty);
85 | }
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/eng/common/templates/jobs/jobs.yml:
--------------------------------------------------------------------------------
# Template: expands a collection of jobs through ../job/job.yml and optionally
# appends source-build, source-index and publish-build-assets jobs.
parameters:
  # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
  continueOnError: false

  # Optional: Include PublishBuildArtifacts task
  enablePublishBuildArtifacts: false

  # Optional: Enable publishing using release pipelines
  enablePublishUsingPipelines: false

  # Optional: Enable running the source-build jobs to build repo from source
  enableSourceBuild: false

  # Optional: Parameters for source-build template.
  #           See /eng/common/templates/jobs/source-build.yml for options
  sourceBuildParameters: []

  graphFileGeneration:
    # Optional: Enable generating the graph files at the end of the build
    enabled: false
    # Optional: Include toolset dependencies in the generated graph files
    includeToolset: false

  # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
  jobs: []

  # Optional: Override automatically derived dependsOn value for "publish build assets" job
  publishBuildAssetsDependsOn: ''

  # Optional: should run as a public build even in the internal project
  # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
  runAsPublic: false

  enableSourceIndex: false
  sourceIndexParams: {}

# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds.

jobs:
- ${{ each job in parameters.jobs }}:
  - template: ../job/job.yml
    parameters:
      # pass along parameters
      ${{ each parameter in parameters }}:
        ${{ if ne(parameter.key, 'jobs') }}:
          ${{ parameter.key }}: ${{ parameter.value }}

      # pass along job properties
      ${{ each property in job }}:
        ${{ if ne(property.key, 'job') }}:
          ${{ property.key }}: ${{ property.value }}

      name: ${{ job.job }}

- ${{ if eq(parameters.enableSourceBuild, true) }}:
  - template: /eng/common/templates/jobs/source-build.yml
    parameters:
      allCompletedJobId: Source_Build_Complete
      ${{ each parameter in parameters.sourceBuildParameters }}:
        ${{ parameter.key }}: ${{ parameter.value }}

- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
  - template: ../job/source-index-stage1.yml
    parameters:
      runAsPublic: ${{ parameters.runAsPublic }}
      ${{ each parameter in parameters.sourceIndexParams }}:
        ${{ parameter.key }}: ${{ parameter.value }}

- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:

  # NOTE(review): 'enablePublishBuildAssets' and 'artifacts.publish.manifests'
  # are used below but not declared in the parameters block above; presumably
  # supplied by callers per the schema doc referenced at the top — confirm.
  - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
    - template: ../job/publish-build-assets.yml
      parameters:
        continueOnError: ${{ parameters.continueOnError }}
        dependsOn:
        - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
          - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
            - ${{ job.job }}
        - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
          - ${{ each job in parameters.jobs }}:
            - ${{ job.job }}
        - ${{ if eq(parameters.enableSourceBuild, true) }}:
          - Source_Build_Complete
        pool:
          # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
          ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
            name: VSEngSS-MicroBuild2022-1ES
            demands: Cmd
          # If it's not devdiv, it's dnceng
          ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
            name: NetCore1ESPool-Internal
            demands: ImageOverride -equals Build.Server.Amd64.VS2019

        runAsPublic: ${{ parameters.runAsPublic }}
        publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
        enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
98 |
--------------------------------------------------------------------------------
/eng/common/sdl/configure-sdl-tool.ps1:
--------------------------------------------------------------------------------
# Configures a set of Guardian (SDL) tools: for every tool/scenario entry in
# $ToolsList a .gdnconfig file is generated under <GdnFolder>/r by invoking
# "guardian configure". CredScan, PoliCheck and CodeQL/Semmle get automatic
# target-directory and caller-supplied extra args appended.
Param(
  [string] $GuardianCliLocation,
  [string] $WorkingDirectory,
  [string] $TargetDirectory,
  [string] $GdnFolder,
  # The list of Guardian tools to configure. For each object in the array:
  # - If the item is a [hashtable], it must contain these entries:
  #   - Name = The tool name as Guardian knows it.
  #   - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique
  #     among all tool entries with the same Name.
  #   - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")'
  # - If the item is a [string] $v, it is treated as '@{ Name="$v" }'
  [object[]] $ToolsList,
  [string] $GuardianLoggerLevel='Standard',
  # Optional: Additional params to add to any tool using CredScan.
  [string[]] $CrScanAdditionalRunConfigParams,
  # Optional: Additional params to add to any tool using PoliCheck.
  [string[]] $PoliCheckAdditionalRunConfigParams,
  # Optional: Additional params to add to any tool using CodeQL/Semmle.
  [string[]] $CodeQLAdditionalRunConfigParams
)

$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
$disableConfigureToolsetImport = $true
$global:LASTEXITCODE = 0

try {
  # `tools.ps1` checks $ci to perform some actions. Since the SDL
  # scripts don't necessarily execute in the same agent that run the
  # build.ps1/sh script this variable isn't automatically set.
  $ci = $true
  . $PSScriptRoot\..\tools.ps1

  # Normalize tools list: all in [hashtable] form with defined values for each key.
  $ToolsList = $ToolsList |
    ForEach-Object {
      if ($_ -is [string]) {
        $_ = @{ Name = $_ }
      }

      if (-not ($_['Scenario'])) { $_.Scenario = "" }
      if (-not ($_['Args'])) { $_.Args = @() }
      $_
    }

  Write-Host "List of tools to configure:"
  $ToolsList | ForEach-Object { $_ | Out-String | Write-Host }

  # We store config files in the r directory of .gdn
  $gdnConfigPath = Join-Path $GdnFolder 'r'
  $ValidPath = Test-Path $GuardianCliLocation

  if ($ValidPath -eq $False)
  {
    Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
    ExitWithExitCode 1
  }

  foreach ($tool in $ToolsList) {
    # Put together the name and scenario to make a unique key.
    $toolConfigName = $tool.Name
    if ($tool.Scenario) {
      $toolConfigName += "_" + $tool.Scenario
    }

    Write-Host "=== Configuring $toolConfigName..."

    $gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig"

    # For some tools, add default and automatic args.
    # (PowerShell variable names are case-insensitive, so $targetDirectory
    # below refers to the $TargetDirectory script parameter.)
    if ($tool.Name -eq 'credscan') {
      if ($targetDirectory) {
        $tool.Args += "`"TargetDirectory < $TargetDirectory`""
      }
      $tool.Args += "`"OutputType < pre`""
      $tool.Args += $CrScanAdditionalRunConfigParams
    } elseif ($tool.Name -eq 'policheck') {
      if ($targetDirectory) {
        $tool.Args += "`"Target < $TargetDirectory`""
      }
      $tool.Args += $PoliCheckAdditionalRunConfigParams
    } elseif ($tool.Name -eq 'semmle' -or $tool.Name -eq 'codeql') {
      if ($targetDirectory) {
        $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`""
      }
      $tool.Args += $CodeQLAdditionalRunConfigParams
    }

    # Create variable pointing to the args array directly so we can use splat syntax later.
    $toolArgs = $tool.Args

    # Configure the tool. If args array is provided or the current tool has some default arguments
    # defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}",
    # one per parameter. Doc page for "guardian configure":
    # https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure
    Exec-BlockVerbosely {
      & $GuardianCliLocation configure `
        --working-directory $WorkingDirectory `
        --tool $tool.Name `
        --output-path $gdnConfigFile `
        --logger-level $GuardianLoggerLevel `
        --noninteractive `
        --force `
        $(if ($toolArgs) { "--args" }) @toolArgs
      Exit-IfNZEC "Sdl"
    }

    Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile"
  }
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
  ExitWithExitCode 1
}
117 |
--------------------------------------------------------------------------------
/eng/common/native/install-tool.ps1:
--------------------------------------------------------------------------------
1 | <#
2 | .SYNOPSIS
3 | Install native tool
4 |
5 | .DESCRIPTION
6 | Install cmake native tool from Azure blob storage
7 |
8 | .PARAMETER InstallPath
9 | Base directory to install native tool to
10 |
11 | .PARAMETER BaseUri
12 | Base file directory or Url from which to acquire tool archives
13 |
14 | .PARAMETER CommonLibraryDirectory
15 | Path to folder containing common library modules
16 |
17 | .PARAMETER Force
18 | Force install of tools even if they previously exist
19 |
20 | .PARAMETER Clean
21 | Don't install the tool, just clean up the current install of the tool
22 |
23 | .PARAMETER DownloadRetries
24 | Total number of retry attempts
25 |
26 | .PARAMETER RetryWaitTimeInSeconds
27 | Wait time between retry attempts in seconds
28 |
29 | .NOTES
30 | Returns 0 if install succeeds, 1 otherwise
31 | #>
32 | [CmdletBinding(PositionalBinding=$false)]
33 | Param (
34 | [Parameter(Mandatory=$True)]
35 | [string] $ToolName,
36 | [Parameter(Mandatory=$True)]
37 | [string] $InstallPath,
38 | [Parameter(Mandatory=$True)]
39 | [string] $BaseUri,
40 | [Parameter(Mandatory=$True)]
41 | [string] $Version,
42 | [string] $CommonLibraryDirectory = $PSScriptRoot,
43 | [switch] $Force = $False,
44 | [switch] $Clean = $False,
45 | [int] $DownloadRetries = 5,
46 | [int] $RetryWaitTimeInSeconds = 30
47 | )
48 |
49 | . $PSScriptRoot\..\pipeline-logging-functions.ps1
50 |
51 | # Import common library modules
52 | Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1")
53 |
54 | try {
55 | # Define verbose switch if undefined
56 | $Verbose = $VerbosePreference -Eq "Continue"
57 |
58 | $Arch = CommonLibrary\Get-MachineArchitecture
59 | $ToolOs = "win64"
60 | if($Arch -Eq "x32") {
61 | $ToolOs = "win32"
62 | }
63 | $ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch"
64 | $ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\"
65 | $Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip"
66 | $ShimPath = Join-Path $InstallPath "$ToolName.exe"
67 |
68 | if ($Clean) {
69 | Write-Host "Cleaning $ToolInstallDirectory"
70 | if (Test-Path $ToolInstallDirectory) {
71 | Remove-Item $ToolInstallDirectory -Force -Recurse
72 | }
73 | Write-Host "Cleaning $ShimPath"
74 | if (Test-Path $ShimPath) {
75 | Remove-Item $ShimPath -Force
76 | }
77 | $ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri
78 | Write-Host "Cleaning $ToolTempPath"
79 | if (Test-Path $ToolTempPath) {
80 | Remove-Item $ToolTempPath -Force
81 | }
82 | exit 0
83 | }
84 |
85 | # Install tool
86 | if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) {
87 | Write-Verbose "$ToolName ($Version) already exists, skipping install"
88 | }
89 | else {
90 | $InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri `
91 | -InstallDirectory $ToolInstallDirectory `
92 | -Force:$Force `
93 | -DownloadRetries $DownloadRetries `
94 | -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
95 | -Verbose:$Verbose
96 |
97 | if ($InstallStatus -Eq $False) {
98 | Write-PipelineTelemetryError "Installation failed" -Category "NativeToolsetBootstrapping"
99 | exit 1
100 | }
101 | }
102 |
103 | $ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName }
104 | if (@($ToolFilePath).Length -Gt 1) {
105 | Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))"
106 | exit 1
107 | } elseif (@($ToolFilePath).Length -Lt 1) {
108 | Write-Host "$ToolName was not found in $ToolInstallDirectory."
109 | exit 1
110 | }
111 |
112 | # Generate shim
113 | # Always rewrite shims so that we are referencing the expected version
114 | $GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName `
115 | -ShimDirectory $InstallPath `
116 | -ToolFilePath "$ToolFilePath" `
117 | -BaseUri $BaseUri `
118 | -Force:$Force `
119 | -Verbose:$Verbose
120 |
121 | if ($GenerateShimStatus -Eq $False) {
122 | Write-PipelineTelemetryError "Generate shim failed" -Category "NativeToolsetBootstrapping"
123 | return 1
124 | }
125 |
126 | exit 0
127 | }
128 | catch {
129 | Write-Host $_.ScriptStackTrace
130 | Write-PipelineTelemetryError -Category "NativeToolsetBootstrapping" -Message $_
131 | exit 1
132 | }
133 |
--------------------------------------------------------------------------------
/eng/common/pipeline-logging-functions.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
function Write-PipelineTelemetryError {
  # Emits an error with a NETCORE_ENGINEERING_TELEMETRY category prefix so AzDO
  # telemetry can bucket it, delegating to Write-PipelineTaskError.
  #
  # Options:
  #   -category|-c <name>   telemetry category used in the message prefix
  #   -force|-f             emit the logging command even when $ci != true
  #   -<flag> <value>       any other option pair is forwarded verbatim
  #   remaining args        the error message
  #
  # Outside CI (and without -force) the raw message is printed to stderr.
  local telemetry_category=''
  local force=false
  local function_args=()
  local message=''
  while [[ $# -gt 0 ]]; do
    opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
    case "$opt" in
      -category|-c)
        telemetry_category=$2
        shift
        ;;
      -force|-f)
        force=true
        ;;
      -*)
        # Fix: store flag and value as two separate array elements. The old
        # form ("$1 $2") glued them into one string and relied on unquoted
        # word-splitting downstream, which broke values containing spaces.
        function_args+=("$1" "$2")
        shift
        ;;
      *)
        message=$*
        ;;
    esac
    shift
  done

  if [[ $force != true ]] && [[ "$ci" != true ]]; then
    echo "$message" >&2
    return
  fi

  if [[ $force == true ]]; then
    function_args+=("-force")
  fi
  message="(NETCORE_ENGINEERING_TELEMETRY=$telemetry_category) $message"
  function_args+=("$message")
  # Quoted expansion: each stored element arrives as exactly one argument.
  Write-PipelineTaskError "${function_args[@]}"
}
41 |
function Write-PipelineTaskError {
  # Emits an AzDO "##vso[task.logissue ...]" logging command on CI; outside CI
  # (and without -force) it just echoes the message to stderr.
  #
  # Options: -type|-t, -sourcepath|-s, -linenumber|-ln, -columnnumber|-cn,
  #          -errcode|-e, -force|-f. Remaining arguments form the message.
  local message_type="error"
  local source_path=''
  local line_no=''
  local col_no=''
  local err_code=''
  local force=false

  while [[ $# -gt 0 ]]; do
    opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
    case "$opt" in
      -type|-t)         message_type=$2; shift ;;
      -sourcepath|-s)   source_path=$2;  shift ;;
      -linenumber|-ln)  line_no=$2;      shift ;;
      -columnnumber|-cn) col_no=$2;      shift ;;
      -errcode|-e)      err_code=$2;     shift ;;
      -force|-f)        force=true ;;
      *) break ;;
    esac
    shift
  done

  if [[ $force != true ]] && [[ "$ci" != true ]]; then
    echo "$@" >&2
    return
  fi

  # Build the semicolon-separated property list; empty fields are omitted.
  local properties="type=$message_type"
  if [ -n "$source_path" ]; then
    properties="$properties;sourcepath=$source_path"
  fi
  if [ -n "$line_no" ]; then
    properties="$properties;linenumber=$line_no"
  fi
  if [ -n "$col_no" ]; then
    properties="$properties;columnnumber=$col_no"
  fi
  if [ -n "$err_code" ]; then
    properties="$properties;code=$err_code"
  fi

  echo "##vso[task.logissue $properties]$*"
}
112 |
function Write-PipelineSetVariable {
  # Emits an AzDO "##vso[task.setvariable ...]" logging command. No-op when
  # not running in CI ($ci != true).
  #
  # Options:
  #   -name|-n <name>                 variable name
  #   -value|-v <value>               variable value
  #   -secret|-s                      mark the variable as secret
  #   -as_output|-a                   kept for interface compatibility (see fix note)
  #   -is_multi_job_variable|-i <b>   sets isOutput (default true)
  if [[ "$ci" != true ]]; then
    return
  fi

  local name=''
  local value=''
  local secret=false
  local as_output=false
  local is_multi_job_variable=true

  while [[ $# -gt 0 ]]; do
    opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
    case "$opt" in
      -name|-n)
        name=$2
        shift
        ;;
      -value|-v)
        value=$2
        shift
        ;;
      -secret|-s)
        secret=true
        ;;
      -as_output|-a)
        as_output=true
        ;;
      -is_multi_job_variable|-i)
        is_multi_job_variable=$2
        shift
        ;;
    esac
    shift
  done

  # Fix: use '//' so EVERY occurrence is escaped; the old single-'/' form only
  # replaced the first ';' / ']' etc., producing malformed logging commands.
  value=${value//;/%3B}
  value=${value//\\r/%0D}
  value=${value//\\n/%0A}
  value=${value//]/%5D}

  local message="##vso[task.setvariable variable=$name;isSecret=$secret;isOutput=$is_multi_job_variable]$value"

  # Fix: the previous code executed the bare '$message' as a command when
  # -as_output was set (which could never succeed since '##vso[...' is not a
  # program); both paths now print the logging command.
  echo "$message"
}
162 |
function Write-PipelinePrependPath {
  # Prepends a directory (-path|-p) to PATH for this process and, on CI, also
  # emits ##vso[task.prependpath] so subsequent pipeline steps see it too.
  local dir_to_prepend=''

  while [[ $# -gt 0 ]]; do
    opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
    case "$opt" in
      -path|-p)
        dir_to_prepend=$2
        shift
        ;;
    esac
    shift
  done

  export PATH="$dir_to_prepend:$PATH"

  if [[ "$ci" == true ]]; then
    echo "##vso[task.prependpath]$dir_to_prepend"
  fi
}
183 |
function Write-PipelineSetResult {
  # Sets the overall task result on CI via ##vso[task.complete].
  #   -result|-r   the result value (e.g. Succeeded, Failed)
  #   -message|-m  text attached to the completion record
  # No-op outside CI.
  local result=''
  local message=''

  while [[ $# -gt 0 ]]; do
    opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
    case "$opt" in
      -result|-r)  result=$2;  shift ;;
      -message|-m) message=$2; shift ;;
    esac
    shift
  done

  if [[ "$ci" == true ]]; then
    echo "##vso[task.complete result=$result;]$message"
  fi
}
207 |
--------------------------------------------------------------------------------
/eng/common/native/common-library.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
function GetNativeInstallDirectory {
  # Prints the root directory for native-tool installs. Honors the
  # NETCOREENG_INSTALL_DIRECTORY override; defaults to ~/.netcoreeng/native/.
  local install_dir=$NETCOREENG_INSTALL_DIRECTORY

  if [[ -z $install_dir ]]; then
    install_dir=$HOME/.netcoreeng/native/
  fi

  echo $install_dir
  return 0
}
15 |
function GetTempDirectory {
  # Temp downloads live in a temp/ subdirectory of the native install root.
  echo "$(GetNativeInstallDirectory)temp/"
  return 0
}
21 |
function ExpandZip {
  # Extracts an archive into a directory.
  #   $1 archive path, $2 output directory, $3 force re-extract (default false)
  # Returns 0 on success (or skip), 1 on failure.
  # NOTE(review): despite the name this extracts with tar, so the archive must
  # be tar-readable — confirm against the artifacts actually published.
  local zip_path=$1
  local output_directory=$2
  local force=${3:-false}

  echo "Extracting $zip_path to $output_directory"
  if [[ -d $output_directory ]] && [[ $force = false ]]; then
    echo "Directory '$output_directory' already exists, skipping extract"
    return 0
  fi

  if [[ -d $output_directory ]]; then
    echo "'Force flag enabled, but '$output_directory' exists. Removing directory"
    # Fix: quote the path so directories with spaces are removed, not split.
    if ! rm -rf "$output_directory"; then
      Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to remove '$output_directory'"
      return 1
    fi
  fi

  echo "Creating directory: '$output_directory'"
  mkdir -p "$output_directory"

  echo "Extracting archive"
  if ! tar -xf "$zip_path" -C "$output_directory"; then
    Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to extract '$zip_path'"
    return 1
  fi

  return 0
}
54 |
function GetCurrentOS {
  # Map `uname -s` onto the OS names used by the native-tools scripts:
  # "Linux" or "MacOS". Prints nothing on unrecognized platforms.
  local kernel_name
  kernel_name="$(uname -s)"
  case $kernel_name in
    Linux*)  echo "Linux" ;;
    Darwin*) echo "MacOS" ;;
  esac
  return 0
}
63 |
function GetFile {
  # Fetch a file to a destination path. When the "uri" is actually a local
  # file path it is copied instead of downloaded.
  #   $1  uri                      source URL or local file path
  #   $2  path                     destination file
  #   $3  force                    overwrite an existing destination (default false)
  #   $4  download_retries         retry count for curl/wget (default 5)
  #   $5  retry_wait_time_seconds  delay between curl retries (default 30)
  # Returns the underlying cp/curl/wget exit code (0 when skipped).
  local uri=$1
  local path=$2
  local force=${3:-false}
  local download_retries=${4:-5}
  local retry_wait_time_seconds=${5:-30}

  if [[ -f $path ]]; then
    if [[ $force = false ]]; then
      echo "File '$path' already exists. Skipping download"
      return 0
    else
      rm -rf "$path"
    fi
  fi

  if [[ -f $uri ]]; then
    echo "'$uri' is a file path, copying file to '$path'"
    cp "$uri" "$path"
    return $?
  fi

  echo "Downloading $uri"
  # Use curl if available, otherwise use wget
  if command -v curl > /dev/null; then
    curl "$uri" -sSL --retry "$download_retries" --retry-delay "$retry_wait_time_seconds" --create-dirs -o "$path" --fail
  else
    # NOTE: wget has no equivalent of --retry-delay here; only the retry
    # count is honored on this fallback path.
    wget -q -O "$path" "$uri" --tries="$download_retries"
  fi

  return $?
}
96 |
function GetTempPathFileName {
  # Given any path or URL, print where its basename would live inside the
  # shared temp download directory.
  local path=$1

  local temp_dir
  temp_dir=$(GetTempDirectory)
  local temp_file_name
  # Quoted so paths containing spaces survive basename and echo intact.
  temp_file_name=$(basename "$path")
  echo "$temp_dir$temp_file_name"
  return 0
}
105 |
# DownloadAndExtract <uri> <installDir> [force] [retries] [retry-wait]
# Download an archive into the shared temp directory (via GetFile), then
# extract it into <installDir> (via ExpandZip). Returns 1, after reporting
# telemetry, when either step fails.
function DownloadAndExtract {
  local uri=$1
  local installDir=$2
  local force=${3:-false}
  local download_retries=${4:-5}
  local retry_wait_time_seconds=${5:-30}

  # Download target in the temp dir, keyed by the URI's basename.
  local temp_tool_path=$(GetTempPathFileName $uri)

  echo "downloading to: $temp_tool_path"

  # Download file
  GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds
  if [[ $? != 0 ]]; then
    Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to download '$uri' to '$temp_tool_path'."
    return 1
  fi

  # Extract File
  # NOTE(review): ExpandZip accepts only three parameters; the two retry
  # arguments forwarded here are silently ignored by it.
  echo "extracting from $temp_tool_path to $installDir"
  ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds
  if [[ $? != 0 ]]; then
    Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to extract '$temp_tool_path' to '$installDir'."
    return 1
  fi

  return 0
}
134 |
function NewScriptShim {
  # Write a tiny executable wrapper at $shimpath that forwards its first
  # argument to $tool_file_path.
  #   $1  shimpath        where to create the shim
  #   $2  tool_file_path  executable the shim invokes
  #   $3  force           overwrite an existing shim (default false)
  # Returns 1 when the shim already exists (without force) or the tool file
  # cannot be found.
  local shimpath=$1
  local tool_file_path=$2
  local force=${3:-false}

  echo "Generating '$shimpath' shim"
  if [[ -f $shimpath ]]; then
    if [[ $force = false ]]; then
      echo "File '$shimpath' already exists." >&2
      return 1
    else
      rm -rf $shimpath
    fi
  fi

  if [[ ! -f $tool_file_path ]]; then
    # try to see if the path is lower cased
    tool_file_path="$(echo $tool_file_path | tr "[:upper:]" "[:lower:]")"
    if [[ ! -f $tool_file_path ]]; then
      Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist"
      return 1
    fi
  fi

  # Emit the shim line by line; it forwards only its first argument ($1) to
  # the tool. The final blank line matches the original echo-produced file.
  {
    printf '#!/usr/bin/env bash\n'
    printf 'SHIMARGS=$1\n'
    printf '%s $SHIMARGS\n' "$tool_file_path"
    printf '\n'
  } > $shimpath

  chmod +x $shimpath

  echo "Finished generating shim '$shimpath'"

  return $?
}
172 |
173 |
--------------------------------------------------------------------------------
/eng/common/internal-feed-operations.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | # Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
6 | # in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
7 | # https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables.
8 | # This should ONLY be called from identified internal builds
function SetupCredProvider {
  # $1 - auth token used as the password for every darc-int-* feed endpoint.
  local authToken=$1

  # Install the Cred Provider NuGet plugin
  # (Fixed a stray "..." that previously sat outside the closing quote.)
  echo "Setting up Cred Provider NuGet plugin in the agent..."
  echo "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..."

  local url="https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.sh"

  echo "Writing the contents of 'installcredprovider.ps1' locally..."
  local installcredproviderPath="installcredprovider.sh"
  if command -v curl > /dev/null; then
    curl $url > "$installcredproviderPath"
  else
    wget -q -O "$installcredproviderPath" "$url"
  fi

  echo "Installing plugin..."
  . "$installcredproviderPath"

  echo "Deleting local copy of 'installcredprovider.sh'..."
  rm installcredprovider.sh

  if [ ! -d "$HOME/.nuget/plugins" ]; then
    Write-PipelineTelemetryError -category 'Build' 'CredProvider plugin was not installed correctly!'
    ExitWithExitCode 1
  else
    echo "CredProvider plugin was installed correctly!"
  fi

  # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
  # feeds successfully

  # Fix: was "{$repo_root}NuGet.config", which expanded to the literal
  # "{<path>}NuGet.config". repo_root comes from tools.sh; assumed to end
  # with a trailing slash — TODO confirm against tools.sh.
  local nugetConfigPath="${repo_root}NuGet.config"

  # Fix: test file existence (-f); the previous `[ ! "$nugetConfigPath" ]`
  # only checked for a non-empty string and could never fail here.
  if [ ! -f "$nugetConfigPath" ]; then
    Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!"
    ExitWithExitCode 1
  fi

  # Build a JSON array of {endpoint, password} objects, one per darc-int-*
  # feed found in NuGet.config.
  local endpoints='['
  # Assignment on the `local` line deliberately masks grep's non-zero status
  # when no feeds match (this script runs under `set -e`).
  local nugetConfigPackageValues=$(grep "key=\"darc-int-" "$nugetConfigPath")
  local pattern="value=\"(.*)\""

  for value in $nugetConfigPackageValues
  do
    if [[ $value =~ $pattern ]]; then
      local endpoint="${BASH_REMATCH[1]}"
      endpoints+="{\"endpoint\": \"$endpoint\", \"password\": \"$authToken\"},"
    fi
  done

  # Drop the trailing comma (or the '[' itself when no feeds matched) and
  # close the array; the length check below treats "]" as "no feeds".
  endpoints=${endpoints%?}
  endpoints+=']'

  if [ ${#endpoints} -gt 2 ]; then
    local endpointCredentials="{\"endpointCredentials\": "$endpoints"}"

    echo "##vso[task.setvariable variable=VSS_NUGET_EXTERNAL_FEED_ENDPOINTS]$endpointCredentials"
    echo "##vso[task.setvariable variable=NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED]False"
  else
    echo "No internal endpoints found in NuGet.config"
  fi
}
73 |
74 | # Workaround for https://github.com/microsoft/msbuild/issues/4430
function InstallDotNetSdkAndRestoreArcade {
  # Installs a pinned .NET SDK into <repo>/dotnet, restores an empty
  # eng/common/restore.proj once to prime the Arcade SDK package cache, then
  # removes both the throwaway project and the temporary SDK again.
  local dotnetTempDir="$repo_root/dotnet"
  local dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
  local restoreProjPath="$repo_root/eng/common/restore.proj"

  echo "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..."
  echo "" > "$restoreProjPath"

  InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion"

  # Capturing on the `local` line intentionally swallows the restore output
  # and masks its exit status (the script runs under `set -e`).
  local res=$("$dotnetTempDir/dotnet" restore "$restoreProjPath")
  echo "Arcade SDK restored!"

  # Cleanup: use -f/-d so we only delete things that actually exist (the
  # previous non-empty-string tests were always true) and quote the paths.
  if [ -f "$restoreProjPath" ]; then
    rm "$restoreProjPath"
  fi

  if [ -d "$dotnetTempDir" ]; then
    rm -r "$dotnetTempDir"
  fi
}
97 |
source="${BASH_SOURCE[0]}"
operation=''
authToken=''
repoName=''

# Print usage for the invalid-argument path. Fix: `usage` was referenced
# below but never defined, so bad arguments died with "command not found"
# instead of a helpful message.
function usage {
  echo "Usage: $0 --operation <setup|install-restore> --authtoken <token>"
}

# Fix: use an arithmetic comparison; `>` inside [[ ]] is a lexicographic
# string comparison, not a numeric one.
while [[ $# -gt 0 ]]; do
  opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
  case "$opt" in
    --operation)
      operation=$2
      shift
      ;;
    --authtoken)
      authToken=$2
      shift
      ;;
    *)
      echo "Invalid argument: $1"
      usage
      exit 1
      ;;
  esac

  shift
done

# Resolve symlinks so tools.sh is sourced from the script's real directory.
while [[ -h "$source" ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"
  # if $source was a relative symlink, we need to resolve it relative to the path where the
  # symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. "$scriptroot/tools.sh"

if [ "$operation" = "setup" ]; then
  SetupCredProvider "$authToken"
elif [ "$operation" = "install-restore" ]; then
  InstallDotNetSdkAndRestoreArcade
else
  echo "Unknown operation '$operation'!"
fi
142 |
--------------------------------------------------------------------------------
/eng/common/cross/arm64/tizen-fetch.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -e
3 |
# Treat VERBOSE as an integer verbosity level; default it to 0 when it is
# unset, blank, or non-numeric (`"$VERBOSE" -ne "$VERBOSE"` only fails for
# non-integers, and its error output is discarded).
if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
  VERBOSE=0
fi

# Log <level> <msg...> - print <msg...> when VERBOSE >= <level>.
Log()
{
  if [ $VERBOSE -ge $1 ]; then
    echo ${@:2}
  fi
}

# Informational message (blue), shown at VERBOSE >= 1.
Inform()
{
  Log 1 -e "\x1B[0;34m$@\x1B[m"
}

# Debug message (green), shown at VERBOSE >= 2.
Debug()
{
  Log 2 -e "\x1B[0;32m$@\x1B[m"
}

# Error message (red) on stderr, always shown.
Error()
{
  >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
}

# Fetch <url> <file> [progress] - download <url> to <file> with curl.
# Shows a progress bar only when verbose and the third argument is non-empty.
Fetch()
{
  URL=$1
  FILE=$2
  PROGRESS=$3
  if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
    CURL_OPT="--progress-bar"
  else
    CURL_OPT="--silent"
  fi
  curl $CURL_OPT $URL > $FILE
}
42 |
# Abort early when a required external tool is missing.
hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }

# First script argument is the working directory; fall back to ./tizen_tmp.
TMPDIR=$1
if [ ! -d $TMPDIR ]; then
  TMPDIR=./tizen_tmp
  Debug "Create temporary directory : $TMPDIR"
  mkdir -p $TMPDIR
fi

# Tizen snapshot repository layout constants.
TIZEN_URL=http://download.tizen.org/snapshots/tizen/
BUILD_XML=build.xml
REPOMD_XML=repomd.xml
PRIMARY_XML=primary.xml
TARGET_URL="http://__not_initialized"   # set for real by fetch_tizen_pkgs_init

# Xpath_get <xpath> <xml-file> - evaluate the XPath expression against the
# file and store the result in the global XPATH_RESULT; exits the script
# when nothing (or only whitespace) matches.
Xpath_get()
{
  XPATH_RESULT=''
  XPATH=$1
  XML_FILE=$2
  RESULT=$(xmllint --xpath $XPATH $XML_FILE)
  if [[ -z ${RESULT// } ]]; then
    Error "Can not find target from $XML_FILE"
    Debug "Xpath = $XPATH"
    exit 1
  fi
  XPATH_RESULT=$RESULT
}
73 |
# fetch_tizen_pkgs_init <target> <profile> - point this script at one Tizen
# snapshot repository. Downloads build.xml for <profile>, resolves the
# binary repo path for <target>, then fetches and unpacks that repo's
# primary.xml package index. Sets the globals TARGET_URL and TMP_PRIMARY
# consumed by fetch_tizen_pkgs.
fetch_tizen_pkgs_init()
{
  TARGET=$1
  PROFILE=$2
  Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"

  # Start from a clean per-profile scratch directory.
  TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
  if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
  mkdir -p $TMP_PKG_DIR

  PKG_URL=$TIZEN_URL/$PROFILE/latest

  BUILD_XML_URL=$PKG_URL/$BUILD_XML
  TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
  TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
  TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
  TMP_PRIMARYGZ=${TMP_PRIMARY}.gz

  Fetch $BUILD_XML_URL $TMP_BUILD

  Debug "fetch $BUILD_XML_URL to $TMP_BUILD"

  # build.xml maps the build target name to its binary repo subdirectory.
  TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
  Xpath_get $TARGET_XPATH $TMP_BUILD
  TARGET_PATH=$XPATH_RESULT
  TARGET_URL=$PKG_URL/$TARGET_PATH

  # repomd.xml points at the gzipped primary.xml package index.
  REPOMD_URL=$TARGET_URL/repodata/repomd.xml
  PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'

  Fetch $REPOMD_URL $TMP_REPOMD

  Debug "fetch $REPOMD_URL to $TMP_REPOMD"

  Xpath_get $PRIMARY_XPATH $TMP_REPOMD
  PRIMARY_XML_PATH=$XPATH_RESULT
  PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH

  Fetch $PRIMARY_URL $TMP_PRIMARYGZ

  Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"

  gunzip $TMP_PRIMARYGZ

  Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
}
120 |
# fetch_tizen_pkgs <arch> <pkg...> - download each named package for <arch>
# from the repo selected by fetch_tizen_pkgs_init into TMPDIR, verifying its
# sha256 checksum against the primary.xml package index.
fetch_tizen_pkgs()
{
  ARCH=$1
  PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'

  PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'

  for pkg in ${@:2}
  do
    Inform "Fetching... $pkg"
    # Substitute the package name and arch into the XPath templates.
    XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
    XPATH=${XPATH/_ARCH_/$ARCH}
    Xpath_get $XPATH $TMP_PRIMARY
    PKG_PATH=$XPATH_RESULT

    XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
    XPATH=${XPATH/_ARCH_/$ARCH}
    Xpath_get $XPATH $TMP_PRIMARY
    CHECKSUM=$XPATH_RESULT

    PKG_URL=$TARGET_URL/$PKG_PATH
    PKG_FILE=$(basename $PKG_PATH)
    PKG_PATH=$TMPDIR/$PKG_FILE

    Debug "Download $PKG_URL to $PKG_PATH"
    Fetch $PKG_URL $PKG_PATH true

    # Fix: guard the pipeline with `if !`. The script runs under `set -e`,
    # so the previous `cmd; if [ $? -ne 0 ]` pattern exited before ever
    # reaching the diagnostic message on a checksum mismatch.
    if ! echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null; then
      Error "Fail to fetch $PKG_URL to $PKG_PATH"
      Debug "Checksum = $CHECKSUM"
      exit 1
    fi
  done
}
156 |
# Stage 1: toolchain/runtime dependencies from the "base" snapshot.
# NOTE(review): the label says "arm" but this file fetches aarch64 packages.
Inform "Initialize arm base"
fetch_tizen_pkgs_init standard base
Inform "fetch common packages"
fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
Inform "fetch coreclr packages"
fetch_tizen_pkgs aarch64 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
Inform "fetch corefx packages"
fetch_tizen_pkgs aarch64 libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel

# Stage 2: remaining corefx dependencies live in the "unified" snapshot.
Inform "Initialize standard unified"
fetch_tizen_pkgs_init standard unified
Inform "fetch corefx packages"
fetch_tizen_pkgs aarch64 gssdp gssdp-devel tizen-release
170 |
171 |
--------------------------------------------------------------------------------
/eng/common/cross/x86/tizen-fetch.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -e
3 |
# Treat VERBOSE as an integer verbosity level; default it to 0 when it is
# unset, blank, or non-numeric (`"$VERBOSE" -ne "$VERBOSE"` only fails for
# non-integers, and its error output is discarded).
if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
  VERBOSE=0
fi

# Log <level> <msg...> - print <msg...> when VERBOSE >= <level>.
Log()
{
  if [ $VERBOSE -ge $1 ]; then
    echo ${@:2}
  fi
}

# Informational message (blue), shown at VERBOSE >= 1.
Inform()
{
  Log 1 -e "\x1B[0;34m$@\x1B[m"
}

# Debug message (green), shown at VERBOSE >= 2.
Debug()
{
  Log 2 -e "\x1B[0;32m$@\x1B[m"
}

# Error message (red) on stderr, always shown.
Error()
{
  >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
}

# Fetch <url> <file> [progress] - download <url> to <file> with curl.
# Shows a progress bar only when verbose and the third argument is non-empty.
Fetch()
{
  URL=$1
  FILE=$2
  PROGRESS=$3
  if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
    CURL_OPT="--progress-bar"
  else
    CURL_OPT="--silent"
  fi
  curl $CURL_OPT $URL > $FILE
}
42 |
# Abort early when a required external tool is missing.
hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }

# First script argument is the working directory; fall back to ./tizen_tmp.
TMPDIR=$1
if [ ! -d $TMPDIR ]; then
  TMPDIR=./tizen_tmp
  Debug "Create temporary directory : $TMPDIR"
  mkdir -p $TMPDIR
fi

# Tizen snapshot repository layout constants.
TIZEN_URL=http://download.tizen.org/snapshots/tizen
BUILD_XML=build.xml
REPOMD_XML=repomd.xml
PRIMARY_XML=primary.xml
TARGET_URL="http://__not_initialized"   # set for real by fetch_tizen_pkgs_init

# Xpath_get <xpath> <xml-file> - evaluate the XPath expression against the
# file and store the result in the global XPATH_RESULT; exits the script
# when nothing (or only whitespace) matches.
Xpath_get()
{
  XPATH_RESULT=''
  XPATH=$1
  XML_FILE=$2
  RESULT=$(xmllint --xpath $XPATH $XML_FILE)
  if [[ -z ${RESULT// } ]]; then
    Error "Can not find target from $XML_FILE"
    Debug "Xpath = $XPATH"
    exit 1
  fi
  XPATH_RESULT=$RESULT
}
73 |
# fetch_tizen_pkgs_init <target> <profile> - point this script at one Tizen
# snapshot repository. Downloads build.xml for <profile>, resolves the
# binary repo path for <target>, then fetches and unpacks that repo's
# primary.xml package index. Sets the globals TARGET_URL and TMP_PRIMARY
# consumed by fetch_tizen_pkgs.
fetch_tizen_pkgs_init()
{
  TARGET=$1
  PROFILE=$2
  Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"

  # Start from a clean per-profile scratch directory.
  TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
  if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
  mkdir -p $TMP_PKG_DIR

  PKG_URL=$TIZEN_URL/$PROFILE/latest

  BUILD_XML_URL=$PKG_URL/$BUILD_XML
  TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
  TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
  TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
  TMP_PRIMARYGZ=${TMP_PRIMARY}.gz

  Fetch $BUILD_XML_URL $TMP_BUILD

  Debug "fetch $BUILD_XML_URL to $TMP_BUILD"

  # build.xml maps the build target name to its binary repo subdirectory.
  TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
  Xpath_get $TARGET_XPATH $TMP_BUILD
  TARGET_PATH=$XPATH_RESULT
  TARGET_URL=$PKG_URL/$TARGET_PATH

  # repomd.xml points at the gzipped primary.xml package index.
  REPOMD_URL=$TARGET_URL/repodata/repomd.xml
  PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'

  Fetch $REPOMD_URL $TMP_REPOMD

  Debug "fetch $REPOMD_URL to $TMP_REPOMD"

  Xpath_get $PRIMARY_XPATH $TMP_REPOMD
  PRIMARY_XML_PATH=$XPATH_RESULT
  PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH

  Fetch $PRIMARY_URL $TMP_PRIMARYGZ

  Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"

  gunzip $TMP_PRIMARYGZ

  Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
}
120 |
# fetch_tizen_pkgs <arch> <pkg...> - download each named package for <arch>
# from the repo selected by fetch_tizen_pkgs_init into TMPDIR, verifying its
# sha256 checksum against the primary.xml package index.
fetch_tizen_pkgs()
{
  ARCH=$1
  PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'

  PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'

  for pkg in ${@:2}
  do
    Inform "Fetching... $pkg"
    # Substitute the package name and arch into the XPath templates.
    XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
    XPATH=${XPATH/_ARCH_/$ARCH}
    Xpath_get $XPATH $TMP_PRIMARY
    PKG_PATH=$XPATH_RESULT

    XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
    XPATH=${XPATH/_ARCH_/$ARCH}
    Xpath_get $XPATH $TMP_PRIMARY
    CHECKSUM=$XPATH_RESULT

    PKG_URL=$TARGET_URL/$PKG_PATH
    PKG_FILE=$(basename $PKG_PATH)
    PKG_PATH=$TMPDIR/$PKG_FILE

    Debug "Download $PKG_URL to $PKG_PATH"
    Fetch $PKG_URL $PKG_PATH true

    # Fix: guard the pipeline with `if !`. The script runs under `set -e`,
    # so the previous `cmd; if [ $? -ne 0 ]` pattern exited before ever
    # reaching the diagnostic message on a checksum mismatch.
    if ! echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null; then
      Error "Fail to fetch $PKG_URL to $PKG_PATH"
      Debug "Checksum = $CHECKSUM"
      exit 1
    fi
  done
}
156 |
# Stage 1: toolchain/runtime dependencies from the "base" snapshot (i686).
Inform "Initialize i686 base"
fetch_tizen_pkgs_init standard base
Inform "fetch common packages"
fetch_tizen_pkgs i686 gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
Inform "fetch coreclr packages"
fetch_tizen_pkgs i686 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
Inform "fetch corefx packages"
fetch_tizen_pkgs i686 libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel

# Stage 2: remaining corefx dependencies live in the "unified" snapshot.
Inform "Initialize standard unified"
fetch_tizen_pkgs_init standard unified
Inform "fetch corefx packages"
fetch_tizen_pkgs i686 gssdp gssdp-devel tizen-release
170 |
171 |
--------------------------------------------------------------------------------
/eng/common/cross/arm/tizen-fetch.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -e
3 |
# Treat VERBOSE as an integer verbosity level; default it to 0 when it is
# unset, blank, or non-numeric (`"$VERBOSE" -ne "$VERBOSE"` only fails for
# non-integers, and its error output is discarded).
if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
  VERBOSE=0
fi

# Log <level> <msg...> - print <msg...> when VERBOSE >= <level>.
Log()
{
  if [ $VERBOSE -ge $1 ]; then
    echo ${@:2}
  fi
}

# Informational message (blue), shown at VERBOSE >= 1.
Inform()
{
  Log 1 -e "\x1B[0;34m$@\x1B[m"
}

# Debug message (green), shown at VERBOSE >= 2.
Debug()
{
  Log 2 -e "\x1B[0;32m$@\x1B[m"
}

# Error message (red) on stderr, always shown.
Error()
{
  >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
}

# Fetch <url> <file> [progress] - download <url> to <file> with curl.
# Shows a progress bar only when verbose and the third argument is non-empty.
Fetch()
{
  URL=$1
  FILE=$2
  PROGRESS=$3
  if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
    CURL_OPT="--progress-bar"
  else
    CURL_OPT="--silent"
  fi
  curl $CURL_OPT $URL > $FILE
}
42 |
# Abort early when a required external tool is missing.
hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }

# First script argument is the working directory; fall back to ./tizen_tmp.
TMPDIR=$1
if [ ! -d $TMPDIR ]; then
  TMPDIR=./tizen_tmp
  Debug "Create temporary directory : $TMPDIR"
  mkdir -p $TMPDIR
fi

# Tizen snapshot repository layout constants.
TIZEN_URL=http://download.tizen.org/snapshots/tizen
BUILD_XML=build.xml
REPOMD_XML=repomd.xml
PRIMARY_XML=primary.xml
TARGET_URL="http://__not_initialized"   # set for real by fetch_tizen_pkgs_init

# Xpath_get <xpath> <xml-file> - evaluate the XPath expression against the
# file and store the result in the global XPATH_RESULT; exits the script
# when nothing (or only whitespace) matches.
Xpath_get()
{
  XPATH_RESULT=''
  XPATH=$1
  XML_FILE=$2
  RESULT=$(xmllint --xpath $XPATH $XML_FILE)
  if [[ -z ${RESULT// } ]]; then
    Error "Can not find target from $XML_FILE"
    Debug "Xpath = $XPATH"
    exit 1
  fi
  XPATH_RESULT=$RESULT
}
73 |
# fetch_tizen_pkgs_init <target> <profile> - point this script at one Tizen
# snapshot repository. Downloads build.xml for <profile>, resolves the
# binary repo path for <target>, then fetches and unpacks that repo's
# primary.xml package index. Sets the globals TARGET_URL and TMP_PRIMARY
# consumed by fetch_tizen_pkgs.
fetch_tizen_pkgs_init()
{
  TARGET=$1
  PROFILE=$2
  Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"

  # Start from a clean per-profile scratch directory.
  TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
  if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
  mkdir -p $TMP_PKG_DIR

  PKG_URL=$TIZEN_URL/$PROFILE/latest

  BUILD_XML_URL=$PKG_URL/$BUILD_XML
  TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
  TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
  TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
  TMP_PRIMARYGZ=${TMP_PRIMARY}.gz

  Fetch $BUILD_XML_URL $TMP_BUILD

  Debug "fetch $BUILD_XML_URL to $TMP_BUILD"

  # build.xml maps the build target name to its binary repo subdirectory.
  TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
  Xpath_get $TARGET_XPATH $TMP_BUILD
  TARGET_PATH=$XPATH_RESULT
  TARGET_URL=$PKG_URL/$TARGET_PATH

  # repomd.xml points at the gzipped primary.xml package index.
  REPOMD_URL=$TARGET_URL/repodata/repomd.xml
  PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'

  Fetch $REPOMD_URL $TMP_REPOMD

  Debug "fetch $REPOMD_URL to $TMP_REPOMD"

  Xpath_get $PRIMARY_XPATH $TMP_REPOMD
  PRIMARY_XML_PATH=$XPATH_RESULT
  PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH

  Fetch $PRIMARY_URL $TMP_PRIMARYGZ

  Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"

  gunzip $TMP_PRIMARYGZ

  Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
}
120 |
# fetch_tizen_pkgs <arch> <pkg...> - download each named package for <arch>
# from the repo selected by fetch_tizen_pkgs_init into TMPDIR, verifying its
# sha256 checksum against the primary.xml package index.
fetch_tizen_pkgs()
{
  ARCH=$1
  PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'

  PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'

  for pkg in ${@:2}
  do
    Inform "Fetching... $pkg"
    # Substitute the package name and arch into the XPath templates.
    XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
    XPATH=${XPATH/_ARCH_/$ARCH}
    Xpath_get $XPATH $TMP_PRIMARY
    PKG_PATH=$XPATH_RESULT

    XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
    XPATH=${XPATH/_ARCH_/$ARCH}
    Xpath_get $XPATH $TMP_PRIMARY
    CHECKSUM=$XPATH_RESULT

    PKG_URL=$TARGET_URL/$PKG_PATH
    PKG_FILE=$(basename $PKG_PATH)
    PKG_PATH=$TMPDIR/$PKG_FILE

    Debug "Download $PKG_URL to $PKG_PATH"
    Fetch $PKG_URL $PKG_PATH true

    # Fix: guard the pipeline with `if !`. The script runs under `set -e`,
    # so the previous `cmd; if [ $? -ne 0 ]` pattern exited before ever
    # reaching the diagnostic message on a checksum mismatch.
    if ! echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null; then
      Error "Fail to fetch $PKG_URL to $PKG_PATH"
      Debug "Checksum = $CHECKSUM"
      exit 1
    fi
  done
}
156 |
# Stage 1: toolchain/runtime dependencies from the "base" snapshot (armv7hl).
Inform "Initialize arm base"
fetch_tizen_pkgs_init standard base
Inform "fetch common packages"
fetch_tizen_pkgs armv7hl gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
Inform "fetch coreclr packages"
fetch_tizen_pkgs armv7hl lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
Inform "fetch corefx packages"
fetch_tizen_pkgs armv7hl libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel

# Stage 2: remaining corefx dependencies live in the "unified" snapshot.
Inform "Initialize standard unified"
fetch_tizen_pkgs_init standard unified
Inform "fetch corefx packages"
fetch_tizen_pkgs armv7hl gssdp gssdp-devel tizen-release
170 |
171 |
--------------------------------------------------------------------------------
/eng/common/cross/armel/tizen-fetch.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -e
3 |
# Treat VERBOSE as an integer verbosity level; default it to 0 when it is
# unset, blank, or non-numeric (`"$VERBOSE" -ne "$VERBOSE"` only fails for
# non-integers, and its error output is discarded).
if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
  VERBOSE=0
fi

# Log <level> <msg...> - print <msg...> when VERBOSE >= <level>.
Log()
{
  if [ $VERBOSE -ge $1 ]; then
    echo ${@:2}
  fi
}

# Informational message (blue), shown at VERBOSE >= 1.
Inform()
{
  Log 1 -e "\x1B[0;34m$@\x1B[m"
}

# Debug message (green), shown at VERBOSE >= 2.
Debug()
{
  Log 2 -e "\x1B[0;32m$@\x1B[m"
}

# Error message (red) on stderr, always shown.
Error()
{
  >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
}

# Fetch <url> <file> [progress] - download <url> to <file> with curl.
# Shows a progress bar only when verbose and the third argument is non-empty.
Fetch()
{
  URL=$1
  FILE=$2
  PROGRESS=$3
  if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
    CURL_OPT="--progress-bar"
  else
    CURL_OPT="--silent"
  fi
  curl $CURL_OPT $URL > $FILE
}
42 |
# Abort early when a required external tool is missing.
hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }

# First script argument is the working directory; fall back to ./tizen_tmp.
TMPDIR=$1
if [ ! -d $TMPDIR ]; then
  TMPDIR=./tizen_tmp
  Debug "Create temporary directory : $TMPDIR"
  mkdir -p $TMPDIR
fi

# Tizen snapshot repository layout constants.
TIZEN_URL=http://download.tizen.org/snapshots/tizen
BUILD_XML=build.xml
REPOMD_XML=repomd.xml
PRIMARY_XML=primary.xml
TARGET_URL="http://__not_initialized"   # set for real by fetch_tizen_pkgs_init

# Xpath_get <xpath> <xml-file> - evaluate the XPath expression against the
# file and store the result in the global XPATH_RESULT; exits the script
# when nothing (or only whitespace) matches.
Xpath_get()
{
  XPATH_RESULT=''
  XPATH=$1
  XML_FILE=$2
  RESULT=$(xmllint --xpath $XPATH $XML_FILE)
  if [[ -z ${RESULT// } ]]; then
    Error "Can not find target from $XML_FILE"
    Debug "Xpath = $XPATH"
    exit 1
  fi
  XPATH_RESULT=$RESULT
}
73 |
# fetch_tizen_pkgs_init <target> <profile> - point this script at one Tizen
# snapshot repository. Downloads build.xml for <profile>, resolves the
# binary repo path for <target>, then fetches and unpacks that repo's
# primary.xml package index. Sets the globals TARGET_URL and TMP_PRIMARY
# consumed by fetch_tizen_pkgs.
fetch_tizen_pkgs_init()
{
  TARGET=$1
  PROFILE=$2
  Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"

  # Start from a clean per-profile scratch directory.
  TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
  if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
  mkdir -p $TMP_PKG_DIR

  PKG_URL=$TIZEN_URL/$PROFILE/latest

  BUILD_XML_URL=$PKG_URL/$BUILD_XML
  TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
  TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
  TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
  TMP_PRIMARYGZ=${TMP_PRIMARY}.gz

  Fetch $BUILD_XML_URL $TMP_BUILD

  Debug "fetch $BUILD_XML_URL to $TMP_BUILD"

  # build.xml maps the build target name to its binary repo subdirectory.
  TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
  Xpath_get $TARGET_XPATH $TMP_BUILD
  TARGET_PATH=$XPATH_RESULT
  TARGET_URL=$PKG_URL/$TARGET_PATH

  # repomd.xml points at the gzipped primary.xml package index.
  REPOMD_URL=$TARGET_URL/repodata/repomd.xml
  PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'

  Fetch $REPOMD_URL $TMP_REPOMD

  Debug "fetch $REPOMD_URL to $TMP_REPOMD"

  Xpath_get $PRIMARY_XPATH $TMP_REPOMD
  PRIMARY_XML_PATH=$XPATH_RESULT
  PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH

  Fetch $PRIMARY_URL $TMP_PRIMARYGZ

  Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"

  gunzip $TMP_PRIMARYGZ

  Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
}
120 |
# fetch_tizen_pkgs <arch> <pkg...> - download each named package for <arch>
# from the repo selected by fetch_tizen_pkgs_init into TMPDIR, verifying its
# sha256 checksum against the primary.xml package index.
fetch_tizen_pkgs()
{
  ARCH=$1
  PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'

  PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'

  for pkg in ${@:2}
  do
    Inform "Fetching... $pkg"
    # Substitute the package name and arch into the XPath templates.
    XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
    XPATH=${XPATH/_ARCH_/$ARCH}
    Xpath_get $XPATH $TMP_PRIMARY
    PKG_PATH=$XPATH_RESULT

    XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
    XPATH=${XPATH/_ARCH_/$ARCH}
    Xpath_get $XPATH $TMP_PRIMARY
    CHECKSUM=$XPATH_RESULT

    PKG_URL=$TARGET_URL/$PKG_PATH
    PKG_FILE=$(basename $PKG_PATH)
    PKG_PATH=$TMPDIR/$PKG_FILE

    Debug "Download $PKG_URL to $PKG_PATH"
    Fetch $PKG_URL $PKG_PATH true

    # Fix: guard the pipeline with `if !`. The script runs under `set -e`,
    # so the previous `cmd; if [ $? -ne 0 ]` pattern exited before ever
    # reaching the diagnostic message on a checksum mismatch.
    if ! echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null; then
      Error "Fail to fetch $PKG_URL to $PKG_PATH"
      Debug "Checksum = $CHECKSUM"
      exit 1
    fi
  done
}
156 |
# Stage 1: toolchain/runtime dependencies from the "base" snapshot (armv7l).
Inform "Initialize arm base"
fetch_tizen_pkgs_init standard base
Inform "fetch common packages"
fetch_tizen_pkgs armv7l gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
Inform "fetch coreclr packages"
fetch_tizen_pkgs armv7l lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
Inform "fetch corefx packages"
fetch_tizen_pkgs armv7l libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel

# Stage 2: remaining corefx dependencies live in the "unified" snapshot.
Inform "Initialize standard unified"
fetch_tizen_pkgs_init standard unified
Inform "fetch corefx packages"
fetch_tizen_pkgs armv7l gssdp gssdp-devel tizen-release
170 |
171 |
--------------------------------------------------------------------------------
/eng/common/internal-feed-operations.ps1:
--------------------------------------------------------------------------------
# Command-line contract for internal-feed-operations.ps1.
param(
  [Parameter(Mandatory=$true)][string] $Operation,  # which operation to run (used below, e.g. 'setup' / 'install-restore')
  [string] $AuthToken,      # token used as the password for internal feed endpoints
  [string] $CommitSha,
  [string] $RepoName,
  [switch] $IsFeedPrivate
)

# Fail fast on any error and enforce strict evaluation before loading helpers.
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
. $PSScriptRoot\tools.ps1
12 |
13 | # Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
14 | # in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
15 | # https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables. This should ONLY be called from identified
16 | # internal builds
function SetupCredProvider {
  # Installs the artifacts-credprovider NuGet plugin, then publishes every
  # darc-int-* feed from NuGet.config (paired with $AuthToken) through the
  # VSS_NUGET_EXTERNAL_FEED_ENDPOINTS variable so restores can authenticate.
  param(
    [string] $AuthToken   # password attached to every internal feed endpoint
  )

  # Install the Cred Provider NuGet plugin
  Write-Host 'Setting up Cred Provider NuGet plugin in the agent...'
  Write-Host "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..."

  $url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1'

  Write-Host "Writing the contents of 'installcredprovider.ps1' locally..."
  Invoke-WebRequest $url -OutFile installcredprovider.ps1

  Write-Host 'Installing plugin...'
  .\installcredprovider.ps1 -Force

  Write-Host "Deleting local copy of 'installcredprovider.ps1'..."
  Remove-Item .\installcredprovider.ps1

  # Fix: verify the plugin directory actually exists with Test-Path. The
  # previous check negated a non-empty string literal, so it was always
  # $false and the failure branch was unreachable.
  if (-Not (Test-Path "$env:USERPROFILE\.nuget\plugins\netcore")) {
    Write-PipelineTelemetryError -Category 'Arcade' -Message 'CredProvider plugin was not installed correctly!'
    ExitWithExitCode 1
  }
  else {
    Write-Host 'CredProvider plugin was installed correctly!'
  }

  # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
  # feeds successfully

  $nugetConfigPath = Join-Path $RepoRoot "NuGet.config"

  if (-Not (Test-Path -Path $nugetConfigPath)) {
    Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!'
    ExitWithExitCode 1
  }

  # Collect every darc-int-* package source URL declared in NuGet.config.
  $endpoints = New-Object System.Collections.ArrayList
  $nugetConfigPackageSources = Select-Xml -Path $nugetConfigPath -XPath "//packageSources/add[contains(@key, 'darc-int-')]/@value" | foreach{$_.Node.Value}

  if (($nugetConfigPackageSources | Measure-Object).Count -gt 0 ) {
    foreach ($stableRestoreResource in $nugetConfigPackageSources) {
      $trimmedResource = ([string]$stableRestoreResource).Trim()
      [void]$endpoints.Add(@{endpoint="$trimmedResource"; password="$AuthToken"})
    }
  }

  if (($endpoints | Measure-Object).Count -gt 0) {
    $endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress

    # Create the environment variables the AzDo way
    Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $endpointCredentials -Properties @{
      'variable' = 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS'
      'issecret' = 'false'
    }

    # We don't want sessions cached since we will be updating the endpoints quite frequently
    Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data 'False' -Properties @{
      'variable' = 'NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED'
      'issecret' = 'false'
    }
  }
  else
  {
    Write-Host 'No internal endpoints found in NuGet.config'
  }
}
85 |
# Workaround for https://github.com/microsoft/msbuild/issues/4430:
# restore the Arcade SDK using a pinned 2.1 dotnet SDK, then clean up after ourselves.
function InstallDotNetSdkAndRestoreArcade {
  $sdkInstallDir = Join-Path $RepoRoot "dotnet"
  $sdkVersion = "2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
  $dotnetExe = Join-Path $sdkInstallDir "dotnet.exe"
  $restoreProject = Join-Path $PSScriptRoot "restore.proj"

  Write-Host "Installing dotnet SDK version $sdkVersion to restore Arcade SDK..."
  InstallDotNetSdk "$sdkInstallDir" "$sdkVersion"

  # An empty project file is enough: restoring it pulls the Arcade SDK packages down.
  '' | Out-File "$restoreProject"

  & $dotnetExe restore $restoreProject

  Write-Host 'Arcade SDK restored!'

  # Remove the scratch project and the temporary SDK install; neither is needed again.
  if (Test-Path -Path $restoreProject) {
    Remove-Item $restoreProject
  }

  if (Test-Path -Path $sdkInstallDir) {
    Remove-Item $sdkInstallDir -Recurse
  }
}
110 |
# Entry point: dispatch on $Operation ($Operation and $AuthToken are script
# parameters declared above this chunk — presumably via a param() block; verify).
# Runs from the script directory so relative paths (installcredprovider.ps1,
# restore.proj) resolve next to this script.
try {
  Push-Location $PSScriptRoot

  # NOTE: '-like' without wildcards behaves as a case-insensitive equality check here.
  if ($Operation -like 'setup') {
    SetupCredProvider $AuthToken
  }
  elseif ($Operation -like 'install-restore') {
    InstallDotNetSdkAndRestoreArcade
  }
  else {
    # Unrecognized operation: report through pipeline telemetry and fail the task.
    Write-PipelineTelemetryError -Category 'Arcade' -Message "Unknown operation '$Operation'!"
    ExitWithExitCode 1
  }
}
catch {
  # Surface the stack trace in the log, then fail with a telemetry-categorized error.
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Category 'Arcade' -Message $_
  ExitWithExitCode 1
}
finally {
  # Always restore the caller's working directory, even on failure.
  Pop-Location
}
133 |
--------------------------------------------------------------------------------
/eng/common/templates/steps/source-build.yml:
--------------------------------------------------------------------------------
parameters:
  # This template adds arcade-powered source-build to CI.

  # This is a 'steps' template, and is intended for advanced scenarios where the existing build
  # infra has a careful build methodology that must be followed. For example, a repo
  # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
  # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
  # GitHub. Using this steps template leaves room for that infra to be included.

  # Defines the platform on which to run the steps. See 'eng/common/templates/job/source-build.yml'
  # for details. The entire object is described in the 'job' template for simplicity, even though
  # the usage of the properties on this object is split between the 'job' and 'steps' templates.
  platform: {}

steps:
# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
- script: |
    set -x
    df -h

    # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
    # In that case, call the feed setup script to add internal feeds corresponding to public ones.
    # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
    # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
    # changes.
    # NOTE: the "'$(var)' != '$''(var)'" comparisons in this script are a macro-substitution
    # probe: AzDO replaces $(var) before the script runs, but the $'' on the right-hand side
    # splits the "$(" token so it is never substituted. The two sides differ only when the
    # variable is actually defined for this pipeline.
    internalRestoreArgs=
    if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
      # Temporarily work around https://github.com/dotnet/arcade/issues/7709
      chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
      $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
      internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'

      # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
      # This only works if there is a username/email configured, which won't be the case in most CI runs.
      git config --get user.email
      if [ $? -ne 0 ]; then
        git config user.email dn-bot@microsoft.com
        git config user.name dn-bot
      fi
    fi

    # If building on the internal project, the internal storage variable may be available (usually only if needed)
    # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
    # in the default public locations.
    internalRuntimeDownloadArgs=
    if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
      internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
    fi

    buildConfig=Release
    # Check if AzDO substitutes in a build config from a variable, and use it if so.
    if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
      buildConfig='$(_BuildConfig)'
    fi

    # Official-build args only when running internally and not for a PR; the template
    # expression below is evaluated by AzDO at compile time to the literal 'True'/'False'.
    officialBuildArgs=
    if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
      officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
    fi

    targetRidArgs=
    if [ '${{ parameters.platform.targetRID }}' != '' ]; then
      targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
    fi

    publishArgs=
    if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
      publishArgs='--publish'
    fi

    # Invoke the repo's build entry point (or a platform-specific override).
    ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
      --configuration $buildConfig \
      --restore --build --pack $publishArgs -bl \
      $officialBuildArgs \
      $internalRuntimeDownloadArgs \
      $internalRestoreArgs \
      $targetRidArgs \
      /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
      /p:ArcadeBuildFromSource=true
  displayName: Build

# Upload build logs for diagnosis.
- task: CopyFiles@2
  displayName: Prepare BuildLogs staging directory
  inputs:
    SourceFolder: '$(Build.SourcesDirectory)'
    Contents: |
      **/*.log
      **/*.binlog
      artifacts/source-build/self/prebuilt-report/**
    TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
    CleanTargetFolder: true
  continueOnError: true
  condition: succeededOrFailed()

- task: PublishPipelineArtifact@1
  displayName: Publish BuildLogs
  inputs:
    targetPath: '$(Build.StagingDirectory)/BuildLogs'
    artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
  continueOnError: true
  condition: succeededOrFailed()
--------------------------------------------------------------------------------
/eng/common/templates/job/publish-build-assets.yml:
--------------------------------------------------------------------------------
parameters:
  configuration: 'Debug'

  # Optional: condition for the job to run
  condition: ''

  # Optional: 'true' if future jobs should run even if this job fails
  continueOnError: false

  # Optional: dependencies of the job
  dependsOn: ''

  # Optional: Include PublishBuildArtifacts task
  enablePublishBuildArtifacts: false

  # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
  pool: {}

  # Optional: should run as a public build even in the internal project
  # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
  runAsPublic: false

  # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
  publishUsingPipelines: false

jobs:
- job: Asset_Registry_Publish

  dependsOn: ${{ parameters.dependsOn }}

  displayName: Publish to Build Asset Registry

  pool: ${{ parameters.pool }}

  variables:
  # Variables (incl. secret groups) are only injected for internal, non-PR builds.
  - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
    - name: _BuildConfig
      value: ${{ parameters.configuration }}
    - group: Publish-Build-Assets
    - group: AzureDevOps-Artifact-Feeds-Pats
    # Prevent codesign-validation injection for this publishing-only job.
    - name: runCodesignValidationInjection
      value: false

  steps:
  # Internal, non-PR builds only: fetch the asset manifests produced by the build jobs.
  - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
    - task: DownloadBuildArtifacts@0
      displayName: Download artifact
      inputs:
        artifactName: AssetManifests
        downloadPath: '$(Build.StagingDirectory)/Download'
        checkDownloadedFiles: true
      condition: ${{ parameters.condition }}
      continueOnError: ${{ parameters.continueOnError }}

  # Internal, non-PR builds only: authenticate to feeds and push assets to the
  # Build Asset Registry via the PublishBuildAssets SDK task.
  - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
    - task: NuGetAuthenticate@0

    - task: PowerShell@2
      displayName: Enable cross-org NuGet feed authentication
      inputs:
        filePath: $(Build.SourcesDirectory)/eng/common/enable-cross-org-publishing.ps1
        arguments: -token $(dn-bot-all-orgs-artifact-feeds-rw)

    - task: PowerShell@2
      displayName: Publish Build Assets
      inputs:
        filePath: eng\common\sdk-task.ps1
        arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
          /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
          /p:BuildAssetRegistryToken=$(MaestroAccessToken)
          /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
          /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
          /p:Configuration=$(_BuildConfig)
          /p:OfficialBuildId=$(Build.BuildNumber)
      condition: ${{ parameters.condition }}
      continueOnError: ${{ parameters.continueOnError }}

    # Write the BAR build id, default channels and stability flag (set by the
    # publish task above) into a text file consumed by post-build stages.
    - task: powershell@2
      displayName: Create ReleaseConfigs Artifact
      inputs:
        targetType: inline
        script: |
          Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
          Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
          Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)

    - task: PublishBuildArtifacts@1
      displayName: Publish ReleaseConfigs Artifact
      inputs:
        PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
        PublishLocation: Container
        ArtifactName: ReleaseConfigs

    # Set a job variable so the next step can be conditioned on the file's presence.
    - task: powershell@2
      displayName: Check if SymbolPublishingExclusionsFile.txt exists
      inputs:
        targetType: inline
        script: |
          $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
          if(Test-Path -Path $symbolExclusionfile)
          {
            Write-Host "SymbolExclusionFile exists"
            Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
          }
          else{
            Write-Host "Symbols Exclusion file does not exists"
            Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
          }

    # NOTE(review): this reuses ArtifactName 'ReleaseConfigs' (same container as the
    # step above) — presumably so downstream stages find both files in one artifact;
    # confirm before renaming.
    - task: PublishBuildArtifacts@1
      displayName: Publish SymbolPublishingExclusionsFile Artifact
      condition: eq(variables['SymbolExclusionFile'], 'true')
      inputs:
        PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
        PublishLocation: Container
        ArtifactName: ReleaseConfigs

  - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
    - template: /eng/common/templates/steps/publish-logs.yml
      parameters:
        JobLabel: 'Publish_Artifacts_Logs'
--------------------------------------------------------------------------------
/azure-pipelines-official.yml:
--------------------------------------------------------------------------------
resources:
- repo: self
  clean: true

# Details in https://github.com/NuGet/Client.Engineering/issues/2770
# (FIX: this link was previously emitted as a bare '- ' sequence item under
# 'resources:', which is not a valid resource entry and fails YAML schema
# validation; it is now a comment.)

# The variables `_DotNetArtifactsCategory` and `_DotNetValidationArtifactsCategory` are required for proper publishing of build artifacts. See https://github.com/dotnet/roslyn/pull/38259
variables:
- name: _DotNetArtifactsCategory
  value: .NETCore
- name: _DotNetValidationArtifactsCategory
  value: .NETCoreValidation
- name: Codeql.Enabled
  value: false
- name: Codeql.TSAEnabled
  value: false
- name: Codeql.TSAOptionsPath
  value: $(Build.SourcesDirectory)\.config\TSAOptions.json
- name: notificationAlias
  value: $(TsaNotificationEmail)

# Branches that trigger a build on commit
trigger:
- dev

stages:
- stage: publish
  displayName: Publishing
  variables:
  - group: DotNet-Symbol-Server-Pats

  pool:
    name: NetCore1ESPool-Internal
    demands:
    - ImageOverride -equals 1es-windows-2022
    - cmd

  jobs:
  - job: OfficialBuild
    displayName: Official Build

    steps:
    - task: CredScan@3
      inputs:
        # Output in PREFast format so TSAUpload can consume it.
        outputFormat: pre

    - task: PoliCheck@2
      inputs:
        inputType: "Basic"
        targetType: "F"
        targetArgument: "$(Build.SourcesDirectory)"
        result: "PoliCheck.xml"

    # CodeQL runs only for real-signed builds; enable it at runtime via logging commands.
    - bash: |
        if [ "$(SignType)" = "Real" ]; then
          echo 'Codeql scan enabled'
          echo "##vso[task.setvariable variable=Codeql.Enabled]true"
          echo "##vso[task.setvariable variable=Codeql.TSAEnabled]true"
        else
          echo 'Codeql scan Disabled'
        fi
      displayName: "Set CodeQl variables"

    - task: CodeQL3000Init@0
      displayName: Initialize CodeQL
      condition: "and(succeeded(), eq(variables['Codeql.Enabled'], 'true'))"

    - task: NuGetAuthenticate@0

    - task: MicroBuildSigningPlugin@3
      displayName: Install Signing Plugin
      inputs:
        signType: $(SignType)
        zipSources: false
        feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
      env:
        TeamName: nuget.client
      condition: and(succeeded(), ne(variables['SignType'], ''))

    - script: eng\common\CIBuild.cmd
        -configuration $(BuildConfiguration)
        /p:OfficialBuildId=$(Build.BuildNumber)
        /p:DotNetSignType=$(SignType)
        /p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
        /p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
        /p:DotNetArtifactsCategory=$(_DotNetArtifactsCategory)
        /p:DotnetPublishUsingPipelines=true
      displayName: Build

    - task: CodeQL3000Finalize@0
      displayName: Finalize CodeQL
      condition: "and(succeeded(), eq(variables['Codeql.Enabled'], 'true'))"

    - template: eng\common\templates\steps\generate-sbom.yml

    # Logs and test results are only published when the build failed.
    - task: PublishBuildArtifacts@1
      displayName: Publish Logs
      inputs:
        PathtoPublish: '$(Build.SourcesDirectory)\artifacts\log\$(BuildConfiguration)'
        ArtifactName: 'Logs'
        publishLocation: Container
      continueOnError: true
      condition: not(succeeded())

    - task: PublishBuildArtifacts@1
      displayName: Publish Logs
      inputs:
        PathtoPublish: '$(Build.SourcesDirectory)\artifacts\TestResults\$(BuildConfiguration)'
        ArtifactName: 'TestResults'
        publishLocation: Container
      condition: not(succeeded())

    - task: SdtReport@2
      displayName: "Generate Analysis Report"
      inputs:
        CredScan: true
        PoliCheck: true
        APIScan: false
        ToolLogsNotFoundAction: "Standard"

    - task: TSAUpload@2
      displayName: "TSA upload to Codebase NuGet.PackageSourceMapper"
      inputs:
        GdnPublishTsaOnboard: false
        GdnPublishTsaConfigFile: '$(Build.SourcesDirectory)\.config\TSAConfig.gdntsa' # All relevant settings are in this file.
        GdnPublishTsaExportedResultsPublishable: true

    # Publish our NuPkgs as an artifact. The name of this artifact must be PackageArtifacts as the
    # arcade templates depend on the name.'
    - task: PublishBuildArtifacts@1
      displayName: Publish Packages
      inputs:
        PathtoPublish: '$(Build.SourcesDirectory)\artifacts\packages\$(BuildConfiguration)'
        ArtifactName: 'PackageArtifacts'
      condition: succeeded()

    - task: ms-vseng.MicroBuildTasks.521a94ea-9e68-468a-8167-6dcf361ea776.MicroBuildCleanup@1
      displayName: Cleanup
      condition: always()

    - task: PublishBuildArtifacts@1
      displayName: Publish MicroBuild Artifacts
      inputs:
        PathtoPublish: '$(Build.ArtifactStagingDirectory)\MicroBuild\Output'
        ArtifactName: MicroBuildOutputs
        publishLocation: Container
      condition: succeededOrFailed()
--------------------------------------------------------------------------------
/eng/common/cross/build-android-rootfs.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
set -e
__NDK_Version=r21

# Print help and exit non-zero.
# BUGFIX: the original used `echo.` (cmd.exe syntax for a blank line); in bash
# that runs a nonexistent command named 'echo.', printing 'command not found'
# to stderr. Use `echo ""` for blank lines instead.
usage()
{
    echo "Creates a toolchain and sysroot used for cross-compiling for Android."
    echo ""
    echo "Usage: $0 [BuildArch] [ApiLevel]"
    echo ""
    echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
    echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
    echo ""
    echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
    echo "by setting the TOOLCHAIN_DIR environment variable"
    echo ""
    echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation,"
    echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK."
    echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.28-arm64. This file is to replace '/etc/os-release', which is not available for Android."
    exit 1
}
22 |
# Defaults: arm64 at API level 28. The minimum platform for arm64 is API level 21,
# but the minimum version that supports glob(3) is 28.
# See $ANDROID_NDK/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include/glob.h
__ApiLevel=28
__BuildArch=arm64
__AndroidArch=aarch64
__AndroidToolchain=aarch64-linux-android

for i in "$@"
do
    # Normalize the argument to lower case before matching.
    lowerI="$(echo "$i" | tr "[:upper:]" "[:lower:]")"
    case "$lowerI" in
        # BUGFIX: '-?' must be quoted — unquoted it is a glob pattern matching ANY
        # two-character dash option (e.g. '-x'), silently turning typos into a help
        # request instead of passing them through as unprocessed args.
        '-?'|-h|--help)
            usage
            exit 1
            ;;
        arm64)
            __BuildArch=arm64
            __AndroidArch=aarch64
            __AndroidToolchain=aarch64-linux-android
            ;;
        arm)
            __BuildArch=arm
            __AndroidArch=arm
            __AndroidToolchain=arm-linux-androideabi
            ;;
        *[0-9])
            # A trailing digit means an API level override.
            __ApiLevel=$i
            ;;
        *)
            # Anything else is accumulated for potential downstream consumers.
            __UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
            ;;
    esac
done
54 |
# Obtain the location of the bash script to figure out where the root of the repo is.
__ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

__CrossDir="$__ScriptBaseDir/../../../.tools/android-rootfs"

# BUGFIX: the original tested '-f' (regular file), which is false for an existing
# directory, so mkdir was attempted even when the directory was already there.
# Use '-d' to probe for the directory itself.
if [[ ! -d "$__CrossDir" ]]; then
    mkdir -p "$__CrossDir"
fi

# Resolve absolute path to avoid `../` in build logs
__CrossDir="$( cd "$__CrossDir" && pwd )"

__NDK_Dir="$__CrossDir/android-ndk-$__NDK_Version"
__lldb_Dir="$__CrossDir/lldb"
__ToolchainDir="$__CrossDir/android-ndk-$__NDK_Version"

# Optional environment overrides for pre-existing NDK/toolchain installations.
if [[ -n "$TOOLCHAIN_DIR" ]]; then
    __ToolchainDir="$TOOLCHAIN_DIR"
fi

if [[ -n "$NDK_DIR" ]]; then
    __NDK_Dir="$NDK_DIR"
fi
78 |
echo "Target API level: $__ApiLevel"
echo "Target architecture: $__BuildArch"
echo "NDK location: $__NDK_Dir"
echo "Target Toolchain location: $__ToolchainDir"

# Download the NDK if required.
# FIX: path expansions are now quoted (SC2086) — NDK_DIR/TOOLCHAIN_DIR overrides
# containing spaces previously broke the tests and the wget/unzip arguments.
if [ ! -d "$__NDK_Dir" ]; then
    echo "Downloading the NDK into $__NDK_Dir"
    mkdir -p "$__NDK_Dir"
    wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O "$__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip"
    unzip -q "$__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip" -d "$__CrossDir"
fi

# Download LLDB if required.
if [ ! -d "$__lldb_Dir" ]; then
    mkdir -p "$__lldb_Dir"
    echo "Downloading LLDB into $__lldb_Dir"
    wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O "$__CrossDir/lldb-2.3.3614996-linux-x86_64.zip"
    unzip -q "$__CrossDir/lldb-2.3.3614996-linux-x86_64.zip" -d "$__lldb_Dir"
fi
98 |
# Fetch runtime dependencies from the Termux Debian repository and overlay them
# onto the NDK sysroot.
echo "Download dependencies..."
__TmpDir=$__CrossDir/tmp/$__BuildArch/
mkdir -p "$__TmpDir"

# combined dependencies for coreclr, installer and libraries
__AndroidPackages="libicu"
__AndroidPackages+=" libandroid-glob"
__AndroidPackages+=" liblzma"
__AndroidPackages+=" krb5"
__AndroidPackages+=" openssl"

# NOTE: the unquoted $(wget ...) is intentional — the loop relies on word-splitting
# so that each 'Filename:' label and the path following it arrive as separate loop
# iterations; the bare label is then skipped inside the loop. Do not quote this
# substitution without reworking the loop.
# ${__AndroidPackages// /\\|} turns the space-separated package list into a
# '\|'-joined alternation for grep's basic-regex \( ... \) group.
for path in $(wget -qO- http://termux.net/dists/stable/main/binary-$__AndroidArch/Packages |\
    grep -A15 "Package: \(${__AndroidPackages// /\\|}\)" | grep -v "static\|tool" | grep Filename); do

    if [[ "$path" != "Filename:" ]]; then
        echo "Working on: $path"
        # Stream the .deb archive straight into dpkg and unpack it under the temp dir.
        wget -qO- http://termux.net/$path | dpkg -x - "$__TmpDir"
    fi
done

# Overlay the unpacked Termux file tree onto the toolchain sysroot.
cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/sysroot/usr/"

# Generate platform file for build.sh script to assign to __DistroRid
echo "Generating platform file..."
echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/sysroot/android_platform

# Print copy/paste-able follow-up build commands for the caller; the escaped
# \$\(realpath ...\) is emitted literally so the user's shell evaluates it later.
echo "Now to build coreclr, libraries and installers; run:"
echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
    --subsetCategory coreclr
echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
    --subsetCategory libraries
echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
    --subsetCategory installer
132 |
--------------------------------------------------------------------------------