diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index 9530a155..dfd6b35f 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -3,13 +3,13 @@
-
+
https://github.com/dotnet/arcade
- 69abe6b2063083c0b35fc3a5b16cb2bdbaf5e8b0
+ 3294c57354d5d24eaa6aca9461635ab317163968
-
+
https://github.com/dotnet/arcade
- 69abe6b2063083c0b35fc3a5b16cb2bdbaf5e8b0
+ 3294c57354d5d24eaa6aca9461635ab317163968
diff --git a/eng/common/CIBuild.cmd b/eng/common/CIBuild.cmd
index 56c2f25a..ac1f72bf 100644
--- a/eng/common/CIBuild.cmd
+++ b/eng/common/CIBuild.cmd
@@ -1,2 +1,2 @@
@echo off
-powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"
\ No newline at end of file
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"
diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1
index efa2fd72..fc8d6180 100644
--- a/eng/common/SetupNugetSources.ps1
+++ b/eng/common/SetupNugetSources.ps1
@@ -1,31 +1,31 @@
-# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds.
-# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080
+# This script adds internal feeds required to build commits that depend on internal package sources. For instance,
+# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition, it also
+# enables disabled internal Maestro (darc-int*) feeds.
#
-# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry
-# under <packageSourceCredentials> for each Maestro managed private feed. Two additional credential
-# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport.
+# Optionally, this script also adds a credential entry for each of the internal feeds if a credential is supplied.
#
-# This script needs to be called in every job that will restore packages and which the base repo has
-# private AzDO feeds in the NuGet.config.
-#
-# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)`
-# from the AzureDevOps-Artifact-Feeds-Pats variable group.
-#
-# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing
+# See example call for this script below.
#
# - task: PowerShell@2
-# displayName: Setup Private Feeds Credentials
+# displayName: Setup internal Feeds Credentials
# condition: eq(variables['Agent.OS'], 'Windows_NT')
# inputs:
-# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
-# arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token
+# filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
+# arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $Env:Token
# env:
# Token: $(dn-bot-dnceng-artifact-feeds-rw)
+#
+# Note that the NuGetAuthenticate task should be called after SetupNugetSources.
+# This ensures that:
+# - Appropriate creds are set for the added internal feeds (if not supplied to the script)
+# - The credential provider is installed.
+#
+# This logic is also abstracted into enable-internal-sources.yml.
[CmdletBinding()]
param (
[Parameter(Mandatory = $true)][string]$ConfigFile,
- [Parameter(Mandatory = $true)][string]$Password
+ $Password
)
$ErrorActionPreference = "Stop"
@@ -34,25 +34,42 @@ Set-StrictMode -Version 2.0
. $PSScriptRoot\tools.ps1
+# Adds or enables the package source with the given name
+function AddOrEnablePackageSource($sources, $disabledPackageSources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) {
+ if ($disabledPackageSources -eq $null -or -not (EnableInternalPackageSource -DisabledPackageSources $disabledPackageSources -Creds $creds -PackageSourceName $SourceName)) {
+ AddPackageSource -Sources $sources -SourceName $SourceName -SourceEndPoint $SourceEndPoint -Creds $creds -Username $userName -pwd $Password
+ }
+}
+
# Add source entry to PackageSources
function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) {
$packageSource = $sources.SelectSingleNode("add[@key='$SourceName']")
if ($packageSource -eq $null)
{
+ Write-Host "Adding package source $SourceName"
+
$packageSource = $doc.CreateElement("add")
$packageSource.SetAttribute("key", $SourceName)
$packageSource.SetAttribute("value", $SourceEndPoint)
$sources.AppendChild($packageSource) | Out-Null
}
else {
- Write-Host "Package source $SourceName already present."
+ Write-Host "Package source $SourceName already present and enabled."
}
+
AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd
}
# Add a credential node for the specified source
function AddCredential($creds, $source, $username, $pwd) {
+ # If no cred supplied, don't do anything.
+ if (!$pwd) {
+ return;
+ }
+
+ Write-Host "Inserting credential for feed: " $source
+
# Looks for credential configuration for the given SourceName. Create it if none is found.
$sourceElement = $creds.SelectSingleNode($Source)
if ($sourceElement -eq $null)
@@ -85,24 +102,27 @@ function AddCredential($creds, $source, $username, $pwd) {
$passwordElement.SetAttribute("value", $pwd)
}
-function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $pwd) {
- $maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]")
-
- Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds."
-
- ForEach ($PackageSource in $maestroPrivateSources) {
- Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key
- AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -pwd $pwd
+# Enable all darc-int package sources.
+function EnableMaestroInternalPackageSources($DisabledPackageSources, $Creds) {
+ $maestroInternalSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]")
+ ForEach ($DisabledPackageSource in $maestroInternalSources) {
+ EnableInternalPackageSource -DisabledPackageSources $DisabledPackageSources -Creds $Creds -PackageSourceName $DisabledPackageSource.key
}
}
-function EnablePrivatePackageSources($DisabledPackageSources) {
- $maestroPrivateSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]")
- ForEach ($DisabledPackageSource in $maestroPrivateSources) {
- Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled by deleting it from disabledPackageSource"
+# Enables an internal package source by name, if found. Returns true if the package source was found and enabled, false otherwise.
+function EnableInternalPackageSource($DisabledPackageSources, $Creds, $PackageSourceName) {
+ $DisabledPackageSource = $DisabledPackageSources.SelectSingleNode("add[@key='$PackageSourceName']")
+ if ($DisabledPackageSource) {
+ Write-Host "Enabling internal source '$($DisabledPackageSource.key)'."
+
# Due to https://github.com/NuGet/Home/issues/10291, we must actually remove the disabled entries
$DisabledPackageSources.RemoveChild($DisabledPackageSource)
+
+ AddCredential -Creds $creds -Source $DisabledPackageSource.Key -Username $userName -pwd $Password
+ return $true
}
+ return $false
}
if (!(Test-Path $ConfigFile -PathType Leaf)) {
@@ -110,58 +130,47 @@ if (!(Test-Path $ConfigFile -PathType Leaf)) {
ExitWithExitCode 1
}
-if (!$Password) {
- Write-PipelineTelemetryError -Category 'Build' -Message 'Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Please supply a valid PAT'
- ExitWithExitCode 1
-}
-
# Load NuGet.config
$doc = New-Object System.Xml.XmlDocument
$filename = (Get-Item $ConfigFile).FullName
$doc.Load($filename)
-# Get reference to <packageSources> or create one if none exist already
+# Get reference to <packageSources> - fail if none exist
$sources = $doc.DocumentElement.SelectSingleNode("packageSources")
if ($sources -eq $null) {
- $sources = $doc.CreateElement("packageSources")
- $doc.DocumentElement.AppendChild($sources) | Out-Null
+ Write-PipelineTelemetryError -Category 'Build' -Message "Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. NuGet config file must contain a packageSources section: $ConfigFile"
+ ExitWithExitCode 1
}
-# Looks for a <packageSourceCredentials> node. Create it if none is found.
-$creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials")
-if ($creds -eq $null) {
- $creds = $doc.CreateElement("packageSourceCredentials")
- $doc.DocumentElement.AppendChild($creds) | Out-Null
+$creds = $null
+$feedSuffix = "v3/index.json"
+if ($Password) {
+ $feedSuffix = "v2"
+  # Looks for a <packageSourceCredentials> node. Create it if none is found.
+ $creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials")
+ if ($creds -eq $null) {
+ $creds = $doc.CreateElement("packageSourceCredentials")
+ $doc.DocumentElement.AppendChild($creds) | Out-Null
+ }
}
+$userName = "dn-bot"
+
# Check for disabledPackageSources; we'll enable any darc-int ones we find there
$disabledSources = $doc.DocumentElement.SelectSingleNode("disabledPackageSources")
if ($disabledSources -ne $null) {
Write-Host "Checking for any darc-int disabled package sources in the disabledPackageSources node"
- EnablePrivatePackageSources -DisabledPackageSources $disabledSources
+ EnableMaestroInternalPackageSources -DisabledPackageSources $disabledSources -Creds $creds
}
-
-$userName = "dn-bot"
-
-# Insert credential nodes for Maestro's private feeds
-InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -pwd $Password
-
-# 3.1 uses a different feed url format so it's handled differently here
-$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']")
-if ($dotnet31Source -ne $null) {
- AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password
- AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password
-}
-
-$dotnetVersions = @('5','6','7','8')
+$dotnetVersions = @('5','6','7','8','9','10')
foreach ($dotnetVersion in $dotnetVersions) {
$feedPrefix = "dotnet" + $dotnetVersion;
$dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']")
if ($dotnetSource -ne $null) {
- AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password
- AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password
+ AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password
+ AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password
}
}
-$doc.Save($filename)
\ No newline at end of file
+$doc.Save($filename)
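
The header comment above sketches the intended pipeline usage; the snippet below is a minimal illustration that pairs the script with the NuGetAuthenticate task, as the note recommends. It is a sketch only: the paths mirror the comment's example, and omitting -Password assumes the credential provider supplies the feed credentials.

- task: PowerShell@2
  displayName: Setup internal Feeds Credentials
  condition: eq(variables['Agent.OS'], 'Windows_NT')
  inputs:
    filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
    arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config
- task: NuGetAuthenticate@1
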
diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh
index d387c7ea..b97cc536 100644
--- a/eng/common/SetupNugetSources.sh
+++ b/eng/common/SetupNugetSources.sh
@@ -1,28 +1,27 @@
#!/usr/bin/env bash
-# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds.
-# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080
+# This script adds internal feeds required to build commits that depend on internal package sources. For instance,
+# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition, it also
+# enables disabled internal Maestro (darc-int*) feeds.
+#
+# Optionally, this script also adds a credential entry for each of the internal feeds if a credential is supplied.
#
-# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry
-# under <packageSourceCredentials> for each Maestro's managed private feed. Two additional credential
-# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport.
-#
-# This script needs to be called in every job that will restore packages and which the base repo has
-# private AzDO feeds in the NuGet.config.
-#
-# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)`
-# from the AzureDevOps-Artifact-Feeds-Pats variable group.
-#
-# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing.
+# See example call for this script below.
#
# - task: Bash@3
-# displayName: Setup Private Feeds Credentials
+# displayName: Setup Internal Feeds
# inputs:
-# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
-# arguments: $(Build.SourcesDirectory)/NuGet.config $Token
+# filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.sh
+# arguments: $(System.DefaultWorkingDirectory)/NuGet.config
# condition: ne(variables['Agent.OS'], 'Windows_NT')
-# env:
-# Token: $(dn-bot-dnceng-artifact-feeds-rw)
+# - task: NuGetAuthenticate@1
+#
+# Note that the NuGetAuthenticate task should be called after SetupNugetSources.
+# This ensures that:
+# - Appropriate creds are set for the added internal feeds (if not supplied to the script)
+# - The credential provider is installed.
+#
+# This logic is also abstracted into enable-internal-sources.yml.
ConfigFile=$1
CredToken=$2
@@ -48,88 +47,129 @@ if [ ! -f "$ConfigFile" ]; then
ExitWithExitCode 1
fi
-if [ -z "$CredToken" ]; then
- Write-PipelineTelemetryError -category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Please supply a valid PAT"
- ExitWithExitCode 1
-fi
-
if [[ `uname -s` == "Darwin" ]]; then
NL=$'\\\n'
TB=''
fi
-# Ensure there is a <packageSources>...</packageSources> section.
-grep -i "<packageSources>" $ConfigFile
-if [ "$?" != "0" ]; then
-  echo "Adding <packageSources>...</packageSources> section."
-  ConfigNodeHeader="<configuration>"
-  PackageSourcesTemplate="${TB}<packageSources>${NL}${TB}</packageSources>"
+# Enables an internal package source by name, if found. Returns 0 if found and enabled, 1 if not found.
+EnableInternalPackageSource() {
+ local PackageSourceName="$1"
+
+ # Check if disabledPackageSources section exists
+  grep -i "<disabledPackageSources>" "$ConfigFile" > /dev/null
+ if [ "$?" != "0" ]; then
+ return 1 # No disabled sources section
+ fi
+
+ # Check if this source name is disabled
+  grep -i "<add key=\"$PackageSourceName\" value=\"true\"" "$ConfigFile" > /dev/null
+ if [ "$?" == "0" ]; then
+ echo "Enabling internal source '$PackageSourceName'."
+ # Remove the disabled entry (including any surrounding comments or whitespace on the same line)
+    sed -i.bak "/<add key=\"$PackageSourceName\" value=\"true\"/d" "$ConfigFile"
+
+ # Add the source name to PackageSources for credential handling
+ PackageSources+=("$PackageSourceName")
+ return 0 # Found and enabled
+ fi
+
+ return 1 # Not found in disabled sources
+}
+
+# Add source entry to PackageSources
+AddPackageSource() {
+ local SourceName="$1"
+ local SourceEndPoint="$2"
+
+ # Check if source already exists
+  grep -i "<add key=\"$SourceName\"" "$ConfigFile" > /dev/null
+ if [ "$?" == "0" ]; then
+ echo "Package source $SourceName already present and enabled."
+ PackageSources+=("$SourceName")
+ return
+ fi
+
+ echo "Adding package source $SourceName"
+  PackageSourcesNodeFooter="</packageSources>"
+  PackageSourceTemplate="${TB}<add key=\"$SourceName\" value=\"$SourceEndPoint\" />"
+
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" "$ConfigFile"
+ PackageSources+=("$SourceName")
+}
+
+# Adds or enables the package source with the given name
+AddOrEnablePackageSource() {
+ local SourceName="$1"
+ local SourceEndPoint="$2"
+
+ # Try to enable if disabled, if not found then add new source
+ EnableInternalPackageSource "$SourceName"
+ if [ "$?" != "0" ]; then
+ AddPackageSource "$SourceName" "$SourceEndPoint"
+ fi
+}
- sed -i.bak "s|$ConfigNodeHeader|$ConfigNodeHeader${NL}$PackageSourcesTemplate|" $ConfigFile
-fi
+# Enable all darc-int package sources
+EnableMaestroInternalPackageSources() {
+ # Check if disabledPackageSources section exists
+  grep -i "<disabledPackageSources>" "$ConfigFile" > /dev/null
+ if [ "$?" != "0" ]; then
+ return # No disabled sources section
+ fi
+
+ # Find all darc-int disabled sources
+ local DisabledDarcIntSources=()
+ DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' "$ConfigFile" | tr -d '"')
+
+ for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do
+ if [[ $DisabledSourceName == darc-int* ]]; then
+ EnableInternalPackageSource "$DisabledSourceName"
+ fi
+ done
+}
-# Ensure there is a <packageSourceCredentials>...</packageSourceCredentials> section.
-grep -i "<packageSourceCredentials>" $ConfigFile
+# Ensure there is a <packageSources>...</packageSources> section.
+grep -i "<packageSources>" $ConfigFile
if [ "$?" != "0" ]; then
-  echo "Adding <packageSourceCredentials>...</packageSourceCredentials> section."
-
-  PackageSourcesNodeFooter="</packageSources>"
-  PackageSourceCredentialsTemplate="${TB}<packageSourceCredentials>${NL}${TB}</packageSourceCredentials>"
-
- sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile
+ Write-PipelineTelemetryError -Category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. NuGet config file must contain a packageSources section: $ConfigFile"
+ ExitWithExitCode 1
fi
PackageSources=()
-# Ensure dotnet3.1-internal and dotnet3.1-internal-transport are in the packageSources if the public dotnet3.1 feeds are present
-grep -i "<add key=\"dotnet3.1\"" $ConfigFile
-if [ "$?" == "0" ]; then
-  grep -i "<add key=\"dotnet3.1-internal\"" $ConfigFile
+if [ "$CredToken" ]; then
+  # Ensure there is a <packageSourceCredentials>...</packageSourceCredentials> section.
+  grep -i "<packageSourceCredentials>" $ConfigFile
if [ "$?" != "0" ]; then
- echo "Adding dotnet3.1-internal to the packageSources."
-  PackageSourcesNodeFooter="</packageSources>"
-  PackageSourceTemplate="${TB}<add key=\"dotnet3.1-internal\" value=\"https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2\" />"
+  echo "Adding <packageSourceCredentials>...</packageSourceCredentials> section."
- sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
- fi
- PackageSources+=('dotnet3.1-internal')
-
-  grep -i "<add key=\"dotnet3.1-internal-transport\"" $ConfigFile
- if [ "$?" != "0" ]; then
- echo "Adding dotnet3.1-internal-transport to the packageSources."
     PackageSourcesNodeFooter="</packageSources>"
-    PackageSourceTemplate="${TB}<add key=\"dotnet3.1-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2\" />"
+    PackageSourceCredentialsTemplate="${TB}<packageSourceCredentials>${NL}${TB}</packageSourceCredentials>"
- sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile
fi
- PackageSources+=('dotnet3.1-internal-transport')
fi
-DotNetVersions=('5' '6' '7' '8')
+# Check for disabledPackageSources; we'll enable any darc-int ones we find there
+grep -i "<disabledPackageSources>" $ConfigFile > /dev/null
+if [ "$?" == "0" ]; then
+ echo "Checking for any darc-int disabled package sources in the disabledPackageSources node"
+ EnableMaestroInternalPackageSources
+fi
+
+DotNetVersions=('5' '6' '7' '8' '9' '10')
for DotNetVersion in ${DotNetVersions[@]} ; do
FeedPrefix="dotnet${DotNetVersion}";
-  grep -i "<add key=\"$FeedPrefix\"" $ConfigFile
+  grep -i "<add key=\"$FeedPrefix\"" $ConfigFile > /dev/null
   if [ "$?" == "0" ]; then
-    grep -i "<add key=\"$FeedPrefix-internal\"" $ConfigFile
-    if [ "$?" != "0" ]; then
-    echo "Adding $FeedPrefix-internal to the packageSources."
-    PackageSourcesNodeFooter="</packageSources>"
-    PackageSourceTemplate="${TB}<add key=\"$FeedPrefix-internal\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal/nuget/v2\" />"
-
-    sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
-  fi
-  PackageSources+=("$FeedPrefix-internal")
-
-  grep -i "<add key=\"$FeedPrefix-internal-transport\"" $ConfigFile
-  if [ "$?" != "0" ]; then
-    echo "Adding $FeedPrefix-internal-transport to the packageSources."
-    PackageSourcesNodeFooter="</packageSources>"
-    PackageSourceTemplate="${TB}<add key=\"$FeedPrefix-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal-transport/nuget/v2\" />"
-
-    sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
-  fi
-  PackageSources+=("$FeedPrefix-internal-transport")
+ AddOrEnablePackageSource "$FeedPrefix-internal" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal/nuget/$FeedSuffix"
+ AddOrEnablePackageSource "$FeedPrefix-internal-transport" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal-transport/nuget/$FeedSuffix"
fi
done
@@ -140,32 +180,17 @@ PackageSources+="$IFS"
PackageSources+=$(grep -oh '"darc-int-[^"]*"' $ConfigFile | tr -d '"')
IFS=$PrevIFS
-for FeedName in ${PackageSources[@]} ; do
- # Check if there is no existing credential for this FeedName
- grep -i "<$FeedName>" $ConfigFile
- if [ "$?" != "0" ]; then
- echo "Adding credentials for $FeedName."
-
-    PackageSourceCredentialsNodeFooter="</packageSourceCredentials>"
-    NewCredential="${TB}${TB}<$FeedName>${NL}<add key=\"Username\" value=\"dn-bot\" />${NL}<add key=\"ClearTextPassword\" value=\"$CredToken\" />${NL}</$FeedName>"
+if [ "$CredToken" ]; then
+ for FeedName in ${PackageSources[@]} ; do
+ # Check if there is no existing credential for this FeedName
+ grep -i "<$FeedName>" $ConfigFile
+ if [ "$?" != "0" ]; then
+ echo " Inserting credential for feed: $FeedName"
- sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile
- fi
-done
+      PackageSourceCredentialsNodeFooter="</packageSourceCredentials>"
+      NewCredential="${TB}${TB}<$FeedName>${NL}${TB}<add key=\"Username\" value=\"dn-bot\" />${NL}${TB}${TB}<add key=\"ClearTextPassword\" value=\"$CredToken\" />${NL}${TB}${TB}</$FeedName>"
-# Re-enable any entries in disabledPackageSources where the feed name contains darc-int
-grep -i "<disabledPackageSources>" $ConfigFile
-if [ "$?" == "0" ]; then
- DisabledDarcIntSources=()
- echo "Re-enabling any disabled \"darc-int\" package sources in $ConfigFile"
- DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' $ConfigFile | tr -d '"')
- for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do
- if [[ $DisabledSourceName == darc-int* ]]
- then
-      OldDisableValue="<add key=\"$DisabledSourceName\" value=\"true\" />"
-      NewDisableValue="<!-- Reenabled for build : $DisabledSourceName -->"
- sed -i.bak "s|$OldDisableValue|$NewDisableValue|" $ConfigFile
- echo "Neutralized disablePackageSources entry for '$DisabledSourceName'"
+ sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile
fi
done
fi
diff --git a/eng/common/build.cmd b/eng/common/build.cmd
new file mode 100644
index 00000000..99daf368
--- /dev/null
+++ b/eng/common/build.cmd
@@ -0,0 +1,3 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0build.ps1""" %*"
+exit /b %ErrorLevel%
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
index 33a6f2d0..18397a60 100644
--- a/eng/common/build.ps1
+++ b/eng/common/build.ps1
@@ -6,7 +6,9 @@ Param(
[string][Alias('v')]$verbosity = "minimal",
[string] $msbuildEngine = $null,
[bool] $warnAsError = $true,
+ [string] $warnNotAsError = '',
[bool] $nodeReuse = $true,
+ [switch] $buildCheck = $false,
[switch][Alias('r')]$restore,
[switch] $deployDeps,
[switch][Alias('b')]$build,
@@ -19,6 +21,8 @@ Param(
[switch] $pack,
[switch] $publish,
[switch] $clean,
+ [switch][Alias('pb')]$productBuild,
+ [switch]$fromVMR,
[switch][Alias('bl')]$binaryLog,
[switch][Alias('nobl')]$excludeCIBinarylog,
[switch] $ci,
@@ -58,6 +62,7 @@ function Print-Usage() {
Write-Host " -sign Sign build outputs"
Write-Host " -publish Publish artifacts (e.g. symbols)"
Write-Host " -clean Clean the solution"
+ Write-Host " -productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)"
Write-Host ""
Write-Host "Advanced settings:"
@@ -66,9 +71,13 @@ function Print-Usage() {
Write-Host " -excludeCIBinarylog Don't output binary log (short: -nobl)"
Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+ Write-Host " -warnNotAsError Sets a semi-colon delimited list of warning codes that should not be treated as errors"
Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio"
Write-Host " -nativeToolsOnMachine Sets the native tools on machine environment variable (indicating that the script should use native tools on machine)"
+ Write-Host " -nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')"
+ Write-Host " -buildCheck Sets /check msbuild parameter"
+ Write-Host " -fromVMR Set when building from within the VMR"
Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild."
@@ -95,6 +104,7 @@ function Build {
$bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' }
$platformArg = if ($platform) { "/p:Platform=$platform" } else { '' }
+ $check = if ($buildCheck) { '/check' } else { '' }
if ($projects) {
# Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons.
@@ -111,6 +121,7 @@ function Build {
MSBuild $toolsetBuildProj `
$bl `
$platformArg `
+ $check `
/p:Configuration=$configuration `
/p:RepoRoot=$RepoRoot `
/p:Restore=$restore `
@@ -120,10 +131,13 @@ function Build {
/p:Deploy=$deploy `
/p:Test=$test `
/p:Pack=$pack `
+ /p:DotNetBuild=$productBuild `
+ /p:DotNetBuildFromVMR=$fromVMR `
/p:IntegrationTest=$integrationTest `
/p:PerformanceTest=$performanceTest `
/p:Sign=$sign `
/p:Publish=$publish `
+ /p:RestoreStaticGraphEnableBinaryLogger=$binaryLog `
@properties
}
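
As a hedged illustration of the new switches documented above (-productBuild, -buildCheck, -warnNotAsError), a pipeline step invoking build.ps1 might look like the sketch below; the warning code NU1605 and the displayName are placeholders, not something this diff prescribes.

- task: PowerShell@2
  displayName: Build (product build with /check)
  inputs:
    filePath: $(Build.SourcesDirectory)/eng/common/build.ps1
    arguments: -restore -build -pack -productBuild -buildCheck -warnNotAsError NU1605 -ci
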
diff --git a/eng/common/build.sh b/eng/common/build.sh
index 50af40cd..5883e53b 100644
--- a/eng/common/build.sh
+++ b/eng/common/build.sh
@@ -22,6 +22,9 @@ usage()
echo " --sourceBuild Source-build the solution (short: -sb)"
echo " Will additionally trigger the following actions: --restore, --build, --pack"
echo " If --configuration is not set explicitly, will also set it to 'Release'"
+ echo " --productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)"
+ echo " Will additionally trigger the following actions: --restore, --build, --pack"
+ echo " If --configuration is not set explicitly, will also set it to 'Release'"
echo " --rebuild Rebuild solution"
echo " --test Run all unit tests in the solution (short: -t)"
echo " --integrationTest Run all integration tests in the solution"
@@ -39,6 +42,9 @@ usage()
echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')"
echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+ echo " --warnNotAsError Sets a semi-colon delimited list of warning codes that should not be treated as errors"
+ echo " --buildCheck Sets /check msbuild parameter"
+ echo " --fromVMR Set when building from within the VMR"
echo ""
echo "Command line arguments not listed above are passed thru to msbuild."
echo "Arguments can also be passed in with a single hyphen."
@@ -59,6 +65,8 @@ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
restore=false
build=false
source_build=false
+product_build=false
+from_vmr=false
rebuild=false
test=false
integration_test=false
@@ -71,7 +79,9 @@ ci=false
clean=false
warn_as_error=true
+warn_not_as_error=''
node_reuse=true
+build_check=false
binary_log=false
exclude_ci_binary_log=false
pipelines_log=false
@@ -83,8 +93,8 @@ verbosity='minimal'
runtime_source_feed=''
runtime_source_feed_key=''
-properties=''
-while [[ $# > 0 ]]; do
+properties=()
+while [[ $# -gt 0 ]]; do
opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
-help|-h)
@@ -105,7 +115,7 @@ while [[ $# > 0 ]]; do
-binarylog|-bl)
binary_log=true
;;
- -excludeCIBinarylog|-nobl)
+ -excludecibinarylog|-nobl)
exclude_ci_binary_log=true
;;
-pipelineslog|-pl)
@@ -123,12 +133,22 @@ while [[ $# > 0 ]]; do
-pack)
pack=true
;;
- -sourcebuild|-sb)
+ -sourcebuild|-source-build|-sb)
build=true
source_build=true
+ product_build=true
+ restore=true
+ pack=true
+ ;;
+ -productbuild|-product-build|-pb)
+ build=true
+ product_build=true
restore=true
pack=true
;;
+ -fromvmr|-from-vmr)
+ from_vmr=true
+ ;;
-test|-t)
test=true
;;
@@ -158,10 +178,17 @@ while [[ $# > 0 ]]; do
warn_as_error=$2
shift
;;
+ -warnnotaserror)
+ warn_not_as_error=$2
+ shift
+ ;;
-nodereuse)
node_reuse=$2
shift
;;
+ -buildcheck)
+ build_check=true
+ ;;
-runtimesourcefeed)
runtime_source_feed=$2
shift
@@ -171,7 +198,7 @@ while [[ $# > 0 ]]; do
shift
;;
*)
- properties="$properties $1"
+ properties+=("$1")
;;
esac
@@ -205,7 +232,7 @@ function Build {
InitializeCustomToolset
if [[ ! -z "$projects" ]]; then
- properties="$properties /p:Projects=$projects"
+ properties+=("/p:Projects=$projects")
fi
local bl=""
@@ -213,13 +240,21 @@ function Build {
bl="/bl:\"$log_dir/Build.binlog\""
fi
+ local check=""
+ if [[ "$build_check" == true ]]; then
+ check="/check"
+ fi
+
MSBuild $_InitializeToolset \
$bl \
+ $check \
/p:Configuration=$configuration \
/p:RepoRoot="$repo_root" \
/p:Restore=$restore \
/p:Build=$build \
- /p:ArcadeBuildFromSource=$source_build \
+ /p:DotNetBuild=$product_build \
+ /p:DotNetBuildSourceOnly=$source_build \
+ /p:DotNetBuildFromVMR=$from_vmr \
/p:Rebuild=$rebuild \
/p:Test=$test \
/p:Pack=$pack \
@@ -227,7 +262,8 @@ function Build {
/p:PerformanceTest=$performance_test \
/p:Sign=$sign \
/p:Publish=$publish \
- $properties
+ /p:RestoreStaticGraphEnableBinaryLogger=$binary_log \
+ ${properties[@]+"${properties[@]}"}
ExitWithExitCode 0
}
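
The bash entry point exposes the same options as long-form flags; a non-Windows step might look like the sketch below. Per the case arm above, --productBuild (-pb) already implies --restore, --build and --pack; the warning code NU1605 is again only a placeholder.

- script: ./eng/common/build.sh --productBuild --warnNotAsError NU1605 --ci
  displayName: Build (product build)
  condition: ne(variables['Agent.OS'], 'Windows_NT')
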
diff --git a/eng/common/cibuild.sh b/eng/common/cibuild.sh
index 1a02c0de..66e3b0ac 100644
--- a/eng/common/cibuild.sh
+++ b/eng/common/cibuild.sh
@@ -13,4 +13,4 @@ while [[ -h $source ]]; do
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
-. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@
\ No newline at end of file
+. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@
diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml
new file mode 100644
index 00000000..a427eac2
--- /dev/null
+++ b/eng/common/core-templates/job/job.yml
@@ -0,0 +1,231 @@
+parameters:
+# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ cancelTimeoutInMinutes: ''
+ condition: ''
+ container: ''
+ continueOnError: false
+ dependsOn: ''
+ displayName: ''
+ pool: ''
+ steps: []
+ strategy: ''
+ timeoutInMinutes: ''
+ variables: []
+ workspace: ''
+ templateContext: {}
+
+# Job base template specific parameters
+ # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
+ # publishing defaults
+ artifacts: ''
+ enableMicrobuild: false
+ enablePreviewMicrobuild: false
+ microbuildPluginVersion: 'latest'
+ enableMicrobuildForMacAndLinux: false
+ microbuildUseESRP: true
+ enablePublishBuildArtifacts: false
+ enablePublishBuildAssets: false
+ enablePublishTestResults: false
+ enableBuildRetry: false
+ mergeTestResults: false
+ testRunTitle: ''
+ testResultsFormat: ''
+ name: ''
+ preSteps: []
+ artifactPublishSteps: []
+ runAsPublic: false
+
+# 1es specific parameters
+ is1ESPipeline: ''
+
+jobs:
+- job: ${{ parameters.name }}
+
+ ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
+ cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
+
+ ${{ if ne(parameters.condition, '') }}:
+ condition: ${{ parameters.condition }}
+
+ ${{ if ne(parameters.container, '') }}:
+ container: ${{ parameters.container }}
+
+ ${{ if ne(parameters.continueOnError, '') }}:
+ continueOnError: ${{ parameters.continueOnError }}
+
+ ${{ if ne(parameters.dependsOn, '') }}:
+ dependsOn: ${{ parameters.dependsOn }}
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: ${{ parameters.displayName }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+
+ ${{ if ne(parameters.strategy, '') }}:
+ strategy: ${{ parameters.strategy }}
+
+ ${{ if ne(parameters.timeoutInMinutes, '') }}:
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+ ${{ if ne(parameters.templateContext, '') }}:
+ templateContext: ${{ parameters.templateContext }}
+
+ variables:
+ - name: AllowPtrToDetectTestRunRetryFiles
+ value: true
+ - ${{ if ne(parameters.enableTelemetry, 'false') }}:
+ - name: DOTNET_CLI_TELEMETRY_PROFILE
+ value: '$(Build.Repository.Uri)'
+ # Retry signature validation up to three times, waiting 2 seconds between attempts.
+ # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
+ - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
+ value: 3,2000
+ - ${{ each variable in parameters.variables }}:
+ # handle name-value variable syntax
+ # example:
+ # - name: [key]
+ # value: [value]
+ - ${{ if ne(variable.name, '') }}:
+ - name: ${{ variable.name }}
+ value: ${{ variable.value }}
+
+ # handle variable groups
+ - ${{ if ne(variable.group, '') }}:
+ - group: ${{ variable.group }}
+
+ # handle template variable syntax
+ # example:
+ # - template: path/to/template.yml
+ # parameters:
+ # [key]: [value]
+ - ${{ if ne(variable.template, '') }}:
+ - template: ${{ variable.template }}
+ ${{ if ne(variable.parameters, '') }}:
+ parameters: ${{ variable.parameters }}
+
+ # handle key-value variable syntax.
+ # example:
+ # - [key]: [value]
+ - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
+ - ${{ each pair in variable }}:
+ - name: ${{ pair.key }}
+ value: ${{ pair.value }}
+
+ # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
+ - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: DotNet-HelixApi-Access
+
+ ${{ if ne(parameters.workspace, '') }}:
+ workspace: ${{ parameters.workspace }}
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if ne(parameters.preSteps, '') }}:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - template: /eng/common/core-templates/steps/install-microbuild.yml
+ parameters:
+ enableMicrobuild: ${{ parameters.enableMicrobuild }}
+ enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }}
+ microbuildPluginVersion: ${{ parameters.microbuildPluginVersion }}
+ enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }}
+ microbuildUseESRP: ${{ parameters.microbuildUseESRP }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
+ - task: NuGetAuthenticate@1
+
+ - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
+ - task: DownloadPipelineArtifact@2
+ inputs:
+ buildType: current
+ artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
+ targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
+ itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
+
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - template: /eng/common/core-templates/steps/cleanup-microbuild.yml
+ parameters:
+ enableMicrobuild: ${{ parameters.enableMicrobuild }}
+ enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }}
+ microbuildPluginVersion: ${{ parameters.microbuildPluginVersion }}
+ enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ # Publish test results
+ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
+ - task: PublishTestResults@2
+ displayName: Publish XUnit Test Results
+ inputs:
+ testResultsFormat: 'xUnit'
+ testResultsFiles: '*.xml'
+ searchFolder: '$(System.DefaultWorkingDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
+ - task: PublishTestResults@2
+ displayName: Publish TRX Test Results
+ inputs:
+ testResultsFormat: 'VSTest'
+ testResultsFiles: '*.trx'
+ searchFolder: '$(System.DefaultWorkingDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+
+ # gather artifacts
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather binaries for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/bin'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
+ - task: CopyFiles@2
+ displayName: Gather packages for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/packages'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather logs for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/log'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log'
+ continueOnError: true
+ condition: always()
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - task: CopyFiles@2
+ displayName: Gather logs for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/log/$(_BuildConfig)'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ continueOnError: true
+ condition: always()
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - task: CopyFiles@2
+ displayName: Gather buildconfiguration for build retry
+ inputs:
+ SourceFolder: '$(System.DefaultWorkingDirectory)/eng/common/BuildConfiguration'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration'
+ continueOnError: true
+ condition: always()
+ - ${{ each step in parameters.artifactPublishSteps }}:
+ - ${{ step }}
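
This job template is not meant to be referenced directly (note the is1ESPipeline guard at the top of the steps). A minimal consumer sketch follows, assuming the conventional /eng/common/templates/jobs/jobs.yml wrapper, a hosted Windows image and the usual _BuildConfig variable; all of these are assumptions rather than part of this diff.

jobs:
- template: /eng/common/templates/jobs/jobs.yml   # wrapper path assumed
  parameters:
    enableMicrobuild: true
    enablePublishTestResults: true
    enablePublishBuildArtifacts: true
    jobs:
    - job: Windows_NT
      pool:
        vmImage: windows-latest
      steps:
      - script: eng\common\CIBuild.cmd -configuration $(_BuildConfig) -prepareMachine
        displayName: Build and test
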
diff --git a/eng/common/core-templates/job/onelocbuild.yml b/eng/common/core-templates/job/onelocbuild.yml
new file mode 100644
index 00000000..eefed3b6
--- /dev/null
+++ b/eng/common/core-templates/job/onelocbuild.yml
@@ -0,0 +1,118 @@
+parameters:
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: ''
+
+ CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
+ GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
+
+ SourcesDirectory: $(System.DefaultWorkingDirectory)
+ CreatePr: true
+ AutoCompletePr: false
+ ReusePr: true
+ UseLfLineEndings: true
+ UseCheckedInLocProjectJson: false
+ SkipLocProjectJsonGeneration: false
+ LanguageSet: VS_Main_Languages
+ LclSource: lclFilesInRepo
+ LclPackageId: ''
+ RepoType: gitHub
+ GitHubOrg: dotnet
+ MirrorRepo: ''
+ MirrorBranch: main
+ condition: ''
+ JobNameSuffix: ''
+ is1ESPipeline: ''
+jobs:
+- job: OneLocBuild${{ parameters.JobNameSuffix }}
+
+ dependsOn: ${{ parameters.dependsOn }}
+
+ displayName: OneLocBuild${{ parameters.JobNameSuffix }}
+
+ variables:
+ - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
+ - name: _GenerateLocProjectArguments
+ value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
+ -LanguageSet "${{ parameters.LanguageSet }}"
+ -CreateNeutralXlfs
+ - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
+ - name: _GenerateLocProjectArguments
+ value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2025
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: $(DncEngInternalBuildPool)
+ image: windows.vs2026.amd64
+ os: windows
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
+ - task: Powershell@2
+ inputs:
+ filePath: $(System.DefaultWorkingDirectory)/eng/common/generate-locproject.ps1
+ arguments: $(_GenerateLocProjectArguments)
+ displayName: Generate LocProject.json
+ condition: ${{ parameters.condition }}
+
+ - task: OneLocBuild@2
+ displayName: OneLocBuild
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ inputs:
+ locProj: eng/Localize/LocProject.json
+ outDir: $(Build.ArtifactStagingDirectory)
+ lclSource: ${{ parameters.LclSource }}
+ lclPackageId: ${{ parameters.LclPackageId }}
+ isCreatePrSelected: ${{ parameters.CreatePr }}
+ isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
+ ${{ if eq(parameters.CreatePr, true) }}:
+ isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
+ isShouldReusePrSelected: ${{ parameters.ReusePr }}
+ packageSourceAuth: patAuth
+ patVariable: ${{ parameters.CeapexPat }}
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ repoType: ${{ parameters.RepoType }}
+ gitHubPatVariable: "${{ parameters.GithubPat }}"
+ ${{ if ne(parameters.MirrorRepo, '') }}:
+ isMirrorRepoSelected: true
+ gitHubOrganization: ${{ parameters.GitHubOrg }}
+ mirrorRepo: ${{ parameters.MirrorRepo }}
+ mirrorBranch: ${{ parameters.MirrorBranch }}
+ condition: ${{ parameters.condition }}
+
+ # Copy the locProject.json to the root of the Loc directory, then publish a pipeline artifact
+ - task: CopyFiles@2
+ displayName: Copy LocProject.json
+ inputs:
+ SourceFolder: '$(System.DefaultWorkingDirectory)/eng/Localize/'
+ Contents: 'LocProject.json'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/loc'
+ condition: ${{ parameters.condition }}
+
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ targetPath: '$(Build.ArtifactStagingDirectory)/loc'
+ artifactName: 'Loc'
+ displayName: 'Publish Localization Files'
+ condition: ${{ parameters.condition }}
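
A minimal sketch of consuming the localization job; the wrapper path, mirror repo name and LCL package id are placeholders, and LclSource/LclPackageId are only needed when loc files come from a package rather than the repo.

jobs:
- template: /eng/common/templates/job/onelocbuild.yml   # wrapper path assumed
  parameters:
    MirrorRepo: my-repo                # placeholder
    MirrorBranch: main
    LclSource: lclFilesfromPackage
    LclPackageId: 'LCL-PACKAGE-ID'     # placeholder
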
diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml
new file mode 100644
index 00000000..fdfc4f30
--- /dev/null
+++ b/eng/common/core-templates/job/publish-build-assets.yml
@@ -0,0 +1,221 @@
+parameters:
+ configuration: 'Debug'
+
+ # Optional: condition for the job to run
+ condition: ''
+
+ # Optional: 'true' if future jobs should run even if this job fails
+ continueOnError: false
+
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: {}
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
+ publishAssetsImmediately: false
+
+ artifactsPublishingAdditionalParameters: ''
+
+ signingValidationAdditionalParameters: ''
+
+ is1ESPipeline: ''
+
+  # Optional: whether or not the build has assets it wants to publish to BAR
+ isAssetlessBuild: false
+
+ # Optional, publishing version
+ publishingVersion: 3
+
+ # Optional: A minimatch pattern for the asset manifests to publish to BAR
+ assetManifestsPattern: '*/manifests/**/*.xml'
+
+ repositoryAlias: self
+
+ officialBuildId: ''
+
+jobs:
+- job: Asset_Registry_Publish
+
+ dependsOn: ${{ parameters.dependsOn }}
+ timeoutInMinutes: 150
+
+ ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ displayName: Publish Assets
+ ${{ else }}:
+ displayName: Publish to Build Asset Registry
+
+ variables:
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: Publish-Build-Assets
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: runCodesignValidationInjection
+ value: false
+ # unconditional - needed for logs publishing (redactor tool version)
+ - template: /eng/common/core-templates/post-build/common-variables.yml
+ - name: OfficialBuildId
+ ${{ if ne(parameters.officialBuildId, '') }}:
+ value: ${{ parameters.officialBuildId }}
+ ${{ else }}:
+ value: $(Build.BuildNumber)
+
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2025
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: NetCore1ESPool-Publishing-Internal
+ image: windows.vs2026.amd64
+ os: windows
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - checkout: ${{ parameters.repositoryAlias }}
+ fetchDepth: 3
+ clean: true
+
+ - ${{ if eq(parameters.isAssetlessBuild, 'false') }}:
+ - ${{ if eq(parameters.publishingVersion, 3) }}:
+ - task: DownloadPipelineArtifact@2
+ displayName: Download Asset Manifests
+ inputs:
+ artifactName: AssetManifests
+ targetPath: '$(Build.StagingDirectory)/AssetManifests'
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+ - ${{ if eq(parameters.publishingVersion, 4) }}:
+ - task: DownloadPipelineArtifact@2
+ displayName: Download V4 asset manifests
+ inputs:
+ itemPattern: '*/manifests/**/*.xml'
+ targetPath: '$(Build.StagingDirectory)/AllAssetManifests'
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+ - task: CopyFiles@2
+ displayName: Copy V4 asset manifests to AssetManifests
+ inputs:
+ SourceFolder: '$(Build.StagingDirectory)/AllAssetManifests'
+ Contents: ${{ parameters.assetManifestsPattern }}
+ TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
+ flattenFolders: true
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: NuGetAuthenticate@1
+
+ # Populate internal runtime variables.
+ - template: /eng/common/templates/steps/enable-internal-sources.yml
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ parameters:
+ legacyCredential: $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - template: /eng/common/templates/steps/enable-internal-runtimes.yml
+
+ - task: AzureCLI@2
+ displayName: Publish Build Assets
+ inputs:
+ azureSubscription: "Darc: Maestro Production"
+ scriptType: ps
+ scriptLocation: scriptPath
+ scriptPath: $(System.DefaultWorkingDirectory)/eng/common/sdk-task.ps1
+ arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
+ /p:ManifestsPath='$(Build.StagingDirectory)/AssetManifests'
+ /p:IsAssetlessBuild=${{ parameters.isAssetlessBuild }}
+ /p:MaestroApiEndpoint=https://maestro.dot.net
+ /p:OfficialBuildId=$(OfficialBuildId)
+ -runtimeSourceFeed https://ci.dot.net/internal
+ -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
+
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: powershell@2
+ displayName: Create ReleaseConfigs Artifact
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force
+ $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt"
+ Add-Content -Path $filePath -Value $(BARBuildId)
+ Add-Content -Path $filePath -Value "$(DefaultChannels)"
+ Add-Content -Path $filePath -Value $(IsStableBuild)
+
+ $symbolExclusionfile = "$(System.DefaultWorkingDirectory)/eng/SymbolPublishingExclusionsFile.txt"
+ if (Test-Path -Path $symbolExclusionfile)
+ {
+ Write-Host "SymbolExclusionFile exists"
+ Copy-Item -Path $symbolExclusionfile -Destination "$(Build.StagingDirectory)/ReleaseConfigs"
+ }
+
+ - ${{ if eq(parameters.publishingVersion, 4) }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ targetPath: '$(Build.ArtifactStagingDirectory)/MergedManifest.xml'
+ artifactName: AssetManifests
+ displayName: 'Publish Merged Manifest'
+ retryCountOnTaskFailure: 10 # for any files being locked
+
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish ReleaseConfigs Artifact
+ targetPath: '$(Build.StagingDirectory)/ReleaseConfigs'
+ artifactName: ReleaseConfigs
+ retryCountOnTaskFailure: 10 # for any files being locked
+
+ - ${{ if or(eq(parameters.publishAssetsImmediately, 'true'), eq(parameters.isAssetlessBuild, 'true')) }}:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ # Darc is targeting 8.0, so make sure it's installed
+ - task: UseDotNet@2
+ inputs:
+ version: 8.0.x
+
+ - task: AzureCLI@2
+ displayName: Publish Using Darc
+ inputs:
+ azureSubscription: "Darc: Maestro Production"
+ scriptType: ps
+ scriptLocation: scriptPath
+ scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: >
+ -BuildId $(BARBuildId)
+ -PublishingInfraVersion 3
+ -AzdoToken '$(System.AccessToken)'
+ -WaitPublishingFinish true
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+ -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}'
+ -runtimeSourceFeed https://ci.dot.net/internal
+ -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ StageLabel: 'BuildAssetRegistry'
+ JobLabel: 'Publish_Artifacts_Logs'
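
A hedged sketch of wiring this job in after the build legs of an internal pipeline; the wrapper path and the dependsOn job name are assumptions, and the parameter values are only examples.

- template: /eng/common/templates/job/publish-build-assets.yml   # wrapper path assumed
  parameters:
    dependsOn: Windows_NT              # placeholder build job name
    publishAssetsImmediately: true
    enablePublishBuildArtifacts: true
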
diff --git a/eng/common/core-templates/job/renovate.yml b/eng/common/core-templates/job/renovate.yml
new file mode 100644
index 00000000..ff86c80b
--- /dev/null
+++ b/eng/common/core-templates/job/renovate.yml
@@ -0,0 +1,196 @@
+# --------------------------------------------------------------------------------------
+# Renovate Bot Job Template
+# --------------------------------------------------------------------------------------
+# This Azure DevOps pipeline job template runs Renovate (https://docs.renovatebot.com/)
+# to automatically update dependencies in a GitHub repository.
+#
+# Renovate scans the repository for dependency files and creates pull requests to update
+# outdated dependencies based on the configuration specified in the renovateConfigPath
+# parameter.
+#
+# Usage:
+# For each product repo wanting to make use of Renovate, this template is called from
+# an internal Azure DevOps pipeline, typically with a schedule trigger, to check for
+# and propose dependency updates.
+#
+# For more info, see https://github.com/dotnet/arcade/blob/main/Documentation/Renovate.md
+# --------------------------------------------------------------------------------------
+
+parameters:
+
+# Path to the Renovate configuration file within the repository.
+- name: renovateConfigPath
+ type: string
+ default: 'eng/renovate.json'
+
+# GitHub repository to run Renovate against, in the format 'owner/repo'.
+# This could technically be any repo but convention is to target the same
+# repo that contains the calling pipeline. The Renovate config file would
+# be co-located with the pipeline's repo and, in most cases, the config
+# file is specific to the repo being targeted.
+- name: gitHubRepo
+ type: string
+
+# List of base branches to target for Renovate PRs.
+# NOTE: The Renovate configuration file is always read from the branch where the
+# pipeline is run, NOT from the target branches specified here. If you need different
+# configurations for different branches, run the pipeline from each branch separately.
+- name: baseBranches
+ type: object
+ default:
+ - main
+
+# When true, Renovate will run in dry run mode, which previews changes without creating PRs.
+# See the 'Run Renovate' step log output for details of what would have been changed.
+- name: dryRun
+ type: boolean
+ default: false
+
+# By default, Renovate will not recreate a PR for a given dependency/version pair that was
+# previously closed. This allows opting in to always recreating PRs even if they were
+# previously closed.
+- name: forceRecreatePR
+ type: boolean
+ default: false
+
+# Name of the arcade repository resource in the pipeline.
+# This allows repos which haven't been onboarded to Arcade to still use this
+# template by checking out the repo as a resource with a custom name and pointing
+# this parameter to it.
+- name: arcadeRepoResource
+ type: string
+ default: self
+
+# Directory name for the self repo under $(Build.SourcesDirectory) in multi-checkout.
+# In multi-checkout (when arcadeRepoResource != 'self'), Azure DevOps checks out the
+# self repo to $(Build.SourcesDirectory)/<repo name>. Set this to match the auto-generated
+# directory name. Using the auto-generated name is necessary rather than explicitly
+# defining a checkout path because container jobs expect repos to live under the agent's
+# workspace ($(Pipeline.Workspace)). On some self-hosted setups the host path
+# (e.g., /mnt/vss/_work) differs from the container path (e.g., /__w), and a custom checkout
+# path can fail validation. Using the default checkout location keeps the paths consistent
+# and avoids this issue.
+- name: selfRepoName
+ type: string
+ default: ''
+- name: arcadeRepoName
+ type: string
+ default: ''
+
+# Pool configuration for the job.
+- name: pool
+ type: object
+ default:
+ name: NetCore1ESPool-Internal
+ image: build.azurelinux.3.amd64
+ os: linux
+
+jobs:
+- job: Renovate
+ displayName: Run Renovate
+ container: RenovateContainer
+ variables:
+ - group: dotnet-renovate-bot
+ # The Renovate version is automatically updated by https://github.com/dotnet/arcade/blob/main/azure-pipelines-renovate.yml.
+ # Changing the variable name here would require updating the name in https://github.com/dotnet/arcade/blob/main/eng/renovate.json as well.
+ - name: renovateVersion
+ value: '42'
+ readonly: true
+ - name: renovateLogFilePath
+ value: '$(Build.ArtifactStagingDirectory)/renovate.json'
+ readonly: true
+ - name: dryRunArg
+ readonly: true
+ ${{ if eq(parameters.dryRun, true) }}:
+ value: 'full'
+ ${{ else }}:
+ value: ''
+ - name: recreateWhenArg
+ readonly: true
+ ${{ if eq(parameters.forceRecreatePR, true) }}:
+ value: 'always'
+ ${{ else }}:
+ value: ''
+ # In multi-checkout (without custom paths), Azure DevOps places each repo under
+  # $(Build.SourcesDirectory)/<repo name>. selfRepoName must be provided in that case.
+ - name: selfRepoPath
+ readonly: true
+ ${{ if eq(parameters.arcadeRepoResource, 'self') }}:
+ value: '$(Build.SourcesDirectory)'
+ ${{ else }}:
+ value: '$(Build.SourcesDirectory)/${{ parameters.selfRepoName }}'
+ - name: arcadeRepoPath
+ readonly: true
+ ${{ if eq(parameters.arcadeRepoResource, 'self') }}:
+ value: '$(Build.SourcesDirectory)'
+ ${{ else }}:
+ value: '$(Build.SourcesDirectory)/${{ parameters.arcadeRepoName }}'
+ pool: ${{ parameters.pool }}
+
+ templateContext:
+ outputParentDirectory: $(Build.ArtifactStagingDirectory)
+ outputs:
+ - output: pipelineArtifact
+ displayName: Publish Renovate Log
+ condition: succeededOrFailed()
+ targetPath: $(Build.ArtifactStagingDirectory)
+ artifactName: $(Agent.JobName)_Logs_Attempt$(System.JobAttempt)
+ isProduction: false # logs are non-production artifacts
+
+ steps:
+ - checkout: self
+ fetchDepth: 1
+
+ - ${{ if ne(parameters.arcadeRepoResource, 'self') }}:
+ - checkout: ${{ parameters.arcadeRepoResource }}
+ fetchDepth: 1
+
+ - script: |
+ renovate-config-validator $(selfRepoPath)/${{parameters.renovateConfigPath}} 2>&1 | tee /tmp/renovate-config-validator.out
+ validatorExit=${PIPESTATUS[0]}
+ if grep -q '^ WARN:' /tmp/renovate-config-validator.out; then
+ echo "##vso[task.logissue type=warning]Renovate config validator produced warnings."
+ echo "##vso[task.complete result=SucceededWithIssues]"
+ fi
+ exit $validatorExit
+ displayName: Validate Renovate config
+ env:
+ LOG_LEVEL: info
+ LOG_FILE_LEVEL: debug
+ LOG_FILE: $(Build.ArtifactStagingDirectory)/renovate-config-validator.json
+
+ - script: |
+ . $(arcadeRepoPath)/eng/common/renovate.env
+ renovate 2>&1 | tee /tmp/renovate.out
+ renovateExit=${PIPESTATUS[0]}
+ if grep -q '^ WARN:' /tmp/renovate.out; then
+ echo "##vso[task.logissue type=warning]Renovate produced warnings."
+ echo "##vso[task.complete result=SucceededWithIssues]"
+ fi
+ exit $renovateExit
+ displayName: Run Renovate
+ env:
+ RENOVATE_FORK_TOKEN: $(BotAccount-dotnet-renovate-bot-PAT)
+ RENOVATE_TOKEN: $(BotAccount-dotnet-renovate-bot-PAT)
+ RENOVATE_REPOSITORIES: ${{parameters.gitHubRepo}}
+ RENOVATE_BASE_BRANCHES: ${{ convertToJson(parameters.baseBranches) }}
+ RENOVATE_DRY_RUN: $(dryRunArg)
+ RENOVATE_RECREATE_WHEN: $(recreateWhenArg)
+ LOG_LEVEL: info
+ LOG_FILE_LEVEL: debug
+ LOG_FILE: $(renovateLogFilePath)
+ RENOVATE_CONFIG_FILE: $(selfRepoPath)/${{parameters.renovateConfigPath}}
+
+ - script: |
+ echo "PRs created by Renovate:"
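+      # The Renovate file log is newline-delimited JSON; a (hypothetical) entry such as
+      #   {"msg":"PR created","repository":"dotnet/arcade","pr":12345}
+      # is printed below as https://github.com/dotnet/arcade/pull/12345.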
+ if [ -s "$(renovateLogFilePath)" ]; then
+ if ! jq -r 'select(.msg == "PR created" and .pr != null) | "https://github.com/\(.repository)/pull/\(.pr)"' "$(renovateLogFilePath)" | sort -u; then
+ echo "##vso[task.logissue type=warning]Failed to parse Renovate log file with jq."
+ echo "##vso[task.complete result=SucceededWithIssues]"
+ fi
+ else
+ echo "##vso[task.logissue type=warning]No Renovate log file found or file is empty."
+ echo "##vso[task.complete result=SucceededWithIssues]"
+ fi
+ displayName: List created PRs
+ condition: and(succeededOrFailed(), eq('${{ parameters.dryRun }}', false))
diff --git a/eng/common/core-templates/job/source-build.yml b/eng/common/core-templates/job/source-build.yml
new file mode 100644
index 00000000..1997c2ae
--- /dev/null
+++ b/eng/common/core-templates/job/source-build.yml
@@ -0,0 +1,96 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. The template produces a server job with a
+ # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
+
+  # Specifies the prefix for source-build jobs added to the pipeline. Use this if disambiguation is needed.
+ jobNamePrefix: 'Source_Build'
+
+ # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
+ # managed-only repositories. This is an object with these properties:
+ #
+ # name: ''
+ # The name of the job. This is included in the job ID.
+ # targetRID: ''
+ # The name of the target RID to use, instead of the one auto-detected by Arcade.
+  #   portableBuild: true
+  #     Set to false to enable non-portable mode: a more specific RID (e.g. fedora.32-x64 rather than
+  #     linux-x64), and compiling against distro-provided packages rather than portable ones. The
+  #     default is portable mode.
+ # skipPublishValidation: false
+ # Disables publishing validation. By default, a check is performed to ensure no packages are
+ # published by source-build.
+ # container: ''
+ # A container to use. Runs in docker.
+ # pool: {}
+ # A pool to use. Runs directly on an agent.
+ # buildScript: ''
+ # Specifies the build script to invoke to perform the build in the repo. The default
+ # './build.sh' should work for typical Arcade repositories, but this is customizable for
+ # difficult situations.
+ # buildArguments: ''
+ # Specifies additional build arguments to pass to the build script.
+ # jobProperties: {}
+ # A list of job properties to inject at the top level, for potential extensibility beyond
+ # container and pool.
+ platform: {}
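+  # Illustrative platform object (mirrors the default managed platform defined in
+  # /eng/common/core-templates/jobs/source-build.yml):
+  #   platform:
+  #     name: 'Managed'
+  #     container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream-10-amd64'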
+
+ is1ESPipeline: ''
+
+ # If set to true and running on a non-public project,
+  # internal NuGet and blob storage locations will be enabled.
+ # This is not enabled by default because many repositories do not need internal sources
+ # and do not need to have the required service connections approved in the pipeline.
+ enableInternalSources: false
+
+jobs:
+- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
+ displayName: Source-Build (${{ parameters.platform.name }})
+
+ ${{ each property in parameters.platform.jobProperties }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ ${{ if ne(parameters.platform.container, '') }}:
+ container: ${{ parameters.platform.container }}
+
+ ${{ if eq(parameters.platform.pool, '') }}:
+ # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
+ # source-build builds run in Docker, including the default managed platform.
+ # /eng/common/core-templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
+ ${{ if eq(parameters.is1ESPipeline, 'true') }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
+ demands: ImageOverride -equals build.azurelinux.3.amd64.open
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
+ image: build.azurelinux.3.amd64
+ os: linux
+ ${{ else }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
+ demands: ImageOverride -equals build.azurelinux.3.amd64.open
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
+ demands: ImageOverride -equals build.azurelinux.3.amd64
+ ${{ if ne(parameters.platform.pool, '') }}:
+ pool: ${{ parameters.platform.pool }}
+
+ workspace:
+ clean: all
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if eq(parameters.enableInternalSources, true) }}:
+ - template: /eng/common/core-templates/steps/enable-internal-sources.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ - template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ - template: /eng/common/core-templates/steps/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ platform: ${{ parameters.platform }}
diff --git a/eng/common/core-templates/job/source-index-stage1.yml b/eng/common/core-templates/job/source-index-stage1.yml
new file mode 100644
index 00000000..bac6ac5f
--- /dev/null
+++ b/eng/common/core-templates/job/source-index-stage1.yml
@@ -0,0 +1,46 @@
+parameters:
+ runAsPublic: false
+ sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
+ preSteps: []
+ binlogPath: artifacts/log/Debug/Build.binlog
+ condition: eq(variables['Build.SourceBranch'], 'refs/heads/main')
+ dependsOn: ''
+ pool: ''
+ is1ESPipeline: ''
+
+jobs:
+- job: SourceIndexStage1
+ dependsOn: ${{ parameters.dependsOn }}
+ condition: ${{ parameters.condition }}
+ variables:
+ - name: BinlogPath
+ value: ${{ parameters.binlogPath }}
+ - name: skipComponentGovernanceDetection
+ value: true
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $(DncEngPublicBuildPool)
+ image: windows.vs2026.amd64.open
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $(DncEngInternalBuildPool)
+ image: windows.vs2026.amd64
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+ - script: ${{ parameters.sourceIndexBuildCommand }}
+ displayName: Build Repository
+
+ - template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
+ parameters:
+      binLogPath: ${{ parameters.binlogPath }}
diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml
new file mode 100644
index 00000000..cc8cce45
--- /dev/null
+++ b/eng/common/core-templates/jobs/jobs.yml
@@ -0,0 +1,125 @@
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: Enable running the source-build jobs to build repo from source
+ enableSourceBuild: false
+
+ # Optional: Parameters for source-build template.
+ # See /eng/common/core-templates/jobs/source-build.yml for options
+ sourceBuildParameters: []
+
+ graphFileGeneration:
+ # Optional: Enable generating the graph files at the end of the build
+ enabled: false
+ # Optional: Include toolset dependencies in the generated graph files
+ includeToolset: false
+
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+
+ # Optional: Override automatically derived dependsOn value for "publish build assets" job
+ publishBuildAssetsDependsOn: ''
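+  # Illustrative override (job names are placeholders; each entry must provide a 'job' key):
+  #   publishBuildAssetsDependsOn:
+  #   - job: Build_Windows_x64
+  #   - job: Build_Linux_x64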
+
+  # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
+ publishAssetsImmediately: false
+
+  # Optional: Whether or not the build has assets it wants to publish to BAR
+ isAssetlessBuild: false
+
+ # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
+ artifactsPublishingAdditionalParameters: ''
+ signingValidationAdditionalParameters: ''
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ enableSourceIndex: false
+ sourceIndexParams: {}
+
+ artifacts: {}
+ is1ESPipeline: ''
+
+ # Publishing version w/default.
+ publishingVersion: 3
+
+ repositoryAlias: self
+ officialBuildId: ''
+
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+jobs:
+- ${{ each job in parameters.jobs }}:
+ - ${{ if eq(parameters.is1ESPipeline, 'true') }}:
+ - template: /eng/common/templates-official/job/job.yml
+ parameters:
+ # pass along parameters
+ ${{ each parameter in parameters }}:
+ ${{ if ne(parameter.key, 'jobs') }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ # pass along job properties
+ ${{ each property in job }}:
+ ${{ if ne(property.key, 'job') }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ name: ${{ job.job }}
+
+ - ${{ else }}:
+ - template: /eng/common/templates/job/job.yml
+ parameters:
+ # pass along parameters
+ ${{ each parameter in parameters }}:
+ ${{ if ne(parameter.key, 'jobs') }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ # pass along job properties
+ ${{ each property in job }}:
+ ${{ if ne(property.key, 'job') }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ name: ${{ job.job }}
+
+- ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - template: /eng/common/core-templates/jobs/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ ${{ each parameter in parameters.sourceBuildParameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
+ - template: ../job/source-index-stage1.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ runAsPublic: ${{ parameters.runAsPublic }}
+ ${{ each parameter in parameters.sourceIndexParams }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, ''), eq(parameters.isAssetlessBuild, true)) }}:
+ - template: ../job/publish-build-assets.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ continueOnError: ${{ parameters.continueOnError }}
+ publishingVersion: ${{ parameters.publishingVersion }}
+ dependsOn:
+ - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.jobs }}:
+ - ${{ job.job }}
+
+ runAsPublic: ${{ parameters.runAsPublic }}
+ publishAssetsImmediately: ${{ or(parameters.publishAssetsImmediately, parameters.isAssetlessBuild) }}
+ isAssetlessBuild: ${{ parameters.isAssetlessBuild }}
+ enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
+ repositoryAlias: ${{ parameters.repositoryAlias }}
+ officialBuildId: ${{ parameters.officialBuildId }}
diff --git a/eng/common/core-templates/jobs/source-build.yml b/eng/common/core-templates/jobs/source-build.yml
new file mode 100644
index 00000000..d92860cb
--- /dev/null
+++ b/eng/common/core-templates/jobs/source-build.yml
@@ -0,0 +1,42 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. A job is created for each platform, as
+ # well as an optional server job that completes when all platform jobs complete.
+
+ # See /eng/common/core-templates/job/source-build.yml
+ jobNamePrefix: 'Source_Build'
+
+ # This is the default platform provided by Arcade, intended for use by a managed-only repo.
+ defaultManagedPlatform:
+ name: 'Managed'
+ container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream-10-amd64'
+
+ # Defines the platforms on which to run build jobs. One job is created for each platform, and the
+ # object in this array is sent to the job template as 'platform'. If no platforms are specified,
+ # one job runs on 'defaultManagedPlatform'.
+ platforms: []
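+  # Illustrative platforms list (the entry mirrors 'defaultManagedPlatform' above; the name becomes
+  # part of each job ID):
+  #   platforms:
+  #   - name: 'Managed'
+  #     container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream-10-amd64'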
+
+ is1ESPipeline: ''
+
+ # If set to true and running on a non-public project,
+  # internal NuGet and blob storage locations will be enabled.
+ # This is not enabled by default because many repositories do not need internal sources
+ # and do not need to have the required service connections approved in the pipeline.
+ enableInternalSources: false
+
+jobs:
+
+- ${{ each platform in parameters.platforms }}:
+ - template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ platform }}
+ enableInternalSources: ${{ parameters.enableInternalSources }}
+
+- ${{ if eq(length(parameters.platforms), 0) }}:
+ - template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ parameters.defaultManagedPlatform }}
+ enableInternalSources: ${{ parameters.enableInternalSources }}
diff --git a/eng/common/core-templates/post-build/common-variables.yml b/eng/common/core-templates/post-build/common-variables.yml
new file mode 100644
index 00000000..db298ae1
--- /dev/null
+++ b/eng/common/core-templates/post-build/common-variables.yml
@@ -0,0 +1,20 @@
+variables:
+ - group: Publish-Build-Assets
+
+ # Whether the build is internal or not
+ - name: IsInternalBuild
+ value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
+
+ # Default Maestro++ API Endpoint and API Version
+ - name: MaestroApiEndPoint
+ value: "https://maestro.dot.net"
+ - name: MaestroApiVersion
+ value: "2020-02-20"
+
+ - name: SymbolToolVersion
+ value: 1.0.1
+ - name: BinlogToolVersion
+ value: 1.0.11
+
+ - name: runCodesignValidationInjection
+ value: false
diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml
new file mode 100644
index 00000000..fcf40d1d
--- /dev/null
+++ b/eng/common/core-templates/post-build/post-build.yml
@@ -0,0 +1,324 @@
+parameters:
+# Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
+# Publishing V1 is no longer supported
+# Publishing V2 is no longer supported
+# Publishing V3 is the default
+- name: publishingInfraVersion
+ displayName: Which version of publishing should be used to promote the build definition?
+ type: number
+ default: 3
+ values:
+ - 3
+ - 4
+
+- name: BARBuildId
+ displayName: BAR Build Id
+ type: number
+ default: 0
+
+- name: PromoteToChannelIds
+ displayName: Channel to promote BARBuildId to
+ type: string
+ default: ''
+
+- name: enableSourceLinkValidation
+ displayName: Enable SourceLink validation
+ type: boolean
+ default: false
+
+- name: enableSigningValidation
+ displayName: Enable signing validation
+ type: boolean
+ default: true
+
+- name: enableSymbolValidation
+ displayName: Enable symbol validation
+ type: boolean
+ default: false
+
+- name: enableNugetValidation
+ displayName: Enable NuGet validation
+ type: boolean
+ default: true
+
+- name: publishInstallersAndChecksums
+ displayName: Publish installers and checksums
+ type: boolean
+ default: true
+
+- name: requireDefaultChannels
+  displayName: Fail the build if there are no default channel registrations for the current build
+ type: boolean
+ default: false
+
+- name: isAssetlessBuild
+ type: boolean
+ displayName: Is Assetless Build
+ default: false
+
+# These parameters let the user customize the call to sdk-task.ps1 for publishing
+# symbols & general artifacts as well as for signing validation
+- name: symbolPublishingAdditionalParameters
+ displayName: Symbol publishing additional parameters
+ type: string
+ default: ''
+
+- name: artifactsPublishingAdditionalParameters
+ displayName: Artifact publishing additional parameters
+ type: string
+ default: ''
+
+- name: signingValidationAdditionalParameters
+ displayName: Signing validation additional parameters
+ type: string
+ default: ''
+
+# Which stages should finish execution before post-build stages start
+- name: validateDependsOn
+ type: object
+ default:
+ - build
+
+- name: publishDependsOn
+ type: object
+ default:
+ - Validate
+
+# Optional: Call asset publishing rather than running in a separate stage
+- name: publishAssetsImmediately
+ type: boolean
+ default: false
+
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+stages:
+- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true')) }}:
+ - stage: Validate
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Validate Build Assets
+ variables:
+ - template: /eng/common/core-templates/post-build/common-variables.yml
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobs:
+ - job:
+ displayName: NuGet Validation
+ condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2025
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ ${{ if eq(parameters.is1ESPipeline, true) }}:
+ name: $(DncEngInternalBuildPool)
+ image: windows.vs2026.amd64
+ os: windows
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ demands: ImageOverride -equals windows.vs2026.amd64
+
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - ${{ if ne(parameters.publishingInfraVersion, 4) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+ - ${{ if eq(parameters.publishingInfraVersion, 4) }}:
+ - task: DownloadPipelineArtifact@2
+ displayName: Download Pipeline Artifacts (V4)
+ inputs:
+ itemPattern: '*/packages/**/*.nupkg'
+ targetPath: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
+ - task: CopyFiles@2
+ displayName: Flatten packages to PackageArtifacts
+ inputs:
+ SourceFolder: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
+ Contents: '**/*.nupkg'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ flattenFolders: true
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/nuget-validation.ps1
+ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
+
+ - job:
+ displayName: Signing Validation
+ condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2025
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ ${{ if eq(parameters.is1ESPipeline, true) }}:
+ name: $(DncEngInternalBuildPool)
+ image: windows.vs2026.amd64
+ os: windows
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ demands: ImageOverride -equals windows.vs2026.amd64
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - ${{ if ne(parameters.publishingInfraVersion, 4) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+ - ${{ if eq(parameters.publishingInfraVersion, 4) }}:
+ - task: DownloadPipelineArtifact@2
+ displayName: Download Pipeline Artifacts (V4)
+ inputs:
+ itemPattern: '*/packages/**/*.nupkg'
+ targetPath: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
+ - task: CopyFiles@2
+ displayName: Flatten packages to PackageArtifacts
+ inputs:
+ SourceFolder: '$(Build.ArtifactStagingDirectory)/PipelineArtifactsDownload'
+ Contents: '**/*.nupkg'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ flattenFolders: true
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
+ # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@1
+ displayName: 'Authenticate to AzDO Feeds'
+
+ # Signing validation will optionally work with the buildmanifest file which is downloaded from
+ # Azure DevOps above.
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task SigningValidation -restore
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ /p:SignCheckExclusionsFile='$(System.DefaultWorkingDirectory)/eng/SignCheckExclusionsFile.txt'
+ ${{ parameters.signingValidationAdditionalParameters }}
+
+ - template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ StageLabel: 'Validation'
+ JobLabel: 'Signing'
+ BinlogToolVersion: $(BinlogToolVersion)
+
+ # SourceLink validation has been removed — the underlying CLI tool
+ # (targeting netcoreapp2.1) has not functioned for years.
+ # The enableSourceLinkValidation parameter is kept but ignored so
+ # existing pipelines that pass it are not broken.
+ # See https://github.com/dotnet/arcade/issues/16647
+ - ${{ if eq(parameters.enableSourceLinkValidation, 'true') }}:
+ - job:
+ displayName: 'SourceLink Validation Removed - please remove enableSourceLinkValidation from your pipeline'
+ pool: server
+ steps:
+ - task: Delay@1
+ displayName: 'Warning: SourceLink validation removed (see https://github.com/dotnet/arcade/issues/16647)'
+ inputs:
+ delayForMinutes: '0'
+
+- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
+ - stage: publish_using_darc
+ ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true')) }}:
+ dependsOn: ${{ parameters.publishDependsOn }}
+ ${{ else }}:
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Publish using Darc
+ variables:
+ - template: /eng/common/core-templates/post-build/common-variables.yml
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobs:
+ - job:
+ displayName: Publish Using Darc
+ timeoutInMinutes: 120
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2025
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ ${{ if eq(parameters.is1ESPipeline, true) }}:
+ name: NetCore1ESPool-Publishing-Internal
+ image: windows.vs2026.amd64
+ os: windows
+ ${{ else }}:
+ name: NetCore1ESPool-Publishing-Internal
+ demands: ImageOverride -equals windows.vs2026.amd64
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: NuGetAuthenticate@1
+
+ # Populate internal runtime variables.
+ - template: /eng/common/templates/steps/enable-internal-sources.yml
+ parameters:
+ legacyCredential: $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - template: /eng/common/templates/steps/enable-internal-runtimes.yml
+
+ - task: UseDotNet@2
+ inputs:
+ version: 8.0.x
+
+ - task: AzureCLI@2
+ displayName: Publish Using Darc
+ inputs:
+ azureSubscription: "Darc: Maestro Production"
+ scriptType: ps
+ scriptLocation: scriptPath
+ scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: >
+ -BuildId $(BARBuildId)
+ -PublishingInfraVersion 3
+ -AzdoToken '$(System.AccessToken)'
+ -WaitPublishingFinish true
+ -RequireDefaultChannels ${{ parameters.requireDefaultChannels }}
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+ -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}'
+ -runtimeSourceFeed https://ci.dot.net/internal
+ -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
diff --git a/eng/common/core-templates/post-build/setup-maestro-vars.yml b/eng/common/core-templates/post-build/setup-maestro-vars.yml
new file mode 100644
index 00000000..6dfa99ec
--- /dev/null
+++ b/eng/common/core-templates/post-build/setup-maestro-vars.yml
@@ -0,0 +1,73 @@
+parameters:
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+ is1ESPipeline: ''
+
+steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
+ - task: DownloadPipelineArtifact@2
+ displayName: Download Release Configs
+ inputs:
+ artifactName: ReleaseConfigs
+ targetPath: '$(Build.StagingDirectory)/ReleaseConfigs'
+
+ - task: AzureCLI@2
+ name: setReleaseVars
+ displayName: Set Release Configs Vars
+ inputs:
+ azureSubscription: "Darc: Maestro Production"
+ scriptType: pscore
+ scriptLocation: inlineScript
+ inlineScript: |
+ try {
+ if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
+ $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
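+            # ReleaseConfigs.txt is expected to contain three lines (values below are illustrative):
+            #   line 1: the BAR build id, e.g. 123456
+            #   line 2: the target channel id(s)
+            #   line 3: whether the build is stable, e.g. False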
+
+ $BarId = $Content | Select -Index 0
+ $Channels = $Content | Select -Index 1
+ $IsStableBuild = $Content | Select -Index 2
+
+ $AzureDevOpsProject = $Env:System_TeamProject
+ $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
+ $AzureDevOpsBuildId = $Env:Build_BuildId
+ }
+ else {
+ . $(System.DefaultWorkingDirectory)\eng\common\tools.ps1
+ $darc = Get-Darc
+ $buildInfo = & $darc get-build `
+ --id ${{ parameters.BARBuildId }} `
+ --extended `
+ --output-format json `
+ --ci `
+ | convertFrom-Json
+
+ $BarId = ${{ parameters.BARBuildId }}
+ $Channels = $Env:PromoteToMaestroChannels -split ","
+ $Channels = $Channels -join "]["
+ $Channels = "[$Channels]"
+
+ $IsStableBuild = $buildInfo.stable
+ $AzureDevOpsProject = $buildInfo.azureDevOpsProject
+ $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
+ $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
+ }
+
+ Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
+ Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
+ Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+
+ Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
+ Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
+ Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ exit 1
+ }
+ env:
+ PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
diff --git a/eng/common/core-templates/stages/renovate.yml b/eng/common/core-templates/stages/renovate.yml
new file mode 100644
index 00000000..edab2818
--- /dev/null
+++ b/eng/common/core-templates/stages/renovate.yml
@@ -0,0 +1,111 @@
+# --------------------------------------------------------------------------------------
+# Renovate Pipeline Template
+# --------------------------------------------------------------------------------------
+# This template provides a complete reusable pipeline definition for running Renovate
+# in a 1ES Official pipeline. Pipelines can extend from this template and only need
+# to pass the Renovate job parameters.
+#
+# For more info, see https://github.com/dotnet/arcade/blob/main/Documentation/Renovate.md
+# --------------------------------------------------------------------------------------
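+#
+# A consuming pipeline only needs to extend this template and pass the Renovate job parameters.
+# Illustrative example (repository name and branch list are placeholders):
+#
+#   extends:
+#     template: eng/common/core-templates/stages/renovate.yml
+#     parameters:
+#       gitHubRepo: dotnet/some-repo
+#       renovateConfigPath: eng/renovate.json
+#       baseBranches:
+#       - main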
+
+parameters:
+
+# Path to the Renovate configuration file within the repository.
+- name: renovateConfigPath
+ type: string
+ default: 'eng/renovate.json'
+
+# GitHub repository to run Renovate against, in the format 'owner/repo'.
+- name: gitHubRepo
+ type: string
+
+# List of base branches to target for Renovate PRs.
+- name: baseBranches
+ type: object
+ default:
+ - main
+
+# When true, Renovate will run in dry run mode.
+- name: dryRun
+ type: boolean
+ default: false
+
+# When true, Renovate will recreate PRs even if they were previously closed.
+- name: forceRecreatePR
+ type: boolean
+ default: false
+
+# Name of the arcade repository resource in the pipeline.
+# This allows repos which haven't been onboarded to Arcade to still use this
+# template by checking out the repo as a resource with a custom name and pointing
+# this parameter to it.
+- name: arcadeRepoResource
+ type: string
+ default: 'self'
+
+- name: selfRepoName
+ type: string
+ default: ''
+- name: arcadeRepoName
+ type: string
+ default: ''
+
+# Pool configuration for the pipeline.
+- name: pool
+ type: object
+ default:
+ name: NetCore1ESPool-Internal
+ image: build.azurelinux.3.amd64
+ os: linux
+
+# Renovate version used in the container image tag.
+- name: renovateVersion
+ default: 43
+ type: number
+
+# Pool configuration for SDL analysis.
+- name: sdlPool
+ type: object
+ default:
+ name: NetCore1ESPool-Internal
+ image: windows.vs2026.amd64
+ os: windows
+
+resources:
+ repositories:
+ - repository: 1ESPipelineTemplates
+ type: git
+ name: 1ESPipelineTemplates/1ESPipelineTemplates
+ ref: refs/tags/release
+
+extends:
+ template: v1/1ES.Official.PipelineTemplate.yml@1ESPipelineTemplates
+ parameters:
+ pool: ${{ parameters.pool }}
+ sdl:
+ sourceAnalysisPool: ${{ parameters.sdlPool }}
+ # When repos that aren't onboarded to Arcade use this template, they set the
+ # arcadeRepoResource parameter to point to their Arcade repo resource. In that case,
+      # Arcade will be excluded from SDL analysis.
+ ${{ if ne(parameters.arcadeRepoResource, 'self') }}:
+ sourceRepositoriesToScan:
+ exclude:
+ - repository: ${{ parameters.arcadeRepoResource }}
+ containers:
+ RenovateContainer:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-renovate-${{ parameters.renovateVersion }}-amd64
+ stages:
+ - stage: Renovate
+ displayName: Run Renovate
+ jobs:
+ - template: /eng/common/core-templates/job/renovate.yml@${{ parameters.arcadeRepoResource }}
+ parameters:
+ renovateConfigPath: ${{ parameters.renovateConfigPath }}
+ gitHubRepo: ${{ parameters.gitHubRepo }}
+ baseBranches: ${{ parameters.baseBranches }}
+ dryRun: ${{ parameters.dryRun }}
+ forceRecreatePR: ${{ parameters.forceRecreatePR }}
+ pool: ${{ parameters.pool }}
+ arcadeRepoResource: ${{ parameters.arcadeRepoResource }}
+ selfRepoName: ${{ parameters.selfRepoName }}
+ arcadeRepoName: ${{ parameters.arcadeRepoName }}
diff --git a/eng/common/core-templates/steps/cleanup-microbuild.yml b/eng/common/core-templates/steps/cleanup-microbuild.yml
new file mode 100644
index 00000000..c0fdcd33
--- /dev/null
+++ b/eng/common/core-templates/steps/cleanup-microbuild.yml
@@ -0,0 +1,28 @@
+parameters:
+ # Enable cleanup tasks for MicroBuild
+ enableMicrobuild: false
+ # Enable cleanup tasks for MicroBuild on Mac and Linux
+ # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT'
+ enableMicrobuildForMacAndLinux: false
+ continueOnError: false
+
+steps:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - task: MicroBuildCleanup@1
+ displayName: Execute Microbuild cleanup tasks
+ condition: and(
+ always(),
+ or(
+ and(
+ eq(variables['Agent.Os'], 'Windows_NT'),
+ in(variables['_SignType'], 'real', 'test')
+ ),
+ and(
+ ${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }},
+ ne(variables['Agent.Os'], 'Windows_NT'),
+ eq(variables['_SignType'], 'real')
+ )
+ ))
+ continueOnError: ${{ parameters.continueOnError }}
+ env:
+ TeamName: $(_TeamName)
diff --git a/eng/common/core-templates/steps/enable-internal-runtimes.yml b/eng/common/core-templates/steps/enable-internal-runtimes.yml
new file mode 100644
index 00000000..6bdbf62a
--- /dev/null
+++ b/eng/common/core-templates/steps/enable-internal-runtimes.yml
@@ -0,0 +1,32 @@
+# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64'
+# variable with the base64-encoded SAS token, by default
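+#
+# Later steps typically consume the resulting secret variable, for example (as done in
+# /eng/common/core-templates/post-build/post-build.yml):
+#   -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'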
+
+parameters:
+- name: federatedServiceConnection
+ type: string
+ default: 'dotnetbuilds-internal-read'
+- name: outputVariableName
+ type: string
+ default: 'dotnetbuilds-internal-container-read-token-base64'
+- name: expiryInHours
+ type: number
+ default: 1
+- name: base64Encode
+ type: boolean
+ default: true
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+steps:
+- ${{ if ne(variables['System.TeamProject'], 'public') }}:
+ - template: /eng/common/core-templates/steps/get-delegation-sas.yml
+ parameters:
+ federatedServiceConnection: ${{ parameters.federatedServiceConnection }}
+ outputVariableName: ${{ parameters.outputVariableName }}
+ expiryInHours: ${{ parameters.expiryInHours }}
+ base64Encode: ${{ parameters.base64Encode }}
+ storageAccount: dotnetbuilds
+ container: internal
+ permissions: rl
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
\ No newline at end of file
diff --git a/eng/common/core-templates/steps/enable-internal-sources.yml b/eng/common/core-templates/steps/enable-internal-sources.yml
new file mode 100644
index 00000000..4085512b
--- /dev/null
+++ b/eng/common/core-templates/steps/enable-internal-sources.yml
@@ -0,0 +1,47 @@
+parameters:
+# This is the Azure federated service connection that we log into to get an access token.
+- name: nugetFederatedServiceConnection
+ type: string
+ default: 'dnceng-artifacts-feeds-read'
+- name: is1ESPipeline
+ type: boolean
+ default: false
+# Legacy parameters to allow for PAT usage
+- name: legacyCredential
+ type: string
+ default: ''
+
+steps:
+- ${{ if ne(variables['System.TeamProject'], 'public') }}:
+ - ${{ if ne(parameters.legacyCredential, '') }}:
+ - task: PowerShell@2
+ displayName: Setup Internal Feeds
+ inputs:
+ filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
+ arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $Env:Token
+ env:
+ Token: ${{ parameters.legacyCredential }}
+ # If running on dnceng (internal project), just use the default behavior for NuGetAuthenticate.
+ # If running on DevDiv, NuGetAuthenticate is not really an option. It's scoped to a single feed, and we have many feeds that
+ # may be added. Instead, we'll use the traditional approach (add cred to nuget.config), but use an account token.
+ - ${{ else }}:
+ - ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ - task: PowerShell@2
+ displayName: Setup Internal Feeds
+ inputs:
+ filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
+ arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config
+ - ${{ else }}:
+ - template: /eng/common/templates/steps/get-federated-access-token.yml
+ parameters:
+ federatedServiceConnection: ${{ parameters.nugetFederatedServiceConnection }}
+ outputVariableName: 'dnceng-artifacts-feeds-read-access-token'
+ - task: PowerShell@2
+ displayName: Setup Internal Feeds
+ inputs:
+ filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1
+ arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token)
+ # This is required in certain scenarios to install the ADO credential provider.
+  # It is installed by default in some msbuild invocations (e.g. VS msbuild), but needs to be installed for others
+ # (e.g. dotnet msbuild).
+ - task: NuGetAuthenticate@1
diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml
new file mode 100644
index 00000000..aad0a8ae
--- /dev/null
+++ b/eng/common/core-templates/steps/generate-sbom.yml
@@ -0,0 +1,14 @@
+parameters:
+ PackageVersion: unused
+ BuildDropPath: unused
+ PackageName: unused
+ ManifestDirPath: unused
+ IgnoreDirectories: unused
+ sbomContinueOnError: unused
+ is1ESPipeline: unused
+ publishArtifacts: unused
+
+steps:
+- script: |
+    echo "##vso[task.logissue type=warning]Including generate-sbom.yml is deprecated, SBOM generation is handled by 1ES PT now. Remove this include."
+ displayName: Issue generate-sbom.yml deprecation warning
diff --git a/eng/common/core-templates/steps/get-delegation-sas.yml b/eng/common/core-templates/steps/get-delegation-sas.yml
new file mode 100644
index 00000000..d2901470
--- /dev/null
+++ b/eng/common/core-templates/steps/get-delegation-sas.yml
@@ -0,0 +1,46 @@
+parameters:
+- name: federatedServiceConnection
+ type: string
+- name: outputVariableName
+ type: string
+- name: expiryInHours
+ type: number
+ default: 1
+- name: base64Encode
+ type: boolean
+ default: false
+- name: storageAccount
+ type: string
+- name: container
+ type: string
+- name: permissions
+ type: string
+ default: 'rl'
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+steps:
+- task: AzureCLI@2
+ displayName: 'Generate delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}'
+ inputs:
+ azureSubscription: ${{ parameters.federatedServiceConnection }}
+ scriptType: 'pscore'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ # Calculate the expiration of the SAS token and convert to UTC
+ $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
+
+ $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
+
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Failed to generate SAS token."
+ exit 1
+ }
+
+ if ('${{ parameters.base64Encode }}' -eq 'true') {
+ $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas))
+ }
+
+ Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
+ Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas"
diff --git a/eng/common/core-templates/steps/get-federated-access-token.yml b/eng/common/core-templates/steps/get-federated-access-token.yml
new file mode 100644
index 00000000..3a4d4410
--- /dev/null
+++ b/eng/common/core-templates/steps/get-federated-access-token.yml
@@ -0,0 +1,42 @@
+parameters:
+- name: federatedServiceConnection
+ type: string
+- name: outputVariableName
+ type: string
+- name: is1ESPipeline
+ type: boolean
+- name: stepName
+ type: string
+ default: 'getFederatedAccessToken'
+- name: condition
+ type: string
+ default: ''
+# Resource to get a token for. Common values include:
+# - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps
+# - 'https://storage.azure.com/' for storage
+# Defaults to Azure DevOps
+- name: resource
+ type: string
+ default: '499b84ac-1321-427f-aa17-267ca6975798'
+- name: isStepOutputVariable
+ type: boolean
+ default: false
+
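+# Illustrative usage (mirrors how enable-internal-sources.yml acquires a feed read token):
+#   - template: /eng/common/core-templates/steps/get-federated-access-token.yml
+#     parameters:
+#       federatedServiceConnection: 'dnceng-artifacts-feeds-read'
+#       outputVariableName: 'dnceng-artifacts-feeds-read-access-token'
+#       is1ESPipeline: false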
+steps:
+- task: AzureCLI@2
+ displayName: 'Getting federated access token for feeds'
+ name: ${{ parameters.stepName }}
+ ${{ if ne(parameters.condition, '') }}:
+ condition: ${{ parameters.condition }}
+ inputs:
+ azureSubscription: ${{ parameters.federatedServiceConnection }}
+ scriptType: 'pscore'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Failed to get access token for resource '${{ parameters.resource }}'"
+ exit 1
+ }
+ Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
+ Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true;isOutput=${{ parameters.isStepOutputVariable }}]$accessToken"
\ No newline at end of file
diff --git a/eng/common/core-templates/steps/install-microbuild-impl.yml b/eng/common/core-templates/steps/install-microbuild-impl.yml
new file mode 100644
index 00000000..da22beb3
--- /dev/null
+++ b/eng/common/core-templates/steps/install-microbuild-impl.yml
@@ -0,0 +1,34 @@
+parameters:
+ - name: microbuildTaskInputs
+ type: object
+ default: {}
+
+ - name: microbuildEnv
+ type: object
+ default: {}
+
+ - name: enablePreviewMicrobuild
+ type: boolean
+ default: false
+
+ - name: condition
+ type: string
+
+ - name: continueOnError
+ type: boolean
+
+steps:
+- ${{ if eq(parameters.enablePreviewMicrobuild, true) }}:
+ - task: MicroBuildSigningPluginPreview@4
+ displayName: Install Preview MicroBuild plugin
+ inputs: ${{ parameters.microbuildTaskInputs }}
+ env: ${{ parameters.microbuildEnv }}
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: ${{ parameters.condition }}
+- ${{ else }}:
+ - task: MicroBuildSigningPlugin@4
+ displayName: Install MicroBuild plugin
+ inputs: ${{ parameters.microbuildTaskInputs }}
+ env: ${{ parameters.microbuildEnv }}
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: ${{ parameters.condition }}
diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml
new file mode 100644
index 00000000..76a54e15
--- /dev/null
+++ b/eng/common/core-templates/steps/install-microbuild.yml
@@ -0,0 +1,118 @@
+parameters:
+ # Enable install tasks for MicroBuild
+ enableMicrobuild: false
+ # Enable install tasks for MicroBuild on Mac and Linux
+ # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT'
+ enableMicrobuildForMacAndLinux: false
+ # Enable preview version of MB signing plugin
+ enablePreviewMicrobuild: false
+ # Determines whether the ESRP service connection information should be passed to the signing plugin.
+ # This overlaps with _SignType to some degree. We only need the service connection for real signing.
+ # It's important that the service connection not be passed to the MicroBuildSigningPlugin task in this place.
+ # Doing so will cause the service connection to be authorized for the pipeline, which isn't allowed and won't work for non-prod.
+ # Unfortunately, _SignType can't be used to exclude the use of the service connection in non-real sign scenarios. The
+  # variable is not available in template expressions. _SignType has a very large proliferation across .NET, so replacing it is tough.
+ microbuildUseESRP: true
+ # Microbuild installation directory
+ microBuildOutputFolder: $(Agent.TempDirectory)/MicroBuild
+ # Microbuild version
+ microbuildPluginVersion: 'latest'
+
+ continueOnError: false
+
+steps:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}:
+ # Needed to download the MicroBuild plugin nupkgs on Mac and Linux when nuget.exe is unavailable
+ - task: UseDotNet@2
+ displayName: Install .NET 8.0 SDK for MicroBuild Plugin
+ inputs:
+ packageType: sdk
+ version: 8.0.x
+ installationPath: ${{ parameters.microBuildOutputFolder }}/.dotnet-microbuild
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
+
+ - script: |
+ set -euo pipefail
+
+ # UseDotNet@2 prepends the dotnet executable path to the PATH variable, so we can call dotnet directly
+ version=$(dotnet --version)
+          cat << EOF > ${{ parameters.microBuildOutputFolder }}/global.json
+ {
+ "sdk": {
+ "version": "$version",
+ "paths": [
+ "${{ parameters.microBuildOutputFolder }}/.dotnet-microbuild"
+ ],
+ "errorMessage": "The .NET SDK version $version is required to install the MicroBuild signing plugin."
+ }
+ }
+ EOF
+ displayName: 'Add global.json to MicroBuild Installation path'
+ workingDirectory: ${{ parameters.microBuildOutputFolder }}
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
+
+ - script: |
+ REM Check if ESRP is disabled while SignType is real
+ if /I "${{ parameters.microbuildUseESRP }}"=="false" if /I "$(_SignType)"=="real" (
+ echo Error: ESRP must be enabled when SignType is real.
+ exit /b 1
+ )
+ displayName: 'Validate ESRP usage (Windows)'
+ condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
+ - script: |
+ # Check if ESRP is disabled while SignType is real
+ if [ "${{ parameters.microbuildUseESRP }}" = "false" ] && [ "$(_SignType)" = "real" ]; then
+ echo "Error: ESRP must be enabled when SignType is real."
+ exit 1
+ fi
+ displayName: 'Validate ESRP usage (Non-Windows)'
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
+
+ # Two different MB install steps. This is due to not being able to use the agent OS during
+ # YAML expansion, and Windows vs. Linux/Mac uses different service connections. However,
+ # we can avoid including the MB install step if not enabled at all. This avoids a bunch of
+ # extra pipeline authorizations, since most pipelines do not sign on non-Windows.
+ - template: /eng/common/core-templates/steps/install-microbuild-impl.yml
+ parameters:
+ enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }}
+ microbuildTaskInputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ version: ${{ parameters.microbuildPluginVersion }}
+ ${{ if eq(parameters.microbuildUseESRP, true) }}:
+ ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)'
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea
+ ${{ else }}:
+ ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca
+ microbuildEnv:
+ TeamName: $(_TeamName)
+ MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test'))
+
+ - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, true) }}:
+ - template: /eng/common/core-templates/steps/install-microbuild-impl.yml
+ parameters:
+ enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }}
+ microbuildTaskInputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ version: ${{ parameters.microbuildPluginVersion }}
+ workingDirectory: ${{ parameters.microBuildOutputFolder }}
+ ${{ if eq(parameters.microbuildUseESRP, true) }}:
+ ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)'
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ ConnectedPMEServiceName: beb8cb23-b303-4c95-ab26-9e44bc958d39
+ ${{ else }}:
+ ConnectedPMEServiceName: c24de2a5-cc7a-493d-95e4-8e5ff5cad2bc
+ microbuildEnv:
+ TeamName: $(_TeamName)
+ MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'), eq(variables['_SignType'], 'real'))
diff --git a/eng/common/core-templates/steps/publish-build-artifacts.yml b/eng/common/core-templates/steps/publish-build-artifacts.yml
new file mode 100644
index 00000000..f24ce346
--- /dev/null
+++ b/eng/common/core-templates/steps/publish-build-artifacts.yml
@@ -0,0 +1,20 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+- name: args
+ type: object
+ default: {}
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - template: /eng/common/templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ ${{ each parameter in parameters.args }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+- ${{ else }}:
+ - template: /eng/common/templates-official/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ ${{ each parameter in parameters.args }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml
new file mode 100644
index 00000000..84a1922c
--- /dev/null
+++ b/eng/common/core-templates/steps/publish-logs.yml
@@ -0,0 +1,64 @@
+parameters:
+ StageLabel: ''
+ JobLabel: ''
+ CustomSensitiveDataList: ''
+ # A default - in case value from eng/common/core-templates/post-build/common-variables.yml is not passed
+ BinlogToolVersion: '1.0.11'
+ is1ESPipeline: false
+
+steps:
+- task: Powershell@2
+ displayName: Prepare Binlogs to Upload
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -ItemType Directory $(System.DefaultWorkingDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ Move-Item -Path $(System.DefaultWorkingDirectory)/artifacts/log/Debug/* $(System.DefaultWorkingDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ continueOnError: true
+ condition: always()
+
+- task: PowerShell@2
+ displayName: Redact Logs
+ inputs:
+ filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/redact-logs.ps1
+    # For now this needs to have an explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
+    # Sensitive data can also be added to '$(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt'.
+    # If the file exists, sensitive data for redaction will be sourced from it
+    # (single entry per line; lines starting with '# ' are considered comments and skipped).
+ arguments: -InputPath '$(System.DefaultWorkingDirectory)/PostBuildLogs'
+ -BinlogToolVersion '${{parameters.BinlogToolVersion}}'
+ -TokensFilePath '$(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt'
+ -runtimeSourceFeed https://ci.dot.net/internal
+ -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
+ '$(publishing-dnceng-devdiv-code-r-build-re)'
+ '$(dn-bot-all-orgs-artifact-feeds-rw)'
+ '$(akams-client-id)'
+ '$(microsoft-symbol-server-pat)'
+ '$(symweb-symbol-server-pat)'
+ '$(dnceng-symbol-server-pat)'
+ '$(dn-bot-all-orgs-build-rw-code-rw)'
+ '$(System.AccessToken)'
+ ${{parameters.CustomSensitiveDataList}}
+ continueOnError: true
+ condition: always()
+
+- task: CopyFiles@2
+ displayName: Gather post build logs
+ inputs:
+ SourceFolder: '$(System.DefaultWorkingDirectory)/PostBuildLogs'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
+ condition: always()
+
+- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish Logs
+ targetPath: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
+ artifactName: PostBuildLogs_${{ parameters.StageLabel }}_${{ parameters.JobLabel }}_Attempt$(System.JobAttempt)
+ continueOnError: true
+ condition: always()
+ retryCountOnTaskFailure: 10 # for any files being locked
+ isProduction: false # logs are non-production artifacts
+
diff --git a/eng/common/core-templates/steps/publish-pipeline-artifacts.yml b/eng/common/core-templates/steps/publish-pipeline-artifacts.yml
new file mode 100644
index 00000000..2efec04d
--- /dev/null
+++ b/eng/common/core-templates/steps/publish-pipeline-artifacts.yml
@@ -0,0 +1,20 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+- name: args
+ type: object
+ default: {}
+
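+# Illustrative 'args' values (mirrors the call in /eng/common/core-templates/steps/publish-logs.yml):
+#   args:
+#     displayName: Publish Logs
+#     targetPath: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
+#     artifactName: PostBuildLogs
+#     condition: always()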
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - template: /eng/common/templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+- ${{ else }}:
+ - template: /eng/common/templates-official/steps/publish-pipeline-artifacts.yml
+ parameters:
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/core-templates/steps/retain-build.yml b/eng/common/core-templates/steps/retain-build.yml
new file mode 100644
index 00000000..83d97a26
--- /dev/null
+++ b/eng/common/core-templates/steps/retain-build.yml
@@ -0,0 +1,28 @@
+parameters:
+  # Optional Azure DevOps PAT with build execute permissions for the build's organization;
+  # only needed if the build to retain ran in a different organization than the one
+  # this template's pipeline is running in.
+ Token: ''
+ # Optional BuildId to retain, defaults to the current running build
+ BuildId: ''
+  # Azure DevOps organization URI for the build, in the https://dev.azure.com/<organization> format.
+  # Defaults to the organization the current pipeline is running in.
+ AzdoOrgUri: '$(System.CollectionUri)'
+ # Azure devops project for the build. Defaults to the project the current pipeline is running on
+ AzdoProject: '$(System.TeamProject)'
+
+steps:
+  - task: PowerShell@2
+ inputs:
+ targetType: 'filePath'
+ filePath: eng/common/retain-build.ps1
+ pwsh: true
+ arguments: >
+ -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
+ -AzdoProject ${{parameters.AzdoProject}}
+ -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
+ -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
+ displayName: Enable permanent build retention
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ BUILD_ID: $(Build.BuildId)
\ No newline at end of file
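A minimal sketch of how a consuming pipeline might reference the retain-build template above. The organization, project, build id, and PAT variable are hypothetical; all four parameters can be omitted to retain the currently running build in the current organization and project:

steps:
- template: /eng/common/core-templates/steps/retain-build.yml
  parameters:
    AzdoOrgUri: https://dev.azure.com/contoso     # hypothetical organization
    AzdoProject: internal-tools                   # hypothetical project
    BuildId: 123456                               # hypothetical build to retain
    Token: $(contoso-build-execute-pat)           # hypothetical PAT variable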
diff --git a/eng/common/core-templates/steps/send-to-helix.yml b/eng/common/core-templates/steps/send-to-helix.yml
new file mode 100644
index 00000000..68fa739c
--- /dev/null
+++ b/eng/common/core-templates/steps/send-to-helix.yml
@@ -0,0 +1,93 @@
+# Please remember to update the documentation if you make changes to these parameters!
+parameters:
+ HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
+ HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
+ HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+ HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+ HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY
+ HelixProjectArguments: '' # optional -- arguments passed to the build command
+ HelixConfiguration: '' # optional -- additional property attached to a job
+ HelixPreCommands: '' # optional -- commands to run before Helix work item execution
+ HelixPostCommands: '' # optional -- commands to run after Helix work item execution
+ WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+ WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
+ WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
+ CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+ XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
+ XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
+ XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
+ XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
+ XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
+ IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+ DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
+ HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
+ Creator: '' # optional -- if the build is external, use this to specify who is sending the job
+ DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
+ condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
+ continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+
+steps:
+ - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
+ displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
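A minimal sketch of invoking the send-to-helix template above through its xUnit path. Queue names, the project path, and the versions are hypothetical; per the parameter comments, XUnitProjects also requires both target frameworks, the runner version, and IncludeDotNetCli=true:

steps:
- template: /eng/common/core-templates/steps/send-to-helix.yml
  parameters:
    HelixTargetQueues: Ubuntu.2204.Amd64.Open;Windows.11.Amd64.Client.Open  # hypothetical queues
    Creator: $(Build.QueuedBy)                                              # hypothetical creator for an external run
    XUnitProjects: $(Build.SourcesDirectory)/test/MyTests/MyTests.csproj    # hypothetical test project
    XUnitPublishTargetFramework: net9.0
    XUnitRuntimeTargetFramework: net472
    XUnitRunnerVersion: '2.4.2'
    IncludeDotNetCli: true
    DotNetCliPackageType: sdk
    DotNetCliVersion: '9.0.100'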
diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml
new file mode 100644
index 00000000..b75f59c4
--- /dev/null
+++ b/eng/common/core-templates/steps/source-build.yml
@@ -0,0 +1,65 @@
+parameters:
+ # This template adds arcade-powered source-build to CI.
+
+ # This is a 'steps' template, and is intended for advanced scenarios where the existing build
+ # infra has a careful build methodology that must be followed. For example, a repo
+ # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
+ # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
+ # GitHub. Using this steps template leaves room for that infra to be included.
+
+ # Defines the platform on which to run the steps. See 'eng/common/core-templates/job/source-build.yml'
+ # for details. The entire object is described in the 'job' template for simplicity, even though
+ # the usage of the properties on this object is split between the 'job' and 'steps' templates.
+ platform: {}
+ is1ESPipeline: false
+
+steps:
+# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
+- script: |
+ set -x
+ df -h
+
+ # If building on the internal project, the internal storage variable may be available (usually only if needed)
+ # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
+ # in the default public locations.
+ internalRuntimeDownloadArgs=
+ if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
+ internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://ci.dot.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://ci.dot.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
+ fi
+
+ buildConfig=Release
+ # Check if AzDO substitutes in a build config from a variable, and use it if so.
+ if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
+ buildConfig='$(_BuildConfig)'
+ fi
+
+ targetRidArgs=
+ if [ '${{ parameters.platform.targetRID }}' != '' ]; then
+ targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
+ fi
+
+ portableBuildArgs=
+ if [ '${{ parameters.platform.portableBuild }}' != '' ]; then
+ portableBuildArgs='/p:PortableBuild=${{ parameters.platform.portableBuild }}'
+ fi
+
+ ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
+ --configuration $buildConfig \
+ --restore --build --pack -bl \
+ --source-build \
+ ${{ parameters.platform.buildArguments }} \
+ $internalRuntimeDownloadArgs \
+ $targetRidArgs \
+ $portableBuildArgs \
+ displayName: Build
+
+- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish BuildLogs
+ targetPath: artifacts/log/${{ coalesce(variables._BuildConfig, 'Release') }}
+ artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
+ continueOnError: true
+ condition: succeededOrFailed()
+ isProduction: false # logs are non-production artifacts
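The quoted comparisons in the script above, such as '$(_BuildConfig)' != '$''(_BuildConfig)', test whether Azure DevOps macro syntax was actually expanded: when the pipeline defines the variable, $(name) is substituted before the shell runs, while the '$''(name)' form is spliced from two strings so it always survives as literal text and is never expanded itself. A standalone sketch of the same idiom as a pipeline step; the variable name is illustrative:

- script: |
    if [ '$(MyOptionalVariable)' != '$''(MyOptionalVariable)' ]; then
      # AzDO replaced the macro, so the variable is defined on this pipeline
      echo "MyOptionalVariable was provided"
    fi
  displayName: Detect an optional pipeline variable (illustrative)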
diff --git a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml
new file mode 100644
index 00000000..3ad83b8c
--- /dev/null
+++ b/eng/common/core-templates/steps/source-index-stage1-publish.yml
@@ -0,0 +1,35 @@
+parameters:
+ sourceIndexUploadPackageVersion: 2.0.0-20250906.1
+ sourceIndexProcessBinlogPackageVersion: 1.0.1-20250906.1
+ sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
+ binlogPath: artifacts/log/Debug/Build.binlog
+
+steps:
+- task: UseDotNet@2
+ displayName: "Source Index: Use .NET 9 SDK"
+ inputs:
+ packageType: sdk
+ version: 9.0.x
+ installationPath: $(Agent.TempDirectory)/dotnet
+ workingDirectory: $(Agent.TempDirectory)
+
+- script: |
+ $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --source ${{parameters.sourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
+ $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --source ${{parameters.sourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
+ displayName: "Source Index: Download netsourceindex Tools"
+  # Set the working directory to the temp directory so 'dotnet' doesn't pick up the repo's global.json and try to use the repo's SDK.
+ workingDirectory: $(Agent.TempDirectory)
+
+- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i ${{parameters.binlogPath}} -r $(System.DefaultWorkingDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
+ displayName: "Source Index: Process Binlog into indexable sln"
+
+- ${{ if and(ne(parameters.runAsPublic, 'true'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: AzureCLI@2
+ displayName: "Source Index: Upload Source Index stage1 artifacts to Azure"
+ inputs:
+ azureSubscription: 'SourceDotNet Stage1 Publish'
+ addSpnToEnvironment: true
+ scriptType: 'ps'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
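A minimal sketch of consuming the source-index publish template above from a repo pipeline; overriding binlogPath is only needed when the binlog is not at the Debug default (the Release path here is hypothetical):

steps:
- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
  parameters:
    binlogPath: artifacts/log/Release/Build.binlog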
diff --git a/eng/common/core-templates/variables/pool-providers.yml b/eng/common/core-templates/variables/pool-providers.yml
new file mode 100644
index 00000000..41053d38
--- /dev/null
+++ b/eng/common/core-templates/variables/pool-providers.yml
@@ -0,0 +1,8 @@
+parameters:
+ is1ESPipeline: false
+
+variables:
+ - ${{ if eq(parameters.is1ESPipeline, 'true') }}:
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+ - ${{ else }}:
+ - template: /eng/common/templates/variables/pool-providers.yml
\ No newline at end of file
diff --git a/eng/common/cross/arm/sources.list.bionic b/eng/common/cross/arm/sources.list.bionic
deleted file mode 100644
index 21095574..00000000
--- a/eng/common/cross/arm/sources.list.bionic
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/sources.list.focal b/eng/common/cross/arm/sources.list.focal
deleted file mode 100644
index 4de2600c..00000000
--- a/eng/common/cross/arm/sources.list.focal
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/sources.list.jammy b/eng/common/cross/arm/sources.list.jammy
deleted file mode 100644
index 6bb04530..00000000
--- a/eng/common/cross/arm/sources.list.jammy
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/sources.list.jessie b/eng/common/cross/arm/sources.list.jessie
deleted file mode 100644
index 4d142ac9..00000000
--- a/eng/common/cross/arm/sources.list.jessie
+++ /dev/null
@@ -1,3 +0,0 @@
-# Debian (sid) # UNSTABLE
-deb http://ftp.debian.org/debian/ sid main contrib non-free
-deb-src http://ftp.debian.org/debian/ sid main contrib non-free
diff --git a/eng/common/cross/arm/sources.list.xenial b/eng/common/cross/arm/sources.list.xenial
deleted file mode 100644
index 56fbb36a..00000000
--- a/eng/common/cross/arm/sources.list.xenial
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/sources.list.zesty b/eng/common/cross/arm/sources.list.zesty
deleted file mode 100644
index ea2c14a7..00000000
--- a/eng/common/cross/arm/sources.list.zesty
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.bionic b/eng/common/cross/arm64/sources.list.bionic
deleted file mode 100644
index 21095574..00000000
--- a/eng/common/cross/arm64/sources.list.bionic
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.buster b/eng/common/cross/arm64/sources.list.buster
deleted file mode 100644
index 7194ac64..00000000
--- a/eng/common/cross/arm64/sources.list.buster
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://deb.debian.org/debian buster main
-deb-src http://deb.debian.org/debian buster main
-
-deb http://deb.debian.org/debian-security/ buster/updates main
-deb-src http://deb.debian.org/debian-security/ buster/updates main
-
-deb http://deb.debian.org/debian buster-updates main
-deb-src http://deb.debian.org/debian buster-updates main
-
-deb http://deb.debian.org/debian buster-backports main contrib non-free
-deb-src http://deb.debian.org/debian buster-backports main contrib non-free
diff --git a/eng/common/cross/arm64/sources.list.focal b/eng/common/cross/arm64/sources.list.focal
deleted file mode 100644
index 4de2600c..00000000
--- a/eng/common/cross/arm64/sources.list.focal
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.jammy b/eng/common/cross/arm64/sources.list.jammy
deleted file mode 100644
index 6bb04530..00000000
--- a/eng/common/cross/arm64/sources.list.jammy
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.stretch b/eng/common/cross/arm64/sources.list.stretch
deleted file mode 100644
index 0e121577..00000000
--- a/eng/common/cross/arm64/sources.list.stretch
+++ /dev/null
@@ -1,12 +0,0 @@
-deb http://deb.debian.org/debian stretch main
-deb-src http://deb.debian.org/debian stretch main
-
-deb http://deb.debian.org/debian-security/ stretch/updates main
-deb-src http://deb.debian.org/debian-security/ stretch/updates main
-
-deb http://deb.debian.org/debian stretch-updates main
-deb-src http://deb.debian.org/debian stretch-updates main
-
-deb http://deb.debian.org/debian stretch-backports main contrib non-free
-deb-src http://deb.debian.org/debian stretch-backports main contrib non-free
-
diff --git a/eng/common/cross/arm64/sources.list.xenial b/eng/common/cross/arm64/sources.list.xenial
deleted file mode 100644
index 56fbb36a..00000000
--- a/eng/common/cross/arm64/sources.list.xenial
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.zesty b/eng/common/cross/arm64/sources.list.zesty
deleted file mode 100644
index ea2c14a7..00000000
--- a/eng/common/cross/arm64/sources.list.zesty
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/tizen/tizen.patch b/eng/common/cross/arm64/tizen/tizen.patch
index af7c8be0..2cebc547 100644
--- a/eng/common/cross/arm64/tizen/tizen.patch
+++ b/eng/common/cross/arm64/tizen/tizen.patch
@@ -5,5 +5,5 @@ diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
Use the shared library, but some functions are only in
the static library, so try that secondarily. */
OUTPUT_FORMAT(elf64-littleaarch64)
--GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-aarch64.so.1 ) )
+-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-aarch64.so.1 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) )
diff --git a/eng/common/cross/armel/armel.jessie.patch b/eng/common/cross/armel/armel.jessie.patch
deleted file mode 100644
index 2d261561..00000000
--- a/eng/common/cross/armel/armel.jessie.patch
+++ /dev/null
@@ -1,43 +0,0 @@
-diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
---- a/usr/include/urcu/uatomic/generic.h 2014-10-22 15:00:58.000000000 -0700
-+++ b/usr/include/urcu/uatomic/generic.h 2020-10-30 21:38:28.550000000 -0700
-@@ -69,10 +69,10 @@
- #endif
- #ifdef UATOMIC_HAS_ATOMIC_SHORT
- case 2:
-- return __sync_val_compare_and_swap_2(addr, old, _new);
-+ return __sync_val_compare_and_swap_2((uint16_t*) addr, old, _new);
- #endif
- case 4:
-- return __sync_val_compare_and_swap_4(addr, old, _new);
-+ return __sync_val_compare_and_swap_4((uint32_t*) addr, old, _new);
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
- return __sync_val_compare_and_swap_8(addr, old, _new);
-@@ -109,7 +109,7 @@
- return;
- #endif
- case 4:
-- __sync_and_and_fetch_4(addr, val);
-+ __sync_and_and_fetch_4((uint32_t*) addr, val);
- return;
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
-@@ -148,7 +148,7 @@
- return;
- #endif
- case 4:
-- __sync_or_and_fetch_4(addr, val);
-+ __sync_or_and_fetch_4((uint32_t*) addr, val);
- return;
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
-@@ -187,7 +187,7 @@
- return __sync_add_and_fetch_2(addr, val);
- #endif
- case 4:
-- return __sync_add_and_fetch_4(addr, val);
-+ return __sync_add_and_fetch_4((uint32_t*) addr, val);
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
- return __sync_add_and_fetch_8(addr, val);
diff --git a/eng/common/cross/armel/sources.list.jessie b/eng/common/cross/armel/sources.list.jessie
deleted file mode 100644
index 3d9c3059..00000000
--- a/eng/common/cross/armel/sources.list.jessie
+++ /dev/null
@@ -1,3 +0,0 @@
-# Debian (jessie) # Stable
-deb http://ftp.debian.org/debian/ jessie main contrib non-free
-deb-src http://ftp.debian.org/debian/ jessie main contrib non-free
diff --git a/eng/common/cross/armv6/sources.list.buster b/eng/common/cross/armv6/sources.list.buster
deleted file mode 100644
index f27fc4fb..00000000
--- a/eng/common/cross/armv6/sources.list.buster
+++ /dev/null
@@ -1,2 +0,0 @@
-deb http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi
-deb-src http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi
diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh
index f163fb9d..fbd8d808 100644
--- a/eng/common/cross/build-android-rootfs.sh
+++ b/eng/common/cross/build-android-rootfs.sh
@@ -5,15 +5,16 @@ __NDK_Version=r21
usage()
{
echo "Creates a toolchain and sysroot used for cross-compiling for Android."
- echo.
- echo "Usage: $0 [BuildArch] [ApiLevel]"
- echo.
+ echo
+ echo "Usage: $0 [BuildArch] [ApiLevel] [--ndk NDKVersion]"
+ echo
echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
- echo.
+ echo "NDKVersion is the version of Android NDK. The default is r21. See https://developer.android.com/ndk/downloads/revision_history"
+ echo
echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
echo "by setting the TOOLCHAIN_DIR environment variable"
- echo.
+ echo
echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation,"
echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK."
echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.28-arm64. This file is to replace '/etc/os-release', which is not available for Android."
@@ -25,10 +26,15 @@ __BuildArch=arm64
__AndroidArch=aarch64
__AndroidToolchain=aarch64-linux-android
-for i in "$@"
- do
- lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")"
- case $lowerI in
+while :; do
+ if [[ "$#" -le 0 ]]; then
+ break
+ fi
+
+ i=$1
+
+ lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
-?|-h|--help)
usage
exit 1
@@ -43,6 +49,10 @@ for i in "$@"
__AndroidArch=arm
__AndroidToolchain=arm-linux-androideabi
;;
+ --ndk)
+ shift
+ __NDK_Version=$1
+ ;;
*[0-9])
__ApiLevel=$i
;;
@@ -50,8 +60,17 @@ for i in "$@"
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
;;
esac
+ shift
done
+if [[ "$__NDK_Version" == "r21" ]] || [[ "$__NDK_Version" == "r22" ]]; then
+ __NDK_File_Arch_Spec=-x86_64
+ __SysRoot=sysroot
+else
+ __NDK_File_Arch_Spec=
+ __SysRoot=toolchains/llvm/prebuilt/linux-x86_64/sysroot
+fi
+
# Obtain the location of the bash script to figure out where the root of the repo is.
__ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
@@ -78,6 +97,7 @@ fi
echo "Target API level: $__ApiLevel"
echo "Target architecture: $__BuildArch"
+echo "NDK version: $__NDK_Version"
echo "NDK location: $__NDK_Dir"
echo "Target Toolchain location: $__ToolchainDir"
@@ -85,8 +105,8 @@ echo "Target Toolchain location: $__ToolchainDir"
if [ ! -d $__NDK_Dir ]; then
echo Downloading the NDK into $__NDK_Dir
mkdir -p $__NDK_Dir
- wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip
- unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__CrossDir
+ wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux$__NDK_File_Arch_Spec.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux.zip
+ unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux.zip -d $__CrossDir
fi
if [ ! -d $__lldb_Dir ]; then
@@ -116,16 +136,11 @@ for path in $(wget -qO- https://packages.termux.dev/termux-main-21/dists/stable/
fi
done
-cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/sysroot/usr/"
+cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/$__SysRoot/usr/"
# Generate platform file for build.sh script to assign to __DistroRid
echo "Generating platform file..."
-echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/sysroot/android_platform
-
-echo "Now to build coreclr, libraries and installers; run:"
-echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
- --subsetCategory coreclr
-echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
- --subsetCategory libraries
-echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
- --subsetCategory installer
+echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/$__SysRoot/android_platform
+
+echo "Now to build coreclr, libraries and host; run:"
+echo ROOTFS_DIR=$(realpath $__ToolchainDir/$__SysRoot) ./build.sh clr+libs+host --cross --arch $__BuildArch
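As a hypothetical illustration of the updated usage above, './build-android-rootfs.sh arm64 28 --ndk r23' would target API level 28 with NDK r23; since r23 is newer than r22, the sysroot lands under toolchains/llvm/prebuilt/linux-x86_64/sysroot, and the script then prints the ROOTFS_DIR-prefixed './build.sh clr+libs+host --cross --arch arm64' command shown above.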
diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
index 9caf9b02..314c93c5 100644
--- a/eng/common/cross/build-rootfs.sh
+++ b/eng/common/cross/build-rootfs.sh
@@ -5,16 +5,18 @@ set -e
usage()
{
   echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] [--rootfsdir <directory>]"
- echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86"
+ echo "BuildArch can be: arm(default), arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64, x86"
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
echo " for alpine can be specified with version: alpineX.YY or alpineedge"
- echo " for FreeBSD can be: freebsd12, freebsd13"
+ echo " for FreeBSD can be: freebsd13, freebsd14"
+ echo " for OpenBSD can be: openbsd"
echo " for illumos can be: illumos"
echo " for Haiku can be: haiku."
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
echo "llvmx[.y] - optional, LLVM version for LLVM related packages."
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
echo "--skipsigcheck - optional, will skip package signature checks (allowing untrusted packages)."
+  echo "--skipemulation - optional, will skip the qemu and debootstrap requirements when building environments for Debian-based systems."
echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
echo "--jobs N - optional, restrict to N jobs."
exit 1
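As a hypothetical example of the usage above, './build-rootfs.sh arm64 jammy --rootfsdir /tmp/arm64-jammy-rootfs' would produce an Ubuntu 22.04 (jammy) arm64 rootfs at the given path; adding '--skipemulation' skips the qemu and debootstrap requirements described above.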
@@ -26,11 +28,14 @@ __BuildArch=arm
__AlpineArch=armv7
__FreeBSDArch=arm
__FreeBSDMachineArch=armv7
+__OpenBSDArch=arm
+__OpenBSDMachineArch=armv7
__IllumosArch=arm7
__HaikuArch=arm
__QEMUArch=arm
__UbuntuArch=armhf
-__UbuntuRepo="http://ports.ubuntu.com/"
+__UbuntuRepo=
+__UbuntuSuites="updates security backports"
__LLDB_Package="liblldb-3.9-dev"
__SkipUnmount=0
@@ -51,29 +56,28 @@ __UbuntuPackages+=" symlinks"
__UbuntuPackages+=" libicu-dev"
__UbuntuPackages+=" liblttng-ust-dev"
__UbuntuPackages+=" libunwind8-dev"
-__UbuntuPackages+=" libnuma-dev"
__AlpinePackages+=" gettext-dev"
__AlpinePackages+=" icu-dev"
__AlpinePackages+=" libunwind-dev"
__AlpinePackages+=" lttng-ust-dev"
__AlpinePackages+=" compiler-rt"
-__AlpinePackages+=" numactl-dev"
# runtime libraries' dependencies
__UbuntuPackages+=" libcurl4-openssl-dev"
__UbuntuPackages+=" libkrb5-dev"
__UbuntuPackages+=" libssl-dev"
__UbuntuPackages+=" zlib1g-dev"
+__UbuntuPackages+=" libbrotli-dev"
__AlpinePackages+=" curl-dev"
__AlpinePackages+=" krb5-dev"
__AlpinePackages+=" openssl-dev"
__AlpinePackages+=" zlib-dev"
-__FreeBSDBase="12.4-RELEASE"
-__FreeBSDPkg="1.17.0"
-__FreeBSDABI="12"
+__FreeBSDBase="13.5-RELEASE"
+__FreeBSDPkg="1.21.3"
+__FreeBSDABI="13"
__FreeBSDPackages="libunwind"
__FreeBSDPackages+=" icu"
__FreeBSDPackages+=" libinotify"
@@ -81,6 +85,12 @@ __FreeBSDPackages+=" openssl"
__FreeBSDPackages+=" krb5"
__FreeBSDPackages+=" terminfo-db"
+__OpenBSDVersion="7.8"
+__OpenBSDPackages="heimdal-libs"
+__OpenBSDPackages+=" icu4c"
+__OpenBSDPackages+=" inotify-tools"
+__OpenBSDPackages+=" openssl"
+
__IllumosPackages="icu"
__IllumosPackages+=" mit-krb5"
__IllumosPackages+=" openssl"
@@ -90,18 +100,18 @@ __HaikuPackages="gcc_syslibs"
__HaikuPackages+=" gcc_syslibs_devel"
__HaikuPackages+=" gmp"
__HaikuPackages+=" gmp_devel"
-__HaikuPackages+=" icu66"
-__HaikuPackages+=" icu66_devel"
+__HaikuPackages+=" icu[0-9]+"
+__HaikuPackages+=" icu[0-9]*_devel"
__HaikuPackages+=" krb5"
__HaikuPackages+=" krb5_devel"
__HaikuPackages+=" libiconv"
__HaikuPackages+=" libiconv_devel"
-__HaikuPackages+=" llvm12_libunwind"
-__HaikuPackages+=" llvm12_libunwind_devel"
+__HaikuPackages+=" llvm[0-9]*_libunwind"
+__HaikuPackages+=" llvm[0-9]*_libunwind_devel"
__HaikuPackages+=" mpfr"
__HaikuPackages+=" mpfr_devel"
-__HaikuPackages+=" openssl"
-__HaikuPackages+=" openssl_devel"
+__HaikuPackages+=" openssl3"
+__HaikuPackages+=" openssl3_devel"
__HaikuPackages+=" zlib"
__HaikuPackages+=" zlib_devel"
@@ -127,9 +137,12 @@ __AlpineKeys='
616adfeb:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq0BFD1D4lIxQcsqEpQzU\npNCYM3aP1V/fxxVdT4DWvSI53JHTwHQamKdMWtEXetWVbP5zSROniYKFXd/xrD9X\n0jiGHey3lEtylXRIPxe5s+wXoCmNLcJVnvTcDtwx/ne2NLHxp76lyc25At+6RgE6\nADjLVuoD7M4IFDkAsd8UQ8zM0Dww9SylIk/wgV3ZkifecvgUQRagrNUdUjR56EBZ\nraQrev4hhzOgwelT0kXCu3snbUuNY/lU53CoTzfBJ5UfEJ5pMw1ij6X0r5S9IVsy\nKLWH1hiO0NzU2c8ViUYCly4Fe9xMTFc6u2dy/dxf6FwERfGzETQxqZvSfrRX+GLj\n/QZAXiPg5178hT/m0Y3z5IGenIC/80Z9NCi+byF1WuJlzKjDcF/TU72zk0+PNM/H\nKuppf3JT4DyjiVzNC5YoWJT2QRMS9KLP5iKCSThwVceEEg5HfhQBRT9M6KIcFLSs\nmFjx9kNEEmc1E8hl5IR3+3Ry8G5/bTIIruz14jgeY9u5jhL8Vyyvo41jgt9sLHR1\n/J1TxKfkgksYev7PoX6/ZzJ1ksWKZY5NFoDXTNYUgzFUTOoEaOg3BAQKadb3Qbbq\nXIrxmPBdgrn9QI7NCgfnAY3Tb4EEjs3ON/BNyEhUENcXOH6I1NbcuBQ7g9P73kE4\nVORdoc8MdJ5eoKBpO8Ww8HECAwEAAQ==
616ae350:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyduVzi1mWm+lYo2Tqt/0\nXkCIWrDNP1QBMVPrE0/ZlU2bCGSoo2Z9FHQKz/mTyMRlhNqTfhJ5qU3U9XlyGOPJ\npiM+b91g26pnpXJ2Q2kOypSgOMOPA4cQ42PkHBEqhuzssfj9t7x47ppS94bboh46\nxLSDRff/NAbtwTpvhStV3URYkxFG++cKGGa5MPXBrxIp+iZf9GnuxVdST5PGiVGP\nODL/b69sPJQNbJHVquqUTOh5Ry8uuD2WZuXfKf7/C0jC/ie9m2+0CttNu9tMciGM\nEyKG1/Xhk5iIWO43m4SrrT2WkFlcZ1z2JSf9Pjm4C2+HovYpihwwdM/OdP8Xmsnr\nDzVB4YvQiW+IHBjStHVuyiZWc+JsgEPJzisNY0Wyc/kNyNtqVKpX6dRhMLanLmy+\nf53cCSI05KPQAcGj6tdL+D60uKDkt+FsDa0BTAobZ31OsFVid0vCXtsbplNhW1IF\nHwsGXBTVcfXg44RLyL8Lk/2dQxDHNHzAUslJXzPxaHBLmt++2COa2EI1iWlvtznk\nOk9WP8SOAIj+xdqoiHcC4j72BOVVgiITIJNHrbppZCq6qPR+fgXmXa+sDcGh30m6\n9Wpbr28kLMSHiENCWTdsFij+NQTd5S47H7XTROHnalYDuF1RpS+DpQidT5tUimaT\nJZDr++FjKrnnijbyNF8b98UCAwEAAQ==
616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ==
+66ba20fe:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtfB12w4ZgqsXWZDfUAV/\n6Y4aHUKIu3q4SXrNZ7CXF9nXoAVYrS7NAxJdAodsY3vPCN0g5O8DFXR+390LdOuQ\n+HsGKCc1k5tX5ZXld37EZNTNSbR0k+NKhd9h6X3u6wqPOx7SIKxwAQR8qeeFq4pP\nrt9GAGlxtuYgzIIcKJPwE0dZlcBCg+GnptCUZXp/38BP1eYC+xTXSL6Muq1etYfg\nodXdb7Yl+2h1IHuOwo5rjgY5kpY7GcAs8AjGk3lDD/av60OTYccknH0NCVSmPoXK\nvrxDBOn0LQRNBLcAfnTKgHrzy0Q5h4TNkkyTgxkoQw5ObDk9nnabTxql732yy9BY\ns+hM9+dSFO1HKeVXreYSA2n1ndF18YAvAumzgyqzB7I4pMHXq1kC/8bONMJxwSkS\nYm6CoXKyavp7RqGMyeVpRC7tV+blkrrUml0BwNkxE+XnwDRB3xDV6hqgWe0XrifD\nYTfvd9ScZQP83ip0r4IKlq4GMv/R5shcCRJSkSZ6QSGshH40JYSoiwJf5FHbj9ND\n7do0UAqebWo4yNx63j/wb2ULorW3AClv0BCFSdPsIrCStiGdpgJDBR2P2NZOCob3\nG9uMj+wJD6JJg2nWqNJxkANXX37Qf8plgzssrhrgOvB0fjjS7GYhfkfmZTJ0wPOw\nA8+KzFseBh4UFGgue78KwgkCAwEAAQ==
'
__Keyring=
+__KeyringFile="/usr/share/keyrings/ubuntu-archive-keyring.gpg"
__SkipSigCheck=0
+__SkipEmulation=0
__UseMirror=0
__UnprocessedBuildArgs=
@@ -142,7 +155,6 @@ while :; do
case $lowerI in
-\?|-h|--help)
usage
- exit 1
;;
arm)
__BuildArch=arm
@@ -157,12 +169,19 @@ while :; do
__QEMUArch=aarch64
__FreeBSDArch=arm64
__FreeBSDMachineArch=aarch64
+ __OpenBSDArch=arm64
+ __OpenBSDMachineArch=aarch64
;;
armel)
__BuildArch=armel
__UbuntuArch=armel
- __UbuntuRepo="http://ftp.debian.org/debian/"
- __CodeName=jessie
+ __UbuntuRepo="http://archive.debian.org/debian/"
+ __CodeName=buster
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
+ __LLDB_Package="liblldb-6.0-dev"
+ __UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
+ __UbuntuPackages="${__UbuntuPackages// libomp5/}"
+ __UbuntuSuites=
;;
armv6)
__BuildArch=armv6
@@ -170,10 +189,24 @@ while :; do
__QEMUArch=arm
__UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/"
__CodeName=buster
+ __KeyringFile="/usr/share/keyrings/raspbian-archive-keyring.gpg"
__LLDB_Package="liblldb-6.0-dev"
+ __UbuntuSuites=
+
+ if [[ -e "$__KeyringFile" ]]; then
+ __Keyring="--keyring $__KeyringFile"
+ fi
+ ;;
+ loongarch64)
+ __BuildArch=loongarch64
+ __AlpineArch=loongarch64
+ __QEMUArch=loongarch64
+ __UbuntuArch=loong64
+ __UbuntuSuites=unreleased
+ __LLDB_Package="liblldb-19-dev"
- if [[ -e "/usr/share/keyrings/raspbian-archive-keyring.gpg" ]]; then
- __Keyring="--keyring /usr/share/keyrings/raspbian-archive-keyring.gpg"
+ if [[ "$__CodeName" == "sid" ]]; then
+ __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/"
fi
;;
riscv64)
@@ -182,13 +215,8 @@ while :; do
__AlpinePackages="${__AlpinePackages// lldb-dev/}"
__QEMUArch=riscv64
__UbuntuArch=riscv64
- __UbuntuRepo="http://deb.debian.org/debian-ports"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
unset __LLDB_Package
-
- if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then
- __Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring"
- fi
;;
ppc64le)
__BuildArch=ppc64le
@@ -218,6 +246,8 @@ while :; do
__UbuntuArch=amd64
__FreeBSDArch=amd64
__FreeBSDMachineArch=amd64
+ __OpenBSDArch=amd64
+ __OpenBSDMachineArch=amd64
__illumosArch=x86_64
__HaikuArch=x86_64
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
@@ -229,12 +259,19 @@ while :; do
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
;;
lldb*)
- version="${lowerI/lldb/}"
- parts=(${version//./ })
+ version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
+ majorVersion="${version%%.*}"
+
+ [ -z "${version##*.*}" ] && minorVersion="${version#*.}"
+ if [ -z "$minorVersion" ]; then
+ minorVersion=0
+ fi
# for versions > 6.0, lldb has dropped the minor version
- if [[ "${parts[0]}" -gt 6 ]]; then
- version="${parts[0]}"
+ if [ "$majorVersion" -le 6 ]; then
+ version="$majorVersion.$minorVersion"
+ else
+ version="$majorVersion"
fi
__LLDB_Package="liblldb-${version}-dev"
@@ -243,51 +280,40 @@ while :; do
unset __LLDB_Package
;;
llvm*)
- version="${lowerI/llvm/}"
- parts=(${version//./ })
- __LLVM_MajorVersion="${parts[0]}"
- __LLVM_MinorVersion="${parts[1]}"
-
- # for versions > 6.0, llvm has dropped the minor version
- if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then
- __LLVM_MinorVersion=0;
+ version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
+ __LLVM_MajorVersion="${version%%.*}"
+
+ [ -z "${version##*.*}" ] && __LLVM_MinorVersion="${version#*.}"
+ if [ -z "$__LLVM_MinorVersion" ]; then
+ __LLVM_MinorVersion=0
fi
- ;;
- xenial) # Ubuntu 16.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=xenial
+
+    # for versions > 6.0, llvm has dropped the minor version
+ if [ "$__LLVM_MajorVersion" -gt 6 ]; then
+ __LLVM_MinorVersion=
fi
+
;;
- zesty) # Ubuntu 17.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=zesty
- fi
+ xenial) # Ubuntu 16.04
+ __CodeName=xenial
;;
bionic) # Ubuntu 18.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=bionic
- fi
+ __CodeName=bionic
;;
focal) # Ubuntu 20.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=focal
- fi
+ __CodeName=focal
;;
jammy) # Ubuntu 22.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=jammy
- fi
+ __CodeName=jammy
;;
- jessie) # Debian 8
- __CodeName=jessie
-
- if [[ -z "$__UbuntuRepo" ]]; then
- __UbuntuRepo="http://ftp.debian.org/debian/"
- fi
+ noble) # Ubuntu 24.04
+ __CodeName=noble
+ __LLDB_Package="liblldb-19-dev"
;;
stretch) # Debian 9
__CodeName=stretch
__LLDB_Package="liblldb-6.0-dev"
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
@@ -296,25 +322,54 @@ while :; do
buster) # Debian 10
__CodeName=buster
__LLDB_Package="liblldb-6.0-dev"
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
- __UbuntuRepo="http://ftp.debian.org/debian/"
+ __UbuntuRepo="http://archive.debian.org/debian/"
fi
;;
bullseye) # Debian 11
__CodeName=bullseye
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
- sid) # Debian sid
- __CodeName=sid
+ bookworm) # Debian 12
+ __CodeName=bookworm
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
+ sid) # Debian sid
+ __CodeName=sid
+ __UbuntuSuites=
+
+ # Debian-Ports architectures need different values
+ case "$__UbuntuArch" in
+ amd64|arm64|armel|armhf|i386|mips64el|ppc64el|riscv64|s390x)
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
+
+ if [[ -z "$__UbuntuRepo" ]]; then
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ fi
+ ;;
+ *)
+ __KeyringFile="/usr/share/keyrings/debian-ports-archive-keyring.gpg"
+
+ if [[ -z "$__UbuntuRepo" ]]; then
+ __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/"
+ fi
+ ;;
+ esac
+
+ if [[ -e "$__KeyringFile" ]]; then
+ __Keyring="--keyring $__KeyringFile"
+ fi
+ ;;
tizen)
__CodeName=
__UbuntuRepo=
@@ -323,25 +378,28 @@ while :; do
alpine*)
__CodeName=alpine
__UbuntuRepo=
- version="${lowerI/alpine/}"
- if [[ "$version" == "edge" ]]; then
+ if [[ "$lowerI" == "alpineedge" ]]; then
__AlpineVersion=edge
else
- parts=(${version//./ })
- __AlpineMajorVersion="${parts[0]}"
- __AlpineMinoVersion="${parts[1]}"
- __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinoVersion"
+ version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
+ __AlpineMajorVersion="${version%%.*}"
+ __AlpineMinorVersion="${version#*.}"
+ __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinorVersion"
fi
;;
- freebsd12)
+ freebsd13)
__CodeName=freebsd
__SkipUnmount=1
;;
- freebsd13)
+ freebsd14)
__CodeName=freebsd
- __FreeBSDBase="13.2-RELEASE"
- __FreeBSDABI="13"
+ __FreeBSDBase="14.3-RELEASE"
+ __FreeBSDABI="14"
+ __SkipUnmount=1
+ ;;
+ openbsd)
+ __CodeName=openbsd
__SkipUnmount=1
;;
illumos)
@@ -358,6 +416,9 @@ while :; do
--skipsigcheck)
__SkipSigCheck=1
;;
+ --skipemulation)
+ __SkipEmulation=1
+ ;;
--rootfsdir|-rootfsdir)
shift
__RootfsDir="$1"
@@ -390,16 +451,15 @@ case "$__AlpineVersion" in
elif [[ "$__AlpineArch" == "x86" ]]; then
__AlpineVersion=3.17 # minimum version that supports lldb-dev
__AlpinePackages+=" llvm15-libs"
- elif [[ "$__AlpineArch" == "riscv64" ]]; then
+ elif [[ "$__AlpineArch" == "riscv64" || "$__AlpineArch" == "loongarch64" ]]; then
+ __AlpineVersion=3.21 # minimum version that supports lldb-dev
+ __AlpinePackages+=" llvm19-libs"
+ elif [[ -n "$__AlpineMajorVersion" ]]; then
+ # use whichever alpine version is provided and select the latest toolchain libs
__AlpineLlvmLibsLookup=1
- __AlpineVersion=edge # minimum version with APKINDEX.tar.gz (packages archive)
else
__AlpineVersion=3.13 # 3.13 to maximize compatibility
__AlpinePackages+=" llvm10-libs"
-
- if [[ "$__AlpineArch" == "armv7" ]]; then
- __AlpinePackages="${__AlpinePackages//numactl-dev/}"
- fi
fi
esac
@@ -409,17 +469,12 @@ if [[ "$__AlpineVersion" =~ 3\.1[345] ]]; then
__AlpinePackages="${__AlpinePackages/compiler-rt/compiler-rt-static}"
fi
-if [[ "$__BuildArch" == "armel" ]]; then
- __LLDB_Package="lldb-3.5-dev"
-fi
+__UbuntuPackages+=" ${__LLDB_Package:-}"
-if [[ "$__CodeName" == "xenial" && "$__UbuntuArch" == "armhf" ]]; then
- # libnuma-dev is not available on armhf for xenial
- __UbuntuPackages="${__UbuntuPackages//libnuma-dev/}"
+if [[ -z "$__UbuntuRepo" ]]; then
+ __UbuntuRepo="http://ports.ubuntu.com/"
fi
-__UbuntuPackages+=" ${__LLDB_Package:-}"
-
if [[ -n "$__LLVM_MajorVersion" ]]; then
__UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev"
fi
@@ -442,20 +497,41 @@ fi
mkdir -p "$__RootfsDir"
__RootfsDir="$( cd "$__RootfsDir" && pwd )"
+__hasWget=
+ensureDownloadTool()
+{
+ if command -v wget &> /dev/null; then
+ __hasWget=1
+ elif command -v curl &> /dev/null; then
+ __hasWget=0
+ else
+ >&2 echo "ERROR: either wget or curl is required by this script."
+ exit 1
+ fi
+}
+
if [[ "$__CodeName" == "alpine" ]]; then
__ApkToolsVersion=2.12.11
- __ApkToolsSHA512SUM=53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33
__ApkToolsDir="$(mktemp -d)"
__ApkKeysDir="$(mktemp -d)"
+ arch="$(uname -m)"
- wget "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic//v$__ApkToolsVersion/x86_64/apk.static" -P "$__ApkToolsDir"
- echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c
- chmod +x "$__ApkToolsDir/apk.static"
+ ensureDownloadTool
- if [[ -f "/usr/bin/qemu-$__QEMUArch-static" ]]; then
- mkdir -p "$__RootfsDir"/usr/bin
- cp -v "/usr/bin/qemu-$__QEMUArch-static" "$__RootfsDir/usr/bin"
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -P "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static"
+ else
+ curl -SLO --create-dirs --output-dir "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static"
+ fi
+ if [[ "$arch" == "x86_64" ]]; then
+ __ApkToolsSHA512SUM="53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33"
+ elif [[ "$arch" == "aarch64" ]]; then
+ __ApkToolsSHA512SUM="9e2b37ecb2b56c05dad23d379be84fd494c14bd730b620d0d576bda760588e1f2f59a7fcb2f2080577e0085f23a0ca8eadd993b4e61c2ab29549fdb71969afd0"
+ else
+ echo "WARNING: add missing hash for your host architecture. To find the value, use: 'find /tmp -name apk.static -exec sha512sum {} \;'"
fi
+ echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c
+ chmod +x "$__ApkToolsDir/apk.static"
if [[ "$__AlpineVersion" == "edge" ]]; then
version=edge
@@ -476,37 +552,55 @@ if [[ "$__CodeName" == "alpine" ]]; then
__ApkSignatureArg="--keys-dir $__ApkKeysDir"
fi
+ if [[ "$__SkipEmulation" == "1" ]]; then
+ __NoEmulationArg="--no-scripts"
+ fi
+
# initialize DB
+ # shellcheck disable=SC2086
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add
if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then
+ # shellcheck disable=SC2086
__AlpinePackages+=" $("$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \
- search 'llvm*-libs' | sort | tail -1 | sed 's/-[^-]*//2g')"
+ search 'llvm*-libs' | grep -E '^llvm' | sort | tail -1 | sed 's/-[^-]*//2g')"
fi
# install all packages in one go
+ # shellcheck disable=SC2086
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
- -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \
+ -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" $__NoEmulationArg \
add $__AlpinePackages
rm -r "$__ApkToolsDir"
elif [[ "$__CodeName" == "freebsd" ]]; then
mkdir -p "$__RootfsDir"/usr/local/etc
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
- wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
+
+ ensureDownloadTool
+
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
+ else
+ curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
+ fi
echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
- echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
+ echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
mkdir -p "$__RootfsDir"/tmp
# get and build package manager
- wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
+ else
+ curl -SL "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
+ fi
cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# needed for install to succeed
mkdir -p "$__RootfsDir"/host/etc
@@ -514,27 +608,99 @@ elif [[ "$__CodeName" == "freebsd" ]]; then
rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# install packages we need.
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update
+ # shellcheck disable=SC2086
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
+elif [[ "$__CodeName" == "openbsd" ]]; then
+ # determine mirrors
+ OPENBSD_MIRROR="https://cdn.openbsd.org/pub/OpenBSD/$__OpenBSDVersion/$__OpenBSDMachineArch"
+
+ # download base system sets
+ ensureDownloadTool
+
+ BASE_SETS=(base comp)
+ for set in "${BASE_SETS[@]}"; do
+ FILE="${set}${__OpenBSDVersion//./}.tgz"
+ echo "Downloading $FILE..."
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- "$OPENBSD_MIRROR/$FILE" | tar -C "$__RootfsDir" -xzpf -
+ else
+ curl -SL "$OPENBSD_MIRROR/$FILE" | tar -C "$__RootfsDir" -xzpf -
+ fi
+ done
+
+ PKG_MIRROR="https://cdn.openbsd.org/pub/OpenBSD/${__OpenBSDVersion}/packages/${__OpenBSDMachineArch}"
+
+ echo "Installing packages into sysroot..."
+
+ # Fetch package index once
+ if [[ "$__hasWget" == 1 ]]; then
+ PKG_INDEX=$(wget -qO- "$PKG_MIRROR/")
+ else
+ PKG_INDEX=$(curl -s "$PKG_MIRROR/")
+ fi
+
+ for pkg in $__OpenBSDPackages; do
+ PKG_FILE=$(echo "$PKG_INDEX" | grep -Po ">\K${pkg}-[0-9][^\" ]*\.tgz" \
+ | sort -V | tail -n1)
+
+ echo "Resolved package filename for $pkg: $PKG_FILE"
+
+ [[ -z "$PKG_FILE" ]] && { echo "ERROR: Package $pkg not found"; exit 1; }
+
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- "$PKG_MIRROR/$PKG_FILE" | tar -C "$__RootfsDir" -xzpf -
+ else
+ curl -SL "$PKG_MIRROR/$PKG_FILE" | tar -C "$__RootfsDir" -xzpf -
+ fi
+ done
+
+ echo "Creating versionless symlinks for shared libraries..."
+ # Find all versioned .so files and create the base .so symlink
+ for lib in "$__RootfsDir/usr/lib/libc++.so."* "$__RootfsDir/usr/lib/libc++abi.so."* "$__RootfsDir/usr/lib/libpthread.so."*; do
+ if [ -f "$lib" ]; then
+ # Extract the filename (e.g., libc++.so.12.0)
+ VERSIONED_NAME=$(basename "$lib")
+ # Remove the trailing version numbers (e.g., libc++.so)
+ BASE_NAME=${VERSIONED_NAME%.so.*}.so
+ # Create the symlink in the same directory
+ ln -sf "$VERSIONED_NAME" "$__RootfsDir/usr/lib/$BASE_NAME"
+ fi
+ done
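
The symlink loop above turns each versioned library file name into the linker-visible base name by trimming everything after the first `.so.`. The same renaming rule, shown in Python with made-up names:

```python
# Equivalent of the `${VERSIONED_NAME%.so.*}.so` expansion for these library names.
import re

def versionless(name: str) -> str:
    return re.sub(r"\.so\..*$", ".so", name)

assert versionless("libc++.so.12.0") == "libc++.so"
assert versionless("libpthread.so.26.1") == "libpthread.so"
```
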
elif [[ "$__CodeName" == "illumos" ]]; then
mkdir "$__RootfsDir/tmp"
pushd "$__RootfsDir/tmp"
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
+
+ ensureDownloadTool
+
echo "Downloading sysroot."
- wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
+ else
+ curl -SL https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
+ fi
echo "Building binutils. Please wait.."
- wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- https://ftp.gnu.org/gnu/binutils/binutils-2.42.tar.xz | tar -xJf -
+ else
+ curl -SL https://ftp.gnu.org/gnu/binutils/binutils-2.42.tar.xz | tar -xJf -
+ fi
mkdir build-binutils && cd build-binutils
- ../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
+ ../binutils-2.42/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.11" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
make -j "$JOBS" && make install && cd ..
echo "Building gcc. Please wait.."
- wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- https://ftp.gnu.org/gnu/gcc/gcc-13.3.0/gcc-13.3.0.tar.xz | tar -xJf -
+ else
+ curl -SL https://ftp.gnu.org/gnu/gcc/gcc-13.3.0/gcc-13.3.0.tar.xz | tar -xJf -
+ fi
CFLAGS="-fPIC"
CXXFLAGS="-fPIC"
CXXFLAGS_FOR_TARGET="-fPIC"
CFLAGS_FOR_TARGET="-fPIC"
export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET
mkdir build-gcc && cd build-gcc
- ../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
+ ../gcc-13.3.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.11" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
--with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \
--disable-libquadmath-support --disable-shared --enable-tls
make -j "$JOBS" && make install && cd ..
@@ -542,9 +708,13 @@ elif [[ "$__CodeName" == "illumos" ]]; then
if [[ "$__UseMirror" == 1 ]]; then
BaseUrl=https://pkgsrc.smartos.skylime.net
fi
- BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All"
+ BaseUrl="$BaseUrl/packages/SmartOS/2019Q4/${__illumosArch}/All"
echo "Downloading manifest"
- wget "$BaseUrl"
+ if [[ "$__hasWget" == 1 ]]; then
+ wget "$BaseUrl"
+ else
+ curl -SLO "$BaseUrl"
+ fi
echo "Downloading dependencies."
read -ra array <<<"$__IllumosPackages"
for package in "${array[@]}"; do
@@ -552,7 +722,11 @@ elif [[ "$__CodeName" == "illumos" ]]; then
# find last occurrence of package in listing and extract its name
package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)"
echo "Resolved name '$package'"
- wget "$BaseUrl"/"$package".tgz
+ if [[ "$__hasWget" == 1 ]]; then
+ wget "$BaseUrl"/"$package".tgz
+ else
+ curl -SLO "$BaseUrl"/"$package".tgz
+ fi
ar -x "$package".tgz
tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null
done
@@ -561,10 +735,17 @@ elif [[ "$__CodeName" == "illumos" ]]; then
rm -rf "$__RootfsDir"/{tmp,+*}
mkdir -p "$__RootfsDir"/usr/include/net
mkdir -p "$__RootfsDir"/usr/include/netpacket
- wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
- wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
- wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
- wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
+ wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
+ wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
+ wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
+ else
+ curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
+ curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
+ curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
+ curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
+ fi
elif [[ "$__CodeName" == "haiku" ]]; then
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
@@ -574,29 +755,55 @@ elif [[ "$__CodeName" == "haiku" ]]; then
mkdir "$__RootfsDir/tmp/download"
- echo "Downloading Haiku package tool"
- git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 $__RootfsDir/tmp/script
- wget -O "$__RootfsDir/tmp/download/hosttools.zip" $($__RootfsDir/tmp/script/fetch.sh --hosttools)
+ ensureDownloadTool
+
+ echo "Downloading Haiku package tools"
+ git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 "$__RootfsDir/tmp/script"
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)"
+ else
+ curl -SLo "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)"
+ fi
+
unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin"
- DepotBaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg"
- HpkgBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current"
+ HaikuBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current"
+ HaikuPortsBaseUrl="https://eu.hpkg.haiku-os.org/haikuports/master/$__HaikuArch/current"
+
+ echo "Downloading HaikuPorts package repository index..."
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -P "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo"
+ else
+ curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo"
+ fi
- # Download Haiku packages
echo "Downloading Haiku packages"
read -ra array <<<"$__HaikuPackages"
for package in "${array[@]}"; do
echo "Downloading $package..."
- # API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60
- # The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598
- hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
- --header='Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
- wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
+ hpkgFilename="$(LD_LIBRARY_PATH="$__RootfsDir/tmp/bin" "$__RootfsDir/tmp/bin/package_repo" list -f "$__RootfsDir/tmp/download/repo" |
+ grep -E "${package}-" | sort -V | tail -n 1 | xargs)"
+ if [ -z "$hpkgFilename" ]; then
+ >&2 echo "ERROR: package $package missing."
+ exit 1
+ fi
+ echo "Resolved filename: $hpkgFilename..."
+ hpkgDownloadUrl="$HaikuPortsBaseUrl/packages/$hpkgFilename"
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
+ else
+ curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
+ fi
done
for package in haiku haiku_devel; do
echo "Downloading $package..."
- hpkgVersion="$(wget -qO- $HpkgBaseUrl | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
- wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
+ if [[ "$__hasWget" == 1 ]]; then
+ hpkgVersion="$(wget -qO- "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
+ wget -P "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
+ else
+ hpkgVersion="$(curl -sSL "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
+ curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
+ fi
done
# Set up the sysroot
@@ -609,7 +816,11 @@ elif [[ "$__CodeName" == "haiku" ]]; then
# Download buildtools
echo "Downloading Haiku buildtools"
- wget -O "$__RootfsDir/tmp/download/buildtools.zip" $($__RootfsDir/tmp/script/fetch.sh --buildtools --arch=$__HaikuArch)
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)"
+ else
+ curl -SLo "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)"
+ fi
unzip -o "$__RootfsDir/tmp/download/buildtools.zip" -d "$__RootfsDir"
# Cleaning up temporary files
@@ -617,15 +828,69 @@ elif [[ "$__CodeName" == "haiku" ]]; then
popd
rm -rf "$__RootfsDir/tmp"
elif [[ -n "$__CodeName" ]]; then
+ __Suites="$__CodeName $(for suite in $__UbuntuSuites; do echo -n "$__CodeName-$suite "; done)"
+
+ if [[ "$__SkipEmulation" == "1" ]]; then
+ if [[ -z "$AR" ]]; then
+ if command -v ar &>/dev/null; then
+ AR="$(command -v ar)"
+ elif command -v llvm-ar &>/dev/null; then
+ AR="$(command -v llvm-ar)"
+ else
+ echo "Unable to find ar or llvm-ar on PATH, add them to PATH or set AR environment variable pointing to the available AR tool"
+ exit 1
+ fi
+ fi
+
+ PYTHON=${PYTHON_EXECUTABLE:-python3}
+
+ # shellcheck disable=SC2086,SC2046
+ echo running "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \
+ $(for suite in $__Suites; do echo -n "--suite $suite "; done) \
+ $__UbuntuPackages
+ # shellcheck disable=SC2086,SC2046
+ "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \
+ $(for suite in $__Suites; do echo -n "--suite $suite "; done) \
+ $__UbuntuPackages
+
+ exit 0
+ fi
+
+ __UpdateOptions=
if [[ "$__SkipSigCheck" == "0" ]]; then
__Keyring="$__Keyring --force-check-gpg"
+ else
+ __Keyring=
+ __UpdateOptions="--allow-unauthenticated --allow-insecure-repositories"
fi
- debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
- cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list"
- chroot "$__RootfsDir" apt-get update
+ # shellcheck disable=SC2086
+ echo running debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
+
+ # shellcheck disable=SC2086
+ if ! debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"; then
+ echo "debootstrap failed! dumping debootstrap.log"
+ cat "$__RootfsDir/debootstrap/debootstrap.log"
+ exit 1
+ fi
+
+ rm -rf "$__RootfsDir"/etc/apt/*.{sources,list} "$__RootfsDir"/etc/apt/sources.list.d
+ mkdir -p "$__RootfsDir/etc/apt/sources.list.d/"
+
+ # shellcheck disable=SC2086
+    cat > "$__RootfsDir/etc/apt/sources.list.d/$__CodeName.sources" <<EOF
diff --git a/eng/common/cross/install-debs.py b/eng/common/cross/install-debs.py
new file mode 100644
+                return (token1 > token2) - (token1 < token2)
+ else:
+ return -1 if isinstance(token1, str) else 1
+
+ return len(tokens1) - len(tokens2)
+
+def compare_debian_versions(version1, version2):
+ """Compare two Debian package versions."""
+ epoch1, upstream1, revision1 = parse_debian_version(version1)
+ epoch2, upstream2, revision2 = parse_debian_version(version2)
+
+ if epoch1 != epoch2:
+ return epoch1 - epoch2
+
+ result = compare_upstream_version(upstream1, upstream2)
+ if result != 0:
+ return result
+
+ return compare_upstream_version(revision1, revision2)
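
compare_debian_versions applies the usual Debian ordering: epochs compare numerically, then the upstream part, then the revision, with each part tokenized into alternating digit and non-digit runs so that `2.31` sorts after `2.4`. A self-contained sketch of that ordering (it does not reuse the helpers above, whose internals are partially elided in this diff, and it simplifies parse_debian_version):

```python
# Hedged sketch of the ordering rule, assuming the usual epoch:upstream-revision layout.
import re

def split_version(v):
    epoch, _, rest = v.rpartition(":")
    if "-" in rest:
        upstream, _, revision = rest.rpartition("-")
    else:
        upstream, revision = rest, ""
    return (int(epoch) if epoch else 0, upstream, revision)

def tokens(s):
    # "2.31" -> [2, ".", 31]: digit runs compare numerically, text runs lexically.
    return [int(t) if t.isdigit() else t for t in re.findall(r"\d+|\D+", s)]

def cmp_part(a, b):
    for t1, t2 in zip(tokens(a), tokens(b)):
        if t1 != t2:
            if isinstance(t1, type(t2)):
                return (t1 > t2) - (t1 < t2)
            return -1 if isinstance(t1, str) else 1
    return len(tokens(a)) - len(tokens(b))

def cmp_version(v1, v2):
    e1, u1, r1 = split_version(v1)
    e2, u2, r2 = split_version(v2)
    return (e1 - e2) or cmp_part(u1, u2) or cmp_part(r1, r2)

assert cmp_version("2.31-13", "2.4-1") > 0   # 31 > 4 numerically, unlike a plain string sort
assert cmp_version("1:1.0-1", "2.0-1") > 0   # a nonzero epoch always wins
```
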
+
+def resolve_dependencies(packages, aliases, desired_packages):
+ """Recursively resolves dependencies for the desired packages."""
+ resolved = []
+ to_process = deque(desired_packages)
+
+ while to_process:
+ current = to_process.popleft()
+ resolved_package = current if current in packages else aliases.get(current, [None])[0]
+
+ if not resolved_package:
+ print(f"Error: Package '{current}' was not found in the available packages.")
+ sys.exit(1)
+
+ if resolved_package not in resolved:
+ resolved.append(resolved_package)
+
+ deps = packages.get(resolved_package, {}).get("Depends", "")
+ if deps:
+ deps = [dep.split(' ')[0] for dep in deps.split(', ') if dep]
+ for dep in deps:
+ if dep not in resolved and dep not in to_process and dep in packages:
+ to_process.append(dep)
+
+ return resolved
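
resolve_dependencies is a breadth-first walk over the `Depends` fields: the queue seeds with the requested packages, each name is resolved either directly or through a `Provides` alias, and its dependencies are appended until the closure is complete. A tiny worked example, run alongside the functions in this file (package names and versions below are made up):

```python
# Toy input for resolve_dependencies(); real data comes from parse_package_index().
packages = {
    "libicu-dev": {"Version": "72.1-3", "Depends": "libicu72 (>= 72.1), libc6-dev"},
    "libicu72":   {"Version": "72.1-3", "Depends": "libc6"},
    "libc6-dev":  {"Version": "2.36-9", "Depends": "libc6"},
    "libc6":      {"Version": "2.36-9", "Depends": ""},
}
aliases = {}

print(resolve_dependencies(packages, aliases, ["libicu-dev"]))
# ['libicu-dev', 'libicu72', 'libc6-dev', 'libc6']
```
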
+
+def parse_package_index(content):
+ """Parses the Packages.gz file and returns package information."""
+ packages = {}
+ aliases = {}
+ entries = re.split(r'\n\n+', content)
+
+ for entry in entries:
+ fields = dict(re.findall(r'^(\S+): (.+)$', entry, re.MULTILINE))
+ if "Package" in fields:
+ package_name = fields["Package"]
+ version = fields.get("Version")
+ filename = fields.get("Filename")
+ depends = fields.get("Depends")
+ provides = fields.get("Provides", None)
+
+ # Only update if package_name is not in packages or if the new version is higher
+ if package_name not in packages or compare_debian_versions(version, packages[package_name]["Version"]) > 0:
+ packages[package_name] = {
+ "Version": version,
+ "Filename": filename,
+ "Depends": depends
+ }
+
+ # Update aliases if package provides any alternatives
+ if provides:
+ provides_list = [x.strip() for x in provides.split(",")]
+ for alias in provides_list:
+ # Strip version specifiers
+ alias_name = re.sub(r'\s*\(=.*\)', '', alias)
+ if alias_name not in aliases:
+ aliases[alias_name] = []
+ if package_name not in aliases[alias_name]:
+ aliases[alias_name].append(package_name)
+
+ return packages, aliases
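
parse_package_index splits the decompressed index on blank lines, keeps the highest version seen for each package name, and records `Provides` entries as aliases back to the providing package. A small usage example against a hand-written two-entry index (field values are made up; real input is the suite's Packages data):

```python
sample = """\
Package: libssl-dev
Version: 3.0.11-1
Filename: pool/main/o/openssl/libssl-dev_3.0.11-1_amd64.deb
Depends: libssl3 (= 3.0.11-1)
Provides: libssl-dev-any

Package: libssl3
Version: 3.0.11-1
Filename: pool/main/o/openssl/libssl3_3.0.11-1_amd64.deb
"""

pkgs, aliases = parse_package_index(sample)
assert pkgs["libssl-dev"]["Filename"].endswith("libssl-dev_3.0.11-1_amd64.deb")
assert aliases["libssl-dev-any"] == ["libssl-dev"]
```
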
+
+def install_packages(mirror, packages_info, aliases, tmp_dir, extract_dir, ar_tool, desired_packages):
+ """Downloads .deb files and extracts them."""
+ resolved_packages = resolve_dependencies(packages_info, aliases, desired_packages)
+ print(f"Resolved packages (including dependencies): {resolved_packages}")
+
+ packages_to_download = {}
+
+ for pkg in resolved_packages:
+ if pkg in packages_info:
+ packages_to_download[pkg] = packages_info[pkg]
+
+ if pkg in aliases:
+ for alias in aliases[pkg]:
+ if alias in packages_info:
+ packages_to_download[alias] = packages_info[alias]
+
+ asyncio.run(download_deb_files_parallel(mirror, packages_to_download, tmp_dir))
+
+ package_to_deb_file_map = {}
+ for pkg in resolved_packages:
+ pkg_info = packages_info.get(pkg)
+ if pkg_info:
+ deb_filename = pkg_info.get("Filename")
+ if deb_filename:
+ deb_file_path = os.path.join(tmp_dir, os.path.basename(deb_filename))
+ package_to_deb_file_map[pkg] = deb_file_path
+
+ for pkg in reversed(resolved_packages):
+ deb_file = package_to_deb_file_map.get(pkg)
+ if deb_file and os.path.exists(deb_file):
+ extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool)
+
+ print("All done!")
+
+def extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool):
+ """Extract .deb file contents"""
+
+ os.makedirs(extract_dir, exist_ok=True)
+
+ with tempfile.TemporaryDirectory(dir=tmp_dir) as tmp_subdir:
+ result = subprocess.run(f"{ar_tool} t {os.path.abspath(deb_file)}", cwd=tmp_subdir, check=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ tar_filename = None
+ for line in result.stdout.decode().splitlines():
+ if line.startswith("data.tar"):
+ tar_filename = line.strip()
+ break
+
+ if not tar_filename:
+ raise FileNotFoundError(f"Could not find 'data.tar.*' in {deb_file}.")
+
+ tar_file_path = os.path.join(tmp_subdir, tar_filename)
+ print(f"Extracting {tar_filename} from {deb_file}..")
+
+ subprocess.run(f"{ar_tool} p {os.path.abspath(deb_file)} {tar_filename} > {tar_file_path}", check=True, shell=True)
+
+ file_extension = os.path.splitext(tar_file_path)[1].lower()
+
+ if file_extension == ".xz":
+ mode = "r:xz"
+ elif file_extension == ".gz":
+ mode = "r:gz"
+ elif file_extension == ".zst":
+ # zstd is not supported by standard library yet
+ decompressed_tar_path = tar_file_path.replace(".zst", "")
+ with open(tar_file_path, "rb") as zst_file, open(decompressed_tar_path, "wb") as decompressed_file:
+ dctx = zstandard.ZstdDecompressor()
+ dctx.copy_stream(zst_file, decompressed_file)
+
+ tar_file_path = decompressed_tar_path
+ mode = "r"
+ else:
+ raise ValueError(f"Unsupported compression format: {file_extension}")
+
+ with tarfile.open(tar_file_path, mode) as tar:
+ tar.extractall(path=extract_dir, filter='fully_trusted')
+
+def finalize_setup(rootfsdir):
+ lib_dir = os.path.join(rootfsdir, 'lib')
+ usr_lib_dir = os.path.join(rootfsdir, 'usr', 'lib')
+
+ if os.path.exists(lib_dir):
+ if os.path.islink(lib_dir):
+ os.remove(lib_dir)
+ else:
+ os.makedirs(usr_lib_dir, exist_ok=True)
+
+ for item in os.listdir(lib_dir):
+ src = os.path.join(lib_dir, item)
+ dest = os.path.join(usr_lib_dir, item)
+
+ if os.path.isdir(src):
+ shutil.copytree(src, dest, dirs_exist_ok=True)
+ else:
+ shutil.copy2(src, dest)
+
+ shutil.rmtree(lib_dir)
+
+ os.symlink(usr_lib_dir, lib_dir)
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="Generate rootfs for .NET runtime on Debian-like OS")
+ parser.add_argument("--distro", required=False, help="Distro name (e.g., debian, ubuntu, etc.)")
+ parser.add_argument("--arch", required=True, help="Architecture (e.g., amd64, loong64, etc.)")
+ parser.add_argument("--rootfsdir", required=True, help="Destination directory.")
+ parser.add_argument('--suite', required=True, action='append', help='Specify one or more repository suites to collect index data.')
+ parser.add_argument("--mirror", required=False, help="Mirror (e.g., http://ftp.debian.org/debian-ports etc.)")
+ parser.add_argument("--artool", required=False, default="ar", help="ar tool to extract debs (e.g., ar, llvm-ar etc.)")
+ parser.add_argument("packages", nargs="+", help="List of package names to be installed.")
+
+ args = parser.parse_args()
+
+ if args.mirror is None:
+ if args.distro == "ubuntu":
+ args.mirror = "http://archive.ubuntu.com/ubuntu" if args.arch in ["amd64", "i386"] else "http://ports.ubuntu.com/ubuntu-ports"
+ elif args.distro == "debian":
+ args.mirror = "http://ftp.debian.org/debian-ports"
+ else:
+ raise Exception("Unsupported distro")
+
+ DESIRED_PACKAGES = args.packages + [ # base packages
+ "dpkg",
+ "busybox",
+ "libc-bin",
+ "base-files",
+ "base-passwd",
+ "debianutils"
+ ]
+
+ print(f"Creating rootfs. rootfsdir: {args.rootfsdir}, distro: {args.distro}, arch: {args.arch}, suites: {args.suite}, mirror: {args.mirror}")
+
+ package_index_content = asyncio.run(download_package_index_parallel(args.mirror, args.arch, args.suite))
+
+ packages_info, aliases = parse_package_index(package_index_content)
+
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ install_packages(args.mirror, packages_info, aliases, tmp_dir, args.rootfsdir, args.artool, DESIRED_PACKAGES)
+
+ finalize_setup(args.rootfsdir)
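
build-rootfs.sh calls this script directly when emulation is skipped (see the `install-debs.py` invocation earlier in this diff), passing one `--suite` per enabled suite plus the package list; when `--mirror` is omitted, the fallback in `__main__` above picks the mirror from the distro and architecture. That fallback in isolation, as a worked example with the same URLs:

```python
def default_mirror(distro: str, arch: str) -> str:
    # Mirrors the fallback used above when --mirror is not supplied.
    if distro == "ubuntu":
        return ("http://archive.ubuntu.com/ubuntu"
                if arch in ["amd64", "i386"]
                else "http://ports.ubuntu.com/ubuntu-ports")
    if distro == "debian":
        return "http://ftp.debian.org/debian-ports"
    raise Exception("Unsupported distro")

assert default_mirror("ubuntu", "riscv64") == "http://ports.ubuntu.com/ubuntu-ports"
assert default_mirror("debian", "loong64") == "http://ftp.debian.org/debian-ports"
```
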
diff --git a/eng/common/cross/ppc64le/sources.list.bionic b/eng/common/cross/ppc64le/sources.list.bionic
deleted file mode 100644
index 21095574..00000000
--- a/eng/common/cross/ppc64le/sources.list.bionic
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/riscv64/sources.list.sid b/eng/common/cross/riscv64/sources.list.sid
deleted file mode 100644
index 65f730d2..00000000
--- a/eng/common/cross/riscv64/sources.list.sid
+++ /dev/null
@@ -1 +0,0 @@
-deb http://deb.debian.org/debian-ports sid main
diff --git a/eng/common/cross/riscv64/tizen/tizen.patch b/eng/common/cross/riscv64/tizen/tizen.patch
new file mode 100644
index 00000000..eb6d1c07
--- /dev/null
+++ b/eng/common/cross/riscv64/tizen/tizen.patch
@@ -0,0 +1,9 @@
+diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
+--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900
++++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900
+@@ -2,4 +2,4 @@
+ Use the shared library, but some functions are only in
+ the static library, so try that secondarily. */
+ OUTPUT_FORMAT(elf64-littleriscv)
+-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-riscv64-lp64d.so.1 ) )
++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-riscv64-lp64d.so.1 ) )
diff --git a/eng/common/cross/s390x/sources.list.bionic b/eng/common/cross/s390x/sources.list.bionic
deleted file mode 100644
index 21095574..00000000
--- a/eng/common/cross/s390x/sources.list.bionic
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
-
-deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/tizen-build-rootfs.sh b/eng/common/cross/tizen-build-rootfs.sh
index ac84173d..ba31c932 100644
--- a/eng/common/cross/tizen-build-rootfs.sh
+++ b/eng/common/cross/tizen-build-rootfs.sh
@@ -22,6 +22,10 @@ case "$ARCH" in
TIZEN_ARCH="x86_64"
LINK_ARCH="x86"
;;
+ riscv64)
+ TIZEN_ARCH="riscv64"
+ LINK_ARCH="riscv"
+ ;;
*)
echo "Unsupported architecture for tizen: $ARCH"
exit 1
@@ -58,4 +62,21 @@ rm -rf $TIZEN_TMP_DIR
echo ">>Start configuring Tizen rootfs"
ln -sfn asm-${LINK_ARCH} ./usr/include/asm
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+if [[ "$TIZEN_ARCH" == "riscv64" ]]; then
+ echo "Fixing broken symlinks in $PWD"
+ rm ./usr/lib64/libresolv.so
+ ln -s ../../lib64/libresolv.so.2 ./usr/lib64/libresolv.so
+ rm ./usr/lib64/libpthread.so
+ ln -s ../../lib64/libpthread.so.0 ./usr/lib64/libpthread.so
+ rm ./usr/lib64/libdl.so
+ ln -s ../../lib64/libdl.so.2 ./usr/lib64/libdl.so
+ rm ./usr/lib64/libutil.so
+ ln -s ../../lib64/libutil.so.1 ./usr/lib64/libutil.so
+ rm ./usr/lib64/libm.so
+ ln -s ../../lib64/libm.so.6 ./usr/lib64/libm.so
+ rm ./usr/lib64/librt.so
+ ln -s ../../lib64/librt.so.1 ./usr/lib64/librt.so
+ rm ./lib/ld-linux-riscv64-lp64d.so.1
+ ln -s ../lib64/ld-linux-riscv64-lp64d.so.1 ./lib/ld-linux-riscv64-lp64d.so.1
+fi
echo "<:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>)
+ add_link_options($<$:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>)
+endif()
+
+option(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC "Statically link against the C++ standard library" OFF)
+if(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC)
+  add_link_options($<$<COMPILE_LANG_AND_ID:CXX,Clang>:-static-libstdc++>)
+endif()
+
+set(CLR_CMAKE_CXX_ABI_LIBRARY "" CACHE STRING "C++ ABI implementation library to link against. Only supported with the Clang compiler.")
+if (CLR_CMAKE_CXX_ABI_LIBRARY)
+ # The user may specify the ABI library with the 'lib' prefix, like 'libstdc++'. Strip the prefix here so the linker finds the right library.
+ string(REGEX REPLACE "^lib(.+)" "\\1" CLR_CMAKE_CXX_ABI_LIBRARY ${CLR_CMAKE_CXX_ABI_LIBRARY})
+ # We need to specify this as a linker-backend option as Clang will filter this option out when linking to libc++.
+ add_link_options("LINKER:-l${CLR_CMAKE_CXX_ABI_LIBRARY}")
+endif()
+
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
diff --git a/eng/common/cross/x64/sources.list.bionic b/eng/common/cross/x64/sources.list.bionic
deleted file mode 100644
index a71ccadc..00000000
--- a/eng/common/cross/x64/sources.list.bionic
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://archive.ubuntu.com/ubuntu/ bionic main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ bionic main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted
-deb-src http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted
-
-deb http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse
-deb-src http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/x64/sources.list.xenial b/eng/common/cross/x64/sources.list.xenial
deleted file mode 100644
index ad9c5a01..00000000
--- a/eng/common/cross/x64/sources.list.xenial
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://archive.ubuntu.com/ubuntu/ xenial main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ xenial main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted
-deb-src http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted
-
-deb http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse
-deb-src http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse
diff --git a/eng/common/cross/x86/sources.list.bionic b/eng/common/cross/x86/sources.list.bionic
deleted file mode 100644
index a71ccadc..00000000
--- a/eng/common/cross/x86/sources.list.bionic
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://archive.ubuntu.com/ubuntu/ bionic main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ bionic main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted
-deb-src http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted
-
-deb http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse
-deb-src http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/x86/sources.list.focal b/eng/common/cross/x86/sources.list.focal
deleted file mode 100644
index 99d57313..00000000
--- a/eng/common/cross/x86/sources.list.focal
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://archive.ubuntu.com/ubuntu/ focal main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ focal main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ focal-backports main restricted
-deb-src http://archive.ubuntu.com/ubuntu/ focal-backports main restricted
-
-deb http://archive.ubuntu.com/ubuntu/ focal-security main restricted universe multiverse
-deb-src http://archive.ubuntu.com/ubuntu/ focal-security main restricted universe multiverse
diff --git a/eng/common/cross/x86/sources.list.jammy b/eng/common/cross/x86/sources.list.jammy
deleted file mode 100644
index af1c1fea..00000000
--- a/eng/common/cross/x86/sources.list.jammy
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://archive.ubuntu.com/ubuntu/ jammy main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ jammy main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ jammy-backports main restricted
-deb-src http://archive.ubuntu.com/ubuntu/ jammy-backports main restricted
-
-deb http://archive.ubuntu.com/ubuntu/ jammy-security main restricted universe multiverse
-deb-src http://archive.ubuntu.com/ubuntu/ jammy-security main restricted universe multiverse
diff --git a/eng/common/cross/x86/sources.list.xenial b/eng/common/cross/x86/sources.list.xenial
deleted file mode 100644
index ad9c5a01..00000000
--- a/eng/common/cross/x86/sources.list.xenial
+++ /dev/null
@@ -1,11 +0,0 @@
-deb http://archive.ubuntu.com/ubuntu/ xenial main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ xenial main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe
-deb-src http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe
-
-deb http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted
-deb-src http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted
-
-deb http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse
-deb-src http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse
diff --git a/eng/common/darc-init.ps1 b/eng/common/darc-init.ps1
index 8fda30bd..a5be41db 100644
--- a/eng/common/darc-init.ps1
+++ b/eng/common/darc-init.ps1
@@ -1,6 +1,6 @@
param (
$darcVersion = $null,
- $versionEndpoint = 'https://maestro.dot.net/api/assets/darc-version?api-version=2019-01-16',
+ $versionEndpoint = 'https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20',
$verbosity = 'minimal',
$toolpath = $null
)
@@ -29,11 +29,11 @@ function InstallDarcCli ($darcVersion, $toolpath) {
Write-Host "Installing Darc CLI version $darcVersion..."
Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
if (-not $toolpath) {
- Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity -g"
- & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g
+ Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --source '$arcadeServicesSource' -v $verbosity -g"
+ & "$dotnet" tool install $darcCliPackageName --version $darcVersion --source "$arcadeServicesSource" -v $verbosity -g
}else {
- Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity --tool-path '$toolpath'"
- & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath"
+ Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --source '$arcadeServicesSource' -v $verbosity --tool-path '$toolpath'"
+ & "$dotnet" tool install $darcCliPackageName --version $darcVersion --source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath"
}
}
diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh
index c305ae6b..b56d40e5 100644
--- a/eng/common/darc-init.sh
+++ b/eng/common/darc-init.sh
@@ -2,10 +2,10 @@
source="${BASH_SOURCE[0]}"
darcVersion=''
-versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2019-01-16'
+versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20'
verbosity='minimal'
-while [[ $# > 0 ]]; do
+while [[ $# -gt 0 ]]; do
opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
--darcversion)
@@ -68,14 +68,14 @@ function InstallDarcCli {
fi
fi
- local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json"
+ local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"
echo "Installing Darc CLI version $darcVersion..."
echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
if [ -z "$toolpath" ]; then
- echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g)
+ echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --source "$arcadeServicesSource" -v $verbosity -g)
else
- echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath")
+ echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath")
fi
}
diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh
index 7e69e3a9..61f302bb 100644
--- a/eng/common/dotnet-install.sh
+++ b/eng/common/dotnet-install.sh
@@ -18,7 +18,7 @@ architecture=''
runtime='dotnet'
runtimeSourceFeed=''
runtimeSourceFeedKey=''
-while [[ $# > 0 ]]; do
+while [[ $# -gt 0 ]]; do
opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
-version|-v)
@@ -71,6 +71,9 @@ case $cpuname in
i[3-6]86)
buildarch=x86
;;
+ riscv64)
+ buildarch=riscv64
+ ;;
*)
echo "Unknown CPU $cpuname detected, treating it as x64"
buildarch=x64
@@ -82,7 +85,7 @@ if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then
dotnetRoot="$dotnetRoot/$architecture"
fi
-InstallDotNet $dotnetRoot $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || {
+InstallDotNet "$dotnetRoot" $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || {
local exit_code=$?
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2
ExitWithExitCode $exit_code
diff --git a/eng/common/dotnet.cmd b/eng/common/dotnet.cmd
new file mode 100644
index 00000000..527fa4bb
--- /dev/null
+++ b/eng/common/dotnet.cmd
@@ -0,0 +1,7 @@
+@echo off
+
+:: This script is used to install the .NET SDK.
+:: It will also invoke the SDK with any provided arguments.
+
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet.ps1""" %*"
+exit /b %ErrorLevel%
diff --git a/eng/common/dotnet.ps1 b/eng/common/dotnet.ps1
new file mode 100644
index 00000000..45e5676c
--- /dev/null
+++ b/eng/common/dotnet.ps1
@@ -0,0 +1,11 @@
+# This script is used to install the .NET SDK.
+# It will also invoke the SDK with any provided arguments.
+
+. $PSScriptRoot\tools.ps1
+$dotnetRoot = InitializeDotNetCli -install:$true
+
+# Invoke acquired SDK with args if they are provided
+if ($args.count -gt 0) {
+ $env:DOTNET_NOLOGO=1
+ & "$dotnetRoot\dotnet.exe" $args
+}
diff --git a/eng/common/dotnet.sh b/eng/common/dotnet.sh
new file mode 100644
index 00000000..f6d24871
--- /dev/null
+++ b/eng/common/dotnet.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+# This script is used to install the .NET SDK.
+# It will also invoke the SDK with any provided arguments.
+
+source="${BASH_SOURCE[0]}"
+# resolve $SOURCE until the file is no longer a symlink
+while [[ -h $source ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+source $scriptroot/tools.sh
+InitializeDotNetCli true # install
+
+# Invoke acquired SDK with args if they are provided
+if [[ $# -gt 0 ]]; then
+ __dotnetDir=${_InitializeDotNetCli}
+ dotnetPath=${__dotnetDir}/dotnet
+ ${dotnetPath} "$@"
+fi
diff --git a/eng/common/generate-locproject.ps1 b/eng/common/generate-locproject.ps1
index 524aaa57..fa1cdc2b 100644
--- a/eng/common/generate-locproject.ps1
+++ b/eng/common/generate-locproject.ps1
@@ -33,15 +33,27 @@ $jsonTemplateFiles | ForEach-Object {
$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern
+$wxlFilesV3 = @()
+$wxlFilesV5 = @()
$wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them
if (-not $wxlFiles) {
$wxlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\1033\\.+\.wxl" } # pick up en files (1033 = en) specifically so we can copy them to use as the neutral xlf files
if ($wxlEnFiles) {
- $wxlFiles = @()
- $wxlEnFiles | ForEach-Object {
- $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)"
- $wxlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
- }
+ $wxlFiles = @()
+ $wxlEnFiles | ForEach-Object {
+ $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)"
+ $content = Get-Content $_.FullName -Raw
+
+ # Split files on schema to select different parser settings in the generated project.
+ if ($content -like "*http://wixtoolset.org/schemas/v4/wxl*")
+ {
+ $wxlFilesV5 += Copy-Item $_.FullName -Destination $destinationFile -PassThru
+ }
+ elseif ($content -like "*http://schemas.microsoft.com/wix/2006/localization*")
+ {
+ $wxlFilesV3 += Copy-Item $_.FullName -Destination $destinationFile -PassThru
+ }
+ }
}
}
@@ -114,7 +126,32 @@ $locJson = @{
CloneLanguageSet = "WiX_CloneLanguages"
LssFiles = @( "wxl_loc.lss" )
LocItems = @(
- $wxlFiles | ForEach-Object {
+ $wxlFilesV3 | ForEach-Object {
+ $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\"
+ $continue = $true
+ foreach ($exclusion in $exclusions.Exclusions) {
+ if ($_.FullName.Contains($exclusion)) {
+ $continue = $false
+ }
+ }
+ $sourceFile = ($_.FullName | Resolve-Path -Relative)
+ if ($continue)
+ {
+ return @{
+ SourceFile = $sourceFile
+ CopyOption = "LangIDOnPath"
+ OutputPath = $outputPath
+ }
+ }
+ }
+ )
+ },
+ @{
+ LanguageSet = $LanguageSet
+ CloneLanguageSet = "WiX_CloneLanguages"
+ LssFiles = @( "P210WxlSchemaV4.lss" )
+ LocItems = @(
+ $wxlFilesV5 | ForEach-Object {
$outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\"
$continue = $true
foreach ($exclusion in $exclusions.Exclusions) {
diff --git a/eng/common/generate-sbom-prep.ps1 b/eng/common/generate-sbom-prep.ps1
deleted file mode 100644
index 3e5c1c74..00000000
--- a/eng/common/generate-sbom-prep.ps1
+++ /dev/null
@@ -1,21 +0,0 @@
-Param(
- [Parameter(Mandatory=$true)][string] $ManifestDirPath # Manifest directory where sbom will be placed
-)
-
-. $PSScriptRoot\pipeline-logging-functions.ps1
-
-Write-Host "Creating dir $ManifestDirPath"
-# create directory for sbom manifest to be placed
-if (!(Test-Path -path $ManifestDirPath))
-{
- New-Item -ItemType Directory -path $ManifestDirPath
- Write-Host "Successfully created directory $ManifestDirPath"
-}
-else{
- Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder."
-}
-
-Write-Host "Updating artifact name"
-$artifact_name = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM" -replace '["/:<>\\|?@*"() ]', '_'
-Write-Host "Artifact name $artifact_name"
-Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$artifact_name"
diff --git a/eng/common/generate-sbom-prep.sh b/eng/common/generate-sbom-prep.sh
deleted file mode 100644
index d5c76dc8..00000000
--- a/eng/common/generate-sbom-prep.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env bash
-
-source="${BASH_SOURCE[0]}"
-
-# resolve $SOURCE until the file is no longer a symlink
-while [[ -h $source ]]; do
- scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
- source="$(readlink "$source")"
-
- # if $source was a relative symlink, we need to resolve it relative to the path where the
- # symlink file was located
- [[ $source != /* ]] && source="$scriptroot/$source"
-done
-scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
-. $scriptroot/pipeline-logging-functions.sh
-
-manifest_dir=$1
-
-if [ ! -d "$manifest_dir" ] ; then
- mkdir -p "$manifest_dir"
- echo "Sbom directory created." $manifest_dir
-else
- Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder."
-fi
-
-artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM"
-echo "Artifact name before : "$artifact_name
-# replace all special characters with _, some builds use special characters like : in Agent.Jobname, that is not a permissible name while uploading artifacts.
-safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}"
-echo "Artifact name after : "$safe_artifact_name
-export ARTIFACT_NAME=$safe_artifact_name
-echo "##vso[task.setvariable variable=ARTIFACT_NAME]$safe_artifact_name"
-
-exit 0
diff --git a/eng/common/helixpublish.proj b/eng/common/helixpublish.proj
index d7f18585..c1323bf4 100644
--- a/eng/common/helixpublish.proj
+++ b/eng/common/helixpublish.proj
@@ -1,3 +1,4 @@
+
diff --git a/eng/common/internal-feed-operations.ps1 b/eng/common/internal-feed-operations.ps1
index 92b77347..c282d3ae 100644
--- a/eng/common/internal-feed-operations.ps1
+++ b/eng/common/internal-feed-operations.ps1
@@ -26,7 +26,7 @@ function SetupCredProvider {
$url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1'
Write-Host "Writing the contents of 'installcredprovider.ps1' locally..."
- Invoke-WebRequest $url -OutFile installcredprovider.ps1
+ Invoke-WebRequest $url -UseBasicParsing -OutFile installcredprovider.ps1
Write-Host 'Installing plugin...'
.\installcredprovider.ps1 -Force
diff --git a/eng/common/internal-feed-operations.sh b/eng/common/internal-feed-operations.sh
index 9378223b..6299e7ef 100644
--- a/eng/common/internal-feed-operations.sh
+++ b/eng/common/internal-feed-operations.sh
@@ -100,7 +100,7 @@ operation=''
authToken=''
repoName=''
-while [[ $# > 0 ]]; do
+while [[ $# -gt 0 ]]; do
opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
--operation)
diff --git a/eng/common/internal/Directory.Build.props b/eng/common/internal/Directory.Build.props
index dbf99d82..f1d041c3 100644
--- a/eng/common/internal/Directory.Build.props
+++ b/eng/common/internal/Directory.Build.props
@@ -1,4 +1,11 @@
+
+
+ false
+ false
+
+
+
diff --git a/eng/common/internal/NuGet.config b/eng/common/internal/NuGet.config
index 19d3d311..f70261ed 100644
--- a/eng/common/internal/NuGet.config
+++ b/eng/common/internal/NuGet.config
@@ -4,4 +4,7 @@
+
+
+
diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj
index 7f5ce6d6..feaa6d20 100644
--- a/eng/common/internal/Tools.csproj
+++ b/eng/common/internal/Tools.csproj
@@ -1,9 +1,10 @@
+
net472
- false
false
+ false
@@ -14,17 +15,8 @@
-
-
-
- https://devdiv.pkgs.visualstudio.com/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json;
-
-
- $(RestoreSources);
- https://devdiv.pkgs.visualstudio.com/_packaging/VS/nuget/v3/index.json;
-
-
+
diff --git a/eng/common/native/CommonLibrary.psm1 b/eng/common/native/CommonLibrary.psm1
index ca38268c..f71f6af6 100644
--- a/eng/common/native/CommonLibrary.psm1
+++ b/eng/common/native/CommonLibrary.psm1
@@ -277,7 +277,8 @@ function Get-MachineArchitecture {
if (($ProcessorArchitecture -Eq "AMD64") -Or
($ProcessorArchitecture -Eq "IA64") -Or
($ProcessorArchitecture -Eq "ARM64") -Or
- ($ProcessorArchitecture -Eq "LOONGARCH64")) {
+ ($ProcessorArchitecture -Eq "LOONGARCH64") -Or
+ ($ProcessorArchitecture -Eq "RISCV64")) {
return "x64"
}
return "x86"
diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh
index 2d566064..9a0e1f2b 100644
--- a/eng/common/native/init-compiler.sh
+++ b/eng/common/native/init-compiler.sh
@@ -2,7 +2,9 @@
#
# This file detects the C/C++ compiler and exports it to the CC/CXX environment variables
#
-# NOTE: some scripts source this file and rely on stdout being empty, make sure to not output anything here!
+# NOTE: some scripts source this file and rely on stdout being empty, make sure
+# to not output *anything* here, unless it is an error message that fails the
+# build.
if [ -z "$build_arch" ] || [ -z "$compiler" ]; then
echo "Usage..."
@@ -17,11 +19,9 @@ case "$compiler" in
# clangx.y or clang-x.y
version="$(echo "$compiler" | tr -d '[:alpha:]-=')"
majorVersion="${version%%.*}"
- [ -z "${version##*.*}" ] && minorVersion="${version#*.}"
- if [ -z "$minorVersion" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -le 6 ]; then
- minorVersion=0;
- fi
+ # LLVM based on v18 released in early 2024, with two releases per year
+ maxVersion="$((18 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 6)))"
compiler=clang
;;
@@ -29,7 +29,9 @@ case "$compiler" in
# gccx.y or gcc-x.y
version="$(echo "$compiler" | tr -d '[:alpha:]-=')"
majorVersion="${version%%.*}"
- [ -z "${version##*.*}" ] && minorVersion="${version#*.}"
+
+ # GCC based on v14 released in early 2024, with one release per year
+ maxVersion="$((14 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 12)))"
compiler=gcc
;;
esac
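
The two `maxVersion` expressions above estimate the newest plausible toolchain major version purely from the calendar: clang is assumed to gain one major version every six months starting from 18 in March 2024, and gcc one every twelve months starting from 14. The same formula worked out as plain arithmetic (Python, for illustration only):

```python
def max_clang(year: int, month: int) -> int:
    # 18 + floor((months since March 2024) / 6)
    return 18 + ((year - 2024) * 12 + month - 3) // 6

def max_gcc(year: int, month: int) -> int:
    # 14 + floor((months since March 2024) / 12)
    return 14 + ((year - 2024) * 12 + month - 3) // 12

assert max_clang(2024, 3) == 18   # baseline
assert max_clang(2025, 9) == 21   # 18 months later -> three clang releases
assert max_gcc(2025, 9) == 15     # 18 months later -> one gcc release
```
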
@@ -47,91 +49,98 @@ check_version_exists() {
desired_version=-1
# Set up the environment to be used for building with the desired compiler.
- if command -v "$compiler-$1.$2" > /dev/null; then
- desired_version="-$1.$2"
- elif command -v "$compiler$1$2" > /dev/null; then
- desired_version="$1$2"
- elif command -v "$compiler-$1$2" > /dev/null; then
- desired_version="-$1$2"
+ if command -v "$compiler-$1" > /dev/null; then
+ desired_version="-$1"
+ elif command -v "$compiler$1" > /dev/null; then
+ desired_version="$1"
fi
echo "$desired_version"
}
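
check_version_exists now probes only two spellings per major version, `<compiler>-<major>` and then `<compiler><major>`, rather than the old major.minor triples. A quick illustration of that probe order (tool names are examples; this is not part of the script):

```python
import shutil

def find_versioned(compiler: str, major: int):
    # Probe order used by check_version_exists: "clang-18" first, then "clang18".
    for candidate in (f"{compiler}-{major}", f"{compiler}{major}"):
        if shutil.which(candidate):
            return candidate
    return None

print(find_versioned("clang", 18))   # e.g. "clang-18" on most distros, None if not installed
```
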
+__baseOS="$(uname)"
+set_compiler_version_from_CC() {
+ if [ "$__baseOS" = "Darwin" ]; then
+ # On Darwin, the versions from -version/-dumpversion refer to Xcode
+ # versions, not llvm versions, so we can't rely on them.
+ return
+ fi
+
+ version="$("$CC" -dumpversion)"
+ if [ -z "$version" ]; then
+ echo "Error: $CC -dumpversion didn't provide a version"
+ exit 1
+ fi
+
+ # gcc and clang often display 3 part versions. However, gcc can show only 1 part in some environments.
+  IFS=. read -r majorVersion _ <<EOF
+$version
+EOF
+}
-    if command -v "$compiler" > /dev/null; then
- if [ "$(uname)" != "Darwin" ]; then
- echo "Warning: Specific version of $compiler not found, falling back to use the one in PATH."
- fi
- CC="$(command -v "$compiler")"
- CXX="$(command -v "$cxxCompiler")"
- else
- echo "No usable version of $compiler found."
+ if ! command -v "$compiler" > /dev/null; then
+ echo "Error: No compatible version of $compiler was found within the range of $minVersion to $maxVersion. Please upgrade your toolchain or specify the compiler explicitly using CLR_CC and CLR_CXX environment variables."
exit 1
fi
- else
- if [ "$compiler" = "clang" ] && [ "$majorVersion" -lt 5 ]; then
- if [ "$build_arch" = "arm" ] || [ "$build_arch" = "armel" ]; then
- if command -v "$compiler" > /dev/null; then
- echo "Warning: Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH."
- CC="$(command -v "$compiler")"
- CXX="$(command -v "$cxxCompiler")"
- else
- echo "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH."
- exit 1
- fi
- fi
- fi
+
+ CC="$(command -v "$compiler" 2> /dev/null)"
+ CXX="$(command -v "$cxxCompiler" 2> /dev/null)"
+ set_compiler_version_from_CC
fi
else
- desired_version="$(check_version_exists "$majorVersion" "$minorVersion")"
+ desired_version="$(check_version_exists "$majorVersion")"
if [ "$desired_version" = "-1" ]; then
- echo "Could not find specific version of $compiler: $majorVersion $minorVersion."
+ echo "Error: Could not find specific version of $compiler: $majorVersion."
exit 1
fi
fi
if [ -z "$CC" ]; then
- CC="$(command -v "$compiler$desired_version")"
- CXX="$(command -v "$cxxCompiler$desired_version")"
- if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler")"; fi
+ CC="$(command -v "$compiler$desired_version" 2> /dev/null)"
+ CXX="$(command -v "$cxxCompiler$desired_version" 2> /dev/null)"
+ if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler" 2> /dev/null)"; fi
+ set_compiler_version_from_CC
fi
else
if [ ! -f "$CLR_CC" ]; then
- echo "CLR_CC is set but path '$CLR_CC' does not exist"
+ echo "Error: CLR_CC is set but path '$CLR_CC' does not exist"
exit 1
fi
CC="$CLR_CC"
CXX="$CLR_CXX"
+ set_compiler_version_from_CC
fi
if [ -z "$CC" ]; then
- echo "Unable to find $compiler."
+ echo "Error: Unable to find $compiler."
exit 1
fi
-# Only lld version >= 9 can be considered stable. lld doesn't support s390x.
-if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && [ "$build_arch" != "s390x" ]; then
- if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then
- LDFLAGS="-fuse-ld=lld"
+if [ "$__baseOS" != "Darwin" ]; then
+ # On Darwin, we always want to use the Apple linker.
+
+ # Only lld version >= 9 can be considered stable. lld supports s390x starting from 18.0.
+ if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && { [ "$build_arch" != "s390x" ] || [ "$majorVersion" -ge 18 ]; }; then
+ if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then
+ LDFLAGS="-fuse-ld=lld"
+ fi
fi
fi
-SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")"
+SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version" 2> /dev/null)"
export CC CXX LDFLAGS SCAN_BUILD_COMMAND
diff --git a/eng/common/native/init-distro-rid.sh b/eng/common/native/init-distro-rid.sh
index de1687b2..8fc6d2fe 100644
--- a/eng/common/native/init-distro-rid.sh
+++ b/eng/common/native/init-distro-rid.sh
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#!/bin/sh
# getNonPortableDistroRid
#
@@ -11,21 +11,16 @@
# non-portable rid
getNonPortableDistroRid()
{
- local targetOs="$1"
- local targetArch="$2"
- local rootfsDir="$3"
- local nonPortableRid=""
+ targetOs="$1"
+ targetArch="$2"
+ rootfsDir="$3"
+ nonPortableRid=""
if [ "$targetOs" = "linux" ]; then
+ # shellcheck disable=SC1091
if [ -e "${rootfsDir}/etc/os-release" ]; then
- source "${rootfsDir}/etc/os-release"
-
- if [[ "${ID}" == "rhel" || "${ID}" == "rocky" || "${ID}" == "alpine" ]]; then
- # remove the last version digit
- VERSION_ID="${VERSION_ID%.*}"
- fi
-
- if [[ "${VERSION_ID:-}" =~ ^([[:digit:]]|\.)+$ ]]; then
+ . "${rootfsDir}/etc/os-release"
+ if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then
nonPortableRid="${ID}.${VERSION_ID}-${targetArch}"
else
# Rolling release distros either do not set VERSION_ID, set it as blank or
@@ -33,45 +28,35 @@ getNonPortableDistroRid()
# so omit it here to be consistent with everything else.
nonPortableRid="${ID}-${targetArch}"
fi
-
elif [ -e "${rootfsDir}/android_platform" ]; then
- source "$rootfsDir"/android_platform
+ # shellcheck disable=SC1091
+ . "${rootfsDir}/android_platform"
nonPortableRid="$RID"
fi
fi
if [ "$targetOs" = "freebsd" ]; then
- # $rootfsDir can be empty. freebsd-version is shell script and it should always work.
- __freebsd_major_version=$($rootfsDir/bin/freebsd-version | { read v; echo "${v%%.*}"; })
+ # $rootfsDir can be empty. freebsd-version is a shell script and should always work.
+ __freebsd_major_version=$("$rootfsDir"/bin/freebsd-version | cut -d'.' -f1)
nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}"
- elif command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then
+ elif [ "$targetOs" = "openbsd" ]; then
+ nonPortableRid="openbsd.$(uname -r)-${targetArch}"
+ elif command -v getprop >/dev/null && getprop ro.product.system.model | grep -qi android; then
__android_sdk_version=$(getprop ro.build.version.sdk)
nonPortableRid="android.$__android_sdk_version-${targetArch}"
elif [ "$targetOs" = "illumos" ]; then
__uname_version=$(uname -v)
- case "$__uname_version" in
- omnios-*)
- __omnios_major_version=$(echo "${__uname_version:8:2}")
- nonPortableRid=omnios."$__omnios_major_version"-"$targetArch"
- ;;
- joyent_*)
- __smartos_major_version=$(echo "${__uname_version:7:4}")
- nonPortableRid=smartos."$__smartos_major_version"-"$targetArch"
- ;;
- illumos_*)
- nonPortableRid=openindiana-"$targetArch"
- ;;
- esac
+ nonPortableRid="illumos-${targetArch}"
elif [ "$targetOs" = "solaris" ]; then
__uname_version=$(uname -v)
- __solaris_major_version=$(echo "${__uname_version%.*}")
- nonPortableRid=solaris."$__solaris_major_version"-"$targetArch"
+ __solaris_major_version=$(echo "$__uname_version" | cut -d'.' -f1)
+ nonPortableRid="solaris.$__solaris_major_version-${targetArch}"
elif [ "$targetOs" = "haiku" ]; then
- __uname_release=$(uname -r)
+ __uname_release="$(uname -r)"
nonPortableRid=haiku.r"$__uname_release"-"$targetArch"
fi
- echo "$(echo $nonPortableRid | tr '[:upper:]' '[:lower:]')"
+ echo "$nonPortableRid" | tr '[:upper:]' '[:lower:]'
}
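
getNonPortableDistroRid builds a lowercase `<distro>.<version>-<arch>` identifier from whatever the target rootfs exposes: `/etc/os-release` for Linux, `freebsd-version` for FreeBSD, `uname -r` for OpenBSD, and so on. The shapes it produces look like this (illustrative values, not script output):

```python
# Example RID shapes produced by the function above (values are made up).
examples = {
    ("linux", "x64", "alpine", "3.20"): "alpine.3.20-x64",
    ("linux", "arm64", "rhel", "9.4"):  "rhel.9.4-arm64",
    ("freebsd", "x64", None, "14.1"):   "freebsd.14-x64",   # only the major version is kept
    ("openbsd", "x64", None, "7.5"):    "openbsd.7.5-x64",
    ("illumos", "x64", None, None):     "illumos-x64",
}
for (target_os, arch, distro, ver), rid in examples.items():
    print(f"{target_os:8} {arch:6} -> {rid}")
```
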
# initDistroRidGlobal
@@ -85,26 +70,23 @@ getNonPortableDistroRid()
# None
#
# Notes:
-#
-# It is important to note that the function does not return anything, but it
-# exports the following variables on success:
-#
-# __DistroRid : Non-portable rid of the target platform.
-# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform.
-#
+# It is important to note that the function does not return anything, but it
+# exports the following variables on success:
+# __DistroRid : Non-portable rid of the target platform.
+# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform.
initDistroRidGlobal()
{
- local targetOs="$1"
- local targetArch="$2"
- local rootfsDir=""
- if [ "$#" -ge 3 ]; then
+ targetOs="$1"
+ targetArch="$2"
+ rootfsDir=""
+ if [ $# -ge 3 ]; then
rootfsDir="$3"
fi
if [ -n "${rootfsDir}" ]; then
# We may have a cross build. Check for the existence of the rootfsDir
if [ ! -e "${rootfsDir}" ]; then
- echo "Error rootfsDir has been passed, but the location is not valid."
+ echo "Error: rootfsDir has been passed, but the location is not valid."
exit 1
fi
fi
@@ -119,7 +101,7 @@ initDistroRidGlobal()
STRINGS="$(command -v llvm-strings || true)"
fi
- # Check for musl-based distros (e.g Alpine Linux, Void Linux).
+ # Check for musl-based distros (e.g. Alpine Linux, Void Linux).
if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl ||
( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then
__PortableTargetOS="linux-musl"
diff --git a/eng/common/native/init-os-and-arch.sh b/eng/common/native/init-os-and-arch.sh
index e693617a..38921d43 100644
--- a/eng/common/native/init-os-and-arch.sh
+++ b/eng/common/native/init-os-and-arch.sh
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#!/bin/sh
# Use uname to determine what the OS is.
OSName=$(uname -s | tr '[:upper:]' '[:lower:]')
@@ -35,6 +35,10 @@ fi
case "$CPUName" in
arm64|aarch64)
arch=arm64
+ if [ "$(getconf LONG_BIT)" -lt 64 ]; then
+ # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
+ arch=arm
+ fi
;;
loongarch64)
@@ -50,6 +54,7 @@ case "$CPUName" in
;;
armv7l|armv8l)
+ # shellcheck disable=SC1091
if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then
arch=armel
else
diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh
new file mode 100644
index 00000000..4742177a
--- /dev/null
+++ b/eng/common/native/install-dependencies.sh
@@ -0,0 +1,65 @@
+#!/bin/sh
+
+set -e
+
+# This is a simple script primarily used for CI to install necessary dependencies
+#
+# Usage:
+#
+# ./install-dependencies.sh [os]    (e.g. linux, osx; detected automatically when omitted)
+
+os="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
+
+if [ -z "$os" ]; then
+ . "$(dirname "$0")"/init-os-and-arch.sh
+fi
+
+case "$os" in
+ linux)
+ if [ -e /etc/os-release ]; then
+ . /etc/os-release
+ fi
+
+ if [ "$ID" = "debian" ] || [ "$ID_LIKE" = "debian" ]; then
+ apt update
+
+ apt install -y build-essential gettext locales cmake llvm clang lld lldb liblldb-dev libunwind8-dev libicu-dev liblttng-ust-dev \
+ libssl-dev libkrb5-dev pigz cpio ninja-build
+
+ localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
+ elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ] || [ "$ID" = "centos" ]; then
+ pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)"
+ $pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio ninja-build
+ elif [ "$ID" = "amzn" ]; then
+ dnf install -y cmake llvm lld lldb clang python libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio ninja-build
+ elif [ "$ID" = "alpine" ]; then
+ apk add build-base cmake bash curl clang llvm llvm-dev lld lldb-dev krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio ninja
+ else
+ echo "Unsupported distro. distro: $ID"
+ exit 1
+ fi
+ ;;
+
+ osx|maccatalyst|ios|iossimulator|tvos|tvossimulator)
+ echo "Installed xcode version: $(xcode-select -p)"
+
+ export HOMEBREW_NO_INSTALL_CLEANUP=1
+ export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1
+ # Skip brew update for now, see https://github.com/actions/setup-python/issues/577
+ # brew update --preinstall
+ brew bundle --no-upgrade --file=- <
+.EXAMPLE
+ PS> .\verify.ps1 *.nupkg
+ Verifies the metadata of all .nupkg files in the current working directory.
+.EXAMPLE
+ PS> .\verify.ps1 --help
+ Displays the help text of the downloaded verification tool.
+.LINK
+ https://github.com/NuGet/NuGetGallery/blob/master/src/VerifyMicrosoftPackage/README.md
+#>
+
+# This script was copied from https://github.com/NuGet/NuGetGallery/blob/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1
+
+[CmdletBinding(PositionalBinding = $false)]
+param(
+ [string]$NuGetExePath,
+ [string]$PackageSource = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json",
+ [string]$DownloadPath,
+ [Parameter(ValueFromRemainingArguments = $true)]
+ [string[]]$args
+)
+
+# The URL to download nuget.exe.
+$nugetExeUrl = "https://dist.nuget.org/win-x86-commandline/v4.9.4/nuget.exe"
+
+# The package ID of the verification tool.
+$packageId = "NuGet.VerifyMicrosoftPackage"
+
+# The location that nuget.exe and the verification tool will be downloaded to.
+if (!$DownloadPath) {
+ $DownloadPath = (Join-Path $env:TEMP "NuGet.VerifyMicrosoftPackage")
+}
+
+$fence = New-Object -TypeName string -ArgumentList '=', 80
+
+# Create the download directory, if it doesn't already exist.
+if (!(Test-Path $DownloadPath)) {
+ New-Item -ItemType Directory $DownloadPath | Out-Null
+}
+Write-Host "Using download path: $DownloadPath"
+
+if ($NuGetExePath) {
+ $nuget = $NuGetExePath
+} else {
+ $downloadedNuGetExe = Join-Path $DownloadPath "nuget.exe"
+
+ # Download nuget.exe, if it doesn't already exist.
+ if (!(Test-Path $downloadedNuGetExe)) {
+ Write-Host "Downloading nuget.exe from $nugetExeUrl..."
+ $ProgressPreference = 'SilentlyContinue'
+ try {
+ Invoke-WebRequest $nugetExeUrl -UseBasicParsing -OutFile $downloadedNuGetExe
+ $ProgressPreference = 'Continue'
+ } catch {
+ $ProgressPreference = 'Continue'
+ Write-Error $_
+ Write-Error "nuget.exe failed to download."
+ exit
+ }
+ }
+
+ $nuget = $downloadedNuGetExe
+}
+
+Write-Host "Using nuget.exe path: $nuget"
+Write-Host " "
+
+# Download the latest version of the verification tool.
+Write-Host "Downloading the latest version of $packageId from $packageSource..."
+Write-Host $fence
+& $nuget install $packageId `
+ -Prerelease `
+ -OutputDirectory $DownloadPath `
+ -Source $PackageSource
+Write-Host $fence
+Write-Host " "
+
+if ($LASTEXITCODE -ne 0) {
+ Write-Error "nuget.exe failed to fetch the verify tool."
+ exit
+}
+
+# Find the most recently downloaded tool
+Write-Host "Finding the most recently downloaded verification tool."
+$verifyProbePath = Join-Path $DownloadPath "$packageId.*"
+$verifyPath = Get-ChildItem -Path $verifyProbePath -Directory `
+ | Sort-Object -Property LastWriteTime -Descending `
+ | Select-Object -First 1
+$verify = Join-Path $verifyPath "tools\NuGet.VerifyMicrosoftPackage.exe"
+Write-Host "Using verification tool: $verify"
+Write-Host " "
+
+# Execute the verification tool.
+Write-Host "Executing the verify tool..."
+Write-Host $fence
+& $verify $args
+Write-Host $fence
+Write-Host " "
+
+# Respond to the exit code.
+if ($LASTEXITCODE -ne 0) {
+ Write-Error "The verify tool found some problems."
+} else {
+ Write-Output "The verify tool succeeded."
+}
diff --git a/eng/common/post-build/post-build-utils.ps1 b/eng/common/post-build/post-build-utils.ps1
deleted file mode 100644
index 534f6988..00000000
--- a/eng/common/post-build/post-build-utils.ps1
+++ /dev/null
@@ -1,91 +0,0 @@
-# Most of the functions in this file require the variables `MaestroApiEndPoint`,
-# `MaestroApiVersion` and `MaestroApiAccessToken` to be globally available.
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-
-# `tools.ps1` checks $ci to perform some actions. Since the post-build
-# scripts don't necessarily execute in the same agent that run the
-# build.ps1/sh script this variable isn't automatically set.
-$ci = $true
-$disableConfigureToolsetImport = $true
-. $PSScriptRoot\..\tools.ps1
-
-function Create-MaestroApiRequestHeaders([string]$ContentType = 'application/json') {
- Validate-MaestroVars
-
- $headers = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
- $headers.Add('Accept', $ContentType)
- $headers.Add('Authorization',"Bearer $MaestroApiAccessToken")
- return $headers
-}
-
-function Get-MaestroChannel([int]$ChannelId) {
- Validate-MaestroVars
-
- $apiHeaders = Create-MaestroApiRequestHeaders
- $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}?api-version=$MaestroApiVersion"
-
- $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
- return $result
-}
-
-function Get-MaestroBuild([int]$BuildId) {
- Validate-MaestroVars
-
- $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
- $apiEndpoint = "$MaestroApiEndPoint/api/builds/${BuildId}?api-version=$MaestroApiVersion"
-
- $result = try { return Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
- return $result
-}
-
-function Get-MaestroSubscriptions([string]$SourceRepository, [int]$ChannelId) {
- Validate-MaestroVars
-
- $SourceRepository = [System.Web.HttpUtility]::UrlEncode($SourceRepository)
- $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
- $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions?sourceRepository=$SourceRepository&channelId=$ChannelId&api-version=$MaestroApiVersion"
-
- $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
- return $result
-}
-
-function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) {
- Validate-MaestroVars
-
- $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
- $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion"
- Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null
-}
-
-function Trigger-Subscription([string]$SubscriptionId) {
- Validate-MaestroVars
-
- $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
- $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion"
- Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null
-}
-
-function Validate-MaestroVars {
- try {
- Get-Variable MaestroApiEndPoint | Out-Null
- Get-Variable MaestroApiVersion | Out-Null
- Get-Variable MaestroApiAccessToken | Out-Null
-
- if (!($MaestroApiEndPoint -Match '^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$')) {
- Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'"
- ExitWithExitCode 1
- }
-
- if (!($MaestroApiVersion -Match '^[0-9]{4}-[0-9]{2}-[0-9]{2}$')) {
- Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'"
- ExitWithExitCode 1
- }
- }
- catch {
- Write-PipelineTelemetryError -Category 'MaestroVars' -Message 'Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script.'
- Write-Host $_
- ExitWithExitCode 1
- }
-}
diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1
index 238945cb..48e55598 100644
--- a/eng/common/post-build/publish-using-darc.ps1
+++ b/eng/common/post-build/publish-using-darc.ps1
@@ -5,11 +5,20 @@ param(
[Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
[Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
[Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
- [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters
+ [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters,
+ [Parameter(Mandatory=$false)][string] $RequireDefaultChannels,
+ [Parameter(Mandatory=$false)][string] $SkipAssetsPublishing,
+ [Parameter(Mandatory=$false)][string] $runtimeSourceFeed,
+ [Parameter(Mandatory=$false)][string] $runtimeSourceFeedKey
)
try {
- . $PSScriptRoot\post-build-utils.ps1
+ # `tools.ps1` checks $ci to perform some actions. Since the post-build
+ # scripts don't necessarily execute in the same agent that run the
+ # build.ps1/sh script this variable isn't automatically set.
+ $ci = $true
+ $disableConfigureToolsetImport = $true
+ . $PSScriptRoot\..\tools.ps1
$darc = Get-Darc
@@ -28,6 +37,14 @@ try {
if ("false" -eq $WaitPublishingFinish) {
$optionalParams.Add("--no-wait") | Out-Null
}
+
+ if ("true" -eq $RequireDefaultChannels) {
+ $optionalParams.Add("--default-channels-required") | Out-Null
+ }
+
+ if ("true" -eq $SkipAssetsPublishing) {
+ $optionalParams.Add("--skip-assets-publishing") | Out-Null
+ }
& $darc add-build-to-channel `
--id $buildId `
@@ -37,6 +54,7 @@ try {
--azdev-pat "$AzdoToken" `
--bar-uri "$MaestroApiEndPoint" `
--ci `
+ --verbose `
@optionalParams
if ($LastExitCode -ne 0) {
diff --git a/eng/common/post-build/redact-logs.ps1 b/eng/common/post-build/redact-logs.ps1
new file mode 100644
index 00000000..672f4e26
--- /dev/null
+++ b/eng/common/post-build/redact-logs.ps1
@@ -0,0 +1,91 @@
+[CmdletBinding(PositionalBinding=$False)]
+param(
+ [Parameter(Mandatory=$true, Position=0)][string] $InputPath,
+ [Parameter(Mandatory=$true)][string] $BinlogToolVersion,
+ [Parameter(Mandatory=$false)][string] $DotnetPath,
+ [Parameter(Mandatory=$false)][string] $PackageFeed = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json',
+ # File with strings to redact, one string per line.
+ # Lines starting with '# ' are treated as comments and ignored.
+ [Parameter(Mandatory=$false)][string] $TokensFilePath,
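+ # Illustrative example of a tokens file (contents are hypothetical):
+ #   # this line starts with '# ' and is skipped
+ #   my-secret-connection-string
+ #   $(UnexpandedPipelineVar)   <- skipped later because it looks like an unexpanded AzDO variable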
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$TokensToRedact,
+ [Parameter(Mandatory=$false)][string] $runtimeSourceFeed,
+ [Parameter(Mandatory=$false)][string] $runtimeSourceFeedKey
+)
+
+try {
+ $ErrorActionPreference = 'Stop'
+ Set-StrictMode -Version 2.0
+
+ # `tools.ps1` checks $ci to perform some actions. Since the post-build
+ # scripts don't necessarily execute in the same agent that run the
+ # build.ps1/sh script this variable isn't automatically set.
+ $ci = $true
+ $disableConfigureToolsetImport = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ $packageName = 'binlogtool'
+
+ $dotnet = $DotnetPath
+
+ if (!$dotnet) {
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ }
+
+ $toolList = & "$dotnet" tool list -g
+
+ if ($toolList -like "*$packageName*") {
+ & "$dotnet" tool uninstall $packageName -g
+ }
+
+ $toolPath = "$PSScriptRoot\..\..\..\.tools"
+ $verbosity = 'minimal'
+
+ New-Item -ItemType Directory -Force -Path $toolPath
+
+ Push-Location -Path $toolPath
+
+ try {
+ Write-Host "Installing Binlog redactor CLI..."
+ Write-Host "'$dotnet' new tool-manifest"
+ & "$dotnet" new tool-manifest
+ Write-Host "'$dotnet' tool install $packageName --local --source '$PackageFeed' -v $verbosity --version $BinlogToolVersion"
+ & "$dotnet" tool install $packageName --local --source "$PackageFeed" -v $verbosity --version $BinlogToolVersion
+
+ if (Test-Path $TokensFilePath) {
+ Write-Host "Adding additional sensitive data for redaction from file: " $TokensFilePath
+ $TokensToRedact += Get-Content -Path $TokensFilePath | Foreach {$_.Trim()} | Where { $_ -notmatch "^# " }
+ }
+
+ $optionalParams = [System.Collections.ArrayList]::new()
+
+ Foreach ($p in $TokensToRedact)
+ {
+ if($p -match '^\$\(.*\)$')
+ {
+ Write-Host ("Ignoring token {0} as it is probably unexpanded AzDO variable" -f $p)
+ }
+ elseif($p)
+ {
+ $optionalParams.Add("-p:" + $p) | Out-Null
+ }
+ }
+
+ & $dotnet binlogtool redact --input:$InputPath --recurse --in-place `
+ @optionalParams
+
+ if ($LastExitCode -ne 0) {
+ Write-PipelineTelemetryError -Category 'Redactor' -Type 'warning' -Message "Problems using the redactor tool (exit code: $LastExitCode); continuing anyway."
+ }
+ }
+ finally {
+ Pop-Location
+ }
+
+ Write-Host 'done.'
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Category 'Redactor' -Message "There was an error while trying to redact logs. Error: $_"
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1
deleted file mode 100644
index 4011d324..00000000
--- a/eng/common/post-build/sourcelink-validation.ps1
+++ /dev/null
@@ -1,319 +0,0 @@
-param(
- [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored
- [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
- [Parameter(Mandatory=$false)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade
- [Parameter(Mandatory=$false)][string] $GHCommit, # GitHub commit SHA used to build the packages
- [Parameter(Mandatory=$true)][string] $SourcelinkCliVersion # Version of SourceLink CLI to use
-)
-
-. $PSScriptRoot\post-build-utils.ps1
-
-# Cache/HashMap (File -> Exist flag) used to consult whether a file exist
-# in the repository at a specific commit point. This is populated by inserting
-# all files present in the repo at a specific commit point.
-$global:RepoFiles = @{}
-
-# Maximum number of jobs to run in parallel
-$MaxParallelJobs = 16
-
-$MaxRetries = 5
-$RetryWaitTimeInSeconds = 30
-
-# Wait time between check for system load
-$SecondsBetweenLoadChecks = 10
-
-if (!$InputPath -or !(Test-Path $InputPath)){
- Write-Host "No files to validate."
- ExitWithExitCode 0
-}
-
-$ValidatePackage = {
- param(
- [string] $PackagePath # Full path to a Symbols.NuGet package
- )
-
- . $using:PSScriptRoot\..\tools.ps1
-
- # Ensure input file exist
- if (!(Test-Path $PackagePath)) {
- Write-Host "Input file does not exist: $PackagePath"
- return [pscustomobject]@{
- result = 1
- packagePath = $PackagePath
- }
- }
-
- # Extensions for which we'll look for SourceLink information
- # For now we'll only care about Portable & Embedded PDBs
- $RelevantExtensions = @('.dll', '.exe', '.pdb')
-
- Write-Host -NoNewLine 'Validating ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
-
- $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
- $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
- $FailedFiles = 0
-
- Add-Type -AssemblyName System.IO.Compression.FileSystem
-
- [System.IO.Directory]::CreateDirectory($ExtractPath) | Out-Null
-
- try {
- $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
-
- $zip.Entries |
- Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
- ForEach-Object {
- $FileName = $_.FullName
- $Extension = [System.IO.Path]::GetExtension($_.Name)
- $FakeName = -Join((New-Guid), $Extension)
- $TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName
-
- # We ignore resource DLLs
- if ($FileName.EndsWith('.resources.dll')) {
- return [pscustomobject]@{
- result = 0
- packagePath = $PackagePath
- }
- }
-
- [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
-
- $ValidateFile = {
- param(
- [string] $FullPath, # Full path to the module that has to be checked
- [string] $RealPath,
- [ref] $FailedFiles
- )
-
- $sourcelinkExe = "$env:USERPROFILE\.dotnet\tools"
- $sourcelinkExe = Resolve-Path "$sourcelinkExe\sourcelink.exe"
- $SourceLinkInfos = & $sourcelinkExe print-urls $FullPath | Out-String
-
- if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) {
- $NumFailedLinks = 0
-
- # We only care about Http addresses
- $Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches
-
- if ($Matches.Count -ne 0) {
- $Matches.Value |
- ForEach-Object {
- $Link = $_
- $CommitUrl = "https://raw.githubusercontent.com/${using:GHRepoName}/${using:GHCommit}/"
-
- $FilePath = $Link.Replace($CommitUrl, "")
- $Status = 200
- $Cache = $using:RepoFiles
-
- $attempts = 0
-
- while ($attempts -lt $using:MaxRetries) {
- if ( !($Cache.ContainsKey($FilePath)) ) {
- try {
- $Uri = $Link -as [System.URI]
-
- if ($Link -match "submodules") {
- # Skip submodule links until sourcelink properly handles submodules
- $Status = 200
- }
- elseif ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
- # Only GitHub links are valid
- $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
- }
- else {
- # If it's not a github link, we want to break out of the loop and not retry.
- $Status = 0
- $attempts = $using:MaxRetries
- }
- }
- catch {
- Write-Host $_
- $Status = 0
- }
- }
-
- if ($Status -ne 200) {
- $attempts++
-
- if ($attempts -lt $using:MaxRetries)
- {
- $attemptsLeft = $using:MaxRetries - $attempts
- Write-Warning "Download failed, $attemptsLeft attempts remaining, will retry in $using:RetryWaitTimeInSeconds seconds"
- Start-Sleep -Seconds $using:RetryWaitTimeInSeconds
- }
- else {
- if ($NumFailedLinks -eq 0) {
- if ($FailedFiles.Value -eq 0) {
- Write-Host
- }
-
- Write-Host "`tFile $RealPath has broken links:"
- }
-
- Write-Host "`t`tFailed to retrieve $Link"
-
- $NumFailedLinks++
- }
- }
- else {
- break
- }
- }
- }
- }
-
- if ($NumFailedLinks -ne 0) {
- $FailedFiles.value++
- $global:LASTEXITCODE = 1
- }
- }
- }
-
- &$ValidateFile $TargetFile $FileName ([ref]$FailedFiles)
- }
- }
- catch {
- Write-Host $_
- }
- finally {
- $zip.Dispose()
- }
-
- if ($FailedFiles -eq 0) {
- Write-Host 'Passed.'
- return [pscustomobject]@{
- result = 0
- packagePath = $PackagePath
- }
- }
- else {
- Write-PipelineTelemetryError -Category 'SourceLink' -Message "$PackagePath has broken SourceLink links."
- return [pscustomobject]@{
- result = 1
- packagePath = $PackagePath
- }
- }
-}
-
-function CheckJobResult(
- $result,
- $packagePath,
- [ref]$ValidationFailures,
- [switch]$logErrors) {
- if ($result -ne '0') {
- if ($logErrors) {
- Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links."
- }
- $ValidationFailures.Value++
- }
-}
-
-function ValidateSourceLinkLinks {
- if ($GHRepoName -ne '' -and !($GHRepoName -Match '^[^\s\/]+/[^\s\/]+$')) {
- if (!($GHRepoName -Match '^[^\s-]+-[^\s]+$')) {
- Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHRepoName should be in the format / or -. '$GHRepoName'"
- ExitWithExitCode 1
- }
- else {
- $GHRepoName = $GHRepoName -replace '^([^\s-]+)-([^\s]+)$', '$1/$2';
- }
- }
-
- if ($GHCommit -ne '' -and !($GHCommit -Match '^[0-9a-fA-F]{40}$')) {
- Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'"
- ExitWithExitCode 1
- }
-
- if ($GHRepoName -ne '' -and $GHCommit -ne '') {
- $RepoTreeURL = -Join('http://api.github.com/repos/', $GHRepoName, '/git/trees/', $GHCommit, '?recursive=1')
- $CodeExtensions = @('.cs', '.vb', '.fs', '.fsi', '.fsx', '.fsscript')
-
- try {
- # Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash
- $Data = Invoke-WebRequest $RepoTreeURL -UseBasicParsing | ConvertFrom-Json | Select-Object -ExpandProperty tree
-
- foreach ($file in $Data) {
- $Extension = [System.IO.Path]::GetExtension($file.path)
-
- if ($CodeExtensions.Contains($Extension)) {
- $RepoFiles[$file.path] = 1
- }
- }
- }
- catch {
- Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL . Execution will proceed without caching."
- }
- }
- elseif ($GHRepoName -ne '' -or $GHCommit -ne '') {
- Write-Host 'For using the http caching mechanism both GHRepoName and GHCommit should be informed.'
- }
-
- if (Test-Path $ExtractPath) {
- Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
- }
-
- $ValidationFailures = 0
-
- # Process each NuGet package in parallel
- Get-ChildItem "$InputPath\*.symbols.nupkg" |
- ForEach-Object {
- Write-Host "Starting $($_.FullName)"
- Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null
- $NumJobs = @(Get-Job -State 'Running').Count
-
- while ($NumJobs -ge $MaxParallelJobs) {
- Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again."
- sleep $SecondsBetweenLoadChecks
- $NumJobs = @(Get-Job -State 'Running').Count
- }
-
- foreach ($Job in @(Get-Job -State 'Completed')) {
- $jobResult = Wait-Job -Id $Job.Id | Receive-Job
- CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) -LogErrors
- Remove-Job -Id $Job.Id
- }
- }
-
- foreach ($Job in @(Get-Job)) {
- $jobResult = Wait-Job -Id $Job.Id | Receive-Job
- CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures)
- Remove-Job -Id $Job.Id
- }
- if ($ValidationFailures -gt 0) {
- Write-PipelineTelemetryError -Category 'SourceLink' -Message "$ValidationFailures package(s) failed validation."
- ExitWithExitCode 1
- }
-}
-
-function InstallSourcelinkCli {
- $sourcelinkCliPackageName = 'sourcelink'
-
- $dotnetRoot = InitializeDotNetCli -install:$true
- $dotnet = "$dotnetRoot\dotnet.exe"
- $toolList = & "$dotnet" tool list --global
-
- if (($toolList -like "*$sourcelinkCliPackageName*") -and ($toolList -like "*$sourcelinkCliVersion*")) {
- Write-Host "SourceLink CLI version $sourcelinkCliVersion is already installed."
- }
- else {
- Write-Host "Installing SourceLink CLI version $sourcelinkCliVersion..."
- Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
- & "$dotnet" tool install $sourcelinkCliPackageName --version $sourcelinkCliVersion --verbosity "minimal" --global
- }
-}
-
-try {
- InstallSourcelinkCli
-
- foreach ($Job in @(Get-Job)) {
- Remove-Job -Id $Job.Id
- }
-
- ValidateSourceLinkLinks
-}
-catch {
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Category 'SourceLink' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1
index cd2181ba..7146e593 100644
--- a/eng/common/post-build/symbols-validation.ps1
+++ b/eng/common/post-build/symbols-validation.ps1
@@ -322,8 +322,6 @@ function InstallDotnetSymbol {
}
try {
- . $PSScriptRoot\post-build-utils.ps1
-
InstallDotnetSymbol
foreach ($Job in @(Get-Job)) {
diff --git a/eng/common/post-build/trigger-subscriptions.ps1 b/eng/common/post-build/trigger-subscriptions.ps1
deleted file mode 100644
index ac9a9577..00000000
--- a/eng/common/post-build/trigger-subscriptions.ps1
+++ /dev/null
@@ -1,64 +0,0 @@
-param(
- [Parameter(Mandatory=$true)][string] $SourceRepo,
- [Parameter(Mandatory=$true)][int] $ChannelId,
- [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
- [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
- [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
-)
-
-try {
- . $PSScriptRoot\post-build-utils.ps1
-
- # Get all the $SourceRepo subscriptions
- $normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '')
- $subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId
-
- if (!$subscriptions) {
- Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'"
- ExitWithExitCode 0
- }
-
- $subscriptionsToTrigger = New-Object System.Collections.Generic.List[string]
- $failedTriggeredSubscription = $false
-
- # Get all enabled subscriptions that need dependency flow on 'everyBuild'
- foreach ($subscription in $subscriptions) {
- if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) {
- Write-Host "Should trigger this subscription: ${$subscription.id}"
- [void]$subscriptionsToTrigger.Add($subscription.id)
- }
- }
-
- foreach ($subscriptionToTrigger in $subscriptionsToTrigger) {
- try {
- Write-Host "Triggering subscription '$subscriptionToTrigger'."
-
- Trigger-Subscription -SubscriptionId $subscriptionToTrigger
-
- Write-Host 'done.'
- }
- catch
- {
- Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'"
- Write-Host $_
- Write-Host $_.ScriptStackTrace
- $failedTriggeredSubscription = $true
- }
- }
-
- if ($subscriptionsToTrigger.Count -eq 0) {
- Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'."
- }
- elseif ($failedTriggeredSubscription) {
- Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message 'At least one subscription failed to be triggered...'
- ExitWithExitCode 1
- }
- else {
- Write-Host 'All subscriptions were triggered successfully!'
- }
-}
-catch {
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/renovate.env b/eng/common/renovate.env
new file mode 100644
index 00000000..17ecc05d
--- /dev/null
+++ b/eng/common/renovate.env
@@ -0,0 +1,42 @@
+# Renovate Global Configuration
+# https://docs.renovatebot.com/self-hosted-configuration/
+#
+# NOTE: This file uses bash/shell format and is sourced via `. renovate.env`.
+# Values containing spaces or special characters must be quoted.
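+#
+# Example (illustrative) of how a pipeline step might consume this file before
+# invoking Renovate:
+#   . ./eng/common/renovate.env
+#   npx renovate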
+
+# Author to use for git commits made by Renovate
+# https://docs.renovatebot.com/configuration-options/#gitauthor
+export RENOVATE_GIT_AUTHOR='.NET Renovate '
+
+# Disable rate limiting for PR creation (0 = unlimited)
+# https://docs.renovatebot.com/presets-default/#prhourlylimitnone
+# https://docs.renovatebot.com/presets-default/#prconcurrentlimitnone
+export RENOVATE_PR_HOURLY_LIMIT=0
+export RENOVATE_PR_CONCURRENT_LIMIT=0
+
+# Skip the onboarding PR that Renovate normally creates for new repos
+# https://docs.renovatebot.com/config-overview/#onboarding
+export RENOVATE_ONBOARDING=false
+
+# Any Renovate config file in the cloned repository is ignored. Only
+# the Renovate config file from the repo where the pipeline is running
+# is used (they refer to the same repository, but the checked-out sources may differ).
+# https://docs.renovatebot.com/self-hosted-configuration/#requireconfig
+export RENOVATE_REQUIRE_CONFIG=ignored
+
+# Customize the PR body content. This removes some of the default
+# sections that aren't relevant in a self-hosted config.
+# https://docs.renovatebot.com/configuration-options/#prheader
+# https://docs.renovatebot.com/configuration-options/#prbodynotes
+# https://docs.renovatebot.com/configuration-options/#prbodytemplate
+export RENOVATE_PR_HEADER='## Automated Dependency Update'
+export RENOVATE_PR_BODY_NOTES='["This PR has been created automatically by the [.NET Renovate Bot](https://github.com/dotnet/arcade/blob/main/Documentation/Renovate.md) to update one or more dependencies in your repo. Please review the changes and merge the PR if everything looks good."]'
+export RENOVATE_PR_BODY_TEMPLATE='{{{header}}}{{{table}}}{{{warnings}}}{{{notes}}}{{{changelogs}}}'
+
+# Extend the global config with additional presets
+# https://docs.renovatebot.com/self-hosted-configuration/#globalextends
+# Disable the Dependency Dashboard issue that tracks all updates
+export RENOVATE_GLOBAL_EXTENDS='[":disableDependencyDashboard"]'
+
+# Allow any command to be run as a post-upgrade command.
+export RENOVATE_ALLOWED_COMMANDS='[".*"]'
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
index 73828dd3..b64b66a6 100644
--- a/eng/common/sdk-task.ps1
+++ b/eng/common/sdk-task.ps1
@@ -6,13 +6,17 @@ Param(
[string] $msbuildEngine = $null,
[switch] $restore,
[switch] $prepareMachine,
+ [switch][Alias('nobl')]$excludeCIBinaryLog,
+ [switch]$noWarnAsError,
[switch] $help,
+ [string] $runtimeSourceFeed = '',
+ [string] $runtimeSourceFeedKey = '',
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
)
$ci = $true
-$binaryLog = $true
-$warnAsError = $true
+$binaryLog = if ($excludeCIBinaryLog) { $false } else { $true }
+$warnAsError = if ($noWarnAsError) { $false } else { $true }
. $PSScriptRoot\tools.ps1
@@ -27,6 +31,7 @@ function Print-Usage() {
Write-Host "Advanced settings:"
Write-Host " -prepareMachine Prepare machine for CI run"
Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
+ Write-Host " -excludeCIBinaryLog When running on CI, allow no binary log (short: -nobl)"
Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild."
}
@@ -34,10 +39,11 @@ function Print-Usage() {
function Build([string]$target) {
$logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
$log = Join-Path $LogDir "$task$logSuffix.binlog"
+ $binaryLogArg = if ($binaryLog) { "/bl:$log" } else { "" }
$outputPath = Join-Path $ToolsetDir "$task\"
MSBuild $taskProject `
- /bl:$log `
+ $binaryLogArg `
/t:$target `
/p:Configuration=$configuration `
/p:RepoRoot=$RepoRoot `
@@ -64,7 +70,7 @@ try {
$GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
}
if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
- $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.8.1-2" -MemberType NoteProperty
+ $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "18.0.0" -MemberType NoteProperty
}
if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
$xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
diff --git a/eng/common/sdk-task.sh b/eng/common/sdk-task.sh
new file mode 100644
index 00000000..3270f83f
--- /dev/null
+++ b/eng/common/sdk-task.sh
@@ -0,0 +1,121 @@
+#!/usr/bin/env bash
+
+show_usage() {
+ echo "Common settings:"
+ echo " --task Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
+ echo " --restore Restore dependencies"
+ echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
+ echo " --help Print help and exit"
+ echo ""
+
+ echo "Advanced settings:"
+ echo " --excludeCIBinarylog Don't output binary log (short: -nobl)"
+ echo " --noWarnAsError Do not warn as error"
+ echo ""
+ echo "Command line arguments not listed above are passed thru to msbuild."
+}
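+
+# Example invocation (illustrative; replace <TaskName> with a project from the
+# Arcade SDK's SdkTasks directory):
+#   ./eng/common/sdk-task.sh --restore --task <TaskName>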
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+Build() {
+ local target=$1
+ local log_suffix=""
+ [[ "$target" != "Execute" ]] && log_suffix=".$target"
+ local log="$log_dir/$task$log_suffix.binlog"
+ local binaryLogArg=""
+ [[ $binary_log == true ]] && binaryLogArg="/bl:$log"
+ local output_path="$toolset_dir/$task/"
+
+ MSBuild "$taskProject" \
+ $binaryLogArg \
+ /t:"$target" \
+ /p:Configuration="$configuration" \
+ /p:RepoRoot="$repo_root" \
+ /p:BaseIntermediateOutputPath="$output_path" \
+ /v:"$verbosity" \
+ $properties
+}
+
+binary_log=true
+configuration="Debug"
+verbosity="minimal"
+exclude_ci_binary_log=false
+restore=false
+help=false
+properties=''
+warnAsError=true
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ --task)
+ task=$2
+ shift 2
+ ;;
+ --restore)
+ restore=true
+ shift 1
+ ;;
+ --verbosity)
+ verbosity=$2
+ shift 2
+ ;;
+ --excludecibinarylog|--nobl)
+ binary_log=false
+ exclude_ci_binary_log=true
+ shift 1
+ ;;
+ --nowarnaserror)
+ warnAsError=false
+ shift 1
+ ;;
+ --help)
+ help=true
+ shift 1
+ ;;
+ *)
+ properties="$properties $1"
+ shift 1
+ ;;
+ esac
+done
+
+ci=true
+
+if $help; then
+ show_usage
+ exit 0
+fi
+
+. "$scriptroot/tools.sh"
+InitializeToolset
+
+if [[ -z "$task" ]]; then
+ Write-PipelineTelemetryError -Category 'Task' -Name 'MissingTask' -Message "Missing required parameter '--task <value>'"
+ ExitWithExitCode 1
+fi
+
+taskProject=$(GetSdkTaskProject "$task")
+if [[ ! -e "$taskProject" ]]; then
+ Write-PipelineTelemetryError -Category 'Task' -Name 'UnknownTask' -Message "Unknown task: $task"
+ ExitWithExitCode 1
+fi
+
+if $restore; then
+ Build "Restore"
+fi
+
+Build "Execute"
+
+
+ExitWithExitCode 0
diff --git a/eng/common/sdl/NuGet.config b/eng/common/sdl/NuGet.config
deleted file mode 100644
index 5bfbb02e..00000000
--- a/eng/common/sdl/NuGet.config
+++ /dev/null
@@ -1,18 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/eng/common/sdl/configure-sdl-tool.ps1 b/eng/common/sdl/configure-sdl-tool.ps1
deleted file mode 100644
index 27f5a411..00000000
--- a/eng/common/sdl/configure-sdl-tool.ps1
+++ /dev/null
@@ -1,130 +0,0 @@
-Param(
- [string] $GuardianCliLocation,
- [string] $WorkingDirectory,
- [string] $TargetDirectory,
- [string] $GdnFolder,
- # The list of Guardian tools to configure. For each object in the array:
- # - If the item is a [hashtable], it must contain these entries:
- # - Name = The tool name as Guardian knows it.
- # - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique
- # among all tool entries with the same Name.
- # - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")'
- # - If the item is a [string] $v, it is treated as '@{ Name="$v" }'
- [object[]] $ToolsList,
- [string] $GuardianLoggerLevel='Standard',
- # Optional: Additional params to add to any tool using CredScan.
- [string[]] $CrScanAdditionalRunConfigParams,
- # Optional: Additional params to add to any tool using PoliCheck.
- [string[]] $PoliCheckAdditionalRunConfigParams,
- # Optional: Additional params to add to any tool using CodeQL/Semmle.
- [string[]] $CodeQLAdditionalRunConfigParams,
- # Optional: Additional params to add to any tool using Binskim.
- [string[]] $BinskimAdditionalRunConfigParams
-)
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-$disableConfigureToolsetImport = $true
-$global:LASTEXITCODE = 0
-
-try {
- # `tools.ps1` checks $ci to perform some actions. Since the SDL
- # scripts don't necessarily execute in the same agent that run the
- # build.ps1/sh script this variable isn't automatically set.
- $ci = $true
- . $PSScriptRoot\..\tools.ps1
-
- # Normalize tools list: all in [hashtable] form with defined values for each key.
- $ToolsList = $ToolsList |
- ForEach-Object {
- if ($_ -is [string]) {
- $_ = @{ Name = $_ }
- }
-
- if (-not ($_['Scenario'])) { $_.Scenario = "" }
- if (-not ($_['Args'])) { $_.Args = @() }
- $_
- }
-
- Write-Host "List of tools to configure:"
- $ToolsList | ForEach-Object { $_ | Out-String | Write-Host }
-
- # We store config files in the r directory of .gdn
- $gdnConfigPath = Join-Path $GdnFolder 'r'
- $ValidPath = Test-Path $GuardianCliLocation
-
- if ($ValidPath -eq $False)
- {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
- ExitWithExitCode 1
- }
-
- foreach ($tool in $ToolsList) {
- # Put together the name and scenario to make a unique key.
- $toolConfigName = $tool.Name
- if ($tool.Scenario) {
- $toolConfigName += "_" + $tool.Scenario
- }
-
- Write-Host "=== Configuring $toolConfigName..."
-
- $gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig"
-
- # For some tools, add default and automatic args.
- switch -Exact ($tool.Name) {
- 'credscan' {
- if ($targetDirectory) {
- $tool.Args += "`"TargetDirectory < $TargetDirectory`""
- }
- $tool.Args += "`"OutputType < pre`""
- $tool.Args += $CrScanAdditionalRunConfigParams
- }
- 'policheck' {
- if ($targetDirectory) {
- $tool.Args += "`"Target < $TargetDirectory`""
- }
- $tool.Args += $PoliCheckAdditionalRunConfigParams
- }
- {$_ -in 'semmle', 'codeql'} {
- if ($targetDirectory) {
- $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`""
- }
- $tool.Args += $CodeQLAdditionalRunConfigParams
- }
- 'binskim' {
- if ($targetDirectory) {
- # Binskim crashes due to specific PDBs. GitHub issue: https://github.com/microsoft/binskim/issues/924.
- # We are excluding all `_.pdb` files from the scan.
- $tool.Args += "`"Target < $TargetDirectory\**;-:file|$TargetDirectory\**\_.pdb`""
- }
- $tool.Args += $BinskimAdditionalRunConfigParams
- }
- }
-
- # Create variable pointing to the args array directly so we can use splat syntax later.
- $toolArgs = $tool.Args
-
- # Configure the tool. If args array is provided or the current tool has some default arguments
- # defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}",
- # one per parameter. Doc page for "guardian configure":
- # https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure
- Exec-BlockVerbosely {
- & $GuardianCliLocation configure `
- --working-directory $WorkingDirectory `
- --tool $tool.Name `
- --output-path $gdnConfigFile `
- --logger-level $GuardianLoggerLevel `
- --noninteractive `
- --force `
- $(if ($toolArgs) { "--args" }) @toolArgs
- Exit-IfNZEC "Sdl"
- }
-
- Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile"
- }
-}
-catch {
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1
deleted file mode 100644
index 81ded5b7..00000000
--- a/eng/common/sdl/execute-all-sdl-tools.ps1
+++ /dev/null
@@ -1,165 +0,0 @@
-Param(
- [string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified)
- [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified)
- [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified
- [string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade)
- [string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master
- [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located
- [string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located
-
- # Optional: list of SDL tools to run on source code. See 'configure-sdl-tool.ps1' for tools list
- # format.
- [object[]] $SourceToolsList,
- # Optional: list of SDL tools to run on built artifacts. See 'configure-sdl-tool.ps1' for tools
- # list format.
- [object[]] $ArtifactToolsList,
- # Optional: list of SDL tools to run without automatically specifying a target directory. See
- # 'configure-sdl-tool.ps1' for tools list format.
- [object[]] $CustomToolsList,
-
- [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
- [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
- [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber)
- [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed
- [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs.
- [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs.
- [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
- [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
- [string] $GuardianLoggerLevel='Standard', # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error
- [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
- [string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
- [string[]] $CodeQLAdditionalRunConfigParams, # Optional: Additional Params to custom build a Semmle/CodeQL run config in the format @("xyz < abc","sdf < 1")
- [string[]] $BinskimAdditionalRunConfigParams, # Optional: Additional Params to custom build a Binskim run config in the format @("xyz < abc","sdf < 1")
- [bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run
-)
-
-try {
- $ErrorActionPreference = 'Stop'
- Set-StrictMode -Version 2.0
- $disableConfigureToolsetImport = $true
- $global:LASTEXITCODE = 0
-
- # `tools.ps1` checks $ci to perform some actions. Since the SDL
- # scripts don't necessarily execute in the same agent that run the
- # build.ps1/sh script this variable isn't automatically set.
- $ci = $true
- . $PSScriptRoot\..\tools.ps1
-
- #Replace repo names to the format of org/repo
- if (!($Repository.contains('/'))) {
- $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2';
- }
- else{
- $RepoName = $Repository;
- }
-
- if ($GuardianPackageName) {
- $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path 'tools' 'guardian.cmd'))
- } else {
- $guardianCliLocation = $GuardianCliLocation
- }
-
- $workingDirectory = (Split-Path $SourceDirectory -Parent)
- $ValidPath = Test-Path $guardianCliLocation
-
- if ($ValidPath -eq $False)
- {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Invalid Guardian CLI Location.'
- ExitWithExitCode 1
- }
-
- Exec-BlockVerbosely {
- & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -GuardianLoggerLevel $GuardianLoggerLevel
- }
- $gdnFolder = Join-Path $workingDirectory '.gdn'
-
- if ($TsaOnboard) {
- if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
- Exec-BlockVerbosely {
- & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
- }
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
- } else {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not onboard to TSA -- not all required values ($TsaCodebaseName, $TsaNotificationEmail, $TsaCodebaseAdmin, $TsaBugAreaPath) were specified.'
- ExitWithExitCode 1
- }
- }
-
- # Configure a list of tools with a default target directory. Populates the ".gdn/r" directory.
- function Configure-ToolsList([object[]] $tools, [string] $targetDirectory) {
- if ($tools -and $tools.Count -gt 0) {
- Exec-BlockVerbosely {
- & $(Join-Path $PSScriptRoot 'configure-sdl-tool.ps1') `
- -GuardianCliLocation $guardianCliLocation `
- -WorkingDirectory $workingDirectory `
- -TargetDirectory $targetDirectory `
- -GdnFolder $gdnFolder `
- -ToolsList $tools `
- -GuardianLoggerLevel $GuardianLoggerLevel `
- -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams `
- -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams `
- -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams `
- -BinskimAdditionalRunConfigParams $BinskimAdditionalRunConfigParams
- if ($BreakOnFailure) {
- Exit-IfNZEC "Sdl"
- }
- }
- }
- }
-
- # Configure Artifact and Source tools with default Target directories.
- Configure-ToolsList $ArtifactToolsList $ArtifactsDirectory
- Configure-ToolsList $SourceToolsList $SourceDirectory
- # Configure custom tools with no default Target directory.
- Configure-ToolsList $CustomToolsList $null
-
- # At this point, all tools are configured in the ".gdn" directory. Run them all in a single call.
- # (If we used "run" multiple times, each run would overwrite data from earlier runs.)
- Exec-BlockVerbosely {
- & $(Join-Path $PSScriptRoot 'run-sdl.ps1') `
- -GuardianCliLocation $guardianCliLocation `
- -WorkingDirectory $SourceDirectory `
- -UpdateBaseline $UpdateBaseline `
- -GdnFolder $gdnFolder
- }
-
- if ($TsaPublish) {
- if ($TsaBranchName -and $BuildNumber) {
- if (-not $TsaRepositoryName) {
- $TsaRepositoryName = "$($Repository)-$($BranchName)"
- }
- Exec-BlockVerbosely {
- & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
- }
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
- } else {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not publish to TSA -- not all required values ($TsaBranchName, $BuildNumber) were specified.'
- ExitWithExitCode 1
- }
- }
-
- if ($BreakOnFailure) {
- Write-Host "Failing the build in case of breaking results..."
- Exec-BlockVerbosely {
- & $guardianCliLocation break --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
- }
- } else {
- Write-Host "Letting the build pass even if there were breaking results..."
- }
-}
-catch {
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- exit 1
-}
diff --git a/eng/common/sdl/extract-artifact-archives.ps1 b/eng/common/sdl/extract-artifact-archives.ps1
deleted file mode 100644
index 68da4fbf..00000000
--- a/eng/common/sdl/extract-artifact-archives.ps1
+++ /dev/null
@@ -1,63 +0,0 @@
-# This script looks for each archive file in a directory and extracts it into the target directory.
-# For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**".
-# Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip.
-param(
- # Full path to directory where archives are stored.
- [Parameter(Mandatory=$true)][string] $InputPath,
- # Full path to directory to extract archives into. May be the same as $InputPath.
- [Parameter(Mandatory=$true)][string] $ExtractPath
-)
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-
-$disableConfigureToolsetImport = $true
-
-try {
- # `tools.ps1` checks $ci to perform some actions. Since the SDL
- # scripts don't necessarily execute in the same agent that run the
- # build.ps1/sh script this variable isn't automatically set.
- $ci = $true
- . $PSScriptRoot\..\tools.ps1
-
- Measure-Command {
- $jobs = @()
-
- # Find archive files for non-Windows and Windows builds.
- $archiveFiles = @(
- Get-ChildItem (Join-Path $InputPath "*.tar.gz")
- Get-ChildItem (Join-Path $InputPath "*.zip")
- )
-
- foreach ($targzFile in $archiveFiles) {
- $jobs += Start-Job -ScriptBlock {
- $file = $using:targzFile
- $fileName = [System.IO.Path]::GetFileName($file)
- $extractDir = Join-Path $using:ExtractPath "$fileName.extracted"
-
- New-Item $extractDir -ItemType Directory -Force | Out-Null
-
- Write-Host "Extracting '$file' to '$extractDir'..."
-
- # Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early.
- # This type of quit skips the catch, so we wouldn't be able to tell which file triggered the
- # error. Save output so it can be stored in the exception string along with context.
- $output = tar -xf $file -C $extractDir 2>&1
- # Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we
- # don't have access to the outer scope.
- if ($LASTEXITCODE -ne 0) {
- throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'"
- }
-
- Write-Host "Extracted to $extractDir"
- }
- }
-
- Receive-Job $jobs -Wait
- }
-}
-catch {
- Write-Host $_
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/sdl/extract-artifact-packages.ps1 b/eng/common/sdl/extract-artifact-packages.ps1
deleted file mode 100644
index f031ed5b..00000000
--- a/eng/common/sdl/extract-artifact-packages.ps1
+++ /dev/null
@@ -1,82 +0,0 @@
-param(
- [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where artifact packages are stored
- [Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted
-)
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-
-$disableConfigureToolsetImport = $true
-
-function ExtractArtifacts {
- if (!(Test-Path $InputPath)) {
- Write-Host "Input Path does not exist: $InputPath"
- ExitWithExitCode 0
- }
- $Jobs = @()
- Get-ChildItem "$InputPath\*.nupkg" |
- ForEach-Object {
- $Jobs += Start-Job -ScriptBlock $ExtractPackage -ArgumentList $_.FullName
- }
-
- foreach ($Job in $Jobs) {
- Wait-Job -Id $Job.Id | Receive-Job
- }
-}
-
-try {
- # `tools.ps1` checks $ci to perform some actions. Since the SDL
- # scripts don't necessarily execute in the same agent that run the
- # build.ps1/sh script this variable isn't automatically set.
- $ci = $true
- . $PSScriptRoot\..\tools.ps1
-
- $ExtractPackage = {
- param(
- [string] $PackagePath # Full path to a NuGet package
- )
-
- if (!(Test-Path $PackagePath)) {
- Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath"
- ExitWithExitCode 1
- }
-
- $RelevantExtensions = @('.dll', '.exe', '.pdb')
- Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
-
- $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
- $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
-
- Add-Type -AssemblyName System.IO.Compression.FileSystem
-
- [System.IO.Directory]::CreateDirectory($ExtractPath);
-
- try {
- $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
-
- $zip.Entries |
- Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
- ForEach-Object {
- $TargetPath = Join-Path -Path $ExtractPath -ChildPath (Split-Path -Path $_.FullName)
- [System.IO.Directory]::CreateDirectory($TargetPath);
-
- $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.FullName
- [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile)
- }
- }
- catch {
- Write-Host $_
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
- }
- finally {
- $zip.Dispose()
- }
- }
- Measure-Command { ExtractArtifacts }
-}
-catch {
- Write-Host $_
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1
deleted file mode 100644
index 588ff8e2..00000000
--- a/eng/common/sdl/init-sdl.ps1
+++ /dev/null
@@ -1,47 +0,0 @@
-Param(
- [string] $GuardianCliLocation,
- [string] $Repository,
- [string] $BranchName='master',
- [string] $WorkingDirectory,
- [string] $GuardianLoggerLevel='Standard'
-)
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-$disableConfigureToolsetImport = $true
-$global:LASTEXITCODE = 0
-
-# `tools.ps1` checks $ci to perform some actions. Since the SDL
-# scripts don't necessarily execute in the same agent that run the
-# build.ps1/sh script this variable isn't automatically set.
-$ci = $true
-. $PSScriptRoot\..\tools.ps1
-
-# Don't display the console progress UI - it's a huge perf hit
-$ProgressPreference = 'SilentlyContinue'
-
-Add-Type -AssemblyName System.IO.Compression.FileSystem
-
-try {
- # if the folder does not exist, we'll do a guardian init and push it to the remote repository
- Write-Host 'Initializing Guardian...'
- Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel"
- & $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian init failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
- # We create the mainbaseline so it can be edited later
- Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline"
- & $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline
- if ($LASTEXITCODE -ne 0) {
- Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE."
- ExitWithExitCode $LASTEXITCODE
- }
- ExitWithExitCode 0
-}
-catch {
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config
deleted file mode 100644
index 4585cfd6..00000000
--- a/eng/common/sdl/packages.config
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-
-
diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1
deleted file mode 100644
index 2eac8c78..00000000
--- a/eng/common/sdl/run-sdl.ps1
+++ /dev/null
@@ -1,49 +0,0 @@
-Param(
- [string] $GuardianCliLocation,
- [string] $WorkingDirectory,
- [string] $GdnFolder,
- [string] $UpdateBaseline,
- [string] $GuardianLoggerLevel='Standard'
-)
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-$disableConfigureToolsetImport = $true
-$global:LASTEXITCODE = 0
-
-try {
- # `tools.ps1` checks $ci to perform some actions. Since the SDL
- # scripts don't necessarily execute in the same agent that run the
- # build.ps1/sh script this variable isn't automatically set.
- $ci = $true
- . $PSScriptRoot\..\tools.ps1
-
- # We store config files in the r directory of .gdn
- $gdnConfigPath = Join-Path $GdnFolder 'r'
- $ValidPath = Test-Path $GuardianCliLocation
-
- if ($ValidPath -eq $False)
- {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
- ExitWithExitCode 1
- }
-
- $gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig'
- Write-Host "Discovered Guardian config files:"
- $gdnConfigFiles | Out-String | Write-Host
-
- Exec-BlockVerbosely {
- & $GuardianCliLocation run `
- --working-directory $WorkingDirectory `
- --baseline mainbaseline `
- --update-baseline $UpdateBaseline `
- --logger-level $GuardianLoggerLevel `
- --config @gdnConfigFiles
- Exit-IfNZEC "Sdl"
- }
-}
-catch {
- Write-Host $_.ScriptStackTrace
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
diff --git a/eng/common/sdl/sdl.ps1 b/eng/common/sdl/sdl.ps1
deleted file mode 100644
index 7fe603fe..00000000
--- a/eng/common/sdl/sdl.ps1
+++ /dev/null
@@ -1,40 +0,0 @@
-
-function Install-Gdn {
- param(
- [Parameter(Mandatory=$true)]
- [string]$Path,
-
- [string]$Source = "https://pkgs.dev.azure.com/dnceng/_packaging/Guardian1ESPTUpstreamOrgFeed/nuget/v3/index.json",
-
- # If omitted, install the latest version of Guardian, otherwise install that specific version.
- [string]$Version
- )
-
- $ErrorActionPreference = 'Stop'
- Set-StrictMode -Version 2.0
- $disableConfigureToolsetImport = $true
- $global:LASTEXITCODE = 0
-
- # `tools.ps1` checks $ci to perform some actions. Since the SDL
- # scripts don't necessarily execute in the same agent that run the
- # build.ps1/sh script this variable isn't automatically set.
- $ci = $true
- . $PSScriptRoot\..\tools.ps1
-
- $argumentList = @("install", "Microsoft.Guardian.Cli.win-x64", "-Source $Source", "-OutputDirectory $Path", "-NonInteractive", "-NoCache")
-
- if ($Version) {
- $argumentList += "-Version $Version"
- }
-
- Start-Process nuget -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
-
- $gdnCliPath = Get-ChildItem -Filter guardian.cmd -Recurse -Path $Path
-
- if (!$gdnCliPath)
- {
- Write-PipelineTelemetryError -Category 'Sdl' -Message 'Failure installing Guardian'
- }
-
- return $gdnCliPath.FullName
-}
\ No newline at end of file
diff --git a/eng/common/sdl/trim-assets-version.ps1 b/eng/common/sdl/trim-assets-version.ps1
deleted file mode 100644
index a2e00487..00000000
--- a/eng/common/sdl/trim-assets-version.ps1
+++ /dev/null
@@ -1,75 +0,0 @@
-<#
-.SYNOPSIS
-Install and run the 'Microsoft.DotNet.VersionTools.Cli' tool with the 'trim-artifacts-version' command to trim the version from the NuGet assets file name.
-
-.PARAMETER InputPath
-Full path to directory where artifact packages are stored
-
-.PARAMETER Recursive
-Search for NuGet packages recursively
-
-#>
-
-Param(
- [string] $InputPath,
- [bool] $Recursive = $true
-)
-
-$CliToolName = "Microsoft.DotNet.VersionTools.Cli"
-
-function Install-VersionTools-Cli {
- param(
- [Parameter(Mandatory=$true)][string]$Version
- )
-
- Write-Host "Installing the package '$CliToolName' with a version of '$version' ..."
- $feed = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"
-
- $argumentList = @("tool", "install", "--local", "$CliToolName", "--add-source $feed", "--no-cache", "--version $Version", "--create-manifest-if-needed")
- Start-Process "$dotnet" -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
-}
-
-# -------------------------------------------------------------------
-
-if (!(Test-Path $InputPath)) {
- Write-Host "Input Path '$InputPath' does not exist"
- ExitWithExitCode 1
-}
-
-$ErrorActionPreference = 'Stop'
-Set-StrictMode -Version 2.0
-
-$disableConfigureToolsetImport = $true
-$global:LASTEXITCODE = 0
-
-# `tools.ps1` checks $ci to perform some actions. Since the SDL
-# scripts don't necessarily execute in the same agent that run the
-# build.ps1/sh script this variable isn't automatically set.
-$ci = $true
-. $PSScriptRoot\..\tools.ps1
-
-try {
- $dotnetRoot = InitializeDotNetCli -install:$true
- $dotnet = "$dotnetRoot\dotnet.exe"
-
- $toolsetVersion = Read-ArcadeSdkVersion
- Install-VersionTools-Cli -Version $toolsetVersion
-
- $cliToolFound = (& "$dotnet" tool list --local | Where-Object {$_.Split(' ')[0] -eq $CliToolName})
- if ($null -eq $cliToolFound) {
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "The '$CliToolName' tool is not installed."
- ExitWithExitCode 1
- }
-
- Exec-BlockVerbosely {
- & "$dotnet" $CliToolName trim-assets-version `
- --assets-path $InputPath `
- --recursive $Recursive
- Exit-IfNZEC "Sdl"
- }
-}
-catch {
- Write-Host $_
- Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
- ExitWithExitCode 1
-}
\ No newline at end of file
diff --git a/eng/common/template-guidance.md b/eng/common/template-guidance.md
new file mode 100644
index 00000000..f772aa3d
--- /dev/null
+++ b/eng/common/template-guidance.md
@@ -0,0 +1,128 @@
+# Overview
+
+Arcade provides templates for public (`/templates`) and 1ES pipeline template (`/templates-official`) scenarios. Pipelines that are required to be managed by 1ES pipeline templates should reference `/templates-official`; all other pipelines may reference `/templates`.
+
+## How to use
+
+Basic guidance is:
+
+- 1ES pipeline template or 1ES MicroBuild template runs should reference `eng/common/templates-official`. Any internal production-grade pipeline should use these templates.
+
+- All other runs should reference `eng/common/templates`.
+
+See [azure-pipelines.yml](../../azure-pipelines.yml) (templates-official example) or [azure-pipelines-pr.yml](../../azure-pipelines-pr.yml) (templates example) for examples.
+
+#### The `templateIs1ESManaged` parameter
+
+The `templateIs1ESManaged` parameter is available on most templates and affects which of the variants is used for nested templates. See [Development Notes](#development-notes) below for more information on the `templateIs1ESManaged` parameter.
+
+- For templates under `job/`, `jobs/`, `steps/`, or `post-build/`, this parameter must be explicitly set (see the sketch below).
+
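+A minimal, illustrative sketch of passing the parameter explicitly when referencing one of these templates (the template path, the chosen value, and the job body below are placeholders rather than a recommended configuration):
+
+``` yaml
+# azure-pipelines-pr.yml (illustrative only)
+jobs:
+- template: /eng/common/templates/jobs/jobs.yml
+  parameters:
+    # explicitly declare that this run is not managed by 1ES pipeline templates
+    templateIs1ESManaged: false
+    jobs:
+    - job: Windows
+      steps:
+      - script: echo "build placeholder"
+```
+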
+## Multiple outputs
+
+1ES pipeline templates impose a policy where every artifact publish step results in additional security scans being injected into your pipeline. When using `templates-official/jobs/jobs.yml`, Arcade reduces the number of injected scans by gathering all publishing outputs under [Build.ArtifactStagingDirectory](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services) and utilizing the [outputParentDirectory](https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs#multiple-outputs) feature of 1ES pipeline templates. When implementing your pipeline, if you ensure publish artifacts are located under `$(Build.ArtifactStagingDirectory)` and utilize the 1ES-provided template context, then you can reduce the number of security scans for your pipeline.
+
+Example:
+``` yaml
+# azure-pipelines.yml
+extends:
+ template: azure-pipelines/MicroBuild.1ES.Official.yml@MicroBuildTemplate
+ parameters:
+ stages:
+ - stage: build
+ jobs:
+ - template: /eng/common/templates-official/jobs/jobs.yml@self
+ parameters:
+ # 1ES makes use of outputs to reduce security task injection overhead
+ templateContext:
+ outputs:
+ - output: pipelineArtifact
+ displayName: 'Publish logs from source'
+ continueOnError: true
+ condition: always()
+ targetPath: $(Build.ArtifactStagingDirectory)/artifacts/log
+ artifactName: Logs
+ jobs:
+ - job: Windows
+ steps:
+ - script: echo "friendly neighborhood" > artifacts/marvel/spiderman.txt
+ # copy build outputs to artifact staging directory for publishing
+ - task: CopyFiles@2
+ displayName: Gather build output
+ inputs:
+ SourceFolder: '$(System.DefaultWorkingDirectory)/artifacts/marvel'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel'
+```
+
+Note: Multiple outputs are ONLY applicable to 1ES PT publishing (only usable when referencing `templates-official`).
+
+## Development notes
+
+**Folder / file structure**
+
+``` text
+eng\common\
+ [templates || templates-official]\
+ job\
+ job.yml (shim + artifact publishing logic)
+ onelocbuild.yml (shim)
+ publish-build-assets.yml (shim)
+ source-build.yml (shim)
+ source-index-stage1.yml (shim)
+ jobs\
+ jobs.yml (shim)
+ source-build.yml (shim)
+ post-build\
+ post-build.yml (shim)
+      common-variables.yml (shim)
+ setup-maestro-vars.yml (shim)
+ steps\
+ publish-build-artifacts.yml (logic)
+ publish-pipeline-artifacts.yml (logic)
+ component-governance.yml (shim)
+ publish-logs.yml (shim)
+ retain-build.yml (shim)
+ send-to-helix.yml (shim)
+ source-build.yml (shim)
+ variables\
+ pool-providers.yml (logic + redirect) # templates/variables/pool-providers.yml will redirect to templates-official/variables/pool-providers.yml if you are running in the internal project
+ core-templates\
+ job\
+ job.yml (logic)
+ onelocbuild.yml (logic)
+ publish-build-assets.yml (logic)
+ source-build.yml (logic)
+ source-index-stage1.yml (logic)
+ jobs\
+ jobs.yml (logic)
+ source-build.yml (logic)
+ post-build\
+      common-variables.yml (logic)
+ post-build.yml (logic)
+ setup-maestro-vars.yml (logic)
+ steps\
+ component-governance.yml (logic)
+ publish-build-artifacts.yml (redirect)
+ publish-logs.yml (logic)
+ publish-pipeline-artifacts.yml (redirect)
+ retain-build.yml (logic)
+ send-to-helix.yml (logic)
+ source-build.yml (logic)
+ variables\
+ pool-providers.yml (redirect)
+```
+
+In the structure above, each file is designated as "shim", "logic", or "redirect".
+
+- shim - represents a YAML file that acts as an intermediate step between pipeline logic and .NET Core Engineering's templates (`core-templates`), and defines the `is1ESPipeline` parameter value.
+
+- logic - represents actual base template logic.
+
+- redirect - represents a file in `core-templates` which redirects to the "logic" file in either `templates` or `templates-official`.
+
+Logic for Arcade's templates lives **primarily** in the `core-templates` folder. The exception is artifact publishing, which is handled differently between 1ES pipeline templates and standard templates. `templates` and `templates-official` provide shim entry points which redirect to `core-templates` while also defining the `is1ESPipeline` parameter. If a shim is referenced in `templates`, then `is1ESPipeline` is set to `false`. If a shim is referenced in `templates-official`, then `is1ESPipeline` is set to `true`.
+
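+As a sketch of the shim pattern (abridged from the job shim in this change; the inline comments are explanatory only and not part of the real file):
+
+``` yaml
+# eng/common/templates-official/job/job.yml (abridged)
+jobs:
+- template: /eng/common/core-templates/job/job.yml
+  parameters:
+    # referenced from templates-official, so the 1ES variant of nested templates is used
+    is1ESPipeline: true
+
+    # forward the caller's remaining parameters to the core template
+    ${{ each parameter in parameters }}:
+      ${{ if ne(parameter.key, 'is1ESPipeline') }}:
+        ${{ parameter.key }}: ${{ parameter.value }}
+```
+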
+Within `templates` and `templates-official`, the templates at the "stages" and "jobs" / "job" levels have been replaced with shims. Templates at the "steps" and "variables" levels are typically too granular to be replaced with shims and instead keep logic which is directly applicable to either scenario.
+
+Within `core-templates`, there are a handful of places where logic is dependent on which shim entry point was used. In those places, we redirect back to the respective logic file in `templates` or `templates-official`.
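+
+A redirect, in turn, can be sketched roughly as follows (illustrative only; consult the actual `core-templates` files for the exact contents):
+
+``` yaml
+# eng/common/core-templates/steps/publish-build-artifacts.yml (redirect, sketch)
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+  - template: /eng/common/templates/steps/publish-build-artifacts.yml
+    parameters:
+      ${{ each parameter in parameters }}:
+        ${{ parameter.key }}: ${{ parameter.value }}
+- ${{ if eq(parameters.is1ESPipeline, true) }}:
+  - template: /eng/common/templates-official/steps/publish-build-artifacts.yml
+    parameters:
+      ${{ each parameter in parameters }}:
+        ${{ parameter.key }}: ${{ parameter.value }}
+```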
diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml
index 1f035fee..a9d2839f 100644
--- a/eng/common/templates-official/job/job.yml
+++ b/eng/common/templates-official/job/job.yml
@@ -1,264 +1,86 @@
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
parameters:
-# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- cancelTimeoutInMinutes: ''
- condition: ''
- container: ''
- continueOnError: false
- dependsOn: ''
- displayName: ''
- pool: ''
- steps: []
- strategy: ''
- timeoutInMinutes: ''
- variables: []
- workspace: ''
- templateContext: ''
-
-# Job base template specific parameters
- # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
- artifacts: ''
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishBuildAssets: false
- enablePublishTestResults: false
- enablePublishUsingPipelines: false
- enableBuildRetry: false
- disableComponentGovernance: ''
- componentGovernanceIgnoreDirectories: ''
- mergeTestResults: false
- testRunTitle: ''
- testResultsFormat: ''
- name: ''
- preSteps: []
runAsPublic: false
-# Sbom related params
- enableSbom: true
- PackageVersion: 7.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+# SBOM-related params, unused now and can eventually be removed
+ enableSbom: unused
+ PackageVersion: unused
+ BuildDropPath: unused
jobs:
-- job: ${{ parameters.name }}
-
- ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
- cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
-
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
-
- ${{ if ne(parameters.container, '') }}:
- container: ${{ parameters.container }}
-
- ${{ if ne(parameters.continueOnError, '') }}:
- continueOnError: ${{ parameters.continueOnError }}
-
- ${{ if ne(parameters.dependsOn, '') }}:
- dependsOn: ${{ parameters.dependsOn }}
-
- ${{ if ne(parameters.displayName, '') }}:
- displayName: ${{ parameters.displayName }}
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
-
- ${{ if ne(parameters.strategy, '') }}:
- strategy: ${{ parameters.strategy }}
-
- ${{ if ne(parameters.timeoutInMinutes, '') }}:
- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
-
- ${{ if ne(parameters.templateContext, '') }}:
- templateContext: ${{ parameters.templateContext }}
-
- variables:
- - ${{ if ne(parameters.enableTelemetry, 'false') }}:
- - name: DOTNET_CLI_TELEMETRY_PROFILE
- value: '$(Build.Repository.Uri)'
- - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- - name: EnableRichCodeNavigation
- value: 'true'
- # Retry signature validation up to three times, waiting 2 seconds between attempts.
- # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
- value: 3,2000
- - ${{ each variable in parameters.variables }}:
- # handle name-value variable syntax
- # example:
- # - name: [key]
- # value: [value]
- - ${{ if ne(variable.name, '') }}:
- - name: ${{ variable.name }}
- value: ${{ variable.value }}
-
- # handle variable groups
- - ${{ if ne(variable.group, '') }}:
- - group: ${{ variable.group }}
-
- # handle template variable syntax
- # example:
- # - template: path/to/template.yml
- # parameters:
- # [key]: [value]
- - ${{ if ne(variable.template, '') }}:
- - template: ${{ variable.template }}
- ${{ if ne(variable.parameters, '') }}:
- parameters: ${{ variable.parameters }}
-
- # handle key-value variable syntax.
- # example:
- # - [key]: [value]
- - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
- - ${{ each pair in variable }}:
- - name: ${{ pair.key }}
- value: ${{ pair.value }}
-
- # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: DotNet-HelixApi-Access
-
- ${{ if ne(parameters.workspace, '') }}:
- workspace: ${{ parameters.workspace }}
-
- steps:
- - ${{ if ne(parameters.preSteps, '') }}:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - task: MicroBuildSigningPlugin@4
- displayName: Install MicroBuild plugin
- inputs:
- signType: $(_SignType)
- zipSources: false
- feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
- env:
- TeamName: $(_TeamName)
- MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)'
- continueOnError: ${{ parameters.continueOnError }}
- condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- - task: NuGetAuthenticate@1
-
- - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- - task: DownloadPipelineArtifact@2
- inputs:
- buildType: current
- artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
- targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
- itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
-
- - ${{ each step in parameters.steps }}:
- - ${{ step }}
-
- - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- - task: RichCodeNavIndexer@0
- displayName: RichCodeNav Upload
- inputs:
- languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }}
- richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
- uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
- continueOnError: true
-
- - template: /eng/common/templates-official/steps/component-governance.yml
- parameters:
- ${{ if eq(parameters.disableComponentGovernance, '') }}:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
- disableComponentGovernance: false
- ${{ else }}:
- disableComponentGovernance: true
- ${{ else }}:
- disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: MicroBuildCleanup@1
- displayName: Execute Microbuild cleanup tasks
- condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- env:
- TeamName: $(_TeamName)
-
- - ${{ if ne(parameters.artifacts.publish, '') }}:
- - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- - task: CopyFiles@2
- displayName: Gather binaries for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/bin'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- - task: CopyFiles@2
- displayName: Gather packages for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/packages'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish pipeline artifacts
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
- continueOnError: true
- condition: always()
- - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- - task: 1ES.PublishPipelineArtifact@1
- inputs:
- targetPath: 'artifacts/log'
- artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
- displayName: 'Publish logs'
- continueOnError: true
- condition: always()
-
- - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
- continueOnError: true
- condition: always()
-
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
- - task: PublishTestResults@2
- displayName: Publish XUnit Test Results
- inputs:
- testResultsFormat: 'xUnit'
- testResultsFiles: '*.xml'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
- - task: PublishTestResults@2
- displayName: Publish TRX Test Results
- inputs:
- testResultsFormat: 'VSTest'
- testResultsFiles: '*.trx'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- - template: /eng/common/templates-official/steps/generate-sbom.yml
- parameters:
- PackageVersion: ${{ parameters.packageVersion}}
- BuildDropPath: ${{ parameters.buildDropPath }}
- IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- - task: 1ES.PublishPipelineArtifact@1
- inputs:
- targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
- artifactName: 'BuildConfiguration'
- displayName: 'Publish build retry configuration'
- continueOnError: true
\ No newline at end of file
+- template: /eng/common/core-templates/job/job.yml
+ parameters:
+ is1ESPipeline: true
+
+ # publish artifacts
+  # for 1ES managed templates, use templateContext.outputs to handle multiple outputs.
+ templateContext:
+ outputParentDirectory: $(Build.ArtifactStagingDirectory)
+ outputs:
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - output: pipelineArtifact
+ displayName: Publish pipeline artifacts
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ condition: succeeded()
+ retryCountOnTaskFailure: 10 # for any files being locked
+ continueOnError: true
+ - output: pipelineArtifact
+ displayName: Publish pipeline artifacts
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}_Attempt$(System.JobAttempt)
+ condition: not(succeeded())
+ retryCountOnTaskFailure: 10 # for any files being locked
+ continueOnError: true
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - output: pipelineArtifact
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)_Attempt$(System.JobAttempt)') }}
+ displayName: 'Publish logs'
+ continueOnError: true
+ condition: always()
+ retryCountOnTaskFailure: 10 # for any files being locked
+ isProduction: false # logs are non-production artifacts
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}:
+ - output: pipelineArtifact
+ displayName: Publish Logs
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
+ continueOnError: true
+ condition: always()
+ retryCountOnTaskFailure: 10 # for any files being locked
+ isProduction: false # logs are non-production artifacts
+
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - output: pipelineArtifact
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/eng/common/BuildConfiguration'
+ artifactName: 'BuildConfiguration'
+ displayName: 'Publish build retry configuration'
+ continueOnError: true
+ retryCountOnTaskFailure: 10 # for any files being locked
+ isProduction: false # BuildConfiguration is a non-production artifact
+
+ # V4 publishing: automatically publish staged artifacts as a pipeline artifact.
+ # The artifact name matches the SDK's FutureArtifactName ($(System.PhaseName)_Artifacts),
+ # which is encoded in the asset manifest for downstream publishing to discover.
+ - ${{ if eq(parameters.publishingVersion, 4) }}:
+ - output: pipelineArtifact
+ displayName: 'Publish V4 pipeline artifacts'
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
+ artifactName: '$(System.PhaseName)_Artifacts'
+ continueOnError: true
+ retryCountOnTaskFailure: 10 # for any files being locked
+
+ # add any outputs provided via root yaml
+ - ${{ if ne(parameters.templateContext.outputs, '') }}:
+ - ${{ each output in parameters.templateContext.outputs }}:
+ - ${{ output }}
+
+ # add any remaining templateContext properties
+ ${{ each context in parameters.templateContext }}:
+ ${{ if and(ne(context.key, 'outputParentDirectory'), ne(context.key, 'outputs')) }}:
+ ${{ context.key }}: ${{ context.value }}
+
+ ${{ each parameter in parameters }}:
+ ${{ if and(ne(parameter.key, 'templateContext'), ne(parameter.key, 'is1ESPipeline')) }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/job/onelocbuild.yml b/eng/common/templates-official/job/onelocbuild.yml
index 52b4d05d..0f0c514b 100644
--- a/eng/common/templates-official/job/onelocbuild.yml
+++ b/eng/common/templates-official/job/onelocbuild.yml
@@ -1,112 +1,7 @@
-parameters:
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: ''
-
- CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
- GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
-
- SourcesDirectory: $(Build.SourcesDirectory)
- CreatePr: true
- AutoCompletePr: false
- ReusePr: true
- UseLfLineEndings: true
- UseCheckedInLocProjectJson: false
- SkipLocProjectJsonGeneration: false
- LanguageSet: VS_Main_Languages
- LclSource: lclFilesInRepo
- LclPackageId: ''
- RepoType: gitHub
- GitHubOrg: dotnet
- MirrorRepo: ''
- MirrorBranch: main
- condition: ''
- JobNameSuffix: ''
-
jobs:
-- job: OneLocBuild${{ parameters.JobNameSuffix }}
-
- dependsOn: ${{ parameters.dependsOn }}
-
- displayName: OneLocBuild${{ parameters.JobNameSuffix }}
-
- variables:
- - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
- - name: _GenerateLocProjectArguments
- value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
- -LanguageSet "${{ parameters.LanguageSet }}"
- -CreateNeutralXlfs
- - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
- - name: _GenerateLocProjectArguments
- value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
- - template: /eng/common/templates-official/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
-
- steps:
- - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- - task: Powershell@2
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
- arguments: $(_GenerateLocProjectArguments)
- displayName: Generate LocProject.json
- condition: ${{ parameters.condition }}
-
- - task: OneLocBuild@2
- displayName: OneLocBuild
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- inputs:
- locProj: eng/Localize/LocProject.json
- outDir: $(Build.ArtifactStagingDirectory)
- lclSource: ${{ parameters.LclSource }}
- lclPackageId: ${{ parameters.LclPackageId }}
- isCreatePrSelected: ${{ parameters.CreatePr }}
- isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
- ${{ if eq(parameters.CreatePr, true) }}:
- isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- isShouldReusePrSelected: ${{ parameters.ReusePr }}
- packageSourceAuth: patAuth
- patVariable: ${{ parameters.CeapexPat }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- repoType: ${{ parameters.RepoType }}
- gitHubPatVariable: "${{ parameters.GithubPat }}"
- ${{ if ne(parameters.MirrorRepo, '') }}:
- isMirrorRepoSelected: true
- gitHubOrganization: ${{ parameters.GitHubOrg }}
- mirrorRepo: ${{ parameters.MirrorRepo }}
- mirrorBranch: ${{ parameters.MirrorBranch }}
- condition: ${{ parameters.condition }}
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Localization Files
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
+- template: /eng/common/core-templates/job/onelocbuild.yml
+ parameters:
+ is1ESPipeline: true
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish LocProject.json
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
\ No newline at end of file
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/job/publish-build-assets.yml b/eng/common/templates-official/job/publish-build-assets.yml
index 01173288..d667a70e 100644
--- a/eng/common/templates-official/job/publish-build-assets.yml
+++ b/eng/common/templates-official/job/publish-build-assets.yml
@@ -1,160 +1,7 @@
-parameters:
- configuration: 'Debug'
-
- # Optional: condition for the job to run
- condition: ''
-
- # Optional: 'true' if future jobs should run even if this job fails
- continueOnError: false
-
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: {}
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishUsingPipelines: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishAssetsImmediately: false
-
- artifactsPublishingAdditionalParameters: ''
-
- signingValidationAdditionalParameters: ''
-
jobs:
-- job: Asset_Registry_Publish
-
- dependsOn: ${{ parameters.dependsOn }}
- timeoutInMinutes: 150
-
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- displayName: Publish Assets
- ${{ else }}:
- displayName: Publish to Build Asset Registry
-
- variables:
- - template: /eng/common/templates-official/variables/pool-providers.yml
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: Publish-Build-Assets
- - group: AzureDevOps-Artifact-Feeds-Pats
- - name: runCodesignValidationInjection
- value: false
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates-official/post-build/common-variables.yml
-
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: NetCore1ESPool-Publishing-Internal
- image: windows.vs2019.amd64
- os: windows
- steps:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download artifact
- inputs:
- artifactName: AssetManifests
- downloadPath: '$(Build.StagingDirectory)/Download'
- checkDownloadedFiles: true
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: NuGetAuthenticate@1
-
- - task: AzureCLI@2
- displayName: Publish Build Assets
- inputs:
- azureSubscription: "Darc: Maestro Production"
- scriptType: ps
- scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1
- arguments: >
- -task PublishBuildAssets -restore -msbuildEngine dotnet
- /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
- /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
- /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
- /p:OfficialBuildId=$(Build.BuildNumber)
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: powershell@2
- displayName: Create ReleaseConfigs Artifact
- inputs:
- targetType: inline
- script: |
- New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force
- $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt"
- Add-Content -Path $filePath -Value $(BARBuildId)
- Add-Content -Path $filePath -Value "$(DefaultChannels)"
- Add-Content -Path $filePath -Value $(IsStableBuild)
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish ReleaseConfigs Artifact
- inputs:
- PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - task: powershell@2
- displayName: Check if SymbolPublishingExclusionsFile.txt exists
- inputs:
- targetType: inline
- script: |
- $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
- if(Test-Path -Path $symbolExclusionfile)
- {
- Write-Host "SymbolExclusionFile exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
- }
- else{
- Write-Host "Symbols Exclusion file does not exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
- }
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish SymbolPublishingExclusionsFile Artifact
- condition: eq(variables['SymbolExclusionFile'], 'true')
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates-official/post-build/setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: AzureCLI@2
- displayName: Publish Using Darc
- inputs:
- azureSubscription: "Darc: Maestro Production"
- scriptType: ps
- scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion 3
- -AzdoToken '$(System.AccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+- template: /eng/common/core-templates/job/publish-build-assets.yml
+ parameters:
+ is1ESPipeline: true
- - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- - template: /eng/common/templates-official/steps/publish-logs.yml
- parameters:
- JobLabel: 'Publish_Artifacts_Logs'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/job/source-build.yml b/eng/common/templates-official/job/source-build.yml
index f983033b..1a480034 100644
--- a/eng/common/templates-official/job/source-build.yml
+++ b/eng/common/templates-official/job/source-build.yml
@@ -1,75 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI. The template produces a server job with a
- # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
-
- # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
- jobNamePrefix: 'Source_Build'
-
- # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
- # managed-only repositories. This is an object with these properties:
- #
- # name: ''
- # The name of the job. This is included in the job ID.
- # targetRID: ''
- # The name of the target RID to use, instead of the one auto-detected by Arcade.
- # nonPortable: false
- # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
- # linux-x64), and compiling against distro-provided packages rather than portable ones.
- # skipPublishValidation: false
- # Disables publishing validation. By default, a check is performed to ensure no packages are
- # published by source-build.
- # container: ''
- # A container to use. Runs in docker.
- # pool: {}
- # A pool to use. Runs directly on an agent.
- # buildScript: ''
- # Specifies the build script to invoke to perform the build in the repo. The default
- # './build.sh' should work for typical Arcade repositories, but this is customizable for
- # difficult situations.
- # jobProperties: {}
- # A list of job properties to inject at the top level, for potential extensibility beyond
- # container and pool.
- platform: {}
-
- # If set to true and running on a non-public project,
- # Internal blob storage locations will be enabled.
- # This is not enabled by default because many repositories do not need internal sources
- # and do not need to have the required service connections approved in the pipeline.
- enableInternalSources: false
-
jobs:
-- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
- displayName: Source-Build (${{ parameters.platform.name }})
-
- ${{ each property in parameters.platform.jobProperties }}:
- ${{ property.key }}: ${{ property.value }}
-
- ${{ if ne(parameters.platform.container, '') }}:
- container: ${{ parameters.platform.container }}
-
- ${{ if eq(parameters.platform.pool, '') }}:
- # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
- # source-build builds run in Docker, including the default managed platform.
- # /eng/common/templates-official/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
- demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open
-
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
- image: 1es-mariner-2
- os: linux
-
- ${{ if ne(parameters.platform.pool, '') }}:
- pool: ${{ parameters.platform.pool }}
-
- workspace:
- clean: all
+- template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: true
- steps:
- - ${{ if eq(parameters.enableInternalSources, true) }}:
- - template: /eng/common/templates-official/steps/enable-internal-runtimes.yml
- - template: /eng/common/templates-official/steps/source-build.yml
- parameters:
- platform: ${{ parameters.platform }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/job/source-index-stage1.yml b/eng/common/templates-official/job/source-index-stage1.yml
index 60dfb6b2..6d5ead31 100644
--- a/eng/common/templates-official/job/source-index-stage1.yml
+++ b/eng/common/templates-official/job/source-index-stage1.yml
@@ -1,83 +1,7 @@
-parameters:
- runAsPublic: false
- sourceIndexUploadPackageVersion: 2.0.0-20240502.12
- sourceIndexProcessBinlogPackageVersion: 1.0.1-20240129.2
- sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
- sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
- preSteps: []
- binlogPath: artifacts/log/Debug/Build.binlog
- condition: ''
- dependsOn: ''
- pool: ''
-
jobs:
-- job: SourceIndexStage1
- dependsOn: ${{ parameters.dependsOn }}
- condition: ${{ parameters.condition }}
- variables:
- - name: SourceIndexUploadPackageVersion
- value: ${{ parameters.sourceIndexUploadPackageVersion }}
- - name: SourceIndexProcessBinlogPackageVersion
- value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }}
- - name: SourceIndexPackageSource
- value: ${{ parameters.sourceIndexPackageSource }}
- - name: BinlogPath
- value: ${{ parameters.binlogPath }}
- - template: /eng/common/templates-official/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $(DncEngPublicBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64.open
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $(DncEngInternalBuildPool)
- image: windows.vs2022.amd64
- os: windows
-
- steps:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - task: UseDotNet@2
- displayName: Use .NET 8 SDK
- inputs:
- packageType: sdk
- version: 8.0.x
- installationPath: $(Agent.TempDirectory)/dotnet
- workingDirectory: $(Agent.TempDirectory)
-
- - script: |
- $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- displayName: Download Tools
- # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
- workingDirectory: $(Agent.TempDirectory)
-
- - script: ${{ parameters.sourceIndexBuildCommand }}
- displayName: Build Repository
-
- - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
- displayName: Process Binlog into indexable sln
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: AzureCLI@2
- displayName: Get stage 1 auth token
- inputs:
- azureSubscription: 'SourceDotNet Stage1 Publish'
- addSpnToEnvironment: true
- scriptType: 'ps'
- scriptLocation: 'inlineScript'
- inlineScript: |
- echo "##vso[task.setvariable variable=ARM_CLIENT_ID;issecret=true]$env:servicePrincipalId"
- echo "##vso[task.setvariable variable=ARM_ID_TOKEN;issecret=true]$env:idToken"
- echo "##vso[task.setvariable variable=ARM_TENANT_ID;issecret=true]$env:tenantId"
-
- - script: |
- az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN)
- displayName: "Login to Azure"
+- template: /eng/common/core-templates/job/source-index-stage1.yml
+ parameters:
+ is1ESPipeline: true
- - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
- displayName: Upload stage1 artifacts to source index
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/jobs/codeql-build.yml b/eng/common/templates-official/jobs/codeql-build.yml
deleted file mode 100644
index b68d3c2f..00000000
--- a/eng/common/templates-official/jobs/codeql-build.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
-
-jobs:
-- template: /eng/common/templates-official/jobs/jobs.yml
- parameters:
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishTestResults: false
- enablePublishBuildAssets: false
- enablePublishUsingPipelines: false
- enableTelemetry: true
-
- variables:
- - group: Publish-Build-Assets
- # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
- # sync with the packages.config file.
- - name: DefaultGuardianVersion
- value: 0.109.0
- - name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
- - name: GuardianVersion
- value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
-
- jobs: ${{ parameters.jobs }}
-
diff --git a/eng/common/templates-official/jobs/jobs.yml b/eng/common/templates-official/jobs/jobs.yml
index 857a0f8b..007dedda 100644
--- a/eng/common/templates-official/jobs/jobs.yml
+++ b/eng/common/templates-official/jobs/jobs.yml
@@ -1,97 +1,7 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: Enable publishing using release pipelines
- enablePublishUsingPipelines: false
-
- # Optional: Enable running the source-build jobs to build repo from source
- enableSourceBuild: false
-
- # Optional: Parameters for source-build template.
- # See /eng/common/templates-official/jobs/source-build.yml for options
- sourceBuildParameters: []
-
- graphFileGeneration:
- # Optional: Enable generating the graph files at the end of the build
- enabled: false
- # Optional: Include toolset dependencies in the generated graph files
- includeToolset: false
-
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
-
- # Optional: Override automatically derived dependsOn value for "publish build assets" job
- publishBuildAssetsDependsOn: ''
-
- # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage.
- publishAssetsImmediately: false
-
- # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
- artifactsPublishingAdditionalParameters: ''
- signingValidationAdditionalParameters: ''
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- enableSourceIndex: false
- sourceIndexParams: {}
-
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
jobs:
-- ${{ each job in parameters.jobs }}:
- - template: ../job/job.yml
- parameters:
- # pass along parameters
- ${{ each parameter in parameters }}:
- ${{ if ne(parameter.key, 'jobs') }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
- # pass along job properties
- ${{ each property in job }}:
- ${{ if ne(property.key, 'job') }}:
- ${{ property.key }}: ${{ property.value }}
-
- name: ${{ job.job }}
-
-- ${{ if eq(parameters.enableSourceBuild, true) }}:
- - template: /eng/common/templates-official/jobs/source-build.yml
- parameters:
- allCompletedJobId: Source_Build_Complete
- ${{ each parameter in parameters.sourceBuildParameters }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
- - template: ../job/source-index-stage1.yml
- parameters:
- runAsPublic: ${{ parameters.runAsPublic }}
- ${{ each parameter in parameters.sourceIndexParams }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- - template: ../job/publish-build-assets.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- dependsOn:
- - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.jobs }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.enableSourceBuild, true) }}:
- - Source_Build_Complete
+- template: /eng/common/core-templates/jobs/jobs.yml
+ parameters:
+ is1ESPipeline: true
- runAsPublic: ${{ parameters.runAsPublic }}
- publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
- publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
- enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/jobs/source-build.yml b/eng/common/templates-official/jobs/source-build.yml
index 5cf6a269..483e7b61 100644
--- a/eng/common/templates-official/jobs/source-build.yml
+++ b/eng/common/templates-official/jobs/source-build.yml
@@ -1,54 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI. A job is created for each platform, as
- # well as an optional server job that completes when all platform jobs complete.
-
- # The name of the "join" job for all source-build platforms. If set to empty string, the job is
- # not included. Existing repo pipelines can use this job depend on all source-build jobs
- # completing without maintaining a separate list of every single job ID: just depend on this one
- # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
- allCompletedJobId: ''
-
- # See /eng/common/templates-official/job/source-build.yml
- jobNamePrefix: 'Source_Build'
-
- # This is the default platform provided by Arcade, intended for use by a managed-only repo.
- defaultManagedPlatform:
- name: 'Managed'
- container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
-
- # Defines the platforms on which to run build jobs. One job is created for each platform, and the
- # object in this array is sent to the job template as 'platform'. If no platforms are specified,
- # one job runs on 'defaultManagedPlatform'.
- platforms: []
-
- # If set to true and running on a non-public project,
- # Internal nuget and blob storage locations will be enabled.
- # This is not enabled by default because many repositories do not need internal sources
- # and do not need to have the required service connections approved in the pipeline.
- enableInternalSources: false
-
jobs:
+- template: /eng/common/core-templates/jobs/source-build.yml
+ parameters:
+ is1ESPipeline: true
-- ${{ if ne(parameters.allCompletedJobId, '') }}:
- - job: ${{ parameters.allCompletedJobId }}
- displayName: Source-Build Complete
- pool: server
- dependsOn:
- - ${{ each platform in parameters.platforms }}:
- - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- - ${{ if eq(length(parameters.platforms), 0) }}:
- - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
-
-- ${{ each platform in parameters.platforms }}:
- - template: /eng/common/templates-official/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ platform }}
- enableInternalSources: ${{ parameters.enableInternalSources }}
-
-- ${{ if eq(length(parameters.platforms), 0) }}:
- - template: /eng/common/templates-official/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ parameters.defaultManagedPlatform }}
- enableInternalSources: ${{ parameters.enableInternalSources }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates-official/post-build/common-variables.yml b/eng/common/templates-official/post-build/common-variables.yml
index c24193ac..c32fc492 100644
--- a/eng/common/templates-official/post-build/common-variables.yml
+++ b/eng/common/templates-official/post-build/common-variables.yml
@@ -1,22 +1,8 @@
variables:
- - group: Publish-Build-Assets
+- template: /eng/common/core-templates/post-build/common-variables.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: true
- # Whether the build is internal or not
- - name: IsInternalBuild
- value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
-
- # Default Maestro++ API Endpoint and API Version
- - name: MaestroApiEndPoint
- value: "https://maestro-prod.westus2.cloudapp.azure.com"
- - name: MaestroApiAccessToken
- value: $(MaestroAccessToken)
- - name: MaestroApiVersion
- value: "2020-02-20"
-
- - name: SourceLinkCLIVersion
- value: 3.0.0
- - name: SymbolToolVersion
- value: 1.0.1
-
- - name: runCodesignValidationInjection
- value: false
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates-official/post-build/post-build.yml b/eng/common/templates-official/post-build/post-build.yml
index b81b8770..2364c0fd 100644
--- a/eng/common/templates-official/post-build/post-build.yml
+++ b/eng/common/templates-official/post-build/post-build.yml
@@ -1,287 +1,8 @@
-parameters:
- # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
- # Publishing V1 is no longer supported
- # Publishing V2 is no longer supported
- # Publishing V3 is the default
- - name: publishingInfraVersion
- displayName: Which version of publishing should be used to promote the build definition?
- type: number
- default: 3
- values:
- - 3
-
- - name: BARBuildId
- displayName: BAR Build Id
- type: number
- default: 0
-
- - name: PromoteToChannelIds
- displayName: Channel to promote BARBuildId to
- type: string
- default: ''
-
- - name: enableSourceLinkValidation
- displayName: Enable SourceLink validation
- type: boolean
- default: false
-
- - name: enableSigningValidation
- displayName: Enable signing validation
- type: boolean
- default: true
-
- - name: enableSymbolValidation
- displayName: Enable symbol validation
- type: boolean
- default: false
-
- - name: enableNugetValidation
- displayName: Enable NuGet validation
- type: boolean
- default: true
-
- - name: publishInstallersAndChecksums
- displayName: Publish installers and checksums
- type: boolean
- default: true
-
- - name: SDLValidationParameters
- type: object
- default:
- enable: false
- publishGdn: false
- continueOnError: false
- params: ''
- artifactNames: ''
- downloadArtifacts: true
-
- # These parameters let the user customize the call to sdk-task.ps1 for publishing
- # symbols & general artifacts as well as for signing validation
- - name: symbolPublishingAdditionalParameters
- displayName: Symbol publishing additional parameters
- type: string
- default: ''
-
- - name: artifactsPublishingAdditionalParameters
- displayName: Artifact publishing additional parameters
- type: string
- default: ''
-
- - name: signingValidationAdditionalParameters
- displayName: Signing validation additional parameters
- type: string
- default: ''
-
- # Which stages should finish execution before post-build stages start
- - name: validateDependsOn
- type: object
- default:
- - build
-
- - name: publishDependsOn
- type: object
- default:
- - Validate
-
- # Optional: Call asset publishing rather than running in a separate stage
- - name: publishAssetsImmediately
- type: boolean
- default: false
-
stages:
-- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- - stage: Validate
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Validate Build Assets
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates-official/variables/pool-providers.yml
- jobs:
- - job:
- displayName: NuGet Validation
- condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
-
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
- arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
- -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
-
- - job:
- displayName: Signing Validation
- condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
- itemPattern: |
- **
- !**/Microsoft.SourceBuild.Intermediate.*.nupkg
-
- # This is necessary whenever we want to publish/restore to an AzDO private feed
- # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
- # otherwise it'll complain about accessing a private feed.
- - task: NuGetAuthenticate@1
- displayName: 'Authenticate to AzDO Feeds'
-
- # Signing validation will optionally work with the buildmanifest file which is downloaded from
- # Azure DevOps above.
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task SigningValidation -restore -msbuildEngine vs
- /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
- /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
- ${{ parameters.signingValidationAdditionalParameters }}
-
- - template: ../steps/publish-logs.yml
- parameters:
- StageLabel: 'Validation'
- JobLabel: 'Signing'
- BinlogToolVersion: $(BinlogToolVersion)
-
- - job:
- displayName: SourceLink Validation
- condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: BlobArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
- arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
- -ExtractPath $(Agent.BuildDirectory)/Extract/
- -GHRepoName $(Build.Repository.Name)
- -GHCommit $(Build.SourceVersion)
- -SourcelinkCliVersion $(SourceLinkCLIVersion)
- continueOnError: true
-
-- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- - stage: publish_using_darc
- ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- dependsOn: ${{ parameters.publishDependsOn }}
- ${{ else }}:
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Publish using Darc
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates-official/variables/pool-providers.yml
- jobs:
- - job:
- displayName: Publish Using Darc
- timeoutInMinutes: 120
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: NetCore1ESPool-Publishing-Internal
- image: windows.vs2019.amd64
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: NuGetAuthenticate@1
+- template: /eng/common/core-templates/post-build/post-build.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: true
- - task: AzureCLI@2
- displayName: Publish Using Darc
- inputs:
- azureSubscription: "Darc: Maestro Production"
- scriptType: ps
- scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
- -AzdoToken '$(System.AccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
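Callers of the official post-build template keep the same entry point; the parameters removed above are simply declared and resolved by the core template now. A minimal, assumed invocation (parameter names taken from the removed block, values illustrative):

stages:
- template: /eng/common/templates-official/post-build/post-build.yml
  parameters:
    publishingInfraVersion: 3
    enableSigningValidation: true
    enableSourceLinkValidation: false
    validateDependsOn:
    - build
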
diff --git a/eng/common/templates-official/post-build/setup-maestro-vars.yml b/eng/common/templates-official/post-build/setup-maestro-vars.yml
index 0c87f149..024397d8 100644
--- a/eng/common/templates-official/post-build/setup-maestro-vars.yml
+++ b/eng/common/templates-official/post-build/setup-maestro-vars.yml
@@ -1,70 +1,8 @@
-parameters:
- BARBuildId: ''
- PromoteToChannelIds: ''
-
steps:
- - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Release Configs
- inputs:
- buildType: current
- artifactName: ReleaseConfigs
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- name: setReleaseVars
- displayName: Set Release Configs Vars
- inputs:
- targetType: inline
- pwsh: true
- script: |
- try {
- if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
- $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
-
- $BarId = $Content | Select -Index 0
- $Channels = $Content | Select -Index 1
- $IsStableBuild = $Content | Select -Index 2
-
- $AzureDevOpsProject = $Env:System_TeamProject
- $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
- $AzureDevOpsBuildId = $Env:Build_BuildId
- }
- else {
- $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
-
- $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
- $apiHeaders.Add('Accept', 'application/json')
- $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
-
- $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
-
- $BarId = $Env:BARBuildId
- $Channels = $Env:PromoteToMaestroChannels -split ","
- $Channels = $Channels -join "]["
- $Channels = "[$Channels]"
-
- $IsStableBuild = $buildInfo.stable
- $AzureDevOpsProject = $buildInfo.azureDevOpsProject
- $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
- $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
- }
-
- Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
- Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
- Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: true
- Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
- Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
- Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
- }
- catch {
- Write-Host $_
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
- exit 1
- }
- env:
- MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
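For reference, the wrapper is still invoked the way the removed post-build stages called it above, passing the BAR build id and target channels through; this sketch mirrors that removed call rather than documenting the new core template:

steps:
- template: /eng/common/templates-official/post-build/setup-maestro-vars.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
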
diff --git a/eng/common/templates-official/post-build/trigger-subscription.yml b/eng/common/templates-official/post-build/trigger-subscription.yml
deleted file mode 100644
index da669030..00000000
--- a/eng/common/templates-official/post-build/trigger-subscription.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-parameters:
- ChannelId: 0
-
-steps:
-- task: PowerShell@2
- displayName: Triggering subscriptions
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
- arguments: -SourceRepo $(Build.Repository.Uri)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates-official/steps/add-build-to-channel.yml b/eng/common/templates-official/steps/add-build-to-channel.yml
deleted file mode 100644
index f67a210d..00000000
--- a/eng/common/templates-official/steps/add-build-to-channel.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-parameters:
- ChannelId: 0
-
-steps:
-- task: PowerShell@2
- displayName: Add Build to Channel
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
- arguments: -BuildId $(BARBuildId)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroApiAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates-official/steps/build-reason.yml b/eng/common/templates-official/steps/build-reason.yml
deleted file mode 100644
index eba58109..00000000
--- a/eng/common/templates-official/steps/build-reason.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-# build-reason.yml
-# Description: runs steps if build.reason condition is valid. conditions is a string of valid build reasons
-# to include steps (',' separated).
-parameters:
- conditions: ''
- steps: []
-
-steps:
- - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
- - ${{ parameters.steps }}
- - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
- - ${{ parameters.steps }}
diff --git a/eng/common/templates-official/steps/component-governance.yml b/eng/common/templates-official/steps/component-governance.yml
deleted file mode 100644
index cbba0596..00000000
--- a/eng/common/templates-official/steps/component-governance.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-parameters:
- disableComponentGovernance: false
- componentGovernanceIgnoreDirectories: ''
-
-steps:
-- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
- displayName: Set skipComponentGovernanceDetection variable
-- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- - task: ComponentGovernanceComponentDetection@0
- continueOnError: true
- inputs:
- ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/enable-internal-runtimes.yml b/eng/common/templates-official/steps/enable-internal-runtimes.yml
index 93a8394a..f9dd238c 100644
--- a/eng/common/templates-official/steps/enable-internal-runtimes.yml
+++ b/eng/common/templates-official/steps/enable-internal-runtimes.yml
@@ -1,28 +1,9 @@
# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64'
# variable with the base64-encoded SAS token, by default
-
-parameters:
-- name: federatedServiceConnection
- type: string
- default: 'dotnetbuilds-internal-read'
-- name: outputVariableName
- type: string
- default: 'dotnetbuilds-internal-container-read-token-base64'
-- name: expiryInHours
- type: number
- default: 1
-- name: base64Encode
- type: boolean
- default: true
-
steps:
-- ${{ if ne(variables['System.TeamProject'], 'public') }}:
- - template: /eng/common/templates-official/steps/get-delegation-sas.yml
- parameters:
- federatedServiceConnection: ${{ parameters.federatedServiceConnection }}
- outputVariableName: ${{ parameters.outputVariableName }}
- expiryInHours: ${{ parameters.expiryInHours }}
- base64Encode: ${{ parameters.base64Encode }}
- storageAccount: dotnetbuilds
- container: internal
- permissions: rl
+- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
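The defaults that used to live here (federatedServiceConnection, outputVariableName, expiryInHours, base64Encode) now come from the core template, so most callers need no parameters at all. A hedged sketch, with the override value purely illustrative:

steps:
- template: /eng/common/templates-official/steps/enable-internal-runtimes.yml
  parameters:
    # Optional override; the parameter name comes from the block removed above.
    expiryInHours: 2
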
diff --git a/eng/common/templates-official/steps/enable-internal-sources.yml b/eng/common/templates-official/steps/enable-internal-sources.yml
new file mode 100644
index 00000000..e6d57182
--- /dev/null
+++ b/eng/common/templates-official/steps/enable-internal-sources.yml
@@ -0,0 +1,7 @@
+steps:
+- template: /eng/common/core-templates/steps/enable-internal-sources.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/execute-codeql.yml b/eng/common/templates-official/steps/execute-codeql.yml
deleted file mode 100644
index 9b4a5ffa..00000000
--- a/eng/common/templates-official/steps/execute-codeql.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-parameters:
- # Language that should be analyzed. Defaults to csharp
- language: csharp
- # Build Commands
- buildCommands: ''
- overrideParameters: '' # Optional: to override values for parameters.
- additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
- # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
- # diagnosis of problems with specific tool configurations.
- publishGuardianDirectoryToPipeline: false
- # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
- # parameters rather than relying on YAML. It may be better to use a local script, because you can
- # reproduce results locally without piecing together a command based on the YAML.
- executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
- # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
- # 'continueOnError', the parameter value is not correctly picked up.
- # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
- # optional: determines whether to continue the build if the step errors;
- sdlContinueOnError: false
-
-steps:
-- template: /eng/common/templates-official/steps/execute-sdl.yml
- parameters:
- overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
- executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
- overrideParameters: ${{ parameters.overrideParameters }}
- additionalParameters: '${{ parameters.additionalParameters }}
- -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")'
- publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
- sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/execute-sdl.yml b/eng/common/templates-official/steps/execute-sdl.yml
deleted file mode 100644
index 301d5c59..00000000
--- a/eng/common/templates-official/steps/execute-sdl.yml
+++ /dev/null
@@ -1,86 +0,0 @@
-parameters:
- overrideGuardianVersion: ''
- executeAllSdlToolsScript: ''
- overrideParameters: ''
- additionalParameters: ''
- publishGuardianDirectoryToPipeline: false
- sdlContinueOnError: false
- condition: ''
-
-steps:
-- task: NuGetAuthenticate@1
-
-- task: NuGetToolInstaller@1
- displayName: 'Install NuGet.exe'
-
-- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
- - pwsh: |
- Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
- . .\sdl.ps1
- $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }}
- Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
- displayName: Install Guardian (Overridden)
-
-- ${{ if eq(parameters.overrideGuardianVersion, '') }}:
- - pwsh: |
- Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
- . .\sdl.ps1
- $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts
- Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
- displayName: Install Guardian
-
-- ${{ if ne(parameters.overrideParameters, '') }}:
- - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
- displayName: Execute SDL (Overridden)
- continueOnError: ${{ parameters.sdlContinueOnError }}
- condition: ${{ parameters.condition }}
-
-- ${{ if eq(parameters.overrideParameters, '') }}:
- - powershell: ${{ parameters.executeAllSdlToolsScript }}
- -GuardianCliLocation $(GuardianCliLocation)
- -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
- -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
- ${{ parameters.additionalParameters }}
- displayName: Execute SDL
- continueOnError: ${{ parameters.sdlContinueOnError }}
- condition: ${{ parameters.condition }}
-
-- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}:
- # We want to publish the Guardian results and configuration for easy diagnosis. However, the
- # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default
- # tooling files. Some of these files are large and aren't useful during an investigation, so
- # exclude them by simply deleting them before publishing. (As of writing, there is no documented
- # way to selectively exclude a dir from the pipeline artifact publish task.)
- - task: DeleteFiles@1
- displayName: Delete Guardian dependencies to avoid uploading
- inputs:
- SourceFolder: $(Agent.BuildDirectory)/.gdn
- Contents: |
- c
- i
- condition: succeededOrFailed()
-
- - publish: $(Agent.BuildDirectory)/.gdn
- artifact: GuardianConfiguration
- displayName: Publish GuardianConfiguration
- condition: succeededOrFailed()
-
- # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration
- # with the "SARIF SAST Scans Tab" Azure DevOps extension
- - task: CopyFiles@2
- displayName: Copy SARIF files
- inputs:
- flattenFolders: true
- sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/
- contents: '**/*.sarif'
- targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs
- condition: succeededOrFailed()
-
- # Use PublishBuildArtifacts because the SARIF extension only checks this case
- # see microsoft/sarif-azuredevops-extension#4
- - task: PublishBuildArtifacts@1
- displayName: Publish SARIF files to CodeAnalysisLogs container
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs
- artifactName: CodeAnalysisLogs
- condition: succeededOrFailed()
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/generate-sbom.yml b/eng/common/templates-official/steps/generate-sbom.yml
index 1bf43bf8..9a89a470 100644
--- a/eng/common/templates-official/steps/generate-sbom.yml
+++ b/eng/common/templates-official/steps/generate-sbom.yml
@@ -1,48 +1,7 @@
-# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
-# PackageName - The name of the package this SBOM represents.
-# PackageVersion - The version of the package this SBOM represents.
-# ManifestDirPath - The path of the directory where the generated manifest files will be placed
-# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
-
-parameters:
- PackageVersion: 8.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
- PackageName: '.NET'
- ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
- IgnoreDirectories: ''
- sbomContinueOnError: true
-
steps:
-- task: PowerShell@2
- displayName: Prep for SBOM generation in (Non-linux)
- condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
- inputs:
- filePath: ./eng/common/generate-sbom-prep.ps1
- arguments: ${{parameters.manifestDirPath}}
-
-# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
-- script: |
- chmod +x ./eng/common/generate-sbom-prep.sh
- ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
- displayName: Prep for SBOM generation in (Linux)
- condition: eq(variables['Agent.Os'], 'Linux')
- continueOnError: ${{ parameters.sbomContinueOnError }}
-
-- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
- displayName: 'Generate SBOM manifest'
- continueOnError: ${{ parameters.sbomContinueOnError }}
- inputs:
- PackageName: ${{ parameters.packageName }}
- BuildDropPath: ${{ parameters.buildDropPath }}
- PackageVersion: ${{ parameters.packageVersion }}
- ManifestDirPath: ${{ parameters.manifestDirPath }}
- ${{ if ne(parameters.IgnoreDirectories, '') }}:
- AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
-
-- task: 1ES.PublishPipelineArtifact@1
- displayName: Publish SBOM manifest
- continueOnError: ${{parameters.sbomContinueOnError}}
- inputs:
- targetPath: '${{parameters.manifestDirPath}}'
- artifactName: $(ARTIFACT_NAME)
+- template: /eng/common/core-templates/steps/generate-sbom.yml
+ parameters:
+ is1ESPipeline: true
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/get-delegation-sas.yml b/eng/common/templates-official/steps/get-delegation-sas.yml
index c0e8f913..c5a9c1f8 100644
--- a/eng/common/templates-official/steps/get-delegation-sas.yml
+++ b/eng/common/templates-official/steps/get-delegation-sas.yml
@@ -1,43 +1,7 @@
-parameters:
-- name: federatedServiceConnection
- type: string
-- name: outputVariableName
- type: string
-- name: expiryInHours
- type: number
- default: 1
-- name: base64Encode
- type: boolean
- default: false
-- name: storageAccount
- type: string
-- name: container
- type: string
-- name: permissions
- type: string
- default: 'rl'
-
steps:
-- task: AzureCLI@2
- displayName: 'Generate delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}'
- inputs:
- azureSubscription: ${{ parameters.federatedServiceConnection }}
- scriptType: 'pscore'
- scriptLocation: 'inlineScript'
- inlineScript: |
- # Calculate the expiration of the SAS token and convert to UTC
- $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
-
- $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
-
- if ($LASTEXITCODE -ne 0) {
- Write-Error "Failed to generate SAS token."
- exit 1
- }
-
- if ('${{ parameters.base64Encode }}' -eq 'true') {
- $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas))
- }
+- template: /eng/common/core-templates/steps/get-delegation-sas.yml
+ parameters:
+ is1ESPipeline: true
- Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
- Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas"
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/get-federated-access-token.yml b/eng/common/templates-official/steps/get-federated-access-token.yml
index 55e33bd3..c8dcf6b8 100644
--- a/eng/common/templates-official/steps/get-federated-access-token.yml
+++ b/eng/common/templates-official/steps/get-federated-access-token.yml
@@ -1,40 +1,7 @@
-parameters:
-- name: federatedServiceConnection
- type: string
-- name: outputVariableName
- type: string
-- name: stepName
- type: string
- default: 'getFederatedAccessToken'
-- name: condition
- type: string
- default: ''
-# Resource to get a token for. Common values include:
-# - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps
-# - 'https://storage.azure.com/' for storage
-# Defaults to Azure DevOps
-- name: resource
- type: string
- default: '499b84ac-1321-427f-aa17-267ca6975798'
-- name: isStepOutputVariable
- type: boolean
- default: false
-
steps:
-- task: AzureCLI@2
- displayName: 'Getting federated access token for feeds'
- name: ${{ parameters.stepName }}
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
- inputs:
- azureSubscription: ${{ parameters.federatedServiceConnection }}
- scriptType: 'pscore'
- scriptLocation: 'inlineScript'
- inlineScript: |
- $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv
- if ($LASTEXITCODE -ne 0) {
- Write-Error "Failed to get access token for resource '${{ parameters.resource }}'"
- exit 1
- }
- Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
- Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true;isOutput=${{ parameters.isStepOutputVariable }}]$accessToken"
\ No newline at end of file
+- template: /eng/common/core-templates/steps/get-federated-access-token.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
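The same forwarding pattern applies to the federated-access-token step; the removed comments note that the resource parameter defaults to the Azure DevOps application id. A sketch with assumed, repo-specific names for the service connection and output variable:

steps:
- template: /eng/common/templates-official/steps/get-federated-access-token.yml
  parameters:
    federatedServiceConnection: 'my-azdo-federated-connection'  # assumed name
    outputVariableName: 'FederatedAccessToken'                  # assumed name
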
diff --git a/eng/common/templates-official/steps/publish-build-artifacts.yml b/eng/common/templates-official/steps/publish-build-artifacts.yml
new file mode 100644
index 00000000..fcf6637b
--- /dev/null
+++ b/eng/common/templates-official/steps/publish-build-artifacts.yml
@@ -0,0 +1,46 @@
+parameters:
+- name: displayName
+ type: string
+ default: 'Publish to Build Artifact'
+
+- name: condition
+ type: string
+ default: succeeded()
+
+- name: artifactName
+ type: string
+
+- name: pathToPublish
+ type: string
+
+- name: continueOnError
+ type: boolean
+ default: false
+
+- name: publishLocation
+ type: string
+ default: 'Container'
+
+- name: is1ESPipeline
+ type: boolean
+ default: true
+
+- name: retryCountOnTaskFailure
+ type: string
+ default: 10
+
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
+- task: 1ES.PublishBuildArtifacts@1
+ displayName: ${{ parameters.displayName }}
+ condition: ${{ parameters.condition }}
+ ${{ if parameters.continueOnError }}:
+ continueOnError: ${{ parameters.continueOnError }}
+ inputs:
+ PublishLocation: ${{ parameters.publishLocation }}
+ PathtoPublish: ${{ parameters.pathToPublish }}
+ ${{ if parameters.artifactName }}:
+ ArtifactName: ${{ parameters.artifactName }}
+ ${{ if parameters.retryCountOnTaskFailure }}:
+ retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }}
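Two details in the new file are easy to miss: the first step is a deliberate tripwire (an invalid step whose key is an error message, so template expansion fails if a non-1ES pipeline references templates-official), and the optional inputs are only emitted when the matching parameter is set. A usage sketch with illustrative values:

steps:
- template: /eng/common/templates-official/steps/publish-build-artifacts.yml
  parameters:
    displayName: 'Publish build logs'
    pathToPublish: '$(Build.SourcesDirectory)/artifacts/log'
    artifactName: 'Logs'
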
diff --git a/eng/common/templates-official/steps/publish-logs.yml b/eng/common/templates-official/steps/publish-logs.yml
index 04012fed..579fd531 100644
--- a/eng/common/templates-official/steps/publish-logs.yml
+++ b/eng/common/templates-official/steps/publish-logs.yml
@@ -1,23 +1,7 @@
-parameters:
- StageLabel: ''
- JobLabel: ''
-
steps:
-- task: Powershell@2
- displayName: Prepare Binlogs to Upload
- inputs:
- targetType: inline
- script: |
- New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- continueOnError: true
- condition: always()
+- template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: true
-- task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
- PublishLocation: Container
- ArtifactName: PostBuildLogs
- continueOnError: true
- condition: always()
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/publish-pipeline-artifacts.yml b/eng/common/templates-official/steps/publish-pipeline-artifacts.yml
new file mode 100644
index 00000000..6a652ae1
--- /dev/null
+++ b/eng/common/templates-official/steps/publish-pipeline-artifacts.yml
@@ -0,0 +1,30 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: true
+
+- name: args
+ type: object
+ default: {}
+
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
+- task: 1ES.PublishPipelineArtifact@1
+ displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
+ ${{ if parameters.args.condition }}:
+ condition: ${{ parameters.args.condition }}
+ ${{ else }}:
+ condition: succeeded()
+ ${{ if parameters.args.continueOnError }}:
+ continueOnError: ${{ parameters.args.continueOnError }}
+ inputs:
+ targetPath: ${{ parameters.args.targetPath }}
+ ${{ if parameters.args.artifactName }}:
+ artifactName: ${{ parameters.args.artifactName }}
+ ${{ if parameters.args.properties }}:
+ properties: ${{ parameters.args.properties }}
+ ${{ if parameters.args.sbomEnabled }}:
+ sbomEnabled: ${{ parameters.args.sbomEnabled }}
+ ${{ if parameters.args.isProduction }}:
+ isProduction: ${{ parameters.args.isProduction }}
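Unlike publish-build-artifacts.yml, this wrapper takes a single args object and maps each present key onto the 1ES.PublishPipelineArtifact@1 task. A sketch of a caller, with the path and artifact name chosen only for illustration:

steps:
- template: /eng/common/templates-official/steps/publish-pipeline-artifacts.yml
  parameters:
    args:
      targetPath: '$(Build.ArtifactStagingDirectory)/packages'
      artifactName: 'Packages'
      displayName: 'Publish package artifacts'
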
diff --git a/eng/common/templates-official/steps/retain-build.yml b/eng/common/templates-official/steps/retain-build.yml
index 83d97a26..55945515 100644
--- a/eng/common/templates-official/steps/retain-build.yml
+++ b/eng/common/templates-official/steps/retain-build.yml
@@ -1,28 +1,7 @@
-parameters:
- # Optional azure devops PAT with build execute permissions for the build's organization,
- # only needed if the build that should be retained ran on a different organization than
- # the pipeline where this template is executing from
- Token: ''
- # Optional BuildId to retain, defaults to the current running build
- BuildId: ''
- # Azure devops Organization URI for the build in the https://dev.azure.com/ format.
- # Defaults to the organization the current pipeline is running on
- AzdoOrgUri: '$(System.CollectionUri)'
- # Azure devops project for the build. Defaults to the project the current pipeline is running on
- AzdoProject: '$(System.TeamProject)'
-
steps:
- - task: powershell@2
- inputs:
- targetType: 'filePath'
- filePath: eng/common/retain-build.ps1
- pwsh: true
- arguments: >
- -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
- -AzdoProject ${{parameters.AzdoProject}}
- -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
- -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
- displayName: Enable permanent build retention
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- BUILD_ID: $(Build.BuildId)
\ No newline at end of file
+- template: /eng/common/core-templates/steps/retain-build.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/send-to-helix.yml b/eng/common/templates-official/steps/send-to-helix.yml
index 3eb7e2d5..6500f21b 100644
--- a/eng/common/templates-official/steps/send-to-helix.yml
+++ b/eng/common/templates-official/steps/send-to-helix.yml
@@ -1,91 +1,7 @@
-# Please remember to update the documentation if you make changes to these parameters!
-parameters:
- HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
- HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
- HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
- HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
- HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
- HelixConfiguration: '' # optional -- additional property attached to a job
- HelixPreCommands: '' # optional -- commands to run before Helix work item execution
- HelixPostCommands: '' # optional -- commands to run after Helix work item execution
- WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
- WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
- WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
- CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
- XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
- XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
- XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
- XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
- XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
- IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
- DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
- IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
- HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
- Creator: '' # optional -- if the build is external, use this to specify who is sending the job
- DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
- condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
- continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
-
steps:
- - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
- displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
- displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
+- template: /eng/common/core-templates/steps/send-to-helix.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
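The Helix parameters documented in the removed block are unchanged for callers; they are simply declared in the core template now. A sketch of a typical invocation (queue and creator values are illustrative, not taken from this patch):

steps:
- template: /eng/common/templates-official/steps/send-to-helix.yml
  parameters:
    HelixSource: 'pr/default'
    HelixType: 'tests/default/'
    HelixTargetQueues: 'Windows.Amd64.Server2022.Open'  # illustrative queue name
    Creator: 'dotnet-bot'                               # illustrative creator
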
diff --git a/eng/common/templates-official/steps/source-build.yml b/eng/common/templates-official/steps/source-build.yml
index 829f17c3..8f92c49e 100644
--- a/eng/common/templates-official/steps/source-build.yml
+++ b/eng/common/templates-official/steps/source-build.yml
@@ -1,129 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI.
-
- # This is a 'steps' template, and is intended for advanced scenarios where the existing build
- # infra has a careful build methodology that must be followed. For example, a repo
- # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
- # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
- # GitHub. Using this steps template leaves room for that infra to be included.
-
- # Defines the platform on which to run the steps. See 'eng/common/templates-official/job/source-build.yml'
- # for details. The entire object is described in the 'job' template for simplicity, even though
- # the usage of the properties on this object is split between the 'job' and 'steps' templates.
- platform: {}
-
steps:
-# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
-- script: |
- set -x
- df -h
-
- # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
- # In that case, call the feed setup script to add internal feeds corresponding to public ones.
- # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
- # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
- # changes.
- internalRestoreArgs=
- if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
- # Temporarily work around https://github.com/dotnet/arcade/issues/7709
- chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
- $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
- internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
-
- # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
- # This only works if there is a username/email configured, which won't be the case in most CI runs.
- git config --get user.email
- if [ $? -ne 0 ]; then
- git config user.email dn-bot@microsoft.com
- git config user.name dn-bot
- fi
- fi
-
- # If building on the internal project, the internal storage variable may be available (usually only if needed)
- # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
- # in the default public locations.
- internalRuntimeDownloadArgs=
- if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
- internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
- fi
-
- buildConfig=Release
- # Check if AzDO substitutes in a build config from a variable, and use it if so.
- if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
- buildConfig='$(_BuildConfig)'
- fi
-
- officialBuildArgs=
- if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
- officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
- fi
-
- targetRidArgs=
- if [ '${{ parameters.platform.targetRID }}' != '' ]; then
- targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
- fi
-
- runtimeOsArgs=
- if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
- runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
- fi
-
- baseOsArgs=
- if [ '${{ parameters.platform.baseOS }}' != '' ]; then
- baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
- fi
-
- publishArgs=
- if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
- publishArgs='--publish'
- fi
-
- assetManifestFileName=SourceBuild_RidSpecific.xml
- if [ '${{ parameters.platform.name }}' != '' ]; then
- assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
- fi
-
- ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
- --configuration $buildConfig \
- --restore --build --pack $publishArgs -bl \
- $officialBuildArgs \
- $internalRuntimeDownloadArgs \
- $internalRestoreArgs \
- $targetRidArgs \
- $runtimeOsArgs \
- $baseOsArgs \
- /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
- /p:ArcadeBuildFromSource=true \
- /p:AssetManifestFileName=$assetManifestFileName
- displayName: Build
-
-# Upload build logs for diagnosis.
-- task: CopyFiles@2
- displayName: Prepare BuildLogs staging directory
- inputs:
- SourceFolder: '$(Build.SourcesDirectory)'
- Contents: |
- **/*.log
- **/*.binlog
- artifacts/source-build/self/prebuilt-report/**
- TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
- CleanTargetFolder: true
- continueOnError: true
- condition: succeededOrFailed()
-
-- task: 1ES.PublishPipelineArtifact@1
- displayName: Publish BuildLogs
- inputs:
- targetPath: '$(Build.StagingDirectory)/BuildLogs'
- artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
- continueOnError: true
- condition: succeededOrFailed()
+- template: /eng/common/core-templates/steps/source-build.yml
+ parameters:
+ is1ESPipeline: true
-# Manually inject component detection so that we can ignore the source build upstream cache, which contains
-# a nupkg cache of input packages (a local feed).
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
-# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
-- task: ComponentGovernanceComponentDetection@0
- displayName: Component Detection (Exclude upstream cache)
- inputs:
- ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/steps/source-index-stage1-publish.yml b/eng/common/templates-official/steps/source-index-stage1-publish.yml
new file mode 100644
index 00000000..9b8b8094
--- /dev/null
+++ b/eng/common/templates-official/steps/source-index-stage1-publish.yml
@@ -0,0 +1,7 @@
+steps:
+- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates-official/variables/pool-providers.yml b/eng/common/templates-official/variables/pool-providers.yml
index 1f308b24..2cc3ae30 100644
--- a/eng/common/templates-official/variables/pool-providers.yml
+++ b/eng/common/templates-official/variables/pool-providers.yml
@@ -23,7 +23,7 @@
#
# pool:
# name: $(DncEngInternalBuildPool)
-# image: 1es-windows-2022
+# image: windows.vs2026.amd64
variables:
# Coalesce the target and source branches so we know when a PR targets a release branch
diff --git a/eng/common/templates-official/variables/sdl-variables.yml b/eng/common/templates-official/variables/sdl-variables.yml
deleted file mode 100644
index dbdd66d4..00000000
--- a/eng/common/templates-official/variables/sdl-variables.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-variables:
-# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
-# sync with the packages.config file.
-- name: DefaultGuardianVersion
- value: 0.109.0
-- name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
\ No newline at end of file
diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
deleted file mode 100644
index 7870f93b..00000000
--- a/eng/common/templates/job/execute-sdl.yml
+++ /dev/null
@@ -1,139 +0,0 @@
-parameters:
- enable: 'false' # Whether the SDL validation job should execute or not
- overrideParameters: '' # Optional: to override values for parameters.
- additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
- # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
- # diagnosis of problems with specific tool configurations.
- publishGuardianDirectoryToPipeline: false
- # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
- # parameters rather than relying on YAML. It may be better to use a local script, because you can
- # reproduce results locally without piecing together a command based on the YAML.
- executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
- # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
- # 'continueOnError', the parameter value is not correctly picked up.
- # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
- sdlContinueOnError: false # optional: determines whether to continue the build if the step errors;
- # optional: determines if build artifacts should be downloaded.
- downloadArtifacts: true
- # optional: determines if this job should search the directory of downloaded artifacts for
- # 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks.
- extractArchiveArtifacts: false
- dependsOn: '' # Optional: dependencies of the job
- artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts
- # Usage:
- # artifactNames:
- # - 'BlobArtifacts'
- # - 'Artifacts_Windows_NT_Release'
- # Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts,
- # not pipeline artifacts, so doesn't affect the use of this parameter.
- pipelineArtifactNames: []
-
-jobs:
-- job: Run_SDL
- dependsOn: ${{ parameters.dependsOn }}
- displayName: Run SDL tool
- condition: and(succeededOrFailed(), eq( ${{ parameters.enable }}, 'true'))
- variables:
- - group: DotNet-VSTS-Bot
- - name: AzDOProjectName
- value: ${{ parameters.AzDOProjectName }}
- - name: AzDOPipelineId
- value: ${{ parameters.AzDOPipelineId }}
- - name: AzDOBuildId
- value: ${{ parameters.AzDOBuildId }}
- - template: /eng/common/templates/variables/sdl-variables.yml
- - name: GuardianVersion
- value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
- - template: /eng/common/templates/variables/pool-providers.yml
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - checkout: self
- clean: true
-
- # If the template caller didn't provide an AzDO parameter, set them all up as Maestro vars.
- - ${{ if not(and(parameters.AzDOProjectName, parameters.AzDOPipelineId, parameters.AzDOBuildId)) }}:
- - template: /eng/common/templates/post-build/setup-maestro-vars.yml
-
- - ${{ if ne(parameters.downloadArtifacts, 'false')}}:
- - ${{ if ne(parameters.artifactNames, '') }}:
- - ${{ each artifactName in parameters.artifactNames }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Build Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: ${{ artifactName }}
- downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- checkDownloadedFiles: true
- - ${{ if eq(parameters.artifactNames, '') }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Build Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- downloadType: specific files
- itemPattern: "**"
- downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- checkDownloadedFiles: true
-
- - ${{ each artifactName in parameters.pipelineArtifactNames }}:
- - task: DownloadPipelineArtifact@2
- displayName: Download Pipeline Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: ${{ artifactName }}
- downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- checkDownloadedFiles: true
-
- - powershell: eng/common/sdl/trim-assets-version.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts
- displayName: Trim the version from the NuGet packages
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - powershell: eng/common/sdl/extract-artifact-packages.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
- displayName: Extract Blob Artifacts
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - powershell: eng/common/sdl/extract-artifact-packages.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
- displayName: Extract Package Artifacts
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}:
- - powershell: eng/common/sdl/extract-artifact-archives.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts
- displayName: Extract Archive Artifacts
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - template: /eng/common/templates/steps/execute-sdl.yml
- parameters:
- overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
- executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
- overrideParameters: ${{ parameters.overrideParameters }}
- additionalParameters: ${{ parameters.additionalParameters }}
- publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
- sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index 8ec5c4f2..5e261f34 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -1,259 +1,85 @@
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
-parameters:
-# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- cancelTimeoutInMinutes: ''
- condition: ''
- container: ''
- continueOnError: false
- dependsOn: ''
- displayName: ''
- pool: ''
- steps: []
- strategy: ''
- timeoutInMinutes: ''
- variables: []
- workspace: ''
- templateContext: ''
-
-# Job base template specific parameters
- # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
- artifacts: ''
- enableMicrobuild: false
+parameters:
enablePublishBuildArtifacts: false
- enablePublishBuildAssets: false
- enablePublishTestResults: false
- enablePublishUsingPipelines: false
- enableBuildRetry: false
- disableComponentGovernance: ''
- componentGovernanceIgnoreDirectories: ''
- mergeTestResults: false
- testRunTitle: ''
- testResultsFormat: ''
- name: ''
- preSteps: []
runAsPublic: false
-# Sbom related params
- enableSbom: true
- PackageVersion: 7.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+# CG related params, unused now and can eventually be removed
+ disableComponentGovernance: unused
+# Sbom related params, unused now and can eventually be removed
+ enableSbom: unused
+ PackageVersion: unused
+ BuildDropPath: unused
jobs:
-- job: ${{ parameters.name }}
-
- ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
- cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
-
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
-
- ${{ if ne(parameters.container, '') }}:
- container: ${{ parameters.container }}
-
- ${{ if ne(parameters.continueOnError, '') }}:
- continueOnError: ${{ parameters.continueOnError }}
-
- ${{ if ne(parameters.dependsOn, '') }}:
- dependsOn: ${{ parameters.dependsOn }}
-
- ${{ if ne(parameters.displayName, '') }}:
- displayName: ${{ parameters.displayName }}
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
-
- ${{ if ne(parameters.strategy, '') }}:
- strategy: ${{ parameters.strategy }}
-
- ${{ if ne(parameters.timeoutInMinutes, '') }}:
- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
-
- ${{ if ne(parameters.templateContext, '') }}:
- templateContext: ${{ parameters.templateContext }}
-
- variables:
- - ${{ if ne(parameters.enableTelemetry, 'false') }}:
- - name: DOTNET_CLI_TELEMETRY_PROFILE
- value: '$(Build.Repository.Uri)'
- - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- - name: EnableRichCodeNavigation
- value: 'true'
- # Retry signature validation up to three times, waiting 2 seconds between attempts.
- # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
- value: 3,2000
- - ${{ each variable in parameters.variables }}:
- # handle name-value variable syntax
- # example:
- # - name: [key]
- # value: [value]
- - ${{ if ne(variable.name, '') }}:
- - name: ${{ variable.name }}
- value: ${{ variable.value }}
-
- # handle variable groups
- - ${{ if ne(variable.group, '') }}:
- - group: ${{ variable.group }}
-
- # handle template variable syntax
- # example:
- # - template: path/to/template.yml
- # parameters:
- # [key]: [value]
- - ${{ if ne(variable.template, '') }}:
- - template: ${{ variable.template }}
- ${{ if ne(variable.parameters, '') }}:
- parameters: ${{ variable.parameters }}
-
- # handle key-value variable syntax.
- # example:
- # - [key]: [value]
- - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
- - ${{ each pair in variable }}:
- - name: ${{ pair.key }}
- value: ${{ pair.value }}
-
- # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: DotNet-HelixApi-Access
-
- ${{ if ne(parameters.workspace, '') }}:
- workspace: ${{ parameters.workspace }}
-
- steps:
- - ${{ if ne(parameters.preSteps, '') }}:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - task: MicroBuildSigningPlugin@3
- displayName: Install MicroBuild plugin
- inputs:
- signType: $(_SignType)
- zipSources: false
- feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
- env:
- TeamName: $(_TeamName)
- continueOnError: ${{ parameters.continueOnError }}
- condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- - task: NuGetAuthenticate@1
-
- - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- - task: DownloadPipelineArtifact@2
- inputs:
- buildType: current
- artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
- targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
- itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
-
- - ${{ each step in parameters.steps }}:
- - ${{ step }}
-
- - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- - task: RichCodeNavIndexer@0
- displayName: RichCodeNav Upload
- inputs:
- languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }}
- richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
- uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
- continueOnError: true
-
- - template: /eng/common/templates/steps/component-governance.yml
- parameters:
- ${{ if eq(parameters.disableComponentGovernance, '') }}:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
- disableComponentGovernance: false
- ${{ else }}:
- disableComponentGovernance: true
- ${{ else }}:
- disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: MicroBuildCleanup@1
- displayName: Execute Microbuild cleanup tasks
- condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- env:
- TeamName: $(_TeamName)
-
- - ${{ if ne(parameters.artifacts.publish, '') }}:
- - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- - task: CopyFiles@2
- displayName: Gather binaries for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/bin'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- - task: CopyFiles@2
- displayName: Gather packages for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/packages'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- - task: PublishBuildArtifacts@1
- displayName: Publish pipeline artifacts
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
- continueOnError: true
- condition: always()
- - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- - publish: artifacts/log
- artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
- displayName: Publish logs
- continueOnError: true
- condition: always()
-
- - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- - task: PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
- continueOnError: true
- condition: always()
-
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
- - task: PublishTestResults@2
- displayName: Publish XUnit Test Results
- inputs:
- testResultsFormat: 'xUnit'
- testResultsFiles: '*.xml'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
- - task: PublishTestResults@2
- displayName: Publish TRX Test Results
- inputs:
- testResultsFormat: 'VSTest'
- testResultsFiles: '*.trx'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- - template: /eng/common/templates/steps/generate-sbom.yml
- parameters:
- PackageVersion: ${{ parameters.packageVersion}}
- BuildDropPath: ${{ parameters.buildDropPath }}
- IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- - publish: $(Build.SourcesDirectory)\eng\common\BuildConfiguration
- artifact: BuildConfiguration
- displayName: Publish build retry configuration
- continueOnError: true
+- template: /eng/common/core-templates/job/job.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ if and(ne(parameter.key, 'steps'), ne(parameter.key, 'is1ESPipeline')) }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ steps:
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+
+ # we don't run CG in public
+ - ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
+ displayName: Set skipComponentGovernanceDetection variable
+
+ artifactPublishSteps:
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ displayName: Publish pipeline artifacts
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ continueOnError: true
+ condition: succeeded()
+ retryCountOnTaskFailure: 10 # for any files being locked
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ displayName: Publish pipeline artifacts
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}_Attempt$(System.JobAttempt)
+ continueOnError: true
+ condition: not(succeeded())
+ retryCountOnTaskFailure: 10 # for any files being locked
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
+ displayName: 'Publish logs'
+ continueOnError: true
+ condition: always()
+ retryCountOnTaskFailure: 10 # for any files being locked
+
+ - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ displayName: Publish Logs
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
+ continueOnError: true
+ condition: always()
+ retryCountOnTaskFailure: 10 # for any files being locked
+
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ targetPath: '$(System.DefaultWorkingDirectory)\eng\common\BuildConfiguration'
+ artifactName: 'BuildConfiguration'
+ displayName: 'Publish build retry configuration'
+ continueOnError: true
+ retryCountOnTaskFailure: 10 # for any files being locked
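For context, the publish steps above are driven by the `artifacts` parameter supplied by the calling pipeline; when the job does not succeed, the artifact name gets an `_Attempt$(System.JobAttempt)` suffix so retried attempts don't collide, and each upload retries up to 10 times on locked files. A minimal sketch of the parameter shape, with illustrative (not required) artifact names:

artifacts:
  publish:
    artifacts:
      name: Artifacts_Windows_NT_Release    # optional; defaults to Artifacts_$(Agent.Os)_$(_BuildConfig)
    logs:
      name: Logs_Build_Windows_NT_Release   # optional; defaults to Logs_Build_$(Agent.Os)_$(_BuildConfig)
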
diff --git a/eng/common/templates/job/onelocbuild.yml b/eng/common/templates/job/onelocbuild.yml
index 60ab00c4..ff829dc4 100644
--- a/eng/common/templates/job/onelocbuild.yml
+++ b/eng/common/templates/job/onelocbuild.yml
@@ -1,109 +1,7 @@
-parameters:
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: ''
-
- CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
- GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
-
- SourcesDirectory: $(Build.SourcesDirectory)
- CreatePr: true
- AutoCompletePr: false
- ReusePr: true
- UseLfLineEndings: true
- UseCheckedInLocProjectJson: false
- SkipLocProjectJsonGeneration: false
- LanguageSet: VS_Main_Languages
- LclSource: lclFilesInRepo
- LclPackageId: ''
- RepoType: gitHub
- GitHubOrg: dotnet
- MirrorRepo: ''
- MirrorBranch: main
- condition: ''
- JobNameSuffix: ''
-
jobs:
-- job: OneLocBuild${{ parameters.JobNameSuffix }}
-
- dependsOn: ${{ parameters.dependsOn }}
-
- displayName: OneLocBuild${{ parameters.JobNameSuffix }}
-
- variables:
- - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
- - name: _GenerateLocProjectArguments
- value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
- -LanguageSet "${{ parameters.LanguageSet }}"
- -CreateNeutralXlfs
- - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
- - name: _GenerateLocProjectArguments
- value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
- - template: /eng/common/templates/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- - task: Powershell@2
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
- arguments: $(_GenerateLocProjectArguments)
- displayName: Generate LocProject.json
- condition: ${{ parameters.condition }}
-
- - task: OneLocBuild@2
- displayName: OneLocBuild
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- inputs:
- locProj: eng/Localize/LocProject.json
- outDir: $(Build.ArtifactStagingDirectory)
- lclSource: ${{ parameters.LclSource }}
- lclPackageId: ${{ parameters.LclPackageId }}
- isCreatePrSelected: ${{ parameters.CreatePr }}
- isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
- ${{ if eq(parameters.CreatePr, true) }}:
- isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- isShouldReusePrSelected: ${{ parameters.ReusePr }}
- packageSourceAuth: patAuth
- patVariable: ${{ parameters.CeapexPat }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- repoType: ${{ parameters.RepoType }}
- gitHubPatVariable: "${{ parameters.GithubPat }}"
- ${{ if ne(parameters.MirrorRepo, '') }}:
- isMirrorRepoSelected: true
- gitHubOrganization: ${{ parameters.GitHubOrg }}
- mirrorRepo: ${{ parameters.MirrorRepo }}
- mirrorBranch: ${{ parameters.MirrorBranch }}
- condition: ${{ parameters.condition }}
-
- - task: PublishBuildArtifacts@1
- displayName: Publish Localization Files
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
+- template: /eng/common/core-templates/job/onelocbuild.yml
+ parameters:
+ is1ESPipeline: false
- - task: PublishBuildArtifacts@1
- displayName: Publish LocProject.json
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
\ No newline at end of file
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
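The localization knobs documented in the removed block (CreatePr, MirrorRepo, LclPackageId, and so on) are forwarded as-is to the core template by this wrapper. A hedged sketch of a call site, using placeholder values:

jobs:
- template: /eng/common/templates/job/onelocbuild.yml
  parameters:
    MirrorRepo: my-repo                    # placeholder repo name
    MirrorBranch: main
    LclPackageId: 'LCL-JUNO-PROD-MYREPO'   # placeholder loc package id
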
diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml
index cc2b346b..ab2edec2 100644
--- a/eng/common/templates/job/publish-build-assets.yml
+++ b/eng/common/templates/job/publish-build-assets.yml
@@ -1,156 +1,7 @@
-parameters:
- configuration: 'Debug'
-
- # Optional: condition for the job to run
- condition: ''
-
- # Optional: 'true' if future jobs should run even if this job fails
- continueOnError: false
-
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: {}
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishUsingPipelines: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishAssetsImmediately: false
-
- artifactsPublishingAdditionalParameters: ''
-
- signingValidationAdditionalParameters: ''
-
jobs:
-- job: Asset_Registry_Publish
-
- dependsOn: ${{ parameters.dependsOn }}
- timeoutInMinutes: 150
-
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- displayName: Publish Assets
- ${{ else }}:
- displayName: Publish to Build Asset Registry
-
- variables:
- - template: /eng/common/templates/variables/pool-providers.yml
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: Publish-Build-Assets
- - group: AzureDevOps-Artifact-Feeds-Pats
- - name: runCodesignValidationInjection
- value: false
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates/post-build/common-variables.yml
-
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: NetCore1ESPool-Publishing-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download artifact
- inputs:
- artifactName: AssetManifests
- downloadPath: '$(Build.StagingDirectory)/Download'
- checkDownloadedFiles: true
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: NuGetAuthenticate@1
-
- - task: AzureCLI@2
- displayName: Publish Build Assets
- inputs:
- azureSubscription: "Darc: Maestro Production"
- scriptType: ps
- scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1
- arguments: >
- -task PublishBuildAssets -restore -msbuildEngine dotnet
- /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
- /p:MaestroApiEndpoint=https://maestro.dot.net
- /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
- /p:OfficialBuildId=$(Build.BuildNumber)
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: powershell@2
- displayName: Create ReleaseConfigs Artifact
- inputs:
- targetType: inline
- script: |
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)
-
- - task: PublishBuildArtifacts@1
- displayName: Publish ReleaseConfigs Artifact
- inputs:
- PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - task: powershell@2
- displayName: Check if SymbolPublishingExclusionsFile.txt exists
- inputs:
- targetType: inline
- script: |
- $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
- if(Test-Path -Path $symbolExclusionfile)
- {
- Write-Host "SymbolExclusionFile exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
- }
- else{
- Write-Host "Symbols Exclusion file does not exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
- }
-
- - task: PublishBuildArtifacts@1
- displayName: Publish SymbolPublishingExclusionsFile Artifact
- condition: eq(variables['SymbolExclusionFile'], 'true')
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates/post-build/setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: AzureCLI@2
- displayName: Publish Using Darc
- inputs:
- azureSubscription: "Darc: Maestro Production"
- scriptType: ps
- scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion 3
- -AzdoToken '$(System.AccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+- template: /eng/common/core-templates/job/publish-build-assets.yml
+ parameters:
+ is1ESPipeline: false
- - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- - template: /eng/common/templates/steps/publish-logs.yml
- parameters:
- JobLabel: 'Publish_Artifacts_Logs'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/job/source-build.yml b/eng/common/templates/job/source-build.yml
index c0ff472b..e44d47b1 100644
--- a/eng/common/templates/job/source-build.yml
+++ b/eng/common/templates/job/source-build.yml
@@ -1,74 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI. The template produces a server job with a
- # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
-
- # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
- jobNamePrefix: 'Source_Build'
-
- # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
- # managed-only repositories. This is an object with these properties:
- #
- # name: ''
- # The name of the job. This is included in the job ID.
- # targetRID: ''
- # The name of the target RID to use, instead of the one auto-detected by Arcade.
- # nonPortable: false
- # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
- # linux-x64), and compiling against distro-provided packages rather than portable ones.
- # skipPublishValidation: false
- # Disables publishing validation. By default, a check is performed to ensure no packages are
- # published by source-build.
- # container: ''
- # A container to use. Runs in docker.
- # pool: {}
- # A pool to use. Runs directly on an agent.
- # buildScript: ''
- # Specifies the build script to invoke to perform the build in the repo. The default
- # './build.sh' should work for typical Arcade repositories, but this is customizable for
- # difficult situations.
- # jobProperties: {}
- # A list of job properties to inject at the top level, for potential extensibility beyond
- # container and pool.
- platform: {}
-
- # If set to true and running on a non-public project,
- # Internal blob storage locations will be enabled.
- # This is not enabled by default because many repositories do not need internal sources
- # and do not need to have the required service connections approved in the pipeline.
- enableInternalSources: false
-
jobs:
-- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
- displayName: Source-Build (${{ parameters.platform.name }})
-
- ${{ each property in parameters.platform.jobProperties }}:
- ${{ property.key }}: ${{ property.value }}
-
- ${{ if ne(parameters.platform.container, '') }}:
- container: ${{ parameters.platform.container }}
-
- ${{ if eq(parameters.platform.pool, '') }}:
- # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
- # source-build builds run in Docker, including the default managed platform.
- # /eng/common/templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
- demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open
-
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
- demands: ImageOverride -equals Build.Ubuntu.1804.Amd64
-
- ${{ if ne(parameters.platform.pool, '') }}:
- pool: ${{ parameters.platform.pool }}
-
- workspace:
- clean: all
+- template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: false
- steps:
- - ${{ if eq(parameters.enableInternalSources, true) }}:
- - template: /eng/common/templates/steps/enable-internal-runtimes.yml
- - template: /eng/common/templates/steps/source-build.yml
- parameters:
- platform: ${{ parameters.platform }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
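The `platform` object described in the removed header comments is forwarded as-is to the core template. A minimal sketch for a managed-only repo, reusing the old default managed container (values illustrative):

jobs:
- template: /eng/common/templates/job/source-build.yml
  parameters:
    platform:
      name: Managed
      container: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8   # the previous default managed image
      buildScript: './build.sh'    # default build entry point for typical Arcade repos
      nonPortable: false
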
diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml
index 0b6bb89d..89f32915 100644
--- a/eng/common/templates/job/source-index-stage1.yml
+++ b/eng/common/templates/job/source-index-stage1.yml
@@ -1,82 +1,7 @@
-parameters:
- runAsPublic: false
- sourceIndexUploadPackageVersion: 2.0.0-20240502.12
- sourceIndexProcessBinlogPackageVersion: 1.0.1-20240129.2
- sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
- sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
- preSteps: []
- binlogPath: artifacts/log/Debug/Build.binlog
- condition: ''
- dependsOn: ''
- pool: ''
-
jobs:
-- job: SourceIndexStage1
- dependsOn: ${{ parameters.dependsOn }}
- condition: ${{ parameters.condition }}
- variables:
- - name: SourceIndexUploadPackageVersion
- value: ${{ parameters.sourceIndexUploadPackageVersion }}
- - name: SourceIndexProcessBinlogPackageVersion
- value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }}
- - name: SourceIndexPackageSource
- value: ${{ parameters.sourceIndexPackageSource }}
- - name: BinlogPath
- value: ${{ parameters.binlogPath }}
- - template: /eng/common/templates/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $(DncEngPublicBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64.open
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - task: UseDotNet@2
- displayName: Use .NET 8 SDK
- inputs:
- packageType: sdk
- version: 8.0.x
- installationPath: $(Agent.TempDirectory)/dotnet
- workingDirectory: $(Agent.TempDirectory)
-
- - script: |
- $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- displayName: Download Tools
- # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
- workingDirectory: $(Agent.TempDirectory)
-
- - script: ${{ parameters.sourceIndexBuildCommand }}
- displayName: Build Repository
-
- - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
- displayName: Process Binlog into indexable sln
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: AzureCLI@2
- displayName: Get stage 1 auth token
- inputs:
- azureSubscription: 'SourceDotNet Stage1 Publish'
- addSpnToEnvironment: true
- scriptType: 'ps'
- scriptLocation: 'inlineScript'
- inlineScript: |
- echo "##vso[task.setvariable variable=ARM_CLIENT_ID;issecret=true]$env:servicePrincipalId"
- echo "##vso[task.setvariable variable=ARM_ID_TOKEN;issecret=true]$env:idToken"
- echo "##vso[task.setvariable variable=ARM_TENANT_ID;issecret=true]$env:tenantId"
-
- - script: |
- az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN)
- displayName: "Login to Azure"
+- template: /eng/common/core-templates/job/source-index-stage1.yml
+ parameters:
+ is1ESPipeline: false
- - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
- displayName: Upload stage1 artifacts to source index
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/jobs/codeql-build.yml b/eng/common/templates/jobs/codeql-build.yml
deleted file mode 100644
index f7dc5ea4..00000000
--- a/eng/common/templates/jobs/codeql-build.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
-
-jobs:
-- template: /eng/common/templates/jobs/jobs.yml
- parameters:
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishTestResults: false
- enablePublishBuildAssets: false
- enablePublishUsingPipelines: false
- enableTelemetry: true
-
- variables:
- - group: Publish-Build-Assets
- # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
- # sync with the packages.config file.
- - name: DefaultGuardianVersion
- value: 0.109.0
- - name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
- - name: GuardianVersion
- value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
-
- jobs: ${{ parameters.jobs }}
-
diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml
index 289bb239..388e9037 100644
--- a/eng/common/templates/jobs/jobs.yml
+++ b/eng/common/templates/jobs/jobs.yml
@@ -1,97 +1,7 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: Enable publishing using release pipelines
- enablePublishUsingPipelines: false
-
- # Optional: Enable running the source-build jobs to build repo from source
- enableSourceBuild: false
-
- # Optional: Parameters for source-build template.
- # See /eng/common/templates/jobs/source-build.yml for options
- sourceBuildParameters: []
-
- graphFileGeneration:
- # Optional: Enable generating the graph files at the end of the build
- enabled: false
- # Optional: Include toolset dependencies in the generated graph files
- includeToolset: false
-
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
-
- # Optional: Override automatically derived dependsOn value for "publish build assets" job
- publishBuildAssetsDependsOn: ''
-
- # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage.
- publishAssetsImmediately: false
-
- # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
- artifactsPublishingAdditionalParameters: ''
- signingValidationAdditionalParameters: ''
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- enableSourceIndex: false
- sourceIndexParams: {}
-
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
jobs:
-- ${{ each job in parameters.jobs }}:
- - template: ../job/job.yml
- parameters:
- # pass along parameters
- ${{ each parameter in parameters }}:
- ${{ if ne(parameter.key, 'jobs') }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
- # pass along job properties
- ${{ each property in job }}:
- ${{ if ne(property.key, 'job') }}:
- ${{ property.key }}: ${{ property.value }}
-
- name: ${{ job.job }}
-
-- ${{ if eq(parameters.enableSourceBuild, true) }}:
- - template: /eng/common/templates/jobs/source-build.yml
- parameters:
- allCompletedJobId: Source_Build_Complete
- ${{ each parameter in parameters.sourceBuildParameters }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
- - template: ../job/source-index-stage1.yml
- parameters:
- runAsPublic: ${{ parameters.runAsPublic }}
- ${{ each parameter in parameters.sourceIndexParams }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- - template: ../job/publish-build-assets.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- dependsOn:
- - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.jobs }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.enableSourceBuild, true) }}:
- - Source_Build_Complete
+- template: /eng/common/core-templates/jobs/jobs.yml
+ parameters:
+ is1ESPipeline: false
- runAsPublic: ${{ parameters.runAsPublic }}
- publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
- publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
- enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
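Call sites are not expected to change with this thinning: the wrapper now simply forwards every parameter to the core template with is1ESPipeline set to false. A hedged sketch of a typical consumer, with the job name and options illustrative:

stages:
- stage: build
  displayName: Build
  jobs:
  - template: /eng/common/templates/jobs/jobs.yml
    parameters:
      enablePublishBuildArtifacts: true
      enablePublishUsingPipelines: true
      enableSourceBuild: true
      jobs:
      - job: Windows_NT
        pool:
          vmImage: windows-latest    # illustrative; internal builds use the Arcade pool providers instead
        steps:
        - script: eng\common\CIBuild.cmd
          displayName: Build and test
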
diff --git a/eng/common/templates/jobs/source-build.yml b/eng/common/templates/jobs/source-build.yml
index 5f46bfa8..818d4c32 100644
--- a/eng/common/templates/jobs/source-build.yml
+++ b/eng/common/templates/jobs/source-build.yml
@@ -1,54 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI. A job is created for each platform, as
- # well as an optional server job that completes when all platform jobs complete.
-
- # The name of the "join" job for all source-build platforms. If set to empty string, the job is
- # not included. Existing repo pipelines can use this job depend on all source-build jobs
- # completing without maintaining a separate list of every single job ID: just depend on this one
- # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
- allCompletedJobId: ''
-
- # See /eng/common/templates/job/source-build.yml
- jobNamePrefix: 'Source_Build'
-
- # This is the default platform provided by Arcade, intended for use by a managed-only repo.
- defaultManagedPlatform:
- name: 'Managed'
- container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
-
- # Defines the platforms on which to run build jobs. One job is created for each platform, and the
- # object in this array is sent to the job template as 'platform'. If no platforms are specified,
- # one job runs on 'defaultManagedPlatform'.
- platforms: []
-
- # If set to true and running on a non-public project,
- # Internal nuget and blob storage locations will be enabled.
- # This is not enabled by default because many repositories do not need internal sources
- # and do not need to have the required service connections approved in the pipeline.
- enableInternalSources: false
-
jobs:
+- template: /eng/common/core-templates/jobs/source-build.yml
+ parameters:
+ is1ESPipeline: false
-- ${{ if ne(parameters.allCompletedJobId, '') }}:
- - job: ${{ parameters.allCompletedJobId }}
- displayName: Source-Build Complete
- pool: server
- dependsOn:
- - ${{ each platform in parameters.platforms }}:
- - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- - ${{ if eq(length(parameters.platforms), 0) }}:
- - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
-
-- ${{ each platform in parameters.platforms }}:
- - template: /eng/common/templates/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ platform }}
- enableInternalSources: ${{ parameters.enableInternalSources }}
-
-- ${{ if eq(length(parameters.platforms), 0) }}:
- - template: /eng/common/templates/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ parameters.defaultManagedPlatform }}
- enableInternalSources: ${{ parameters.enableInternalSources }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml
index 173914f2..7fa10587 100644
--- a/eng/common/templates/post-build/common-variables.yml
+++ b/eng/common/templates/post-build/common-variables.yml
@@ -1,22 +1,8 @@
variables:
- - group: Publish-Build-Assets
+- template: /eng/common/core-templates/post-build/common-variables.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: false
- # Whether the build is internal or not
- - name: IsInternalBuild
- value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
-
- # Default Maestro++ API Endpoint and API Version
- - name: MaestroApiEndPoint
- value: "https://maestro.dot.net"
- - name: MaestroApiAccessToken
- value: $(MaestroAccessToken)
- - name: MaestroApiVersion
- value: "2020-02-20"
-
- - name: SourceLinkCLIVersion
- value: 3.0.0
- - name: SymbolToolVersion
- value: 1.0.1
-
- - name: runCodesignValidationInjection
- value: false
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml
index c3b6a301..53ede714 100644
--- a/eng/common/templates/post-build/post-build.yml
+++ b/eng/common/templates/post-build/post-build.yml
@@ -1,283 +1,8 @@
-parameters:
- # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
- # Publishing V1 is no longer supported
- # Publishing V2 is no longer supported
- # Publishing V3 is the default
- - name: publishingInfraVersion
- displayName: Which version of publishing should be used to promote the build definition?
- type: number
- default: 3
- values:
- - 3
-
- - name: BARBuildId
- displayName: BAR Build Id
- type: number
- default: 0
-
- - name: PromoteToChannelIds
- displayName: Channel to promote BARBuildId to
- type: string
- default: ''
-
- - name: enableSourceLinkValidation
- displayName: Enable SourceLink validation
- type: boolean
- default: false
-
- - name: enableSigningValidation
- displayName: Enable signing validation
- type: boolean
- default: true
-
- - name: enableSymbolValidation
- displayName: Enable symbol validation
- type: boolean
- default: false
-
- - name: enableNugetValidation
- displayName: Enable NuGet validation
- type: boolean
- default: true
-
- - name: publishInstallersAndChecksums
- displayName: Publish installers and checksums
- type: boolean
- default: true
-
- - name: SDLValidationParameters
- type: object
- default:
- enable: false
- publishGdn: false
- continueOnError: false
- params: ''
- artifactNames: ''
- downloadArtifacts: true
-
- # These parameters let the user customize the call to sdk-task.ps1 for publishing
- # symbols & general artifacts as well as for signing validation
- - name: symbolPublishingAdditionalParameters
- displayName: Symbol publishing additional parameters
- type: string
- default: ''
-
- - name: artifactsPublishingAdditionalParameters
- displayName: Artifact publishing additional parameters
- type: string
- default: ''
-
- - name: signingValidationAdditionalParameters
- displayName: Signing validation additional parameters
- type: string
- default: ''
-
- # Which stages should finish execution before post-build stages start
- - name: validateDependsOn
- type: object
- default:
- - build
-
- - name: publishDependsOn
- type: object
- default:
- - Validate
-
- # Optional: Call asset publishing rather than running in a separate stage
- - name: publishAssetsImmediately
- type: boolean
- default: false
-
stages:
-- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- - stage: Validate
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Validate Build Assets
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates/variables/pool-providers.yml
- jobs:
- - job:
- displayName: NuGet Validation
- condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
- arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
- -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
-
- - job:
- displayName: Signing Validation
- condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
- itemPattern: |
- **
- !**/Microsoft.SourceBuild.Intermediate.*.nupkg
-
- # This is necessary whenever we want to publish/restore to an AzDO private feed
- # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
- # otherwise it'll complain about accessing a private feed.
- - task: NuGetAuthenticate@1
- displayName: 'Authenticate to AzDO Feeds'
-
- # Signing validation will optionally work with the buildmanifest file which is downloaded from
- # Azure DevOps above.
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task SigningValidation -restore -msbuildEngine vs
- /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
- /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
- ${{ parameters.signingValidationAdditionalParameters }}
-
- - template: ../steps/publish-logs.yml
- parameters:
- StageLabel: 'Validation'
- JobLabel: 'Signing'
-
- - job:
- displayName: SourceLink Validation
- condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: BlobArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
- arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
- -ExtractPath $(Agent.BuildDirectory)/Extract/
- -GHRepoName $(Build.Repository.Name)
- -GHCommit $(Build.SourceVersion)
- -SourcelinkCliVersion $(SourceLinkCLIVersion)
- continueOnError: true
-
- - template: /eng/common/templates/job/execute-sdl.yml
- parameters:
- enable: ${{ parameters.SDLValidationParameters.enable }}
- publishGuardianDirectoryToPipeline: ${{ parameters.SDLValidationParameters.publishGdn }}
- additionalParameters: ${{ parameters.SDLValidationParameters.params }}
- continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
- artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
- downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }}
-
-- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- - stage: publish_using_darc
- ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- dependsOn: ${{ parameters.publishDependsOn }}
- ${{ else }}:
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Publish using Darc
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates/variables/pool-providers.yml
- jobs:
- - job:
- displayName: Publish Using Darc
- timeoutInMinutes: 120
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: NetCore1ESPool-Publishing-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: NuGetAuthenticate@1
+- template: /eng/common/core-templates/post-build/post-build.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: false
- - task: AzureCLI@2
- displayName: Publish Using Darc
- inputs:
- azureSubscription: "Darc: Maestro Production"
- scriptType: ps
- scriptLocation: scriptPath
- scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
- -AzdoToken '$(System.AccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
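The parameters documented in the removed block are forwarded unchanged to the core post-build template, so an existing repo call site should keep working. A hedged sketch of enabling the post-build stages, all values illustrative:

stages:
- template: /eng/common/templates/post-build/post-build.yml
  parameters:
    publishingInfraVersion: 3
    enableSigningValidation: true
    enableNugetValidation: true
    enableSourceLinkValidation: false
    SDLValidationParameters:
      enable: false
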
diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml
index 64b9abc6..a79fab5b 100644
--- a/eng/common/templates/post-build/setup-maestro-vars.yml
+++ b/eng/common/templates/post-build/setup-maestro-vars.yml
@@ -1,70 +1,8 @@
-parameters:
- BARBuildId: ''
- PromoteToChannelIds: ''
-
steps:
- - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Release Configs
- inputs:
- buildType: current
- artifactName: ReleaseConfigs
- checkDownloadedFiles: true
-
- - task: AzureCLI@2
- name: setReleaseVars
- displayName: Set Release Configs Vars
- inputs:
- azureSubscription: "Darc: Maestro Production"
- scriptType: pscore
- scriptLocation: inlineScript
- inlineScript: |
- try {
- if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
- $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
-
- $BarId = $Content | Select -Index 0
- $Channels = $Content | Select -Index 1
- $IsStableBuild = $Content | Select -Index 2
-
- $AzureDevOpsProject = $Env:System_TeamProject
- $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
- $AzureDevOpsBuildId = $Env:Build_BuildId
- }
- else {
- . $(Build.SourcesDirectory)\eng\common\tools.ps1
- $darc = Get-Darc
- $buildInfo = & $darc get-build `
- --id ${{ parameters.BARBuildId }} `
- --extended `
- --output-format json `
- --ci `
- | convertFrom-Json
-
- $BarId = ${{ parameters.BARBuildId }}
- $Channels = $Env:PromoteToMaestroChannels -split ","
- $Channels = $Channels -join "]["
- $Channels = "[$Channels]"
-
- $IsStableBuild = $buildInfo.stable
- $AzureDevOpsProject = $buildInfo.azureDevOpsProject
- $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
- $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
- }
-
- Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
- Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
- Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: false
- Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
- Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
- Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
- }
- catch {
- Write-Host $_
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
- exit 1
- }
- env:
- PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates/post-build/trigger-subscription.yml b/eng/common/templates/post-build/trigger-subscription.yml
deleted file mode 100644
index da669030..00000000
--- a/eng/common/templates/post-build/trigger-subscription.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-parameters:
- ChannelId: 0
-
-steps:
-- task: PowerShell@2
- displayName: Triggering subscriptions
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
- arguments: -SourceRepo $(Build.Repository.Uri)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/steps/add-build-to-channel.yml b/eng/common/templates/steps/add-build-to-channel.yml
deleted file mode 100644
index f67a210d..00000000
--- a/eng/common/templates/steps/add-build-to-channel.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-parameters:
- ChannelId: 0
-
-steps:
-- task: PowerShell@2
- displayName: Add Build to Channel
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
- arguments: -BuildId $(BARBuildId)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroApiAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/steps/build-reason.yml b/eng/common/templates/steps/build-reason.yml
deleted file mode 100644
index eba58109..00000000
--- a/eng/common/templates/steps/build-reason.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-# build-reason.yml
-# Description: runs steps if build.reason condition is valid. conditions is a string of valid build reasons
-# to include steps (',' separated).
-parameters:
- conditions: ''
- steps: []
-
-steps:
- - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
- - ${{ parameters.steps }}
- - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
- - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/component-governance.yml b/eng/common/templates/steps/component-governance.yml
deleted file mode 100644
index cbba0596..00000000
--- a/eng/common/templates/steps/component-governance.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-parameters:
- disableComponentGovernance: false
- componentGovernanceIgnoreDirectories: ''
-
-steps:
-- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
- displayName: Set skipComponentGovernanceDetection variable
-- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- - task: ComponentGovernanceComponentDetection@0
- continueOnError: true
- inputs:
- ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/enable-internal-runtimes.yml b/eng/common/templates/steps/enable-internal-runtimes.yml
index 54dc9416..b21a8038 100644
--- a/eng/common/templates/steps/enable-internal-runtimes.yml
+++ b/eng/common/templates/steps/enable-internal-runtimes.yml
@@ -1,28 +1,10 @@
# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64'
# variable with the base64-encoded SAS token, by default
-parameters:
-- name: federatedServiceConnection
- type: string
- default: 'dotnetbuilds-internal-read'
-- name: outputVariableName
- type: string
- default: 'dotnetbuilds-internal-container-read-token-base64'
-- name: expiryInHours
- type: number
- default: 1
-- name: base64Encode
- type: boolean
- default: true
-
steps:
-- ${{ if ne(variables['System.TeamProject'], 'public') }}:
- - template: /eng/common/templates/steps/get-delegation-sas.yml
- parameters:
- federatedServiceConnection: ${{ parameters.federatedServiceConnection }}
- outputVariableName: ${{ parameters.outputVariableName }}
- expiryInHours: ${{ parameters.expiryInHours }}
- base64Encode: ${{ parameters.base64Encode }}
- storageAccount: dotnetbuilds
- container: internal
- permissions: rl
+- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
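
As the retained header comment notes, the wrapper still ends up populating 'dotnetbuilds-internal-container-read-token-base64' (via get-delegation-sas) with a base64-encoded SAS token. A hedged sketch of a consuming job that relies on the defaults from the removed parameter block; the build invocation itself is illustrative:

steps:
- template: /eng/common/templates/steps/enable-internal-runtimes.yml
- script: >-
    ./build.sh --ci
    --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal
    --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)
  displayName: Build with internal runtimes (illustrative)
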
diff --git a/eng/common/templates/steps/enable-internal-sources.yml b/eng/common/templates/steps/enable-internal-sources.yml
new file mode 100644
index 00000000..5f87e9ab
--- /dev/null
+++ b/eng/common/templates/steps/enable-internal-sources.yml
@@ -0,0 +1,7 @@
+steps:
+- template: /eng/common/core-templates/steps/enable-internal-sources.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/execute-codeql.yml b/eng/common/templates/steps/execute-codeql.yml
deleted file mode 100644
index 3930b163..00000000
--- a/eng/common/templates/steps/execute-codeql.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-parameters:
- # Language that should be analyzed. Defaults to csharp
- language: csharp
- # Build Commands
- buildCommands: ''
- overrideParameters: '' # Optional: to override values for parameters.
- additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
- # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
- # diagnosis of problems with specific tool configurations.
- publishGuardianDirectoryToPipeline: false
- # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
- # parameters rather than relying on YAML. It may be better to use a local script, because you can
- # reproduce results locally without piecing together a command based on the YAML.
- executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
- # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
- # 'continueOnError', the parameter value is not correctly picked up.
- # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
- # optional: determines whether to continue the build if the step errors;
- sdlContinueOnError: false
-
-steps:
-- template: /eng/common/templates/steps/execute-sdl.yml
- parameters:
- overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
- executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
- overrideParameters: ${{ parameters.overrideParameters }}
- additionalParameters: '${{ parameters.additionalParameters }}
- -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")'
- publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
- sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/execute-sdl.yml b/eng/common/templates/steps/execute-sdl.yml
deleted file mode 100644
index fe0ebf8c..00000000
--- a/eng/common/templates/steps/execute-sdl.yml
+++ /dev/null
@@ -1,89 +0,0 @@
-parameters:
- overrideGuardianVersion: ''
- executeAllSdlToolsScript: ''
- overrideParameters: ''
- additionalParameters: ''
- publishGuardianDirectoryToPipeline: false
- sdlContinueOnError: false
- condition: ''
-
-steps:
-- task: NuGetAuthenticate@1
-
-- task: NuGetToolInstaller@1
- displayName: 'Install NuGet.exe'
-
-- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
- - pwsh: |
- Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
- . .\sdl.ps1
- $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }}
- Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
- displayName: Install Guardian (Overridden)
-
-- ${{ if eq(parameters.overrideGuardianVersion, '') }}:
- - pwsh: |
- Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
- . .\sdl.ps1
- $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts
- Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
- displayName: Install Guardian
-
-- ${{ if ne(parameters.overrideParameters, '') }}:
- - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
- displayName: Execute SDL (Overridden)
- continueOnError: ${{ parameters.sdlContinueOnError }}
- condition: ${{ parameters.condition }}
- env:
- GUARDIAN_DEFAULT_PACKAGE_SOURCE_SECRET: $(System.AccessToken)
-
-- ${{ if eq(parameters.overrideParameters, '') }}:
- - powershell: ${{ parameters.executeAllSdlToolsScript }}
- -GuardianCliLocation $(GuardianCliLocation)
- -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
- ${{ parameters.additionalParameters }}
- displayName: Execute SDL
- continueOnError: ${{ parameters.sdlContinueOnError }}
- condition: ${{ parameters.condition }}
- env:
- GUARDIAN_DEFAULT_PACKAGE_SOURCE_SECRET: $(System.AccessToken)
-
-- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}:
- # We want to publish the Guardian results and configuration for easy diagnosis. However, the
- # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default
- # tooling files. Some of these files are large and aren't useful during an investigation, so
- # exclude them by simply deleting them before publishing. (As of writing, there is no documented
- # way to selectively exclude a dir from the pipeline artifact publish task.)
- - task: DeleteFiles@1
- displayName: Delete Guardian dependencies to avoid uploading
- inputs:
- SourceFolder: $(Agent.BuildDirectory)/.gdn
- Contents: |
- c
- i
- condition: succeededOrFailed()
-
- - publish: $(Agent.BuildDirectory)/.gdn
- artifact: GuardianConfiguration
- displayName: Publish GuardianConfiguration
- condition: succeededOrFailed()
-
- # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration
- # with the "SARIF SAST Scans Tab" Azure DevOps extension
- - task: CopyFiles@2
- displayName: Copy SARIF files
- inputs:
- flattenFolders: true
- sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/
- contents: '**/*.sarif'
- targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs
- condition: succeededOrFailed()
-
- # Use PublishBuildArtifacts because the SARIF extension only checks this case
- # see microsoft/sarif-azuredevops-extension#4
- - task: PublishBuildArtifacts@1
- displayName: Publish SARIF files to CodeAnalysisLogs container
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs
- artifactName: CodeAnalysisLogs
- condition: succeededOrFailed()
\ No newline at end of file
diff --git a/eng/common/templates/steps/generate-sbom.yml b/eng/common/templates/steps/generate-sbom.yml
index 2b21eae4..26dc00a2 100644
--- a/eng/common/templates/steps/generate-sbom.yml
+++ b/eng/common/templates/steps/generate-sbom.yml
@@ -1,48 +1,7 @@
-# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
-# PackageName - The name of the package this SBOM represents.
-# PackageVersion - The version of the package this SBOM represents.
-# ManifestDirPath - The path of the directory where the generated manifest files will be placed
-# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
-
-parameters:
- PackageVersion: 8.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
- PackageName: '.NET'
- ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
- IgnoreDirectories: ''
- sbomContinueOnError: true
-
steps:
-- task: PowerShell@2
- displayName: Prep for SBOM generation in (Non-linux)
- condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
- inputs:
- filePath: ./eng/common/generate-sbom-prep.ps1
- arguments: ${{parameters.manifestDirPath}}
-
-# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
-- script: |
- chmod +x ./eng/common/generate-sbom-prep.sh
- ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
- displayName: Prep for SBOM generation in (Linux)
- condition: eq(variables['Agent.Os'], 'Linux')
- continueOnError: ${{ parameters.sbomContinueOnError }}
-
-- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
- displayName: 'Generate SBOM manifest'
- continueOnError: ${{ parameters.sbomContinueOnError }}
- inputs:
- PackageName: ${{ parameters.packageName }}
- BuildDropPath: ${{ parameters.buildDropPath }}
- PackageVersion: ${{ parameters.packageVersion }}
- ManifestDirPath: ${{ parameters.manifestDirPath }}
- ${{ if ne(parameters.IgnoreDirectories, '') }}:
- AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
-
-- task: PublishPipelineArtifact@1
- displayName: Publish SBOM manifest
- continueOnError: ${{parameters.sbomContinueOnError}}
- inputs:
- targetPath: '${{parameters.manifestDirPath}}'
- artifactName: $(ARTIFACT_NAME)
+- template: /eng/common/core-templates/steps/generate-sbom.yml
+ parameters:
+ is1ESPipeline: false
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
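
The parameter surface documented in the removed header (PackageName, PackageVersion, BuildDropPath, ManifestDirPath, IgnoreDirectories, sbomContinueOnError) is unchanged and simply forwarded. A hypothetical caller overriding a couple of defaults:

steps:
- template: /eng/common/templates/steps/generate-sbom.yml
  parameters:
    PackageName: '.NET'
    PackageVersion: 9.0.0                                         # illustrative version
    BuildDropPath: $(Build.SourcesDirectory)/artifacts
    IgnoreDirectories: $(Build.SourcesDirectory)/artifacts/obj    # illustrative path
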
diff --git a/eng/common/templates/steps/get-delegation-sas.yml b/eng/common/templates/steps/get-delegation-sas.yml
index c0e8f913..83760c97 100644
--- a/eng/common/templates/steps/get-delegation-sas.yml
+++ b/eng/common/templates/steps/get-delegation-sas.yml
@@ -1,43 +1,7 @@
-parameters:
-- name: federatedServiceConnection
- type: string
-- name: outputVariableName
- type: string
-- name: expiryInHours
- type: number
- default: 1
-- name: base64Encode
- type: boolean
- default: false
-- name: storageAccount
- type: string
-- name: container
- type: string
-- name: permissions
- type: string
- default: 'rl'
-
steps:
-- task: AzureCLI@2
- displayName: 'Generate delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}'
- inputs:
- azureSubscription: ${{ parameters.federatedServiceConnection }}
- scriptType: 'pscore'
- scriptLocation: 'inlineScript'
- inlineScript: |
- # Calculate the expiration of the SAS token and convert to UTC
- $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
-
- $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
-
- if ($LASTEXITCODE -ne 0) {
- Write-Error "Failed to generate SAS token."
- exit 1
- }
-
- if ('${{ parameters.base64Encode }}' -eq 'true') {
- $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas))
- }
+- template: /eng/common/core-templates/steps/get-delegation-sas.yml
+ parameters:
+ is1ESPipeline: false
- Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
- Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas"
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
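
The removed AzureCLI step and its parameters now live in the core template; callers are unaffected. A sketch reusing the storage values from the removed defaults, with a hypothetical output variable name:

steps:
- template: /eng/common/templates/steps/get-delegation-sas.yml
  parameters:
    federatedServiceConnection: 'dotnetbuilds-internal-read'
    outputVariableName: 'internal-container-sas'   # hypothetical
    storageAccount: dotnetbuilds
    container: internal
    permissions: rl
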
diff --git a/eng/common/templates/steps/get-federated-access-token.yml b/eng/common/templates/steps/get-federated-access-token.yml
index 55e33bd3..31e151d9 100644
--- a/eng/common/templates/steps/get-federated-access-token.yml
+++ b/eng/common/templates/steps/get-federated-access-token.yml
@@ -1,40 +1,7 @@
-parameters:
-- name: federatedServiceConnection
- type: string
-- name: outputVariableName
- type: string
-- name: stepName
- type: string
- default: 'getFederatedAccessToken'
-- name: condition
- type: string
- default: ''
-# Resource to get a token for. Common values include:
-# - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps
-# - 'https://storage.azure.com/' for storage
-# Defaults to Azure DevOps
-- name: resource
- type: string
- default: '499b84ac-1321-427f-aa17-267ca6975798'
-- name: isStepOutputVariable
- type: boolean
- default: false
-
steps:
-- task: AzureCLI@2
- displayName: 'Getting federated access token for feeds'
- name: ${{ parameters.stepName }}
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
- inputs:
- azureSubscription: ${{ parameters.federatedServiceConnection }}
- scriptType: 'pscore'
- scriptLocation: 'inlineScript'
- inlineScript: |
- $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv
- if ($LASTEXITCODE -ne 0) {
- Write-Error "Failed to get access token for resource '${{ parameters.resource }}'"
- exit 1
- }
- Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
- Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true;isOutput=${{ parameters.isStepOutputVariable }}]$accessToken"
\ No newline at end of file
+- template: /eng/common/core-templates/steps/get-federated-access-token.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
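
Per the removed parameter comments, resource defaults to the Azure DevOps application ID and can be pointed at 'https://storage.azure.com/' for storage tokens. A hypothetical usage; the service connection and variable names are placeholders:

steps:
- template: /eng/common/templates/steps/get-federated-access-token.yml
  parameters:
    federatedServiceConnection: 'my-federated-connection'   # placeholder
    outputVariableName: 'federated-access-token'            # placeholder
    resource: 'https://storage.azure.com/'
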
diff --git a/eng/common/templates/steps/publish-build-artifacts.yml b/eng/common/templates/steps/publish-build-artifacts.yml
new file mode 100644
index 00000000..605e602e
--- /dev/null
+++ b/eng/common/templates/steps/publish-build-artifacts.yml
@@ -0,0 +1,46 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+- name: displayName
+ type: string
+ default: 'Publish to Build Artifact'
+
+- name: condition
+ type: string
+ default: succeeded()
+
+- name: artifactName
+ type: string
+
+- name: pathToPublish
+ type: string
+
+- name: continueOnError
+ type: boolean
+ default: false
+
+- name: publishLocation
+ type: string
+ default: 'Container'
+
+- name: retryCountOnTaskFailure
+ type: string
+ default: 10
+
+steps:
+- ${{ if eq(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates cannot be referenced from a 1ES managed template': error
+- task: PublishBuildArtifacts@1
+ displayName: ${{ parameters.displayName }}
+ condition: ${{ parameters.condition }}
+ ${{ if parameters.continueOnError }}:
+ continueOnError: ${{ parameters.continueOnError }}
+ inputs:
+ PublishLocation: ${{ parameters.publishLocation }}
+ PathtoPublish: ${{ parameters.pathToPublish }}
+ ${{ if parameters.artifactName }}:
+ ArtifactName: ${{ parameters.artifactName }}
+ ${{ if parameters.retryCountOnTaskFailure }}:
+ retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }}
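
This new wrapper fails by design when referenced from a 1ES pipeline (the error-mapping line above) and otherwise forwards its inputs to PublishBuildArtifacts@1. A hypothetical call; the artifact name and path are placeholders:

steps:
- template: /eng/common/templates/steps/publish-build-artifacts.yml
  parameters:
    displayName: Publish build logs
    pathToPublish: $(Build.ArtifactStagingDirectory)/logs   # placeholder
    artifactName: BuildLogs                                  # placeholder
    condition: always()
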
diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml
index 88f238f3..4ea86bd8 100644
--- a/eng/common/templates/steps/publish-logs.yml
+++ b/eng/common/templates/steps/publish-logs.yml
@@ -1,23 +1,7 @@
-parameters:
- StageLabel: ''
- JobLabel: ''
-
steps:
-- task: Powershell@2
- displayName: Prepare Binlogs to Upload
- inputs:
- targetType: inline
- script: |
- New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- continueOnError: true
- condition: always()
+- template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: false
-- task: PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
- PublishLocation: Container
- ArtifactName: PostBuildLogs
- continueOnError: true
- condition: always()
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/publish-pipeline-artifacts.yml b/eng/common/templates/steps/publish-pipeline-artifacts.yml
new file mode 100644
index 00000000..5dd698b2
--- /dev/null
+++ b/eng/common/templates/steps/publish-pipeline-artifacts.yml
@@ -0,0 +1,34 @@
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+- name: args
+ type: object
+ default: {}
+
+steps:
+- ${{ if eq(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates cannot be referenced from a 1ES managed template': error
+- task: PublishPipelineArtifact@1
+ displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
+ ${{ if parameters.args.condition }}:
+ condition: ${{ parameters.args.condition }}
+ ${{ else }}:
+ condition: succeeded()
+ ${{ if parameters.args.continueOnError }}:
+ continueOnError: ${{ parameters.args.continueOnError }}
+ inputs:
+ targetPath: ${{ parameters.args.targetPath }}
+ ${{ if parameters.args.artifactName }}:
+ artifactName: ${{ parameters.args.artifactName }}
+ ${{ if parameters.args.publishLocation }}:
+ publishLocation: ${{ parameters.args.publishLocation }}
+ ${{ if parameters.args.fileSharePath }}:
+ fileSharePath: ${{ parameters.args.fileSharePath }}
+ ${{ if parameters.args.Parallel }}:
+ parallel: ${{ parameters.args.Parallel }}
+ ${{ if parameters.args.parallelCount }}:
+ parallelCount: ${{ parameters.args.parallelCount }}
+ ${{ if parameters.args.properties }}:
+ properties: ${{ parameters.args.properties }}
\ No newline at end of file
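
Unlike publish-build-artifacts.yml, this template takes a single args object, so additional PublishPipelineArtifact@1 inputs can be forwarded without declaring a parameter for each one. A hypothetical call; paths and names are placeholders:

steps:
- template: /eng/common/templates/steps/publish-pipeline-artifacts.yml
  parameters:
    args:
      targetPath: $(Build.ArtifactStagingDirectory)/logs   # placeholder
      artifactName: PipelineLogs                            # placeholder
      displayName: Publish pipeline logs
      condition: always()
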
diff --git a/eng/common/templates/steps/retain-build.yml b/eng/common/templates/steps/retain-build.yml
index 83d97a26..8e841ace 100644
--- a/eng/common/templates/steps/retain-build.yml
+++ b/eng/common/templates/steps/retain-build.yml
@@ -1,28 +1,7 @@
-parameters:
- # Optional azure devops PAT with build execute permissions for the build's organization,
- # only needed if the build that should be retained ran on a different organization than
- # the pipeline where this template is executing from
- Token: ''
- # Optional BuildId to retain, defaults to the current running build
- BuildId: ''
- # Azure devops Organization URI for the build in the https://dev.azure.com/ format.
- # Defaults to the organization the current pipeline is running on
- AzdoOrgUri: '$(System.CollectionUri)'
- # Azure devops project for the build. Defaults to the project the current pipeline is running on
- AzdoProject: '$(System.TeamProject)'
-
steps:
- - task: powershell@2
- inputs:
- targetType: 'filePath'
- filePath: eng/common/retain-build.ps1
- pwsh: true
- arguments: >
- -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
- -AzdoProject ${{parameters.AzdoProject}}
- -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
- -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
- displayName: Enable permanent build retention
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- BUILD_ID: $(Build.BuildId)
\ No newline at end of file
+- template: /eng/common/core-templates/steps/retain-build.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/run-on-unix.yml b/eng/common/templates/steps/run-on-unix.yml
deleted file mode 100644
index e1733814..00000000
--- a/eng/common/templates/steps/run-on-unix.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-parameters:
- agentOs: ''
- steps: []
-
-steps:
-- ${{ if ne(parameters.agentOs, 'Windows_NT') }}:
- - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/run-on-windows.yml b/eng/common/templates/steps/run-on-windows.yml
deleted file mode 100644
index 73e7e9c2..00000000
--- a/eng/common/templates/steps/run-on-windows.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-parameters:
- agentOs: ''
- steps: []
-
-steps:
-- ${{ if eq(parameters.agentOs, 'Windows_NT') }}:
- - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/run-script-ifequalelse.yml b/eng/common/templates/steps/run-script-ifequalelse.yml
deleted file mode 100644
index 3d1242f5..00000000
--- a/eng/common/templates/steps/run-script-ifequalelse.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-parameters:
- # if parameter1 equals parameter 2, run 'ifScript' command, else run 'elsescript' command
- parameter1: ''
- parameter2: ''
- ifScript: ''
- elseScript: ''
-
- # name of script step
- name: Script
-
- # display name of script step
- displayName: If-Equal-Else Script
-
- # environment
- env: {}
-
- # conditional expression for step execution
- condition: ''
-
-steps:
-- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}:
- - script: ${{ parameters.ifScript }}
- name: ${{ parameters.name }}
- displayName: ${{ parameters.displayName }}
- env: ${{ parameters.env }}
- condition: ${{ parameters.condition }}
-
-- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}:
- - script: ${{ parameters.elseScript }}
- name: ${{ parameters.name }}
- displayName: ${{ parameters.displayName }}
- env: ${{ parameters.env }}
- condition: ${{ parameters.condition }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml
index 3eb7e2d5..39f99fc2 100644
--- a/eng/common/templates/steps/send-to-helix.yml
+++ b/eng/common/templates/steps/send-to-helix.yml
@@ -1,91 +1,7 @@
-# Please remember to update the documentation if you make changes to these parameters!
-parameters:
- HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
- HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
- HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
- HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
- HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
- HelixConfiguration: '' # optional -- additional property attached to a job
- HelixPreCommands: '' # optional -- commands to run before Helix work item execution
- HelixPostCommands: '' # optional -- commands to run after Helix work item execution
- WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
- WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
- WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
- CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
- XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
- XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
- XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
- XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
- XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
- IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
- DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
- IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
- HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
- Creator: '' # optional -- if the build is external, use this to specify who is sending the job
- DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
- condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
- continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
-
steps:
- - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
- displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
- displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
+- template: /eng/common/core-templates/steps/send-to-helix.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
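
The parameter list documented in the removed block still applies; the wrapper only injects is1ESPipeline and forwards everything else. A hedged example with a few commonly used parameters; the queue, creator, and CLI version are placeholders:

steps:
- template: /eng/common/templates/steps/send-to-helix.yml
  parameters:
    HelixSource: pr/dotnet/example/main         # placeholder; must start with pr/, official/, prodcon/, or agent/
    HelixType: test/product/
    HelixTargetQueues: Ubuntu.2204.Amd64.Open   # placeholder queue
    Creator: example-user                       # placeholder
    IncludeDotNetCli: true
    DotNetCliPackageType: sdk
    DotNetCliVersion: 8.0.100                   # placeholder
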
diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml
index 41bbb915..23c1d6f4 100644
--- a/eng/common/templates/steps/source-build.yml
+++ b/eng/common/templates/steps/source-build.yml
@@ -1,129 +1,7 @@
-parameters:
- # This template adds arcade-powered source-build to CI.
-
- # This is a 'steps' template, and is intended for advanced scenarios where the existing build
- # infra has a careful build methodology that must be followed. For example, a repo
- # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
- # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
- # GitHub. Using this steps template leaves room for that infra to be included.
-
- # Defines the platform on which to run the steps. See 'eng/common/templates/job/source-build.yml'
- # for details. The entire object is described in the 'job' template for simplicity, even though
- # the usage of the properties on this object is split between the 'job' and 'steps' templates.
- platform: {}
-
steps:
-# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
-- script: |
- set -x
- df -h
-
- # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
- # In that case, call the feed setup script to add internal feeds corresponding to public ones.
- # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
- # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
- # changes.
- internalRestoreArgs=
- if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
- # Temporarily work around https://github.com/dotnet/arcade/issues/7709
- chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
- $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
- internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
-
- # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
- # This only works if there is a username/email configured, which won't be the case in most CI runs.
- git config --get user.email
- if [ $? -ne 0 ]; then
- git config user.email dn-bot@microsoft.com
- git config user.name dn-bot
- fi
- fi
-
- # If building on the internal project, the internal storage variable may be available (usually only if needed)
- # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
- # in the default public locations.
- internalRuntimeDownloadArgs=
- if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
- internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
- fi
-
- buildConfig=Release
- # Check if AzDO substitutes in a build config from a variable, and use it if so.
- if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
- buildConfig='$(_BuildConfig)'
- fi
-
- officialBuildArgs=
- if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
- officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
- fi
-
- targetRidArgs=
- if [ '${{ parameters.platform.targetRID }}' != '' ]; then
- targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
- fi
-
- runtimeOsArgs=
- if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
- runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
- fi
-
- baseOsArgs=
- if [ '${{ parameters.platform.baseOS }}' != '' ]; then
- baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
- fi
-
- publishArgs=
- if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
- publishArgs='--publish'
- fi
-
- assetManifestFileName=SourceBuild_RidSpecific.xml
- if [ '${{ parameters.platform.name }}' != '' ]; then
- assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
- fi
-
- ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
- --configuration $buildConfig \
- --restore --build --pack $publishArgs -bl \
- $officialBuildArgs \
- $internalRuntimeDownloadArgs \
- $internalRestoreArgs \
- $targetRidArgs \
- $runtimeOsArgs \
- $baseOsArgs \
- /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
- /p:ArcadeBuildFromSource=true \
- /p:AssetManifestFileName=$assetManifestFileName
- displayName: Build
-
-# Upload build logs for diagnosis.
-- task: CopyFiles@2
- displayName: Prepare BuildLogs staging directory
- inputs:
- SourceFolder: '$(Build.SourcesDirectory)'
- Contents: |
- **/*.log
- **/*.binlog
- artifacts/source-build/self/prebuilt-report/**
- TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
- CleanTargetFolder: true
- continueOnError: true
- condition: succeededOrFailed()
-
-- task: PublishPipelineArtifact@1
- displayName: Publish BuildLogs
- inputs:
- targetPath: '$(Build.StagingDirectory)/BuildLogs'
- artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
- continueOnError: true
- condition: succeededOrFailed()
+- template: /eng/common/core-templates/steps/source-build.yml
+ parameters:
+ is1ESPipeline: false
-# Manually inject component detection so that we can ignore the source build upstream cache, which contains
-# a nupkg cache of input packages (a local feed).
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
-# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
-- task: ComponentGovernanceComponentDetection@0
- displayName: Component Detection (Exclude upstream cache)
- inputs:
- ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/source-index-stage1-publish.yml b/eng/common/templates/steps/source-index-stage1-publish.yml
new file mode 100644
index 00000000..182cec33
--- /dev/null
+++ b/eng/common/templates/steps/source-index-stage1-publish.yml
@@ -0,0 +1,7 @@
+steps:
+- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/telemetry-end.yml b/eng/common/templates/steps/telemetry-end.yml
deleted file mode 100644
index fadc04ca..00000000
--- a/eng/common/templates/steps/telemetry-end.yml
+++ /dev/null
@@ -1,102 +0,0 @@
-parameters:
- maxRetries: 5
- retryDelay: 10 # in seconds
-
-steps:
-- bash: |
- if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then
- errorCount=0
- else
- errorCount=1
- fi
- warningCount=0
-
- curlStatus=1
- retryCount=0
- # retry loop to harden against spotty telemetry connections
- # we don't retry successes and 4xx client errors
- until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
- do
- if [ $retryCount -gt 0 ]; then
- echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
- sleep $RetryDelay
- fi
-
- # create a temporary file for curl output
- res=`mktemp`
-
- curlResult=`
- curl --verbose --output $res --write-out "%{http_code}"\
- -H 'Content-Type: application/json' \
- -H "X-Helix-Job-Token: $Helix_JobToken" \
- -H 'Content-Length: 0' \
- -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \
- --data-urlencode "errorCount=$errorCount" \
- --data-urlencode "warningCount=$warningCount"`
- curlStatus=$?
-
- if [ $curlStatus -eq 0 ]; then
- if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
- curlStatus=$curlResult
- fi
- fi
-
- let retryCount++
- done
-
- if [ $curlStatus -ne 0 ]; then
- echo "Failed to Send Build Finish information after $retryCount retries"
- vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus"
- echo "##$vstsLogOutput"
- exit 1
- fi
- displayName: Send Unix Build End Telemetry
- env:
- # defined via VSTS variables in start-job.sh
- Helix_JobToken: $(Helix_JobToken)
- Helix_WorkItemId: $(Helix_WorkItemId)
- MaxRetries: ${{ parameters.maxRetries }}
- RetryDelay: ${{ parameters.retryDelay }}
- condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
-- powershell: |
- if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) {
- $ErrorCount = 0
- } else {
- $ErrorCount = 1
- }
- $WarningCount = 0
-
- # Basic retry loop to harden against server flakiness
- $retryCount = 0
- while ($retryCount -lt $env:MaxRetries) {
- try {
- Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" `
- -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
- break
- }
- catch {
- $statusCode = $_.Exception.Response.StatusCode.value__
- if ($statusCode -ge 400 -and $statusCode -le 499) {
- Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
- Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
- exit 1
- }
- Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
- $retryCount++
- sleep $env:RetryDelay
- continue
- }
- }
-
- if ($retryCount -ge $env:MaxRetries) {
- Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
- exit 1
- }
- displayName: Send Windows Build End Telemetry
- env:
- # defined via VSTS variables in start-job.ps1
- Helix_JobToken: $(Helix_JobToken)
- Helix_WorkItemId: $(Helix_WorkItemId)
- MaxRetries: ${{ parameters.maxRetries }}
- RetryDelay: ${{ parameters.retryDelay }}
- condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT'))
diff --git a/eng/common/templates/steps/telemetry-start.yml b/eng/common/templates/steps/telemetry-start.yml
deleted file mode 100644
index 6abbcb33..00000000
--- a/eng/common/templates/steps/telemetry-start.yml
+++ /dev/null
@@ -1,241 +0,0 @@
-parameters:
- helixSource: 'undefined_defaulted_in_telemetry.yml'
- helixType: 'undefined_defaulted_in_telemetry.yml'
- buildConfig: ''
- runAsPublic: false
- maxRetries: 5
- retryDelay: 10 # in seconds
-
-steps:
-- ${{ if and(eq(parameters.runAsPublic, 'false'), not(eq(variables['System.TeamProject'], 'public'))) }}:
- - task: AzureKeyVault@2
- inputs:
- azureSubscription: 'HelixProd_KeyVault'
- KeyVaultName: HelixProdKV
- SecretsFilter: 'HelixApiAccessToken'
- condition: always()
-- bash: |
- # create a temporary file
- jobInfo=`mktemp`
-
- # write job info content to temporary file
-      cat > $jobInfo <
diff --git a/eng/common/templates/steps/vmr-sync.yml b/eng/common/templates/steps/vmr-sync.yml
new file mode 100644
--- /dev/null
+++ b/eng/common/templates/steps/vmr-sync.yml
+parameters:
+- name: targetRef
+  displayName: Target revision in dotnet/<repo> to synchronize
+ type: string
+ default: $(Build.SourceVersion)
+
+- name: vmrPath
+ displayName: Path where the dotnet/dotnet is checked out to
+ type: string
+ default: $(Agent.BuildDirectory)/vmr
+
+- name: additionalSyncs
+ displayName: Optional list of package names whose repo's source will also be synchronized in the local VMR, e.g. NuGet.Protocol
+ type: object
+ default: []
+
+steps:
+- checkout: vmr
+ displayName: Clone dotnet/dotnet
+ path: vmr
+ clean: true
+
+- checkout: self
+ displayName: Clone $(Build.Repository.Name)
+ path: repo
+ fetchDepth: 0
+
+# This step is needed so that when we get a detached HEAD / shallow clone,
+# we still pull the commit into the temporary repo clone to use it during the sync.
+# Also unshallow the clone so that the forward-flow command works.
+- script: |
+ git branch repo-head
+ git rev-parse HEAD
+ displayName: Label PR commit
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+- script: |
+ git config --global user.name "dotnet-maestro[bot]"
+ git config --global user.email "dotnet-maestro[bot]@users.noreply.github.com"
+ displayName: Set git author to dotnet-maestro[bot]
+ workingDirectory: ${{ parameters.vmrPath }}
+
+- script: |
+ ./eng/common/vmr-sync.sh \
+ --vmr ${{ parameters.vmrPath }} \
+ --tmp $(Agent.TempDirectory) \
+ --azdev-pat '$(dn-bot-all-orgs-code-r)' \
+ --ci \
+ --debug
+
+ if [ "$?" -ne 0 ]; then
+ echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
+ exit 1
+ fi
+ displayName: Sync repo into VMR (Unix)
+ condition: ne(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+- script: |
+ git config --global diff.astextplain.textconv echo
+ git config --system core.longpaths true
+ displayName: Configure Windows git (longpaths, astextplain)
+ condition: eq(variables['Agent.OS'], 'Windows_NT')
+
+- powershell: |
+ ./eng/common/vmr-sync.ps1 `
+ -vmr ${{ parameters.vmrPath }} `
+ -tmp $(Agent.TempDirectory) `
+ -azdevPat '$(dn-bot-all-orgs-code-r)' `
+ -ci `
+ -debugOutput
+
+ if ($LASTEXITCODE -ne 0) {
+ echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
+ exit 1
+ }
+ displayName: Sync repo into VMR (Windows)
+ condition: eq(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+- ${{ if eq(variables['Build.Reason'], 'PullRequest') }}:
+ - task: CopyFiles@2
+ displayName: Collect failed patches
+ condition: failed()
+ inputs:
+ SourceFolder: '$(Agent.TempDirectory)'
+ Contents: '*.patch'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/FailedPatches'
+
+ - publish: '$(Build.ArtifactStagingDirectory)/FailedPatches'
+ artifact: $(System.JobDisplayName)_FailedPatches
+ displayName: Upload failed patches
+ condition: failed()
+
+- ${{ each assetName in parameters.additionalSyncs }}:
+ # The vmr-sync script ends up staging files in the local VMR so we have to commit those
+ - script:
+ git commit --allow-empty -am "Forward-flow $(Build.Repository.Name)"
+ displayName: Commit local VMR changes
+ workingDirectory: ${{ parameters.vmrPath }}
+
+ - script: |
+ set -ex
+
+ echo "Searching for details of asset ${{ assetName }}..."
+
+ # Use darc to get dependencies information
+ dependencies=$(./.dotnet/dotnet darc get-dependencies --name '${{ assetName }}' --ci)
+
+ # Extract repository URL and commit hash
+ repository=$(echo "$dependencies" | grep 'Repo:' | sed 's/Repo:[[:space:]]*//' | head -1)
+
+ if [ -z "$repository" ]; then
+ echo "##vso[task.logissue type=error]Asset ${{ assetName }} not found in the dependency list"
+ exit 1
+ fi
+
+ commit=$(echo "$dependencies" | grep 'Commit:' | sed 's/Commit:[[:space:]]*//' | head -1)
+
+ echo "Updating the VMR from $repository / $commit..."
+ cd ..
+ git clone $repository ${{ assetName }}
+ cd ${{ assetName }}
+ git checkout $commit
+ git branch "sync/$commit"
+
+ ./eng/common/vmr-sync.sh \
+ --vmr ${{ parameters.vmrPath }} \
+ --tmp $(Agent.TempDirectory) \
+ --azdev-pat '$(dn-bot-all-orgs-code-r)' \
+ --ci \
+ --debug
+
+ if [ "$?" -ne 0 ]; then
+ echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
+ exit 1
+ fi
+ displayName: Sync ${{ assetName }} into (Unix)
+ condition: ne(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+ - powershell: |
+ $ErrorActionPreference = 'Stop'
+
+ Write-Host "Searching for details of asset ${{ assetName }}..."
+
+ $dependencies = .\.dotnet\dotnet darc get-dependencies --name '${{ assetName }}' --ci
+
+ $repository = $dependencies | Select-String -Pattern 'Repo:\s+([^\s]+)' | Select-Object -First 1
+ $repository -match 'Repo:\s+([^\s]+)' | Out-Null
+ $repository = $matches[1]
+
+ if ($repository -eq $null) {
+ Write-Error "Asset ${{ assetName }} not found in the dependency list"
+ exit 1
+ }
+
+ $commit = $dependencies | Select-String -Pattern 'Commit:\s+([^\s]+)' | Select-Object -First 1
+ $commit -match 'Commit:\s+([^\s]+)' | Out-Null
+ $commit = $matches[1]
+
+ Write-Host "Updating the VMR from $repository / $commit..."
+ cd ..
+ git clone $repository ${{ assetName }}
+ cd ${{ assetName }}
+ git checkout $commit
+ git branch "sync/$commit"
+
+ .\eng\common\vmr-sync.ps1 `
+ -vmr ${{ parameters.vmrPath }} `
+ -tmp $(Agent.TempDirectory) `
+ -azdevPat '$(dn-bot-all-orgs-code-r)' `
+ -ci `
+ -debugOutput
+
+ if ($LASTEXITCODE -ne 0) {
+ echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
+ exit 1
+ }
+ displayName: Sync ${{ assetName }} into (Windows)
+    condition: eq(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
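
Assuming the new steps template path is eng/common/templates/steps/vmr-sync.yml, a repo pipeline could opt extra repositories into the same sync by listing their package names, as the additionalSyncs description above suggests:

steps:
- template: /eng/common/templates/steps/vmr-sync.yml
  parameters:
    vmrPath: $(Agent.BuildDirectory)/vmr
    additionalSyncs:
    - NuGet.Protocol   # example name taken from the parameter description
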
diff --git a/eng/common/templates/variables/pool-providers.yml b/eng/common/templates/variables/pool-providers.yml
index d236f9fd..587770f0 100644
--- a/eng/common/templates/variables/pool-providers.yml
+++ b/eng/common/templates/variables/pool-providers.yml
@@ -23,35 +23,37 @@
#
# pool:
# name: $(DncEngInternalBuildPool)
-# demands: ImageOverride -equals windows.vs2019.amd64
-
+#   demands: ImageOverride -equals windows.vs2019.amd64
variables:
- # Coalesce the target and source branches so we know when a PR targets a release branch
- # If these variables are somehow missing, fall back to main (tends to have more capacity)
+ - ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+ - ${{ else }}:
+ # Coalesce the target and source branches so we know when a PR targets a release branch
+ # If these variables are somehow missing, fall back to main (tends to have more capacity)
- # Any new -Svc alternative pools should have variables added here to allow for splitting work
- - name: DncEngPublicBuildPool
- value: $[
- replace(
+ # Any new -Svc alternative pools should have variables added here to allow for splitting work
+ - name: DncEngPublicBuildPool
+ value: $[
replace(
- eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
- True,
- 'NetCore-Svc-Public'
- ),
- False,
- 'NetCore-Public'
- )
- ]
+ replace(
+ eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
+ True,
+ 'NetCore-Svc-Public'
+ ),
+ False,
+ 'NetCore-Public'
+ )
+ ]
- - name: DncEngInternalBuildPool
- value: $[
- replace(
+ - name: DncEngInternalBuildPool
+ value: $[
replace(
- eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
- True,
- 'NetCore1ESPool-Svc-Internal'
- ),
- False,
- 'NetCore1ESPool-Internal'
- )
- ]
+ replace(
+ eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
+ True,
+ 'NetCore1ESPool-Svc-Internal'
+ ),
+ False,
+ 'NetCore1ESPool-Internal'
+ )
+ ]
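
Consuming pipelines keep including the same variables template and picking a pool by variable; on the internal project the values now resolve through templates-official. A hypothetical job using the public pool; the image demand is illustrative:

variables:
- template: /eng/common/templates/variables/pool-providers.yml

jobs:
- job: Build
  pool:
    name: $(DncEngPublicBuildPool)
    demands: ImageOverride -equals windows.vs2019.amd64.open   # illustrative image
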
diff --git a/eng/common/templates/variables/sdl-variables.yml b/eng/common/templates/variables/sdl-variables.yml
deleted file mode 100644
index dbdd66d4..00000000
--- a/eng/common/templates/variables/sdl-variables.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-variables:
-# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
-# sync with the packages.config file.
-- name: DefaultGuardianVersion
- value: 0.109.0
-- name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
\ No newline at end of file
diff --git a/eng/common/templates/vmr-build-pr.yml b/eng/common/templates/vmr-build-pr.yml
new file mode 100644
index 00000000..2f3694fa
--- /dev/null
+++ b/eng/common/templates/vmr-build-pr.yml
@@ -0,0 +1,43 @@
+# This pipeline is used for running the VMR verification of the PR changes in repo-level PRs.
+#
+# It will run a full set of verification jobs defined in:
+# https://github.com/dotnet/dotnet/blob/10060d128e3f470e77265f8490f5e4f72dae738e/eng/pipelines/templates/stages/vmr-build.yml#L27-L38
+#
+# For repos that do not need to run the full set, you would do the following:
+#
+# 1. Copy this YML file to a repo-specific location, i.e. outside of eng/common.
+#
+# 2. Add `verifications` parameter to VMR template reference
+#
+# Examples:
+# - For source-build stage 1 verification, add the following:
+# verifications: [ "source-build-stage1" ]
+#
+# - For Windows only verifications, add the following:
+# verifications: [ "unified-build-windows-x64", "unified-build-windows-x86" ]
+
+trigger: none
+pr: none
+
+variables:
+- template: /eng/common/templates/variables/pool-providers.yml@self
+
+- name: skipComponentGovernanceDetection # we run CG on internal builds only
+ value: true
+
+- name: Codeql.Enabled # we run CodeQL on internal builds only
+ value: false
+
+resources:
+ repositories:
+ - repository: vmr
+ type: github
+ name: dotnet/dotnet
+ endpoint: dotnet
+ ref: refs/heads/main # Set to whatever VMR branch the PR build should insert into
+
+stages:
+- template: /eng/pipelines/templates/stages/vmr-build.yml@vmr
+ parameters:
+ isBuiltFromVmr: false
+ scope: lite
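
Following the customization steps in the header comments, a repo-specific copy that only needs source-build stage 1 verification would add the verifications parameter to the stage template reference, for example:

stages:
- template: /eng/pipelines/templates/stages/vmr-build.yml@vmr
  parameters:
    isBuiltFromVmr: false
    scope: lite
    verifications: [ "source-build-stage1" ]
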
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index eb188cfd..c96f5018 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -34,6 +34,9 @@
# Configures warning treatment in msbuild.
[bool]$warnAsError = if (Test-Path variable:warnAsError) { $warnAsError } else { $true }
+# Specifies semi-colon delimited list of warning codes that should not be treated as errors.
+[string]$warnNotAsError = if (Test-Path variable:warnNotAsError) { $warnNotAsError } else { '' }
+
# Specifies which msbuild engine to use for build: 'vs', 'dotnet' or unspecified (determined based on presence of tools.vs in global.json).
[string]$msbuildEngine = if (Test-Path variable:msbuildEngine) { $msbuildEngine } else { $null }
@@ -42,7 +45,7 @@
[bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true }
# Enable repos to use a particular version of the on-line dotnet-install scripts.
-# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.ps1
+# default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.ps1
[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' }
# True to use global NuGet cache instead of restoring packages to repository-local directory.
@@ -65,6 +68,9 @@ $ErrorActionPreference = 'Stop'
# Base-64 encoded SAS token that has permission to storage container described by $runtimeSourceFeed
[string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null }
+# True when the build is running within the VMR.
+[bool]$fromVMR = if (Test-Path variable:fromVMR) { $fromVMR } else { $false }
+
function Create-Directory ([string[]] $path) {
New-Item -Path $path -Force -ItemType 'Directory' | Out-Null
}
@@ -154,22 +160,14 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
return $global:_DotNetInstallDir
}
- # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
- $env:DOTNET_MULTILEVEL_LOOKUP=0
-
# Disable first run since we do not need all ASP.NET packages restored.
- $env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
+ $env:DOTNET_NOLOGO=1
# Disable telemetry on CI.
if ($ci) {
$env:DOTNET_CLI_TELEMETRY_OPTOUT=1
}
- # Source Build uses DotNetCoreSdkDir variable
- if ($env:DotNetCoreSdkDir -ne $null) {
- $env:DOTNET_INSTALL_DIR = $env:DotNetCoreSdkDir
- }
-
# Find the first path on %PATH% that contains the dotnet.exe
if ($useInstalledDotNetCli -and (-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -eq $null)) {
$dotnetExecutable = GetExecutableFileName 'dotnet'
@@ -227,8 +225,7 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
# Make Sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build
Write-PipelinePrependPath -Path $dotnetRoot
- Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0'
- Write-PipelineSetVariable -Name 'DOTNET_SKIP_FIRST_TIME_EXPERIENCE' -Value '1'
+ Write-PipelineSetVariable -Name 'DOTNET_NOLOGO' -Value '1'
return $global:_DotNetInstallDir = $dotnetRoot
}
@@ -254,20 +251,32 @@ function Retry($downloadBlock, $maxRetries = 5) {
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download file in $maxRetries attempts."
break
}
-
}
}
function GetDotNetInstallScript([string] $dotnetRoot) {
$installScript = Join-Path $dotnetRoot 'dotnet-install.ps1'
+ $shouldDownload = $false
+
if (!(Test-Path $installScript)) {
+ $shouldDownload = $true
+ } else {
+ # Check if the script is older than 30 days
+ $fileAge = (Get-Date) - (Get-Item $installScript).LastWriteTime
+ if ($fileAge.Days -gt 30) {
+ Write-Host "Existing install script is too old, re-downloading..."
+ $shouldDownload = $true
+ }
+ }
+
+ if ($shouldDownload) {
Create-Directory $dotnetRoot
$ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
- $uri = "https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1"
+ $uri = "https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1"
Retry({
Write-Host "GET $uri"
- Invoke-WebRequest $uri -OutFile $installScript
+ Invoke-WebRequest $uri -UseBasicParsing -OutFile $installScript
})
}
@@ -289,6 +298,8 @@ function InstallDotNet([string] $dotnetRoot,
$dotnetVersionLabel = "'sdk v$version'"
+ # For performance, this check is duplicated in src/Microsoft.DotNet.Arcade.Sdk/src/InstallDotNetCore.cs.
+ # If you are making changes here, consider whether you need to make changes there as well.
if ($runtime -ne '' -and $runtime -ne 'sdk') {
$runtimePath = $dotnetRoot
$runtimePath = $runtimePath + "\shared"
@@ -321,7 +332,7 @@ function InstallDotNet([string] $dotnetRoot,
$variations += @($installParameters)
$dotnetBuilds = $installParameters.Clone()
- $dotnetbuilds.AzureFeed = "https://dotnetbuilds.azureedge.net/public"
+ $dotnetbuilds.AzureFeed = "https://ci.dot.net/public"
$variations += @($dotnetBuilds)
if ($runtimeSourceFeed) {
@@ -384,8 +395,8 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
# If the version of msbuild is going to be xcopied,
# use this version. Version matches a package here:
- # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/RoslynTools.MSBuild/versions/17.8.1-2
- $defaultXCopyMSBuildVersion = '17.8.1-2'
+ # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/18.0.0
+ $defaultXCopyMSBuildVersion = '18.0.0'
if (!$vsRequirements) {
if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
@@ -417,14 +428,13 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
# Locate Visual Studio installation or download x-copy msbuild.
$vsInfo = LocateVisualStudio $vsRequirements
- if ($vsInfo -ne $null) {
+ if ($vsInfo -ne $null -and $env:ForceUseXCopyMSBuild -eq $null) {
# Ensure vsInstallDir has a trailing slash
$vsInstallDir = Join-Path $vsInfo.installationPath "\"
$vsMajorVersion = $vsInfo.installationVersion.Split('.')[0]
InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion
} else {
-
if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') {
$xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild'
$vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
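The extra `$env:ForceUseXCopyMSBuild -eq $null` condition above means a locally installed Visual Studio can now be bypassed on demand. A rough sketch of that opt-in (the value is illustrative; any non-empty value triggers the fallback):

    # Illustrative only: force the xcopy-MSBuild fallback even though Visual Studio is installed,
    # e.g. to reproduce the CI toolset locally.
    $env:ForceUseXCopyMSBuild = 'true'
    . .\eng\common\tools.ps1
    $msbuildExe = InitializeVisualStudioMSBuild -install:$true
    Write-Host "MSBuild resolved to: $msbuildExe"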
@@ -450,7 +460,7 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
if ($xcopyMSBuildVersion.Trim() -ine "none") {
$vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
if ($vsInstallDir -eq $null) {
- throw "Could not xcopy msbuild. Please check that package 'RoslynTools.MSBuild @ $xcopyMSBuildVersion' exists on feed 'dotnet-eng'."
+ throw "Could not xcopy msbuild. Please check that package 'Microsoft.DotNet.Arcade.MSBuild.Xcopy @ $xcopyMSBuildVersion' exists on feed 'dotnet-eng'."
}
}
if ($vsInstallDir -eq $null) {
@@ -487,7 +497,7 @@ function InstallXCopyMSBuild([string]$packageVersion) {
}
function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
- $packageName = 'RoslynTools.MSBuild'
+ $packageName = 'Microsoft.DotNet.Arcade.MSBuild.Xcopy'
$packageDir = Join-Path $ToolsDir "msbuild\$packageVersion"
$packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg"
@@ -501,9 +511,13 @@ function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
Write-Host "Downloading $packageName $packageVersion"
$ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
Retry({
- Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath
+ Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -UseBasicParsing -OutFile $packagePath
})
+ if (!(Test-Path $packagePath)) {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "See https://dev.azure.com/dnceng/internal/_wiki/wikis/DNCEng%20Services%20Wiki/1074/Updating-Microsoft.DotNet.Arcade.MSBuild.Xcopy-WAS-RoslynTools.MSBuild-(xcopy-msbuild)-generation?anchor=troubleshooting for help troubleshooting issues with XCopy MSBuild"
+ throw
+ }
Unzip $packagePath $packageDir
}
@@ -531,7 +545,8 @@ function LocateVisualStudio([object]$vsRequirements = $null){
if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') {
$vswhereVersion = $GlobalJson.tools.vswhere
} else {
- $vswhereVersion = '2.5.2'
+ # keep this in sync with the VSWhereVersion in DefaultVersions.props
+ $vswhereVersion = '3.1.7'
}
$vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion"
@@ -539,25 +554,33 @@ function LocateVisualStudio([object]$vsRequirements = $null){
if (!(Test-Path $vsWhereExe)) {
Create-Directory $vsWhereDir
- Write-Host 'Downloading vswhere'
+ Write-Host "Downloading vswhere $vswhereVersion"
+ $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
Retry({
- Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
+ Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -UseBasicParsing -OutFile $vswhereExe
})
}
- if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
+ if (!$vsRequirements) {
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'vs' -ErrorAction SilentlyContinue) {
+ $vsRequirements = $GlobalJson.tools.vs
+ } else {
+ $vsRequirements = $null
+ }
+ }
+
$args = @('-latest', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*')
if (!$excludePrereleaseVS) {
$args += '-prerelease'
}
- if (Get-Member -InputObject $vsRequirements -Name 'version') {
+ if ($vsRequirements -and (Get-Member -InputObject $vsRequirements -Name 'version' -ErrorAction SilentlyContinue)) {
$args += '-version'
$args += $vsRequirements.version
}
- if (Get-Member -InputObject $vsRequirements -Name 'components') {
+ if ($vsRequirements -and (Get-Member -InputObject $vsRequirements -Name 'components' -ErrorAction SilentlyContinue)) {
foreach ($component in $vsRequirements.components) {
$args += '-requires'
$args += $component
@@ -570,6 +593,11 @@ function LocateVisualStudio([object]$vsRequirements = $null){
return $null
}
+ if ($null -eq $vsInfo -or $vsInfo.Count -eq 0) {
+ throw "No instance of Visual Studio meeting the requirements specified was found. Requirements: $($args -join ' ')"
+ return $null
+ }
+
# use first matching instance
return $vsInfo[0]
}
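With the Get-Member guards above, LocateVisualStudio no longer assumes global.json contains a tools.vs section, and callers can still pass an explicit requirements object. A hypothetical direct call (the version and component id are placeholders):

    # Illustrative only: look up a VS instance against an explicit version/workload requirement.
    . .\eng\common\tools.ps1
    $requirements = [PSCustomObject]@{
      version    = '17.8'
      components = @('Microsoft.VisualStudio.Component.Roslyn.Compiler')
    }
    $vsInfo = LocateVisualStudio $requirements
    Write-Host "Found VS $($vsInfo.installationVersion) at $($vsInfo.installationPath)"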
@@ -602,14 +630,7 @@ function InitializeBuildTool() {
}
$dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet')
- # Use override if it exists - commonly set by source-build
- if ($null -eq $env:_OverrideArcadeInitializeBuildToolFramework) {
- $initializeBuildToolFramework="net8.0"
- } else {
- $initializeBuildToolFramework=$env:_OverrideArcadeInitializeBuildToolFramework
- }
-
- $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = $initializeBuildToolFramework }
+ $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net' }
} elseif ($msbuildEngine -eq "vs") {
try {
$msbuildPath = InitializeVisualStudioMSBuild -install:$restore
@@ -618,7 +639,7 @@ function InitializeBuildTool() {
ExitWithExitCode 1
}
- $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472"; ExcludePrereleaseVS = $excludePrereleaseVS }
+ $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "netframework"; ExcludePrereleaseVS = $excludePrereleaseVS }
} else {
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'."
ExitWithExitCode 1
@@ -651,7 +672,6 @@ function GetNuGetPackageCachePath() {
$env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\'
} else {
$env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\'
- $env:RESTORENOCACHE = $true
}
}
@@ -684,8 +704,14 @@ function Read-ArcadeSdkVersion() {
}
function InitializeToolset() {
- if (Test-Path variable:global:_ToolsetBuildProj) {
- return $global:_ToolsetBuildProj
+ # For Unified Build / source-build support, check whether the _InitializeToolset environment
+ # variable is set. If it is, use it as the toolset build project.
+ if ($env:_InitializeToolset -ne $null) {
+ return $global:_InitializeToolset = $env:_InitializeToolset
+ }
+
+ if (Test-Path variable:global:_InitializeToolset) {
+ return $global:_InitializeToolset
}
$nugetCache = GetNuGetPackageCachePath
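The environment-variable branch above lets an outer orchestrator (the VMR or a source-build wrapper) hand an already-restored toolset project to the inner repo build instead of letting it restore its own. Roughly, and with a purely illustrative path:

    # Illustrative only: point the repo build at a toolset restored by an outer build.
    $env:_InitializeToolset = 'C:\vmr\artifacts\toolset\9.0.0-beta.1\tools\Build.proj'
    . .\eng\common\tools.ps1
    $toolsetBuildProj = InitializeToolset   # returns the value above without restoring again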
@@ -696,7 +722,7 @@ function InitializeToolset() {
if (Test-Path $toolsetLocationFile) {
$path = Get-Content $toolsetLocationFile -TotalCount 1
if (Test-Path $path) {
- return $global:_ToolsetBuildProj = $path
+ return $global:_InitializeToolset = $path
}
}
@@ -719,7 +745,7 @@ function InitializeToolset() {
throw "Invalid toolset path: $path"
}
- return $global:_ToolsetBuildProj = $path
+ return $global:_InitializeToolset = $path
}
function ExitWithExitCode([int] $exitCode) {
@@ -767,28 +793,13 @@ function MSBuild() {
$toolsetBuildProject = InitializeToolset
$basePath = Split-Path -parent $toolsetBuildProject
- $possiblePaths = @(
- # new scripts need to work with old packages, so we need to look for the old names/versions
- (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')),
- (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.Arcade.Sdk.dll'))
- (Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.Arcade.Sdk.dll'))
- (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.Arcade.Sdk.dll'))
- )
- $selectedPath = $null
- foreach ($path in $possiblePaths) {
- if (Test-Path $path -PathType Leaf) {
- $selectedPath = $path
- break
- }
- }
+ $selectedPath = Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')
+
-    if (-not $selectedPath) {
+    if (!(Test-Path $selectedPath -PathType Leaf)) {
- Write-PipelineTelemetryError -Category 'Build' -Message 'Unable to find arcade sdk logger assembly.'
+ Write-PipelineTelemetryError -Category 'Build' -Message "Unable to find arcade sdk logger assembly: $selectedPath"
ExitWithExitCode 1
}
+
$args += "/logger:$selectedPath"
}
@@ -819,6 +830,11 @@ function MSBuild-Core() {
$cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"
+ # Add -mt flag for MSBuild multithreaded mode if enabled via environment variable
+ if ($env:MSBUILD_MT_ENABLED -eq "1") {
+ $cmdArgs += ' -mt'
+ }
+
if ($warnAsError) {
$cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true'
}
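The `-mt` switch is only appended when the MSBUILD_MT_ENABLED environment variable is set to 1, so multithreaded MSBuild stays opt-in. A minimal way to try it on a local build, assuming the standard Arcade entry point and an MSBuild version that understands -mt:

    # Illustrative only: opt a local build into MSBuild multithreaded mode.
    $env:MSBUILD_MT_ENABLED = '1'
    .\eng\common\build.ps1 -restore -build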
@@ -826,6 +842,10 @@ function MSBuild-Core() {
$cmdArgs += ' /p:TreatWarningsAsErrors=false'
}
+ if ($warnNotAsError) {
+ $cmdArgs += " /warnnotaserror:$warnNotAsError /p:AdditionalWarningsNotAsErrors=$warnNotAsError"
+ }
+
foreach ($arg in $args) {
if ($null -ne $arg -and $arg.Trim() -ne "") {
if ($arg.EndsWith('\')) {
@@ -835,7 +855,8 @@ function MSBuild-Core() {
}
}
- $env:ARCADE_BUILD_TOOL_COMMAND = "$($buildTool.Path) $cmdArgs"
+ # Be sure to quote the path in case there are spaces in the dotnet installation location.
+ $env:ARCADE_BUILD_TOOL_COMMAND = "`"$($buildTool.Path)`" $cmdArgs"
$exitCode = Exec-Process $buildTool.Path $cmdArgs
@@ -850,7 +871,8 @@ function MSBuild-Core() {
}
# When running on Azure Pipelines, override the returned exit code to avoid double logging.
- if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null) {
+ # Skip this when the build is a child of the VMR build.
+ if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$fromVMR) {
Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
@@ -892,7 +914,7 @@ function IsWindowsPlatform() {
}
function Get-Darc($version) {
- $darcPath = "$TempDir\darc\$(New-Guid)"
+ $darcPath = "$TempDir\darc\$([guid]::NewGuid())"
if ($version -ne $null) {
& $PSScriptRoot\darc-init.ps1 -toolpath $darcPath -darcVersion $version | Out-Host
} else {
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index 3392e3a9..a6e0ed59 100644
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -5,6 +5,9 @@
# CI mode - set to true on CI server for PR validation build or official build.
ci=${ci:-false}
+# Build mode
+source_build=${source_build:-false}
+
# Set to true to use the pipelines logger which will enable Azure logging output.
# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
# This flag is meant as a temporary opt-out for the feature while we validate it across
@@ -49,16 +52,20 @@ fi
# Configures warning treatment in msbuild.
warn_as_error=${warn_as_error:-true}
+# Specifies a semicolon-delimited list of warning codes that should not be treated as errors.
+warn_not_as_error=${warn_not_as_error:-''}
+
# True to attempt using a .NET Core SDK already installed on the machine that meets the
# requirements specified in global.json, instead of downloading one.
use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
# Enable repos to use a particular version of the on-line dotnet-install scripts.
-# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh
+# default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.sh
dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'}
# True to use global NuGet cache instead of restoring packages to repository-local directory.
-if [[ "$ci" == true ]]; then
+# Keep in sync with NuGetPackageroot in Arcade SDK's RepositoryLayout.props.
+if [[ "$ci" == true || "$source_build" == true ]]; then
use_global_nuget_cache=${use_global_nuget_cache:-false}
else
use_global_nuget_cache=${use_global_nuget_cache:-true}
@@ -68,6 +75,9 @@ fi
runtime_source_feed=${runtime_source_feed:-''}
runtime_source_feed_key=${runtime_source_feed_key:-''}
+# True when the build is running within the VMR.
+from_vmr=${from_vmr:-false}
+
# Resolve any symlinks in the given path.
function ResolvePath {
local path=$1
@@ -108,11 +118,8 @@ function InitializeDotNetCli {
local install=$1
- # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
- export DOTNET_MULTILEVEL_LOOKUP=0
-
# Disable first run since we want to control all package sources
- export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
+ export DOTNET_NOLOGO=1
# Disable telemetry on CI
if [[ $ci == true ]]; then
@@ -123,11 +130,6 @@ function InitializeDotNetCli {
# so it doesn't output warnings to the console.
export LTTNG_HOME="$HOME"
- # Source Build uses DotNetCoreSdkDir variable
- if [[ -n "${DotNetCoreSdkDir:-}" ]]; then
- export DOTNET_INSTALL_DIR="$DotNetCoreSdkDir"
- fi
-
# Find the first path on $PATH that contains the dotnet.exe
if [[ "$use_installed_dotnet_cli" == true && $global_json_has_runtimes == false && -z "${DOTNET_INSTALL_DIR:-}" ]]; then
local dotnet_path=`command -v dotnet`
@@ -146,7 +148,7 @@ function InitializeDotNetCli {
if [[ $global_json_has_runtimes == false && -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
dotnet_root="$DOTNET_INSTALL_DIR"
else
- dotnet_root="$repo_root/.dotnet"
+ dotnet_root="${repo_root}.dotnet"
export DOTNET_INSTALL_DIR="$dotnet_root"
@@ -164,8 +166,7 @@ function InitializeDotNetCli {
# build steps from using anything other than what we've downloaded.
Write-PipelinePrependPath -path "$dotnet_root"
- Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0"
- Write-PipelineSetVariable -name "DOTNET_SKIP_FIRST_TIME_EXPERIENCE" -value "1"
+ Write-PipelineSetVariable -name "DOTNET_NOLOGO" -value "1"
# return value
_InitializeDotNetCli="$dotnet_root"
@@ -186,6 +187,8 @@ function InstallDotNet {
local version=$2
local runtime=$4
+ # For performance, this check is duplicated in src/Microsoft.DotNet.Arcade.Sdk/src/InstallDotNetCore.cs.
+ # If you are making changes here, consider whether you need to make changes there as well.
local dotnetVersionLabel="'$runtime v$version'"
if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then
runtimePath="$root"
@@ -234,7 +237,7 @@ function InstallDotNet {
local public_location=("${installParameters[@]}")
variations+=(public_location)
- local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://dotnetbuilds.azureedge.net/public")
+ local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://ci.dot.net/public")
variations+=(dotnetbuilds)
if [[ -n "${6:-}" ]]; then
@@ -297,9 +300,30 @@ function with_retries {
function GetDotNetInstallScript {
local root=$1
local install_script="$root/dotnet-install.sh"
- local install_script_url="https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh"
+ local install_script_url="https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh"
+ local timestamp_file="$root/.dotnet-install.timestamp"
+ local should_download=false
if [[ ! -a "$install_script" ]]; then
+ should_download=true
+ elif [[ -f "$timestamp_file" ]]; then
+ # Check if the script is older than 30 days using timestamp file
+ local download_time=$(cat "$timestamp_file" 2>/dev/null || echo "0")
+ local current_time=$(date +%s)
+ local age_seconds=$((current_time - download_time))
+
+ # 30 days = 30 * 24 * 60 * 60 = 2592000 seconds
+ if [[ $age_seconds -gt 2592000 ]]; then
+ echo "Existing install script is too old, re-downloading..."
+ should_download=true
+ fi
+ else
+ # No timestamp file exists, assume script is old and re-download
+ echo "No timestamp found for existing install script, re-downloading..."
+ should_download=true
+ fi
+
+ if [[ "$should_download" == true ]]; then
mkdir -p "$root"
echo "Downloading '$install_script_url'"
@@ -310,7 +334,7 @@ function GetDotNetInstallScript {
curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || {
if command -v openssl &> /dev/null; then
echo "Curl failed; dumping some information about dotnet.microsoft.com for later investigation"
- echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443
+ echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443 || true
fi
echo "Will now retry the same URL with verbose logging."
with_retries curl "$install_script_url" -sSL --verbose --retry 10 --create-dirs -o "$install_script" || {
@@ -326,6 +350,9 @@ function GetDotNetInstallScript {
ExitWithExitCode $exit_code
}
fi
+
+ # Create timestamp file to track download time in seconds from epoch
+ date +%s > "$timestamp_file"
fi
# return value
_GetDotNetInstallScript="$install_script"
@@ -341,22 +368,14 @@ function InitializeBuildTool {
# return values
_InitializeBuildTool="$_InitializeDotNetCli/dotnet"
_InitializeBuildToolCommand="msbuild"
- # use override if it exists - commonly set by source-build
- if [[ "${_OverrideArcadeInitializeBuildToolFramework:-x}" == "x" ]]; then
- _InitializeBuildToolFramework="net8.0"
- else
- _InitializeBuildToolFramework="${_OverrideArcadeInitializeBuildToolFramework}"
- fi
}
-# Set RestoreNoCache as a workaround for https://github.com/NuGet/Home/issues/3116
function GetNuGetPackageCachePath {
if [[ -z ${NUGET_PACKAGES:-} ]]; then
if [[ "$use_global_nuget_cache" == true ]]; then
- export NUGET_PACKAGES="$HOME/.nuget/packages"
+ export NUGET_PACKAGES="$HOME/.nuget/packages/"
else
- export NUGET_PACKAGES="$repo_root/.packages"
- export RESTORENOCACHE=true
+ export NUGET_PACKAGES="$repo_root/.packages/"
fi
fi
@@ -440,7 +459,7 @@ function StopProcesses {
}
function MSBuild {
- local args=$@
+ local args=( "$@" )
if [[ "$pipelines_log" == true ]]; then
InitializeBuildTool
InitializeToolset
@@ -453,31 +472,17 @@ function MSBuild {
fi
local toolset_dir="${_InitializeToolset%/*}"
- # new scripts need to work with old packages, so we need to look for the old names/versions
- local selectedPath=
- local possiblePaths=()
- possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.Arcade.Sdk.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.Arcade.Sdk.dll" )
- possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.Arcade.Sdk.dll" )
- for path in "${possiblePaths[@]}"; do
- if [[ -f $path ]]; then
- selectedPath=$path
- break
- fi
- done
+ local selectedPath="$toolset_dir/net/Microsoft.DotNet.ArcadeLogging.dll"
+
-    if [[ -z "$selectedPath" ]]; then
+    if [[ ! -f "$selectedPath" ]]; then
- Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly."
+ Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly: $selectedPath"
ExitWithExitCode 1
fi
+
args+=( "-logger:$selectedPath" )
fi
- MSBuild-Core ${args[@]}
+ MSBuild-Core "${args[@]}"
}
function MSBuild-Core {
@@ -510,7 +515,8 @@ function MSBuild-Core {
echo "Build failed with exit code $exit_code. Check errors above."
# When running on Azure Pipelines, override the returned exit code to avoid double logging.
- if [[ "$ci" == "true" && -n ${SYSTEM_TEAMPROJECT:-} ]]; then
+ # Skip this when the build is a child of the VMR build.
+ if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$from_vmr" != true ]]; then
Write-PipelineSetResult -result "Failed" -message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
@@ -521,7 +527,18 @@ function MSBuild-Core {
}
}
- RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@"
+ # Add -mt flag for MSBuild multithreaded mode if enabled via environment variable
+ local mt_switch=""
+ if [[ "${MSBUILD_MT_ENABLED:-}" == "1" ]]; then
+ mt_switch="-mt"
+ fi
+
+ local warnnotaserror_switch=""
+ if [[ -n "$warn_not_as_error" ]]; then
+ warnnotaserror_switch="/warnnotaserror:$warn_not_as_error /p:AdditionalWarningsNotAsErrors=$warn_not_as_error"
+ fi
+
+ RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch $mt_switch $warnnotaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@"
}
function GetDarc {
@@ -533,6 +550,13 @@ function GetDarc {
fi
"$eng_root/common/darc-init.sh" --toolpath "$darc_path" $version
+ darc_tool="$darc_path/darc"
+}
+
+# Returns a full path to an Arcade SDK task project file.
+function GetSdkTaskProject {
+ taskName=$1
+ echo "$(dirname $_InitializeToolset)/SdkTasks/$taskName.proj"
}
ResolvePath "${BASH_SOURCE[0]}"
diff --git a/eng/common/vmr-sync.ps1 b/eng/common/vmr-sync.ps1
new file mode 100644
index 00000000..b37992d9
--- /dev/null
+++ b/eng/common/vmr-sync.ps1
@@ -0,0 +1,164 @@
+<#
+.SYNOPSIS
+
+This script is used for synchronizing the current repository into a local VMR.
+It pulls the current repository's code into the specified VMR directory for local testing or
+Source-Build validation.
+
+.DESCRIPTION
+
+The tooling used for synchronization will clone the VMR repository into a temporary folder if
+it does not already exist. These clones can be reused in future synchronizations, so it is
+recommended to dedicate a folder for this to speed up re-runs.
+
+.EXAMPLE
+ Synchronize current repository into a local VMR:
+ ./vmr-sync.ps1 -vmrDir "$HOME/repos/dotnet" -tmpDir "$HOME/repos/tmp"
+
+.PARAMETER tmpDir
+Required. Path to the temporary folder where repositories will be cloned
+
+.PARAMETER vmrBranch
+Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch
+
+.PARAMETER azdevPat
+Optional. Azure DevOps PAT to use for cloning private repositories.
+
+.PARAMETER vmrDir
+Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder
+
+.PARAMETER debugOutput
+Optional. Enables debug logging in the darc vmr command.
+
+.PARAMETER ci
+Optional. Denotes that the script is running in a CI environment.
+#>
+param (
+ [Parameter(Mandatory=$true, HelpMessage="Path to the temporary folder where repositories will be cloned")]
+ [string][Alias('t', 'tmp')]$tmpDir,
+ [string][Alias('b', 'branch')]$vmrBranch,
+ [string]$remote,
+ [string]$azdevPat,
+ [string][Alias('v', 'vmr')]$vmrDir,
+ [switch]$ci,
+ [switch]$debugOutput
+)
+
+function Fail {
+ Write-Host "> $($args[0])" -ForegroundColor 'Red'
+}
+
+function Highlight {
+ Write-Host "> $($args[0])" -ForegroundColor 'Cyan'
+}
+
+$verbosity = 'verbose'
+if ($debugOutput) {
+ $verbosity = 'debug'
+}
+# Validation
+
+if (-not $tmpDir) {
+ Fail "Missing -tmpDir argument. Please specify the path to the temporary folder where the repositories will be cloned"
+ exit 1
+}
+
+# Sanitize the input
+
+if (-not $vmrDir) {
+ $vmrDir = Join-Path $tmpDir 'dotnet'
+}
+
+if (-not (Test-Path -Path $tmpDir -PathType Container)) {
+ New-Item -ItemType Directory -Path $tmpDir | Out-Null
+}
+
+# Prepare the VMR
+
+if (-not (Test-Path -Path $vmrDir -PathType Container)) {
+ Highlight "Cloning 'dotnet/dotnet' into $vmrDir.."
+ git clone https://github.com/dotnet/dotnet $vmrDir
+
+ if ($vmrBranch) {
+ git -C $vmrDir switch -c $vmrBranch
+ }
+}
+else {
+ git -C $vmrDir diff --quiet
+ if ($LASTEXITCODE -ne 0) {
+ Fail "There are changes in the working tree of $vmrDir. Please commit or stash your changes"
+ exit 1
+ }
+
+ if ($vmrBranch) {
+ Highlight "Preparing $vmrDir"
+ git -C $vmrDir checkout $vmrBranch
+ git -C $vmrDir pull
+ }
+}
+
+Set-StrictMode -Version Latest
+
+# Prepare darc
+
+Highlight 'Installing .NET, preparing the tooling..'
+. .\eng\common\tools.ps1
+$dotnetRoot = InitializeDotNetCli -install:$true
+$env:DOTNET_ROOT = $dotnetRoot
+$darc = Get-Darc
+
+Highlight "Starting the synchronization of VMR.."
+
+# Synchronize the VMR
+$versionDetailsPath = Resolve-Path (Join-Path $PSScriptRoot '..\Version.Details.xml') | Select-Object -ExpandProperty Path
+[xml]$versionDetails = Get-Content -Path $versionDetailsPath
+$repoName = $versionDetails.SelectSingleNode('//Source').Mapping
+if (-not $repoName) {
+ Fail "Failed to resolve repo mapping from $versionDetailsPath"
+ exit 1
+}
+
+$darcArgs = (
+ "vmr", "forwardflow",
+ "--tmp", $tmpDir,
+ "--$verbosity",
+ $vmrDir
+)
+
+if ($ci) {
+ $darcArgs += ("--ci")
+}
+
+if ($azdevPat) {
+ $darcArgs += ("--azdev-pat", $azdevPat)
+}
+
+& "$darc" $darcArgs
+
+if ($LASTEXITCODE -eq 0) {
+ Highlight "Synchronization succeeded"
+}
+else {
+ Highlight "Failed to flow code into the local VMR. Falling back to resetting the VMR to match repo contents..."
+ git -C $vmrDir reset --hard
+
+ $resetArgs = (
+ "vmr", "reset",
+ "${repoName}:HEAD",
+ "--vmr", $vmrDir,
+ "--tmp", $tmpDir,
+ "--additional-remotes", "${repoName}:${repoRoot}"
+ )
+
+ & "$darc" $resetArgs
+
+ if ($LASTEXITCODE -eq 0) {
+ Highlight "Successfully reset the VMR using 'darc vmr reset'"
+ }
+ else {
+ Fail "Synchronization of repo to VMR failed!"
+ Fail "'$vmrDir' is left in its last state (re-run of this script will reset it)."
+ Fail "Please inspect the logs, which contain the path to the failing patch file (use -debugOutput to get all the details)."
+ Fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script."
+ exit 1
+ }
+}
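In addition to the .EXAMPLE in the script header, a typical local run that targets a specific VMR branch and turns on darc debug logging could look like this (paths and branch are illustrative):

    # Illustrative only: forward-flow the current repo into a locally cloned VMR.
    .\eng\common\vmr-sync.ps1 `
      -tmpDir "$HOME\repos\tmp" `
      -vmrDir "$HOME\repos\dotnet" `
      -vmrBranch main `
      -debugOutput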
diff --git a/eng/common/vmr-sync.sh b/eng/common/vmr-sync.sh
new file mode 100644
index 00000000..198caec5
--- /dev/null
+++ b/eng/common/vmr-sync.sh
@@ -0,0 +1,227 @@
+#!/bin/bash
+
+### This script is used for synchronizing the current repository into a local VMR.
+### It pulls the current repository's code into the specified VMR directory for local testing or
+### Source-Build validation.
+###
+### The tooling used for synchronization will clone the VMR repository into a temporary folder if
+### it does not already exist. These clones can be reused in future synchronizations, so it is
+### recommended to dedicate a folder for this to speed up re-runs.
+###
+### USAGE:
+### Synchronize current repository into a local VMR:
+###   ./vmr-sync.sh --tmp "$HOME/repos/tmp" --vmr "$HOME/repos/dotnet"
+###
+### Options:
+### -t, --tmp, --tmp-dir PATH
+### Required. Path to the temporary folder where repositories will be cloned
+###
+### -b, --branch, --vmr-branch BRANCH_NAME
+### Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch
+###
+### --debug
+### Optional. Turns on the most verbose logging for the VMR tooling
+###
+### --remote name:URI
+### Optional. Additional remote to use during the synchronization
+### This can be used to synchronize to a commit from a fork of the repository
+### Example: 'runtime:https://github.com/yourfork/runtime'
+###
+### --azdev-pat
+### Optional. Azure DevOps PAT to use for cloning private repositories.
+###
+### -v, --vmr, --vmr-dir PATH
+### Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+function print_help () {
+ sed -n '/^### /,/^$/p' "$source" | cut -b 5-
+}
+
+COLOR_RED=$(tput setaf 1 2>/dev/null || true)
+COLOR_CYAN=$(tput setaf 6 2>/dev/null || true)
+COLOR_CLEAR=$(tput sgr0 2>/dev/null || true)
+COLOR_RESET=uniquesearchablestring
+FAILURE_PREFIX='> '
+
+function fail () {
+ echo "${COLOR_RED}$FAILURE_PREFIX${1//${COLOR_RESET}/${COLOR_RED}}${COLOR_CLEAR}" >&2
+}
+
+function highlight () {
+ echo "${COLOR_CYAN}$FAILURE_PREFIX${1//${COLOR_RESET}/${COLOR_CYAN}}${COLOR_CLEAR}"
+}
+
+tmp_dir=''
+vmr_dir=''
+vmr_branch=''
+additional_remotes=''
+verbosity=verbose
+azdev_pat=''
+ci=false
+
+while [[ $# -gt 0 ]]; do
+ opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -t|--tmp|--tmp-dir)
+ tmp_dir=$2
+ shift
+ ;;
+ -v|--vmr|--vmr-dir)
+ vmr_dir=$2
+ shift
+ ;;
+ -b|--branch|--vmr-branch)
+ vmr_branch=$2
+ shift
+ ;;
+ --remote)
+ additional_remotes="$additional_remotes $2"
+ shift
+ ;;
+ --azdev-pat)
+ azdev_pat=$2
+ shift
+ ;;
+ --ci)
+ ci=true
+ ;;
+ -d|--debug)
+ verbosity=debug
+ ;;
+ -h|--help)
+ print_help
+ exit 0
+ ;;
+ *)
+ fail "Invalid argument: $1"
+ print_help
+ exit 1
+ ;;
+ esac
+
+ shift
+done
+
+# Validation
+
+if [[ -z "$tmp_dir" ]]; then
+ fail "Missing --tmp-dir argument. Please specify the path to the temporary folder where the repositories will be cloned"
+ exit 1
+fi
+
+# Sanitize the input
+
+if [[ -z "$vmr_dir" ]]; then
+ vmr_dir="$tmp_dir/dotnet"
+fi
+
+if [[ ! -d "$tmp_dir" ]]; then
+ mkdir -p "$tmp_dir"
+fi
+
+if [[ "$verbosity" == "debug" ]]; then
+ set -x
+fi
+
+# Prepare the VMR
+
+if [[ ! -d "$vmr_dir" ]]; then
+ highlight "Cloning 'dotnet/dotnet' into $vmr_dir.."
+ git clone https://github.com/dotnet/dotnet "$vmr_dir"
+
+ if [[ -n "$vmr_branch" ]]; then
+ git -C "$vmr_dir" switch -c "$vmr_branch"
+ fi
+else
+ if ! git -C "$vmr_dir" diff --quiet; then
+ fail "There are changes in the working tree of $vmr_dir. Please commit or stash your changes"
+ exit 1
+ fi
+
+ if [[ -n "$vmr_branch" ]]; then
+ highlight "Preparing $vmr_dir"
+ git -C "$vmr_dir" checkout "$vmr_branch"
+ git -C "$vmr_dir" pull
+ fi
+fi
+
+set -e
+
+# Prepare darc
+
+highlight 'Installing .NET, preparing the tooling..'
+source "./eng/common/tools.sh"
+InitializeDotNetCli true
+GetDarc
+dotnetDir=$( cd ./.dotnet/; pwd -P )
+dotnet=$dotnetDir/dotnet
+
+highlight "Starting the synchronization of VMR.."
+set +e
+
+if [[ -n "$additional_remotes" ]]; then
+ additional_remotes="--additional-remotes $additional_remotes"
+fi
+
+if [[ -n "$azdev_pat" ]]; then
+ azdev_pat="--azdev-pat $azdev_pat"
+fi
+
+ci_arg=''
+if [[ "$ci" == "true" ]]; then
+ ci_arg="--ci"
+fi
+
+# Synchronize the VMR
+
+version_details_path=$(cd "$scriptroot/.."; pwd -P)/Version.Details.xml
+repo_name=$(grep -m 1 '