[arcade] Move to arcade 8.0 for main and to use new Version system (#22400)

* Flakey test fixes (#22340)

* Update Versions.props (#22341)

* Try fix package stable version

* Update to arcade net8

* try this cleanup

* fix pack

* Update Versions.targets

* try again

* Update Versions.targets

* Update Versions.props

* Xharness needs to be net9.0

* Update Versions.props

* update

* Update pack.yml

* Update Versions.targets

* Update Versions.props

* Update pack.yml

* Update pack.yml

* Update Directory.Build.props

* Update pack.yml

* Update Directory.Build.props

* Update pack.yml

* Fix paths

* Update Versions.props

* Update Versions.props

* Move to correct place

---------

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Shane Neuville <shneuvil@microsoft.com>
Rui Marinho 2024-05-15 18:32:30 +01:00 committed by GitHub
Parent: 28e9cbb6d8
Commit: af1054050d
No key found matching this signature
GPG key ID: B5690EEEBB952194
109 changed files: 4088 additions and 2900 deletions


@@ -21,7 +21,7 @@
]
},
"microsoft.dotnet.xharness.cli": {
"version": "9.0.0-prerelease.24256.1",
"version": "9.0.0-prerelease.24263.1",
"commands": [
"xharness"
]


@@ -1,5 +1,6 @@
<Project>
<Import Project="Sdk.props" Sdk="Microsoft.DotNet.Arcade.Sdk" />
<Import Condition="'$(EnvironmentBuildPropsImported)' != 'True'" Project="$(MSBuildThisFileDirectory)eng\Environment.Build.props" />
<PropertyGroup>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
@@ -25,13 +26,11 @@
<PackageProjectUrl>https://github.com/dotnet/maui</PackageProjectUrl>
<PackageRequireLicenseAcceptance>True</PackageRequireLicenseAcceptance>
<Copyright>© Microsoft Corporation. All rights reserved.</Copyright>
<PackageVersion>$(DotNetVersionBand)-dev</PackageVersion>
<PackageOutputPath>$(MauiRootDirectory)artifacts</PackageOutputPath>
<LicenseFile>$(MauiRootDirectory)LICENSE.TXT</LicenseFile>
<PackageIconFullPath>$(MauiRootDirectory)Assets\icon.png</PackageIconFullPath>
<PackageThirdPartyNoticesFile>$(MauiRootDirectory)THIRD-PARTY-NOTICES.TXT</PackageThirdPartyNoticesFile>
<DefaultPackageTags>dotnet-maui;dotnet;maui;cross-platform;ios;android;macos;maccatalyst;windows;winui;tizen</DefaultPackageTags>
<ContinuousIntegrationBuild Condition=" '$(CI)' == 'true' ">true</ContinuousIntegrationBuild>
<PublishRepositoryUrl>true</PublishRepositoryUrl>
<EmbedUntrackedSources>true</EmbedUntrackedSources>
<IncludeSymbols>true</IncludeSymbols>
@@ -181,7 +180,6 @@
<Win2DWarnNoPlatform>false</Win2DWarnNoPlatform>
</PropertyGroup>
<Import Condition="'$(EnvironmentBuildPropsImported)' != 'True'" Project="$(MSBuildThisFileDirectory)eng\Environment.Build.props" />
<Import Condition="'$(SampleProject)' != 'True' and '$(CI)' == 'true'" Project="$(MSBuildThisFileDirectory)eng\SourceLink.Build.props" />
<ItemGroup Condition="'$(MSBuildVersion)' != '' AND '$(MSBuildVersion)' &lt; '16.7.0' ">
<PackageReference Include="Microsoft.Net.Compilers.Toolset" Version="4.7.0" PrivateAssets="all" />


@@ -9,6 +9,10 @@
<CI Condition="'$(TF_BUILD)' == 'true'">true</CI>
</PropertyGroup>
<PropertyGroup>
<ContinuousIntegrationBuild Condition="'$(CI)' == 'true' OR '$(TF_BUILD)' == 'true'">true</ContinuousIntegrationBuild>
</PropertyGroup>
<!-- This is used by the libraries -->
<PropertyGroup Condition="'$(AndroidTargetFrameworks)' == ''">
<AndroidTargetFrameworks>MonoAndroid10.0;</AndroidTargetFrameworks>


@@ -1,26 +1,26 @@
<Dependencies>
<ProductDependencies>
<Dependency Name="Microsoft.DotNet.XHarness.TestRunners.Common" Version="9.0.0-prerelease.24256.1">
<Dependency Name="Microsoft.DotNet.XHarness.TestRunners.Common" Version="9.0.0-prerelease.24263.1">
<Uri>https://github.com/dotnet/xharness</Uri>
<Sha>62a8662088425e03f7ba44a4ec817c198e38d0f9</Sha>
<Sha>718b5a12f619fe4d4794b80bee5f3da38bcc6400</Sha>
</Dependency>
<Dependency Name="Microsoft.DotNet.XHarness.TestRunners.Xunit" Version="9.0.0-prerelease.24256.1">
<Dependency Name="Microsoft.DotNet.XHarness.TestRunners.Xunit" Version="9.0.0-prerelease.24263.1">
<Uri>https://github.com/dotnet/xharness</Uri>
<Sha>62a8662088425e03f7ba44a4ec817c198e38d0f9</Sha>
<Sha>718b5a12f619fe4d4794b80bee5f3da38bcc6400</Sha>
</Dependency>
<Dependency Name="Microsoft.DotNet.XHarness.CLI" Version="9.0.0-prerelease.24256.1">
<Dependency Name="Microsoft.DotNet.XHarness.CLI" Version="9.0.0-prerelease.24263.1">
<Uri>https://github.com/dotnet/xharness</Uri>
<Sha>62a8662088425e03f7ba44a4ec817c198e38d0f9</Sha>
<Sha>718b5a12f619fe4d4794b80bee5f3da38bcc6400</Sha>
</Dependency>
</ProductDependencies>
<ToolsetDependencies>
<Dependency Name="Microsoft.DotNet.Build.Tasks.Feed" Version="9.0.0-beta.24257.1">
<Dependency Name="Microsoft.DotNet.Build.Tasks.Feed" Version="8.0.0-beta.24225.1">
<Uri>https://github.com/dotnet/arcade</Uri>
<Sha>f663d5690bfe8e442a8c3e9fff9b6735014fbec4</Sha>
<Sha>67d23f4ba1813b315e7e33c71d18b63475f5c5f8</Sha>
</Dependency>
<Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="9.0.0-beta.24257.1">
<Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="8.0.0-beta.24225.1">
<Uri>https://github.com/dotnet/arcade</Uri>
<Sha>f663d5690bfe8e442a8c3e9fff9b6735014fbec4</Sha>
<Sha>67d23f4ba1813b315e7e33c71d18b63475f5c5f8</Sha>
<SourceBuild RepoName="arcade" ManagedOnly="true" />
</Dependency>
</ToolsetDependencies>


@@ -7,9 +7,9 @@
<PatchVersion>60</PatchVersion>
<SdkBandVersion>8.0.100</SdkBandVersion>
<PreReleaseVersionLabel>ci.net8</PreReleaseVersionLabel>
<PreReleaseVersionIteration>
</PreReleaseVersionIteration>
<!-- Enable to remove prerelease label. -->
<!-- Servicing builds have different characteristics for the way dependencies, baselines, and versions are handled. -->
<IsServicingBuild Condition=" '$(PreReleaseVersionLabel)' == 'servicing' ">true</IsServicingBuild>
<!-- Enable to remove prerelease label and produce stable package versions. -->
<StabilizePackageVersion Condition="'$(StabilizePackageVersion)' == ''">false</StabilizePackageVersion>
<DotNetFinalVersionKind Condition="'$(StabilizePackageVersion)' == 'true'">release</DotNetFinalVersionKind>
<WorkloadVersionSuffix Condition="'$(DotNetFinalVersionKind)' != 'release' and '$(PreReleaseVersionIteration)' == ''">-$(PreReleaseVersionLabel)</WorkloadVersionSuffix>
@@ -104,9 +104,9 @@
<_HarfBuzzSharpVersion>7.3.0</_HarfBuzzSharpVersion>
<_SkiaSharpNativeAssetsVersion>0.0.0-commit.e2c5c86249621857107c779af0f79b4d06613766.655</_SkiaSharpNativeAssetsVersion>
<MicrosoftTemplateEngineTasksVersion>7.0.114</MicrosoftTemplateEngineTasksVersion>
<MicrosoftDotNetXHarnessTestRunnersCommonVersion>9.0.0-prerelease.24256.1</MicrosoftDotNetXHarnessTestRunnersCommonVersion>
<MicrosoftDotNetXHarnessTestRunnersXunitVersion>9.0.0-prerelease.24256.1</MicrosoftDotNetXHarnessTestRunnersXunitVersion>
<MicrosoftDotNetXHarnessCLIVersion>9.0.0-prerelease.24256.1</MicrosoftDotNetXHarnessCLIVersion>
<MicrosoftDotNetXHarnessTestRunnersCommonVersion>9.0.0-prerelease.24263.1</MicrosoftDotNetXHarnessTestRunnersCommonVersion>
<MicrosoftDotNetXHarnessTestRunnersXunitVersion>9.0.0-prerelease.24263.1</MicrosoftDotNetXHarnessTestRunnersXunitVersion>
<MicrosoftDotNetXHarnessCLIVersion>9.0.0-prerelease.24263.1</MicrosoftDotNetXHarnessCLIVersion>
<TizenUIExtensionsVersion>0.9.2</TizenUIExtensionsVersion>
<SvgSkiaPackageVersion>1.0.0.16</SvgSkiaPackageVersion>
<FizzlerPackageVersion>1.3.0</FizzlerPackageVersion>
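
The servicing and stabilization switches introduced in the first Versions.props hunk above reduce to a small amount of boolean logic. A hedged C# sketch, with names mirroring the MSBuild properties (the real WorkloadVersionSuffix condition additionally requires PreReleaseVersionIteration to be empty); defaults shown are the ones in the diff, not a definitive restatement of the build:

using System;

// Illustrative only: variable names mirror the MSBuild properties in the diff above.
string preReleaseLabel = "ci.net8";                                 // PreReleaseVersionLabel
bool isServicingBuild = preReleaseLabel == "servicing";             // IsServicingBuild
bool stabilizePackageVersion = false;                               // StabilizePackageVersion (opt-in at build time)
string finalVersionKind = stabilizePackageVersion ? "release" : ""; // DotNetFinalVersionKind
string workloadVersionSuffix = finalVersionKind != "release" ? $"-{preReleaseLabel}" : "";
Console.WriteLine($"{isServicingBuild} {workloadVersionSuffix}");   // e.g. "False -ci.net8"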


@@ -1,12 +1,7 @@
<Project>
<PropertyGroup>
<NightlyTag>nightly</NightlyTag>
<RegexTag>[a-z]+\.[0-9]</RegexTag>
</PropertyGroup>
<PropertyGroup>
<GitBranch Condition="'$(SYSTEM_PULLREQUEST_TARGETBRANCH)' != ''">$(SYSTEM_PULLREQUEST_TARGETBRANCH)</GitBranch>
<GitBranch Condition="'$(SYSTEM_PULLREQUEST_TARGETBRANCH)' == '' and '$(BUILD_SOURCEBRANCHNAME)' != ''">$(BUILD_SOURCEBRANCHNAME)</GitBranch>
</PropertyGroup>
<PropertyGroup>
<CoreCompileDependsOn>
@@ -32,13 +27,11 @@
<Target Name="SetVersions" BeforeTargets="$(SetVersionsBefore)" DependsOnTargets="$(SetVersionsDependsOn)" Returns="$(Version)">
<PropertyGroup>
<SemVerLabel>$([System.Text.RegularExpressions.Regex]::Match($(GitTag), $(RegexTag)))</SemVerLabel>
<GitSemVerLabel Condition=" '$(GitTag)' != '' and $([System.Text.RegularExpressions.Regex]::IsMatch('$(GitTag)', $(RegexTag))) ">$(SemVerLabel)</GitSemVerLabel>
<GitSemVerLabel Condition="$(CI) and '$(BUILD_REASON)' == 'Schedule'">$(NightlyTag)</GitSemVerLabel>
<GitSemVerDashLabel Condition="'$(PreReleaseVersionLabel)' != ''" >-$(PreReleaseVersionLabel)</GitSemVerDashLabel>
<PreReleaseVersionLabel Condition="$(CI) and '$(BUILD_REASON)' == 'Schedule'">$(NightlyTag)</PreReleaseVersionLabel>
</PropertyGroup>
<ItemGroup>
<VersionMetadata Condition="$(CI) and '$(BUILD_REASON)' == 'PullRequest'"
Include="pr.$(SYSTEM_PULLREQUEST_PULLREQUESTNUMBER)"/>
@@ -49,21 +42,9 @@
</ItemGroup>
<PropertyGroup>
<VersionMetadataLabel>@(VersionMetadata -> '%(Identity)', '-')</VersionMetadataLabel>
<VersionMetadataPlusLabel Condition="'$(VersionMetadataLabel)' != ''">+$(VersionMetadataLabel)</VersionMetadataPlusLabel>
<Version>$(MajorVersion).$(MinorVersion).$(PatchVersion)</Version>
<PackageReferenceVersion>$(MajorVersion).$(MinorVersion).$(PatchVersion)$(GitSemVerDashLabel)</PackageReferenceVersion>
<PackageReferenceVersion Condition="$(CI) and '$(GitSemVerDashLabel)' != ''">$(MajorVersion).$(MinorVersion).$(PatchVersion)$(GitSemVerDashLabel).$(BUILDVERSION)</PackageReferenceVersion>
<PackageReferenceVersion Condition="$(CI) and '$(GitSemVerDashLabel)' == ''">$(MajorVersion).$(MinorVersion).$(PatchVersion)</PackageReferenceVersion>
<VSComponentVersion>$(MajorVersion).$(MinorVersion).$(GitBaseVersionPatch)</VSComponentVersion>
<VSComponentVersion Condition="$(CI) and '$(GitSemVerDashLabel)' != ''">$(MajorVersion).$(MinorVersion).$(PatchVersion).$(BUILDVERSION)</VSComponentVersion>
<VSComponentVersion Condition="$(CI) and '$(GitSemVerDashLabel)' == ''">$(MajorVersion).$(MinorVersion).$(PatchVersion).0</VSComponentVersion>
<PackageVersion>$(PackageReferenceVersion)$(VersionMetadataPlusLabel)</PackageVersion>
</PropertyGroup>
<PropertyGroup>
<InformationalVersion>$(PackageVersion)</InformationalVersion>
<FileVersion>$(Version).0</FileVersion>
<PackageReferenceVersion>$(PackageVersion)</PackageReferenceVersion>
<VSComponentVersion Condition="'$(PreReleaseVersionLabel)' != ''">$(MajorVersion).$(MinorVersion).$(PatchVersion).$(BUILDVERSION)</VSComponentVersion>
<VSComponentVersion Condition="'$(PreReleaseVersionLabel)' == ''">$(MajorVersion).$(MinorVersion).$(PatchVersion).0</VSComponentVersion>
</PropertyGroup>
<Message Condition="$(CI) and '$(BUILD_REASON)' == 'Schedule'" Importance="high" Text="##vso[build.addbuildtag]$(NightlyTag)"/>
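
To make the trimmed SetVersions target easier to follow, here is a hedged C# sketch of how the remaining properties compose. All concrete values are made-up examples, and PackageVersion itself is now produced by Arcade from OfficialBuildId plus PreReleaseVersionLabel rather than by this target:

using System;

// Illustrative only: variable names mirror the MSBuild properties above.
string major = "8", minor = "0", patch = "60";      // MajorVersion / MinorVersion / PatchVersion (example values)
string buildVersion = "8545";                       // BUILDVERSION from the CI agent (example value)
string preReleaseLabel = "ci.net8";                 // PreReleaseVersionLabel; becomes "nightly" on scheduled builds
string packageVersion = $"{major}.{minor}.{patch}-{preReleaseLabel}.24263.1"; // shape only; real value comes from Arcade

string version = $"{major}.{minor}.{patch}";        // Version
string fileVersion = $"{version}.0";                // FileVersion
string packageReferenceVersion = packageVersion;    // PackageReferenceVersion now simply tracks PackageVersion
string vsComponentVersion = preReleaseLabel != ""
    ? $"{major}.{minor}.{patch}.{buildVersion}"     // prerelease: CI build number as the revision
    : $"{major}.{minor}.{patch}.0";                 // stable: fixed .0 revision

Console.WriteLine($"{packageReferenceVersion} | {fileVersion} | {vsComponentVersion}");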


@@ -9,6 +9,8 @@ var localDotnet = GetBuildVariable("workloads", "local") == "local";
var vsVersion = GetBuildVariable("VS", "");
string MSBuildExe = Argument("msbuild", EnvironmentVariable("MSBUILD_EXE", ""));
string nugetSource = Argument("nugetsource", "");
string officialBuildId = Argument("officialbuildid", "");
string testFilter = Argument("test-filter", EnvironmentVariable("TEST_FILTER"));
var arcadeBin = MakeAbsolute(new DirectoryPath("./artifacts/bin/"));
@@ -274,10 +276,16 @@ Task("dotnet-pack-maui")
var sln = "./Microsoft.Maui.Packages.slnf";
if (!IsRunningOnWindows())
sln = "./Microsoft.Maui.Packages-mac.slnf";
if(string.IsNullOrEmpty(officialBuildId))
{
officialBuildId = DateTime.UtcNow.ToString("yyyyMMdd.1");
}
RunMSBuildWithDotNet(sln, target: "Pack", properties: new Dictionary<string, string>
{
{ "SymbolPackageFormat", "snupkg" }
{ "SymbolPackageFormat", "snupkg" },
{ "OfficialBuildId", officialBuildId },
});
});
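
As a quick reference for the fallback added to the dotnet-pack-maui task above: when no officialbuildid argument is supplied, the task synthesizes an Arcade-style build id from the current UTC date. A minimal C# sketch (illustrative only, not part of the build scripts) showing the exact format:

using System;

// Mirrors the fallback in dotnet-pack-maui: the ".1" in the format string is a literal,
// so locally generated ids always carry revision 1 for a given UTC date.
string officialBuildId = DateTime.UtcNow.ToString("yyyyMMdd.1");
Console.WriteLine(officialBuildId); // e.g. 20240515.1

Presumably CI passes a real build id via --officialbuildid, so the fallback mainly matters for local packs.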


@@ -1,3 +0,0 @@
@echo off
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0build.ps1""" %*"
exit /b %ErrorLevel%


@@ -19,7 +19,6 @@ Param(
[switch] $pack,
[switch] $publish,
[switch] $clean,
[switch][Alias('pb')]$productBuild,
[switch][Alias('bl')]$binaryLog,
[switch][Alias('nobl')]$excludeCIBinarylog,
[switch] $ci,
@@ -59,7 +58,6 @@ function Print-Usage() {
Write-Host " -sign Sign build outputs"
Write-Host " -publish Publish artifacts (e.g. symbols)"
Write-Host " -clean Clean the solution"
Write-Host " -productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)"
Write-Host ""
Write-Host "Advanced settings:"
@@ -122,7 +120,6 @@ function Build {
/p:Deploy=$deploy `
/p:Test=$test `
/p:Pack=$pack `
/p:DotNetBuildRepo=$productBuild `
/p:IntegrationTest=$integrationTest `
/p:PerformanceTest=$performanceTest `
/p:Sign=$sign `


@@ -22,9 +22,6 @@ usage()
echo " --sourceBuild Source-build the solution (short: -sb)"
echo " Will additionally trigger the following actions: --restore, --build, --pack"
echo " If --configuration is not set explicitly, will also set it to 'Release'"
echo " --productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)"
echo " Will additionally trigger the following actions: --restore, --build, --pack"
echo " If --configuration is not set explicitly, will also set it to 'Release'"
echo " --rebuild Rebuild solution"
echo " --test Run all unit tests in the solution (short: -t)"
echo " --integrationTest Run all integration tests in the solution"
@@ -62,7 +59,6 @@ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
restore=false
build=false
source_build=false
product_build=false
rebuild=false
test=false
integration_test=false
@@ -109,7 +105,7 @@ while [[ $# > 0 ]]; do
-binarylog|-bl)
binary_log=true
;;
-excludecibinarylog|-nobl)
-excludeCIBinarylog|-nobl)
exclude_ci_binary_log=true
;;
-pipelineslog|-pl)
@@ -130,13 +126,6 @@ while [[ $# > 0 ]]; do
-sourcebuild|-sb)
build=true
source_build=true
product_build=true
restore=true
pack=true
;;
-productBuild|-pb)
build=true
product_build=true
restore=true
pack=true
;;
@@ -230,9 +219,7 @@ function Build {
/p:RepoRoot="$repo_root" \
/p:Restore=$restore \
/p:Build=$build \
/p:DotNetBuildRepo=$product_build \
/p:ArcadeBuildFromSource=$source_build \
/p:DotNetBuildSourceOnly=$source_build \
/p:Rebuild=$rebuild \
/p:Test=$test \
/p:Pack=$pack \


@@ -1,266 +0,0 @@
parameters:
# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
cancelTimeoutInMinutes: ''
condition: ''
container: ''
continueOnError: false
dependsOn: ''
displayName: ''
pool: ''
steps: []
strategy: ''
timeoutInMinutes: ''
variables: []
workspace: ''
templateContext: {}
# Job base template specific parameters
# See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
# publishing defaults
artifacts: ''
enableMicrobuild: false
enablePublishBuildArtifacts: false
enablePublishBuildAssets: false
enablePublishTestResults: false
enablePublishUsingPipelines: false
enableBuildRetry: false
disableComponentGovernance: ''
componentGovernanceIgnoreDirectories: ''
mergeTestResults: false
testRunTitle: ''
testResultsFormat: ''
name: ''
preSteps: []
artifactPublishSteps: []
runAsPublic: false
# Sbom related params
enableSbom: true
PackageVersion: 9.0.0
BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
# 1es specific parameters
is1ESPipeline: ''
jobs:
- job: ${{ parameters.name }}
${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
${{ if ne(parameters.condition, '') }}:
condition: ${{ parameters.condition }}
${{ if ne(parameters.container, '') }}:
container: ${{ parameters.container }}
${{ if ne(parameters.continueOnError, '') }}:
continueOnError: ${{ parameters.continueOnError }}
${{ if ne(parameters.dependsOn, '') }}:
dependsOn: ${{ parameters.dependsOn }}
${{ if ne(parameters.displayName, '') }}:
displayName: ${{ parameters.displayName }}
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if ne(parameters.strategy, '') }}:
strategy: ${{ parameters.strategy }}
${{ if ne(parameters.timeoutInMinutes, '') }}:
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
${{ if ne(parameters.templateContext, '') }}:
templateContext: ${{ parameters.templateContext }}
variables:
- ${{ if ne(parameters.enableTelemetry, 'false') }}:
- name: DOTNET_CLI_TELEMETRY_PROFILE
value: '$(Build.Repository.Uri)'
- ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- name: EnableRichCodeNavigation
value: 'true'
# Retry signature validation up to three times, waiting 2 seconds between attempts.
# See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
value: 3,2000
- ${{ each variable in parameters.variables }}:
# handle name-value variable syntax
# example:
# - name: [key]
# value: [value]
- ${{ if ne(variable.name, '') }}:
- name: ${{ variable.name }}
value: ${{ variable.value }}
# handle variable groups
- ${{ if ne(variable.group, '') }}:
- group: ${{ variable.group }}
# handle template variable syntax
# example:
# - template: path/to/template.yml
# parameters:
# [key]: [value]
- ${{ if ne(variable.template, '') }}:
- template: ${{ variable.template }}
${{ if ne(variable.parameters, '') }}:
parameters: ${{ variable.parameters }}
# handle key-value variable syntax.
# example:
# - [key]: [value]
- ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
- ${{ each pair in variable }}:
- name: ${{ pair.key }}
value: ${{ pair.value }}
# DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- group: DotNet-HelixApi-Access
${{ if ne(parameters.workspace, '') }}:
workspace: ${{ parameters.workspace }}
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- ${{ if ne(parameters.preSteps, '') }}:
- ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- task: MicroBuildSigningPlugin@4
displayName: Install MicroBuild plugin
inputs:
signType: $(_SignType)
zipSources: false
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
env:
TeamName: $(_TeamName)
MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)'
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- task: NuGetAuthenticate@1
- ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- task: DownloadPipelineArtifact@2
inputs:
buildType: current
artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
- ${{ each step in parameters.steps }}:
- ${{ step }}
- ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- task: RichCodeNavIndexer@0
displayName: RichCodeNav Upload
inputs:
languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
continueOnError: true
- template: /eng/common/core-templates/steps/component-governance.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
${{ if eq(parameters.disableComponentGovernance, '') }}:
${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
disableComponentGovernance: false
${{ else }}:
disableComponentGovernance: true
${{ else }}:
disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildCleanup@1
displayName: Execute Microbuild cleanup tasks
condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
env:
TeamName: $(_TeamName)
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- template: /eng/common/core-templates/steps/generate-sbom.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
PackageVersion: ${{ parameters.packageVersion}}
BuildDropPath: ${{ parameters.buildDropPath }}
IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
publishArtifacts: false
# Publish test results
- ${{ if and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')) }}:
- ${{ if eq(parameters.testResultsFormat, 'xunit') }}:
- task: PublishTestResults@2
displayName: Publish XUnit Test Results
inputs:
testResultsFormat: 'xUnit'
testResultsFiles: '*.xml'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true
condition: always()
- ${{ if eq(parameters.testResultsFormat, 'vstest') }}:
- task: PublishTestResults@2
displayName: Publish TRX Test Results
inputs:
testResultsFormat: 'VSTest'
testResultsFiles: '*.trx'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true
condition: always()
# gather artifacts
- ${{ if ne(parameters.artifacts.publish, '') }}:
- ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- task: CopyFiles@2
displayName: Gather binaries for publish to artifacts
inputs:
SourceFolder: 'artifacts/bin'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- task: CopyFiles@2
displayName: Gather packages for publish to artifacts
inputs:
SourceFolder: 'artifacts/packages'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- task: CopyFiles@2
displayName: Gather logs for publish to artifacts
inputs:
SourceFolder: 'artifacts/log'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log'
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- task: CopyFiles@2
displayName: Gather logs for publish to artifacts
inputs:
SourceFolder: 'artifacts/log/$(_BuildConfig)'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
- ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- task: CopyFiles@2
displayName: Gather buildconfiguration for build retry
inputs:
SourceFolder: '$(Build.SourcesDirectory)/eng/common/BuildConfiguration'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration'
- ${{ each step in parameters.artifactPublishSteps }}:
- ${{ step }}


@@ -1,121 +0,0 @@
parameters:
# Optional: dependencies of the job
dependsOn: ''
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
pool: ''
CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
SourcesDirectory: $(Build.SourcesDirectory)
CreatePr: true
AutoCompletePr: false
ReusePr: true
UseLfLineEndings: true
UseCheckedInLocProjectJson: false
SkipLocProjectJsonGeneration: false
LanguageSet: VS_Main_Languages
LclSource: lclFilesInRepo
LclPackageId: ''
RepoType: gitHub
GitHubOrg: dotnet
MirrorRepo: ''
MirrorBranch: main
condition: ''
JobNameSuffix: ''
is1ESPipeline: ''
jobs:
- job: OneLocBuild${{ parameters.JobNameSuffix }}
dependsOn: ${{ parameters.dependsOn }}
displayName: OneLocBuild${{ parameters.JobNameSuffix }}
variables:
- group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
- name: _GenerateLocProjectArguments
value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
-LanguageSet "${{ parameters.LanguageSet }}"
-CreateNeutralXlfs
- ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
- name: _GenerateLocProjectArguments
value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
- template: /eng/common/core-templates/variables/pool-providers.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if eq(parameters.pool, '') }}:
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2022
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: $(DncEngInternalBuildPool)
image: 1es-windows-2022
os: windows
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- task: Powershell@2
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
arguments: $(_GenerateLocProjectArguments)
displayName: Generate LocProject.json
condition: ${{ parameters.condition }}
- task: OneLocBuild@2
displayName: OneLocBuild
env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
inputs:
locProj: eng/Localize/LocProject.json
outDir: $(Build.ArtifactStagingDirectory)
lclSource: ${{ parameters.LclSource }}
lclPackageId: ${{ parameters.LclPackageId }}
isCreatePrSelected: ${{ parameters.CreatePr }}
isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
${{ if eq(parameters.CreatePr, true) }}:
isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
${{ if eq(parameters.RepoType, 'gitHub') }}:
isShouldReusePrSelected: ${{ parameters.ReusePr }}
packageSourceAuth: patAuth
patVariable: ${{ parameters.CeapexPat }}
${{ if eq(parameters.RepoType, 'gitHub') }}:
repoType: ${{ parameters.RepoType }}
gitHubPatVariable: "${{ parameters.GithubPat }}"
${{ if ne(parameters.MirrorRepo, '') }}:
isMirrorRepoSelected: true
gitHubOrganization: ${{ parameters.GitHubOrg }}
mirrorRepo: ${{ parameters.MirrorRepo }}
mirrorBranch: ${{ parameters.MirrorBranch }}
condition: ${{ parameters.condition }}
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
displayName: Publish Localization Files
pathToPublish: '$(Build.ArtifactStagingDirectory)/loc'
publishLocation: Container
artifactName: Loc
condition: ${{ parameters.condition }}
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
displayName: Publish LocProject.json
pathToPublish: '$(Build.SourcesDirectory)/eng/Localize/'
publishLocation: Container
artifactName: Loc
condition: ${{ parameters.condition }}


@@ -1,172 +0,0 @@
parameters:
configuration: 'Debug'
# Optional: condition for the job to run
condition: ''
# Optional: 'true' if future jobs should run even if this job fails
continueOnError: false
# Optional: dependencies of the job
dependsOn: ''
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
pool: {}
# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
# Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
publishUsingPipelines: false
# Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
publishAssetsImmediately: false
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
is1ESPipeline: ''
jobs:
- job: Asset_Registry_Publish
dependsOn: ${{ parameters.dependsOn }}
timeoutInMinutes: 150
${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
displayName: Publish Assets
${{ else }}:
displayName: Publish to Build Asset Registry
variables:
- template: /eng/common/core-templates/variables/pool-providers.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- group: Publish-Build-Assets
- group: AzureDevOps-Artifact-Feeds-Pats
- name: runCodesignValidationInjection
value: false
# unconditional - needed for logs publishing (redactor tool version)
- template: /eng/common/core-templates/post-build/common-variables.yml
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2022
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: NetCore1ESPool-Publishing-Internal
image: windows.vs2019.amd64
os: windows
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- checkout: self
fetchDepth: 3
clean: true
- task: DownloadBuildArtifacts@0
displayName: Download artifact
inputs:
artifactName: AssetManifests
downloadPath: '$(Build.StagingDirectory)/Download'
checkDownloadedFiles: true
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: NuGetAuthenticate@1
- task: PowerShell@2
displayName: Publish Build Assets
inputs:
filePath: eng\common\sdk-task.ps1
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
/p:MaestroApiEndpoint=https://maestro.dot.net
/p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
/p:OfficialBuildId=$(Build.BuildNumber)
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: powershell@2
displayName: Create ReleaseConfigs Artifact
inputs:
targetType: inline
script: |
New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force
$filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt"
Add-Content -Path $filePath -Value $(BARBuildId)
Add-Content -Path $filePath -Value "$(DefaultChannels)"
Add-Content -Path $filePath -Value $(IsStableBuild)
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
displayName: Publish ReleaseConfigs Artifact
pathToPublish: '$(Build.StagingDirectory)/ReleaseConfigs'
publishLocation: Container
artifactName: ReleaseConfigs
- task: powershell@2
displayName: Check if SymbolPublishingExclusionsFile.txt exists
inputs:
targetType: inline
script: |
$symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
if(Test-Path -Path $symbolExclusionfile)
{
Write-Host "SymbolExclusionFile exists"
Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
}
else{
Write-Host "Symbols Exclusion file does not exist"
Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
}
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
displayName: Publish SymbolPublishingExclusionsFile Artifact
condition: eq(variables['SymbolExclusionFile'], 'true')
pathToPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
publishLocation: Container
artifactName: ReleaseConfigs
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
is1ESPipeline: ${{ parameters.is1ESPipeline }}
- task: PowerShell@2
displayName: Publish Using Darc
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: -BuildId $(BARBuildId)
-PublishingInfraVersion 3
-AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
-MaestroToken '$(MaestroApiAccessToken)'
-WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- template: /eng/common/core-templates/steps/publish-logs.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
JobLabel: 'Publish_Artifacts_Logs'


@@ -1,80 +0,0 @@
parameters:
# This template adds arcade-powered source-build to CI. The template produces a server job with a
# default ID 'Source_Build_Complete' to put in a dependency list if necessary.
# Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
jobNamePrefix: 'Source_Build'
# Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
# managed-only repositories. This is an object with these properties:
#
# name: ''
# The name of the job. This is included in the job ID.
# targetRID: ''
# The name of the target RID to use, instead of the one auto-detected by Arcade.
# nonPortable: false
# Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
# linux-x64), and compiling against distro-provided packages rather than portable ones.
# skipPublishValidation: false
# Disables publishing validation. By default, a check is performed to ensure no packages are
# published by source-build.
# container: ''
# A container to use. Runs in docker.
# pool: {}
# A pool to use. Runs directly on an agent.
# buildScript: ''
# Specifies the build script to invoke to perform the build in the repo. The default
# './build.sh' should work for typical Arcade repositories, but this is customizable for
# difficult situations.
# jobProperties: {}
# A list of job properties to inject at the top level, for potential extensibility beyond
# container and pool.
platform: {}
is1ESPipeline: ''
jobs:
- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
displayName: Source-Build (${{ parameters.platform.name }})
${{ each property in parameters.platform.jobProperties }}:
${{ property.key }}: ${{ property.value }}
${{ if ne(parameters.platform.container, '') }}:
container: ${{ parameters.platform.container }}
${{ if eq(parameters.platform.pool, '') }}:
# The default VM host AzDO pool. This should be capable of running Docker containers: almost all
# source-build builds run in Docker, including the default managed platform.
# /eng/common/core-templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
${{ if eq(parameters.is1ESPipeline, 'true') }}:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
demands: ImageOverride -equals build.ubuntu.2004.amd64
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
image: 1es-mariner-2
os: linux
${{ else }}:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
demands: ImageOverride -equals Build.Ubuntu.2204.Amd64.Open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
demands: ImageOverride -equals Build.Ubuntu.2204.Amd64
${{ if ne(parameters.platform.pool, '') }}:
pool: ${{ parameters.platform.pool }}
workspace:
clean: all
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- template: /eng/common/core-templates/steps/source-build.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
platform: ${{ parameters.platform }}


@@ -1,91 +0,0 @@
parameters:
runAsPublic: false
sourceIndexUploadPackageVersion: 2.0.0-20240502.12
sourceIndexProcessBinlogPackageVersion: 1.0.1-20240129.2
sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
preSteps: []
binlogPath: artifacts/log/Debug/Build.binlog
condition: ''
dependsOn: ''
pool: ''
is1ESPipeline: ''
jobs:
- job: SourceIndexStage1
dependsOn: ${{ parameters.dependsOn }}
condition: ${{ parameters.condition }}
variables:
- name: SourceIndexUploadPackageVersion
value: ${{ parameters.sourceIndexUploadPackageVersion }}
- name: SourceIndexProcessBinlogPackageVersion
value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }}
- name: SourceIndexPackageSource
value: ${{ parameters.sourceIndexPackageSource }}
- name: BinlogPath
value: ${{ parameters.binlogPath }}
- template: /eng/common/core-templates/variables/pool-providers.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if eq(parameters.pool, '') }}:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $(DncEngPublicBuildPool)
image: windows.vs2022.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $(DncEngInternalBuildPool)
image: windows.vs2022.amd64
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }}
- task: UseDotNet@2
displayName: Use .NET 8 SDK
inputs:
packageType: sdk
version: 8.0.x
installationPath: $(Agent.TempDirectory)/dotnet
workingDirectory: $(Agent.TempDirectory)
- script: |
$(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
$(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
displayName: Download Tools
# Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
workingDirectory: $(Agent.TempDirectory)
- script: ${{ parameters.sourceIndexBuildCommand }}
displayName: Build Repository
- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
displayName: Process Binlog into indexable sln
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: AzureCLI@2
displayName: Get stage 1 auth token
inputs:
azureSubscription: 'SourceDotNet Stage1 Publish'
addSpnToEnvironment: true
scriptType: 'ps'
scriptLocation: 'inlineScript'
inlineScript: |
echo "##vso[task.setvariable variable=ARM_CLIENT_ID]$env:servicePrincipalId"
echo "##vso[task.setvariable variable=ARM_ID_TOKEN]$env:idToken"
echo "##vso[task.setvariable variable=ARM_TENANT_ID]$env:tenantId"
- script: |
echo "Client ID: $(ARM_CLIENT_ID)"
echo "ID Token: $(ARM_ID_TOKEN)"
echo "Tenant ID: $(ARM_TENANT_ID)"
az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN)
displayName: "Login to Azure"
- script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
displayName: Upload stage1 artifacts to source index


@@ -1,33 +0,0 @@
parameters:
# See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
continueOnError: false
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
jobs: []
# Optional: if specified, restore and use this version of Guardian instead of the default.
overrideGuardianVersion: ''
is1ESPipeline: ''
jobs:
- template: /eng/common/core-templates/jobs/jobs.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
enableMicrobuild: false
enablePublishBuildArtifacts: false
enablePublishTestResults: false
enablePublishBuildAssets: false
enablePublishUsingPipelines: false
enableTelemetry: true
variables:
- group: Publish-Build-Assets
# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
# sync with the packages.config file.
- name: DefaultGuardianVersion
value: 0.109.0
- name: GuardianPackagesConfigFile
value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
- name: GuardianVersion
value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
jobs: ${{ parameters.jobs }}


@@ -1,119 +0,0 @@
parameters:
# See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
continueOnError: false
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
# Optional: Enable publishing using release pipelines
enablePublishUsingPipelines: false
# Optional: Enable running the source-build jobs to build repo from source
enableSourceBuild: false
# Optional: Parameters for source-build template.
# See /eng/common/core-templates/jobs/source-build.yml for options
sourceBuildParameters: []
graphFileGeneration:
# Optional: Enable generating the graph files at the end of the build
enabled: false
# Optional: Include toolset dependencies in the generated graph files
includeToolset: false
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
jobs: []
# Optional: Override automatically derived dependsOn value for "publish build assets" job
publishBuildAssetsDependsOn: ''
# Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage.
publishAssetsImmediately: false
# Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
enableSourceIndex: false
sourceIndexParams: {}
artifacts: {}
is1ESPipeline: ''
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
jobs:
- ${{ each job in parameters.jobs }}:
- ${{ if eq(parameters.is1ESPipeline, 'true') }}:
- template: /eng/common/templates-official/job/job.yml
parameters:
# pass along parameters
${{ each parameter in parameters }}:
${{ if ne(parameter.key, 'jobs') }}:
${{ parameter.key }}: ${{ parameter.value }}
# pass along job properties
${{ each property in job }}:
${{ if ne(property.key, 'job') }}:
${{ property.key }}: ${{ property.value }}
name: ${{ job.job }}
- ${{ else }}:
- template: /eng/common/templates/job/job.yml
parameters:
# pass along parameters
${{ each parameter in parameters }}:
${{ if ne(parameter.key, 'jobs') }}:
${{ parameter.key }}: ${{ parameter.value }}
# pass along job properties
${{ each property in job }}:
${{ if ne(property.key, 'job') }}:
${{ property.key }}: ${{ property.value }}
name: ${{ job.job }}
- ${{ if eq(parameters.enableSourceBuild, true) }}:
- template: /eng/common/core-templates/jobs/source-build.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
allCompletedJobId: Source_Build_Complete
${{ each parameter in parameters.sourceBuildParameters }}:
${{ parameter.key }}: ${{ parameter.value }}
- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
- template: ../job/source-index-stage1.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
runAsPublic: ${{ parameters.runAsPublic }}
${{ each parameter in parameters.sourceIndexParams }}:
${{ parameter.key }}: ${{ parameter.value }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- template: ../job/publish-build-assets.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
continueOnError: ${{ parameters.continueOnError }}
dependsOn:
- ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- ${{ job.job }}
- ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.jobs }}:
- ${{ job.job }}
- ${{ if eq(parameters.enableSourceBuild, true) }}:
- Source_Build_Complete
runAsPublic: ${{ parameters.runAsPublic }}
publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}


@@ -1,50 +0,0 @@
parameters:
# This template adds arcade-powered source-build to CI. A job is created for each platform, as
# well as an optional server job that completes when all platform jobs complete.
# The name of the "join" job for all source-build platforms. If set to empty string, the job is
# not included. Existing repo pipelines can use this job depend on all source-build jobs
# completing without maintaining a separate list of every single job ID: just depend on this one
# server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
allCompletedJobId: ''
# See /eng/common/core-templates/job/source-build.yml
jobNamePrefix: 'Source_Build'
# This is the default platform provided by Arcade, intended for use by a managed-only repo.
defaultManagedPlatform:
name: 'Managed'
container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9'
# Defines the platforms on which to run build jobs. One job is created for each platform, and the
# object in this array is sent to the job template as 'platform'. If no platforms are specified,
# one job runs on 'defaultManagedPlatform'.
platforms: []
is1ESPipeline: ''
jobs:
- ${{ if ne(parameters.allCompletedJobId, '') }}:
- job: ${{ parameters.allCompletedJobId }}
displayName: Source-Build Complete
pool: server
dependsOn:
- ${{ each platform in parameters.platforms }}:
- ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- ${{ if eq(length(parameters.platforms), 0) }}:
- ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
- ${{ each platform in parameters.platforms }}:
- template: /eng/common/core-templates/job/source-build.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
jobNamePrefix: ${{ parameters.jobNamePrefix }}
platform: ${{ platform }}
- ${{ if eq(length(parameters.platforms), 0) }}:
- template: /eng/common/core-templates/job/source-build.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
jobNamePrefix: ${{ parameters.jobNamePrefix }}
platform: ${{ parameters.defaultManagedPlatform }}


@@ -1,24 +0,0 @@
variables:
- group: Publish-Build-Assets
# Whether the build is internal or not
- name: IsInternalBuild
value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
# Default Maestro++ API Endpoint and API Version
- name: MaestroApiEndPoint
value: "https://maestro.dot.net"
- name: MaestroApiAccessToken
value: $(MaestroAccessToken)
- name: MaestroApiVersion
value: "2020-02-20"
- name: SourceLinkCLIVersion
value: 3.0.0
- name: SymbolToolVersion
value: 1.0.1
- name: BinlogToolVersion
value: 1.0.11
- name: runCodesignValidationInjection
value: false


@@ -1,314 +0,0 @@
parameters:
# Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
# Publishing V1 is no longer supported
# Publishing V2 is no longer supported
# Publishing V3 is the default
- name: publishingInfraVersion
displayName: Which version of publishing should be used to promote the build definition?
type: number
default: 3
values:
- 3
- name: BARBuildId
displayName: BAR Build Id
type: number
default: 0
- name: PromoteToChannelIds
displayName: Channel to promote BARBuildId to
type: string
default: ''
- name: enableSourceLinkValidation
displayName: Enable SourceLink validation
type: boolean
default: false
- name: enableSigningValidation
displayName: Enable signing validation
type: boolean
default: true
- name: enableSymbolValidation
displayName: Enable symbol validation
type: boolean
default: false
- name: enableNugetValidation
displayName: Enable NuGet validation
type: boolean
default: true
- name: publishInstallersAndChecksums
displayName: Publish installers and checksums
type: boolean
default: true
- name: SDLValidationParameters
type: object
default:
enable: false
publishGdn: false
continueOnError: false
params: ''
artifactNames: ''
downloadArtifacts: true
# These parameters let the user customize the call to sdk-task.ps1 for publishing
# symbols & general artifacts as well as for signing validation
- name: symbolPublishingAdditionalParameters
displayName: Symbol publishing additional parameters
type: string
default: ''
- name: artifactsPublishingAdditionalParameters
displayName: Artifact publishing additional parameters
type: string
default: ''
- name: signingValidationAdditionalParameters
displayName: Signing validation additional parameters
type: string
default: ''
# Which stages should finish execution before post-build stages start
- name: validateDependsOn
type: object
default:
- build
- name: publishDependsOn
type: object
default:
- Validate
# Optional: Call asset publishing rather than running in a separate stage
- name: publishAssetsImmediately
type: boolean
default: false
- name: is1ESPipeline
type: boolean
default: false
stages:
- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- stage: Validate
dependsOn: ${{ parameters.validateDependsOn }}
displayName: Validate Build Assets
variables:
- template: /eng/common/core-templates/post-build/common-variables.yml
- template: /eng/common/core-templates/variables/pool-providers.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
jobs:
- job:
displayName: NuGet Validation
condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2022
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ else }}:
${{ if eq(parameters.is1ESPipeline, true) }}:
name: $(DncEngInternalBuildPool)
image: windows.vs2022.amd64
os: windows
${{ else }}:
name: $(DncEngInternalBuildPool)
demands: ImageOverride -equals windows.vs2022.amd64
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
is1ESPipeline: ${{ parameters.is1ESPipeline }}
- task: DownloadBuildArtifacts@0
displayName: Download Package Artifacts
inputs:
buildType: specific
buildVersionToDownload: specific
project: $(AzDOProjectName)
pipeline: $(AzDOPipelineId)
buildId: $(AzDOBuildId)
artifactName: PackageArtifacts
checkDownloadedFiles: true
- task: PowerShell@2
displayName: Validate
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
-ToolDestinationPath $(Agent.BuildDirectory)/Extract/
- job:
displayName: Signing Validation
condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2022
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ else }}:
${{ if eq(parameters.is1ESPipeline, true) }}:
name: $(DncEngInternalBuildPool)
image: 1es-windows-2022
os: windows
${{ else }}:
name: $(DncEngInternalBuildPool)
demands: ImageOverride -equals windows.vs2022.amd64
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
is1ESPipeline: ${{ parameters.is1ESPipeline }}
- task: DownloadBuildArtifacts@0
displayName: Download Package Artifacts
inputs:
buildType: specific
buildVersionToDownload: specific
project: $(AzDOProjectName)
pipeline: $(AzDOPipelineId)
buildId: $(AzDOBuildId)
artifactName: PackageArtifacts
checkDownloadedFiles: true
itemPattern: |
**
!**/Microsoft.SourceBuild.Intermediate.*.nupkg
# This is necessary whenever we want to publish/restore to an AzDO private feed
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
# otherwise it'll complain about accessing a private feed.
- task: NuGetAuthenticate@1
displayName: 'Authenticate to AzDO Feeds'
# Signing validation will optionally work with the buildmanifest file which is downloaded from
# Azure DevOps above.
- task: PowerShell@2
displayName: Validate
inputs:
filePath: eng\common\sdk-task.ps1
arguments: -task SigningValidation -restore -msbuildEngine vs
/p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
/p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
${{ parameters.signingValidationAdditionalParameters }}
- template: /eng/common/core-templates/steps/publish-logs.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
StageLabel: 'Validation'
JobLabel: 'Signing'
BinlogToolVersion: $(BinlogToolVersion)
- job:
displayName: SourceLink Validation
condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2022
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ else }}:
${{ if eq(parameters.is1ESPipeline, true) }}:
name: $(DncEngInternalBuildPool)
image: 1es-windows-2022
os: windows
${{ else }}:
name: $(DncEngInternalBuildPool)
demands: ImageOverride -equals windows.vs2022.amd64
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
is1ESPipeline: ${{ parameters.is1ESPipeline }}
- task: DownloadBuildArtifacts@0
displayName: Download Blob Artifacts
inputs:
buildType: specific
buildVersionToDownload: specific
project: $(AzDOProjectName)
pipeline: $(AzDOPipelineId)
buildId: $(AzDOBuildId)
artifactName: BlobArtifacts
checkDownloadedFiles: true
- task: PowerShell@2
displayName: Validate
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
-ExtractPath $(Agent.BuildDirectory)/Extract/
-GHRepoName $(Build.Repository.Name)
-GHCommit $(Build.SourceVersion)
-SourcelinkCliVersion $(SourceLinkCLIVersion)
continueOnError: true
- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- stage: publish_using_darc
${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
dependsOn: ${{ parameters.publishDependsOn }}
${{ else }}:
dependsOn: ${{ parameters.validateDependsOn }}
displayName: Publish using Darc
variables:
- template: /eng/common/core-templates/post-build/common-variables.yml
- template: /eng/common/core-templates/variables/pool-providers.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
jobs:
- job:
displayName: Publish Using Darc
timeoutInMinutes: 120
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2022
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ else }}:
${{ if eq(parameters.is1ESPipeline, true) }}:
name: NetCore1ESPool-Publishing-Internal
image: windows.vs2019.amd64
os: windows
${{ else }}:
name: NetCore1ESPool-Publishing-Internal
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
is1ESPipeline: ${{ parameters.is1ESPipeline }}
- task: NuGetAuthenticate@1
- task: PowerShell@2
displayName: Publish Using Darc
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: -BuildId $(BARBuildId)
-PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
-AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
-MaestroToken '$(MaestroApiAccessToken)'
-WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
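
The NuGet Validation job above conceptually does one thing: download PackageArtifacts and run a validation pass over every .nupkg. A minimal bash sketch of that idea follows; it uses dotnet nuget verify as a stand-in and is not the logic of eng/common/post-build/nuget-validation.ps1, which performs its own metadata and layout checks.

#!/usr/bin/env bash
# Minimal sketch of the NuGet validation idea (not the actual nuget-validation.ps1 logic).
# Assumes the packages were already downloaded to $PACKAGES_DIR.
set -euo pipefail

PACKAGES_DIR="${1:-./PackageArtifacts}"
failed=0

for pkg in "$PACKAGES_DIR"/*.nupkg; do
  [ -e "$pkg" ] || { echo "No packages found in $PACKAGES_DIR"; exit 1; }
  echo "Checking $pkg"
  # Basic integrity/signature check; the real script also inspects metadata and layout.
  if ! dotnet nuget verify "$pkg"; then
    echo "Validation failed for $pkg"
    failed=1
  fi
done

exit $failed
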

View file

@ -1,74 +0,0 @@
parameters:
BARBuildId: ''
PromoteToChannelIds: ''
is1ESPipeline: ''
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
- 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
- ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
- task: DownloadBuildArtifacts@0
displayName: Download Release Configs
inputs:
buildType: current
artifactName: ReleaseConfigs
checkDownloadedFiles: true
- task: PowerShell@2
name: setReleaseVars
displayName: Set Release Configs Vars
inputs:
targetType: inline
pwsh: true
script: |
try {
if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
$Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
$BarId = $Content | Select -Index 0
$Channels = $Content | Select -Index 1
$IsStableBuild = $Content | Select -Index 2
$AzureDevOpsProject = $Env:System_TeamProject
$AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
$AzureDevOpsBuildId = $Env:Build_BuildId
}
else {
$buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
$apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
$apiHeaders.Add('Accept', 'application/json')
$apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
$buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
$BarId = $Env:BARBuildId
$Channels = $Env:PromoteToMaestroChannels -split ","
$Channels = $Channels -join "]["
$Channels = "[$Channels]"
$IsStableBuild = $buildInfo.stable
$AzureDevOpsProject = $buildInfo.azureDevOpsProject
$AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
$AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
}
Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
}
catch {
Write-Host $_
Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
exit 1
}
env:
MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
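
The setReleaseVars step above has two branches; when no channel ids are passed it simply reads three lines out of ReleaseConfigs.txt (BAR build id, target channels, stable flag) and republishes them as pipeline variables. A bash sketch of just that branch, assuming the artifact layout shown above:

#!/usr/bin/env bash
# Sketch of the ReleaseConfigs.txt branch of setReleaseVars (PowerShell above), in bash.
# Assumes the artifact was downloaded to $STAGING/ReleaseConfigs/ReleaseConfigs.txt.
set -euo pipefail

STAGING="${BUILD_STAGINGDIRECTORY:-.}"
config="$STAGING/ReleaseConfigs/ReleaseConfigs.txt"

# Line 1: BAR build id, line 2: channel list, line 3: stable flag.
bar_id=$(sed -n '1p' "$config")
channels=$(sed -n '2p' "$config")
is_stable=$(sed -n '3p' "$config")

echo "##vso[task.setvariable variable=BARBuildId]$bar_id"
echo "##vso[task.setvariable variable=TargetChannels]$channels"
echo "##vso[task.setvariable variable=IsStableBuild]$is_stable"
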

View file

@ -1,13 +0,0 @@
parameters:
ChannelId: 0
steps:
- task: PowerShell@2
displayName: Triggering subscriptions
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
arguments: -SourceRepo $(Build.Repository.Uri)
-ChannelId ${{ parameters.ChannelId }}
-MaestroApiAccessToken $(MaestroAccessToken)
-MaestroApiEndPoint $(MaestroApiEndPoint)
-MaestroApiVersion $(MaestroApiVersion)

View file

@ -1,13 +0,0 @@
parameters:
ChannelId: 0
steps:
- task: PowerShell@2
displayName: Add Build to Channel
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
arguments: -BuildId $(BARBuildId)
-ChannelId ${{ parameters.ChannelId }}
-MaestroApiAccessToken $(MaestroApiAccessToken)
-MaestroApiEndPoint $(MaestroApiEndPoint)
-MaestroApiVersion $(MaestroApiVersion)

View file

@ -1,14 +0,0 @@
parameters:
disableComponentGovernance: false
componentGovernanceIgnoreDirectories: ''
is1ESPipeline: false
steps:
- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
displayName: Set skipComponentGovernanceDetection variable
- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- task: ComponentGovernanceComponentDetection@0
continueOnError: true
inputs:
ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}

View file

@ -1,54 +0,0 @@
# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
# PackageName - The name of the package this SBOM represents.
# PackageVersion - The version of the package this SBOM represents.
# ManifestDirPath - The path of the directory where the generated manifest files will be placed
# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
parameters:
PackageVersion: 9.0.0
BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
PackageName: '.NET'
ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
IgnoreDirectories: ''
sbomContinueOnError: true
is1ESPipeline: false
# disable publishArtifacts if some other step is publishing the artifacts (like job.yml).
publishArtifacts: true
steps:
- task: PowerShell@2
displayName: Prep for SBOM generation in (Non-linux)
condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
inputs:
filePath: ./eng/common/generate-sbom-prep.ps1
arguments: ${{parameters.manifestDirPath}}
# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
- script: |
chmod +x ./eng/common/generate-sbom-prep.sh
./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
displayName: Prep for SBOM generation in (Linux)
condition: eq(variables['Agent.Os'], 'Linux')
continueOnError: ${{ parameters.sbomContinueOnError }}
- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
displayName: 'Generate SBOM manifest'
continueOnError: ${{ parameters.sbomContinueOnError }}
inputs:
PackageName: ${{ parameters.packageName }}
BuildDropPath: ${{ parameters.buildDropPath }}
PackageVersion: ${{ parameters.packageVersion }}
ManifestDirPath: ${{ parameters.manifestDirPath }}
${{ if ne(parameters.IgnoreDirectories, '') }}:
AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
- ${{ if eq(parameters.publishArtifacts, 'true')}}:
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
displayName: Publish SBOM manifest
continueOnError: ${{parameters.sbomContinueOnError}}
targetPath: '${{ parameters.manifestDirPath }}'
artifactName: $(ARTIFACT_NAME)
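
The generate-sbom-prep scripts referenced above are not part of this diff; as a rough sketch under assumptions, the prep step amounts to creating the manifest directory and exposing an artifact name for the later publish step. The naming scheme below is hypothetical.

#!/usr/bin/env bash
# Hypothetical sketch of an SBOM prep step run before the ManifestGeneratorTask:
# create the manifest output directory and publish an artifact name for later steps.
set -euo pipefail

MANIFEST_DIR="${1:?usage: sbom-prep.sh <manifest-dir>}"

mkdir -p "$MANIFEST_DIR"
# Derive an artifact name; the real generate-sbom-prep scripts have their own scheme.
echo "##vso[task.setvariable variable=ARTIFACT_NAME]sbom_$(date +%Y%m%d%H%M%S)"
echo "SBOM manifests will be written to $MANIFEST_DIR"
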

View file

@ -1,20 +0,0 @@
parameters:
- name: is1ESPipeline
type: boolean
default: false
- name: args
type: object
default: {}
steps:
- ${{ if ne(parameters.is1ESPipeline, true) }}:
- template: /eng/common/templates/steps/publish-build-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
${{ each parameter in parameters.args }}:
${{ parameter.key }}: ${{ parameter.value }}
- ${{ else }}:
- template: /eng/common/templates-official/steps/publish-build-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
${{ each parameter in parameters.args }}:
${{ parameter.key }}: ${{ parameter.value }}

View file

@ -1,59 +0,0 @@
parameters:
StageLabel: ''
JobLabel: ''
CustomSensitiveDataList: ''
# A default - in case value from eng/common/core-templates/post-build/common-variables.yml is not passed
BinlogToolVersion: '1.0.11'
is1ESPipeline: false
steps:
- task: Powershell@2
displayName: Prepare Binlogs to Upload
inputs:
targetType: inline
script: |
New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
continueOnError: true
condition: always()
- task: PowerShell@2
displayName: Redact Logs
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1
# For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
# Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
# If the file exists - sensitive data for redaction will be sourced from it
# (single entry per line, lines starting with '# ' are considered comments and skipped)
arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs'
-BinlogToolVersion ${{parameters.BinlogToolVersion}}
-TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
'$(publishing-dnceng-devdiv-code-r-build-re)'
'$(MaestroAccessToken)'
'$(dn-bot-all-orgs-artifact-feeds-rw)'
'$(akams-client-id)'
'$(akams-client-secret)'
'$(microsoft-symbol-server-pat)'
'$(symweb-symbol-server-pat)'
'$(dn-bot-all-orgs-build-rw-code-rw)'
${{parameters.CustomSensitiveDataList}}
continueOnError: true
condition: always()
- task: CopyFiles@2
displayName: Gather post build logs
inputs:
SourceFolder: '$(Build.SourcesDirectory)/PostBuildLogs'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
displayName: Publish Logs
pathToPublish: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
publishLocation: Container
artifactName: PostBuildLogs
continueOnError: true
condition: always()
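
The redaction step above drives the binlog tool with an explicit token list. As a simplified, hypothetical illustration of the same idea applied only to plain-text logs, the sketch below blanks out every secret listed in a tokens file; it is not a replacement for redact-logs.ps1, which also rewrites .binlog files.

#!/usr/bin/env bash
# Simplified illustration of log redaction: replace each secret from a tokens file
# with *** in every plain-text log under $LOG_DIR. (redact-logs.ps1 also handles binlogs.)
set -euo pipefail

LOG_DIR="${1:-PostBuildLogs}"
TOKENS_FILE="${2:-eng/BinlogSecretsRedactionFile.txt}"

while IFS= read -r token; do
  # Skip blank lines and lines starting with '# ', matching the convention described above.
  case "$token" in ''|'# '*) continue ;; esac
  # Escape characters that are special to sed before substituting.
  escaped=$(printf '%s' "$token" | sed 's/[&/\]/\\&/g')
  # GNU sed shown here; BSD sed needs -i ''.
  find "$LOG_DIR" -type f -name '*.log' -exec sed -i "s/$escaped/***/g" {} +
done < "$TOKENS_FILE"
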

View file

@ -1,20 +0,0 @@
parameters:
- name: is1ESPipeline
type: boolean
default: false
- name: args
type: object
default: {}
steps:
- ${{ if ne(parameters.is1ESPipeline, true) }}:
- template: /eng/common/templates/steps/publish-pipeline-artifacts.yml
parameters:
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
- ${{ else }}:
- template: /eng/common/templates-official/steps/publish-pipeline-artifacts.yml
parameters:
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}

View file

@ -1,28 +0,0 @@
parameters:
# Optional azure devops PAT with build execute permissions for the build's organization,
# only needed if the build that should be retained ran on a different organization than
# the pipeline where this template is executing from
Token: ''
# Optional BuildId to retain, defaults to the current running build
BuildId: ''
# Azure devops Organization URI for the build in the https://dev.azure.com/<organization> format.
# Defaults to the organization the current pipeline is running on
AzdoOrgUri: '$(System.CollectionUri)'
# Azure devops project for the build. Defaults to the project the current pipeline is running on
AzdoProject: '$(System.TeamProject)'
steps:
- task: powershell@2
inputs:
targetType: 'filePath'
filePath: eng/common/retain-build.ps1
pwsh: true
arguments: >
-AzdoOrgUri: ${{parameters.AzdoOrgUri}}
-AzdoProject ${{parameters.AzdoProject}}
-Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
-BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
displayName: Enable permanent build retention
env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
BUILD_ID: $(Build.BuildId)
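
retain-build.ps1 itself is not shown in this diff. As a rough, hypothetical sketch of the underlying idea, pinning a build through the Azure DevOps REST API, the snippet below PATCHes the build's keepForever flag; the endpoint, API version, and property are assumptions, and the real script may use retention leases instead.

#!/usr/bin/env bash
# Hypothetical sketch: pin an Azure DevOps build so retention policies do not delete it.
# Endpoint and property names are assumptions; the real retain-build.ps1 may differ.
set -euo pipefail

ORG_URI="${AZDO_ORG_URI:?e.g. https://dev.azure.com/dnceng}"
PROJECT="${AZDO_PROJECT:?project name}"
BUILD_ID="${BUILD_ID:?numeric build id}"
TOKEN="${AZDO_TOKEN:?personal access token with build scope}"

curl -sS -f -X PATCH \
  -u ":$TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"keepForever": true}' \
  "$ORG_URI/$PROJECT/_apis/build/builds/$BUILD_ID?api-version=7.0"
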

View file

@ -1,93 +0,0 @@
# Please remember to update the documentation if you make changes to these parameters!
parameters:
HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY
HelixProjectArguments: '' # optional -- arguments passed to the build command
HelixConfiguration: '' # optional -- additional property attached to a job
HelixPreCommands: '' # optional -- commands to run before Helix work item execution
HelixPostCommands: '' # optional -- commands to run after Helix work item execution
WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
Creator: '' # optional -- if the build is external, use this to specify who is sending the job
DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
steps:
- powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
env:
BuildConfig: $(_BuildConfig)
HelixSource: ${{ parameters.HelixSource }}
HelixType: ${{ parameters.HelixType }}
HelixBuild: ${{ parameters.HelixBuild }}
HelixConfiguration: ${{ parameters.HelixConfiguration }}
HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
HelixAccessToken: ${{ parameters.HelixAccessToken }}
HelixPreCommands: ${{ parameters.HelixPreCommands }}
HelixPostCommands: ${{ parameters.HelixPostCommands }}
WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
WorkItemCommand: ${{ parameters.WorkItemCommand }}
WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
XUnitProjects: ${{ parameters.XUnitProjects }}
XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
HelixBaseUri: ${{ parameters.HelixBaseUri }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
- script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
env:
BuildConfig: $(_BuildConfig)
HelixSource: ${{ parameters.HelixSource }}
HelixType: ${{ parameters.HelixType }}
HelixBuild: ${{ parameters.HelixBuild }}
HelixConfiguration: ${{ parameters.HelixConfiguration }}
HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
HelixAccessToken: ${{ parameters.HelixAccessToken }}
HelixPreCommands: ${{ parameters.HelixPreCommands }}
HelixPostCommands: ${{ parameters.HelixPostCommands }}
WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
WorkItemCommand: ${{ parameters.WorkItemCommand }}
WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
XUnitProjects: ${{ parameters.XUnitProjects }}
XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
HelixBaseUri: ${{ parameters.HelixBaseUri }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
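
To make the shape of the Unix step above concrete, here is a minimal, hypothetical invocation: the template effectively exports its parameters as environment variables and then runs msbuild.sh against the Helix project with the Test target. The queue name and other values below are placeholders.

#!/usr/bin/env bash
# Minimal sketch of what the Unix send-to-helix step runs, with placeholder values.
set -euo pipefail
: "${BUILD_SOURCESDIRECTORY:?set to the repository root}"

export BuildConfig=Release
export HelixSource='pr/example'                   # must start with pr/, official/, prodcon/, or agent/
export HelixType='tests/default/'                 # must end in '/'
export HelixBuild="${BUILD_BUILDNUMBER:-12345}"
export HelixTargetQueues='Ubuntu.2204.Amd64.Open' # placeholder queue name
export Creator='example-user'                     # required for external (tokenless) jobs

"$BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh" \
  "$BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj" \
  /restore /p:TreatWarningsAsErrors=false /t:Test \
  /bl:"$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog"
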

View file

@ -1,134 +0,0 @@
parameters:
# This template adds arcade-powered source-build to CI.
# This is a 'steps' template, and is intended for advanced scenarios where the existing build
# infra has a careful build methodology that must be followed. For example, a repo
# (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
# artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
# GitHub. Using this steps template leaves room for that infra to be included.
# Defines the platform on which to run the steps. See 'eng/common/core-templates/job/source-build.yml'
# for details. The entire object is described in the 'job' template for simplicity, even though
# the usage of the properties on this object is split between the 'job' and 'steps' templates.
platform: {}
is1ESPipeline: false
steps:
# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
- script: |
set -x
df -h
# If building on the internal project, the artifact feeds variable may be available (usually only if needed)
# In that case, call the feed setup script to add internal feeds corresponding to public ones.
# In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
# This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
# changes.
internalRestoreArgs=
if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
# Temporarily work around https://github.com/dotnet/arcade/issues/7709
chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
$(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
# The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
# This only works if there is a username/email configured, which won't be the case in most CI runs.
git config --get user.email
if [ $? -ne 0 ]; then
git config user.email dn-bot@microsoft.com
git config user.name dn-bot
fi
fi
# If building on the internal project, the internal storage variable may be available (usually only if needed)
# In that case, add variables to allow the download of internal runtimes if the specified versions are not found
# in the default public locations.
internalRuntimeDownloadArgs=
if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
fi
buildConfig=Release
# Check if AzDO substitutes in a build config from a variable, and use it if so.
if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
buildConfig='$(_BuildConfig)'
fi
officialBuildArgs=
if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
fi
targetRidArgs=
if [ '${{ parameters.platform.targetRID }}' != '' ]; then
targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
fi
runtimeOsArgs=
if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
fi
baseOsArgs=
if [ '${{ parameters.platform.baseOS }}' != '' ]; then
baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
fi
publishArgs=
if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
publishArgs='--publish'
fi
assetManifestFileName=SourceBuild_RidSpecific.xml
if [ '${{ parameters.platform.name }}' != '' ]; then
assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
fi
${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
--configuration $buildConfig \
--restore --build --pack $publishArgs -bl \
$officialBuildArgs \
$internalRuntimeDownloadArgs \
$internalRestoreArgs \
$targetRidArgs \
$runtimeOsArgs \
$baseOsArgs \
/p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
/p:ArcadeBuildFromSource=true \
/p:DotNetBuildSourceOnly=true \
/p:DotNetBuildRepo=true \
/p:AssetManifestFileName=$assetManifestFileName
displayName: Build
# Upload build logs for diagnosis.
- task: CopyFiles@2
displayName: Prepare BuildLogs staging directory
inputs:
SourceFolder: '$(Build.SourcesDirectory)'
Contents: |
**/*.log
**/*.binlog
artifacts/sb/prebuilt-report/**
TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
CleanTargetFolder: true
continueOnError: true
condition: succeededOrFailed()
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
displayName: Publish BuildLogs
targetPath: '$(Build.StagingDirectory)/BuildLogs'
artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
continueOnError: true
condition: succeededOrFailed()
# Manually inject component detection so that we can ignore the source build upstream cache, which contains
# a nupkg cache of input packages (a local feed).
# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
- task: ComponentGovernanceComponentDetection@0
displayName: Component Detection (Exclude upstream cache)
inputs:
ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
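
One detail in the build script above that is easy to miss is the comparison of '$(var)' against '$''(var)': Azure DevOps expands $(var) macros before the script runs, so if the variable is not defined the two sides remain textually identical. A standalone sketch of that idiom, using a hypothetical variable name:

#!/usr/bin/env bash
# Sketch of the AzDO macro-expansion check used above. In a real pipeline the left-hand
# side is written as '$(SomeSecret)' and is replaced by the agent before the script runs;
# here EXPANDED stands in for whatever the agent substituted.
set -euo pipefail

EXPANDED="${1:-\$(SomeSecret)}"   # pass the expanded value, or leave the literal macro text

if [ "$EXPANDED" != '$''(SomeSecret)' ]; then
  echo "Variable was defined; using its value."
else
  echo "Variable was not defined; skipping the optional step."
fi
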

View file

@ -1,8 +0,0 @@
parameters:
is1ESPipeline: false
variables:
- ${{ if eq(parameters.is1ESPipeline, 'true') }}:
- template: /eng/common/templates-official/variables/pool-providers.yml
- ${{ else }}:
- template: /eng/common/templates/variables/pool-providers.yml

View file

@ -8,7 +8,7 @@ usage()
echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86"
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
echo " for alpine can be specified with version: alpineX.YY or alpineedge"
echo " for FreeBSD can be: freebsd13, freebsd14"
echo " for FreeBSD can be: freebsd12, freebsd13"
echo " for illumos can be: illumos"
echo " for Haiku can be: haiku."
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
@ -71,9 +71,9 @@ __AlpinePackages+=" krb5-dev"
__AlpinePackages+=" openssl-dev"
__AlpinePackages+=" zlib-dev"
__FreeBSDBase="13.2-RELEASE"
__FreeBSDBase="12.4-RELEASE"
__FreeBSDPkg="1.17.0"
__FreeBSDABI="13"
__FreeBSDABI="12"
__FreeBSDPackages="libunwind"
__FreeBSDPackages+=" icu"
__FreeBSDPackages+=" libinotify"
@ -142,6 +142,7 @@ while :; do
case $lowerI in
-\?|-h|--help)
usage
exit 1
;;
arm)
__BuildArch=arm
@ -181,12 +182,12 @@ while :; do
__AlpinePackages="${__AlpinePackages// lldb-dev/}"
__QEMUArch=riscv64
__UbuntuArch=riscv64
__UbuntuRepo="http://deb.debian.org/debian"
__UbuntuRepo="http://deb.debian.org/debian-ports"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
unset __LLDB_Package
if [[ -e "/usr/share/keyrings/debian-archive-keyring.gpg" ]]; then
__Keyring="--keyring /usr/share/keyrings/debian-archive-keyring.gpg --include=debian-archive-keyring"
if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then
__Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring"
fi
;;
ppc64le)
@ -228,19 +229,12 @@ while :; do
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
;;
lldb*)
version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
majorVersion="${version%%.*}"
[ -z "${version##*.*}" ] && minorVersion="${version#*.}"
if [ -z "$minorVersion" ]; then
minorVersion=0
fi
version="${lowerI/lldb/}"
parts=(${version//./ })
# for versions > 6.0, lldb has dropped the minor version
if [ "$majorVersion" -le 6 ]; then
version="$majorVersion.$minorVersion"
else
version="$majorVersion"
if [[ "${parts[0]}" -gt 6 ]]; then
version="${parts[0]}"
fi
__LLDB_Package="liblldb-${version}-dev"
@ -249,19 +243,15 @@ while :; do
unset __LLDB_Package
;;
llvm*)
version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
__LLVM_MajorVersion="${version%%.*}"
version="${lowerI/llvm/}"
parts=(${version//./ })
__LLVM_MajorVersion="${parts[0]}"
__LLVM_MinorVersion="${parts[1]}"
[ -z "${version##*.*}" ] && __LLVM_MinorVersion="${version#*.}"
if [ -z "$__LLVM_MinorVersion" ]; then
__LLVM_MinorVersion=0
# for versions > 6.0, llvm has dropped the minor version
if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then
__LLVM_MinorVersion=0;
fi
# for versions > 6.0, lldb has dropped the minor version
if [ "$__LLVM_MajorVersion" -gt 6 ]; then
__LLVM_MinorVersion=
fi
;;
xenial) # Ubuntu 16.04
if [[ "$__CodeName" != "jessie" ]]; then
@ -333,24 +323,25 @@ while :; do
alpine*)
__CodeName=alpine
__UbuntuRepo=
version="${lowerI/alpine/}"
if [[ "$lowerI" == "alpineedge" ]]; then
if [[ "$version" == "edge" ]]; then
__AlpineVersion=edge
else
version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
__AlpineMajorVersion="${version%%.*}"
__AlpineMinorVersion="${version#*.}"
__AlpineVersion="$__AlpineMajorVersion.$__AlpineMinorVersion"
parts=(${version//./ })
__AlpineMajorVersion="${parts[0]}"
__AlpineMinoVersion="${parts[1]}"
__AlpineVersion="$__AlpineMajorVersion.$__AlpineMinoVersion"
fi
;;
freebsd13)
freebsd12)
__CodeName=freebsd
__SkipUnmount=1
;;
freebsd14)
freebsd13)
__CodeName=freebsd
__FreeBSDBase="14.0-RELEASE"
__FreeBSDABI="14"
__FreeBSDBase="13.2-RELEASE"
__FreeBSDABI="13"
__SkipUnmount=1
;;
illumos)
@ -451,39 +442,13 @@ fi
mkdir -p "$__RootfsDir"
__RootfsDir="$( cd "$__RootfsDir" && pwd )"
__hasWget=
ensureDownloadTool()
{
if command -v wget &> /dev/null; then
__hasWget=1
elif command -v curl &> /dev/null; then
__hasWget=0
else
>&2 echo "ERROR: either wget or curl is required by this script."
exit 1
fi
}
if [[ "$__CodeName" == "alpine" ]]; then
__ApkToolsVersion=2.12.11
__ApkToolsSHA512SUM=53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33
__ApkToolsDir="$(mktemp -d)"
__ApkKeysDir="$(mktemp -d)"
arch="$(uname -m)"
ensureDownloadTool()
if [[ "$__hasWget" == 1 ]]; then
wget -P "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static"
else
curl -SLO --create-dirs --output-dir "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static"
fi
if [[ "$arch" == "x86_64" ]]; then
__ApkToolsSHA512SUM="53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33"
elif [[ "$arch" == "aarch64" ]]; then
__ApkToolsSHA512SUM="9e2b37ecb2b56c05dad23d379be84fd494c14bd730b620d0d576bda760588e1f2f59a7fcb2f2080577e0085f23a0ca8eadd993b4e61c2ab29549fdb71969afd0"
else
echo "WARNING: add missing hash for your host architecture. To find the value, use: 'find /tmp -name apk.static -exec sha512sum {} \;'"
fi
wget "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic//v$__ApkToolsVersion/x86_64/apk.static" -P "$__ApkToolsDir"
echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c
chmod +x "$__ApkToolsDir/apk.static"
@ -512,23 +477,20 @@ if [[ "$__CodeName" == "alpine" ]]; then
fi
# initialize DB
# shellcheck disable=SC2086
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add
if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then
# shellcheck disable=SC2086
__AlpinePackages+=" $("$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \
search 'llvm*-libs' | grep -E '^llvm' | sort | tail -1 | sed 's/-[^-]*//2g')"
search 'llvm*-libs' | sort | tail -1 | sed 's/-[^-]*//2g')"
fi
# install all packages in one go
# shellcheck disable=SC2086
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
@ -539,23 +501,12 @@ if [[ "$__CodeName" == "alpine" ]]; then
elif [[ "$__CodeName" == "freebsd" ]]; then
mkdir -p "$__RootfsDir"/usr/local/etc
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
ensureDownloadTool()
if [[ "$__hasWget" == 1 ]]; then
wget -O- "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
else
curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
fi
wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
mkdir -p "$__RootfsDir"/tmp
# get and build package manager
if [[ "$__hasWget" == 1 ]]; then
wget -O- "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
else
curl -SL "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
fi
wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# needed for install to succeed
mkdir -p "$__RootfsDir"/host/etc
@ -563,36 +514,20 @@ elif [[ "$__CodeName" == "freebsd" ]]; then
rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# install packages we need.
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update
# shellcheck disable=SC2086
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
elif [[ "$__CodeName" == "illumos" ]]; then
mkdir "$__RootfsDir/tmp"
pushd "$__RootfsDir/tmp"
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
ensureDownloadTool()
echo "Downloading sysroot."
if [[ "$__hasWget" == 1 ]]; then
wget -O- https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
else
curl -SL https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
fi
wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
echo "Building binutils. Please wait.."
if [[ "$__hasWget" == 1 ]]; then
wget -O- https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
else
curl -SL https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
fi
wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
mkdir build-binutils && cd build-binutils
../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
make -j "$JOBS" && make install && cd ..
echo "Building gcc. Please wait.."
if [[ "$__hasWget" == 1 ]]; then
wget -O- https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
else
curl -SL https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
fi
wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
CFLAGS="-fPIC"
CXXFLAGS="-fPIC"
CXXFLAGS_FOR_TARGET="-fPIC"
@ -609,11 +544,7 @@ elif [[ "$__CodeName" == "illumos" ]]; then
fi
BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All"
echo "Downloading manifest"
if [[ "$__hasWget" == 1 ]]; then
wget "$BaseUrl"
else
curl -SLO "$BaseUrl"
fi
wget "$BaseUrl"
echo "Downloading dependencies."
read -ra array <<<"$__IllumosPackages"
for package in "${array[@]}"; do
@ -621,11 +552,7 @@ elif [[ "$__CodeName" == "illumos" ]]; then
# find last occurrence of package in listing and extract its name
package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)"
echo "Resolved name '$package'"
if [[ "$__hasWget" == 1 ]]; then
wget "$BaseUrl"/"$package".tgz
else
curl -SLO "$BaseUrl"/"$package".tgz
fi
wget "$BaseUrl"/"$package".tgz
ar -x "$package".tgz
tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null
done
@ -634,17 +561,10 @@ elif [[ "$__CodeName" == "illumos" ]]; then
rm -rf "$__RootfsDir"/{tmp,+*}
mkdir -p "$__RootfsDir"/usr/include/net
mkdir -p "$__RootfsDir"/usr/include/netpacket
if [[ "$__hasWget" == 1 ]]; then
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
else
curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
fi
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
elif [[ "$__CodeName" == "haiku" ]]; then
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
@ -654,16 +574,9 @@ elif [[ "$__CodeName" == "haiku" ]]; then
mkdir "$__RootfsDir/tmp/download"
ensureDownloadTool()
echo "Downloading Haiku package tool"
git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 "$__RootfsDir/tmp/script"
if [[ "$__hasWget" == 1 ]]; then
wget -O "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)"
else
curl -SLo "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)"
fi
git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 $__RootfsDir/tmp/script
wget -O "$__RootfsDir/tmp/download/hosttools.zip" $($__RootfsDir/tmp/script/fetch.sh --hosttools)
unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin"
DepotBaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg"
@ -676,25 +589,14 @@ elif [[ "$__CodeName" == "haiku" ]]; then
echo "Downloading $package..."
# API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60
# The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598
if [[ "$__hasWget" == 1 ]]; then
hpkgDownloadUrl="$(wget -qO- --post-data '{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
--header 'Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
else
hpkgDownloadUrl="$(curl -sSL -XPOST --data '{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
--header 'Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
fi
hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
--header='Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
done
for package in haiku haiku_devel; do
echo "Downloading $package..."
if [[ "$__hasWget" == 1 ]]; then
hpkgVersion="$(wget -qO- "$HpkgBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
else
hpkgVersion="$(curl -sSL "$HpkgBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
fi
hpkgVersion="$(wget -qO- $HpkgBaseUrl | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
done
# Set up the sysroot
@ -707,11 +609,7 @@ elif [[ "$__CodeName" == "haiku" ]]; then
# Download buildtools
echo "Downloading Haiku buildtools"
if [[ "$__hasWget" == 1 ]]; then
wget -O "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)"
else
curl -SLo "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)"
fi
wget -O "$__RootfsDir/tmp/download/buildtools.zip" $($__RootfsDir/tmp/script/fetch.sh --buildtools --arch=$__HaikuArch)
unzip -o "$__RootfsDir/tmp/download/buildtools.zip" -d "$__RootfsDir"
# Cleaning up temporary files
@ -724,12 +622,10 @@ elif [[ -n "$__CodeName" ]]; then
__Keyring="$__Keyring --force-check-gpg"
fi
# shellcheck disable=SC2086
debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list"
chroot "$__RootfsDir" apt-get update
chroot "$__RootfsDir" apt-get -f -y install
# shellcheck disable=SC2086
chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages
chroot "$__RootfsDir" symlinks -cr /usr
chroot "$__RootfsDir" apt-get clean
@ -747,5 +643,6 @@ elif [[ "$__Tizen" == "tizen" ]]; then
ROOTFS_DIR="$__RootfsDir" "$__CrossDir/tizen-build-rootfs.sh" "$__BuildArch"
else
echo "Unsupported target platform."
usage
usage;
exit 1
fi

View file

@ -1 +1 @@
deb http://deb.debian.org/debian sid main
deb http://deb.debian.org/debian-ports sid main

View file

@ -1,9 +0,0 @@
diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900
+++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900
@@ -2,4 +2,4 @@
Use the shared library, but some functions are only in
the static library, so try that secondarily. */
OUTPUT_FORMAT(elf64-littleriscv)
-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-riscv64-lp64d.so.1 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-riscv64-lp64d.so.1 ) )

View file

@ -22,10 +22,6 @@ case "$ARCH" in
TIZEN_ARCH="x86_64"
LINK_ARCH="x86"
;;
riscv64)
TIZEN_ARCH="riscv64"
LINK_ARCH="riscv"
;;
*)
echo "Unsupported architecture for tizen: $ARCH"
exit 1
@ -62,21 +58,4 @@ rm -rf $TIZEN_TMP_DIR
echo ">>Start configuring Tizen rootfs"
ln -sfn asm-${LINK_ARCH} ./usr/include/asm
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
if [[ "$TIZEN_ARCH" == "riscv64" ]]; then
echo "Fixing broken symlinks in $PWD"
rm ./usr/lib64/libresolv.so
ln -s ../../lib64/libresolv.so.2 ./usr/lib64/libresolv.so
rm ./usr/lib64/libpthread.so
ln -s ../../lib64/libpthread.so.0 ./usr/lib64/libpthread.so
rm ./usr/lib64/libdl.so
ln -s ../../lib64/libdl.so.2 ./usr/lib64/libdl.so
rm ./usr/lib64/libutil.so
ln -s ../../lib64/libutil.so.1 ./usr/lib64/libutil.so
rm ./usr/lib64/libm.so
ln -s ../../lib64/libm.so.6 ./usr/lib64/libm.so
rm ./usr/lib64/librt.so
ln -s ../../lib64/librt.so.1 ./usr/lib64/librt.so
rm ./lib/ld-linux-riscv64-lp64d.so.1
ln -s ../lib64/ld-linux-riscv64-lp64d.so.1 ./lib/ld-linux-riscv64-lp64d.so.1
fi
echo "<<Finish configuring Tizen rootfs"

View file

@ -156,28 +156,17 @@ fetch_tizen_pkgs()
done
}
if [ "$TIZEN_ARCH" == "riscv64" ]; then
BASE="Tizen-Base-RISCV"
UNIFIED="Tizen-Unified-RISCV"
else
BASE="Tizen-Base"
UNIFIED="Tizen-Unified"
fi
Inform "Initialize ${TIZEN_ARCH} base"
fetch_tizen_pkgs_init standard $BASE
fetch_tizen_pkgs_init standard Tizen-Base
Inform "fetch common packages"
fetch_tizen_pkgs ${TIZEN_ARCH} gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
Inform "fetch coreclr packages"
fetch_tizen_pkgs ${TIZEN_ARCH} libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
if [ "$TIZEN_ARCH" != "riscv64" ]; then
fetch_tizen_pkgs ${TIZEN_ARCH} lldb lldb-devel
fi
fetch_tizen_pkgs ${TIZEN_ARCH} lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
Inform "fetch corefx packages"
fetch_tizen_pkgs ${TIZEN_ARCH} libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
Inform "Initialize standard unified"
fetch_tizen_pkgs_init standard $UNIFIED
fetch_tizen_pkgs_init standard Tizen-Unified
Inform "fetch corefx packages"
fetch_tizen_pkgs ${TIZEN_ARCH} gssdp gssdp-devel tizen-release

View file

@ -80,9 +80,6 @@ elseif(TARGET_ARCH_NAME STREQUAL "riscv64")
set(TOOLCHAIN "riscv64-alpine-linux-musl")
else()
set(TOOLCHAIN "riscv64-linux-gnu")
if(TIZEN)
set(TIZEN_TOOLCHAIN "riscv64-tizen-linux-gnu/13.1.0")
endif()
endif()
elseif(TARGET_ARCH_NAME STREQUAL "s390x")
set(CMAKE_SYSTEM_PROCESSOR s390x)
@ -147,10 +144,6 @@ if(TIZEN)
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/x86_64-tizen-linux-gnu)
endif()
if(TARGET_ARCH_NAME STREQUAL "riscv64")
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/riscv64-tizen-linux-gnu)
endif()
endif()
if(ANDROID)
@ -277,7 +270,7 @@ if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
endif()
elseif(TARGET_ARCH_NAME MATCHES "^(arm64|x64|riscv64)$")
elseif(TARGET_ARCH_NAME MATCHES "^(arm64|x64)$")
if(TIZEN)
add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64")
@ -288,8 +281,6 @@ elseif(TARGET_ARCH_NAME MATCHES "^(arm64|x64|riscv64)$")
add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64")
add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "s390x")
add_toolchain_linker_flag("--target=${TOOLCHAIN}")
elseif(TARGET_ARCH_NAME STREQUAL "x86")
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl)
add_toolchain_linker_flag("--target=${TOOLCHAIN}")
@ -337,8 +328,6 @@ if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
if(TARGET_ARCH_NAME STREQUAL "armel")
add_compile_options(-mfloat-abi=softfp)
endif()
elseif(TARGET_ARCH_NAME STREQUAL "s390x")
add_compile_options("--target=${TOOLCHAIN}")
elseif(TARGET_ARCH_NAME STREQUAL "x86")
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl)
add_compile_options(--target=${TOOLCHAIN})
@ -382,26 +371,6 @@ if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$")
endif()
endif()
# Set C++ standard library options if specified
set(CLR_CMAKE_CXX_STANDARD_LIBRARY "" CACHE STRING "Standard library flavor to link against. Only supported with the Clang compiler.")
if (CLR_CMAKE_CXX_STANDARD_LIBRARY)
add_compile_options($<$<COMPILE_LANG_AND_ID:CXX,Clang>:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>)
add_link_options($<$<LINK_LANG_AND_ID:CXX,Clang>:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>)
endif()
option(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC "Statically link against the C++ standard library" OFF)
if(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC)
add_link_options($<$<LINK_LANGUAGE:CXX>:-static-libstdc++>)
endif()
set(CLR_CMAKE_CXX_ABI_LIBRARY "" CACHE STRING "C++ ABI implementation library to link against. Only supported with the Clang compiler.")
if (CLR_CMAKE_CXX_ABI_LIBRARY)
# The user may specify the ABI library with the 'lib' prefix, like 'libstdc++'. Strip the prefix here so the linker finds the right library.
string(REGEX REPLACE "^lib(.+)" "\\1" CLR_CMAKE_CXX_ABI_LIBRARY ${CLR_CMAKE_CXX_ABI_LIBRARY})
# We need to specify this as a linker-backend option as Clang will filter this option out when linking to libc++.
add_link_options("LINKER:-l${CLR_CMAKE_CXX_ABI_LIBRARY}")
endif()
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
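
The CLR_CMAKE_CXX_STANDARD_LIBRARY* cache options touched by the hunk above control which C++ standard library and ABI library a cross build links against. A hypothetical configure invocation that sets them (paths and values are placeholders):

#!/usr/bin/env bash
# Hypothetical cross-compile configure step exercising the cache options shown above.
# ROOTFS_DIR and the toolchain file path are placeholders for a real cross rootfs setup.
set -euo pipefail

export ROOTFS_DIR=/crossrootfs/arm64

cmake -S . -B artifacts/cross-build \
  -DCMAKE_TOOLCHAIN_FILE=eng/common/cross/toolchain.cmake \
  -DCLR_CMAKE_CXX_STANDARD_LIBRARY=libc++ \
  -DCLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC=ON \
  -DCLR_CMAKE_CXX_ABI_LIBRARY=libc++abi
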

View file

@ -1,4 +1,3 @@
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. -->
<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">
<PropertyGroup>

View file

@ -1,6 +1,4 @@
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. -->
<Project>
<Import Project="Sdk.props" Sdk="Microsoft.DotNet.Arcade.Sdk" />
</Project>

View file

@ -1,6 +1,5 @@
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net472</TargetFramework>
<ImportDirectoryBuildTargets>false</ImportDirectoryBuildTargets>
@ -28,5 +27,4 @@
<!-- Repository extensibility point -->
<Import Project="$(RepositoryEngineeringDir)InternalTools.props" Condition="Exists('$(RepositoryEngineeringDir)InternalTools.props')" />
</Project>

View file

@ -64,7 +64,7 @@ if [ -z "$CLR_CC" ]; then
if [ -z "$majorVersion" ]; then
# note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero.
if [ "$compiler" = "clang" ]; then versions="18 17 16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5"
elif [ "$compiler" = "gcc" ]; then versions="14 13 12 11 10 9 8 7 6 5 4.9"; fi
elif [ "$compiler" = "gcc" ]; then versions="13 12 11 10 9 8 7 6 5 4.9"; fi
for version in $versions; do
_major="${version%%.*}"
@ -125,8 +125,8 @@ if [ -z "$CC" ]; then
exit 1
fi
# Only lld version >= 9 can be considered stable. lld supports s390x starting from 18.0.
if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && ([ "$build_arch" != "s390x" ] || [ "$majorVersion" -ge 18 ]); then
# Only lld version >= 9 can be considered stable. lld doesn't support s390x.
if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && [ "$build_arch" != "s390x" ]; then
if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then
LDFLAGS="-fuse-ld=lld"
fi
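
init-compiler.sh probes a hard-coded list of versions when no explicit major version is requested. A small standalone sketch of that probing loop, with a shortened version list and without the gcc and major.minor special cases:

#!/usr/bin/env bash
# Standalone sketch of the compiler probing loop in init-compiler.sh, with a short list.
set -euo pipefail

compiler=clang
versions="18 17 16 15 14"

CC=
for version in $versions; do
  if command -v "$compiler-$version" > /dev/null 2>&1; then
    CC="$compiler-$version"
    break
  fi
done

# Fall back to an unsuffixed binary if no versioned one was found.
if [ -z "$CC" ] && command -v "$compiler" > /dev/null 2>&1; then
  CC="$compiler"
fi

if [ -z "$CC" ]; then
  >&2 echo "No usable $compiler found."
  exit 1
fi
echo "Using $CC"
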

View file

@ -1,4 +1,4 @@
#!/bin/sh
#!/usr/bin/env bash
# getNonPortableDistroRid
#
@ -11,16 +11,21 @@
# non-portable rid
getNonPortableDistroRid()
{
targetOs="$1"
targetArch="$2"
rootfsDir="$3"
nonPortableRid=""
local targetOs="$1"
local targetArch="$2"
local rootfsDir="$3"
local nonPortableRid=""
if [ "$targetOs" = "linux" ]; then
# shellcheck disable=SC1091
if [ -e "${rootfsDir}/etc/os-release" ]; then
. "${rootfsDir}/etc/os-release"
if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then
source "${rootfsDir}/etc/os-release"
if [[ "${ID}" == "rhel" || "${ID}" == "rocky" || "${ID}" == "alpine" ]]; then
# remove the last version digit
VERSION_ID="${VERSION_ID%.*}"
fi
if [[ "${VERSION_ID:-}" =~ ^([[:digit:]]|\.)+$ ]]; then
nonPortableRid="${ID}.${VERSION_ID}-${targetArch}"
else
# Rolling release distros either do not set VERSION_ID, set it as blank or
@ -28,33 +33,45 @@ getNonPortableDistroRid()
# so omit it here to be consistent with everything else.
nonPortableRid="${ID}-${targetArch}"
fi
elif [ -e "${rootfsDir}/android_platform" ]; then
# shellcheck disable=SC1091
. "${rootfsDir}/android_platform"
source "$rootfsDir"/android_platform
nonPortableRid="$RID"
fi
fi
if [ "$targetOs" = "freebsd" ]; then
# $rootfsDir can be empty. freebsd-version is a shell script and should always work.
__freebsd_major_version=$("$rootfsDir"/bin/freebsd-version | cut -d'.' -f1)
# $rootfsDir can be empty. freebsd-version is shell script and it should always work.
__freebsd_major_version=$($rootfsDir/bin/freebsd-version | { read v; echo "${v%%.*}"; })
nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}"
elif command -v getprop >/dev/null && getprop ro.product.system.model | grep -qi android; then
elif command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then
__android_sdk_version=$(getprop ro.build.version.sdk)
nonPortableRid="android.$__android_sdk_version-${targetArch}"
elif [ "$targetOs" = "illumos" ]; then
__uname_version=$(uname -v)
nonPortableRid="illumos-${targetArch}"
case "$__uname_version" in
omnios-*)
__omnios_major_version=$(echo "${__uname_version:8:2}")
nonPortableRid=omnios."$__omnios_major_version"-"$targetArch"
;;
joyent_*)
__smartos_major_version=$(echo "${__uname_version:7:4}")
nonPortableRid=smartos."$__smartos_major_version"-"$targetArch"
;;
illumos_*)
nonPortableRid=openindiana-"$targetArch"
;;
esac
elif [ "$targetOs" = "solaris" ]; then
__uname_version=$(uname -v)
__solaris_major_version=$(echo "$__uname_version" | cut -d'.' -f1)
nonPortableRid="solaris.$__solaris_major_version-${targetArch}"
__solaris_major_version=$(echo "${__uname_version%.*}")
nonPortableRid=solaris."$__solaris_major_version"-"$targetArch"
elif [ "$targetOs" = "haiku" ]; then
__uname_release="$(uname -r)"
__uname_release=$(uname -r)
nonPortableRid=haiku.r"$__uname_release"-"$targetArch"
fi
echo "$nonPortableRid" | tr '[:upper:]' '[:lower:]'
echo "$(echo $nonPortableRid | tr '[:upper:]' '[:lower:]')"
}
# initDistroRidGlobal
@ -68,23 +85,26 @@ getNonPortableDistroRid()
# None
#
# Notes:
# It is important to note that the function does not return anything, but it
# exports the following variables on success:
# __DistroRid : Non-portable rid of the target platform.
# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform.
#
# It is important to note that the function does not return anything, but it
# exports the following variables on success:
#
# __DistroRid : Non-portable rid of the target platform.
# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform.
#
initDistroRidGlobal()
{
targetOs="$1"
targetArch="$2"
rootfsDir=""
if [ $# -ge 3 ]; then
local targetOs="$1"
local targetArch="$2"
local rootfsDir=""
if [ "$#" -ge 3 ]; then
rootfsDir="$3"
fi
if [ -n "${rootfsDir}" ]; then
# We may have a cross build. Check for the existence of the rootfsDir
if [ ! -e "${rootfsDir}" ]; then
echo "Error: rootfsDir has been passed, but the location is not valid."
echo "Error rootfsDir has been passed, but the location is not valid."
exit 1
fi
fi
@ -99,7 +119,7 @@ initDistroRidGlobal()
STRINGS="$(command -v llvm-strings || true)"
fi
# Check for musl-based distros (e.g. Alpine Linux, Void Linux).
# Check for musl-based distros (e.g Alpine Linux, Void Linux).
if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl ||
( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then
__PortableTargetOS="linux-musl"

Просмотреть файл

@ -1,4 +1,4 @@
#!/bin/sh
#!/usr/bin/env bash
# Use uname to determine what the OS is.
OSName=$(uname -s | tr '[:upper:]' '[:lower:]')
@ -35,10 +35,6 @@ fi
case "$CPUName" in
arm64|aarch64)
arch=arm64
if [ "$(getconf LONG_BIT)" -lt 64 ]; then
# This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
arch=arm
fi
;;
loongarch64)
@ -54,7 +50,6 @@ case "$CPUName" in
;;
armv7l|armv8l)
# shellcheck disable=SC1091
if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then
arch=armel
else

Просмотреть файл

@ -7,7 +7,7 @@ try {
. $PSScriptRoot\post-build-utils.ps1
if ($PromoteToChannels -eq "") {
Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
ExitWithExitCode 0
}

Просмотреть файл

@ -1,81 +0,0 @@
[CmdletBinding(PositionalBinding=$False)]
param(
[Parameter(Mandatory=$true, Position=0)][string] $InputPath,
[Parameter(Mandatory=$true)][string] $BinlogToolVersion,
[Parameter(Mandatory=$false)][string] $DotnetPath,
[Parameter(Mandatory=$false)][string] $PackageFeed = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json',
# File with strings to redact - separated by newlines.
# For comments start the line with '# ' - such lines are ignored
[Parameter(Mandatory=$false)][string] $TokensFilePath,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$TokensToRedact
)
try {
. $PSScriptRoot\post-build-utils.ps1
$packageName = 'binlogtool'
$dotnet = $DotnetPath
if (!$dotnet) {
$dotnetRoot = InitializeDotNetCli -install:$true
$dotnet = "$dotnetRoot\dotnet.exe"
}
$toolList = & "$dotnet" tool list -g
if ($toolList -like "*$packageName*") {
& "$dotnet" tool uninstall $packageName -g
}
$toolPath = "$PSScriptRoot\..\..\..\.tools"
$verbosity = 'minimal'
New-Item -ItemType Directory -Force -Path $toolPath
Push-Location -Path $toolPath
try {
Write-Host "Installing Binlog redactor CLI..."
Write-Host "'$dotnet' new tool-manifest"
& "$dotnet" new tool-manifest
Write-Host "'$dotnet' tool install $packageName --local --add-source '$PackageFeed' -v $verbosity --version $BinlogToolVersion"
& "$dotnet" tool install $packageName --local --add-source "$PackageFeed" -v $verbosity --version $BinlogToolVersion
if (Test-Path $TokensFilePath) {
Write-Host "Adding additional sensitive data for redaction from file: " $TokensFilePath
$TokensToRedact += Get-Content -Path $TokensFilePath | Foreach {$_.Trim()} | Where { $_ -notmatch "^# " }
}
$optionalParams = [System.Collections.ArrayList]::new()
Foreach ($p in $TokensToRedact)
{
if($p -match '^\$\(.*\)$')
{
Write-Host ("Ignoring token {0} as it is probably unexpanded AzDO variable" -f $p)
}
elseif($p)
{
$optionalParams.Add("-p:" + $p) | Out-Null
}
}
& $dotnet binlogtool redact --input:$InputPath --recurse --in-place `
@optionalParams
if ($LastExitCode -ne 0) {
Write-PipelineTelemetryError -Category 'Redactor' -Type 'warning' -Message "Problems using Redactor tool (exit code: $LastExitCode). But ignoring them now."
}
}
finally {
Pop-Location
}
Write-Host 'done.'
}
catch {
Write-Host $_
Write-PipelineTelemetryError -Category 'Redactor' -Message "There was an error while trying to redact logs. Error: $_"
ExitWithExitCode 1
}

Просмотреть файл

@ -64,7 +64,7 @@ try {
$GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
}
if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
$GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.10.0-pre.4.0" -MemberType NoteProperty
$GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.8.1-2" -MemberType NoteProperty
}
if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
$xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true

Просмотреть файл

@ -72,4 +72,4 @@ catch {
Write-Host $_
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
ExitWithExitCode 1
}
}

Просмотреть файл

@ -1,137 +0,0 @@
# Overview
Arcade provides templates for public (`/templates`) and 1ES pipeline template (`/templates-official`) scenarios. Pipelines that are required to be managed by 1ES pipeline templates should reference `/templates-official`; all other pipelines may reference `/templates`.
## How to use
Basic guidance is:
- 1ES Pipeline Template or 1ES Microbuild template runs should reference `eng/common/templates-official`. Any internal production-grade pipeline should use these templates.
- All other runs should reference `eng/common/templates`.
See [azure-pipelines.yml](../../azure-pipelines.yml) (templates-official example) or [azure-pipelines-pr.yml](../../azure-pipelines-pr.yml) (templates example) for examples.
#### The `templateIs1ESManaged` parameter
The `templateIs1ESManaged` parameter is available on most templates and affects which of the variants is used for nested templates. See [Development Notes](#development-notes) below for more information on the `templateIs1ESManaged` parameter.
- For templates under `job/`, `jobs/`, `steps/`, or `post-build/`, this parameter must be explicitly set; see the shim sketch below for the related `is1ESPipeline` plumbing.
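The shims described under Development notes below make that choice explicit by hard-coding the `is1ESPipeline` value when they call into `core-templates`. The following is a rough sketch only: the `example.yml` path is a hypothetical placeholder, while real shims such as `job/onelocbuild.yml` elsewhere in this change follow the same shape:
``` yaml
# Hypothetical templates-official shim; the /templates twin would pass is1ESPipeline: false.
jobs:
- template: /eng/common/core-templates/job/example.yml
  parameters:
    is1ESPipeline: true

    # forward all caller-supplied parameters to the core template unchanged
    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
```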
## Multiple outputs
1ES pipeline templates impose a policy where every publish-artifact execution results in additional security scans being injected into your pipeline. When using `templates-official/jobs/jobs.yml`, Arcade reduces the number of additional security injections by gathering all publishing outputs into the [Build.ArtifactStagingDirectory](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services) and utilizing the [outputParentDirectory](https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs#multiple-outputs) feature of 1ES pipeline templates. When implementing your pipeline, you can therefore reduce the number of security scans by ensuring that publish artifacts land in `$(Build.ArtifactStagingDirectory)` and by using the 1ES-provided template context.
Example:
``` yaml
# azure-pipelines.yml
extends:
template: azure-pipelines/MicroBuild.1ES.Official.yml@MicroBuildTemplate
parameters:
stages:
- stage: build
jobs:
- template: /eng/common/templates-official/jobs/jobs.yml@self
parameters:
# 1ES makes use of outputs to reduce security task injection overhead
templateContext:
outputs:
- output: pipelineArtifact
displayName: 'Publish logs from source'
continueOnError: true
condition: always()
targetPath: $(Build.ArtifactStagingDirectory)/artifacts/log
artifactName: Logs
jobs:
- job: Windows
steps:
- script: echo "friendly neighborhood" > artifacts/marvel/spiderman.txt
# copy build outputs to artifact staging directory for publishing
- task: CopyFiles@2
displayName: Gather build output
inputs:
SourceFolder: '$(Build.SourcesDirectory)/artifacts/marvel'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel'
```
Note: Multiple outputs are ONLY applicable to 1ES PT publishing (only usable when referencing `templates-official`).
# Development notes
**Folder / file structure**
``` text
eng\common\
[templates || templates-official]\
job\
job.yml (shim + artifact publishing logic)
onelocbuild.yml (shim)
publish-build-assets.yml (shim)
source-build.yml (shim)
source-index-stage1.yml (shim)
jobs\
codeql-build.yml (shim)
jobs.yml (shim)
source-build.yml (shim)
post-build\
post-build.yml (shim)
trigger-subscription.yml (shim)
      common-variables.yml (shim)
setup-maestro-vars.yml (shim)
steps\
publish-build-artifacts.yml (logic)
publish-pipeline-artifacts.yml (logic)
add-build-channel.yml (shim)
component-governance.yml (shim)
generate-sbom.yml (shim)
publish-logs.yml (shim)
retain-build.yml (shim)
send-to-helix.yml (shim)
source-build.yml (shim)
variables\
pool-providers.yml (logic + redirect) # templates/variables/pool-providers.yml will redirect to templates-official/variables/pool-providers.yml if you are running in the internal project
sdl-variables.yml (logic)
core-templates\
job\
job.yml (logic)
onelocbuild.yml (logic)
publish-build-assets.yml (logic)
source-build.yml (logic)
source-index-stage1.yml (logic)
jobs\
codeql-build.yml (logic)
jobs.yml (logic)
source-build.yml (logic)
post-build\
      common-variables.yml (logic)
post-build.yml (logic)
setup-maestro-vars.yml (logic)
trigger-subscription.yml (logic)
steps\
add-build-to-channel.yml (logic)
component-governance.yml (logic)
generate-sbom.yml (logic)
publish-build-artifacts.yml (redirect)
publish-logs.yml (logic)
publish-pipeline-artifacts.yml (redirect)
retain-build.yml (logic)
send-to-helix.yml (logic)
source-build.yml (logic)
variables\
pool-providers.yml (redirect)
```
In the listing above, each file is designated as "shim", "logic", or "redirect".
- shim - represents a yaml file which is an intermediate step between pipeline logic and .Net Core Engineering's templates (`core-templates`) and defines the `is1ESPipeline` parameter value.
- logic - represents actual base template logic.
- redirect - represents a file in `core-templates` which redirects to the "logic" file in either `templates` or `templates-official`.
Logic for Arcade's templates lives **primarily** in the `core-templates` folder. The exceptions to that location are around artifact publishing, which is handled differently between 1ES pipeline templates and standard templates. `templates` and `templates-official` provide shim entry points which redirect to `core-templates` while also defining the `is1ESPipeline` parameter. If a shim is referenced in `templates`, then `is1ESPipeline` is set to `false`. If a shim is referenced in `templates-official`, then `is1ESPipeline` is set to `true`.
Within `templates` and `templates-official`, the templates at the "stages" and "jobs" / "job" levels have been replaced with shims. Templates at the "steps" and "variables" levels are typically too granular to be replaced with shims and instead keep logic that is directly applicable to either scenario.
Within `core-templates`, there are a handful of places where logic is dependent on which shim entry point was used. In those places, we redirect back to the respective logic file in `templates` or `templates-official`.
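For illustration, a redirect is essentially a conditional include keyed off `is1ESPipeline`. The sketch below shows only the general shape under that assumption; it is not the literal contents of any file in this change, and the `publish-build-artifacts.yml` paths are used purely as an example of a redirected step:
``` yaml
# Hypothetical shape of a core-templates redirect
parameters:
- name: is1ESPipeline
  type: boolean
  default: false

steps:
- ${{ if eq(parameters.is1ESPipeline, true) }}:
  # 1ES entry point: defer to the templates-official variant of the logic
  - template: /eng/common/templates-official/steps/publish-build-artifacts.yml
- ${{ if ne(parameters.is1ESPipeline, true) }}:
  # public entry point: defer to the templates variant of the logic
  - template: /eng/common/templates/steps/publish-build-artifacts.yml
```
This keeps callers agnostic: they always reference `core-templates`, and the flag decides which publishing implementation actually runs.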

Просмотреть файл

@ -1,62 +1,264 @@
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
parameters:
# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
cancelTimeoutInMinutes: ''
condition: ''
container: ''
continueOnError: false
dependsOn: ''
displayName: ''
pool: ''
steps: []
strategy: ''
timeoutInMinutes: ''
variables: []
workspace: ''
templateContext: ''
# Job base template specific parameters
# See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
artifacts: ''
enableMicrobuild: false
enablePublishBuildArtifacts: false
enablePublishBuildAssets: false
enablePublishTestResults: false
enablePublishUsingPipelines: false
enableBuildRetry: false
disableComponentGovernance: ''
componentGovernanceIgnoreDirectories: ''
mergeTestResults: false
testRunTitle: ''
testResultsFormat: ''
name: ''
preSteps: []
runAsPublic: false
# Sbom related params
enableSbom: true
PackageVersion: 7.0.0
BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
jobs:
- template: /eng/common/core-templates/job/job.yml
parameters:
is1ESPipeline: true
- job: ${{ parameters.name }}
# publish artifacts
# for 1ES managed templates, use the templateContext.output to handle multiple outputs.
templateContext:
outputParentDirectory: $(Build.ArtifactStagingDirectory)
outputs:
- ${{ if ne(parameters.artifacts.publish, '') }}:
- ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- output: buildArtifacts
displayName: Publish pipeline artifacts
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
condition: always()
continueOnError: true
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- output: pipelineArtifact
targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)_Attempt$(System.JobAttempt)') }}
displayName: 'Publish logs'
continueOnError: true
condition: always()
${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
- ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}:
- output: buildArtifacts
displayName: Publish Logs
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
publishLocation: Container
ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
continueOnError: true
condition: always()
${{ if ne(parameters.condition, '') }}:
condition: ${{ parameters.condition }}
- ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- output: pipelineArtifact
targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/eng/common/BuildConfiguration'
artifactName: 'BuildConfiguration'
displayName: 'Publish build retry configuration'
continueOnError: true
${{ if ne(parameters.container, '') }}:
container: ${{ parameters.container }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- output: pipelineArtifact
displayName: Publish SBOM manifest
continueOnError: true
targetPath: $(Build.ArtifactStagingDirectory)/sbom
artifactName: $(ARTIFACT_NAME)
${{ if ne(parameters.continueOnError, '') }}:
continueOnError: ${{ parameters.continueOnError }}
# add any outputs provided via root yaml
- ${{ if ne(parameters.templateContext.outputs, '') }}:
- ${{ each output in parameters.templateContext.outputs }}:
- ${{ output }}
# add any remaining templateContext properties
${{ each context in parameters.templateContext }}:
${{ if and(ne(context.key, 'outputParentDirectory'), ne(context.key, 'outputs')) }}:
${{ context.key }}: ${{ context.value }}
${{ if ne(parameters.dependsOn, '') }}:
dependsOn: ${{ parameters.dependsOn }}
${{ each parameter in parameters }}:
${{ if and(ne(parameter.key, 'templateContext'), ne(parameter.key, 'is1ESPipeline')) }}:
${{ parameter.key }}: ${{ parameter.value }}
${{ if ne(parameters.displayName, '') }}:
displayName: ${{ parameters.displayName }}
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if ne(parameters.strategy, '') }}:
strategy: ${{ parameters.strategy }}
${{ if ne(parameters.timeoutInMinutes, '') }}:
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
${{ if ne(parameters.templateContext, '') }}:
templateContext: ${{ parameters.templateContext }}
variables:
- ${{ if ne(parameters.enableTelemetry, 'false') }}:
- name: DOTNET_CLI_TELEMETRY_PROFILE
value: '$(Build.Repository.Uri)'
- ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- name: EnableRichCodeNavigation
value: 'true'
# Retry signature validation up to three times, waiting 2 seconds between attempts.
# See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
value: 3,2000
- ${{ each variable in parameters.variables }}:
# handle name-value variable syntax
# example:
# - name: [key]
# value: [value]
- ${{ if ne(variable.name, '') }}:
- name: ${{ variable.name }}
value: ${{ variable.value }}
# handle variable groups
- ${{ if ne(variable.group, '') }}:
- group: ${{ variable.group }}
# handle template variable syntax
# example:
# - template: path/to/template.yml
# parameters:
# [key]: [value]
- ${{ if ne(variable.template, '') }}:
- template: ${{ variable.template }}
${{ if ne(variable.parameters, '') }}:
parameters: ${{ variable.parameters }}
# handle key-value variable syntax.
# example:
# - [key]: [value]
- ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
- ${{ each pair in variable }}:
- name: ${{ pair.key }}
value: ${{ pair.value }}
# DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- group: DotNet-HelixApi-Access
${{ if ne(parameters.workspace, '') }}:
workspace: ${{ parameters.workspace }}
steps:
- ${{ if ne(parameters.preSteps, '') }}:
- ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- task: MicroBuildSigningPlugin@4
displayName: Install MicroBuild plugin
inputs:
signType: $(_SignType)
zipSources: false
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
env:
TeamName: $(_TeamName)
MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)'
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- task: NuGetAuthenticate@1
- ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- task: DownloadPipelineArtifact@2
inputs:
buildType: current
artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
- ${{ each step in parameters.steps }}:
- ${{ step }}
- ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- task: RichCodeNavIndexer@0
displayName: RichCodeNav Upload
inputs:
languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }}
richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
continueOnError: true
- template: /eng/common/templates-official/steps/component-governance.yml
parameters:
${{ if eq(parameters.disableComponentGovernance, '') }}:
${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
disableComponentGovernance: false
${{ else }}:
disableComponentGovernance: true
${{ else }}:
disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildCleanup@1
displayName: Execute Microbuild cleanup tasks
condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
env:
TeamName: $(_TeamName)
- ${{ if ne(parameters.artifacts.publish, '') }}:
- ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- task: CopyFiles@2
displayName: Gather binaries for publish to artifacts
inputs:
SourceFolder: 'artifacts/bin'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- task: CopyFiles@2
displayName: Gather packages for publish to artifacts
inputs:
SourceFolder: 'artifacts/packages'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- task: 1ES.PublishBuildArtifacts@1
displayName: Publish pipeline artifacts
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
PublishLocation: Container
ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
continueOnError: true
condition: always()
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- task: 1ES.PublishPipelineArtifact@1
inputs:
targetPath: 'artifacts/log'
artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
displayName: 'Publish logs'
continueOnError: true
condition: always()
- ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- task: 1ES.PublishBuildArtifacts@1
displayName: Publish Logs
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
PublishLocation: Container
ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
continueOnError: true
condition: always()
- ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
- task: PublishTestResults@2
displayName: Publish XUnit Test Results
inputs:
testResultsFormat: 'xUnit'
testResultsFiles: '*.xml'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true
condition: always()
- ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
- task: PublishTestResults@2
displayName: Publish TRX Test Results
inputs:
testResultsFormat: 'VSTest'
testResultsFiles: '*.trx'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true
condition: always()
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- template: /eng/common/templates-official/steps/generate-sbom.yml
parameters:
PackageVersion: ${{ parameters.packageVersion}}
BuildDropPath: ${{ parameters.buildDropPath }}
IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
- ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- task: 1ES.PublishPipelineArtifact@1
inputs:
targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
artifactName: 'BuildConfiguration'
displayName: 'Publish build retry configuration'
continueOnError: true

Просмотреть файл

@ -1,7 +1,112 @@
jobs:
- template: /eng/common/core-templates/job/onelocbuild.yml
parameters:
is1ESPipeline: true
parameters:
# Optional: dependencies of the job
dependsOn: ''
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
pool: ''
CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
SourcesDirectory: $(Build.SourcesDirectory)
CreatePr: true
AutoCompletePr: false
ReusePr: true
UseLfLineEndings: true
UseCheckedInLocProjectJson: false
SkipLocProjectJsonGeneration: false
LanguageSet: VS_Main_Languages
LclSource: lclFilesInRepo
LclPackageId: ''
RepoType: gitHub
GitHubOrg: dotnet
MirrorRepo: ''
MirrorBranch: main
condition: ''
JobNameSuffix: ''
jobs:
- job: OneLocBuild${{ parameters.JobNameSuffix }}
dependsOn: ${{ parameters.dependsOn }}
displayName: OneLocBuild${{ parameters.JobNameSuffix }}
variables:
- group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
- name: _GenerateLocProjectArguments
value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
-LanguageSet "${{ parameters.LanguageSet }}"
-CreateNeutralXlfs
- ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
- name: _GenerateLocProjectArguments
value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
- template: /eng/common/templates-official/variables/pool-providers.yml
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if eq(parameters.pool, '') }}:
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2022
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: $(DncEngInternalBuildPool)
image: 1es-windows-2022
os: windows
steps:
- ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- task: Powershell@2
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
arguments: $(_GenerateLocProjectArguments)
displayName: Generate LocProject.json
condition: ${{ parameters.condition }}
- task: OneLocBuild@2
displayName: OneLocBuild
env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
inputs:
locProj: eng/Localize/LocProject.json
outDir: $(Build.ArtifactStagingDirectory)
lclSource: ${{ parameters.LclSource }}
lclPackageId: ${{ parameters.LclPackageId }}
isCreatePrSelected: ${{ parameters.CreatePr }}
isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
${{ if eq(parameters.CreatePr, true) }}:
isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
${{ if eq(parameters.RepoType, 'gitHub') }}:
isShouldReusePrSelected: ${{ parameters.ReusePr }}
packageSourceAuth: patAuth
patVariable: ${{ parameters.CeapexPat }}
${{ if eq(parameters.RepoType, 'gitHub') }}:
repoType: ${{ parameters.RepoType }}
gitHubPatVariable: "${{ parameters.GithubPat }}"
${{ if ne(parameters.MirrorRepo, '') }}:
isMirrorRepoSelected: true
gitHubOrganization: ${{ parameters.GitHubOrg }}
mirrorRepo: ${{ parameters.MirrorRepo }}
mirrorBranch: ${{ parameters.MirrorBranch }}
condition: ${{ parameters.condition }}
- task: 1ES.PublishBuildArtifacts@1
displayName: Publish Localization Files
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
PublishLocation: Container
ArtifactName: Loc
condition: ${{ parameters.condition }}
- task: 1ES.PublishBuildArtifacts@1
displayName: Publish LocProject.json
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
PublishLocation: Container
ArtifactName: Loc
condition: ${{ parameters.condition }}

Просмотреть файл

@ -1,7 +1,155 @@
jobs:
- template: /eng/common/core-templates/job/publish-build-assets.yml
parameters:
is1ESPipeline: true
parameters:
configuration: 'Debug'
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
# Optional: condition for the job to run
condition: ''
# Optional: 'true' if future jobs should run even if this job fails
continueOnError: false
# Optional: dependencies of the job
dependsOn: ''
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
pool: {}
# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
# Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
publishUsingPipelines: false
# Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
publishAssetsImmediately: false
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
jobs:
- job: Asset_Registry_Publish
dependsOn: ${{ parameters.dependsOn }}
timeoutInMinutes: 150
${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
displayName: Publish Assets
${{ else }}:
displayName: Publish to Build Asset Registry
variables:
- template: /eng/common/templates-official/variables/pool-providers.yml
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- group: Publish-Build-Assets
- group: AzureDevOps-Artifact-Feeds-Pats
- name: runCodesignValidationInjection
value: false
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- template: /eng/common/templates-official/post-build/common-variables.yml
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2022
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: NetCore1ESPool-Publishing-Internal
image: windows.vs2019.amd64
os: windows
steps:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: DownloadBuildArtifacts@0
displayName: Download artifact
inputs:
artifactName: AssetManifests
downloadPath: '$(Build.StagingDirectory)/Download'
checkDownloadedFiles: true
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: NuGetAuthenticate@1
- task: PowerShell@2
displayName: Publish Build Assets
inputs:
filePath: eng\common\sdk-task.ps1
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
/p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
/p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
/p:OfficialBuildId=$(Build.BuildNumber)
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: powershell@2
displayName: Create ReleaseConfigs Artifact
inputs:
targetType: inline
script: |
New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force
$filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt"
Add-Content -Path $filePath -Value $(BARBuildId)
Add-Content -Path $filePath -Value "$(DefaultChannels)"
Add-Content -Path $filePath -Value $(IsStableBuild)
- task: 1ES.PublishBuildArtifacts@1
displayName: Publish ReleaseConfigs Artifact
inputs:
PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs'
PublishLocation: Container
ArtifactName: ReleaseConfigs
- task: powershell@2
displayName: Check if SymbolPublishingExclusionsFile.txt exists
inputs:
targetType: inline
script: |
$symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
if(Test-Path -Path $symbolExclusionfile)
{
Write-Host "SymbolExclusionFile exists"
Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
}
else{
Write-Host "Symbols Exclusion file does not exists"
Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
}
- task: 1ES.PublishBuildArtifacts@1
displayName: Publish SymbolPublishingExclusionsFile Artifact
condition: eq(variables['SymbolExclusionFile'], 'true')
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
PublishLocation: Container
ArtifactName: ReleaseConfigs
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- template: /eng/common/templates-official/post-build/setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- task: PowerShell@2
displayName: Publish Using Darc
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: -BuildId $(BARBuildId)
-PublishingInfraVersion 3
-AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
-MaestroToken '$(MaestroApiAccessToken)'
-WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- template: /eng/common/templates-official/steps/publish-logs.yml
parameters:
JobLabel: 'Publish_Artifacts_Logs'

Просмотреть файл

@ -1,7 +1,67 @@
jobs:
- template: /eng/common/core-templates/job/source-build.yml
parameters:
is1ESPipeline: true
parameters:
# This template adds arcade-powered source-build to CI. The template produces a server job with a
# default ID 'Source_Build_Complete' to put in a dependency list if necessary.
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
  # Specifies the prefix for source-build jobs added to the pipeline. Use this if disambiguation is needed.
jobNamePrefix: 'Source_Build'
# Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
# managed-only repositories. This is an object with these properties:
#
# name: ''
# The name of the job. This is included in the job ID.
# targetRID: ''
# The name of the target RID to use, instead of the one auto-detected by Arcade.
# nonPortable: false
# Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
# linux-x64), and compiling against distro-provided packages rather than portable ones.
# skipPublishValidation: false
# Disables publishing validation. By default, a check is performed to ensure no packages are
# published by source-build.
# container: ''
# A container to use. Runs in docker.
# pool: {}
# A pool to use. Runs directly on an agent.
# buildScript: ''
# Specifies the build script to invoke to perform the build in the repo. The default
# './build.sh' should work for typical Arcade repositories, but this is customizable for
# difficult situations.
# jobProperties: {}
# A list of job properties to inject at the top level, for potential extensibility beyond
# container and pool.
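  #
  # For illustration only (a sketch, not required syntax; the values mirror the defaults noted
  # above), a caller might pass something like:
  #   platform:
  #     name: 'Managed'
  #     container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
  #     buildScript: './build.sh'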
platform: {}
jobs:
- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
displayName: Source-Build (${{ parameters.platform.name }})
${{ each property in parameters.platform.jobProperties }}:
${{ property.key }}: ${{ property.value }}
${{ if ne(parameters.platform.container, '') }}:
container: ${{ parameters.platform.container }}
${{ if eq(parameters.platform.pool, '') }}:
# The default VM host AzDO pool. This should be capable of running Docker containers: almost all
# source-build builds run in Docker, including the default managed platform.
# /eng/common/templates-official/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
image: 1es-mariner-2
os: linux
${{ if ne(parameters.platform.pool, '') }}:
pool: ${{ parameters.platform.pool }}
workspace:
clean: all
steps:
- template: /eng/common/templates-official/steps/source-build.yml
parameters:
platform: ${{ parameters.platform }}

Просмотреть файл

@ -1,7 +1,68 @@
jobs:
- template: /eng/common/core-templates/job/source-index-stage1.yml
parameters:
is1ESPipeline: true
parameters:
runAsPublic: false
sourceIndexPackageVersion: 1.0.1-20230228.2
sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
preSteps: []
binlogPath: artifacts/log/Debug/Build.binlog
condition: ''
dependsOn: ''
pool: ''
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
jobs:
- job: SourceIndexStage1
dependsOn: ${{ parameters.dependsOn }}
condition: ${{ parameters.condition }}
variables:
- name: SourceIndexPackageVersion
value: ${{ parameters.sourceIndexPackageVersion }}
- name: SourceIndexPackageSource
value: ${{ parameters.sourceIndexPackageSource }}
- name: BinlogPath
value: ${{ parameters.binlogPath }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- group: source-dot-net stage1 variables
- template: /eng/common/templates-official/variables/pool-providers.yml
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if eq(parameters.pool, '') }}:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $(DncEngPublicBuildPool)
demands: ImageOverride -equals windows.vs2019.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $(DncEngInternalBuildPool)
image: windows.vs2022.amd64
os: windows
steps:
- ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }}
- task: UseDotNet@2
displayName: Use .NET Core SDK 6
inputs:
packageType: sdk
version: 6.0.x
installationPath: $(Agent.TempDirectory)/dotnet
workingDirectory: $(Agent.TempDirectory)
- script: |
$(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
$(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
displayName: Download Tools
# Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
workingDirectory: $(Agent.TempDirectory)
- script: ${{ parameters.sourceIndexBuildCommand }}
displayName: Build Repository
- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
displayName: Process Binlog into indexable sln
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
displayName: Upload stage1 artifacts to source index
env:
BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)

Просмотреть файл

@ -1,7 +1,31 @@
jobs:
- template: /eng/common/core-templates/jobs/codeql-build.yml
parameters:
is1ESPipeline: true
parameters:
# See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
continueOnError: false
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
jobs: []
# Optional: if specified, restore and use this version of Guardian instead of the default.
overrideGuardianVersion: ''
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
jobs:
- template: /eng/common/templates-official/jobs/jobs.yml
parameters:
enableMicrobuild: false
enablePublishBuildArtifacts: false
enablePublishTestResults: false
enablePublishBuildAssets: false
enablePublishUsingPipelines: false
enableTelemetry: true
variables:
- group: Publish-Build-Assets
# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
# sync with the packages.config file.
- name: DefaultGuardianVersion
value: 0.109.0
- name: GuardianPackagesConfigFile
value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
- name: GuardianVersion
value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
jobs: ${{ parameters.jobs }}

Просмотреть файл

@ -1,7 +1,97 @@
jobs:
- template: /eng/common/core-templates/jobs/jobs.yml
parameters:
is1ESPipeline: true
parameters:
# See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
continueOnError: false
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
# Optional: Enable publishing using release pipelines
enablePublishUsingPipelines: false
# Optional: Enable running the source-build jobs to build repo from source
enableSourceBuild: false
# Optional: Parameters for source-build template.
# See /eng/common/templates-official/jobs/source-build.yml for options
sourceBuildParameters: []
graphFileGeneration:
# Optional: Enable generating the graph files at the end of the build
enabled: false
# Optional: Include toolset dependencies in the generated graph files
includeToolset: false
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
jobs: []
# Optional: Override automatically derived dependsOn value for "publish build assets" job
publishBuildAssetsDependsOn: ''
  # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
publishAssetsImmediately: false
  # Optional: If publishAssetsImmediately is used and additional parameters are needed, this can be used to send along additional parameters (normally sent to post-build.yml)
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
enableSourceIndex: false
sourceIndexParams: {}
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
jobs:
- ${{ each job in parameters.jobs }}:
- template: ../job/job.yml
parameters:
# pass along parameters
${{ each parameter in parameters }}:
${{ if ne(parameter.key, 'jobs') }}:
${{ parameter.key }}: ${{ parameter.value }}
# pass along job properties
${{ each property in job }}:
${{ if ne(property.key, 'job') }}:
${{ property.key }}: ${{ property.value }}
name: ${{ job.job }}
- ${{ if eq(parameters.enableSourceBuild, true) }}:
- template: /eng/common/templates-official/jobs/source-build.yml
parameters:
allCompletedJobId: Source_Build_Complete
${{ each parameter in parameters.sourceBuildParameters }}:
${{ parameter.key }}: ${{ parameter.value }}
- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
- template: ../job/source-index-stage1.yml
parameters:
runAsPublic: ${{ parameters.runAsPublic }}
${{ each parameter in parameters.sourceIndexParams }}:
${{ parameter.key }}: ${{ parameter.value }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- template: ../job/publish-build-assets.yml
parameters:
continueOnError: ${{ parameters.continueOnError }}
dependsOn:
- ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- ${{ job.job }}
- ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.jobs }}:
- ${{ job.job }}
- ${{ if eq(parameters.enableSourceBuild, true) }}:
- Source_Build_Complete
runAsPublic: ${{ parameters.runAsPublic }}
publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}

Просмотреть файл

@ -1,7 +1,46 @@
jobs:
- template: /eng/common/core-templates/jobs/source-build.yml
parameters:
is1ESPipeline: true
parameters:
# This template adds arcade-powered source-build to CI. A job is created for each platform, as
# well as an optional server job that completes when all platform jobs complete.
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
# The name of the "join" job for all source-build platforms. If set to empty string, the job is
  # not included. Existing repo pipelines can use this job to depend on all source-build jobs
# completing without maintaining a separate list of every single job ID: just depend on this one
# server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
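  # For example, if this is set to 'Source_Build_Complete', a downstream job elsewhere in the
  # pipeline can simply declare "dependsOn: Source_Build_Complete"; that name is only the
  # recommended convention, not a requirement.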
allCompletedJobId: ''
# See /eng/common/templates-official/job/source-build.yml
jobNamePrefix: 'Source_Build'
# This is the default platform provided by Arcade, intended for use by a managed-only repo.
defaultManagedPlatform:
name: 'Managed'
container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
# Defines the platforms on which to run build jobs. One job is created for each platform, and the
# object in this array is sent to the job template as 'platform'. If no platforms are specified,
# one job runs on 'defaultManagedPlatform'.
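  # For illustration only, a caller could supply (mirroring the default managed platform above):
  #   platforms:
  #   - name: 'Managed'
  #     container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'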
platforms: []
jobs:
- ${{ if ne(parameters.allCompletedJobId, '') }}:
- job: ${{ parameters.allCompletedJobId }}
displayName: Source-Build Complete
pool: server
dependsOn:
- ${{ each platform in parameters.platforms }}:
- ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- ${{ if eq(length(parameters.platforms), 0) }}:
- ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
- ${{ each platform in parameters.platforms }}:
- template: /eng/common/templates-official/job/source-build.yml
parameters:
jobNamePrefix: ${{ parameters.jobNamePrefix }}
platform: ${{ platform }}
- ${{ if eq(length(parameters.platforms), 0) }}:
- template: /eng/common/templates-official/job/source-build.yml
parameters:
jobNamePrefix: ${{ parameters.jobNamePrefix }}
platform: ${{ parameters.defaultManagedPlatform }}

Просмотреть файл

@ -1,8 +1,22 @@
variables:
- template: /eng/common/core-templates/post-build/common-variables.yml
parameters:
# Specifies whether to use 1ES
is1ESPipeline: true
- group: Publish-Build-Assets
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
# Whether the build is internal or not
- name: IsInternalBuild
value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
# Default Maestro++ API Endpoint and API Version
- name: MaestroApiEndPoint
value: "https://maestro-prod.westus2.cloudapp.azure.com"
- name: MaestroApiAccessToken
value: $(MaestroAccessToken)
- name: MaestroApiVersion
value: "2020-02-20"
- name: SourceLinkCLIVersion
value: 3.0.0
- name: SymbolToolVersion
value: 1.0.1
- name: runCodesignValidationInjection
value: false

Просмотреть файл

@ -1,8 +1,285 @@
stages:
- template: /eng/common/core-templates/post-build/post-build.yml
parameters:
# Specifies whether to use 1ES
is1ESPipeline: true
parameters:
# Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
# Publishing V1 is no longer supported
# Publishing V2 is no longer supported
# Publishing V3 is the default
- name: publishingInfraVersion
displayName: Which version of publishing should be used to promote the build definition?
type: number
default: 3
values:
- 3
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
- name: BARBuildId
displayName: BAR Build Id
type: number
default: 0
- name: PromoteToChannelIds
displayName: Channel to promote BARBuildId to
type: string
default: ''
- name: enableSourceLinkValidation
displayName: Enable SourceLink validation
type: boolean
default: false
- name: enableSigningValidation
displayName: Enable signing validation
type: boolean
default: true
- name: enableSymbolValidation
displayName: Enable symbol validation
type: boolean
default: false
- name: enableNugetValidation
displayName: Enable NuGet validation
type: boolean
default: true
- name: publishInstallersAndChecksums
displayName: Publish installers and checksums
type: boolean
default: true
- name: SDLValidationParameters
type: object
default:
enable: false
publishGdn: false
continueOnError: false
params: ''
artifactNames: ''
downloadArtifacts: true
# These parameters let the user customize the call to sdk-task.ps1 for publishing
# symbols & general artifacts as well as for signing validation
- name: symbolPublishingAdditionalParameters
displayName: Symbol publishing additional parameters
type: string
default: ''
- name: artifactsPublishingAdditionalParameters
displayName: Artifact publishing additional parameters
type: string
default: ''
- name: signingValidationAdditionalParameters
displayName: Signing validation additional parameters
type: string
default: ''
# Which stages should finish execution before post-build stages start
- name: validateDependsOn
type: object
default:
- build
- name: publishDependsOn
type: object
default:
- Validate
# Optional: Call asset publishing rather than running in a separate stage
- name: publishAssetsImmediately
type: boolean
default: false
stages:
- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- stage: Validate
dependsOn: ${{ parameters.validateDependsOn }}
displayName: Validate Build Assets
variables:
- template: common-variables.yml
- template: /eng/common/templates-official/variables/pool-providers.yml
jobs:
- job:
displayName: NuGet Validation
condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2022
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ else }}:
name: $(DncEngInternalBuildPool)
image: 1es-windows-2022
os: windows
steps:
- template: setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- task: DownloadBuildArtifacts@0
displayName: Download Package Artifacts
inputs:
buildType: specific
buildVersionToDownload: specific
project: $(AzDOProjectName)
pipeline: $(AzDOPipelineId)
buildId: $(AzDOBuildId)
artifactName: PackageArtifacts
checkDownloadedFiles: true
- task: PowerShell@2
displayName: Validate
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
-ToolDestinationPath $(Agent.BuildDirectory)/Extract/
- job:
displayName: Signing Validation
condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2022
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ else }}:
name: $(DncEngInternalBuildPool)
image: 1es-windows-2022
os: windows
steps:
- template: setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- task: DownloadBuildArtifacts@0
displayName: Download Package Artifacts
inputs:
buildType: specific
buildVersionToDownload: specific
project: $(AzDOProjectName)
pipeline: $(AzDOPipelineId)
buildId: $(AzDOBuildId)
artifactName: PackageArtifacts
checkDownloadedFiles: true
itemPattern: |
**
!**/Microsoft.SourceBuild.Intermediate.*.nupkg
# This is necessary whenever we want to publish/restore to an AzDO private feed
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
# otherwise it'll complain about accessing a private feed.
- task: NuGetAuthenticate@1
displayName: 'Authenticate to AzDO Feeds'
# Signing validation will optionally work with the buildmanifest file which is downloaded from
# Azure DevOps above.
- task: PowerShell@2
displayName: Validate
inputs:
filePath: eng\common\sdk-task.ps1
arguments: -task SigningValidation -restore -msbuildEngine vs
/p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
/p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
${{ parameters.signingValidationAdditionalParameters }}
- template: ../steps/publish-logs.yml
parameters:
StageLabel: 'Validation'
JobLabel: 'Signing'
BinlogToolVersion: $(BinlogToolVersion)
- job:
displayName: SourceLink Validation
condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2022
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ else }}:
name: $(DncEngInternalBuildPool)
image: 1es-windows-2022
os: windows
steps:
- template: setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- task: DownloadBuildArtifacts@0
displayName: Download Blob Artifacts
inputs:
buildType: specific
buildVersionToDownload: specific
project: $(AzDOProjectName)
pipeline: $(AzDOPipelineId)
buildId: $(AzDOBuildId)
artifactName: BlobArtifacts
checkDownloadedFiles: true
- task: PowerShell@2
displayName: Validate
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
-ExtractPath $(Agent.BuildDirectory)/Extract/
-GHRepoName $(Build.Repository.Name)
-GHCommit $(Build.SourceVersion)
-SourcelinkCliVersion $(SourceLinkCLIVersion)
continueOnError: true
- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- stage: publish_using_darc
${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
dependsOn: ${{ parameters.publishDependsOn }}
${{ else }}:
dependsOn: ${{ parameters.validateDependsOn }}
displayName: Publish using Darc
variables:
- template: common-variables.yml
- template: /eng/common/templates-official/variables/pool-providers.yml
jobs:
- job:
displayName: Publish Using Darc
timeoutInMinutes: 120
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: AzurePipelines-EO
image: 1ESPT-Windows2022
demands: Cmd
os: windows
# If it's not devdiv, it's dnceng
${{ else }}:
name: NetCore1ESPool-Publishing-Internal
image: windows.vs2019.amd64
os: windows
steps:
- template: setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- task: NuGetAuthenticate@1
- task: PowerShell@2
displayName: Publish Using Darc
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: -BuildId $(BARBuildId)
-PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
-AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
-MaestroToken '$(MaestroApiAccessToken)'
-WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
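For orientation, here is a minimal sketch of how a repository pipeline might consume the post-build template above; the template path and parameter values are assumptions for illustration and are not part of this commit.

stages:
- template: /eng/common/templates-official/post-build/post-build.yml   # path assumed
  parameters:
    # Run NuGet and signing validation in the Validate stage defined above.
    enableNugetValidation: true
    enableSigningValidation: true
    enableSourceLinkValidation: false
    # Wait for the repo's build stage before validating.
    validateDependsOn:
    - build
    # Keep publishing in the separate publish_using_darc stage (the default).
    publishAssetsImmediately: false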

View file

@@ -1,8 +1,70 @@
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
# Specifies whether to use 1ES
is1ESPipeline: true
parameters:
BARBuildId: ''
PromoteToChannelIds: ''
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
steps:
- ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
- task: DownloadBuildArtifacts@0
displayName: Download Release Configs
inputs:
buildType: current
artifactName: ReleaseConfigs
checkDownloadedFiles: true
- task: PowerShell@2
name: setReleaseVars
displayName: Set Release Configs Vars
inputs:
targetType: inline
pwsh: true
script: |
try {
if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
$Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
$BarId = $Content | Select -Index 0
$Channels = $Content | Select -Index 1
$IsStableBuild = $Content | Select -Index 2
$AzureDevOpsProject = $Env:System_TeamProject
$AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
$AzureDevOpsBuildId = $Env:Build_BuildId
}
else {
$buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
$apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
$apiHeaders.Add('Accept', 'application/json')
$apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
$buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
$BarId = $Env:BARBuildId
$Channels = $Env:PromoteToMaestroChannels -split ","
$Channels = $Channels -join "]["
$Channels = "[$Channels]"
$IsStableBuild = $buildInfo.stable
$AzureDevOpsProject = $buildInfo.azureDevOpsProject
$AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
$AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
}
Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
}
catch {
Write-Host $_
Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
exit 1
}
env:
MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
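As a hedged usage sketch (template path assumed), a promotion pipeline would invoke the step template above roughly like this; leaving both parameters empty makes the script fall back to the ReleaseConfigs artifact produced by the original build.

steps:
- template: /eng/common/templates-official/post-build/setup-maestro-vars.yml   # path assumed
  parameters:
    # Set both values to promote an existing BAR build to specific Maestro channels;
    # leave them empty to read the BAR id and channels from ReleaseConfigs.txt.
    BARBuildId: ''
    PromoteToChannelIds: ''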

View file

@@ -1,7 +1,13 @@
steps:
- template: /eng/common/core-templates/steps/add-build-to-channel.yml
parameters:
is1ESPipeline: true
parameters:
ChannelId: 0
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
steps:
- task: PowerShell@2
displayName: Add Build to Channel
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
arguments: -BuildId $(BARBuildId)
-ChannelId ${{ parameters.ChannelId }}
-MaestroApiAccessToken $(MaestroApiAccessToken)
-MaestroApiEndPoint $(MaestroApiEndPoint)
-MaestroApiVersion $(MaestroApiVersion)
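A minimal sketch of calling the step above; the channel id is a placeholder and the template path is assumed.

steps:
- template: /eng/common/templates-official/steps/add-build-to-channel.yml   # path assumed
  parameters:
    # Maestro channel id to add the current BAR build to (placeholder value).
    ChannelId: 1234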

View file

@@ -0,0 +1,12 @@
# build-reason.yml
# Description: runs the given steps when the build.reason condition is met. 'conditions' is a
# comma-separated string of build reasons for which the steps should be included.
parameters:
conditions: ''
steps: []
steps:
- ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
- ${{ parameters.steps }}
- ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
- ${{ parameters.steps }}
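A small usage sketch, under the assumption that this file lives at eng/common/templates-official/steps/build-reason.yml; the wrapped step is illustrative.

steps:
- template: /eng/common/templates-official/steps/build-reason.yml   # path assumed
  parameters:
    # Only include the nested steps for scheduled or manually queued builds.
    conditions: Schedule,Manual
    steps:
    - script: echo "nightly-only maintenance work"
      displayName: Nightly-only step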

View file

@@ -1,7 +1,13 @@
steps:
- template: /eng/common/core-templates/steps/component-governance.yml
parameters:
is1ESPipeline: true
parameters:
disableComponentGovernance: false
componentGovernanceIgnoreDirectories: ''
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
steps:
- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
displayName: Set skipComponentGovernanceDetection variable
- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- task: ComponentGovernanceComponentDetection@0
continueOnError: true
inputs:
ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
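A hedged example of wiring up the component-governance wrapper above; the path is assumed and the ignore directory is illustrative.

steps:
- template: /eng/common/templates-official/steps/component-governance.yml   # path assumed
  parameters:
    disableComponentGovernance: false
    # Directories to exclude from component detection (illustrative path).
    componentGovernanceIgnoreDirectories: '$(Build.SourcesDirectory)/artifacts/obj'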

View file

@@ -0,0 +1,32 @@
parameters:
# Language that should be analyzed. Defaults to csharp
language: csharp
# Build Commands
buildCommands: ''
overrideParameters: '' # Optional: to override values for parameters.
additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
# Optional: if specified, restore and use this version of Guardian instead of the default.
overrideGuardianVersion: ''
# Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
# diagnosis of problems with specific tool configurations.
publishGuardianDirectoryToPipeline: false
# The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
# parameters rather than relying on YAML. It may be better to use a local script, because you can
# reproduce results locally without piecing together a command based on the YAML.
executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
# There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
# 'continueOnError', the parameter value is not correctly picked up.
# This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
# optional: determines whether to continue the build if the step errors;
sdlContinueOnError: false
steps:
- template: /eng/common/templates-official/steps/execute-sdl.yml
parameters:
overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
overrideParameters: ${{ parameters.overrideParameters }}
additionalParameters: '${{ parameters.additionalParameters }}
-CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")'
publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
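Assuming this wrapper is the templates-official CodeQL step (the file path is not shown in this view), a consuming job might call it roughly as follows; the build command is a placeholder.

steps:
- template: /eng/common/templates-official/steps/execute-codeql.yml   # path assumed
  parameters:
    language: csharp
    # Placeholder command for the CodeQL-instrumented build.
    buildCommands: 'eng\common\cibuild.cmd -configuration Release -prepareMachine'
    sdlContinueOnError: false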

View file

@@ -0,0 +1,88 @@
parameters:
overrideGuardianVersion: ''
executeAllSdlToolsScript: ''
overrideParameters: ''
additionalParameters: ''
publishGuardianDirectoryToPipeline: false
sdlContinueOnError: false
condition: ''
steps:
- task: NuGetAuthenticate@1
inputs:
nuGetServiceConnections: GuardianConnect
- task: NuGetToolInstaller@1
displayName: 'Install NuGet.exe'
- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
- pwsh: |
Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
. .\sdl.ps1
$guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }}
Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
displayName: Install Guardian (Overridden)
- ${{ if eq(parameters.overrideGuardianVersion, '') }}:
- pwsh: |
Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
. .\sdl.ps1
$guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts
Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
displayName: Install Guardian
- ${{ if ne(parameters.overrideParameters, '') }}:
- powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
displayName: Execute SDL (Overridden)
continueOnError: ${{ parameters.sdlContinueOnError }}
condition: ${{ parameters.condition }}
- ${{ if eq(parameters.overrideParameters, '') }}:
- powershell: ${{ parameters.executeAllSdlToolsScript }}
-GuardianCliLocation $(GuardianCliLocation)
-NugetPackageDirectory $(Build.SourcesDirectory)\.packages
-AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
${{ parameters.additionalParameters }}
displayName: Execute SDL
continueOnError: ${{ parameters.sdlContinueOnError }}
condition: ${{ parameters.condition }}
- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}:
# We want to publish the Guardian results and configuration for easy diagnosis. However, the
# '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default
# tooling files. Some of these files are large and aren't useful during an investigation, so
# exclude them by simply deleting them before publishing. (As of writing, there is no documented
# way to selectively exclude a dir from the pipeline artifact publish task.)
- task: DeleteFiles@1
displayName: Delete Guardian dependencies to avoid uploading
inputs:
SourceFolder: $(Agent.BuildDirectory)/.gdn
Contents: |
c
i
condition: succeededOrFailed()
- publish: $(Agent.BuildDirectory)/.gdn
artifact: GuardianConfiguration
displayName: Publish GuardianConfiguration
condition: succeededOrFailed()
# Publish the SARIF files in a container named CodeAnalysisLogs to enable integration
# with the "SARIF SAST Scans Tab" Azure DevOps extension
- task: CopyFiles@2
displayName: Copy SARIF files
inputs:
flattenFolders: true
sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/
contents: '**/*.sarif'
targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs
condition: succeededOrFailed()
# Use PublishBuildArtifacts because the SARIF extension only checks this case
# see microsoft/sarif-azuredevops-extension#4
- task: PublishBuildArtifacts@1
displayName: Publish SARIF files to CodeAnalysisLogs container
inputs:
pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs
artifactName: CodeAnalysisLogs
condition: succeededOrFailed()

View file

@@ -1,7 +1,48 @@
steps:
- template: /eng/common/core-templates/steps/generate-sbom.yml
parameters:
is1ESPipeline: true
# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
# PackageName - The name of the package this SBOM represents.
# PackageVersion - The version of the package this SBOM represents.
# ManifestDirPath - The path of the directory where the generated manifest files will be placed
# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
parameters:
PackageVersion: 8.0.0
BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
PackageName: '.NET'
ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
IgnoreDirectories: ''
sbomContinueOnError: true
steps:
- task: PowerShell@2
displayName: Prep for SBOM generation in (Non-linux)
condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
inputs:
filePath: ./eng/common/generate-sbom-prep.ps1
arguments: ${{parameters.manifestDirPath}}
# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
- script: |
chmod +x ./eng/common/generate-sbom-prep.sh
./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
displayName: Prep for SBOM generation in (Linux)
condition: eq(variables['Agent.Os'], 'Linux')
continueOnError: ${{ parameters.sbomContinueOnError }}
- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
displayName: 'Generate SBOM manifest'
continueOnError: ${{ parameters.sbomContinueOnError }}
inputs:
PackageName: ${{ parameters.packageName }}
BuildDropPath: ${{ parameters.buildDropPath }}
PackageVersion: ${{ parameters.packageVersion }}
ManifestDirPath: ${{ parameters.manifestDirPath }}
${{ if ne(parameters.IgnoreDirectories, '') }}:
AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
- task: 1ES.PublishPipelineArtifact@1
displayName: Publish SBOM manifest
continueOnError: ${{parameters.sbomContinueOnError}}
inputs:
targetPath: '${{parameters.manifestDirPath}}'
artifactName: $(ARTIFACT_NAME)
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
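A minimal sketch of overriding the SBOM defaults above; the path is assumed and the package name and version are placeholders.

steps:
- template: /eng/common/templates-official/steps/generate-sbom.yml   # path assumed
  parameters:
    PackageName: '.NET MAUI'                            # placeholder
    PackageVersion: 9.0.0                               # placeholder
    BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
    ManifestDirPath: '$(Build.ArtifactStagingDirectory)/sbom'
    IgnoreDirectories: ''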

View file

@@ -1,41 +0,0 @@
parameters:
- name: displayName
type: string
default: 'Publish to Build Artifact'
- name: condition
type: string
default: succeeded()
- name: artifactName
type: string
- name: pathToPublish
type: string
- name: continueOnError
type: boolean
default: false
- name: publishLocation
type: string
default: 'Container'
- name: is1ESPipeline
type: boolean
default: true
steps:
- ${{ if ne(parameters.is1ESPipeline, true) }}:
- 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
- task: 1ES.PublishBuildArtifacts@1
displayName: ${{ parameters.displayName }}
condition: ${{ parameters.condition }}
${{ if parameters.continueOnError }}:
continueOnError: ${{ parameters.continueOnError }}
inputs:
PublishLocation: ${{ parameters.publishLocation }}
PathtoPublish: ${{ parameters.pathToPublish }}
${{ if parameters.artifactName }}:
ArtifactName: ${{ parameters.artifactName }}

View file

@@ -1,7 +1,23 @@
steps:
- template: /eng/common/core-templates/steps/publish-logs.yml
parameters:
is1ESPipeline: true
parameters:
StageLabel: ''
JobLabel: ''
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
steps:
- task: Powershell@2
displayName: Prepare Binlogs to Upload
inputs:
targetType: inline
script: |
New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
continueOnError: true
condition: always()
- task: 1ES.PublishBuildArtifacts@1
displayName: Publish Logs
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
PublishLocation: Container
ArtifactName: PostBuildLogs
continueOnError: true
condition: always()
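This mirrors how the template is already consumed by the signing-validation job earlier in this diff; a sketch with an assumed path:

steps:
- template: /eng/common/templates-official/steps/publish-logs.yml   # path assumed
  parameters:
    # Labels only control the folder layout under PostBuildLogs.
    StageLabel: 'Validation'
    JobLabel: 'Signing'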

View file

@@ -1,26 +0,0 @@
parameters:
- name: is1ESPipeline
type: boolean
default: true
- name: args
type: object
default: {}
steps:
- ${{ if ne(parameters.is1ESPipeline, true) }}:
- 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
- task: 1ES.PublishPipelineArtifact@1
displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
${{ if parameters.args.condition }}:
condition: ${{ parameters.args.condition }}
${{ else }}:
condition: succeeded()
${{ if parameters.args.continueOnError }}:
continueOnError: ${{ parameters.args.continueOnError }}
inputs:
targetPath: ${{ parameters.args.targetPath }}
${{ if parameters.args.artifactName }}:
artifactName: ${{ parameters.args.artifactName }}
${{ if parameters.args.properties }}:
properties: ${{ parameters.args.properties }}

View file

@@ -1,7 +1,28 @@
steps:
- template: /eng/common/core-templates/steps/retain-build.yml
parameters:
is1ESPipeline: true
parameters:
# Optional azure devops PAT with build execute permissions for the build's organization,
# only needed if the build that should be retained ran on a different organization than
# the pipeline where this template is executing from
Token: ''
# Optional BuildId to retain, defaults to the current running build
BuildId: ''
# Azure devops Organization URI for the build in the https://dev.azure.com/<organization> format.
# Defaults to the organization the current pipeline is running on
AzdoOrgUri: '$(System.CollectionUri)'
# Azure devops project for the build. Defaults to the project the current pipeline is running on
AzdoProject: '$(System.TeamProject)'
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
steps:
- task: powershell@2
inputs:
targetType: 'filePath'
filePath: eng/common/retain-build.ps1
pwsh: true
arguments: >
-AzdoOrgUri: ${{parameters.AzdoOrgUri}}
-AzdoProject ${{parameters.AzdoProject}}
-Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
-BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
displayName: Enable permanent build retention
env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
BUILD_ID: $(Build.BuildId)
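A hedged sketch of the defaults above, which retain the currently running build; Token, BuildId, and AzdoOrgUri would only be supplied when retaining a build from a different organization. The path is assumed.

steps:
- template: /eng/common/templates-official/steps/retain-build.yml   # path assumed
  parameters:
    # Empty values retain the current build in the current org/project.
    Token: ''
    BuildId: ''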

View file

@@ -1,7 +1,91 @@
steps:
- template: /eng/common/core-templates/steps/send-to-helix.yml
parameters:
is1ESPipeline: true
# Please remember to update the documentation if you make changes to these parameters!
parameters:
HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
HelixConfiguration: '' # optional -- additional property attached to a job
HelixPreCommands: '' # optional -- commands to run before Helix work item execution
HelixPostCommands: '' # optional -- commands to run after Helix work item execution
WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
Creator: '' # optional -- if the build is external, use this to specify who is sending the job
DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
steps:
- powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
env:
BuildConfig: $(_BuildConfig)
HelixSource: ${{ parameters.HelixSource }}
HelixType: ${{ parameters.HelixType }}
HelixBuild: ${{ parameters.HelixBuild }}
HelixConfiguration: ${{ parameters.HelixConfiguration }}
HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
HelixAccessToken: ${{ parameters.HelixAccessToken }}
HelixPreCommands: ${{ parameters.HelixPreCommands }}
HelixPostCommands: ${{ parameters.HelixPostCommands }}
WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
WorkItemCommand: ${{ parameters.WorkItemCommand }}
WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
XUnitProjects: ${{ parameters.XUnitProjects }}
XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
HelixBaseUri: ${{ parameters.HelixBaseUri }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
- script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
env:
BuildConfig: $(_BuildConfig)
HelixSource: ${{ parameters.HelixSource }}
HelixType: ${{ parameters.HelixType }}
HelixBuild: ${{ parameters.HelixBuild }}
HelixConfiguration: ${{ parameters.HelixConfiguration }}
HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
HelixAccessToken: ${{ parameters.HelixAccessToken }}
HelixPreCommands: ${{ parameters.HelixPreCommands }}
HelixPostCommands: ${{ parameters.HelixPostCommands }}
WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
WorkItemCommand: ${{ parameters.WorkItemCommand }}
WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
XUnitProjects: ${{ parameters.XUnitProjects }}
XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
HelixBaseUri: ${{ parameters.HelixBaseUri }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
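A condensed usage sketch with an assumed path and placeholder queue and version values; per the comments above, HelixSource must start with pr/, official/, prodcon/, or agent/, and HelixType must end in a slash.

steps:
- template: /eng/common/templates-official/steps/send-to-helix.yml   # path assumed
  parameters:
    HelixSource: 'pr/dotnet/maui/$(Build.SourceBranch)'               # illustrative
    HelixType: 'test/product/'
    HelixTargetQueues: 'Windows.10.Amd64.Open;Ubuntu.2004.Amd64.Open' # placeholder queues
    Creator: $(Build.SourceVersionAuthor)                             # illustrative; external jobs need a creator
    WorkItemDirectory: '$(Build.SourcesDirectory)/artifacts/helix'
    WorkItemCommand: 'dotnet test Tests.dll'                          # placeholder
    IncludeDotNetCli: true
    DotNetCliPackageType: sdk
    DotNetCliVersion: '9.0.100'                                       # placeholder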

View file

@@ -1,7 +1,129 @@
steps:
- template: /eng/common/core-templates/steps/source-build.yml
parameters:
is1ESPipeline: true
parameters:
# This template adds arcade-powered source-build to CI.
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
# This is a 'steps' template, and is intended for advanced scenarios where the existing build
# infra has a careful build methodology that must be followed. For example, a repo
# (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
# artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
# GitHub. Using this steps template leaves room for that infra to be included.
# Defines the platform on which to run the steps. See 'eng/common/templates-official/job/source-build.yml'
# for details. The entire object is described in the 'job' template for simplicity, even though
# the usage of the properties on this object is split between the 'job' and 'steps' templates.
platform: {}
steps:
# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
- script: |
set -x
df -h
# If building on the internal project, the artifact feeds variable may be available (usually only if needed)
# In that case, call the feed setup script to add internal feeds corresponding to public ones.
# In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
# This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
# changes.
internalRestoreArgs=
if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
# Temporarily work around https://github.com/dotnet/arcade/issues/7709
chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
$(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
# The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
# This only works if there is a username/email configured, which won't be the case in most CI runs.
git config --get user.email
if [ $? -ne 0 ]; then
git config user.email dn-bot@microsoft.com
git config user.name dn-bot
fi
fi
# If building on the internal project, the internal storage variable may be available (usually only if needed)
# In that case, add variables to allow the download of internal runtimes if the specified versions are not found
# in the default public locations.
internalRuntimeDownloadArgs=
if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
fi
buildConfig=Release
# Check if AzDO substitutes in a build config from a variable, and use it if so.
if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
buildConfig='$(_BuildConfig)'
fi
officialBuildArgs=
if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
fi
targetRidArgs=
if [ '${{ parameters.platform.targetRID }}' != '' ]; then
targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
fi
runtimeOsArgs=
if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
fi
baseOsArgs=
if [ '${{ parameters.platform.baseOS }}' != '' ]; then
baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
fi
publishArgs=
if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
publishArgs='--publish'
fi
assetManifestFileName=SourceBuild_RidSpecific.xml
if [ '${{ parameters.platform.name }}' != '' ]; then
assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
fi
${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
--configuration $buildConfig \
--restore --build --pack $publishArgs -bl \
$officialBuildArgs \
$internalRuntimeDownloadArgs \
$internalRestoreArgs \
$targetRidArgs \
$runtimeOsArgs \
$baseOsArgs \
/p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
/p:ArcadeBuildFromSource=true \
/p:AssetManifestFileName=$assetManifestFileName
displayName: Build
# Upload build logs for diagnosis.
- task: CopyFiles@2
displayName: Prepare BuildLogs staging directory
inputs:
SourceFolder: '$(Build.SourcesDirectory)'
Contents: |
**/*.log
**/*.binlog
artifacts/source-build/self/prebuilt-report/**
TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
CleanTargetFolder: true
continueOnError: true
condition: succeededOrFailed()
- task: 1ES.PublishPipelineArtifact@1
displayName: Publish BuildLogs
inputs:
targetPath: '$(Build.StagingDirectory)/BuildLogs'
artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
continueOnError: true
condition: succeededOrFailed()
# Manually inject component detection so that we can ignore the source build upstream cache, which contains
# a nupkg cache of input packages (a local feed).
# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
- task: ComponentGovernanceComponentDetection@0
displayName: Component Detection (Exclude upstream cache)
inputs:
ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache'

View file

@@ -0,0 +1,139 @@
parameters:
enable: 'false' # Whether the SDL validation job should execute or not
overrideParameters: '' # Optional: to override values for parameters.
additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
# Optional: if specified, restore and use this version of Guardian instead of the default.
overrideGuardianVersion: ''
# Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
# diagnosis of problems with specific tool configurations.
publishGuardianDirectoryToPipeline: false
# The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
# parameters rather than relying on YAML. It may be better to use a local script, because you can
# reproduce results locally without piecing together a command based on the YAML.
executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
# There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
# 'continueOnError', the parameter value is not correctly picked up.
# This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
sdlContinueOnError: false # optional: determines whether to continue the build if the step errors;
# optional: determines if build artifacts should be downloaded.
downloadArtifacts: true
# optional: determines if this job should search the directory of downloaded artifacts for
# 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks.
extractArchiveArtifacts: false
dependsOn: '' # Optional: dependencies of the job
artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts
# Usage:
# artifactNames:
# - 'BlobArtifacts'
# - 'Artifacts_Windows_NT_Release'
# Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts,
# not pipeline artifacts, so doesn't affect the use of this parameter.
pipelineArtifactNames: []
jobs:
- job: Run_SDL
dependsOn: ${{ parameters.dependsOn }}
displayName: Run SDL tool
condition: and(succeededOrFailed(), eq( ${{ parameters.enable }}, 'true'))
variables:
- group: DotNet-VSTS-Bot
- name: AzDOProjectName
value: ${{ parameters.AzDOProjectName }}
- name: AzDOPipelineId
value: ${{ parameters.AzDOPipelineId }}
- name: AzDOBuildId
value: ${{ parameters.AzDOBuildId }}
- template: /eng/common/templates/variables/sdl-variables.yml
- name: GuardianVersion
value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
- template: /eng/common/templates/variables/pool-providers.yml
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: VSEngSS-MicroBuild2022-1ES
demands: Cmd
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: $(DncEngInternalBuildPool)
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- checkout: self
clean: true
# If the template caller didn't provide an AzDO parameter, set them all up as Maestro vars.
- ${{ if not(and(parameters.AzDOProjectName, parameters.AzDOPipelineId, parameters.AzDOBuildId)) }}:
- template: /eng/common/templates/post-build/setup-maestro-vars.yml
- ${{ if ne(parameters.downloadArtifacts, 'false')}}:
- ${{ if ne(parameters.artifactNames, '') }}:
- ${{ each artifactName in parameters.artifactNames }}:
- task: DownloadBuildArtifacts@0
displayName: Download Build Artifacts
inputs:
buildType: specific
buildVersionToDownload: specific
project: $(AzDOProjectName)
pipeline: $(AzDOPipelineId)
buildId: $(AzDOBuildId)
artifactName: ${{ artifactName }}
downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
checkDownloadedFiles: true
- ${{ if eq(parameters.artifactNames, '') }}:
- task: DownloadBuildArtifacts@0
displayName: Download Build Artifacts
inputs:
buildType: specific
buildVersionToDownload: specific
project: $(AzDOProjectName)
pipeline: $(AzDOPipelineId)
buildId: $(AzDOBuildId)
downloadType: specific files
itemPattern: "**"
downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
checkDownloadedFiles: true
- ${{ each artifactName in parameters.pipelineArtifactNames }}:
- task: DownloadPipelineArtifact@2
displayName: Download Pipeline Artifacts
inputs:
buildType: specific
buildVersionToDownload: specific
project: $(AzDOProjectName)
pipeline: $(AzDOPipelineId)
buildId: $(AzDOBuildId)
artifactName: ${{ artifactName }}
downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
checkDownloadedFiles: true
- powershell: eng/common/sdl/trim-assets-version.ps1
-InputPath $(Build.ArtifactStagingDirectory)\artifacts
displayName: Trim the version from the NuGet packages
continueOnError: ${{ parameters.sdlContinueOnError }}
- powershell: eng/common/sdl/extract-artifact-packages.ps1
-InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
-ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
displayName: Extract Blob Artifacts
continueOnError: ${{ parameters.sdlContinueOnError }}
- powershell: eng/common/sdl/extract-artifact-packages.ps1
-InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
-ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
displayName: Extract Package Artifacts
continueOnError: ${{ parameters.sdlContinueOnError }}
- ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}:
- powershell: eng/common/sdl/extract-artifact-archives.ps1
-InputPath $(Build.ArtifactStagingDirectory)\artifacts
-ExtractPath $(Build.ArtifactStagingDirectory)\artifacts
displayName: Extract Archive Artifacts
continueOnError: ${{ parameters.sdlContinueOnError }}
- template: /eng/common/templates/steps/execute-sdl.yml
parameters:
overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
overrideParameters: ${{ parameters.overrideParameters }}
additionalParameters: ${{ parameters.additionalParameters }}
publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
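Given the step templates it references under eng/common/templates, this appears to be the non-1ES SDL job template; a hedged consumption sketch follows, with an assumed path and artifact names as typically produced by arcade builds.

jobs:
- template: /eng/common/templates/job/execute-sdl.yml   # path assumed
  parameters:
    enable: true
    # Download only the artifacts SDL needs to scan.
    artifactNames:
    - PackageArtifacts
    - BlobArtifacts
    extractArchiveArtifacts: true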

View file

@@ -1,61 +1,259 @@
parameters:
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
parameters:
# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
cancelTimeoutInMinutes: ''
condition: ''
container: ''
continueOnError: false
dependsOn: ''
displayName: ''
pool: ''
steps: []
strategy: ''
timeoutInMinutes: ''
variables: []
workspace: ''
templateContext: ''
# Job base template specific parameters
# See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
artifacts: ''
enableMicrobuild: false
enablePublishBuildArtifacts: false
enablePublishBuildAssets: false
enablePublishTestResults: false
enablePublishUsingPipelines: false
enableBuildRetry: false
disableComponentGovernance: ''
componentGovernanceIgnoreDirectories: ''
mergeTestResults: false
testRunTitle: ''
testResultsFormat: ''
name: ''
preSteps: []
runAsPublic: false
# Sbom related params
enableSbom: true
PackageVersion: 7.0.0
BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
jobs:
- template: /eng/common/core-templates/job/job.yml
parameters:
is1ESPipeline: false
- job: ${{ parameters.name }}
${{ each parameter in parameters }}:
${{ if and(ne(parameter.key, 'steps'), ne(parameter.key, 'is1ESPipeline')) }}:
${{ parameter.key }}: ${{ parameter.value }}
${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
steps:
- ${{ each step in parameters.steps }}:
- ${{ step }}
${{ if ne(parameters.condition, '') }}:
condition: ${{ parameters.condition }}
artifactPublishSteps:
- ${{ if ne(parameters.artifacts.publish, '') }}:
- ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters:
is1ESPipeline: false
args:
displayName: Publish pipeline artifacts
pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
publishLocation: Container
artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
continueOnError: true
condition: always()
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: false
args:
targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
displayName: 'Publish logs'
continueOnError: true
condition: always()
${{ if ne(parameters.container, '') }}:
container: ${{ parameters.container }}
- ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters:
is1ESPipeline: false
args:
displayName: Publish Logs
pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
publishLocation: Container
artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
continueOnError: true
condition: always()
${{ if ne(parameters.continueOnError, '') }}:
continueOnError: ${{ parameters.continueOnError }}
- ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: false
args:
targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
artifactName: 'BuildConfiguration'
displayName: 'Publish build retry configuration'
continueOnError: true
${{ if ne(parameters.dependsOn, '') }}:
dependsOn: ${{ parameters.dependsOn }}
${{ if ne(parameters.displayName, '') }}:
displayName: ${{ parameters.displayName }}
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if ne(parameters.strategy, '') }}:
strategy: ${{ parameters.strategy }}
${{ if ne(parameters.timeoutInMinutes, '') }}:
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
${{ if ne(parameters.templateContext, '') }}:
templateContext: ${{ parameters.templateContext }}
variables:
- ${{ if ne(parameters.enableTelemetry, 'false') }}:
- name: DOTNET_CLI_TELEMETRY_PROFILE
value: '$(Build.Repository.Uri)'
- ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- name: EnableRichCodeNavigation
value: 'true'
# Retry signature validation up to three times, waiting 2 seconds between attempts.
# See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
value: 3,2000
- ${{ each variable in parameters.variables }}:
# handle name-value variable syntax
# example:
# - name: [key]
# value: [value]
- ${{ if ne(variable.name, '') }}:
- name: ${{ variable.name }}
value: ${{ variable.value }}
# handle variable groups
- ${{ if ne(variable.group, '') }}:
- group: ${{ variable.group }}
# handle template variable syntax
# example:
# - template: path/to/template.yml
# parameters:
# [key]: [value]
- ${{ if ne(variable.template, '') }}:
- template: ${{ variable.template }}
${{ if ne(variable.parameters, '') }}:
parameters: ${{ variable.parameters }}
# handle key-value variable syntax.
# example:
# - [key]: [value]
- ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
- ${{ each pair in variable }}:
- name: ${{ pair.key }}
value: ${{ pair.value }}
# DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- group: DotNet-HelixApi-Access
${{ if ne(parameters.workspace, '') }}:
workspace: ${{ parameters.workspace }}
steps:
- ${{ if ne(parameters.preSteps, '') }}:
- ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- task: MicroBuildSigningPlugin@3
displayName: Install MicroBuild plugin
inputs:
signType: $(_SignType)
zipSources: false
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
env:
TeamName: $(_TeamName)
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- task: NuGetAuthenticate@1
- ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- task: DownloadPipelineArtifact@2
inputs:
buildType: current
artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
- ${{ each step in parameters.steps }}:
- ${{ step }}
- ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- task: RichCodeNavIndexer@0
displayName: RichCodeNav Upload
inputs:
languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }}
richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
continueOnError: true
- template: /eng/common/templates/steps/component-governance.yml
parameters:
${{ if eq(parameters.disableComponentGovernance, '') }}:
${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
disableComponentGovernance: false
${{ else }}:
disableComponentGovernance: true
${{ else }}:
disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildCleanup@1
displayName: Execute Microbuild cleanup tasks
condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
env:
TeamName: $(_TeamName)
- ${{ if ne(parameters.artifacts.publish, '') }}:
- ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- task: CopyFiles@2
displayName: Gather binaries for publish to artifacts
inputs:
SourceFolder: 'artifacts/bin'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- task: CopyFiles@2
displayName: Gather packages for publish to artifacts
inputs:
SourceFolder: 'artifacts/packages'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- task: PublishBuildArtifacts@1
displayName: Publish pipeline artifacts
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
PublishLocation: Container
ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
continueOnError: true
condition: always()
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- publish: artifacts/log
artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
displayName: Publish logs
continueOnError: true
condition: always()
- ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- task: PublishBuildArtifacts@1
displayName: Publish Logs
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
PublishLocation: Container
ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
continueOnError: true
condition: always()
- ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
- task: PublishTestResults@2
displayName: Publish XUnit Test Results
inputs:
testResultsFormat: 'xUnit'
testResultsFiles: '*.xml'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true
condition: always()
- ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
- task: PublishTestResults@2
displayName: Publish TRX Test Results
inputs:
testResultsFormat: 'VSTest'
testResultsFiles: '*.trx'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true
condition: always()
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- template: /eng/common/templates/steps/generate-sbom.yml
parameters:
PackageVersion: ${{ parameters.packageVersion}}
BuildDropPath: ${{ parameters.buildDropPath }}
IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
- ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- publish: $(Build.SourcesDirectory)\eng\common\BuildConfiguration
artifact: BuildConfiguration
displayName: Publish build retry configuration
continueOnError: true
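For reference, a minimal job defined through this template might look like the sketch below; the pool, script, and names are placeholders, not part of this commit.

jobs:
- template: /eng/common/templates/job/job.yml
  parameters:
    name: Build_Windows                      # placeholder job name
    displayName: Build (Windows)
    pool:
      vmImage: windows-latest                # placeholder pool
    enablePublishBuildArtifacts: true
    enablePublishTestResults: true
    testResultsFormat: vstest
    steps:
    - script: eng\common\cibuild.cmd -configuration $(_BuildConfig) -prepareMachine
      displayName: Build and test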

View file

@@ -1,7 +1,109 @@
jobs:
- template: /eng/common/core-templates/job/onelocbuild.yml
parameters:
is1ESPipeline: false
parameters:
# Optional: dependencies of the job
dependsOn: ''
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
pool: ''
CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
SourcesDirectory: $(Build.SourcesDirectory)
CreatePr: true
AutoCompletePr: false
ReusePr: true
UseLfLineEndings: true
UseCheckedInLocProjectJson: false
SkipLocProjectJsonGeneration: false
LanguageSet: VS_Main_Languages
LclSource: lclFilesInRepo
LclPackageId: ''
RepoType: gitHub
GitHubOrg: dotnet
MirrorRepo: ''
MirrorBranch: main
condition: ''
JobNameSuffix: ''
jobs:
- job: OneLocBuild${{ parameters.JobNameSuffix }}
dependsOn: ${{ parameters.dependsOn }}
displayName: OneLocBuild${{ parameters.JobNameSuffix }}
variables:
- group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
- name: _GenerateLocProjectArguments
value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
-LanguageSet "${{ parameters.LanguageSet }}"
-CreateNeutralXlfs
- ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
- name: _GenerateLocProjectArguments
value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
- template: /eng/common/templates/variables/pool-providers.yml
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if eq(parameters.pool, '') }}:
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: VSEngSS-MicroBuild2022-1ES
demands: Cmd
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: $(DncEngInternalBuildPool)
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- task: Powershell@2
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
arguments: $(_GenerateLocProjectArguments)
displayName: Generate LocProject.json
condition: ${{ parameters.condition }}
- task: OneLocBuild@2
displayName: OneLocBuild
env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
inputs:
locProj: eng/Localize/LocProject.json
outDir: $(Build.ArtifactStagingDirectory)
lclSource: ${{ parameters.LclSource }}
lclPackageId: ${{ parameters.LclPackageId }}
isCreatePrSelected: ${{ parameters.CreatePr }}
isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
${{ if eq(parameters.CreatePr, true) }}:
isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
${{ if eq(parameters.RepoType, 'gitHub') }}:
isShouldReusePrSelected: ${{ parameters.ReusePr }}
packageSourceAuth: patAuth
patVariable: ${{ parameters.CeapexPat }}
${{ if eq(parameters.RepoType, 'gitHub') }}:
repoType: ${{ parameters.RepoType }}
gitHubPatVariable: "${{ parameters.GithubPat }}"
${{ if ne(parameters.MirrorRepo, '') }}:
isMirrorRepoSelected: true
gitHubOrganization: ${{ parameters.GitHubOrg }}
mirrorRepo: ${{ parameters.MirrorRepo }}
mirrorBranch: ${{ parameters.MirrorBranch }}
condition: ${{ parameters.condition }}
- task: PublishBuildArtifacts@1
displayName: Publish Localization Files
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
PublishLocation: Container
ArtifactName: Loc
condition: ${{ parameters.condition }}
- task: PublishBuildArtifacts@1
displayName: Publish LocProject.json
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
PublishLocation: Container
ArtifactName: Loc
condition: ${{ parameters.condition }}
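A hedged sketch of enqueueing the localization job above; the template path, mirror repo, and LCL package id are placeholders.

jobs:
- template: /eng/common/templates/job/onelocbuild.yml   # path assumed
  parameters:
    MirrorRepo: maui                         # placeholder
    MirrorBranch: main
    LclSource: lclFilesfromPackage
    LclPackageId: 'LCL-JUNO-PROD-MAUI'       # placeholder
    CreatePr: true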

View file

@@ -1,7 +1,151 @@
jobs:
- template: /eng/common/core-templates/job/publish-build-assets.yml
parameters:
is1ESPipeline: false
parameters:
configuration: 'Debug'
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
# Optional: condition for the job to run
condition: ''
# Optional: 'true' if future jobs should run even if this job fails
continueOnError: false
# Optional: dependencies of the job
dependsOn: ''
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
pool: {}
# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
# Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
publishUsingPipelines: false
# Optional: whether to publish assets immediately from this job rather than in a separate publishing stage
publishAssetsImmediately: false
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
jobs:
- job: Asset_Registry_Publish
dependsOn: ${{ parameters.dependsOn }}
timeoutInMinutes: 150
${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
displayName: Publish Assets
${{ else }}:
displayName: Publish to Build Asset Registry
variables:
- template: /eng/common/templates/variables/pool-providers.yml
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- group: Publish-Build-Assets
- group: AzureDevOps-Artifact-Feeds-Pats
- name: runCodesignValidationInjection
value: false
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- template: /eng/common/templates/post-build/common-variables.yml
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: VSEngSS-MicroBuild2022-1ES
demands: Cmd
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: NetCore1ESPool-Publishing-Internal
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: DownloadBuildArtifacts@0
displayName: Download artifact
inputs:
artifactName: AssetManifests
downloadPath: '$(Build.StagingDirectory)/Download'
checkDownloadedFiles: true
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: NuGetAuthenticate@1
- task: PowerShell@2
displayName: Publish Build Assets
inputs:
filePath: eng\common\sdk-task.ps1
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
/p:MaestroApiEndpoint=https://maestro.dot.net
/p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
/p:OfficialBuildId=$(Build.BuildNumber)
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: powershell@2
displayName: Create ReleaseConfigs Artifact
inputs:
targetType: inline
script: |
Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)
- task: PublishBuildArtifacts@1
displayName: Publish ReleaseConfigs Artifact
inputs:
PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
PublishLocation: Container
ArtifactName: ReleaseConfigs
- task: powershell@2
displayName: Check if SymbolPublishingExclusionsFile.txt exists
inputs:
targetType: inline
script: |
$symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
if(Test-Path -Path $symbolExclusionfile)
{
Write-Host "SymbolExclusionFile exists"
Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
}
else{
Write-Host "Symbols Exclusion file does not exists"
Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
}
- task: PublishBuildArtifacts@1
displayName: Publish SymbolPublishingExclusionsFile Artifact
condition: eq(variables['SymbolExclusionFile'], 'true')
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
PublishLocation: Container
ArtifactName: ReleaseConfigs
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- template: /eng/common/templates/post-build/setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- task: PowerShell@2
displayName: Publish Using Darc
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: -BuildId $(BARBuildId)
-PublishingInfraVersion 3
-AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
-MaestroToken '$(MaestroApiAccessToken)'
-WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- template: /eng/common/templates/steps/publish-logs.yml
parameters:
JobLabel: 'Publish_Artifacts_Logs'


@ -1,7 +1,66 @@
jobs:
- template: /eng/common/core-templates/job/source-build.yml
parameters:
is1ESPipeline: false
parameters:
# This template adds arcade-powered source-build to CI. The template produces a server job with a
# default ID 'Source_Build_Complete' to put in a dependency list if necessary.
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
# Specifies the prefix for source-build jobs added to the pipeline. Use this if disambiguation is needed.
jobNamePrefix: 'Source_Build'
# Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
# managed-only repositories. This is an object with these properties:
#
# name: ''
# The name of the job. This is included in the job ID.
# targetRID: ''
# The name of the target RID to use, instead of the one auto-detected by Arcade.
# nonPortable: false
# Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
# linux-x64), and compiling against distro-provided packages rather than portable ones.
# skipPublishValidation: false
# Disables publishing validation. By default, a check is performed to ensure no packages are
# published by source-build.
# container: ''
# A container to use. Runs in docker.
# pool: {}
# A pool to use. Runs directly on an agent.
# buildScript: ''
# Specifies the build script to invoke to perform the build in the repo. The default
# './build.sh' should work for typical Arcade repositories, but this is customizable for
# difficult situations.
# jobProperties: {}
# A list of job properties to inject at the top level, for potential extensibility beyond
# container and pool.
platform: {}
jobs:
- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
displayName: Source-Build (${{ parameters.platform.name }})
${{ each property in parameters.platform.jobProperties }}:
${{ property.key }}: ${{ property.value }}
${{ if ne(parameters.platform.container, '') }}:
container: ${{ parameters.platform.container }}
${{ if eq(parameters.platform.pool, '') }}:
# The default VM host AzDO pool. This should be capable of running Docker containers: almost all
# source-build builds run in Docker, including the default managed platform.
# /eng/common/templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
demands: ImageOverride -equals Build.Ubuntu.1804.Amd64
${{ if ne(parameters.platform.pool, '') }}:
pool: ${{ parameters.platform.pool }}
workspace:
clean: all
steps:
- template: /eng/common/templates/steps/source-build.yml
parameters:
platform: ${{ parameters.platform }}
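
To illustrate, a pipeline could add a single source-build leg by passing a platform object like the one below. The template path is an assumption; the property names follow the comment block above, and the container image matches the default used elsewhere in these templates.

jobs:
- template: /eng/common/templates/job/source-build.yml   # assumed path for this template
  parameters:
    platform:
      name: 'Managed'
      container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
      nonPortable: false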


@ -1,7 +1,67 @@
jobs:
- template: /eng/common/core-templates/job/source-index-stage1.yml
parameters:
is1ESPipeline: false
parameters:
runAsPublic: false
sourceIndexPackageVersion: 1.0.1-20230228.2
sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
preSteps: []
binlogPath: artifacts/log/Debug/Build.binlog
condition: ''
dependsOn: ''
pool: ''
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
jobs:
- job: SourceIndexStage1
dependsOn: ${{ parameters.dependsOn }}
condition: ${{ parameters.condition }}
variables:
- name: SourceIndexPackageVersion
value: ${{ parameters.sourceIndexPackageVersion }}
- name: SourceIndexPackageSource
value: ${{ parameters.sourceIndexPackageSource }}
- name: BinlogPath
value: ${{ parameters.binlogPath }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- group: source-dot-net stage1 variables
- template: /eng/common/templates/variables/pool-providers.yml
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if eq(parameters.pool, '') }}:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $(DncEngPublicBuildPool)
demands: ImageOverride -equals windows.vs2019.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $(DncEngInternalBuildPool)
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }}
- task: UseDotNet@2
displayName: Use .NET Core SDK 6
inputs:
packageType: sdk
version: 6.0.x
installationPath: $(Agent.TempDirectory)/dotnet
workingDirectory: $(Agent.TempDirectory)
- script: |
$(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
$(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
displayName: Download Tools
# Set the working directory to the temp directory so 'dotnet' doesn't pick up the repo's global.json and use the repo's SDK.
workingDirectory: $(Agent.TempDirectory)
- script: ${{ parameters.sourceIndexBuildCommand }}
displayName: Build Repository
- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
displayName: Process Binlog into indexable sln
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
displayName: Upload stage1 artifacts to source index
env:
BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)


@ -1,7 +1,31 @@
jobs:
- template: /eng/common/core-templates/jobs/codeql-build.yml
parameters:
is1ESPipeline: false
parameters:
# See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
continueOnError: false
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
jobs: []
# Optional: if specified, restore and use this version of Guardian instead of the default.
overrideGuardianVersion: ''
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
jobs:
- template: /eng/common/templates/jobs/jobs.yml
parameters:
enableMicrobuild: false
enablePublishBuildArtifacts: false
enablePublishTestResults: false
enablePublishBuildAssets: false
enablePublishUsingPipelines: false
enableTelemetry: true
variables:
- group: Publish-Build-Assets
# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
# sync with the packages.config file.
- name: DefaultGuardianVersion
value: 0.109.0
- name: GuardianPackagesConfigFile
value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
- name: GuardianVersion
value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
jobs: ${{ parameters.jobs }}


@ -1,7 +1,97 @@
jobs:
- template: /eng/common/core-templates/jobs/jobs.yml
parameters:
is1ESPipeline: false
parameters:
# See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
continueOnError: false
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
# Optional: Enable publishing using release pipelines
enablePublishUsingPipelines: false
# Optional: Enable running the source-build jobs to build repo from source
enableSourceBuild: false
# Optional: Parameters for source-build template.
# See /eng/common/templates/jobs/source-build.yml for options
sourceBuildParameters: []
graphFileGeneration:
# Optional: Enable generating the graph files at the end of the build
enabled: false
# Optional: Include toolset dependencies in the generated graph files
includeToolset: false
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
jobs: []
# Optional: Override automatically derived dependsOn value for "publish build assets" job
publishBuildAssetsDependsOn: ''
# Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
publishAssetsImmediately: false
# Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
enableSourceIndex: false
sourceIndexParams: {}
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
jobs:
- ${{ each job in parameters.jobs }}:
- template: ../job/job.yml
parameters:
# pass along parameters
${{ each parameter in parameters }}:
${{ if ne(parameter.key, 'jobs') }}:
${{ parameter.key }}: ${{ parameter.value }}
# pass along job properties
${{ each property in job }}:
${{ if ne(property.key, 'job') }}:
${{ property.key }}: ${{ property.value }}
name: ${{ job.job }}
- ${{ if eq(parameters.enableSourceBuild, true) }}:
- template: /eng/common/templates/jobs/source-build.yml
parameters:
allCompletedJobId: Source_Build_Complete
${{ each parameter in parameters.sourceBuildParameters }}:
${{ parameter.key }}: ${{ parameter.value }}
- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
- template: ../job/source-index-stage1.yml
parameters:
runAsPublic: ${{ parameters.runAsPublic }}
${{ each parameter in parameters.sourceIndexParams }}:
${{ parameter.key }}: ${{ parameter.value }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- template: ../job/publish-build-assets.yml
parameters:
continueOnError: ${{ parameters.continueOnError }}
dependsOn:
- ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- ${{ job.job }}
- ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.jobs }}:
- ${{ job.job }}
- ${{ if eq(parameters.enableSourceBuild, true) }}:
- Source_Build_Complete
runAsPublic: ${{ parameters.runAsPublic }}
publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
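
As a rough usage sketch, a repository pipeline typically wraps its own build jobs in this template roughly as follows. The template path and the job body are illustrative assumptions; the top-level parameter names come from the template above.

jobs:
- template: /eng/common/templates/jobs/jobs.yml   # assumed path for this template
  parameters:
    enablePublishBuildArtifacts: true
    enablePublishUsingPipelines: true
    enablePublishBuildAssets: true
    enableSourceBuild: true
    jobs:
    - job: Build
      pool:
        vmImage: ubuntu-latest
      steps:
      - script: ./build.sh --ci --restore --build --pack
        displayName: Build and pack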


@ -1,7 +1,46 @@
jobs:
- template: /eng/common/core-templates/jobs/source-build.yml
parameters:
is1ESPipeline: false
parameters:
# This template adds arcade-powered source-build to CI. A job is created for each platform, as
# well as an optional server job that completes when all platform jobs complete.
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
# The name of the "join" job for all source-build platforms. If set to empty string, the job is
# not included. Existing repo pipelines can use this job to depend on all source-build jobs
# completing without maintaining a separate list of every single job ID: just depend on this one
# server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
allCompletedJobId: ''
# See /eng/common/templates/job/source-build.yml
jobNamePrefix: 'Source_Build'
# This is the default platform provided by Arcade, intended for use by a managed-only repo.
defaultManagedPlatform:
name: 'Managed'
container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
# Defines the platforms on which to run build jobs. One job is created for each platform, and the
# object in this array is sent to the job template as 'platform'. If no platforms are specified,
# one job runs on 'defaultManagedPlatform'.
platforms: []
jobs:
- ${{ if ne(parameters.allCompletedJobId, '') }}:
- job: ${{ parameters.allCompletedJobId }}
displayName: Source-Build Complete
pool: server
dependsOn:
- ${{ each platform in parameters.platforms }}:
- ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- ${{ if eq(length(parameters.platforms), 0) }}:
- ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
- ${{ each platform in parameters.platforms }}:
- template: /eng/common/templates/job/source-build.yml
parameters:
jobNamePrefix: ${{ parameters.jobNamePrefix }}
platform: ${{ platform }}
- ${{ if eq(length(parameters.platforms), 0) }}:
- template: /eng/common/templates/job/source-build.yml
parameters:
jobNamePrefix: ${{ parameters.jobNamePrefix }}
platform: ${{ parameters.defaultManagedPlatform }}
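
For illustration, a pipeline that wants several source-build legs plus a single "join" job could call this template along these lines. The template path and platform entries are illustrative assumptions; the parameter names come from the template above.

jobs:
- template: /eng/common/templates/jobs/source-build.yml   # assumed path for this template
  parameters:
    allCompletedJobId: Source_Build_Complete
    platforms:
    - name: 'Managed'
      container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
    - name: 'CentOSStream8_NonPortable'                    # hypothetical non-portable leg
      container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
      nonPortable: true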


@ -1,8 +1,22 @@
variables:
- template: /eng/common/core-templates/post-build/common-variables.yml
parameters:
# Specifies whether to use 1ES
is1ESPipeline: false
- group: Publish-Build-Assets
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
# Whether the build is internal or not
- name: IsInternalBuild
value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
# Default Maestro++ API Endpoint and API Version
- name: MaestroApiEndPoint
value: "https://maestro.dot.net"
- name: MaestroApiAccessToken
value: $(MaestroAccessToken)
- name: MaestroApiVersion
value: "2020-02-20"
- name: SourceLinkCLIVersion
value: 3.0.0
- name: SymbolToolVersion
value: 1.0.1
- name: runCodesignValidationInjection
value: false


@ -1,8 +1,281 @@
stages:
- template: /eng/common/core-templates/post-build/post-build.yml
parameters:
# Specifies whether to use 1ES
is1ESPipeline: false
parameters:
# Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
# Publishing V1 is no longer supported
# Publishing V2 is no longer supported
# Publishing V3 is the default
- name: publishingInfraVersion
displayName: Which version of publishing should be used to promote the build definition?
type: number
default: 3
values:
- 3
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
- name: BARBuildId
displayName: BAR Build Id
type: number
default: 0
- name: PromoteToChannelIds
displayName: Channel to promote BARBuildId to
type: string
default: ''
- name: enableSourceLinkValidation
displayName: Enable SourceLink validation
type: boolean
default: false
- name: enableSigningValidation
displayName: Enable signing validation
type: boolean
default: true
- name: enableSymbolValidation
displayName: Enable symbol validation
type: boolean
default: false
- name: enableNugetValidation
displayName: Enable NuGet validation
type: boolean
default: true
- name: publishInstallersAndChecksums
displayName: Publish installers and checksums
type: boolean
default: true
- name: SDLValidationParameters
type: object
default:
enable: false
publishGdn: false
continueOnError: false
params: ''
artifactNames: ''
downloadArtifacts: true
# These parameters let the user customize the call to sdk-task.ps1 for publishing
# symbols & general artifacts as well as for signing validation
- name: symbolPublishingAdditionalParameters
displayName: Symbol publishing additional parameters
type: string
default: ''
- name: artifactsPublishingAdditionalParameters
displayName: Artifact publishing additional parameters
type: string
default: ''
- name: signingValidationAdditionalParameters
displayName: Signing validation additional parameters
type: string
default: ''
# Which stages should finish execution before post-build stages start
- name: validateDependsOn
type: object
default:
- build
- name: publishDependsOn
type: object
default:
- Validate
# Optional: Call asset publishing rather than running in a separate stage
- name: publishAssetsImmediately
type: boolean
default: false
stages:
- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- stage: Validate
dependsOn: ${{ parameters.validateDependsOn }}
displayName: Validate Build Assets
variables:
- template: common-variables.yml
- template: /eng/common/templates/variables/pool-providers.yml
jobs:
- job:
displayName: NuGet Validation
condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: VSEngSS-MicroBuild2022-1ES
demands: Cmd
# If it's not devdiv, it's dnceng
${{ else }}:
name: $(DncEngInternalBuildPool)
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- template: setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- task: DownloadBuildArtifacts@0
displayName: Download Package Artifacts
inputs:
buildType: specific
buildVersionToDownload: specific
project: $(AzDOProjectName)
pipeline: $(AzDOPipelineId)
buildId: $(AzDOBuildId)
artifactName: PackageArtifacts
checkDownloadedFiles: true
- task: PowerShell@2
displayName: Validate
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
-ToolDestinationPath $(Agent.BuildDirectory)/Extract/
- job:
displayName: Signing Validation
condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: VSEngSS-MicroBuild2022-1ES
demands: Cmd
# If it's not devdiv, it's dnceng
${{ else }}:
name: $(DncEngInternalBuildPool)
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- template: setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- task: DownloadBuildArtifacts@0
displayName: Download Package Artifacts
inputs:
buildType: specific
buildVersionToDownload: specific
project: $(AzDOProjectName)
pipeline: $(AzDOPipelineId)
buildId: $(AzDOBuildId)
artifactName: PackageArtifacts
checkDownloadedFiles: true
itemPattern: |
**
!**/Microsoft.SourceBuild.Intermediate.*.nupkg
# This is necessary whenever we want to publish/restore to an AzDO private feed
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
# otherwise it'll complain about accessing a private feed.
- task: NuGetAuthenticate@1
displayName: 'Authenticate to AzDO Feeds'
# Signing validation will optionally work with the buildmanifest file which is downloaded from
# Azure DevOps above.
- task: PowerShell@2
displayName: Validate
inputs:
filePath: eng\common\sdk-task.ps1
arguments: -task SigningValidation -restore -msbuildEngine vs
/p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
/p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
${{ parameters.signingValidationAdditionalParameters }}
- template: ../steps/publish-logs.yml
parameters:
StageLabel: 'Validation'
JobLabel: 'Signing'
- job:
displayName: SourceLink Validation
condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: VSEngSS-MicroBuild2022-1ES
demands: Cmd
# If it's not devdiv, it's dnceng
${{ else }}:
name: $(DncEngInternalBuildPool)
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- template: setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- task: DownloadBuildArtifacts@0
displayName: Download Blob Artifacts
inputs:
buildType: specific
buildVersionToDownload: specific
project: $(AzDOProjectName)
pipeline: $(AzDOPipelineId)
buildId: $(AzDOBuildId)
artifactName: BlobArtifacts
checkDownloadedFiles: true
- task: PowerShell@2
displayName: Validate
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
-ExtractPath $(Agent.BuildDirectory)/Extract/
-GHRepoName $(Build.Repository.Name)
-GHCommit $(Build.SourceVersion)
-SourcelinkCliVersion $(SourceLinkCLIVersion)
continueOnError: true
- template: /eng/common/templates/job/execute-sdl.yml
parameters:
enable: ${{ parameters.SDLValidationParameters.enable }}
publishGuardianDirectoryToPipeline: ${{ parameters.SDLValidationParameters.publishGdn }}
additionalParameters: ${{ parameters.SDLValidationParameters.params }}
continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }}
- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- stage: publish_using_darc
${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
dependsOn: ${{ parameters.publishDependsOn }}
${{ else }}:
dependsOn: ${{ parameters.validateDependsOn }}
displayName: Publish using Darc
variables:
- template: common-variables.yml
- template: /eng/common/templates/variables/pool-providers.yml
jobs:
- job:
displayName: Publish Using Darc
timeoutInMinutes: 120
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: VSEngSS-MicroBuild2022-1ES
demands: Cmd
# If it's not devdiv, it's dnceng
${{ else }}:
name: NetCore1ESPool-Publishing-Internal
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- template: setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- task: NuGetAuthenticate@1
- task: PowerShell@2
displayName: Publish Using Darc
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: -BuildId $(BARBuildId)
-PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
-AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
-MaestroToken '$(MaestroApiAccessToken)'
-WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
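
A rough usage sketch: an official build usually appends these post-build stages after its build stage roughly as follows. The template path and the stage name in validateDependsOn are illustrative assumptions; the parameter names come from the template above.

stages:
- template: /eng/common/templates/post-build/post-build.yml   # assumed path for this template
  parameters:
    publishingInfraVersion: 3
    enableNugetValidation: true
    enableSigningValidation: false
    enableSourceLinkValidation: false
    validateDependsOn:
    - Build                                                   # hypothetical build stage name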


@ -1,8 +1,70 @@
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
# Specifies whether to use 1ES
is1ESPipeline: false
parameters:
BARBuildId: ''
PromoteToChannelIds: ''
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
steps:
- ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
- task: DownloadBuildArtifacts@0
displayName: Download Release Configs
inputs:
buildType: current
artifactName: ReleaseConfigs
checkDownloadedFiles: true
- task: PowerShell@2
name: setReleaseVars
displayName: Set Release Configs Vars
inputs:
targetType: inline
pwsh: true
script: |
try {
if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
$Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
$BarId = $Content | Select -Index 0
$Channels = $Content | Select -Index 1
$IsStableBuild = $Content | Select -Index 2
$AzureDevOpsProject = $Env:System_TeamProject
$AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
$AzureDevOpsBuildId = $Env:Build_BuildId
}
else {
$buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
$apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
$apiHeaders.Add('Accept', 'application/json')
$apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
$buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
$BarId = $Env:BARBuildId
$Channels = $Env:PromoteToMaestroChannels -split ","
$Channels = $Channels -join "]["
$Channels = "[$Channels]"
$IsStableBuild = $buildInfo.stable
$AzureDevOpsProject = $buildInfo.azureDevOpsProject
$AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
$AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
}
Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
}
catch {
Write-Host $_
Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
exit 1
}
env:
MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}


@ -1,7 +1,13 @@
steps:
- template: /eng/common/core-templates/steps/add-build-to-channel.yml
parameters:
is1ESPipeline: false
parameters:
ChannelId: 0
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
steps:
- task: PowerShell@2
displayName: Add Build to Channel
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
arguments: -BuildId $(BARBuildId)
-ChannelId ${{ parameters.ChannelId }}
-MaestroApiAccessToken $(MaestroApiAccessToken)
-MaestroApiEndPoint $(MaestroApiEndPoint)
-MaestroApiVersion $(MaestroApiVersion)


@ -0,0 +1,12 @@
# build-reason.yml
# Description: runs the given steps only when Build.Reason matches. 'conditions' is a comma-separated string of
# build reasons for which the steps are included; prefix it with 'not' to run the steps for every other reason instead.
parameters:
conditions: ''
steps: []
steps:
- ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
- ${{ parameters.steps }}
- ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
- ${{ parameters.steps }}
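
As a usage sketch, this helper can gate a step on the build reason: with a plain list it runs the steps only for the listed reasons, and with a leading 'not' it runs them for every reason except the listed ones. The template path and the script below are illustrative assumptions.

steps:
- template: /eng/common/templates/steps/build-reason.yml   # assumed path for this template
  parameters:
    conditions: Schedule
    steps:
    - script: echo "This step runs only for scheduled builds"
      displayName: Scheduled-only step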


@ -1,7 +1,13 @@
steps:
- template: /eng/common/core-templates/steps/component-governance.yml
parameters:
is1ESPipeline: false
parameters:
disableComponentGovernance: false
componentGovernanceIgnoreDirectories: ''
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
steps:
- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
displayName: Set skipComponentGovernanceDetection variable
- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- task: ComponentGovernanceComponentDetection@0
continueOnError: true
inputs:
ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
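
To illustrate, a job can tune Component Governance detection through this wrapper, for example to skip generated output folders. The template path and directory value are illustrative assumptions; the parameter names come from the template above.

steps:
- template: /eng/common/templates/steps/component-governance.yml   # assumed path for this template
  parameters:
    disableComponentGovernance: false
    componentGovernanceIgnoreDirectories: '$(Build.SourcesDirectory)/artifacts/obj'   # hypothetical ignore path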


@ -0,0 +1,32 @@
parameters:
# Language that should be analyzed. Defaults to csharp
language: csharp
# Build Commands
buildCommands: ''
overrideParameters: '' # Optional: to override values for parameters.
additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
# Optional: if specified, restore and use this version of Guardian instead of the default.
overrideGuardianVersion: ''
# Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
# diagnosis of problems with specific tool configurations.
publishGuardianDirectoryToPipeline: false
# The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
# parameters rather than relying on YAML. It may be better to use a local script, because you can
# reproduce results locally without piecing together a command based on the YAML.
executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
# There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
# 'continueOnError', the parameter value is not correctly picked up.
# This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
# optional: determines whether to continue the build if the step errors;
sdlContinueOnError: false
steps:
- template: /eng/common/templates/steps/execute-sdl.yml
parameters:
overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
overrideParameters: ${{ parameters.overrideParameters }}
additionalParameters: '${{ parameters.additionalParameters }}
-CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")'
publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
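
A minimal sketch of how a CodeQL-focused pipeline might call this wrapper, passing the language and the command used to build the code under analysis. The template path and build command are illustrative assumptions; the parameter names come from the template above.

steps:
- template: /eng/common/templates/steps/execute-codeql.yml   # assumed path for this template
  parameters:
    language: csharp
    buildCommands: './build.sh --restore --build'            # hypothetical build command
    sdlContinueOnError: false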


@ -0,0 +1,88 @@
parameters:
overrideGuardianVersion: ''
executeAllSdlToolsScript: ''
overrideParameters: ''
additionalParameters: ''
publishGuardianDirectoryToPipeline: false
sdlContinueOnError: false
condition: ''
steps:
- task: NuGetAuthenticate@1
inputs:
nuGetServiceConnections: GuardianConnect
- task: NuGetToolInstaller@1
displayName: 'Install NuGet.exe'
- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
- pwsh: |
Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
. .\sdl.ps1
$guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }}
Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
displayName: Install Guardian (Overridden)
- ${{ if eq(parameters.overrideGuardianVersion, '') }}:
- pwsh: |
Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
. .\sdl.ps1
$guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts
Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
displayName: Install Guardian
- ${{ if ne(parameters.overrideParameters, '') }}:
- powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
displayName: Execute SDL (Overridden)
continueOnError: ${{ parameters.sdlContinueOnError }}
condition: ${{ parameters.condition }}
- ${{ if eq(parameters.overrideParameters, '') }}:
- powershell: ${{ parameters.executeAllSdlToolsScript }}
-GuardianCliLocation $(GuardianCliLocation)
-NugetPackageDirectory $(Build.SourcesDirectory)\.packages
-AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
${{ parameters.additionalParameters }}
displayName: Execute SDL
continueOnError: ${{ parameters.sdlContinueOnError }}
condition: ${{ parameters.condition }}
- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}:
# We want to publish the Guardian results and configuration for easy diagnosis. However, the
# '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default
# tooling files. Some of these files are large and aren't useful during an investigation, so
# exclude them by simply deleting them before publishing. (As of writing, there is no documented
# way to selectively exclude a dir from the pipeline artifact publish task.)
- task: DeleteFiles@1
displayName: Delete Guardian dependencies to avoid uploading
inputs:
SourceFolder: $(Agent.BuildDirectory)/.gdn
Contents: |
c
i
condition: succeededOrFailed()
- publish: $(Agent.BuildDirectory)/.gdn
artifact: GuardianConfiguration
displayName: Publish GuardianConfiguration
condition: succeededOrFailed()
# Publish the SARIF files in a container named CodeAnalysisLogs to enable integration
# with the "SARIF SAST Scans Tab" Azure DevOps extension
- task: CopyFiles@2
displayName: Copy SARIF files
inputs:
flattenFolders: true
sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/
contents: '**/*.sarif'
targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs
condition: succeededOrFailed()
# Use PublishBuildArtifacts because the SARIF extension only checks this case
# see microsoft/sarif-azuredevops-extension#4
- task: PublishBuildArtifacts@1
displayName: Publish SARIF files to CodeAnalysisLogs container
inputs:
pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs
artifactName: CodeAnalysisLogs
condition: succeededOrFailed()


@ -1,7 +1,48 @@
steps:
- template: /eng/common/core-templates/steps/generate-sbom.yml
parameters:
is1ESPipeline: false
# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
# PackageName - The name of the package this SBOM represents.
# PackageVersion - The version of the package this SBOM represents.
# ManifestDirPath - The path of the directory where the generated manifest files will be placed
# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
parameters:
PackageVersion: 8.0.0
BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
PackageName: '.NET'
ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
IgnoreDirectories: ''
sbomContinueOnError: true
steps:
- task: PowerShell@2
displayName: Prep for SBOM generation in (Non-linux)
condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
inputs:
filePath: ./eng/common/generate-sbom-prep.ps1
arguments: ${{parameters.manifestDirPath}}
# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
- script: |
chmod +x ./eng/common/generate-sbom-prep.sh
./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
displayName: Prep for SBOM generation in (Linux)
condition: eq(variables['Agent.Os'], 'Linux')
continueOnError: ${{ parameters.sbomContinueOnError }}
- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
displayName: 'Generate SBOM manifest'
continueOnError: ${{ parameters.sbomContinueOnError }}
inputs:
PackageName: ${{ parameters.packageName }}
BuildDropPath: ${{ parameters.buildDropPath }}
PackageVersion: ${{ parameters.packageVersion }}
ManifestDirPath: ${{ parameters.manifestDirPath }}
${{ if ne(parameters.IgnoreDirectories, '') }}:
AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
- task: PublishPipelineArtifact@1
displayName: Publish SBOM manifest
continueOnError: ${{parameters.sbomContinueOnError}}
inputs:
targetPath: '${{parameters.manifestDirPath}}'
artifactName: $(ARTIFACT_NAME)
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
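
As a rough usage sketch, a build job can append SBOM generation with its own package metadata along these lines. The template path, package name, and version are illustrative assumptions; the parameter names come from the template above.

steps:
- template: /eng/common/templates/steps/generate-sbom.yml   # assumed path for this template
  parameters:
    PackageName: '.NET MAUI'                                # hypothetical package name
    PackageVersion: '9.0.0-preview.1'                       # hypothetical version
    BuildDropPath: '$(Build.SourcesDirectory)/artifacts'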


@ -1,40 +0,0 @@
parameters:
- name: is1ESPipeline
type: boolean
default: false
- name: displayName
type: string
default: 'Publish to Build Artifact'
- name: condition
type: string
default: succeeded()
- name: artifactName
type: string
- name: pathToPublish
type: string
- name: continueOnError
type: boolean
default: false
- name: publishLocation
type: string
default: 'Container'
steps:
- ${{ if eq(parameters.is1ESPipeline, true) }}:
- 'eng/common/templates cannot be referenced from a 1ES managed template': error
- task: PublishBuildArtifacts@1
displayName: ${{ parameters.displayName }}
condition: ${{ parameters.condition }}
${{ if parameters.continueOnError }}:
continueOnError: ${{ parameters.continueOnError }}
inputs:
PublishLocation: ${{ parameters.publishLocation }}
PathtoPublish: ${{ parameters.pathToPublish }}
${{ if parameters.artifactName }}:
ArtifactName: ${{ parameters.artifactName }}


@ -1,7 +1,23 @@
steps:
- template: /eng/common/core-templates/steps/publish-logs.yml
parameters:
is1ESPipeline: false
parameters:
StageLabel: ''
JobLabel: ''
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
steps:
- task: Powershell@2
displayName: Prepare Binlogs to Upload
inputs:
targetType: inline
script: |
New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
continueOnError: true
condition: always()
- task: PublishBuildArtifacts@1
displayName: Publish Logs
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
PublishLocation: Container
ArtifactName: PostBuildLogs
continueOnError: true
condition: always()


@ -1,34 +0,0 @@
parameters:
- name: is1ESPipeline
type: boolean
default: false
- name: args
type: object
default: {}
steps:
- ${{ if eq(parameters.is1ESPipeline, true) }}:
- 'eng/common/templates cannot be referenced from a 1ES managed template': error
- task: PublishPipelineArtifact@1
displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
${{ if parameters.args.condition }}:
condition: ${{ parameters.args.condition }}
${{ else }}:
condition: succeeded()
${{ if parameters.args.continueOnError }}:
continueOnError: ${{ parameters.args.continueOnError }}
inputs:
targetPath: ${{ parameters.args.targetPath }}
${{ if parameters.args.artifactName }}:
artifactName: ${{ parameters.args.artifactName }}
${{ if parameters.args.publishLocation }}:
publishLocation: ${{ parameters.args.publishLocation }}
${{ if parameters.args.fileSharePath }}:
fileSharePath: ${{ parameters.args.fileSharePath }}
${{ if parameters.args.Parallel }}:
parallel: ${{ parameters.args.Parallel }}
${{ if parameters.args.parallelCount }}:
parallelCount: ${{ parameters.args.parallelCount }}
${{ if parameters.args.properties }}:
properties: ${{ parameters.args.properties }}


@ -1,7 +1,28 @@
steps:
- template: /eng/common/core-templates/steps/retain-build.yml
parameters:
is1ESPipeline: false
parameters:
# Optional azure devops PAT with build execute permissions for the build's organization,
# only needed if the build that should be retained ran on a different organization than
# the pipeline where this template is executing from
Token: ''
# Optional BuildId to retain, defaults to the current running build
BuildId: ''
# Azure devops Organization URI for the build in the https://dev.azure.com/<organization> format.
# Defaults to the organization the current pipeline is running on
AzdoOrgUri: '$(System.CollectionUri)'
# Azure devops project for the build. Defaults to the project the current pipeline is running on
AzdoProject: '$(System.TeamProject)'
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
steps:
- task: powershell@2
inputs:
targetType: 'filePath'
filePath: eng/common/retain-build.ps1
pwsh: true
arguments: >
-AzdoOrgUri: ${{parameters.AzdoOrgUri}}
-AzdoProject ${{parameters.AzdoProject}}
-Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
-BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
displayName: Enable permanent build retention
env:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
BUILD_ID: $(Build.BuildId)
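
For illustration, retaining the current build only requires referencing the template, optionally with an explicit BuildId. The template path is an assumption; the parameter name comes from the template above.

steps:
- template: /eng/common/templates/steps/retain-build.yml   # assumed path for this template
  parameters:
    BuildId: $(Build.BuildId)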


@ -0,0 +1,7 @@
parameters:
agentOs: ''
steps: []
steps:
- ${{ if ne(parameters.agentOs, 'Windows_NT') }}:
- ${{ parameters.steps }}


@ -0,0 +1,7 @@
parameters:
agentOs: ''
steps: []
steps:
- ${{ if eq(parameters.agentOs, 'Windows_NT') }}:
- ${{ parameters.steps }}
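
These two small helpers gate a list of steps on the agent OS at template-expansion time, so the caller must pass a compile-time value for agentOs (for example, a parameter from its own job matrix). A minimal sketch follows; the template path and script are illustrative assumptions.

steps:
- template: /eng/common/templates/steps/run-on-windows.yml   # assumed path for the Windows_NT variant above
  parameters:
    agentOs: ${{ parameters.agentOs }}                       # assumes the calling template exposes agentOs
    steps:
    - script: echo "Running on a Windows agent"
      displayName: Windows-only step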


@ -0,0 +1,33 @@
parameters:
# If parameter1 equals parameter2, run the 'ifScript' command; otherwise run the 'elseScript' command.
parameter1: ''
parameter2: ''
ifScript: ''
elseScript: ''
# name of script step
name: Script
# display name of script step
displayName: If-Equal-Else Script
# environment
env: {}
# conditional expression for step execution
condition: ''
steps:
- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}:
- script: ${{ parameters.ifScript }}
name: ${{ parameters.name }}
displayName: ${{ parameters.displayName }}
env: ${{ parameters.env }}
condition: ${{ parameters.condition }}
- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}:
- script: ${{ parameters.elseScript }}
name: ${{ parameters.name }}
displayName: ${{ parameters.displayName }}
env: ${{ parameters.env }}
condition: ${{ parameters.condition }}
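
A minimal sketch of using this helper to select one of two scripts at template-expansion time based on a compile-time comparison. The template path and all values below are illustrative assumptions; the parameter names come from the template above.

steps:
- template: /eng/common/templates/steps/run-script-ifequalelse.yml   # assumed path for this template
  parameters:
    parameter1: ${{ parameters.buildConfiguration }}                 # assumes the caller exposes this parameter
    parameter2: 'Release'
    ifScript: echo "Release configuration - running extra validation"
    elseScript: echo "Non-release configuration - skipping extra validation"
    displayName: Configuration-specific step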


@ -1,7 +1,91 @@
steps:
- template: /eng/common/core-templates/steps/send-to-helix.yml
parameters:
is1ESPipeline: false
# Please remember to update the documentation if you make changes to these parameters!
parameters:
HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
HelixConfiguration: '' # optional -- additional property attached to a job
HelixPreCommands: '' # optional -- commands to run before Helix work item execution
HelixPostCommands: '' # optional -- commands to run after Helix work item execution
WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
Creator: '' # optional -- if the build is external, use this to specify who is sending the job
DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
steps:
- powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
env:
BuildConfig: $(_BuildConfig)
HelixSource: ${{ parameters.HelixSource }}
HelixType: ${{ parameters.HelixType }}
HelixBuild: ${{ parameters.HelixBuild }}
HelixConfiguration: ${{ parameters.HelixConfiguration }}
HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
HelixAccessToken: ${{ parameters.HelixAccessToken }}
HelixPreCommands: ${{ parameters.HelixPreCommands }}
HelixPostCommands: ${{ parameters.HelixPostCommands }}
WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
WorkItemCommand: ${{ parameters.WorkItemCommand }}
WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
XUnitProjects: ${{ parameters.XUnitProjects }}
XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
HelixBaseUri: ${{ parameters.HelixBaseUri }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
- script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
env:
BuildConfig: $(_BuildConfig)
HelixSource: ${{ parameters.HelixSource }}
HelixType: ${{ parameters.HelixType }}
HelixBuild: ${{ parameters.HelixBuild }}
HelixConfiguration: ${{ parameters.HelixConfiguration }}
HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
HelixAccessToken: ${{ parameters.HelixAccessToken }}
HelixPreCommands: ${{ parameters.HelixPreCommands }}
HelixPostCommands: ${{ parameters.HelixPostCommands }}
WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
WorkItemCommand: ${{ parameters.WorkItemCommand }}
WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
XUnitProjects: ${{ parameters.XUnitProjects }}
XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
HelixBaseUri: ${{ parameters.HelixBaseUri }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
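
As a rough usage sketch, a test job typically sends xUnit projects to Helix along these lines. The template path, queue names, versions, project path, and creator are illustrative assumptions; the parameter names come from the template above.

steps:
- template: /eng/common/templates/steps/send-to-helix.yml   # assumed path for this template
  parameters:
    HelixType: test/product/
    HelixTargetQueues: Windows.10.Amd64.Open;Ubuntu.2004.Amd64.Open   # hypothetical queue names
    XUnitProjects: $(Build.SourcesDirectory)/tests/SomeTests.csproj   # hypothetical test project
    XUnitPublishTargetFramework: net8.0
    XUnitRuntimeTargetFramework: net6.0
    XUnitRunnerVersion: '2.4.2'
    IncludeDotNetCli: true
    DotNetCliPackageType: sdk
    DotNetCliVersion: '8.0.100'
    Creator: dotnet-maui                                              # hypothetical creator for token-less (external) jobs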


@ -1,7 +1,129 @@
steps:
- template: /eng/common/core-templates/steps/source-build.yml
parameters:
is1ESPipeline: false
parameters:
# This template adds arcade-powered source-build to CI.
${{ each parameter in parameters }}:
${{ parameter.key }}: ${{ parameter.value }}
# This is a 'steps' template, and is intended for advanced scenarios where the existing build
# infra has a careful build methodology that must be followed. For example, a repo
# (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
# artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
# GitHub. Using this steps template leaves room for that infra to be included.
# Defines the platform on which to run the steps. See 'eng/common/templates/job/source-build.yml'
# for details. The entire object is described in the 'job' template for simplicity, even though
# the usage of the properties on this object is split between the 'job' and 'steps' templates.
platform: {}
steps:
# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
- script: |
set -x
df -h
# If building on the internal project, the artifact feeds variable may be available (usually only if needed)
# In that case, call the feed setup script to add internal feeds corresponding to public ones.
# In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
# This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
# changes.
internalRestoreArgs=
if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
# Temporarily work around https://github.com/dotnet/arcade/issues/7709
chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
$(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
# The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
# This only works if there is a username/email configured, which won't be the case in most CI runs.
git config --get user.email
if [ $? -ne 0 ]; then
git config user.email dn-bot@microsoft.com
git config user.name dn-bot
fi
fi
# If building on the internal project, the internal storage variable may be available (usually only if needed)
# In that case, add variables to allow the download of internal runtimes if the specified versions are not found
# in the default public locations.
internalRuntimeDownloadArgs=
if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
fi
buildConfig=Release
# Check if AzDO substitutes in a build config from a variable, and use it if so.
if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
buildConfig='$(_BuildConfig)'
fi
officialBuildArgs=
if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
fi
targetRidArgs=
if [ '${{ parameters.platform.targetRID }}' != '' ]; then
targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
fi
runtimeOsArgs=
if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
fi
baseOsArgs=
if [ '${{ parameters.platform.baseOS }}' != '' ]; then
baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
fi
publishArgs=
if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
publishArgs='--publish'
fi
assetManifestFileName=SourceBuild_RidSpecific.xml
if [ '${{ parameters.platform.name }}' != '' ]; then
assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
fi
${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
--configuration $buildConfig \
--restore --build --pack $publishArgs -bl \
$officialBuildArgs \
$internalRuntimeDownloadArgs \
$internalRestoreArgs \
$targetRidArgs \
$runtimeOsArgs \
$baseOsArgs \
/p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
/p:ArcadeBuildFromSource=true \
/p:AssetManifestFileName=$assetManifestFileName
displayName: Build
# Upload build logs for diagnosis.
- task: CopyFiles@2
displayName: Prepare BuildLogs staging directory
inputs:
SourceFolder: '$(Build.SourcesDirectory)'
Contents: |
**/*.log
**/*.binlog
artifacts/source-build/self/prebuilt-report/**
TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
CleanTargetFolder: true
continueOnError: true
condition: succeededOrFailed()
- task: PublishPipelineArtifact@1
displayName: Publish BuildLogs
inputs:
targetPath: '$(Build.StagingDirectory)/BuildLogs'
artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
continueOnError: true
condition: succeededOrFailed()
# Manually inject component detection so that we can ignore the source build upstream cache, which contains
# a nupkg cache of input packages (a local feed).
# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
- task: ComponentGovernanceComponentDetection@0
displayName: Component Detection (Exclude upstream cache)
inputs:
ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache'
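
The build script in the template above leans on an Azure Pipelines macro-substitution trick to decide whether optional secrets (internal feed credentials, internal runtime tokens, a pipeline-supplied build configuration) were actually provided: the agent expands $(VarName) macros before the script runs, so an undefined variable is left behind as the literal text "$(VarName)", and comparing against the escaped literal '$''(VarName)' detects whether substitution happened. Below is a minimal, self-contained sketch of that pattern and of the conditional argument accumulation that follows it; it is not part of the arcade template, and the variable name my-feed-token and the /p:InternalFeedToken property are placeholders invented for illustration.

#!/usr/bin/env bash
# Sketch only. Azure Pipelines replaces $(my-feed-token) with its value before the
# script runs; if the variable is undefined, the literal text "$(my-feed-token)"
# survives. The right-hand side '$''(my-feed-token)' concatenates '$' and
# '(my-feed-token)', producing that same literal, so the comparison is true only
# when the macro was actually substituted.
extraArgs=
if [ '$(my-feed-token)' != '$''(my-feed-token)' ]; then
  # Secret was supplied to the job; pass it through (hypothetical property name).
  extraArgs='/p:InternalFeedToken=$(my-feed-token)'
fi

# Arguments are accumulated in plain strings and appended only when non-empty,
# keeping the final command line free of stray empty tokens.
./build.sh --ci --configuration Release $extraArgs

The same shape is repeated in the template for the target RID, runtime OS, base OS, and official-build arguments: each optional input becomes an empty-by-default string that is filled in only when its source value is present.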

View file

@ -0,0 +1,102 @@
parameters:
maxRetries: 5
retryDelay: 10 # in seconds
steps:
- bash: |
if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then
errorCount=0
else
errorCount=1
fi
warningCount=0
curlStatus=1
retryCount=0
# retry loop to harden against spotty telemetry connections
# we don't retry successes and 4xx client errors
until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
do
if [ $retryCount -gt 0 ]; then
echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
sleep $RetryDelay
fi
# create a temporary file for curl output
res=`mktemp`
curlResult=`
curl --verbose --output $res --write-out "%{http_code}"\
-H 'Content-Type: application/json' \
-H "X-Helix-Job-Token: $Helix_JobToken" \
-H 'Content-Length: 0' \
-X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \
--data-urlencode "errorCount=$errorCount" \
--data-urlencode "warningCount=$warningCount"`
curlStatus=$?
if [ $curlStatus -eq 0 ]; then
if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
curlStatus=$curlResult
fi
fi
let retryCount++
done
if [ $curlStatus -ne 0 ]; then
echo "Failed to Send Build Finish information after $retryCount retries"
vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus"
echo "##$vstsLogOutput"
exit 1
fi
displayName: Send Unix Build End Telemetry
env:
# defined via VSTS variables in start-job.sh
Helix_JobToken: $(Helix_JobToken)
Helix_WorkItemId: $(Helix_WorkItemId)
MaxRetries: ${{ parameters.maxRetries }}
RetryDelay: ${{ parameters.retryDelay }}
condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
- powershell: |
if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) {
$ErrorCount = 0
} else {
$ErrorCount = 1
}
$WarningCount = 0
# Basic retry loop to harden against server flakiness
$retryCount = 0
while ($retryCount -lt $env:MaxRetries) {
try {
Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" `
-Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
break
}
catch {
$statusCode = $_.Exception.Response.StatusCode.value__
if ($statusCode -ge 400 -and $statusCode -le 499) {
Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
exit 1
}
Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
$retryCount++
sleep $env:RetryDelay
continue
}
}
if ($retryCount -ge $env:MaxRetries) {
Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
exit 1
}
displayName: Send Windows Build End Telemetry
env:
# defined via VSTS variables in start-job.ps1
Helix_JobToken: $(Helix_JobToken)
Helix_WorkItemId: $(Helix_WorkItemId)
MaxRetries: ${{ parameters.maxRetries }}
RetryDelay: ${{ parameters.retryDelay }}
condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT'))
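
Both telemetry steps above share the same retry shape: a bounded number of attempts with a fixed delay, where transport failures and 5xx responses are retried but 4xx client errors end the loop immediately, since a request the service has already rejected will not succeed on a later attempt. The bash sketch below mirrors that shape under stated assumptions; the URL is a placeholder, not the Helix API, and the variable names are illustrative rather than taken from the template.

#!/usr/bin/env bash
# Sketch of a bounded retry loop that treats 4xx as terminal and everything else
# (transport errors, 5xx) as retryable. Placeholder endpoint; not the Helix API.
maxRetries=5
retryDelay=10   # seconds
attempt=0
status=1

until [[ $status -eq 0 || ( $status -ge 400 && $status -le 499 ) || $attempt -ge $maxRetries ]]; do
  if [ $attempt -gt 0 ]; then
    echo "Request failed; waiting $retryDelay seconds before retrying..."
    sleep $retryDelay
  fi
  httpCode=$(curl --silent --output /dev/null --write-out '%{http_code}' \
    -X POST 'https://example.invalid/telemetry/finish')
  status=$?
  # curl succeeded at the transport level; promote non-2xx HTTP codes into the
  # status so the loop condition can tell 4xx (terminal) from 5xx (retryable).
  if [ $status -eq 0 ] && { [ "$httpCode" -lt 200 ] || [ "$httpCode" -gt 299 ]; }; then
    status=$httpCode
  fi
  attempt=$((attempt + 1))
done

if [ $status -ne 0 ]; then
  echo "Giving up after $attempt attempt(s); last status: $status"
  exit 1
fi

Capping retries and refusing to retry client errors keeps a flaky telemetry endpoint from stalling the job, while still absorbing the transient network and server-side failures the comments in the template call out.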

Some files were not shown because too many files changed in this diff.