Commit 49f4cfdc0d

@@ -0,0 +1,10 @@
{
"instanceUrl": "https://devdiv.visualstudio.com/",
"template": "TFSDEVDIV",
"projectName": "DEVDIV",
"areaPath": "DevDiv\\NET Developer Experience\\Productivity",
"iterationPath": "DevDiv",
"notificationAliases": [ "mlinfraswat@microsoft.com" ],
"repositoryName":"roslyn-sdk",
"codebaseName": "roslyn-sdk"
}

@@ -8,6 +8,8 @@ root = true
indent_style = space
# (Please don't specify an indent_size here; that has too many unintended consequences.)

file_header_template = Licensed to the .NET Foundation under one or more agreements.\nThe .NET Foundation licenses this file to you under the MIT license.\nSee the LICENSE file in the project root for more information.

# Code files
[*.{cs,csx,vb,vbx}]
indent_size = 4
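
The file_header_template value above (with `\n` separators) is what the IDE0073 file-header rule checks for at the top of each code file. As an illustrative sketch only (namespace and type names are not from this repo), a C# file satisfying it begins like this:

```csharp
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace Example
{
    // Placeholder type; only the header comment above is what the rule verifies.
    internal static class Placeholder
    {
    }
}
```
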
@@ -80,6 +82,30 @@ csharp_new_line_before_finally = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_members_in_anonymous_types = true

# Space preferences
csharp_space_after_cast = false
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_after_comma = true
csharp_space_after_dot = false
csharp_space_after_keywords_in_control_flow_statements = true
csharp_space_after_semicolon_in_for_statement = true
csharp_space_around_binary_operators = before_and_after
csharp_space_around_declaration_statements = do_not_ignore
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_before_comma = false
csharp_space_before_dot = false
csharp_space_before_open_square_brackets = false
csharp_space_before_semicolon_in_for_statement = false
csharp_space_between_empty_square_brackets = false
csharp_space_between_method_call_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
csharp_space_between_method_call_parameter_list_parentheses = false
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
csharp_space_between_method_declaration_name_and_open_parenthesis = false
csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_parentheses = false
csharp_space_between_square_brackets = false

[*.cs]
# Disable enforcement of items covered by StyleCop Analyzers
dotnet_style_qualification_for_field = false:none
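
As a rough illustration of the formatting these csharp_* settings describe (a sketch, not code from this repo), a conforming snippet looks like this:

```csharp
using System;
using System.Collections.Generic;

internal static class FormattingSample
{
    private static void Main()
    {
        var numbers = new List<int> { 1, 2, 3 };     // csharp_space_after_comma = true
        if (numbers.Count > 0)                       // space after control-flow keyword, none inside parentheses
        {
            var first = (double)numbers[0];          // csharp_space_after_cast = false
            var point = new Point
            {
                X = first,                           // object-initializer members each on their own line
                Y = 0d,
            };
            Console.WriteLine(point.X + point.Y);    // binary operators surrounded by spaces
        }
    }

    private struct Point
    {
        public double X;
        public double Y;
    }
}
```
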
@@ -0,0 +1,36 @@
is_global = true

# IDE0055: Fix formatting
dotnet_diagnostic.IDE0055.severity = warning

dotnet_diagnostic.SA1101.severity = none
dotnet_diagnostic.SA1201.severity = none
dotnet_diagnostic.SA1202.severity = none
dotnet_diagnostic.SA1204.severity = none
dotnet_diagnostic.SA1308.severity = none
dotnet_diagnostic.SA1309.severity = none
dotnet_diagnostic.SA1311.severity = none
dotnet_diagnostic.SA1412.severity = warning
dotnet_diagnostic.SA1501.severity = none
dotnet_diagnostic.SA1600.severity = none
dotnet_diagnostic.SA1601.severity = none
dotnet_diagnostic.SA1629.severity = none
dotnet_diagnostic.SX1101.severity = warning
dotnet_diagnostic.SX1309.severity = warning

# DiagnosticId must be unique across analyzers: Many tests use similar analyzer instances
dotnet_diagnostic.RS1019.severity = none
# Enable analyzer release tracking: only used in this repository for testing
dotnet_diagnostic.RS2008.severity = none
# Prefer 'IsKind' for checking syntax kinds
dotnet_diagnostic.RS1034.severity = none
# Use 'SymbolEqualityComparer' when comparing symbols
dotnet_diagnostic.RS1024.severity = none
# Configure generated code analysis
dotnet_diagnostic.RS1025.severity = none
# Enable concurrent execution
dotnet_diagnostic.RS1026.severity = none
# The diagnostic title should not contain a period, nor any line return character, nor any leading or trailing whitespaces
dotnet_diagnostic.RS1031.severity = none
# The diagnostic message should not contain any line return character nor any leading or trailing whitespaces and should either be a single sentence without a trailing period or a multi-sentences with a trailing period
dotnet_diagnostic.RS1032.severity = none
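
Taken together, the StyleCop settings above turn off SA1101 ("prefix local calls with this") and SA1309 ("field names should not begin with underscore") while raising SX1101 and SX1309 to warnings, so the repo's preferred style is underscore-prefixed private fields with no `this.` qualification. A minimal C# sketch of that style (illustrative only, not code from this repo):

```csharp
public class Counter
{
    private int _count;                  // SX1309: field name begins with an underscore (SA1309 is disabled)

    public void Increment() => _count++; // SX1101: member access is not qualified with 'this.' (SA1101 is disabled)

    public int Count => _count;
}
```
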
.vsts-ci.yml

@ -1,6 +1,6 @@
|
|||
resources:
|
||||
- repo: self
|
||||
clean: true
|
||||
clean: true
|
||||
|
||||
# The variables `_DotNetArtifactsCategory` and `_DotNetValidationArtifactsCategory` are required for proper publishing of build artifacts. See https://github.com/dotnet/roslyn/pull/38259
|
||||
variables:
|
||||
|
@ -11,19 +11,27 @@ variables:
|
|||
|
||||
# Branches that trigger a build on commit
|
||||
trigger:
|
||||
- master
|
||||
- main
|
||||
- dev17.0
|
||||
|
||||
stages:
|
||||
- stage: build
|
||||
displayName: Build and Test
|
||||
displayName: Build
|
||||
pool:
|
||||
name: VSEngSS-MicroBuild2022-1ES
|
||||
demands:
|
||||
- cmd
|
||||
|
||||
jobs:
|
||||
- ${{ if eq(variables['Build.SourceBranch'], 'refs/heads/main') }}:
|
||||
- template: /eng/common/templates/job/onelocbuild.yml
|
||||
parameters:
|
||||
CreatePr: true
|
||||
LclSource: lclFilesfromPackage
|
||||
LclPackageId: 'LCL-JUNO-PROD-ROSLYNSDK'
|
||||
|
||||
- job: OfficialBuild
|
||||
displayName: Official Build
|
||||
pool:
|
||||
name: VSEng-MicroBuildVS2017
|
||||
demands:
|
||||
- cmd
|
||||
|
||||
steps:
|
||||
- task: ms-vseng.MicroBuildTasks.30666190-6959-11e5-9f96-f56098202fef.MicroBuildSigningPlugin@1
|
||||
|
@ -32,11 +40,21 @@ stages:
|
|||
signType: $(SignType)
|
||||
esrpSigning: true
|
||||
condition: and(succeeded(), ne(variables['SignType'], ''))
|
||||
|
||||
|
||||
- task: ms-vseng.MicroBuildTasks.32f78468-e895-4f47-962c-58a699361df8.MicroBuildSwixPlugin@1
|
||||
displayName: Install Swix Plugin
|
||||
|
||||
- script: eng\common\CIBuild.cmd
|
||||
|
||||
- task: PowerShell@2
|
||||
inputs:
|
||||
targetType: 'filePath'
|
||||
filePath: eng\common\Build.ps1
|
||||
arguments: > # Use this to avoid newline characters in multiline string
|
||||
-restore
|
||||
-build
|
||||
-sign
|
||||
-pack
|
||||
-publish
|
||||
-ci
|
||||
-configuration $(BuildConfiguration)
|
||||
/p:OfficialBuildId=$(Build.BuildNumber)
|
||||
/p:VisualStudioDropName=$(VisualStudioDropName)
|
||||
|
@ -47,23 +65,8 @@ stages:
|
|||
/p:DotnetPublishUsingPipelines=true
|
||||
displayName: Build
|
||||
|
||||
- task: PublishTestResults@1
|
||||
displayName: Publish Test Results
|
||||
inputs:
|
||||
testRunner: XUnit
|
||||
testResultsFiles: 'artifacts/TestResults/$(BuildConfiguration)/*.xml'
|
||||
mergeTestResults: true
|
||||
testRunTitle: 'Unit Tests'
|
||||
condition: always()
|
||||
- template: eng\common\templates\steps\generate-sbom.yml
|
||||
|
||||
- task: NuGetPublisher@0
|
||||
displayName: Publish NuGet Packages to MyGet
|
||||
inputs:
|
||||
searchPattern: 'artifacts\packages\$(BuildConfiguration)\Shipping\*.nupkg'
|
||||
connectedServiceName: 'RoslynSdk - NuGet feed'
|
||||
nuGetVersion: 4.0.0.2283
|
||||
condition: succeeded()
|
||||
|
||||
# Publishes setup VSIXes to a drop.
|
||||
# Note: The insertion tool looks for the display name of this task in the logs.
|
||||
- task: ms-vseng.MicroBuildTasks.4305a8de-ba66-4d8b-b2d1-0dc4ecbbf5e8.MicroBuildUploadVstsDropFolder@1
|
||||
|
@ -71,16 +74,17 @@ stages:
|
|||
inputs:
|
||||
DropName: $(VisualStudioDropName)
|
||||
DropFolder: 'artifacts\VSSetup\$(BuildConfiguration)\Insertion'
|
||||
AccessToken: $(System.AccessToken)
|
||||
condition: succeeded()
|
||||
|
||||
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Publish Logs
|
||||
inputs:
|
||||
PathtoPublish: '$(Build.SourcesDirectory)\artifacts\log\$(BuildConfiguration)'
|
||||
ArtifactName: 'Logs'
|
||||
continueOnError: true
|
||||
condition: not(succeeded())
|
||||
|
||||
condition: always()
|
||||
|
||||
# Publish an artifact that the RoslynInsertionTool is able to find by its name.
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Publish Artifact VSSetup
|
||||
|
@ -88,8 +92,8 @@ stages:
|
|||
PathtoPublish: 'artifacts\VSSetup\$(BuildConfiguration)'
|
||||
ArtifactName: 'VSSetup'
|
||||
condition: succeeded()
|
||||
|
||||
# Publish our NuPkgs as an artifact. The name of this artifact must be PackageArtifacts as the
|
||||
|
||||
# Publish our NuPkgs as an artifact. The name of this artifact must be PackageArtifacts as the
|
||||
# arcade templates depend on the name.
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Publish Artifact Packages
|
||||
|
@ -97,7 +101,7 @@ stages:
|
|||
PathtoPublish: 'artifacts\packages\$(BuildConfiguration)'
|
||||
ArtifactName: 'PackageArtifacts'
|
||||
condition: succeeded()
|
||||
|
||||
|
||||
# Publish Asset Manifests for Build Asset Registry job
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Publish Asset Manifests
|
||||
|
@ -105,7 +109,7 @@ stages:
|
|||
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(BuildConfiguration)/AssetManifest'
|
||||
ArtifactName: AssetManifests
|
||||
condition: succeeded()
|
||||
|
||||
|
||||
- task: ms-vseng.MicroBuildTasks.521a94ea-9e68-468a-8167-6dcf361ea776.MicroBuildCleanup@1
|
||||
displayName: Cleanup
|
||||
condition: always()
|
||||
|
@ -114,14 +118,51 @@ stages:
|
|||
- template: /eng/common/templates/job/publish-build-assets.yml
|
||||
parameters:
|
||||
publishUsingPipelines: true
|
||||
dependsOn:
|
||||
dependsOn:
|
||||
- OfficialBuild
|
||||
queue:
|
||||
name: Hosted VS2017
|
||||
|
||||
- stage: analysis
|
||||
displayName: Code analysis
|
||||
pool:
|
||||
name: VSEngSS-MicroBuild2022-1ES
|
||||
demands:
|
||||
- cmd
|
||||
jobs:
|
||||
- job: codeql
|
||||
displayName: CodeQL
|
||||
timeoutInMinutes: 120
|
||||
variables:
|
||||
# CG is handled in the primary CI pipeline
|
||||
- name: skipComponentGovernanceDetection
|
||||
value: true
|
||||
# Force CodeQL enabled so it may be run on any branch
|
||||
- name: Codeql.Enabled
|
||||
value: true
|
||||
# Do not let CodeQL 3000 Extension gate scan frequency
|
||||
- name: Codeql.Cadence
|
||||
value: 0
|
||||
- name: Codeql.TSAEnabled
|
||||
value: true
|
||||
steps:
|
||||
- task: UseDotNet@2
|
||||
inputs:
|
||||
useGlobalJson: true
|
||||
- task: CodeQL3000Init@0
|
||||
displayName: CodeQL Initialize
|
||||
- script: eng\common\cibuild.cmd
|
||||
-configuration Release
|
||||
-prepareMachine
|
||||
/p:Test=false
|
||||
displayName: Windows Build
|
||||
- task: CodeQL3000Finalize@0
|
||||
displayName: CodeQL Finalize
|
||||
|
||||
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- template: eng\common\templates\post-build\post-build.yml
|
||||
parameters:
|
||||
publishingInfraVersion: 3
|
||||
# Symbol validation is not entirely reliable as of yet, so should be turned off until
|
||||
# https://github.com/dotnet/arcade/issues/2871 is resolved.
|
||||
enableSymbolValidation: false
|
||||
|
|
|
@ -1,12 +1,24 @@
|
|||
# Branches that trigger a build on commit
|
||||
trigger:
|
||||
- main
|
||||
- dev17.0
|
||||
|
||||
# Branches that trigger builds on PR
|
||||
pr:
|
||||
- main
|
||||
- dev17.0
|
||||
|
||||
variables:
|
||||
- name: DOTNET_ROOT
|
||||
value: $(Build.SourcesDirectory)\.dotnet
|
||||
- name: XUNIT_LOGS
|
||||
value: $(Build.SourcesDirectory)\artifacts\log\$(_configuration)
|
||||
|
||||
jobs:
|
||||
- job: Windows
|
||||
pool:
|
||||
name: NetCorePublic-Pool
|
||||
queue: BuildPool.Windows.10.Amd64.Open
|
||||
name: NetCore-Public
|
||||
demands: ImageOverride -equals windows.vs2022.scout.amd64.open
|
||||
strategy:
|
||||
maxParallel: 6
|
||||
matrix:
|
||||
|
@ -19,11 +31,11 @@ jobs:
|
|||
_configuration: Release
|
||||
_solution: Samples
|
||||
SDK Debug Test:
|
||||
_args: -test
|
||||
_args: -test -integrationTest
|
||||
_configuration: Debug
|
||||
_solution: Roslyn-SDK
|
||||
SDK Release Test:
|
||||
_args: -test
|
||||
_args: -test -integrationTest
|
||||
_configuration: Release
|
||||
_solution: Roslyn-SDK
|
||||
SDK Pack:
|
||||
|
@ -37,6 +49,10 @@ jobs:
|
|||
timeoutInMinutes: 90
|
||||
|
||||
steps:
|
||||
- script: |
|
||||
reg add "HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\Windows Error Reporting\LocalDumps" /f /v DumpType /t REG_DWORD /d 2
|
||||
reg add "HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\Windows Error Reporting\LocalDumps" /f /v DumpCount /t REG_DWORD /d 2
|
||||
reg add "HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\Windows Error Reporting\LocalDumps" /f /v DumpFolder /t REG_SZ /d "$(Build.SourcesDirectory)\artifacts\log\$(_configuration)"
|
||||
- script: eng\PRBuild.cmd $(_args) -configuration $(_configuration) -prepareMachine -projects $(Build.SourcesDirectory)\$(_solution).sln /p:OfficialBuild=false
|
||||
- task: PublishBuildArtifacts@1
|
||||
inputs:
|
||||
|
@ -44,7 +60,7 @@ jobs:
|
|||
ArtifactName: '$(_solution) $(_configuration) logs'
|
||||
publishLocation: Container
|
||||
continueOnError: true
|
||||
condition: failed()
|
||||
condition: not(succeeded())
|
||||
- task: PublishTestResults@2
|
||||
inputs:
|
||||
testRunner: 'xUnit'
|
||||
|
|
|
@ -0,0 +1,34 @@
|
|||
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. See the LICENSE file in the project root for more information. -->
|
||||
<Project>
|
||||
<Import Project="Sdk.props" Sdk="Microsoft.DotNet.Arcade.Sdk" />
|
||||
|
||||
<PropertyGroup>
|
||||
<Features>strict</Features>
|
||||
<EnforceCodeStyleInBuild>true</EnforceCodeStyleInBuild>
|
||||
</PropertyGroup>
|
||||
|
||||
<!--
|
||||
Language specific settings
|
||||
-->
|
||||
<Choose>
|
||||
<!-- VB specific settings -->
|
||||
<When Condition="'$(Language)' == 'VB'">
|
||||
<PropertyGroup>
|
||||
<LangVersion>16</LangVersion>
|
||||
</PropertyGroup>
|
||||
</When>
|
||||
|
||||
<!-- C# specific settings -->
|
||||
<When Condition="'$(Language)' == 'C#'">
|
||||
<PropertyGroup>
|
||||
<LangVersion>9</LangVersion>
|
||||
<WarningLevel>9999</WarningLevel>
|
||||
</PropertyGroup>
|
||||
</When>
|
||||
</Choose>
|
||||
|
||||
<PropertyGroup Condition="'$(CopyrightMicrosoft)' != ''">
|
||||
<Copyright>$(CopyrightMicrosoft)</Copyright>
|
||||
<PackageLicenseExpression>MIT</PackageLicenseExpression>
|
||||
</PropertyGroup>
|
||||
</Project>
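
The `<LangVersion>9</LangVersion>` setting above opts C# projects into C# 9. As an illustrative sketch only (not code from this repo), that enables features such as top-level statements, target-typed `new`, and positional records:

```csharp
using System;

// Top-level statements, target-typed 'new', and a positional record are all C# 9 features.
Point origin = new(0, 0);
Console.WriteLine(origin);   // prints "Point { X = 0, Y = 0 }"

public record Point(int X, int Y);
```
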
|
|
@ -0,0 +1,9 @@
|
|||
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. See the LICENSE file in the project root for more information. -->
|
||||
<Project>
|
||||
<Import Project="Sdk.targets" Sdk="Microsoft.DotNet.Arcade.Sdk" />
|
||||
<ItemGroup>
|
||||
<!-- This is to for the version of System.Security.Cryptography.X509Certificates to always be resolved
|
||||
as 4.3 or higher. If this repo ever moves to central package version management, this can be removed. -->
|
||||
<PackageReference Include="System.Security.Cryptography.X509Certificates" Version="4.3.0" PrivateAssets="all" />
|
||||
</ItemGroup>
|
||||
</Project>
|
|
@@ -2,12 +2,13 @@
<configuration>
<packageSources>
<clear />
<add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
<add key="dotnet-public" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json" />
<add key="dotnet-tools" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json" />
<add key="dotnet-eng" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json" />
<add key="dotnet5" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json" />
<add key="roslyn" value="https://dotnet.myget.org/F/roslyn/api/v3/index.json" />
<add key="roslyn-analyzers" value="https://dotnet.myget.org/F/roslyn-analyzers/api/v3/index.json" />
<add key="vssdk" value="https://pkgs.dev.azure.com/azure-public/vside/_packaging/vssdk/nuget/v3/index.json" />
<add key="vs-impl" value="https://pkgs.dev.azure.com/azure-public/vside/_packaging/vs-impl/nuget/v3/index.json" />
<add key="vssdk" value="https://pkgs.dev.azure.com/azure-public/vside/_packaging/vssdk/nuget/v3/index.json" />
</packageSources>
<disabledPackageSources />
</configuration>

|
|
|
@ -3,11 +3,11 @@
|
|||
| Branch | Status |
|
||||
|:------:|:------:|
|
||||
|dev16.0.x|[![Build Status](https://dnceng.visualstudio.com/public/_apis/build/status/dotnet/roslyn-sdk/public-CI?branchName=dev16.0.x&label=build)](https://dnceng.visualstudio.com/public/_build/latest?definitionId=137&branchName=dev16.0.x)|
|
||||
|master|[![Build Status](https://dnceng.visualstudio.com/public/_apis/build/status/dotnet/roslyn-sdk/public-CI?branchName=master&label=build)](https://dnceng.visualstudio.com/public/_build/latest?definitionId=137&branchName=master)|
|
||||
|main|[![Build Status](https://dnceng.visualstudio.com/public/_apis/build/status/dotnet/roslyn-sdk/public-CI?branchName=main&label=build)](https://dnceng.visualstudio.com/public/_build/latest?definitionId=137&branchName=main)|
|
||||
|
||||
# What is the Roslyn-SDK?
|
||||
|
||||
Roslyn is the compiler platform for .NET. It consists of the compiler itself and a powerful set of APIs to interact with the compiler. The Roslyn platform is hosted at [github.com/dotnet/roslyn](https://github.com/dotnet/roslyn). The compiler is part of every .NET installation. The APIs to interact with the compiler are available via NuGet (see the [Roslyn repository](https://github.com/dotnet/roslyn) for details). The Roslyn SDK includes additional components to get you started with advanced topics such as distributing a Roslyn analyzer as Visual Studio extension or to inspect code with the Syntax Vizualizer. The documentation for the Roslyn platform can be found at [docs.microsoft.com/dotnet/csharp/roslyn-sdk](https://docs.microsoft.com/dotnet/csharp/roslyn-sdk). This repository contains code for both the Roslyn-SDK templates and Syntax Vizualizer.
|
||||
Roslyn is the compiler platform for .NET. It consists of the compiler itself and a powerful set of APIs to interact with the compiler. The Roslyn platform is hosted at [github.com/dotnet/roslyn](https://github.com/dotnet/roslyn). The compiler is part of every .NET installation. The APIs to interact with the compiler are available via NuGet (see the [Roslyn repository](https://github.com/dotnet/roslyn) for details). The Roslyn SDK includes additional components to get you started with advanced topics such as distributing a Roslyn analyzer as Visual Studio extension or to inspect code with the Syntax Visualizer. The documentation for the Roslyn platform can be found at [docs.microsoft.com/dotnet/csharp/roslyn-sdk](https://docs.microsoft.com/dotnet/csharp/roslyn-sdk). This repository contains code for both the Roslyn-SDK templates and Syntax Visualizer.
|
||||
|
||||
# Installation instructions
|
||||
|
||||
|
|
Roslyn-SDK.sln
|
@ -1,7 +1,7 @@
|
|||
|
||||
Microsoft Visual Studio Solution File, Format Version 12.00
|
||||
# Visual Studio Version 16
|
||||
VisualStudioVersion = 16.0.29814.53
|
||||
# Visual Studio Version 17
|
||||
VisualStudioVersion = 17.0.31706.66
|
||||
MinimumVisualStudioVersion = 15.0.26124.0
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{924F7971-780C-4E70-A306-86482469502E}"
|
||||
EndProject
|
||||
|
@ -153,6 +153,48 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Roslyn.SDK.IntegrationTests
|
|||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Roslyn.SDK.UnitTests", "tests\VisualStudio.Roslyn.SDK\Roslyn.SDK.UnitTests\Roslyn.SDK.UnitTests.csproj", "{11B1F856-9025-4A4C-B90D-B1237743B672}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.CodeAnalysis.SourceGenerators.Testing", "src\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.SourceGenerators.Testing\Microsoft.CodeAnalysis.SourceGenerators.Testing.csproj", "{05A91267-ABC8-4249-9A04-166C08EAD849}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing", "src\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.csproj", "{99D7BB0C-DE8D-4952-A9BF-63A5215256C4}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.MSTest", "src\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.MSTest\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.MSTest.csproj", "{31886751-5615-435C-A40C-EA2CC415BBDD}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.NUnit", "src\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.NUnit\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.NUnit.csproj", "{92558FFF-0935-4B12-8ED6-E3DC4E486B7F}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.XUnit", "src\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.XUnit\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.XUnit.csproj", "{5BC27DBC-6727-4DA1-B1AF-5EA5D3803627}"
|
||||
EndProject
|
||||
Project("{778DAE3C-4631-46EA-AA77-85C1314464D9}") = "Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing", "src\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.vbproj", "{EC96BD3F-2DFE-4072-86F5-BFB349FF77D9}"
|
||||
EndProject
|
||||
Project("{778DAE3C-4631-46EA-AA77-85C1314464D9}") = "Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.MSTest", "src\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.MSTest\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.MSTest.vbproj", "{74F9C0F1-2E95-4EC8-B395-EB73AC4DD42B}"
|
||||
EndProject
|
||||
Project("{778DAE3C-4631-46EA-AA77-85C1314464D9}") = "Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.NUnit", "src\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.NUnit\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.NUnit.vbproj", "{A7D3307A-0A0D-4D20-887A-9C830A38B058}"
|
||||
EndProject
|
||||
Project("{778DAE3C-4631-46EA-AA77-85C1314464D9}") = "Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.XUnit", "src\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.XUnit\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.XUnit.vbproj", "{4B158F47-4759-495B-83BB-1D87130E3DF8}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.CodeAnalysis.SourceGenerators.Testing.UnitTests", "tests\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.SourceGenerators.Testing.UnitTests\Microsoft.CodeAnalysis.SourceGenerators.Testing.UnitTests.csproj", "{68CAEBF3-D428-4B50-8305-4BE4F7753CA8}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.UnitTests", "tests\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.UnitTests\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.UnitTests.csproj", "{0E75580E-10E6-4CAC-87C2-D83C6C0B1D10}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.MSTest.UnitTests", "tests\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.MSTest.UnitTests\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.MSTest.UnitTests.csproj", "{4E02E29B-54C7-4576-8575-0438EABCCC88}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.NUnit.UnitTests", "tests\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.NUnit.UnitTests\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.NUnit.UnitTests.csproj", "{485C6D8A-271F-44CD-9F55-B5283A917A00}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.XUnit.UnitTests", "tests\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.XUnit.UnitTests\Microsoft.CodeAnalysis.CSharp.SourceGenerators.Testing.XUnit.UnitTests.csproj", "{074F9FC2-B0E0-40E4-B356-99F0E07E279B}"
|
||||
EndProject
|
||||
Project("{778DAE3C-4631-46EA-AA77-85C1314464D9}") = "Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.UnitTests", "tests\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.UnitTests\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.UnitTests.vbproj", "{ED7BCD8E-F553-48F0-962F-019BE7C2B78E}"
|
||||
EndProject
|
||||
Project("{778DAE3C-4631-46EA-AA77-85C1314464D9}") = "Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.MSTest.UnitTests", "tests\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.MSTest.UnitTests\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.MSTest.UnitTests.vbproj", "{7D9C0EF5-7383-4E35-811B-3288B3C806F3}"
|
||||
EndProject
|
||||
Project("{778DAE3C-4631-46EA-AA77-85C1314464D9}") = "Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.NUnit.UnitTests", "tests\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.NUnit.UnitTests\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.NUnit.UnitTests.vbproj", "{7C3FE60E-055B-4E0C-BB85-C7E94A640074}"
|
||||
EndProject
|
||||
Project("{778DAE3C-4631-46EA-AA77-85C1314464D9}") = "Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.XUnit.UnitTests", "tests\Microsoft.CodeAnalysis.Testing\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.XUnit.UnitTests\Microsoft.CodeAnalysis.VisualBasic.SourceGenerators.Testing.XUnit.UnitTests.vbproj", "{92BD1781-5DB4-4F72-BCCB-0D64C0790A2B}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Roslyn.ComponentDebugger", "src\VisualStudio.Roslyn.SDK\ComponentDebugger\Roslyn.ComponentDebugger.csproj", "{421DE59C-8246-4679-9D69-79F16A7187BE}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "util", "util", "{7A94E723-ADD6-48C4-BBE7-1D5B311187A8}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AssemblyVersionGenerator", "src\VisualStudio.Roslyn.SDK\AssemblyVersionGenerator\AssemblyVersionGenerator.csproj", "{AB6B3C69-9F6F-461C-BFD8-D3F25B9F44AD}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
|
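
The Microsoft.CodeAnalysis.*.SourceGenerators.Testing projects added above provide test harnesses for Roslyn source generators (and eng/Versions.props pins MicrosoftCodeAnalysisGeneratorVersion to 3.9.0, i.e. the ISourceGenerator API). A minimal generator of the kind those packages exercise, as an illustrative sketch only (names are not from this repo):

```csharp
using System.Text;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Text;

namespace Example.Generators
{
    [Generator]
    public sealed class HelloWorldGenerator : ISourceGenerator
    {
        public void Initialize(GeneratorInitializationContext context)
        {
            // No syntax receiver is needed for this trivial generator.
        }

        public void Execute(GeneratorExecutionContext context)
        {
            // Add one generated file to the compilation.
            const string source = @"namespace Example.Generated
{
    public static class Hello
    {
        public const string Message = ""Hello from a source generator"";
    }
}";
            context.AddSource("Hello.g.cs", SourceText.From(source, Encoding.UTF8));
        }
    }
}
```
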
@ -431,6 +473,86 @@ Global
|
|||
{11B1F856-9025-4A4C-B90D-B1237743B672}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{11B1F856-9025-4A4C-B90D-B1237743B672}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{11B1F856-9025-4A4C-B90D-B1237743B672}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{05A91267-ABC8-4249-9A04-166C08EAD849}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{05A91267-ABC8-4249-9A04-166C08EAD849}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{05A91267-ABC8-4249-9A04-166C08EAD849}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{05A91267-ABC8-4249-9A04-166C08EAD849}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{99D7BB0C-DE8D-4952-A9BF-63A5215256C4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{99D7BB0C-DE8D-4952-A9BF-63A5215256C4}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{99D7BB0C-DE8D-4952-A9BF-63A5215256C4}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{99D7BB0C-DE8D-4952-A9BF-63A5215256C4}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{31886751-5615-435C-A40C-EA2CC415BBDD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{31886751-5615-435C-A40C-EA2CC415BBDD}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{31886751-5615-435C-A40C-EA2CC415BBDD}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{31886751-5615-435C-A40C-EA2CC415BBDD}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{92558FFF-0935-4B12-8ED6-E3DC4E486B7F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{92558FFF-0935-4B12-8ED6-E3DC4E486B7F}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{92558FFF-0935-4B12-8ED6-E3DC4E486B7F}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{92558FFF-0935-4B12-8ED6-E3DC4E486B7F}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{5BC27DBC-6727-4DA1-B1AF-5EA5D3803627}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{5BC27DBC-6727-4DA1-B1AF-5EA5D3803627}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{5BC27DBC-6727-4DA1-B1AF-5EA5D3803627}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{5BC27DBC-6727-4DA1-B1AF-5EA5D3803627}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{EC96BD3F-2DFE-4072-86F5-BFB349FF77D9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{EC96BD3F-2DFE-4072-86F5-BFB349FF77D9}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{EC96BD3F-2DFE-4072-86F5-BFB349FF77D9}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{EC96BD3F-2DFE-4072-86F5-BFB349FF77D9}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{74F9C0F1-2E95-4EC8-B395-EB73AC4DD42B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{74F9C0F1-2E95-4EC8-B395-EB73AC4DD42B}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{74F9C0F1-2E95-4EC8-B395-EB73AC4DD42B}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{74F9C0F1-2E95-4EC8-B395-EB73AC4DD42B}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{A7D3307A-0A0D-4D20-887A-9C830A38B058}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{A7D3307A-0A0D-4D20-887A-9C830A38B058}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{A7D3307A-0A0D-4D20-887A-9C830A38B058}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{A7D3307A-0A0D-4D20-887A-9C830A38B058}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{4B158F47-4759-495B-83BB-1D87130E3DF8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{4B158F47-4759-495B-83BB-1D87130E3DF8}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{4B158F47-4759-495B-83BB-1D87130E3DF8}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{4B158F47-4759-495B-83BB-1D87130E3DF8}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{68CAEBF3-D428-4B50-8305-4BE4F7753CA8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{68CAEBF3-D428-4B50-8305-4BE4F7753CA8}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{68CAEBF3-D428-4B50-8305-4BE4F7753CA8}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{68CAEBF3-D428-4B50-8305-4BE4F7753CA8}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{0E75580E-10E6-4CAC-87C2-D83C6C0B1D10}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{0E75580E-10E6-4CAC-87C2-D83C6C0B1D10}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{0E75580E-10E6-4CAC-87C2-D83C6C0B1D10}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{0E75580E-10E6-4CAC-87C2-D83C6C0B1D10}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{4E02E29B-54C7-4576-8575-0438EABCCC88}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{4E02E29B-54C7-4576-8575-0438EABCCC88}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{4E02E29B-54C7-4576-8575-0438EABCCC88}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{4E02E29B-54C7-4576-8575-0438EABCCC88}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{485C6D8A-271F-44CD-9F55-B5283A917A00}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{485C6D8A-271F-44CD-9F55-B5283A917A00}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{485C6D8A-271F-44CD-9F55-B5283A917A00}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{485C6D8A-271F-44CD-9F55-B5283A917A00}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{074F9FC2-B0E0-40E4-B356-99F0E07E279B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{074F9FC2-B0E0-40E4-B356-99F0E07E279B}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{074F9FC2-B0E0-40E4-B356-99F0E07E279B}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{074F9FC2-B0E0-40E4-B356-99F0E07E279B}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{ED7BCD8E-F553-48F0-962F-019BE7C2B78E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{ED7BCD8E-F553-48F0-962F-019BE7C2B78E}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{ED7BCD8E-F553-48F0-962F-019BE7C2B78E}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{ED7BCD8E-F553-48F0-962F-019BE7C2B78E}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{7D9C0EF5-7383-4E35-811B-3288B3C806F3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{7D9C0EF5-7383-4E35-811B-3288B3C806F3}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{7D9C0EF5-7383-4E35-811B-3288B3C806F3}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{7D9C0EF5-7383-4E35-811B-3288B3C806F3}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{7C3FE60E-055B-4E0C-BB85-C7E94A640074}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{7C3FE60E-055B-4E0C-BB85-C7E94A640074}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{7C3FE60E-055B-4E0C-BB85-C7E94A640074}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{7C3FE60E-055B-4E0C-BB85-C7E94A640074}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{92BD1781-5DB4-4F72-BCCB-0D64C0790A2B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{92BD1781-5DB4-4F72-BCCB-0D64C0790A2B}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{92BD1781-5DB4-4F72-BCCB-0D64C0790A2B}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{92BD1781-5DB4-4F72-BCCB-0D64C0790A2B}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{421DE59C-8246-4679-9D69-79F16A7187BE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{421DE59C-8246-4679-9D69-79F16A7187BE}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{421DE59C-8246-4679-9D69-79F16A7187BE}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{421DE59C-8246-4679-9D69-79F16A7187BE}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{AB6B3C69-9F6F-461C-BFD8-D3F25B9F44AD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{AB6B3C69-9F6F-461C-BFD8-D3F25B9F44AD}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{AB6B3C69-9F6F-461C-BFD8-D3F25B9F44AD}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{AB6B3C69-9F6F-461C-BFD8-D3F25B9F44AD}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
|
@ -509,6 +631,26 @@ Global
|
|||
{A3AF556C-276C-49BA-A9ED-E7D42FECAA46} = {8C343846-5F9F-4033-9B52-B44C61962449}
|
||||
{6DBBFF7B-2C28-47D7-8618-B6085044E38D} = {A3AF556C-276C-49BA-A9ED-E7D42FECAA46}
|
||||
{11B1F856-9025-4A4C-B90D-B1237743B672} = {A3AF556C-276C-49BA-A9ED-E7D42FECAA46}
|
||||
{05A91267-ABC8-4249-9A04-166C08EAD849} = {A3E08CE3-2358-4D36-875B-82C99AC61CD8}
|
||||
{99D7BB0C-DE8D-4952-A9BF-63A5215256C4} = {A3E08CE3-2358-4D36-875B-82C99AC61CD8}
|
||||
{31886751-5615-435C-A40C-EA2CC415BBDD} = {A3E08CE3-2358-4D36-875B-82C99AC61CD8}
|
||||
{92558FFF-0935-4B12-8ED6-E3DC4E486B7F} = {A3E08CE3-2358-4D36-875B-82C99AC61CD8}
|
||||
{5BC27DBC-6727-4DA1-B1AF-5EA5D3803627} = {A3E08CE3-2358-4D36-875B-82C99AC61CD8}
|
||||
{EC96BD3F-2DFE-4072-86F5-BFB349FF77D9} = {A3E08CE3-2358-4D36-875B-82C99AC61CD8}
|
||||
{74F9C0F1-2E95-4EC8-B395-EB73AC4DD42B} = {A3E08CE3-2358-4D36-875B-82C99AC61CD8}
|
||||
{A7D3307A-0A0D-4D20-887A-9C830A38B058} = {A3E08CE3-2358-4D36-875B-82C99AC61CD8}
|
||||
{4B158F47-4759-495B-83BB-1D87130E3DF8} = {A3E08CE3-2358-4D36-875B-82C99AC61CD8}
|
||||
{68CAEBF3-D428-4B50-8305-4BE4F7753CA8} = {9905147E-CC1F-42A0-BD27-05586C583DF7}
|
||||
{0E75580E-10E6-4CAC-87C2-D83C6C0B1D10} = {9905147E-CC1F-42A0-BD27-05586C583DF7}
|
||||
{4E02E29B-54C7-4576-8575-0438EABCCC88} = {9905147E-CC1F-42A0-BD27-05586C583DF7}
|
||||
{485C6D8A-271F-44CD-9F55-B5283A917A00} = {9905147E-CC1F-42A0-BD27-05586C583DF7}
|
||||
{074F9FC2-B0E0-40E4-B356-99F0E07E279B} = {9905147E-CC1F-42A0-BD27-05586C583DF7}
|
||||
{ED7BCD8E-F553-48F0-962F-019BE7C2B78E} = {9905147E-CC1F-42A0-BD27-05586C583DF7}
|
||||
{7D9C0EF5-7383-4E35-811B-3288B3C806F3} = {9905147E-CC1F-42A0-BD27-05586C583DF7}
|
||||
{7C3FE60E-055B-4E0C-BB85-C7E94A640074} = {9905147E-CC1F-42A0-BD27-05586C583DF7}
|
||||
{92BD1781-5DB4-4F72-BCCB-0D64C0790A2B} = {9905147E-CC1F-42A0-BD27-05586C583DF7}
|
||||
{421DE59C-8246-4679-9D69-79F16A7187BE} = {F9B73995-76C6-4056-ADA9-18342F951361}
|
||||
{AB6B3C69-9F6F-461C-BFD8-D3F25B9F44AD} = {7A94E723-ADD6-48C4-BBE7-1D5B311187A8}
|
||||
EndGlobalSection
|
||||
GlobalSection(ExtensibilityGlobals) = postSolution
|
||||
SolutionGuid = {56695AA9-EA80-47A7-8562-E51285906C54}
|
||||
|
|
|
@ -1,23 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<RuleSet Name="Roslyn SDK Rules" Description="Hand selected set of analyzer rules for use within the Roslyn SDK project." ToolsVersion="15.0">
|
||||
<Rules AnalyzerId="StyleCop.Analyzers" RuleNamespace="StyleCop.Analyzers">
|
||||
<Rule Id="SA1101" Action="None" />
|
||||
<Rule Id="SA1201" Action="None" />
|
||||
<Rule Id="SA1202" Action="None" />
|
||||
<Rule Id="SA1204" Action="None" />
|
||||
<Rule Id="SA1308" Action="None" />
|
||||
<Rule Id="SA1309" Action="None" />
|
||||
<Rule Id="SA1311" Action="None" />
|
||||
<Rule Id="SA1412" Action="Warning" />
|
||||
<Rule Id="SA1501" Action="None" />
|
||||
<Rule Id="SA1600" Action="None" />
|
||||
<Rule Id="SA1601" Action="None" />
|
||||
<Rule Id="SA1629" Action="None" />
|
||||
<Rule Id="SX1101" Action="Warning" />
|
||||
<Rule Id="SX1309" Action="Warning" />
|
||||
</Rules>
|
||||
<Rules AnalyzerId="Roslyn.Diagnostics.Analyzers" RuleNamespace="Roslyn.Diagnostics.Analyzers">
|
||||
<Rule Id="RS1019" Action="None" /> <!-- DiagnosticId must be unique across analyzers: Many tests use similar analyzer instances -->
|
||||
<Rule Id="RS1022" Action="None" /> <!-- AD0001: https://github.com/dotnet/roslyn-analyzers/issues/1803 -->
|
||||
</Rules>
|
||||
</RuleSet>
|
Samples.sln
|
@ -1,4 +1,4 @@
|
|||
|
||||
|
||||
Microsoft Visual Studio Solution File, Format Version 12.00
|
||||
# Visual Studio Version 16
|
||||
VisualStudioVersion = 16.0.28606.18
|
||||
|
@ -113,9 +113,15 @@ Project("{778DAE3C-4631-46EA-AA77-85C1314464D9}") = "VisualBasicToCSharpConverte
|
|||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SourceGenerators", "SourceGenerators", "{14D18F51-6B59-49D5-9AB7-08B38417A459}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SourceGeneratorSamples", "samples\CSharp\SourceGenerators\SourceGeneratorSamples\SourceGeneratorSamples.csproj", "{2ADE5CFA-5DF4-44A9-BD67-E884BCFBA045}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "CSharpSourceGeneratorSamples", "samples\CSharp\SourceGenerators\SourceGeneratorSamples\CSharpSourceGeneratorSamples.csproj", "{2ADE5CFA-5DF4-44A9-BD67-E884BCFBA045}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "GeneratedDemo", "samples\CSharp\SourceGenerators\GeneratedDemo\GeneratedDemo.csproj", "{EC4DB63B-C2B4-4D06-AF98-15253035C6D5}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "CSharpGeneratedDemo", "samples\CSharp\SourceGenerators\GeneratedDemo\CSharpGeneratedDemo.csproj", "{EC4DB63B-C2B4-4D06-AF98-15253035C6D5}"
|
||||
EndProject
|
||||
Project("{778DAE3C-4631-46EA-AA77-85C1314464D9}") = "VisualBasicGeneratedDemo", "samples\VisualBasic\SourceGenerators\GeneratedDemo\VisualBasicGeneratedDemo.vbproj", "{DA924876-9CF5-47E0-AA01-ADAF47653D39}"
|
||||
EndProject
|
||||
Project("{778DAE3C-4631-46EA-AA77-85C1314464D9}") = "VisualBasicSourceGeneratorSamples", "samples\VisualBasic\SourceGenerators\SourceGeneratorSamples\VisualBasicSourceGeneratorSamples.vbproj", "{8322B6E4-0CB1-4EC1-A2CC-2E4DB02C834A}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SourceGenerators", "SourceGenerators", "{E79B07C8-0859-4B5C-9650-68D855833C6E}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
|
@ -283,6 +289,14 @@ Global
|
|||
{EC4DB63B-C2B4-4D06-AF98-15253035C6D5}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{EC4DB63B-C2B4-4D06-AF98-15253035C6D5}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{EC4DB63B-C2B4-4D06-AF98-15253035C6D5}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{DA924876-9CF5-47E0-AA01-ADAF47653D39}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{DA924876-9CF5-47E0-AA01-ADAF47653D39}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{DA924876-9CF5-47E0-AA01-ADAF47653D39}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{DA924876-9CF5-47E0-AA01-ADAF47653D39}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{8322B6E4-0CB1-4EC1-A2CC-2E4DB02C834A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{8322B6E4-0CB1-4EC1-A2CC-2E4DB02C834A}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{8322B6E4-0CB1-4EC1-A2CC-2E4DB02C834A}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{8322B6E4-0CB1-4EC1-A2CC-2E4DB02C834A}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
|
@ -342,6 +356,9 @@ Global
|
|||
{14D18F51-6B59-49D5-9AB7-08B38417A459} = {C3FB27E9-C8EE-4F76-B0AA-7CD67A7E652B}
|
||||
{2ADE5CFA-5DF4-44A9-BD67-E884BCFBA045} = {14D18F51-6B59-49D5-9AB7-08B38417A459}
|
||||
{EC4DB63B-C2B4-4D06-AF98-15253035C6D5} = {14D18F51-6B59-49D5-9AB7-08B38417A459}
|
||||
{DA924876-9CF5-47E0-AA01-ADAF47653D39} = {E79B07C8-0859-4B5C-9650-68D855833C6E}
|
||||
{8322B6E4-0CB1-4EC1-A2CC-2E4DB02C834A} = {E79B07C8-0859-4B5C-9650-68D855833C6E}
|
||||
{E79B07C8-0859-4B5C-9650-68D855833C6E} = {CDA94F62-E35A-4913-8045-D9D42416513C}
|
||||
EndGlobalSection
|
||||
GlobalSection(ExtensibilityGlobals) = postSolution
|
||||
SolutionGuid = {B849838B-3D7A-4B6B-BE07-285DCB1588F4}
|
||||
|
|
|
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<Project>
<PropertyGroup>
<PublishingVersion>3</PublishingVersion>
</PropertyGroup>
</Project>

@@ -3,9 +3,9 @@
<ProductDependencies>
</ProductDependencies>
<ToolsetDependencies>
<Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="5.0.0-beta.20465.7">
<Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="7.0.0-beta.22411.2">
<Uri>https://github.com/dotnet/arcade</Uri>
<Sha>fa0486ddb04a76341d822903c8977fb9fa088d1e</Sha>
<Sha>6a638cd0c13962ab2a1943cb1c878be5a41dd82e</Sha>
</Dependency>
</ToolsetDependencies>
</Dependencies>

@ -2,69 +2,83 @@
|
|||
<Project>
|
||||
<PropertyGroup>
|
||||
<!-- This repo version -->
|
||||
<VersionPrefix>3.2.0</VersionPrefix>
|
||||
<NugetPackagePrefix>1.0.1</NugetPackagePrefix>
|
||||
<VersionPrefix>4.0.0</VersionPrefix>
|
||||
<NugetPackagePrefix>1.1.2</NugetPackagePrefix>
|
||||
<PreReleaseVersionLabel>beta1</PreReleaseVersionLabel>
|
||||
<!-- Opt-in repo features -->
|
||||
<UsingToolVSSDK>true</UsingToolVSSDK>
|
||||
<UsingToolNetFrameworkReferenceAssemblies>true</UsingToolNetFrameworkReferenceAssemblies>
|
||||
<UsingToolMicrosoftNetCompilers>true</UsingToolMicrosoftNetCompilers>
|
||||
<UsingToolSymbolUploader>true</UsingToolSymbolUploader>
|
||||
<MicrosoftNetCompilersToolsetVersion>3.8.0-4.20464.1</MicrosoftNetCompilersToolsetVersion>
|
||||
<UsingToolNuGetRepack>true</UsingToolNuGetRepack>
|
||||
<MicrosoftNetCompilersToolsetVersion>4.3.0-3.final</MicrosoftNetCompilersToolsetVersion>
|
||||
<!-- Force prior version due to https://github.com/microsoft/vstest/pull/2192 and https://github.com/microsoft/vstest/pull/2067 -->
|
||||
<MicrosoftNETTestSdkVersion>16.1.1</MicrosoftNETTestSdkVersion>
|
||||
<!-- Dependencies -->
|
||||
<!-- Roslyn for VS 2019 -->
|
||||
<!-- NOTE: Do not upgrade these to be newer than what shipped in VS 2019 since the Syntax Visualizer extension still
|
||||
supports it. -->
|
||||
<MicrosoftCodeAnalysisAnalyzersVersionVS2019>3.0.0</MicrosoftCodeAnalysisAnalyzersVersionVS2019>
|
||||
<MicrosoftCodeAnalysisCommonVersionVS2019>3.3.1</MicrosoftCodeAnalysisCommonVersionVS2019>
|
||||
<MicrosoftCodeAnalysisCSharpVersionVS2019>3.3.1</MicrosoftCodeAnalysisCSharpVersionVS2019>
|
||||
<MicrosoftCodeAnalysisEditorFeaturesTextVersionVS2019>3.3.1</MicrosoftCodeAnalysisEditorFeaturesTextVersionVS2019>
|
||||
<MicrosoftCodeAnalysisVisualBasicVersionVS2019>3.3.1</MicrosoftCodeAnalysisVisualBasicVersionVS2019>
|
||||
<MicrosoftCodeAnalysisWorkspacesCommonVersionVS2019>3.3.1</MicrosoftCodeAnalysisWorkspacesCommonVersionVS2019>
|
||||
</PropertyGroup>
|
||||
<!-- Dependencies -->
|
||||
<PropertyGroup>
|
||||
<!-- Versions used by several individual references below -->
|
||||
<MicrosoftCodeAnalysisPackagesVersion>4.1.0</MicrosoftCodeAnalysisPackagesVersion>
|
||||
<MicrosoftVisualStudioShellPackagesVersion>17.3.32809.331</MicrosoftVisualStudioShellPackagesVersion>
|
||||
<MicrosoftVisualStudioEditorPackagesVersion>17.3.198</MicrosoftVisualStudioEditorPackagesVersion>
|
||||
<MicrosoftVisualStudioExtensibilityTestingVersion>0.1.135-beta</MicrosoftVisualStudioExtensibilityTestingVersion>
|
||||
<!-- Roslyn -->
|
||||
<MicrosoftCodeAnalysisAnalyzersVersion>3.3.3</MicrosoftCodeAnalysisAnalyzersVersion>
|
||||
<MicrosoftCodeAnalysisCommonVersion>$(MicrosoftCodeAnalysisPackagesVersion)</MicrosoftCodeAnalysisCommonVersion>
|
||||
<MicrosoftCodeAnalysisCSharpVersion>$(MicrosoftCodeAnalysisPackagesVersion)</MicrosoftCodeAnalysisCSharpVersion>
|
||||
<MicrosoftCodeAnalysisEditorFeaturesTextVersion>$(MicrosoftCodeAnalysisPackagesVersion)</MicrosoftCodeAnalysisEditorFeaturesTextVersion>
|
||||
<MicrosoftCodeAnalysisVisualBasicVersion>$(MicrosoftCodeAnalysisPackagesVersion)</MicrosoftCodeAnalysisVisualBasicVersion>
|
||||
<MicrosoftCodeAnalysisWorkspacesCommonVersion>$(MicrosoftCodeAnalysisPackagesVersion)</MicrosoftCodeAnalysisWorkspacesCommonVersion>
|
||||
<!-- We target a lower roslyn API to build internal generators against so they still load in earlier VS versions -->
|
||||
<MicrosoftCodeAnalysisGeneratorVersion>3.9.0</MicrosoftCodeAnalysisGeneratorVersion>
|
||||
<!-- VS SDK -->
|
||||
<!-- NOTE: Do not upgrade these to be newer than what shipped in VS 2019
|
||||
we need our extensions to work on both. -->
|
||||
<VSLangProjVersion>7.0.3300</VSLangProjVersion>
|
||||
<VSSDKTemplateWizardInterfaceVersion>12.0.4</VSSDKTemplateWizardInterfaceVersion>
|
||||
<EnvDTEVersion>8.0.1</EnvDTEVersion>
|
||||
<EnvDTE80Version>8.0.0</EnvDTE80Version>
|
||||
<MicrosoftVisualStudioComponentModelHostVersion>16.4.280</MicrosoftVisualStudioComponentModelHostVersion>
|
||||
<MicrosoftVisualStudioCoreUtilityVersion>16.4.280</MicrosoftVisualStudioCoreUtilityVersion>
|
||||
<MicrosoftVisualStudioEditorVersion>16.4.280</MicrosoftVisualStudioEditorVersion>
|
||||
<MicrosoftVisualStudioLanguageStandardClassificationVersion>16.4.280</MicrosoftVisualStudioLanguageStandardClassificationVersion>
|
||||
<MicrosoftVisualStudioOLEInteropVersion>7.10.6071</MicrosoftVisualStudioOLEInteropVersion>
|
||||
<MicrosoftVisualStudioShell150Version>16.5.29911.84</MicrosoftVisualStudioShell150Version>
|
||||
<MicrosoftVisualStudioShellFrameworkVersion>16.5.29911.84</MicrosoftVisualStudioShellFrameworkVersion>
|
||||
<MicrosoftVisualStudioShellInteropVersion>7.10.6072</MicrosoftVisualStudioShellInteropVersion>
|
||||
<MicrosoftVisualStudioShellInterop80Version>8.0.50728</MicrosoftVisualStudioShellInterop80Version>
|
||||
<MicrosoftVisualStudioShellInterop90Version>9.0.30730</MicrosoftVisualStudioShellInterop90Version>
|
||||
<MicrosoftVisualStudioShellInterop100Version>10.0.30320</MicrosoftVisualStudioShellInterop100Version>
|
||||
<MicrosoftVisualStudioShellInterop110Version>11.0.61031</MicrosoftVisualStudioShellInterop110Version>
|
||||
<MicrosoftVisualStudioShellInterop121DesignTimeVersion>12.1.30329</MicrosoftVisualStudioShellInterop121DesignTimeVersion>
|
||||
<MicrosoftVisualStudioShellInterop140DesignTimeVersion>14.3.26929</MicrosoftVisualStudioShellInterop140DesignTimeVersion>
|
||||
<MicrosoftVisualStudioShellInterop157DesignTimeVersion>15.7.1</MicrosoftVisualStudioShellInterop157DesignTimeVersion>
|
||||
<MicrosoftVisualStudioTextDataVersion>16.4.280</MicrosoftVisualStudioTextDataVersion>
|
||||
<MicrosoftVisualStudioTextLogicVersion>16.4.280</MicrosoftVisualStudioTextLogicVersion>
|
||||
<MicrosoftVisualStudioTextManagerInteropVersion>7.10.6071</MicrosoftVisualStudioTextManagerInteropVersion>
|
||||
<MicrosoftVisualStudioTextUIVersion>16.4.280</MicrosoftVisualStudioTextUIVersion>
|
||||
<MicrosoftVisualStudioTextUIWpfVersion>16.4.280</MicrosoftVisualStudioTextUIWpfVersion>
|
||||
<MicrosoftVisualStudioUtilitiesVersion>16.5.29903.186</MicrosoftVisualStudioUtilitiesVersion>
|
||||
<MicrosoftVisualStudioThreadingVersion>16.5.132</MicrosoftVisualStudioThreadingVersion>
|
||||
<EnvDTE80Version>$(MicrosoftVisualStudioShellPackagesVersion)</EnvDTE80Version>
|
||||
<EnvDTEVersion>$(MicrosoftVisualStudioShellPackagesVersion)</EnvDTEVersion>
|
||||
<MicrosoftServiceHubFrameworkVersion>4.0.2048</MicrosoftServiceHubFrameworkVersion>
|
||||
<MicrosoftVisualStudioCompositionVersion>17.0.46</MicrosoftVisualStudioCompositionVersion>
|
||||
<MicrosoftVisualStudioComponentModelHostVersion>$(MicrosoftVisualStudioEditorPackagesVersion)</MicrosoftVisualStudioComponentModelHostVersion>
|
||||
<MicrosoftVisualStudioCoreUtilityVersion>$(MicrosoftVisualStudioEditorPackagesVersion)</MicrosoftVisualStudioCoreUtilityVersion>
|
||||
<MicrosoftVisualStudioEditorVersion>$(MicrosoftVisualStudioEditorPackagesVersion)</MicrosoftVisualStudioEditorVersion>
|
||||
<MicrosoftVisualStudioImageCatalogVersion>$(MicrosoftVisualStudioShellPackagesVersion)</MicrosoftVisualStudioImageCatalogVersion>
|
||||
<MicrosoftVisualStudioImagingInterop140DesignTimeVersion>$(MicrosoftVisualStudioShellPackagesVersion)</MicrosoftVisualStudioImagingInterop140DesignTimeVersion>
|
||||
<MicrosoftVisualStudioImagingVersion>$(MicrosoftVisualStudioShellPackagesVersion)</MicrosoftVisualStudioImagingVersion>
|
||||
<MicrosoftVisualStudioInteropVersion>$(MicrosoftVisualStudioShellPackagesVersion)</MicrosoftVisualStudioInteropVersion>
|
||||
<MicrosoftVisualStudioLanguageStandardClassificationVersion>$(MicrosoftVisualStudioEditorPackagesVersion)</MicrosoftVisualStudioLanguageStandardClassificationVersion>
|
||||
<MicrosoftVisualStudioSDKAnalyzersVersion>16.10.10</MicrosoftVisualStudioSDKAnalyzersVersion>
|
||||
<MicrosoftVisualStudioShell150Version>$(MicrosoftVisualStudioShellPackagesVersion)</MicrosoftVisualStudioShell150Version>
|
||||
<MicrosoftVisualStudioShellFrameworkVersion>$(MicrosoftVisualStudioShellPackagesVersion)</MicrosoftVisualStudioShellFrameworkVersion>
|
||||
<MicrosoftVisualStudioShellInteropVersion>$(MicrosoftVisualStudioShellPackagesVersion)</MicrosoftVisualStudioShellInteropVersion>
|
||||
<MicrosoftVisualStudioTextDataVersion>$(MicrosoftVisualStudioEditorPackagesVersion)</MicrosoftVisualStudioTextDataVersion>
|
||||
<MicrosoftVisualStudioTextLogicVersion>$(MicrosoftVisualStudioEditorPackagesVersion)</MicrosoftVisualStudioTextLogicVersion>
|
||||
<MicrosoftVisualStudioTextManagerInterop80Version>$(MicrosoftVisualStudioShellPackagesVersion)</MicrosoftVisualStudioTextManagerInterop80Version>
|
||||
<MicrosoftVisualStudioTextUIWpfVersion>$(MicrosoftVisualStudioEditorPackagesVersion)</MicrosoftVisualStudioTextUIWpfVersion>
|
||||
<MicrosoftVisualStudioThreadingVersion>17.3.44</MicrosoftVisualStudioThreadingVersion>
|
||||
<MicrosoftVisualStudioUtilitiesInternalVersion>16.3.36</MicrosoftVisualStudioUtilitiesInternalVersion>
|
||||
<MicrosoftVisualStudioValidationVersion>17.0.64</MicrosoftVisualStudioValidationVersion>
|
||||
<NuGetSolutionRestoreManagerInteropVersion>5.6.0</NuGetSolutionRestoreManagerInteropVersion>
|
||||
<StreamJsonRpcVersion>2.3.99</StreamJsonRpcVersion>
|
||||
<StreamJsonRpcVersion>2.12.27</StreamJsonRpcVersion>
|
||||
<VSLangProjVersion>$(MicrosoftVisualStudioShellPackagesVersion)</VSLangProjVersion>
|
||||
<MicrosoftVisualStudioTemplateWizardInterfaceVersion>17.0.0-preview-1-30928-1111</MicrosoftVisualStudioTemplateWizardInterfaceVersion>
|
||||
<MicrosoftVSSDKBuildToolsVersion>17.0.4207-preview4</MicrosoftVSSDKBuildToolsVersion>
|
||||
<MicroBuildPluginsSwixBuildVersion>1.1.33</MicroBuildPluginsSwixBuildVersion>
|
||||
<SystemThreadingTasksDataflowVersion>6.0.0</SystemThreadingTasksDataflowVersion>
|
||||
<!-- Project System-->
|
||||
<MicrosoftVisualStudioProjectSystemManagedVersion>17.3.51-g48943e0519</MicrosoftVisualStudioProjectSystemManagedVersion>
|
||||
<MicrosoftVisualStudioProjectSystemManagedVSVersion>17.3.51-g48943e0519</MicrosoftVisualStudioProjectSystemManagedVSVersion>
|
||||
<MicrosoftVisualStudioProjectSystemSDKToolsVersion>17.3.195-pre</MicrosoftVisualStudioProjectSystemSDKToolsVersion>
|
||||
<!-- Libs -->
|
||||
<SystemCollectionsImmutableVersion>1.3.1</SystemCollectionsImmutableVersion>
|
||||
<SystemCompositionVersion>1.1.0</SystemCompositionVersion>
|
||||
<SystemReflectionMetadataVersion>1.4.2</SystemReflectionMetadataVersion>
|
||||
<DiffPlexVersion>1.4.4</DiffPlexVersion>
|
||||
<DiffPlexVersion>1.5.0</DiffPlexVersion>
|
||||
<SystemNetHttpVersion>4.3.4</SystemNetHttpVersion>
|
||||
<!-- Testing -->
|
||||
<MicrosoftCodeAnalysis2PrimaryTestVersion>2.6.1</MicrosoftCodeAnalysis2PrimaryTestVersion>
|
||||
<MicrosoftCodeAnalysis3PrimaryTestVersion>3.3.1</MicrosoftCodeAnalysis3PrimaryTestVersion>
|
||||
<MicrosoftCodeAnalysisTestingVersion>1.0.1-beta1.20365.2</MicrosoftCodeAnalysisTestingVersion>
|
||||
<MicrosoftCodeAnalysis3PrimaryTestVersion>3.9.0</MicrosoftCodeAnalysis3PrimaryTestVersion>
|
||||
<MicrosoftCodeAnalysisTestingVersion>1.0.1-beta1.20374.2</MicrosoftCodeAnalysisTestingVersion>
|
||||
<xunitassertVersion>$(xunitVersion)</xunitassertVersion>
|
||||
<XunitCombinatorialVersion>1.2.7</XunitCombinatorialVersion>
|
||||
<VsixTestingXunitVersion>0.1.49-beta</VsixTestingXunitVersion>
|
||||
<MicrosoftVisualStudioExtensibilityTestingSourceGeneratorVersion>$(MicrosoftVisualStudioExtensibilityTestingVersion)</MicrosoftVisualStudioExtensibilityTestingSourceGeneratorVersion>
|
||||
<MicrosoftVisualStudioExtensibilityTestingXunitVersion>$(MicrosoftVisualStudioExtensibilityTestingVersion)</MicrosoftVisualStudioExtensibilityTestingXunitVersion>
|
||||
<!-- Needed to override the transitive 9.0.1 version brought in by the 16.1.1 Microsoft.NET.Test.Sdk -->
|
||||
<NewtonsoftJsonVersion>13.0.1</NewtonsoftJsonVersion>
|
||||
<!-- Analyzers -->
|
||||
<RoslynDiagnosticsAnalyzersVersion>2.9.8</RoslynDiagnosticsAnalyzersVersion>
|
||||
<StyleCopAnalyzersVersion>1.2.0-beta.164</StyleCopAnalyzersVersion>
|
||||
|
|
|
@ -99,8 +99,9 @@ function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $Passw
|
|||
function EnablePrivatePackageSources($DisabledPackageSources) {
|
||||
$maestroPrivateSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]")
|
||||
ForEach ($DisabledPackageSource in $maestroPrivateSources) {
|
||||
Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled"
|
||||
$DisabledPackageSource.SetAttribute("value", "false")
|
||||
Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled by deleting it from disabledPackageSource"
|
||||
# Due to https://github.com/NuGet/Home/issues/10291, we must actually remove the disabled entries
|
||||
$DisabledPackageSources.RemoveChild($DisabledPackageSource)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -145,16 +146,22 @@ $userName = "dn-bot"
|
|||
# Insert credential nodes for Maestro's private feeds
|
||||
InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -Password $Password
|
||||
|
||||
# 3.1 uses a different feed url format so it's handled differently here
|
||||
$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']")
|
||||
if ($dotnet31Source -ne $null) {
|
||||
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
|
||||
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
|
||||
}
|
||||
|
||||
$dotnet5Source = $sources.SelectSingleNode("add[@key='dotnet5']")
|
||||
if ($dotnet5Source -ne $null) {
|
||||
AddPackageSource -Sources $sources -SourceName "dotnet5-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
|
||||
AddPackageSource -Sources $sources -SourceName "dotnet5-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
|
||||
$dotnetVersions = @('5','6','7')
|
||||
|
||||
foreach ($dotnetVersion in $dotnetVersions) {
|
||||
$feedPrefix = "dotnet" + $dotnetVersion;
|
||||
$dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']")
|
||||
if ($dotnetSource -ne $null) {
|
||||
AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
|
||||
AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
|
||||
}
|
||||
}
|
||||
|
||||
$doc.Save($filename)
|
||||
|
|
|
@ -105,29 +105,33 @@ if [ "$?" == "0" ]; then
|
|||
PackageSources+=('dotnet3.1-internal-transport')
|
||||
fi
|
||||
|
||||
# Ensure dotnet5-internal and dotnet5-internal-transport are in the packageSources if the public dotnet5 feeds are present
|
||||
grep -i "<add key=\"dotnet5\"" $ConfigFile
|
||||
if [ "$?" == "0" ]; then
|
||||
grep -i "<add key=\"dotnet5-internal\"" $ConfigFile
|
||||
if [ "$?" != "0" ]; then
|
||||
echo "Adding dotnet5-internal to the packageSources."
|
||||
PackageSourcesNodeFooter="</packageSources>"
|
||||
PackageSourceTemplate="${TB}<add key=\"dotnet5-internal\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal/nuget/v2\" />"
|
||||
DotNetVersions=('5' '6' '7')
|
||||
|
||||
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
|
||||
for DotNetVersion in ${DotNetVersions[@]} ; do
|
||||
FeedPrefix="dotnet${DotNetVersion}";
|
||||
grep -i "<add key=\"$FeedPrefix\"" $ConfigFile
|
||||
if [ "$?" == "0" ]; then
|
||||
grep -i "<add key=\"$FeedPrefix-internal\"" $ConfigFile
|
||||
if [ "$?" != "0" ]; then
|
||||
echo "Adding $FeedPrefix-internal to the packageSources."
|
||||
PackageSourcesNodeFooter="</packageSources>"
|
||||
PackageSourceTemplate="${TB}<add key=\"$FeedPrefix-internal\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal/nuget/v2\" />"
|
||||
|
||||
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
|
||||
fi
|
||||
PackageSources+=("$FeedPrefix-internal")
|
||||
|
||||
grep -i "<add key=\"$FeedPrefix-internal-transport\">" $ConfigFile
|
||||
if [ "$?" != "0" ]; then
|
||||
echo "Adding $FeedPrefix-internal-transport to the packageSources."
|
||||
PackageSourcesNodeFooter="</packageSources>"
|
||||
PackageSourceTemplate="${TB}<add key=\"$FeedPrefix-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal-transport/nuget/v2\" />"
|
||||
|
||||
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
|
||||
fi
|
||||
PackageSources+=("$FeedPrefix-internal-transport")
|
||||
fi
|
||||
PackageSources+=('dotnet5-internal')
|
||||
|
||||
grep -i "<add key=\"dotnet5-internal-transport\">" $ConfigFile
|
||||
if [ "$?" != "0" ]; then
|
||||
echo "Adding dotnet5-internal-transport to the packageSources."
|
||||
PackageSourcesNodeFooter="</packageSources>"
|
||||
PackageSourceTemplate="${TB}<add key=\"dotnet5-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v2\" />"
|
||||
|
||||
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
|
||||
fi
|
||||
PackageSources+=('dotnet5-internal-transport')
|
||||
fi
|
||||
done
|
||||
|
||||
# I want things split line by line
|
||||
PrevIFS=$IFS
|
||||
|
@ -158,8 +162,8 @@ if [ "$?" == "0" ]; then
|
|||
for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do
|
||||
if [[ $DisabledSourceName == darc-int* ]]
|
||||
then
|
||||
OldDisableValue="add key=\"$DisabledSourceName\" value=\"true\""
|
||||
NewDisableValue="add key=\"$DisabledSourceName\" value=\"false\""
|
||||
OldDisableValue="<add key=\"$DisabledSourceName\" value=\"true\" />"
|
||||
NewDisableValue="<!-- Reenabled for build : $DisabledSourceName -->"
|
||||
sed -i.bak "s|$OldDisableValue|$NewDisableValue|" $ConfigFile
|
||||
echo "Neutralized disablePackageSources entry for '$DisabledSourceName'"
|
||||
fi
|
||||
|
|
|
@ -7,7 +7,6 @@ Param(
|
|||
[string] $msbuildEngine = $null,
|
||||
[bool] $warnAsError = $true,
|
||||
[bool] $nodeReuse = $true,
|
||||
[bool] $useDefaultDotnetInstall = $false,
|
||||
[switch][Alias('r')]$restore,
|
||||
[switch] $deployDeps,
|
||||
[switch][Alias('b')]$build,
|
||||
|
@ -26,6 +25,7 @@ Param(
|
|||
[switch] $prepareMachine,
|
||||
[string] $runtimeSourceFeed = '',
|
||||
[string] $runtimeSourceFeedKey = '',
|
||||
[switch] $excludePrereleaseVS,
|
||||
[switch] $help,
|
||||
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
|
||||
)
|
||||
|
@ -66,7 +66,7 @@ function Print-Usage() {
|
|||
Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
|
||||
Write-Host " -warnAsError <value> Sets warnaserror msbuild parameter ('true' or 'false')"
|
||||
Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
|
||||
Write-Host " -useDefaultDotnetInstall <value> Use dotnet-install.* scripts from public location as opposed to from eng common folder"
|
||||
Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio"
|
||||
Write-Host ""
|
||||
|
||||
Write-Host "Command line arguments not listed above are passed thru to msbuild."
|
||||
|
|
|
@ -19,6 +19,9 @@ usage()
|
|||
echo "Actions:"
|
||||
echo " --restore Restore dependencies (short: -r)"
|
||||
echo " --build Build solution (short: -b)"
|
||||
echo " --sourceBuild Source-build the solution (short: -sb)"
|
||||
echo " Will additionally trigger the following actions: --restore, --build, --pack"
|
||||
echo " If --configuration is not set explicitly, will also set it to 'Release'"
|
||||
echo " --rebuild Rebuild solution"
|
||||
echo " --test Run all unit tests in the solution (short: -t)"
|
||||
echo " --integrationTest Run all integration tests in the solution"
|
||||
|
@ -36,8 +39,6 @@ usage()
|
|||
echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
|
||||
echo " --nodeReuse <value> Sets nodereuse msbuild parameter ('true' or 'false')"
|
||||
echo " --warnAsError <value> Sets warnaserror msbuild parameter ('true' or 'false')"
|
||||
echo " --useDefaultDotnetInstall <value> Use dotnet-install.* scripts from public location as opposed to from eng common folder"
|
||||
|
||||
echo ""
|
||||
echo "Command line arguments not listed above are passed thru to msbuild."
|
||||
echo "Arguments can also be passed in with a single hyphen."
|
||||
|
@ -57,6 +58,7 @@ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
|||
|
||||
restore=false
|
||||
build=false
|
||||
source_build=false
|
||||
rebuild=false
|
||||
test=false
|
||||
integration_test=false
|
||||
|
@ -75,16 +77,15 @@ exclude_ci_binary_log=false
|
|||
pipelines_log=false
|
||||
|
||||
projects=''
|
||||
configuration='Debug'
|
||||
configuration=''
|
||||
prepare_machine=false
|
||||
verbosity='minimal'
|
||||
runtime_source_feed=''
|
||||
runtime_source_feed_key=''
|
||||
use_default_dotnet_install=false
|
||||
|
||||
properties=''
|
||||
while [[ $# > 0 ]]; do
|
||||
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
|
||||
opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
|
||||
case "$opt" in
|
||||
-help|-h)
|
||||
usage
|
||||
|
@ -122,6 +123,12 @@ while [[ $# > 0 ]]; do
|
|||
-pack)
|
||||
pack=true
|
||||
;;
|
||||
-sourcebuild|-sb)
|
||||
build=true
|
||||
source_build=true
|
||||
restore=true
|
||||
pack=true
|
||||
;;
|
||||
-test|-t)
|
||||
test=true
|
||||
;;
|
||||
|
@ -159,14 +166,10 @@ while [[ $# > 0 ]]; do
|
|||
runtime_source_feed=$2
|
||||
shift
|
||||
;;
|
||||
-runtimesourcefeedkey)
|
||||
-runtimesourcefeedkey)
|
||||
runtime_source_feed_key=$2
|
||||
shift
|
||||
;;
|
||||
-usedefaultdotnetinstall)
|
||||
use_default_dotnet_install=$2
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
properties="$properties $1"
|
||||
;;
|
||||
|
@ -175,6 +178,10 @@ while [[ $# > 0 ]]; do
|
|||
shift
|
||||
done
|
||||
|
||||
if [[ -z "$configuration" ]]; then
|
||||
if [[ "$source_build" = true ]]; then configuration="Release"; else configuration="Debug"; fi
|
||||
fi
|
||||
|
||||
if [[ "$ci" == true ]]; then
|
||||
pipelines_log=true
|
||||
node_reuse=false
|
||||
|
@ -212,6 +219,7 @@ function Build {
|
|||
/p:RepoRoot="$repo_root" \
|
||||
/p:Restore=$restore \
|
||||
/p:Build=$build \
|
||||
/p:ArcadeBuildFromSource=$source_build \
|
||||
/p:Rebuild=$rebuild \
|
||||
/p:Test=$test \
|
||||
/p:Pack=$pack \
|
||||
|
|
|
@ -1,11 +0,0 @@
|
|||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
|
|
@ -0,0 +1,35 @@
|
|||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
__ARM_HARDFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
|
||||
__TIZEN_CROSSDIR="$__ARM_HARDFP_CrossDir/tizen"
|
||||
|
||||
if [[ -z "$ROOTFS_DIR" ]]; then
|
||||
echo "ROOTFS_DIR is not defined."
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
|
||||
mkdir -p $TIZEN_TMP_DIR
|
||||
|
||||
# Download files
|
||||
echo ">>Start downloading files"
|
||||
VERBOSE=1 $__ARM_HARDFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
|
||||
echo "<<Finish downloading files"
|
||||
|
||||
echo ">>Start constructing Tizen rootfs"
|
||||
TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
|
||||
cd $ROOTFS_DIR
|
||||
for f in $TIZEN_RPM_FILES; do
|
||||
rpm2cpio $f | cpio -idm --quiet
|
||||
done
|
||||
echo "<<Finish constructing Tizen rootfs"
|
||||
|
||||
# Cleanup tmp
|
||||
rm -rf $TIZEN_TMP_DIR
|
||||
|
||||
# Configure Tizen rootfs
|
||||
echo ">>Start configuring Tizen rootfs"
|
||||
ln -sfn asm-arm ./usr/include/asm
|
||||
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
|
||||
echo "<<Finish configuring Tizen rootfs"
|
|
@ -0,0 +1,170 @@
|
|||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
|
||||
VERBOSE=0
|
||||
fi
|
||||
|
||||
Log()
|
||||
{
|
||||
if [ $VERBOSE -ge $1 ]; then
|
||||
echo ${@:2}
|
||||
fi
|
||||
}
|
||||
|
||||
Inform()
|
||||
{
|
||||
Log 1 -e "\x1B[0;34m$@\x1B[m"
|
||||
}
|
||||
|
||||
Debug()
|
||||
{
|
||||
Log 2 -e "\x1B[0;32m$@\x1B[m"
|
||||
}
|
||||
|
||||
Error()
|
||||
{
|
||||
>&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
|
||||
}
|
||||
|
||||
Fetch()
|
||||
{
|
||||
URL=$1
|
||||
FILE=$2
|
||||
PROGRESS=$3
|
||||
if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
|
||||
CURL_OPT="--progress-bar"
|
||||
else
|
||||
CURL_OPT="--silent"
|
||||
fi
|
||||
curl $CURL_OPT $URL > $FILE
|
||||
}
|
||||
|
||||
hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
|
||||
hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
|
||||
hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
|
||||
|
||||
TMPDIR=$1
|
||||
if [ ! -d $TMPDIR ]; then
|
||||
TMPDIR=./tizen_tmp
|
||||
Debug "Create temporary directory : $TMPDIR"
|
||||
mkdir -p $TMPDIR
|
||||
fi
|
||||
|
||||
TIZEN_URL=http://download.tizen.org/snapshots/tizen
|
||||
BUILD_XML=build.xml
|
||||
REPOMD_XML=repomd.xml
|
||||
PRIMARY_XML=primary.xml
|
||||
TARGET_URL="http://__not_initialized"
|
||||
|
||||
Xpath_get()
|
||||
{
|
||||
XPATH_RESULT=''
|
||||
XPATH=$1
|
||||
XML_FILE=$2
|
||||
RESULT=$(xmllint --xpath $XPATH $XML_FILE)
|
||||
if [[ -z ${RESULT// } ]]; then
|
||||
Error "Can not find target from $XML_FILE"
|
||||
Debug "Xpath = $XPATH"
|
||||
exit 1
|
||||
fi
|
||||
XPATH_RESULT=$RESULT
|
||||
}
|
||||
|
||||
fetch_tizen_pkgs_init()
|
||||
{
|
||||
TARGET=$1
|
||||
PROFILE=$2
|
||||
Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
|
||||
|
||||
TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
|
||||
if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
|
||||
mkdir -p $TMP_PKG_DIR
|
||||
|
||||
PKG_URL=$TIZEN_URL/$PROFILE/latest
|
||||
|
||||
BUILD_XML_URL=$PKG_URL/$BUILD_XML
|
||||
TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
|
||||
TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
|
||||
TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
|
||||
TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
|
||||
|
||||
Fetch $BUILD_XML_URL $TMP_BUILD
|
||||
|
||||
Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
|
||||
|
||||
TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
|
||||
Xpath_get $TARGET_XPATH $TMP_BUILD
|
||||
TARGET_PATH=$XPATH_RESULT
|
||||
TARGET_URL=$PKG_URL/$TARGET_PATH
|
||||
|
||||
REPOMD_URL=$TARGET_URL/repodata/repomd.xml
|
||||
PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
|
||||
|
||||
Fetch $REPOMD_URL $TMP_REPOMD
|
||||
|
||||
Debug "fetch $REPOMD_URL to $TMP_REPOMD"
|
||||
|
||||
Xpath_get $PRIMARY_XPATH $TMP_REPOMD
|
||||
PRIMARY_XML_PATH=$XPATH_RESULT
|
||||
PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
|
||||
|
||||
Fetch $PRIMARY_URL $TMP_PRIMARYGZ
|
||||
|
||||
Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
|
||||
|
||||
gunzip $TMP_PRIMARYGZ
|
||||
|
||||
Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
|
||||
}
|
||||
|
||||
fetch_tizen_pkgs()
|
||||
{
|
||||
ARCH=$1
|
||||
PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
|
||||
|
||||
PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
|
||||
|
||||
for pkg in ${@:2}
|
||||
do
|
||||
Inform "Fetching... $pkg"
|
||||
XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
|
||||
XPATH=${XPATH/_ARCH_/$ARCH}
|
||||
Xpath_get $XPATH $TMP_PRIMARY
|
||||
PKG_PATH=$XPATH_RESULT
|
||||
|
||||
XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
|
||||
XPATH=${XPATH/_ARCH_/$ARCH}
|
||||
Xpath_get $XPATH $TMP_PRIMARY
|
||||
CHECKSUM=$XPATH_RESULT
|
||||
|
||||
PKG_URL=$TARGET_URL/$PKG_PATH
|
||||
PKG_FILE=$(basename $PKG_PATH)
|
||||
PKG_PATH=$TMPDIR/$PKG_FILE
|
||||
|
||||
Debug "Download $PKG_URL to $PKG_PATH"
|
||||
Fetch $PKG_URL $PKG_PATH true
|
||||
|
||||
echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
|
||||
if [ $? -ne 0 ]; then
|
||||
Error "Fail to fetch $PKG_URL to $PKG_PATH"
|
||||
Debug "Checksum = $CHECKSUM"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
Inform "Initialize arm base"
|
||||
fetch_tizen_pkgs_init standard base
|
||||
Inform "fetch common packages"
|
||||
fetch_tizen_pkgs armv7hl gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
|
||||
Inform "fetch coreclr packages"
|
||||
fetch_tizen_pkgs armv7hl lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
|
||||
Inform "fetch corefx packages"
|
||||
fetch_tizen_pkgs armv7hl libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
|
||||
|
||||
Inform "Initialize standard unified"
|
||||
fetch_tizen_pkgs_init standard unified
|
||||
Inform "fetch corefx packages"
|
||||
fetch_tizen_pkgs armv7hl gssdp gssdp-devel tizen-release
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
|
||||
--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
|
||||
+++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
|
||||
@@ -2,4 +2,4 @@
|
||||
Use the shared library, but some functions are only in
|
||||
the static library, so try that secondarily. */
|
||||
OUTPUT_FORMAT(elf32-littlearm)
|
||||
-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-armhf.so.3 ) )
|
||||
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-armhf.so.3 ) )
|
|
@ -1,71 +0,0 @@
|
|||
From e72c9d7ead60e3317bd6d1fade995c07021c947b Mon Sep 17 00:00:00 2001
|
||||
From: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
|
||||
Date: Thu, 7 May 2015 13:25:04 -0400
|
||||
Subject: [PATCH] Fix: building probe providers with C++ compiler
|
||||
|
||||
Robert Daniels wrote:
|
||||
> > I'm attempting to use lttng userspace tracing with a C++ application
|
||||
> > on an ARM platform. I'm using GCC 4.8.4 on Linux 3.14 with the 2.6
|
||||
> > release of lttng. I've compiled lttng-modules, lttng-ust, and
|
||||
> > lttng-tools and have been able to get a simple test working with C
|
||||
> > code. When I attempt to run the hello.cxx test on my target it will
|
||||
> > segfault.
|
||||
>
|
||||
>
|
||||
> I spent a little time digging into this issue and finally discovered the
|
||||
> cause of my segfault with ARM C++ tracepoints.
|
||||
>
|
||||
> There is a struct called 'lttng_event' in ust-events.h which contains an
|
||||
> empty union 'u'. This was the cause of my issue. Under C, this empty union
|
||||
> compiles to a zero byte member while under C++ it compiles to a one byte
|
||||
> member, and in my case was four-byte aligned which caused my C++ code to
|
||||
> have the 'cds_list_head node' offset incorrectly by four bytes. This lead
|
||||
> to an incorrect linked list structure which caused my issue.
|
||||
>
|
||||
> Since this union is empty, I simply removed it from the struct and everything
|
||||
> worked correctly.
|
||||
>
|
||||
> I don't know the history or purpose behind this empty union so I'd like to
|
||||
> know if this is a safe fix. If it is I can submit a patch with the union
|
||||
> removed.
|
||||
|
||||
That's a very nice catch!
|
||||
|
||||
We do not support building tracepoint probe provider with
|
||||
g++ yet, as stated in lttng-ust(3):
|
||||
|
||||
"- Note for C++ support: although an application instrumented with
|
||||
tracepoints can be compiled with g++, tracepoint probes should be
|
||||
compiled with gcc (only tested with gcc so far)."
|
||||
|
||||
However, if it works fine with this fix, then I'm tempted to take it,
|
||||
especially because removing the empty union does not appear to affect
|
||||
the layout of struct lttng_event as seen from liblttng-ust, which must
|
||||
be compiled with a C compiler, and from probe providers compiled with
|
||||
a C compiler. So all we are changing is the layout of a probe provider
|
||||
compiled with a C++ compiler, which is anyway buggy at the moment,
|
||||
because it is not compatible with the layout expected by liblttng-ust
|
||||
compiled with a C compiler.
|
||||
|
||||
Reported-by: Robert Daniels <robert.daniels@vantagecontrols.com>
|
||||
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
|
||||
---
|
||||
include/lttng/ust-events.h | 2 --
|
||||
1 file changed, 2 deletions(-)
|
||||
|
||||
diff --git a/usr/include/lttng/ust-events.h b/usr/include/lttng/ust-events.h
|
||||
index 328a875..3d7a274 100644
|
||||
--- a/usr/include/lttng/ust-events.h
|
||||
+++ b/usr/include/lttng/ust-events.h
|
||||
@@ -407,8 +407,6 @@ struct lttng_event {
|
||||
void *_deprecated1;
|
||||
struct lttng_ctx *ctx;
|
||||
enum lttng_ust_instrumentation instrumentation;
|
||||
- union {
|
||||
- } u;
|
||||
struct cds_list_head node; /* Event list in session */
|
||||
struct cds_list_head _deprecated2;
|
||||
void *_deprecated3;
|
||||
--
|
||||
2.7.4
|
||||
|
|
@ -1,97 +0,0 @@
|
|||
diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
|
||||
--- a/usr/include/urcu/uatomic/generic.h 2014-03-28 06:04:42.000000000 +0900
|
||||
+++ b/usr/include/urcu/uatomic/generic.h 2017-02-13 10:35:21.189927116 +0900
|
||||
@@ -65,17 +65,17 @@
|
||||
switch (len) {
|
||||
#ifdef UATOMIC_HAS_ATOMIC_BYTE
|
||||
case 1:
|
||||
- return __sync_val_compare_and_swap_1(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_1((uint8_t *) addr, old, _new);
|
||||
#endif
|
||||
#ifdef UATOMIC_HAS_ATOMIC_SHORT
|
||||
case 2:
|
||||
- return __sync_val_compare_and_swap_2(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_2((uint16_t *) addr, old, _new);
|
||||
#endif
|
||||
case 4:
|
||||
- return __sync_val_compare_and_swap_4(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_4((uint32_t *) addr, old, _new);
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
- return __sync_val_compare_and_swap_8(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_8((uint64_t *) addr, old, _new);
|
||||
#endif
|
||||
}
|
||||
_uatomic_link_error();
|
||||
@@ -100,20 +100,20 @@
|
||||
switch (len) {
|
||||
#ifdef UATOMIC_HAS_ATOMIC_BYTE
|
||||
case 1:
|
||||
- __sync_and_and_fetch_1(addr, val);
|
||||
+ __sync_and_and_fetch_1((uint8_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
#ifdef UATOMIC_HAS_ATOMIC_SHORT
|
||||
case 2:
|
||||
- __sync_and_and_fetch_2(addr, val);
|
||||
+ __sync_and_and_fetch_2((uint16_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
case 4:
|
||||
- __sync_and_and_fetch_4(addr, val);
|
||||
+ __sync_and_and_fetch_4((uint32_t *) addr, val);
|
||||
return;
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
- __sync_and_and_fetch_8(addr, val);
|
||||
+ __sync_and_and_fetch_8((uint64_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
}
|
||||
@@ -139,20 +139,20 @@
|
||||
switch (len) {
|
||||
#ifdef UATOMIC_HAS_ATOMIC_BYTE
|
||||
case 1:
|
||||
- __sync_or_and_fetch_1(addr, val);
|
||||
+ __sync_or_and_fetch_1((uint8_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
#ifdef UATOMIC_HAS_ATOMIC_SHORT
|
||||
case 2:
|
||||
- __sync_or_and_fetch_2(addr, val);
|
||||
+ __sync_or_and_fetch_2((uint16_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
case 4:
|
||||
- __sync_or_and_fetch_4(addr, val);
|
||||
+ __sync_or_and_fetch_4((uint32_t *) addr, val);
|
||||
return;
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
- __sync_or_and_fetch_8(addr, val);
|
||||
+ __sync_or_and_fetch_8((uint64_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
}
|
||||
@@ -180,17 +180,17 @@
|
||||
switch (len) {
|
||||
#ifdef UATOMIC_HAS_ATOMIC_BYTE
|
||||
case 1:
|
||||
- return __sync_add_and_fetch_1(addr, val);
|
||||
+ return __sync_add_and_fetch_1((uint8_t *) addr, val);
|
||||
#endif
|
||||
#ifdef UATOMIC_HAS_ATOMIC_SHORT
|
||||
case 2:
|
||||
- return __sync_add_and_fetch_2(addr, val);
|
||||
+ return __sync_add_and_fetch_2((uint16_t *) addr, val);
|
||||
#endif
|
||||
case 4:
|
||||
- return __sync_add_and_fetch_4(addr, val);
|
||||
+ return __sync_add_and_fetch_4((uint32_t *) addr, val);
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
- return __sync_add_and_fetch_8(addr, val);
|
||||
+ return __sync_add_and_fetch_8((uint64_t *) addr, val);
|
||||
#endif
|
||||
}
|
||||
_uatomic_link_error();
|
|
@ -1,11 +0,0 @@
|
|||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
|
|
@ -157,7 +157,7 @@ fetch_tizen_pkgs()
|
|||
Inform "Initialize arm base"
|
||||
fetch_tizen_pkgs_init standard base
|
||||
Inform "fetch common packages"
|
||||
fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel
|
||||
fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
|
||||
Inform "fetch coreclr packages"
|
||||
fetch_tizen_pkgs aarch64 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
|
||||
Inform "fetch corefx packages"
|
||||
|
|
|
@ -0,0 +1,43 @@
|
|||
diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
|
||||
--- a/usr/include/urcu/uatomic/generic.h 2014-10-22 15:00:58.000000000 -0700
|
||||
+++ b/usr/include/urcu/uatomic/generic.h 2020-10-30 21:38:28.550000000 -0700
|
||||
@@ -69,10 +69,10 @@
|
||||
#endif
|
||||
#ifdef UATOMIC_HAS_ATOMIC_SHORT
|
||||
case 2:
|
||||
- return __sync_val_compare_and_swap_2(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_2((uint16_t*) addr, old, _new);
|
||||
#endif
|
||||
case 4:
|
||||
- return __sync_val_compare_and_swap_4(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_4((uint32_t*) addr, old, _new);
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
return __sync_val_compare_and_swap_8(addr, old, _new);
|
||||
@@ -109,7 +109,7 @@
|
||||
return;
|
||||
#endif
|
||||
case 4:
|
||||
- __sync_and_and_fetch_4(addr, val);
|
||||
+ __sync_and_and_fetch_4((uint32_t*) addr, val);
|
||||
return;
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
@@ -148,7 +148,7 @@
|
||||
return;
|
||||
#endif
|
||||
case 4:
|
||||
- __sync_or_and_fetch_4(addr, val);
|
||||
+ __sync_or_and_fetch_4((uint32_t*) addr, val);
|
||||
return;
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
@@ -187,7 +187,7 @@
|
||||
return __sync_add_and_fetch_2(addr, val);
|
||||
#endif
|
||||
case 4:
|
||||
- return __sync_add_and_fetch_4(addr, val);
|
||||
+ return __sync_add_and_fetch_4((uint32_t*) addr, val);
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
return __sync_add_and_fetch_8(addr, val);
|
|
@ -157,7 +157,7 @@ fetch_tizen_pkgs()
|
|||
Inform "Initialize arm base"
|
||||
fetch_tizen_pkgs_init standard base
|
||||
Inform "fetch common packages"
|
||||
fetch_tizen_pkgs armv7l gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel
|
||||
fetch_tizen_pkgs armv7l gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
|
||||
Inform "fetch coreclr packages"
|
||||
fetch_tizen_pkgs armv7l lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
|
||||
Inform "fetch corefx packages"
|
||||
|
|
|
@ -0,0 +1,2 @@
|
|||
deb http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi
|
||||
deb-src http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi
|
|
@ -27,7 +27,7 @@ __AndroidToolchain=aarch64-linux-android
|
|||
|
||||
for i in "$@"
|
||||
do
|
||||
lowerI="$(echo $i | awk '{print tolower($0)}')"
|
||||
lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")"
|
||||
case $lowerI in
|
||||
-?|-h|--help)
|
||||
usage
|
||||
|
|
|
@ -4,22 +4,27 @@ set -e
|
|||
|
||||
usage()
|
||||
{
|
||||
echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [--skipunmount] --rootfsdir <directory>]"
|
||||
echo "BuildArch can be: arm(default), armel, arm64, x86"
|
||||
echo "CodeName - optional, Code name for Linux, can be: trusty, xenial(default), zesty, bionic, alpine. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
|
||||
echo " for FreeBSD can be: freebsd11 or freebsd12."
|
||||
echo " for illumos can be: illumos."
|
||||
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FReeBSD"
|
||||
echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir <directory>]"
|
||||
echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86"
|
||||
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine, alpine3.13 or alpine3.14. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
|
||||
echo " for FreeBSD can be: freebsd12, freebsd13"
|
||||
echo " for illumos can be: illumos"
|
||||
echo " for Haiku can be: haiku."
|
||||
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
|
||||
echo "llvmx[.y] - optional, LLVM version for LLVM related packages."
|
||||
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
|
||||
echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
|
||||
echo "--jobs N - optional, restrict to N jobs."
|
||||
exit 1
|
||||
}
|
||||
|
||||
__CodeName=xenial
|
||||
__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
|
||||
__InitialDir=$PWD
|
||||
__BuildArch=arm
|
||||
__AlpineArch=armv7
|
||||
__FreeBSDArch=arm
|
||||
__FreeBSDMachineArch=armv7
|
||||
__IllumosArch=arm7
|
||||
__QEMUArch=arm
|
||||
__UbuntuArch=armhf
|
||||
__UbuntuRepo="http://ports.ubuntu.com/"
|
||||
|
@ -32,15 +37,14 @@ __UbuntuPackages="build-essential"
|
|||
__AlpinePackages="alpine-base"
|
||||
__AlpinePackages+=" build-base"
|
||||
__AlpinePackages+=" linux-headers"
|
||||
__AlpinePackagesEdgeCommunity=" lldb-dev"
|
||||
__AlpinePackagesEdgeMain=" llvm10-libs"
|
||||
__AlpinePackagesEdgeMain+=" python3"
|
||||
__AlpinePackagesEdgeMain+=" libedit"
|
||||
__AlpinePackages+=" lldb-dev"
|
||||
__AlpinePackages+=" python3"
|
||||
__AlpinePackages+=" libedit"
|
||||
|
||||
# symlinks fixer
|
||||
__UbuntuPackages+=" symlinks"
|
||||
|
||||
# CoreCLR and CoreFX dependencies
|
||||
# runtime dependencies
|
||||
__UbuntuPackages+=" libicu-dev"
|
||||
__UbuntuPackages+=" liblttng-ust-dev"
|
||||
__UbuntuPackages+=" libunwind8-dev"
|
||||
|
@ -49,8 +53,9 @@ __AlpinePackages+=" gettext-dev"
|
|||
__AlpinePackages+=" icu-dev"
|
||||
__AlpinePackages+=" libunwind-dev"
|
||||
__AlpinePackages+=" lttng-ust-dev"
|
||||
__AlpinePackages+=" compiler-rt-static"
|
||||
|
||||
# CoreFX dependencies
|
||||
# runtime libraries' dependencies
|
||||
__UbuntuPackages+=" libcurl4-openssl-dev"
|
||||
__UbuntuPackages+=" libkrb5-dev"
|
||||
__UbuntuPackages+=" libssl-dev"
|
||||
|
@ -61,30 +66,48 @@ __AlpinePackages+=" krb5-dev"
|
|||
__AlpinePackages+=" openssl-dev"
|
||||
__AlpinePackages+=" zlib-dev"
|
||||
|
||||
__FreeBSDBase="12.1-RELEASE"
|
||||
__FreeBSDPkg="1.12.0"
|
||||
__FreeBSDBase="12.3-RELEASE"
|
||||
__FreeBSDPkg="1.17.0"
|
||||
__FreeBSDABI="12"
|
||||
__FreeBSDPackages="libunwind"
|
||||
__FreeBSDPackages+=" icu"
|
||||
__FreeBSDPackages+=" libinotify"
|
||||
__FreeBSDPackages+=" lttng-ust"
|
||||
__FreeBSDPackages+=" openssl"
|
||||
__FreeBSDPackages+=" krb5"
|
||||
__FreeBSDPackages+=" terminfo-db"
|
||||
|
||||
__IllumosPackages="icu-64.2nb2"
|
||||
__IllumosPackages+=" mit-krb5-1.16.2nb4"
|
||||
__IllumosPackages+=" openssl-1.1.1e"
|
||||
__IllumosPackages+=" zlib-1.2.11"
|
||||
|
||||
__HaikuPackages="gmp"
|
||||
__HaikuPackages+=" gmp_devel"
|
||||
__HaikuPackages+=" krb5"
|
||||
__HaikuPackages+=" krb5_devel"
|
||||
__HaikuPackages+=" libiconv"
|
||||
__HaikuPackages+=" libiconv_devel"
|
||||
__HaikuPackages+=" llvm12_libunwind"
|
||||
__HaikuPackages+=" llvm12_libunwind_devel"
|
||||
__HaikuPackages+=" mpfr"
|
||||
__HaikuPackages+=" mpfr_devel"
|
||||
|
||||
# ML.NET dependencies
|
||||
__UbuntuPackages+=" libomp5"
|
||||
__UbuntuPackages+=" libomp-dev"
|
||||
|
||||
__Keyring=
|
||||
__UseMirror=0
|
||||
|
||||
__UnprocessedBuildArgs=
|
||||
while :; do
|
||||
if [ $# -le 0 ]; then
|
||||
if [[ "$#" -le 0 ]]; then
|
||||
break
|
||||
fi
|
||||
|
||||
lowerI="$(echo $1 | awk '{print tolower($0)}')"
|
||||
lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
|
||||
case $lowerI in
|
||||
-?|-h|--help)
|
||||
-\?|-h|--help)
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
|
@ -99,6 +122,8 @@ while :; do
|
|||
__UbuntuArch=arm64
|
||||
__AlpineArch=aarch64
|
||||
__QEMUArch=aarch64
|
||||
__FreeBSDArch=arm64
|
||||
__FreeBSDMachineArch=aarch64
|
||||
;;
|
||||
armel)
|
||||
__BuildArch=armel
|
||||
|
@ -106,6 +131,56 @@ while :; do
|
|||
__UbuntuRepo="http://ftp.debian.org/debian/"
|
||||
__CodeName=jessie
|
||||
;;
|
||||
armv6)
|
||||
__BuildArch=armv6
|
||||
__UbuntuArch=armhf
|
||||
__QEMUArch=arm
|
||||
__UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/"
|
||||
__CodeName=buster
|
||||
__LLDB_Package="liblldb-6.0-dev"
|
||||
|
||||
if [[ -e "/usr/share/keyrings/raspbian-archive-keyring.gpg" ]]; then
|
||||
__Keyring="--keyring /usr/share/keyrings/raspbian-archive-keyring.gpg"
|
||||
fi
|
||||
;;
|
||||
ppc64le)
|
||||
__BuildArch=ppc64le
|
||||
__UbuntuArch=ppc64el
|
||||
__UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
|
||||
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libunwind8-dev//')
|
||||
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp-dev//')
|
||||
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp5//')
|
||||
unset __LLDB_Package
|
||||
;;
|
||||
riscv64)
|
||||
__BuildArch=riscv64
|
||||
__UbuntuArch=riscv64
|
||||
__UbuntuRepo="http://deb.debian.org/debian-ports"
|
||||
__CodeName=sid
|
||||
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libunwind8-dev//')
|
||||
unset __LLDB_Package
|
||||
|
||||
if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then
|
||||
__Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring"
|
||||
fi
|
||||
;;
|
||||
s390x)
|
||||
__BuildArch=s390x
|
||||
__UbuntuArch=s390x
|
||||
__UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
|
||||
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libunwind8-dev//')
|
||||
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp-dev//')
|
||||
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp5//')
|
||||
unset __LLDB_Package
|
||||
;;
|
||||
x64)
|
||||
__BuildArch=x64
|
||||
__UbuntuArch=amd64
|
||||
__FreeBSDArch=amd64
|
||||
__FreeBSDMachineArch=amd64
|
||||
__illumosArch=x86_64
|
||||
__UbuntuRepo=
|
||||
;;
|
||||
x86)
|
||||
__BuildArch=x86
|
||||
__UbuntuArch=i386
|
||||
|
@ -132,23 +207,27 @@ while :; do
|
|||
no-lldb)
|
||||
unset __LLDB_Package
|
||||
;;
|
||||
trusty) # Ubuntu 14.04
|
||||
if [ "$__CodeName" != "jessie" ]; then
|
||||
__CodeName=trusty
|
||||
llvm*)
|
||||
version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
|
||||
parts=(${version//./ })
|
||||
__LLVM_MajorVersion="${parts[0]}"
|
||||
__LLVM_MinorVersion="${parts[1]}"
|
||||
if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then
|
||||
__LLVM_MinorVersion=0;
|
||||
fi
|
||||
;;
|
||||
xenial) # Ubuntu 16.04
|
||||
if [ "$__CodeName" != "jessie" ]; then
|
||||
if [[ "$__CodeName" != "jessie" ]]; then
|
||||
__CodeName=xenial
|
||||
fi
|
||||
;;
|
||||
zesty) # Ubuntu 17.04
|
||||
if [ "$__CodeName" != "jessie" ]; then
|
||||
if [[ "$__CodeName" != "jessie" ]]; then
|
||||
__CodeName=zesty
|
||||
fi
|
||||
;;
|
||||
bionic) # Ubuntu 18.04
|
||||
if [ "$__CodeName" != "jessie" ]; then
|
||||
if [[ "$__CodeName" != "jessie" ]]; then
|
||||
__CodeName=bionic
|
||||
fi
|
||||
;;
|
||||
|
@ -167,29 +246,38 @@ while :; do
|
|||
__LLDB_Package="liblldb-6.0-dev"
|
||||
;;
|
||||
tizen)
|
||||
if [ "$__BuildArch" != "armel" ] && [ "$__BuildArch" != "arm64" ]; then
|
||||
echo "Tizen is available only for armel and arm64."
|
||||
usage;
|
||||
exit 1;
|
||||
fi
|
||||
__CodeName=
|
||||
__UbuntuRepo=
|
||||
__Tizen=tizen
|
||||
;;
|
||||
alpine)
|
||||
alpine|alpine3.13)
|
||||
__CodeName=alpine
|
||||
__UbuntuRepo=
|
||||
__AlpineVersion=3.13
|
||||
__AlpinePackages+=" llvm10-libs"
|
||||
;;
|
||||
alpine3.14)
|
||||
__CodeName=alpine
|
||||
__UbuntuRepo=
|
||||
__AlpineVersion=3.14
|
||||
__AlpinePackages+=" llvm11-libs"
|
||||
;;
|
||||
freebsd11)
|
||||
__FreeBSDBase="11.3-RELEASE"
|
||||
;&
|
||||
freebsd12)
|
||||
__CodeName=freebsd
|
||||
__BuildArch=x64
|
||||
__SkipUnmount=1
|
||||
;;
|
||||
freebsd13)
|
||||
__CodeName=freebsd
|
||||
__FreeBSDBase="13.0-RELEASE"
|
||||
__FreeBSDABI="13"
|
||||
__SkipUnmount=1
|
||||
;;
|
||||
illumos)
|
||||
__CodeName=illumos
|
||||
__SkipUnmount=1
|
||||
;;
|
||||
haiku)
|
||||
__CodeName=haiku
|
||||
__BuildArch=x64
|
||||
__SkipUnmount=1
|
||||
;;
|
||||
|
@ -198,11 +286,15 @@ while :; do
|
|||
;;
|
||||
--rootfsdir|-rootfsdir)
|
||||
shift
|
||||
__RootfsDir=$1
|
||||
__RootfsDir="$1"
|
||||
;;
|
||||
--use-mirror)
|
||||
__UseMirror=1
|
||||
;;
|
||||
--use-jobs)
|
||||
shift
|
||||
MAXJOBS=$1
|
||||
;;
|
||||
*)
|
||||
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
|
||||
;;
|
||||
|
@ -211,82 +303,76 @@ while :; do
|
|||
shift
|
||||
done
|
||||
|
||||
if [ "$__BuildArch" == "armel" ]; then
|
||||
if [[ "$__BuildArch" == "armel" ]]; then
|
||||
__LLDB_Package="lldb-3.5-dev"
|
||||
fi
|
||||
|
||||
__UbuntuPackages+=" ${__LLDB_Package:-}"
|
||||
|
||||
if [ -z "$__RootfsDir" ] && [ ! -z "$ROOTFS_DIR" ]; then
|
||||
__RootfsDir=$ROOTFS_DIR
|
||||
if [[ -n "$__LLVM_MajorVersion" ]]; then
|
||||
__UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev"
|
||||
fi
|
||||
|
||||
if [ -z "$__RootfsDir" ]; then
|
||||
if [[ -z "$__RootfsDir" && -n "$ROOTFS_DIR" ]]; then
|
||||
__RootfsDir="$ROOTFS_DIR"
|
||||
fi
|
||||
|
||||
if [[ -z "$__RootfsDir" ]]; then
|
||||
__RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch"
|
||||
fi
|
||||
|
||||
if [ -d "$__RootfsDir" ]; then
|
||||
if [ $__SkipUnmount == 0 ]; then
|
||||
umount $__RootfsDir/* || true
|
||||
if [[ -d "$__RootfsDir" ]]; then
|
||||
if [[ "$__SkipUnmount" == "0" ]]; then
|
||||
umount "$__RootfsDir"/* || true
|
||||
fi
|
||||
rm -rf $__RootfsDir
|
||||
rm -rf "$__RootfsDir"
|
||||
fi
|
||||
|
||||
mkdir -p $__RootfsDir
|
||||
mkdir -p "$__RootfsDir"
|
||||
__RootfsDir="$( cd "$__RootfsDir" && pwd )"
|
||||
|
||||
if [[ "$__CodeName" == "alpine" ]]; then
|
||||
__ApkToolsVersion=2.9.1
|
||||
__AlpineVersion=3.9
|
||||
__ApkToolsDir=$(mktemp -d)
|
||||
wget https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -P $__ApkToolsDir
|
||||
tar -xf $__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -C $__ApkToolsDir
|
||||
mkdir -p $__RootfsDir/usr/bin
|
||||
cp -v /usr/bin/qemu-$__QEMUArch-static $__RootfsDir/usr/bin
|
||||
__ApkToolsDir="$(mktemp -d)"
|
||||
wget "https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz" -P "$__ApkToolsDir"
|
||||
tar -xf "$__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz" -C "$__ApkToolsDir"
|
||||
mkdir -p "$__RootfsDir"/usr/bin
|
||||
cp -v "/usr/bin/qemu-$__QEMUArch-static" "$__RootfsDir/usr/bin"
|
||||
|
||||
$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
|
||||
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main \
|
||||
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community \
|
||||
-U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
|
||||
"$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk" \
|
||||
-X "http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main" \
|
||||
-X "http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community" \
|
||||
-U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" --initdb \
|
||||
add $__AlpinePackages
|
||||
|
||||
$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
|
||||
-X http://dl-cdn.alpinelinux.org/alpine/edge/main \
|
||||
-U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
|
||||
add $__AlpinePackagesEdgeMain
|
||||
|
||||
$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
|
||||
-X http://dl-cdn.alpinelinux.org/alpine/edge/community \
|
||||
-U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
|
||||
add $__AlpinePackagesEdgeCommunity
|
||||
|
||||
rm -r $__ApkToolsDir
|
||||
rm -r "$__ApkToolsDir"
|
||||
elif [[ "$__CodeName" == "freebsd" ]]; then
|
||||
mkdir -p $__RootfsDir/usr/local/etc
|
||||
wget -O - https://download.freebsd.org/ftp/releases/amd64/${__FreeBSDBase}/base.txz | tar -C $__RootfsDir -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
|
||||
# For now, ask for 11 ABI even on 12. This can be revisited later.
|
||||
echo "ABI = \"FreeBSD:11:amd64\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > ${__RootfsDir}/usr/local/etc/pkg.conf
|
||||
echo "FreeBSD: { url: "pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > ${__RootfsDir}/etc/pkg/FreeBSD.conf
|
||||
mkdir -p $__RootfsDir/tmp
|
||||
mkdir -p "$__RootfsDir"/usr/local/etc
|
||||
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
|
||||
wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
|
||||
echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
|
||||
echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
|
||||
mkdir -p "$__RootfsDir"/tmp
|
||||
# get and build package manager
|
||||
wget -O - https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz | tar -C $__RootfsDir/tmp -zxf -
|
||||
cd $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
|
||||
wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
|
||||
cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
|
||||
# needed for install to succeed
|
||||
mkdir -p $__RootfsDir/host/etc
|
||||
./autogen.sh && ./configure --prefix=$__RootfsDir/host && make && make install
|
||||
rm -rf $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
|
||||
mkdir -p "$__RootfsDir"/host/etc
|
||||
./autogen.sh && ./configure --prefix="$__RootfsDir"/host && make -j "$JOBS" && make install
|
||||
rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
|
||||
# install packages we need.
|
||||
INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf update
|
||||
INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
|
||||
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update
|
||||
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
|
||||
elif [[ "$__CodeName" == "illumos" ]]; then
|
||||
mkdir "$__RootfsDir/tmp"
|
||||
pushd "$__RootfsDir/tmp"
|
||||
JOBS="$(getconf _NPROCESSORS_ONLN)"
|
||||
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
|
||||
echo "Downloading sysroot."
|
||||
wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
|
||||
echo "Building binutils. Please wait.."
|
||||
wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
|
||||
mkdir build-binutils && cd build-binutils
|
||||
../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="x86_64-sun-solaris2.10" --program-prefix="x86_64-illumos-" --with-sysroot="$__RootfsDir"
|
||||
../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
|
||||
make -j "$JOBS" && make install && cd ..
|
||||
echo "Building gcc. Please wait.."
|
||||
wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
|
||||
|
@ -296,7 +382,7 @@ elif [[ "$__CodeName" == "illumos" ]]; then
|
|||
CFLAGS_FOR_TARGET="-fPIC"
|
||||
export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET
|
||||
mkdir build-gcc && cd build-gcc
|
||||
../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="x86_64-sun-solaris2.10" --program-prefix="x86_64-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
|
||||
../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
|
||||
--with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \
|
||||
--disable-libquadmath-support --disable-shared --enable-tls
|
||||
make -j "$JOBS" && make install && cd ..
|
||||
|
@ -304,7 +390,7 @@ elif [[ "$__CodeName" == "illumos" ]]; then
|
|||
if [[ "$__UseMirror" == 1 ]]; then
|
||||
BaseUrl=http://pkgsrc.smartos.skylime.net
|
||||
fi
|
||||
BaseUrl="$BaseUrl"/packages/SmartOS/2020Q1/x86_64/All
|
||||
BaseUrl="$BaseUrl/packages/SmartOS/2020Q1/${__illumosArch}/All"
|
||||
echo "Downloading dependencies."
|
||||
read -ra array <<<"$__IllumosPackages"
|
||||
for package in "${array[@]}"; do
|
||||
|
@ -322,26 +408,90 @@ elif [[ "$__CodeName" == "illumos" ]]; then
|
|||
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
|
||||
wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
|
||||
wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
|
||||
elif [[ -n $__CodeName ]]; then
|
||||
qemu-debootstrap --arch $__UbuntuArch $__CodeName $__RootfsDir $__UbuntuRepo
|
||||
cp $__CrossDir/$__BuildArch/sources.list.$__CodeName $__RootfsDir/etc/apt/sources.list
|
||||
chroot $__RootfsDir apt-get update
|
||||
chroot $__RootfsDir apt-get -f -y install
|
||||
chroot $__RootfsDir apt-get -y install $__UbuntuPackages
|
||||
chroot $__RootfsDir symlinks -cr /usr
|
||||
elif [[ "$__CodeName" == "haiku" ]]; then
|
||||
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
|
||||
|
||||
if [ $__SkipUnmount == 0 ]; then
|
||||
umount $__RootfsDir/* || true
|
||||
echo "Building Haiku sysroot for x86_64"
|
||||
mkdir -p "$__RootfsDir/tmp"
|
||||
cd "$__RootfsDir/tmp"
|
||||
git clone -b hrev56235 https://review.haiku-os.org/haiku
|
||||
git clone -b btrev43195 https://review.haiku-os.org/buildtools
|
||||
cd "$__RootfsDir/tmp/buildtools" && git checkout 7487388f5110021d400b9f3b88e1a7f310dc066d
|
||||
|
||||
# Fetch some unmerged patches
|
||||
cd "$__RootfsDir/tmp/haiku"
|
||||
## Add development build profile (slimmer than nightly)
|
||||
git fetch origin refs/changes/64/4164/1 && git -c commit.gpgsign=false cherry-pick FETCH_HEAD
|
||||
|
||||
# Build jam
|
||||
cd "$__RootfsDir/tmp/buildtools/jam"
|
||||
make
|
||||
|
||||
# Configure cross tools
|
||||
echo "Building cross-compiler"
|
||||
mkdir -p "$__RootfsDir/generated"
|
||||
cd "$__RootfsDir/generated"
|
||||
"$__RootfsDir/tmp/haiku/configure" -j"$JOBS" --sysroot "$__RootfsDir" --cross-tools-source "$__RootfsDir/tmp/buildtools" --build-cross-tools x86_64
|
||||
|
||||
# Build Haiku packages
|
||||
echo "Building Haiku"
|
||||
echo 'HAIKU_BUILD_PROFILE = "development-raw" ;' > UserProfileConfig
|
||||
"$__RootfsDir/tmp/buildtools/jam/jam0" -j"$JOBS" -q '<build>package' '<repository>Haiku'
|
||||
|
||||
BaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg"
|
||||
|
||||
# Download additional packages
|
||||
echo "Downloading additional required packages"
|
||||
read -ra array <<<"$__HaikuPackages"
|
||||
for package in "${array[@]}"; do
|
||||
echo "Downloading $package..."
|
||||
# API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60
|
||||
# The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598
|
||||
hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_x86_64","versionType":"LATEST","naturalLanguageCode":"en"}' \
|
||||
--header='Content-Type:application/json' "$BaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
|
||||
wget -P "$__RootfsDir/generated/download" "$hpkgDownloadUrl"
|
||||
done
|
||||
|
||||
# Setup the sysroot
|
||||
echo "Setting up sysroot and extracting needed packages"
|
||||
mkdir -p "$__RootfsDir/boot/system"
|
||||
for file in "$__RootfsDir/generated/objects/haiku/x86_64/packaging/packages/"*.hpkg; do
|
||||
"$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
|
||||
done
|
||||
for file in "$__RootfsDir/generated/download/"*.hpkg; do
|
||||
"$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
|
||||
done
|
||||
|
||||
# Cleaning up temporary files
|
||||
echo "Cleaning up temporary files"
|
||||
rm -rf "$__RootfsDir/tmp"
|
||||
for name in "$__RootfsDir/generated/"*; do
|
||||
if [[ "$name" =~ "cross-tools-" ]]; then
|
||||
: # Keep the cross-compiler
|
||||
else
|
||||
rm -rf "$name"
|
||||
fi
|
||||
done
|
||||
elif [[ -n "$__CodeName" ]]; then
|
||||
qemu-debootstrap $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
|
||||
cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list"
|
||||
chroot "$__RootfsDir" apt-get update
|
||||
chroot "$__RootfsDir" apt-get -f -y install
|
||||
chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages
|
||||
chroot "$__RootfsDir" symlinks -cr /usr
|
||||
chroot "$__RootfsDir" apt-get clean
|
||||
|
||||
if [[ "$__SkipUnmount" == "0" ]]; then
|
||||
umount "$__RootfsDir"/* || true
|
||||
fi
|
||||
|
||||
if [[ "$__BuildArch" == "arm" && "$__CodeName" == "trusty" ]]; then
|
||||
pushd $__RootfsDir
|
||||
patch -p1 < $__CrossDir/$__BuildArch/trusty.patch
|
||||
patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch
|
||||
if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then
|
||||
pushd "$__RootfsDir"
|
||||
patch -p1 < "$__CrossDir/$__BuildArch/armel.jessie.patch"
|
||||
popd
|
||||
fi
|
||||
elif [[ "$__Tizen" == "tizen" ]]; then
|
||||
ROOTFS_DIR=$__RootfsDir $__CrossDir/$__BuildArch/tizen-build-rootfs.sh
|
||||
ROOTFS_DIR="$__RootfsDir" "$__CrossDir/$__BuildArch/tizen-build-rootfs.sh"
|
||||
else
|
||||
echo "Unsupported target platform."
|
||||
usage;
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
|
|
@ -0,0 +1 @@
|
|||
deb http://deb.debian.org/debian-ports sid main
|
|
@ -0,0 +1,11 @@
|
|||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
|
|
@ -3,21 +3,25 @@ set(CROSS_ROOTFS $ENV{ROOTFS_DIR})
|
|||
set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH})
|
||||
if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version)
|
||||
set(CMAKE_SYSTEM_NAME FreeBSD)
|
||||
set(FREEBSD 1)
|
||||
elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc)
|
||||
set(CMAKE_SYSTEM_NAME SunOS)
|
||||
set(ILLUMOS 1)
|
||||
elseif(EXISTS ${CROSS_ROOTFS}/boot/system/develop/headers/config/HaikuConfig.h)
|
||||
set(CMAKE_SYSTEM_NAME Haiku)
|
||||
else()
|
||||
set(CMAKE_SYSTEM_NAME Linux)
|
||||
set(LINUX 1)
|
||||
endif()
|
||||
set(CMAKE_SYSTEM_VERSION 1)
|
||||
|
||||
if(TARGET_ARCH_NAME STREQUAL "armel")
|
||||
set(CMAKE_SYSTEM_PROCESSOR armv7l)
|
||||
set(TOOLCHAIN "arm-linux-gnueabi")
|
||||
if("$ENV{__DistroRid}" MATCHES "tizen.*")
|
||||
set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "arm")
|
||||
if(EXISTS ${CROSS_ROOTFS}/etc/tizen-release)
|
||||
set(TIZEN 1)
|
||||
elseif(EXISTS ${CROSS_ROOTFS}/android_platform)
|
||||
set(ANDROID 1)
|
||||
endif()
|
||||
|
||||
if(TARGET_ARCH_NAME STREQUAL "arm")
|
||||
set(CMAKE_SYSTEM_PROCESSOR armv7l)
|
||||
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv7-alpine-linux-musleabihf)
|
||||
set(TOOLCHAIN "armv7-alpine-linux-musleabihf")
|
||||
|
@ -26,27 +30,65 @@ elseif(TARGET_ARCH_NAME STREQUAL "arm")
|
|||
else()
|
||||
set(TOOLCHAIN "arm-linux-gnueabihf")
|
||||
endif()
|
||||
if(TIZEN)
|
||||
set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf/9.2.0")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "arm64")
|
||||
set(CMAKE_SYSTEM_PROCESSOR aarch64)
|
||||
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl)
|
||||
set(TOOLCHAIN "aarch64-alpine-linux-musl")
|
||||
else()
|
||||
elseif(LINUX)
|
||||
set(TOOLCHAIN "aarch64-linux-gnu")
|
||||
if(TIZEN)
|
||||
set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0")
|
||||
endif()
|
||||
elseif(FREEBSD)
|
||||
set(triple "aarch64-unknown-freebsd12")
|
||||
endif()
|
||||
if("$ENV{__DistroRid}" MATCHES "tizen.*")
|
||||
set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0")
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "armel")
|
||||
set(CMAKE_SYSTEM_PROCESSOR armv7l)
|
||||
set(TOOLCHAIN "arm-linux-gnueabi")
|
||||
if(TIZEN)
|
||||
set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "armv6")
|
||||
set(CMAKE_SYSTEM_PROCESSOR armv6l)
|
||||
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
|
||||
set(TOOLCHAIN "armv6-alpine-linux-musleabihf")
|
||||
else()
|
||||
set(TOOLCHAIN "arm-linux-gnueabihf")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "ppc64le")
|
||||
set(CMAKE_SYSTEM_PROCESSOR ppc64le)
|
||||
set(TOOLCHAIN "powerpc64le-linux-gnu")
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "riscv64")
|
||||
set(CMAKE_SYSTEM_PROCESSOR riscv64)
|
||||
set(TOOLCHAIN "riscv64-linux-gnu")
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "s390x")
|
||||
set(CMAKE_SYSTEM_PROCESSOR s390x)
|
||||
set(TOOLCHAIN "s390x-linux-gnu")
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "x64")
|
||||
set(CMAKE_SYSTEM_PROCESSOR x86_64)
|
||||
if(LINUX)
|
||||
set(TOOLCHAIN "x86_64-linux-gnu")
|
||||
if(TIZEN)
|
||||
set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu/9.2.0")
|
||||
endif()
|
||||
elseif(FREEBSD)
|
||||
set(triple "x86_64-unknown-freebsd12")
|
||||
elseif(ILLUMOS)
|
||||
set(TOOLCHAIN "x86_64-illumos")
|
||||
elseif(HAIKU)
|
||||
set(TOOLCHAIN "x64_64-unknown-haiku")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "x86")
|
||||
set(CMAKE_SYSTEM_PROCESSOR i686)
|
||||
set(TOOLCHAIN "i686-linux-gnu")
|
||||
elseif (CMAKE_SYSTEM_NAME STREQUAL "FreeBSD")
|
||||
set(CMAKE_SYSTEM_PROCESSOR "x86_64")
|
||||
set(triple "x86_64-unknown-freebsd11")
|
||||
elseif (ILLUMOS)
|
||||
set(CMAKE_SYSTEM_PROCESSOR "x86_64")
|
||||
set(TOOLCHAIN "x86_64-illumos")
|
||||
if(TIZEN)
|
||||
set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu/9.2.0")
|
||||
endif()
|
||||
else()
|
||||
message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only armel, arm, arm64 and x86 are supported!")
|
||||
message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, ppc64le, riscv64, s390x, x64 and x86 are supported!")
|
||||
endif()
|
||||
|
||||
if(DEFINED ENV{TOOLCHAIN})
|
||||
|
@ -54,7 +96,11 @@ if(DEFINED ENV{TOOLCHAIN})
|
|||
endif()
|
||||
|
||||
# Specify include paths
|
||||
if(DEFINED TIZEN_TOOLCHAIN)
|
||||
if(TIZEN)
|
||||
if(TARGET_ARCH_NAME STREQUAL "arm")
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7hl-tizen-linux-gnueabihf)
|
||||
endif()
|
||||
if(TARGET_ARCH_NAME STREQUAL "armel")
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi)
|
||||
|
@ -63,9 +109,13 @@ if(DEFINED TIZEN_TOOLCHAIN)
|
|||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/aarch64-tizen-linux-gnu)
|
||||
endif()
|
||||
if(TARGET_ARCH_NAME STREQUAL "x86")
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/i586-tizen-linux-gnu)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if("$ENV{__DistroRid}" MATCHES "android.*")
|
||||
if(ANDROID)
|
||||
if(TARGET_ARCH_NAME STREQUAL "arm")
|
||||
set(ANDROID_ABI armeabi-v7a)
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "arm64")
|
||||
|
@ -73,7 +123,9 @@ if("$ENV{__DistroRid}" MATCHES "android.*")
|
|||
endif()
|
||||
|
||||
# extract platform number required by the NDK's toolchain
|
||||
string(REGEX REPLACE ".*\\.([0-9]+)-.*" "\\1" ANDROID_PLATFORM "$ENV{__DistroRid}")
|
||||
file(READ "${CROSS_ROOTFS}/android_platform" RID_FILE_CONTENTS)
|
||||
string(REPLACE "RID=" "" ANDROID_RID "${RID_FILE_CONTENTS}")
|
||||
string(REGEX REPLACE ".*\\.([0-9]+)-.*" "\\1" ANDROID_PLATFORM "${ANDROID_RID}")
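As an aside, the REPLACE/REGEX REPLACE pair above first strips the "RID=" prefix read from the rootfs's android_platform file and then keeps only the API level between the dot and the dash. A minimal bash sketch of the same extraction, using a hypothetical RID value (the real value comes from ${CROSS_ROOTFS}/android_platform and may differ):

    # hypothetical RID for illustration only
    ANDROID_RID="android.28-arm64"
    # keep only the digits between the '.' and the following '-'
    ANDROID_PLATFORM=$(echo "$ANDROID_RID" | sed -E 's/.*\.([0-9]+)-.*/\1/')
    echo "$ANDROID_PLATFORM"   # prints 28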
|
||||
|
||||
set(ANDROID_TOOLCHAIN clang)
|
||||
set(FEATURE_EVENT_TRACE 0) # disable event trace as there is no lttng-ust package in termux repository
|
||||
|
@ -82,12 +134,15 @@ if("$ENV{__DistroRid}" MATCHES "android.*")
|
|||
|
||||
# include official NDK toolchain script
|
||||
include(${CROSS_ROOTFS}/../build/cmake/android.toolchain.cmake)
|
||||
elseif(CMAKE_SYSTEM_NAME STREQUAL "FreeBSD")
|
||||
elseif(FREEBSD)
|
||||
# we cross-compile by instructing clang
|
||||
set(CMAKE_C_COMPILER_TARGET ${triple})
|
||||
set(CMAKE_CXX_COMPILER_TARGET ${triple})
|
||||
set(CMAKE_ASM_COMPILER_TARGET ${triple})
|
||||
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=lld")
|
||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=lld")
|
||||
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=lld")
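For context, the FreeBSD branch above cross-compiles purely by handing clang a target triple, a sysroot and lld; no GNU toolchain prefix is involved. A rough sketch of the compiler invocation this amounts to, assuming a hypothetical rootfs path and the aarch64 triple set earlier in this file:

    # hypothetical rootfs path; CMake assembles the real command line from the variables above
    ROOTFS_DIR="$HOME/freebsd-aarch64-rootfs"
    clang --target=aarch64-unknown-freebsd12 --sysroot="$ROOTFS_DIR" -fuse-ld=lld hello.c -o hello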
|
||||
elseif(ILLUMOS)
|
||||
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
|
||||
|
||||
|
@ -119,6 +174,41 @@ elseif(ILLUMOS)
|
|||
|
||||
set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
|
||||
set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
|
||||
elseif(HAIKU)
|
||||
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
|
||||
|
||||
set(TOOLSET_PREFIX ${TOOLCHAIN}-)
|
||||
function(locate_toolchain_exec exec var)
|
||||
string(TOUPPER ${exec} EXEC_UPPERCASE)
|
||||
if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
|
||||
set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
|
||||
return()
|
||||
endif()
|
||||
|
||||
set(SEARCH_PATH "${CROSS_ROOTFS}/generated/cross-tools-x86_64/bin")
|
||||
|
||||
find_program(EXEC_LOCATION_${exec}
|
||||
PATHS ${SEARCH_PATH}
|
||||
NAMES
|
||||
"${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
|
||||
"${TOOLSET_PREFIX}${exec}")
|
||||
|
||||
if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
|
||||
message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
|
||||
endif()
|
||||
set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
|
||||
|
||||
locate_toolchain_exec(gcc CMAKE_C_COMPILER)
|
||||
locate_toolchain_exec(g++ CMAKE_CXX_COMPILER)
|
||||
|
||||
set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
|
||||
set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
|
||||
|
||||
# let CMake set up the correct search paths
|
||||
include(Platform/Haiku)
|
||||
else()
|
||||
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
|
||||
|
||||
|
@ -135,20 +225,24 @@ function(add_toolchain_linker_flag Flag)
|
|||
if (NOT Config STREQUAL "")
|
||||
set(CONFIG_SUFFIX "_${Config}")
|
||||
endif()
|
||||
set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}} ${Flag}" PARENT_SCOPE)
|
||||
set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}} ${Flag}" PARENT_SCOPE)
|
||||
set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE)
|
||||
set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
if(LINUX)
|
||||
add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
|
||||
add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}")
|
||||
endif()
|
||||
|
||||
if(TARGET_ARCH_NAME STREQUAL "armel")
|
||||
if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only
|
||||
if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
|
||||
if(TIZEN)
|
||||
add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "arm64")
|
||||
if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only
|
||||
if(TIZEN)
|
||||
add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64")
|
||||
|
@ -160,6 +254,13 @@ elseif(TARGET_ARCH_NAME STREQUAL "arm64")
|
|||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "x86")
|
||||
add_toolchain_linker_flag(-m32)
|
||||
|
||||
if(TIZEN)
|
||||
add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
|
||||
endif()
|
||||
elseif(ILLUMOS)
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/amd64/lib")
|
||||
|
@ -167,7 +268,7 @@ endif()
|
|||
|
||||
# Specify compile options
|
||||
|
||||
if((TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$" AND NOT "$ENV{__DistroRid}" MATCHES "android.*") OR ILLUMOS)
|
||||
if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|ppc64le|riscv64|s390x)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU)
|
||||
set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
|
||||
set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
|
||||
set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
|
||||
|
@ -194,8 +295,8 @@ elseif(TARGET_ARCH_NAME STREQUAL "x86")
|
|||
add_compile_options(-Wno-error=unused-command-line-argument)
|
||||
endif()
|
||||
|
||||
if(DEFINED TIZEN_TOOLCHAIN)
|
||||
if(TARGET_ARCH_NAME MATCHES "^(armel|arm64)$")
|
||||
if(TIZEN)
|
||||
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64|x86)$")
|
||||
add_compile_options(-Wno-deprecated-declarations) # compile-time option
|
||||
add_compile_options(-D__extern_always_inline=inline) # compile-time option
|
||||
endif()
|
||||
|
|
|
@@ -1,11 +0,0 @@
deb http://archive.ubuntu.com/ubuntu/ trusty main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ trusty main restricted universe

deb http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe

deb http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted
deb-src http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted

deb http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse
deb-src http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse

@@ -0,0 +1,35 @@
#!/usr/bin/env bash
set -e

__X86_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__TIZEN_CROSSDIR="$__X86_CrossDir/tizen"

if [[ -z "$ROOTFS_DIR" ]]; then
    echo "ROOTFS_DIR is not defined."
    exit 1;
fi

TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
mkdir -p $TIZEN_TMP_DIR

# Download files
echo ">>Start downloading files"
VERBOSE=1 $__X86_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
echo "<<Finish downloading files"

echo ">>Start constructing Tizen rootfs"
TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
cd $ROOTFS_DIR
for f in $TIZEN_RPM_FILES; do
    rpm2cpio $f | cpio -idm --quiet
done
echo "<<Finish constructing Tizen rootfs"

# Cleanup tmp
rm -rf $TIZEN_TMP_DIR

# Configure Tizen rootfs
echo ">>Start configuring Tizen rootfs"
ln -sfn asm-x86 ./usr/include/asm
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
echo "<<Finish configuring Tizen rootfs"
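A hedged usage sketch for the rootfs script above; the file name is an assumption (this diff does not show where the script is checked in), but ROOTFS_DIR has to point at the directory the Tizen rootfs should be unpacked into:

    # assumed file name; run from the directory where this script lives
    export ROOTFS_DIR="$HOME/tizen-x86-rootfs"
    mkdir -p "$ROOTFS_DIR"
    ./tizen-build-rootfs.sh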
|
@ -0,0 +1,170 @@
|
|||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
|
||||
VERBOSE=0
|
||||
fi
|
||||
|
||||
Log()
|
||||
{
|
||||
if [ $VERBOSE -ge $1 ]; then
|
||||
echo ${@:2}
|
||||
fi
|
||||
}
|
||||
|
||||
Inform()
|
||||
{
|
||||
Log 1 -e "\x1B[0;34m$@\x1B[m"
|
||||
}
|
||||
|
||||
Debug()
|
||||
{
|
||||
Log 2 -e "\x1B[0;32m$@\x1B[m"
|
||||
}
|
||||
|
||||
Error()
|
||||
{
|
||||
>&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
|
||||
}
|
||||
|
||||
Fetch()
|
||||
{
|
||||
URL=$1
|
||||
FILE=$2
|
||||
PROGRESS=$3
|
||||
if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
|
||||
CURL_OPT="--progress-bar"
|
||||
else
|
||||
CURL_OPT="--silent"
|
||||
fi
|
||||
curl $CURL_OPT $URL > $FILE
|
||||
}
|
||||
|
||||
hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
|
||||
hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
|
||||
hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
|
||||
|
||||
TMPDIR=$1
|
||||
if [ ! -d $TMPDIR ]; then
|
||||
TMPDIR=./tizen_tmp
|
||||
Debug "Create temporary directory : $TMPDIR"
|
||||
mkdir -p $TMPDIR
|
||||
fi
|
||||
|
||||
TIZEN_URL=http://download.tizen.org/snapshots/tizen
|
||||
BUILD_XML=build.xml
|
||||
REPOMD_XML=repomd.xml
|
||||
PRIMARY_XML=primary.xml
|
||||
TARGET_URL="http://__not_initialized"
|
||||
|
||||
Xpath_get()
|
||||
{
|
||||
XPATH_RESULT=''
|
||||
XPATH=$1
|
||||
XML_FILE=$2
|
||||
RESULT=$(xmllint --xpath $XPATH $XML_FILE)
|
||||
if [[ -z ${RESULT// } ]]; then
|
||||
Error "Can not find target from $XML_FILE"
|
||||
Debug "Xpath = $XPATH"
|
||||
exit 1
|
||||
fi
|
||||
XPATH_RESULT=$RESULT
|
||||
}
|
||||
|
||||
fetch_tizen_pkgs_init()
|
||||
{
|
||||
TARGET=$1
|
||||
PROFILE=$2
|
||||
Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
|
||||
|
||||
TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
|
||||
if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
|
||||
mkdir -p $TMP_PKG_DIR
|
||||
|
||||
PKG_URL=$TIZEN_URL/$PROFILE/latest
|
||||
|
||||
BUILD_XML_URL=$PKG_URL/$BUILD_XML
|
||||
TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
|
||||
TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
|
||||
TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
|
||||
TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
|
||||
|
||||
Fetch $BUILD_XML_URL $TMP_BUILD
|
||||
|
||||
Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
|
||||
|
||||
TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
|
||||
Xpath_get $TARGET_XPATH $TMP_BUILD
|
||||
TARGET_PATH=$XPATH_RESULT
|
||||
TARGET_URL=$PKG_URL/$TARGET_PATH
|
||||
|
||||
REPOMD_URL=$TARGET_URL/repodata/repomd.xml
|
||||
PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
|
||||
|
||||
Fetch $REPOMD_URL $TMP_REPOMD
|
||||
|
||||
Debug "fetch $REPOMD_URL to $TMP_REPOMD"
|
||||
|
||||
Xpath_get $PRIMARY_XPATH $TMP_REPOMD
|
||||
PRIMARY_XML_PATH=$XPATH_RESULT
|
||||
PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
|
||||
|
||||
Fetch $PRIMARY_URL $TMP_PRIMARYGZ
|
||||
|
||||
Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
|
||||
|
||||
gunzip $TMP_PRIMARYGZ
|
||||
|
||||
Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
|
||||
}
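fetch_tizen_pkgs_init above resolves the binary repo URL entirely with xmllint XPath queries against build.xml and repomd.xml. The same query pattern, sketched standalone with a hypothetical target name:

    # hypothetical target; the script takes the real one as its first argument
    TARGET=standard
    XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
    # prints the relative repo path on success; xmllint fails if the node is missing
    xmllint --xpath "$XPATH" build.xml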
|
||||
|
||||
fetch_tizen_pkgs()
|
||||
{
|
||||
ARCH=$1
|
||||
PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
|
||||
|
||||
PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
|
||||
|
||||
for pkg in ${@:2}
|
||||
do
|
||||
Inform "Fetching... $pkg"
|
||||
XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
|
||||
XPATH=${XPATH/_ARCH_/$ARCH}
|
||||
Xpath_get $XPATH $TMP_PRIMARY
|
||||
PKG_PATH=$XPATH_RESULT
|
||||
|
||||
XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
|
||||
XPATH=${XPATH/_ARCH_/$ARCH}
|
||||
Xpath_get $XPATH $TMP_PRIMARY
|
||||
CHECKSUM=$XPATH_RESULT
|
||||
|
||||
PKG_URL=$TARGET_URL/$PKG_PATH
|
||||
PKG_FILE=$(basename $PKG_PATH)
|
||||
PKG_PATH=$TMPDIR/$PKG_FILE
|
||||
|
||||
Debug "Download $PKG_URL to $PKG_PATH"
|
||||
Fetch $PKG_URL $PKG_PATH true
|
||||
|
||||
echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
|
||||
if [ $? -ne 0 ]; then
|
||||
Error "Fail to fetch $PKG_URL to $PKG_PATH"
|
||||
Debug "Checksum = $CHECKSUM"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
}
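Each RPM fetched above is verified with sha256sum -c against the checksum read out of primary.xml. The core of that verification, sketched with placeholder values:

    # placeholder checksum and package name, for illustration only
    CHECKSUM=0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef
    PKG_PATH=./example-package.i686.rpm
    if ! echo "$CHECKSUM  $PKG_PATH" | sha256sum -c - > /dev/null; then
        echo "checksum mismatch for $PKG_PATH" >&2
        exit 1
    fi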
|
||||
|
||||
Inform "Initialize i686 base"
|
||||
fetch_tizen_pkgs_init standard base
|
||||
Inform "fetch common packages"
|
||||
fetch_tizen_pkgs i686 gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
|
||||
Inform "fetch coreclr packages"
|
||||
fetch_tizen_pkgs i686 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
|
||||
Inform "fetch corefx packages"
|
||||
fetch_tizen_pkgs i686 libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
|
||||
|
||||
Inform "Initialize standard unified"
|
||||
fetch_tizen_pkgs_init standard unified
|
||||
Inform "fetch corefx packages"
|
||||
fetch_tizen_pkgs i686 gssdp gssdp-devel tizen-release
|
||||
|
|
@@ -0,0 +1,9 @@
diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
+++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
@@ -2,4 +2,4 @@
 Use the shared library, but some functions are only in
 the static library, so try that secondarily. */
 OUTPUT_FORMAT(elf32-i386)
-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.2 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.2 ) )

|
@@ -6,7 +6,7 @@ versionEndpoint='https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc
verbosity='minimal'

while [[ $# > 0 ]]; do
  opt="$(echo "$1" | awk '{print tolower($0)}')"
  opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
  case "$opt" in
    --darcversion)
      darcVersion=$2

@@ -53,7 +53,7 @@ fi
function InstallDarcCli {
  local darc_cli_package_name="microsoft.dotnet.darc"

  InitializeDotNetCli
  InitializeDotNetCli true
  local dotnet_root=$_InitializeDotNetCli

  if [ -z "$toolpath" ]; then

|
|
@ -1,774 +0,0 @@
|
|||
#
|
||||
# Copyright (c) .NET Foundation and contributors. All rights reserved.
|
||||
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
|
||||
#
|
||||
|
||||
# Copied from https://dot.net/v1/dotnet-install.ps1 on 8/26/2020
|
||||
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Installs dotnet cli
|
||||
.DESCRIPTION
|
||||
Installs dotnet cli. If dotnet installation already exists in the given directory
|
||||
it will update it only if the requested version differs from the one already installed.
|
||||
.PARAMETER Channel
|
||||
Default: LTS
|
||||
Download from the Channel specified. Possible values:
|
||||
- Current - most current release
|
||||
- LTS - most current supported release
|
||||
- 2-part version in a format A.B - represents a specific release
|
||||
examples: 2.0, 1.0
|
||||
- Branch name
|
||||
examples: release/2.0.0, Master
|
||||
Note: The version parameter overrides the channel parameter.
|
||||
.PARAMETER Version
|
||||
Default: latest
|
||||
Represents a build version on specific channel. Possible values:
|
||||
- latest - most latest build on specific channel
|
||||
- coherent - most latest coherent build on specific channel
|
||||
coherent applies only to SDK downloads
|
||||
- 3-part version in a format A.B.C - represents specific version of build
|
||||
examples: 2.0.0-preview2-006120, 1.1.0
|
||||
.PARAMETER InstallDir
|
||||
Default: %LocalAppData%\Microsoft\dotnet
|
||||
Path to where to install dotnet. Note that binaries will be placed directly in a given directory.
|
||||
.PARAMETER Architecture
|
||||
Default: <auto> - this value represents currently running OS architecture
|
||||
Architecture of dotnet binaries to be installed.
|
||||
Possible values are: <auto>, amd64, x64, x86, arm64, arm
|
||||
.PARAMETER SharedRuntime
|
||||
This parameter is obsolete and may be removed in a future version of this script.
|
||||
The recommended alternative is '-Runtime dotnet'.
|
||||
Installs just the shared runtime bits, not the entire SDK.
|
||||
.PARAMETER Runtime
|
||||
Installs just a shared runtime, not the entire SDK.
|
||||
Possible values:
|
||||
- dotnet - the Microsoft.NETCore.App shared runtime
|
||||
- aspnetcore - the Microsoft.AspNetCore.App shared runtime
|
||||
- windowsdesktop - the Microsoft.WindowsDesktop.App shared runtime
|
||||
.PARAMETER DryRun
|
||||
If set it will not perform installation but instead display what command line to use to consistently install
|
||||
currently requested version of dotnet cli. In example if you specify version 'latest' it will display a link
|
||||
with specific version so that this command can be used deterministicly in a build script.
|
||||
It also displays binaries location if you prefer to install or download it yourself.
|
||||
.PARAMETER NoPath
|
||||
By default this script will set environment variable PATH for the current process to the binaries folder inside installation folder.
|
||||
If set it will display binaries location but not set any environment variable.
|
||||
.PARAMETER Verbose
|
||||
Displays diagnostics information.
|
||||
.PARAMETER AzureFeed
|
||||
Default: https://dotnetcli.azureedge.net/dotnet
|
||||
This parameter typically is not changed by the user.
|
||||
It allows changing the URL for the Azure feed used by this installer.
|
||||
.PARAMETER UncachedFeed
|
||||
This parameter typically is not changed by the user.
|
||||
It allows changing the URL for the Uncached feed used by this installer.
|
||||
.PARAMETER FeedCredential
|
||||
Used as a query string to append to the Azure feed.
|
||||
It allows changing the URL to use non-public blob storage accounts.
|
||||
.PARAMETER ProxyAddress
|
||||
If set, the installer will use the proxy when making web requests
|
||||
.PARAMETER ProxyUseDefaultCredentials
|
||||
Default: false
|
||||
Use default credentials, when using proxy address.
|
||||
.PARAMETER ProxyBypassList
|
||||
If set with ProxyAddress, will provide the list of comma separated urls that will bypass the proxy
|
||||
.PARAMETER SkipNonVersionedFiles
|
||||
Default: false
|
||||
Skips installing non-versioned files if they already exist, such as dotnet.exe.
|
||||
.PARAMETER NoCdn
|
||||
Disable downloading from the Azure CDN, and use the uncached feed directly.
|
||||
.PARAMETER JSonFile
|
||||
Determines the SDK version from a user specified global.json file
|
||||
Note: global.json must have a value for 'SDK:Version'
|
||||
#>
|
||||
[cmdletbinding()]
|
||||
param(
|
||||
[string]$Channel="LTS",
|
||||
[string]$Version="Latest",
|
||||
[string]$JSonFile,
|
||||
[string]$InstallDir="<auto>",
|
||||
[string]$Architecture="<auto>",
|
||||
[ValidateSet("dotnet", "aspnetcore", "windowsdesktop", IgnoreCase = $false)]
|
||||
[string]$Runtime,
|
||||
[Obsolete("This parameter may be removed in a future version of this script. The recommended alternative is '-Runtime dotnet'.")]
|
||||
[switch]$SharedRuntime,
|
||||
[switch]$DryRun,
|
||||
[switch]$NoPath,
|
||||
[string]$AzureFeed="https://dotnetcli.azureedge.net/dotnet",
|
||||
[string]$UncachedFeed="https://dotnetcli.blob.core.windows.net/dotnet",
|
||||
[string]$FeedCredential,
|
||||
[string]$ProxyAddress,
|
||||
[switch]$ProxyUseDefaultCredentials,
|
||||
[string[]]$ProxyBypassList=@(),
|
||||
[switch]$SkipNonVersionedFiles,
|
||||
[switch]$NoCdn
|
||||
)
|
||||
|
||||
Set-StrictMode -Version Latest
|
||||
$ErrorActionPreference="Stop"
|
||||
$ProgressPreference="SilentlyContinue"
|
||||
|
||||
if ($NoCdn) {
|
||||
$AzureFeed = $UncachedFeed
|
||||
}
|
||||
|
||||
$BinFolderRelativePath=""
|
||||
|
||||
if ($SharedRuntime -and (-not $Runtime)) {
|
||||
$Runtime = "dotnet"
|
||||
}
|
||||
|
||||
# example path with regex: shared/1.0.0-beta-12345/somepath
|
||||
$VersionRegEx="/\d+\.\d+[^/]+/"
|
||||
$OverrideNonVersionedFiles = !$SkipNonVersionedFiles
|
||||
|
||||
function Say($str) {
|
||||
try
|
||||
{
|
||||
Write-Host "dotnet-install: $str"
|
||||
}
|
||||
catch
|
||||
{
|
||||
# Some platforms cannot utilize Write-Host (Azure Functions, for instance). Fall back to Write-Output
|
||||
Write-Output "dotnet-install: $str"
|
||||
}
|
||||
}
|
||||
|
||||
function Say-Verbose($str) {
|
||||
try
|
||||
{
|
||||
Write-Verbose "dotnet-install: $str"
|
||||
}
|
||||
catch
|
||||
{
|
||||
# Some platforms cannot utilize Write-Verbose (Azure Functions, for instance). Fall back to Write-Output
|
||||
Write-Output "dotnet-install: $str"
|
||||
}
|
||||
}
|
||||
|
||||
function Say-Invocation($Invocation) {
|
||||
$command = $Invocation.MyCommand;
|
||||
$args = (($Invocation.BoundParameters.Keys | foreach { "-$_ `"$($Invocation.BoundParameters[$_])`"" }) -join " ")
|
||||
Say-Verbose "$command $args"
|
||||
}
|
||||
|
||||
function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [int]$SecondsBetweenAttempts = 1) {
|
||||
$Attempts = 0
|
||||
|
||||
while ($true) {
|
||||
try {
|
||||
return $ScriptBlock.Invoke()
|
||||
}
|
||||
catch {
|
||||
$Attempts++
|
||||
if ($Attempts -lt $MaxAttempts) {
|
||||
Start-Sleep $SecondsBetweenAttempts
|
||||
}
|
||||
else {
|
||||
throw
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function Get-Machine-Architecture() {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
# On PS x86, PROCESSOR_ARCHITECTURE reports x86 even on x64 systems.
|
||||
# To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432.
|
||||
# PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE.
|
||||
# Possible values: amd64, x64, x86, arm64, arm
|
||||
|
||||
if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null )
|
||||
{
|
||||
return $ENV:PROCESSOR_ARCHITEW6432
|
||||
}
|
||||
|
||||
return $ENV:PROCESSOR_ARCHITECTURE
|
||||
}
|
||||
|
||||
function Get-CLIArchitecture-From-Architecture([string]$Architecture) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
switch ($Architecture.ToLower()) {
|
||||
{ $_ -eq "<auto>" } { return Get-CLIArchitecture-From-Architecture $(Get-Machine-Architecture) }
|
||||
{ ($_ -eq "amd64") -or ($_ -eq "x64") } { return "x64" }
|
||||
{ $_ -eq "x86" } { return "x86" }
|
||||
{ $_ -eq "arm" } { return "arm" }
|
||||
{ $_ -eq "arm64" } { return "arm64" }
|
||||
default { throw "Architecture not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues" }
|
||||
}
|
||||
}
|
||||
|
||||
# The version text returned from the feeds is a 1-line or 2-line string:
|
||||
# For the SDK and the dotnet runtime (2 lines):
|
||||
# Line 1: # commit_hash
|
||||
# Line 2: # 4-part version
|
||||
# For the aspnetcore runtime (1 line):
|
||||
# Line 1: # 4-part version
|
||||
function Get-Version-Info-From-Version-Text([string]$VersionText) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$Data = -split $VersionText
|
||||
|
||||
$VersionInfo = @{
|
||||
CommitHash = $(if ($Data.Count -gt 1) { $Data[0] })
|
||||
Version = $Data[-1] # last line is always the version number.
|
||||
}
|
||||
return $VersionInfo
|
||||
}
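Per the comment above, the latest.version payload is either a commit hash plus a version (SDK and dotnet runtime) or a single version line (aspnetcore runtime), and the last line is always the version. A bash sketch of the same split, assuming a hypothetical two-line payload:

    # hypothetical feed payload: commit hash on line 1, version on the last line
    version_text=$'0123456789abcdef0123456789abcdef01234567\n6.0.100'
    version=$(echo "$version_text" | tail -n 1)          # last line is always the version
    if [ "$(echo "$version_text" | wc -l)" -gt 1 ]; then
        commit_hash=$(echo "$version_text" | head -n 1)  # only present for two-line payloads
    fi
    echo "version=$version commit=${commit_hash:-<none>}"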
|
||||
|
||||
function Load-Assembly([string] $Assembly) {
|
||||
try {
|
||||
Add-Type -Assembly $Assembly | Out-Null
|
||||
}
|
||||
catch {
|
||||
# On Nano Server, Powershell Core Edition is used. Add-Type is unable to resolve base class assemblies because they are not GAC'd.
|
||||
# Loading the base class assemblies is not unnecessary as the types will automatically get resolved.
|
||||
}
|
||||
}
|
||||
|
||||
function GetHTTPResponse([Uri] $Uri)
|
||||
{
|
||||
Invoke-With-Retry(
|
||||
{
|
||||
|
||||
$HttpClient = $null
|
||||
|
||||
try {
|
||||
# HttpClient is used vs Invoke-WebRequest in order to support Nano Server which doesn't support the Invoke-WebRequest cmdlet.
|
||||
Load-Assembly -Assembly System.Net.Http
|
||||
|
||||
if(-not $ProxyAddress) {
|
||||
try {
|
||||
# Despite no proxy being explicitly specified, we may still be behind a default proxy
|
||||
$DefaultProxy = [System.Net.WebRequest]::DefaultWebProxy;
|
||||
if($DefaultProxy -and (-not $DefaultProxy.IsBypassed($Uri))) {
|
||||
$ProxyAddress = $DefaultProxy.GetProxy($Uri).OriginalString
|
||||
$ProxyUseDefaultCredentials = $true
|
||||
}
|
||||
} catch {
|
||||
# Eat the exception and move forward as the above code is an attempt
|
||||
# at resolving the DefaultProxy that may not have been a problem.
|
||||
$ProxyAddress = $null
|
||||
Say-Verbose("Exception ignored: $_.Exception.Message - moving forward...")
|
||||
}
|
||||
}
|
||||
|
||||
if($ProxyAddress) {
|
||||
$HttpClientHandler = New-Object System.Net.Http.HttpClientHandler
|
||||
$HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{
|
||||
Address=$ProxyAddress;
|
||||
UseDefaultCredentials=$ProxyUseDefaultCredentials;
|
||||
BypassList = $ProxyBypassList;
|
||||
}
|
||||
$HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler
|
||||
}
|
||||
else {
|
||||
|
||||
$HttpClient = New-Object System.Net.Http.HttpClient
|
||||
}
|
||||
# Default timeout for HttpClient is 100s. For a 50 MB download this assumes 500 KB/s average, any less will time out
|
||||
# 20 minutes allows it to work over much slower connections.
|
||||
$HttpClient.Timeout = New-TimeSpan -Minutes 20
|
||||
$Response = $HttpClient.GetAsync("${Uri}${FeedCredential}").Result
|
||||
if (($Response -eq $null) -or (-not ($Response.IsSuccessStatusCode))) {
|
||||
# The feed credential is potentially sensitive info. Do not log FeedCredential to console output.
|
||||
$ErrorMsg = "Failed to download $Uri."
|
||||
if ($Response -ne $null) {
|
||||
$ErrorMsg += " $Response"
|
||||
}
|
||||
|
||||
throw $ErrorMsg
|
||||
}
|
||||
|
||||
return $Response
|
||||
}
|
||||
finally {
|
||||
if ($HttpClient -ne $null) {
|
||||
$HttpClient.Dispose()
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Coherent) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$VersionFileUrl = $null
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version"
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$VersionFileUrl = "$UncachedFeed/aspnetcore/Runtime/$Channel/latest.version"
|
||||
}
|
||||
# Currently, the WindowsDesktop runtime is manufactured with the .Net core runtime
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
$VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version"
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
if ($Coherent) {
|
||||
$VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.coherent.version"
|
||||
}
|
||||
else {
|
||||
$VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version"
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
}
|
||||
try {
|
||||
$Response = GetHTTPResponse -Uri $VersionFileUrl
|
||||
}
|
||||
catch {
|
||||
throw "Could not resolve version information."
|
||||
}
|
||||
$StringContent = $Response.Content.ReadAsStringAsync().Result
|
||||
|
||||
switch ($Response.Content.Headers.ContentType) {
|
||||
{ ($_ -eq "application/octet-stream") } { $VersionText = $StringContent }
|
||||
{ ($_ -eq "text/plain") } { $VersionText = $StringContent }
|
||||
{ ($_ -eq "text/plain; charset=UTF-8") } { $VersionText = $StringContent }
|
||||
default { throw "``$Response.Content.Headers.ContentType`` is an unknown .version file content type." }
|
||||
}
|
||||
|
||||
$VersionInfo = Get-Version-Info-From-Version-Text $VersionText
|
||||
|
||||
return $VersionInfo
|
||||
}
|
||||
|
||||
function Parse-Jsonfile-For-Version([string]$JSonFile) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
If (-Not (Test-Path $JSonFile)) {
|
||||
throw "Unable to find '$JSonFile'"
|
||||
}
|
||||
try {
|
||||
$JSonContent = Get-Content($JSonFile) -Raw | ConvertFrom-Json | Select-Object -expand "sdk" -ErrorAction SilentlyContinue
|
||||
}
|
||||
catch {
|
||||
throw "Json file unreadable: '$JSonFile'"
|
||||
}
|
||||
if ($JSonContent) {
|
||||
try {
|
||||
$JSonContent.PSObject.Properties | ForEach-Object {
|
||||
$PropertyName = $_.Name
|
||||
if ($PropertyName -eq "version") {
|
||||
$Version = $_.Value
|
||||
Say-Verbose "Version = $Version"
|
||||
}
|
||||
}
|
||||
}
|
||||
catch {
|
||||
throw "Unable to parse the SDK node in '$JSonFile'"
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw "Unable to find the SDK node in '$JSonFile'"
|
||||
}
|
||||
If ($Version -eq $null) {
|
||||
throw "Unable to find the SDK:version node in '$JSonFile'"
|
||||
}
|
||||
return $Version
|
||||
}
|
||||
|
||||
function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel, [string]$Version, [string]$JSonFile) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if (-not $JSonFile) {
|
||||
switch ($Version.ToLower()) {
|
||||
{ $_ -eq "latest" } {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $False
|
||||
return $LatestVersionInfo.Version
|
||||
}
|
||||
{ $_ -eq "coherent" } {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $True
|
||||
return $LatestVersionInfo.Version
|
||||
}
|
||||
default { return $Version }
|
||||
}
|
||||
}
|
||||
else {
|
||||
return Parse-Jsonfile-For-Version $JSonFile
|
||||
}
|
||||
}
|
||||
|
||||
function Get-Download-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
# If anything fails in this lookup it will default to $SpecificVersion
|
||||
$SpecificProductVersion = Get-Product-Version -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion
|
||||
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$PayloadURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/aspnetcore-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/windowsdesktop-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-sdk-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
}
|
||||
|
||||
Say-Verbose "Constructed primary named payload URL: $PayloadURL"
|
||||
|
||||
return $PayloadURL, $SpecificProductVersion
|
||||
}
|
||||
|
||||
function Get-LegacyDownload-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if (-not $Runtime) {
|
||||
$PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-dev-win-$CLIArchitecture.$SpecificVersion.zip"
|
||||
}
|
||||
elseif ($Runtime -eq "dotnet") {
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-win-$CLIArchitecture.$SpecificVersion.zip"
|
||||
}
|
||||
else {
|
||||
return $null
|
||||
}
|
||||
|
||||
Say-Verbose "Constructed legacy named payload URL: $PayloadURL"
|
||||
|
||||
return $PayloadURL
|
||||
}
|
||||
|
||||
function Get-Product-Version([string]$AzureFeed, [string]$SpecificVersion) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Sdk/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
else {
|
||||
throw "Invalid value specified for `$Runtime"
|
||||
}
|
||||
|
||||
Say-Verbose "Checking for existence of $ProductVersionTxtURL"
|
||||
|
||||
try {
|
||||
$productVersionResponse = GetHTTPResponse($productVersionTxtUrl)
|
||||
|
||||
if ($productVersionResponse.StatusCode -eq 200) {
|
||||
$productVersion = $productVersionResponse.Content.ReadAsStringAsync().Result.Trim()
|
||||
if ($productVersion -ne $SpecificVersion)
|
||||
{
|
||||
Say "Using alternate version $productVersion found in $ProductVersionTxtURL"
|
||||
}
|
||||
|
||||
return $productVersion
|
||||
}
|
||||
else {
|
||||
Say-Verbose "Got StatusCode $($productVersionResponse.StatusCode) trying to get productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion"
|
||||
$productVersion = $SpecificVersion
|
||||
}
|
||||
} catch {
|
||||
Say-Verbose "Could not read productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion"
|
||||
$productVersion = $SpecificVersion
|
||||
}
|
||||
|
||||
return $productVersion
|
||||
}
|
||||
|
||||
function Get-User-Share-Path() {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$InstallRoot = $env:DOTNET_INSTALL_DIR
|
||||
if (!$InstallRoot) {
|
||||
$InstallRoot = "$env:LocalAppData\Microsoft\dotnet"
|
||||
}
|
||||
return $InstallRoot
|
||||
}
|
||||
|
||||
function Resolve-Installation-Path([string]$InstallDir) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if ($InstallDir -eq "<auto>") {
|
||||
return Get-User-Share-Path
|
||||
}
|
||||
return $InstallDir
|
||||
}
|
||||
|
||||
function Is-Dotnet-Package-Installed([string]$InstallRoot, [string]$RelativePathToPackage, [string]$SpecificVersion) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$DotnetPackagePath = Join-Path -Path $InstallRoot -ChildPath $RelativePathToPackage | Join-Path -ChildPath $SpecificVersion
|
||||
Say-Verbose "Is-Dotnet-Package-Installed: DotnetPackagePath=$DotnetPackagePath"
|
||||
return Test-Path $DotnetPackagePath -PathType Container
|
||||
}
|
||||
|
||||
function Get-Absolute-Path([string]$RelativeOrAbsolutePath) {
|
||||
# Too much spam
|
||||
# Say-Invocation $MyInvocation
|
||||
|
||||
return $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath($RelativeOrAbsolutePath)
|
||||
}
|
||||
|
||||
function Get-Path-Prefix-With-Version($path) {
|
||||
$match = [regex]::match($path, $VersionRegEx)
|
||||
if ($match.Success) {
|
||||
return $entry.FullName.Substring(0, $match.Index + $match.Length)
|
||||
}
|
||||
|
||||
return $null
|
||||
}
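Get-Path-Prefix-With-Version above relies on $VersionRegEx (a "/<digits>.<digits>.../" segment) to find the versioned directory inside an archive entry path, so already-installed versioned directories can be skipped during extraction. A rough bash equivalent with a hypothetical entry name:

    # hypothetical zip entry path
    entry="sdk/6.0.100/dotnet.dll"
    # everything up to and including the first /<major>.<minor>.../ segment
    prefix=$(echo "$entry" | grep -oE '^.*/[0-9]+\.[0-9]+[^/]*/')
    echo "$prefix"   # prints sdk/6.0.100/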
|
||||
|
||||
function Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package([System.IO.Compression.ZipArchive]$Zip, [string]$OutPath) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$ret = @()
|
||||
foreach ($entry in $Zip.Entries) {
|
||||
$dir = Get-Path-Prefix-With-Version $entry.FullName
|
||||
if ($dir -ne $null) {
|
||||
$path = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $dir)
|
||||
if (-Not (Test-Path $path -PathType Container)) {
|
||||
$ret += $dir
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
$ret = $ret | Sort-Object | Get-Unique
|
||||
|
||||
$values = ($ret | foreach { "$_" }) -join ";"
|
||||
Say-Verbose "Directories to unpack: $values"
|
||||
|
||||
return $ret
|
||||
}
|
||||
|
||||
# Example zip content and extraction algorithm:
|
||||
# Rule: files if extracted are always being extracted to the same relative path locally
|
||||
# .\
|
||||
# a.exe # file does not exist locally, extract
|
||||
# b.dll # file exists locally, override only if $OverrideFiles set
|
||||
# aaa\ # same rules as for files
|
||||
# ...
|
||||
# abc\1.0.0\ # directory contains version and exists locally
|
||||
# ... # do not extract content under versioned part
|
||||
# abc\asd\ # same rules as for files
|
||||
# ...
|
||||
# def\ghi\1.0.1\ # directory contains version and does not exist locally
|
||||
# ... # extract content
|
||||
function Extract-Dotnet-Package([string]$ZipPath, [string]$OutPath) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
Load-Assembly -Assembly System.IO.Compression.FileSystem
|
||||
Set-Variable -Name Zip
|
||||
try {
|
||||
$Zip = [System.IO.Compression.ZipFile]::OpenRead($ZipPath)
|
||||
|
||||
$DirectoriesToUnpack = Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package -Zip $Zip -OutPath $OutPath
|
||||
|
||||
foreach ($entry in $Zip.Entries) {
|
||||
$PathWithVersion = Get-Path-Prefix-With-Version $entry.FullName
|
||||
if (($PathWithVersion -eq $null) -Or ($DirectoriesToUnpack -contains $PathWithVersion)) {
|
||||
$DestinationPath = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $entry.FullName)
|
||||
$DestinationDir = Split-Path -Parent $DestinationPath
|
||||
$OverrideFiles=$OverrideNonVersionedFiles -Or (-Not (Test-Path $DestinationPath))
|
||||
if ((-Not $DestinationPath.EndsWith("\")) -And $OverrideFiles) {
|
||||
New-Item -ItemType Directory -Force -Path $DestinationDir | Out-Null
|
||||
[System.IO.Compression.ZipFileExtensions]::ExtractToFile($entry, $DestinationPath, $OverrideNonVersionedFiles)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
finally {
|
||||
if ($Zip -ne $null) {
|
||||
$Zip.Dispose()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function DownloadFile($Source, [string]$OutPath) {
|
||||
if ($Source -notlike "http*") {
|
||||
# Using System.IO.Path.GetFullPath to get the current directory
|
||||
# does not work in this context - $pwd gives the current directory
|
||||
if (![System.IO.Path]::IsPathRooted($Source)) {
|
||||
$Source = $(Join-Path -Path $pwd -ChildPath $Source)
|
||||
}
|
||||
$Source = Get-Absolute-Path $Source
|
||||
Say "Copying file from $Source to $OutPath"
|
||||
Copy-Item $Source $OutPath
|
||||
return
|
||||
}
|
||||
|
||||
$Stream = $null
|
||||
|
||||
try {
|
||||
$Response = GetHTTPResponse -Uri $Source
|
||||
$Stream = $Response.Content.ReadAsStreamAsync().Result
|
||||
$File = [System.IO.File]::Create($OutPath)
|
||||
$Stream.CopyTo($File)
|
||||
$File.Close()
|
||||
}
|
||||
finally {
|
||||
if ($Stream -ne $null) {
|
||||
$Stream.Dispose()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolderRelativePath) {
|
||||
$BinPath = Get-Absolute-Path $(Join-Path -Path $InstallRoot -ChildPath $BinFolderRelativePath)
|
||||
if (-Not $NoPath) {
|
||||
$SuffixedBinPath = "$BinPath;"
|
||||
if (-Not $env:path.Contains($SuffixedBinPath)) {
|
||||
Say "Adding to current process PATH: `"$BinPath`". Note: This change will not be visible if PowerShell was run as a child process."
|
||||
$env:path = $SuffixedBinPath + $env:path
|
||||
} else {
|
||||
Say-Verbose "Current process PATH already contains `"$BinPath`""
|
||||
}
|
||||
}
|
||||
else {
|
||||
Say "Binaries of dotnet can be found in $BinPath"
|
||||
}
|
||||
}
|
||||
|
||||
$CLIArchitecture = Get-CLIArchitecture-From-Architecture $Architecture
|
||||
$SpecificVersion = Get-Specific-Version-From-Version -AzureFeed $AzureFeed -Channel $Channel -Version $Version -JSonFile $JSonFile
|
||||
$DownloadLink, $EffectiveVersion = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
$LegacyDownloadLink = Get-LegacyDownload-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
|
||||
$InstallRoot = Resolve-Installation-Path $InstallDir
|
||||
Say-Verbose "InstallRoot: $InstallRoot"
|
||||
$ScriptName = $MyInvocation.MyCommand.Name
|
||||
|
||||
if ($DryRun) {
|
||||
Say "Payload URLs:"
|
||||
Say "Primary named payload URL: $DownloadLink"
|
||||
if ($LegacyDownloadLink) {
|
||||
Say "Legacy named payload URL: $LegacyDownloadLink"
|
||||
}
|
||||
$RepeatableCommand = ".\$ScriptName -Version `"$SpecificVersion`" -InstallDir `"$InstallRoot`" -Architecture `"$CLIArchitecture`""
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$RepeatableCommand+=" -Runtime `"dotnet`""
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$RepeatableCommand+=" -Runtime `"aspnetcore`""
|
||||
}
|
||||
foreach ($key in $MyInvocation.BoundParameters.Keys) {
|
||||
if (-not (@("Architecture","Channel","DryRun","InstallDir","Runtime","SharedRuntime","Version") -contains $key)) {
|
||||
$RepeatableCommand+=" -$key `"$($MyInvocation.BoundParameters[$key])`""
|
||||
}
|
||||
}
|
||||
Say "Repeatable invocation: $RepeatableCommand"
|
||||
exit 0
|
||||
}
|
||||
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$assetName = ".NET Core Runtime"
|
||||
$dotnetPackageRelativePath = "shared\Microsoft.NETCore.App"
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$assetName = "ASP.NET Core Runtime"
|
||||
$dotnetPackageRelativePath = "shared\Microsoft.AspNetCore.App"
|
||||
}
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
$assetName = ".NET Core Windows Desktop Runtime"
|
||||
$dotnetPackageRelativePath = "shared\Microsoft.WindowsDesktop.App"
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$assetName = ".NET Core SDK"
|
||||
$dotnetPackageRelativePath = "sdk"
|
||||
}
|
||||
else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
}
|
||||
|
||||
if ($SpecificVersion -ne $EffectiveVersion)
|
||||
{
|
||||
Say "Performing installation checks for effective version: $EffectiveVersion"
|
||||
$SpecificVersion = $EffectiveVersion
|
||||
}
|
||||
|
||||
# Check if the SDK version is already installed.
|
||||
$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
|
||||
if ($isAssetInstalled) {
|
||||
Say "$assetName version $SpecificVersion is already installed."
|
||||
Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
|
||||
exit 0
|
||||
}
|
||||
|
||||
New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null
|
||||
|
||||
$installDrive = $((Get-Item $InstallRoot).PSDrive.Name);
|
||||
$diskInfo = Get-PSDrive -Name $installDrive
|
||||
if ($diskInfo.Free / 1MB -le 100) {
|
||||
Say "There is not enough disk space on drive ${installDrive}:"
|
||||
exit 0
|
||||
}
|
||||
|
||||
$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
|
||||
Say-Verbose "Zip path: $ZipPath"
|
||||
|
||||
$DownloadFailed = $false
|
||||
Say "Downloading link: $DownloadLink"
|
||||
try {
|
||||
DownloadFile -Source $DownloadLink -OutPath $ZipPath
|
||||
}
|
||||
catch {
|
||||
Say "Cannot download: $DownloadLink"
|
||||
if ($LegacyDownloadLink) {
|
||||
$DownloadLink = $LegacyDownloadLink
|
||||
$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
|
||||
Say-Verbose "Legacy zip path: $ZipPath"
|
||||
Say "Downloading legacy link: $DownloadLink"
|
||||
try {
|
||||
DownloadFile -Source $DownloadLink -OutPath $ZipPath
|
||||
}
|
||||
catch {
|
||||
Say "Cannot download: $DownloadLink"
|
||||
$DownloadFailed = $true
|
||||
}
|
||||
}
|
||||
else {
|
||||
$DownloadFailed = $true
|
||||
}
|
||||
}
|
||||
|
||||
if ($DownloadFailed) {
|
||||
throw "Could not find/download: `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
|
||||
}
|
||||
|
||||
Say "Extracting zip from $DownloadLink"
|
||||
Extract-Dotnet-Package -ZipPath $ZipPath -OutPath $InstallRoot
|
||||
|
||||
# Check if the SDK version is installed; if not, fail the installation.
|
||||
$isAssetInstalled = $false
|
||||
|
||||
# if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
|
||||
if ($SpecificVersion -Match "rtm" -or $SpecificVersion -Match "servicing") {
|
||||
$ReleaseVersion = $SpecificVersion.Split("-")[0]
|
||||
Say-Verbose "Checking installation: version = $ReleaseVersion"
|
||||
$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $ReleaseVersion
|
||||
}
|
||||
|
||||
# Check if the SDK version is installed.
|
||||
if (!$isAssetInstalled) {
|
||||
Say-Verbose "Checking installation: version = $SpecificVersion"
|
||||
$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
|
||||
}
|
||||
|
||||
if (!$isAssetInstalled) {
|
||||
throw "`"$assetName`" with version = $SpecificVersion failed to install with an unknown error."
|
||||
}
|
||||
|
||||
Remove-Item $ZipPath
|
||||
|
||||
Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
|
||||
|
||||
Say "Installation finished"
|
||||
exit 0
|
Diff not shown because the file is too large.
|
@ -19,7 +19,7 @@ runtime='dotnet'
|
|||
runtimeSourceFeed=''
|
||||
runtimeSourceFeedKey=''
|
||||
while [[ $# > 0 ]]; do
|
||||
opt="$(echo "$1" | awk '{print tolower($0)}')"
|
||||
opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
|
||||
case "$opt" in
|
||||
-version|-v)
|
||||
shift
|
||||
|
@ -49,24 +49,22 @@ while [[ $# > 0 ]]; do
|
|||
shift
|
||||
done
|
||||
|
||||
# Use uname to determine what the CPU is.
|
||||
cpuname=$(uname -p)
|
||||
# Some Linux platforms report unknown for platform, but the arch for machine.
|
||||
if [[ "$cpuname" == "unknown" ]]; then
|
||||
cpuname=$(uname -m)
|
||||
fi
|
||||
|
||||
# Use uname to determine what the CPU is, see https://en.wikipedia.org/wiki/Uname#Examples
|
||||
cpuname=$(uname -m)
|
||||
case $cpuname in
|
||||
aarch64)
|
||||
arm64|aarch64)
|
||||
buildarch=arm64
|
||||
;;
|
||||
loongarch64)
|
||||
buildarch=loongarch64
|
||||
;;
|
||||
amd64|x86_64)
|
||||
buildarch=x64
|
||||
;;
|
||||
armv*l)
|
||||
buildarch=arm
|
||||
;;
|
||||
i686)
|
||||
i[3-6]86)
|
||||
buildarch=x86
|
||||
;;
|
||||
*)
|
||||
|
@ -75,7 +73,7 @@ case $cpuname in
|
|||
;;
|
||||
esac
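The rewritten detection above keys off uname -m alone, since uname -p reports "unknown" on several Linux platforms. Typical values and the buildarch they map to, for orientation; run uname -m on the target machine to see the real value:

    uname -m
    #   x86_64  -> x64     (64-bit x86 Linux)
    #   aarch64 -> arm64   (64-bit ARM Linux; some systems report arm64)
    #   armv7l  -> arm     (32-bit ARM)
    #   i686    -> x86     (32-bit x86)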
|
||||
|
||||
dotnetRoot="$repo_root/.dotnet"
|
||||
dotnetRoot="${repo_root}.dotnet"
|
||||
if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then
|
||||
dotnetRoot="$dotnetRoot/$architecture"
|
||||
fi
|
||||
|
|
|
@ -1,86 +0,0 @@
|
|||
Param(
|
||||
[Parameter(Mandatory=$true)][string] $barToken, # Token generated at https://maestro-prod.westus2.cloudapp.azure.com/Account/Tokens
|
||||
[Parameter(Mandatory=$true)][string] $gitHubPat, # GitHub personal access token from https://github.com/settings/tokens (no auth scopes needed)
|
||||
[Parameter(Mandatory=$true)][string] $azdoPat, # Azure Dev Ops tokens from https://dev.azure.com/dnceng/_details/security/tokens (code read scope needed)
|
||||
[Parameter(Mandatory=$true)][string] $outputFolder, # Where the graphviz.txt file will be created
|
||||
[string] $darcVersion, # darc's version
|
||||
[string] $graphvizVersion = '2.38', # GraphViz version
|
||||
[switch] $includeToolset # Whether the graph should include toolset dependencies or not. i.e. arcade, optimization. For more about
|
||||
# toolset dependencies see https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#toolset-vs-product-dependencies
|
||||
)
|
||||
|
||||
function CheckExitCode ([string]$stage)
|
||||
{
|
||||
$exitCode = $LASTEXITCODE
|
||||
if ($exitCode -ne 0) {
|
||||
Write-PipelineTelemetryError -Category 'Arcade' -Message "Something failed in stage: '$stage'. Check for errors above. Exiting now..."
|
||||
ExitWithExitCode $exitCode
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
$ErrorActionPreference = 'Stop'
|
||||
. $PSScriptRoot\tools.ps1
|
||||
|
||||
Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1')
|
||||
|
||||
Push-Location $PSScriptRoot
|
||||
|
||||
Write-Host 'Installing darc...'
|
||||
. .\darc-init.ps1 -darcVersion $darcVersion
|
||||
CheckExitCode 'Running darc-init'
|
||||
|
||||
$engCommonBaseDir = Join-Path $PSScriptRoot 'native\'
|
||||
$graphvizInstallDir = CommonLibrary\Get-NativeInstallDirectory
|
||||
$nativeToolBaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external'
|
||||
$installBin = Join-Path $graphvizInstallDir 'bin'
|
||||
|
||||
Write-Host 'Installing dot...'
|
||||
.\native\install-tool.ps1 -ToolName graphviz -InstallPath $installBin -BaseUri $nativeToolBaseUri -CommonLibraryDirectory $engCommonBaseDir -Version $graphvizVersion -Verbose
|
||||
|
||||
$darcExe = "$env:USERPROFILE\.dotnet\tools"
|
||||
$darcExe = Resolve-Path "$darcExe\darc.exe"
|
||||
|
||||
Create-Directory $outputFolder
|
||||
|
||||
# Generate 3 graph descriptions:
|
||||
# 1. Flat with coherency information
|
||||
# 2. Graphviz (dot) file
|
||||
# 3. Standard dependency graph
|
||||
$graphVizFilePath = "$outputFolder\graphviz.txt"
|
||||
$graphVizImageFilePath = "$outputFolder\graph.png"
|
||||
$normalGraphFilePath = "$outputFolder\graph-full.txt"
|
||||
$flatGraphFilePath = "$outputFolder\graph-flat.txt"
|
||||
$baseOptions = @( '--github-pat', "$gitHubPat", '--azdev-pat', "$azdoPat", '--password', "$barToken" )
|
||||
|
||||
if ($includeToolset) {
|
||||
Write-Host 'Toolsets will be included in the graph...'
|
||||
$baseOptions += @( '--include-toolset' )
|
||||
}
|
||||
|
||||
Write-Host 'Generating standard dependency graph...'
|
||||
& "$darcExe" get-dependency-graph @baseOptions --output-file $normalGraphFilePath
|
||||
CheckExitCode 'Generating normal dependency graph'
|
||||
|
||||
Write-Host 'Generating flat dependency graph and graphviz file...'
|
||||
& "$darcExe" get-dependency-graph @baseOptions --flat --coherency --graphviz $graphVizFilePath --output-file $flatGraphFilePath
|
||||
CheckExitCode 'Generating flat and graphviz dependency graph'
|
||||
|
||||
Write-Host "Generating graph image $graphVizFilePath"
|
||||
$dotFilePath = Join-Path $installBin "graphviz\$graphvizVersion\release\bin\dot.exe"
|
||||
& "$dotFilePath" -Tpng -o"$graphVizImageFilePath" "$graphVizFilePath"
|
||||
CheckExitCode 'Generating graphviz image'
|
||||
|
||||
Write-Host "'$graphVizFilePath', '$flatGraphFilePath', '$normalGraphFilePath' and '$graphVizImageFilePath' created!"
|
||||
}
|
||||
catch {
|
||||
if (!$includeToolset) {
|
||||
Write-Host 'This might be a toolset repo which includes only toolset dependencies. ' -NoNewline -ForegroundColor Yellow
|
||||
Write-Host 'Since -includeToolset is not set there is no graph to create. Include -includeToolset and try again...' -ForegroundColor Yellow
|
||||
}
|
||||
Write-Host $_.ScriptStackTrace
|
||||
Write-PipelineTelemetryError -Category 'Arcade' -Message $_
|
||||
ExitWithExitCode 1
|
||||
} finally {
|
||||
Pop-Location
|
||||
}
|
|
@ -0,0 +1,115 @@
|
|||
Param(
|
||||
[Parameter(Mandatory=$true)][string] $SourcesDirectory, # Directory where source files live; if using a Localize directory it should live in here
|
||||
[string] $LanguageSet = 'VS_Main_Languages', # Language set to be used in the LocProject.json
|
||||
[switch] $UseCheckedInLocProjectJson, # When set, generates a LocProject.json and compares it to one that already exists in the repo; otherwise just generates one
|
||||
[switch] $CreateNeutralXlfs # Creates neutral xlf files. Only set to false when running locally
|
||||
)
|
||||
|
||||
# Generates LocProject.json files for the OneLocBuild task. OneLocBuildTask is described here:
|
||||
# https://ceapex.visualstudio.com/CEINTL/_wiki/wikis/CEINTL.wiki/107/Localization-with-OneLocBuild-Task
|
||||
|
||||
Set-StrictMode -Version 2.0
|
||||
$ErrorActionPreference = "Stop"
|
||||
. $PSScriptRoot\pipeline-logging-functions.ps1
|
||||
|
||||
$exclusionsFilePath = "$SourcesDirectory\eng\Localize\LocExclusions.json"
|
||||
$exclusions = @{ Exclusions = @() }
|
||||
if (Test-Path -Path $exclusionsFilePath)
|
||||
{
|
||||
$exclusions = Get-Content "$exclusionsFilePath" | ConvertFrom-Json
|
||||
}
|
||||
|
||||
Push-Location "$SourcesDirectory" # push location for Resolve-Path -Relative to work
|
||||
|
||||
# Template files
|
||||
$jsonFiles = @()
|
||||
$jsonTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\.template\.config\\localize\\.+\.en\.json" } # .NET templating pattern
|
||||
$jsonTemplateFiles | ForEach-Object {
|
||||
$null = $_.Name -Match "(.+)\.[\w-]+\.json" # matches '[filename].[langcode].json
|
||||
|
||||
$destinationFile = "$($_.Directory.FullName)\$($Matches.1).json"
|
||||
$jsonFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
|
||||
}
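The loop above matches names of the form <filename>.<langcode>.json and copies each localized template to a language-neutral <filename>.json next to it. The same rename, sketched in bash with a hypothetical file name (the real logic is the PowerShell regex above):

    # hypothetical localized template file
    f="dotnetcli.host.en.json"
    # drop the ".<langcode>" segment before ".json"
    neutral="$(echo "$f" | sed -E 's/\.[A-Za-z-]+\.json$/.json/')"
    echo "$neutral"   # prints dotnetcli.host.json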
|
||||
|
||||
$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern
|
||||
|
||||
$xlfFiles = @()
|
||||
|
||||
$allXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.xlf"
|
||||
$langXlfFiles = @()
|
||||
if ($allXlfFiles) {
|
||||
$null = $allXlfFiles[0].FullName -Match "\.([\w-]+)\.xlf" # matches '[langcode].xlf'
|
||||
$firstLangCode = $Matches.1
|
||||
$langXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.$firstLangCode.xlf"
|
||||
}
|
||||
$langXlfFiles | ForEach-Object {
|
||||
$null = $_.Name -Match "(.+)\.[\w-]+\.xlf" # matches '[filename].[langcode].xlf
|
||||
|
||||
$destinationFile = "$($_.Directory.FullName)\$($Matches.1).xlf"
|
||||
$xlfFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
|
||||
}
|
||||
|
||||
$locFiles = $jsonFiles + $jsonWinformsTemplateFiles + $xlfFiles
|
||||
|
||||
$locJson = @{
|
||||
Projects = @(
|
||||
@{
|
||||
LanguageSet = $LanguageSet
|
||||
LocItems = @(
|
||||
$locFiles | ForEach-Object {
|
||||
$outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")"
|
||||
$continue = $true
|
||||
foreach ($exclusion in $exclusions.Exclusions) {
|
||||
if ($outputPath.Contains($exclusion))
|
||||
{
|
||||
$continue = $false
|
||||
}
|
||||
}
|
||||
$sourceFile = ($_.FullName | Resolve-Path -Relative)
|
||||
if (!$CreateNeutralXlfs -and $_.Extension -eq '.xlf') {
|
||||
Remove-Item -Path $sourceFile
|
||||
}
|
||||
if ($continue)
|
||||
{
|
||||
if ($_.Directory.Name -eq 'en' -and $_.Extension -eq '.json') {
|
||||
return @{
|
||||
SourceFile = $sourceFile
|
||||
CopyOption = "LangIDOnPath"
|
||||
OutputPath = "$($_.Directory.Parent.FullName | Resolve-Path -Relative)\"
|
||||
}
|
||||
}
|
||||
else {
|
||||
return @{
|
||||
SourceFile = $sourceFile
|
||||
CopyOption = "LangIDOnName"
|
||||
OutputPath = $outputPath
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
$json = ConvertTo-Json $locJson -Depth 5
|
||||
Write-Host "LocProject.json generated:`n`n$json`n`n"
|
||||
Pop-Location
|
||||
|
||||
if (!$UseCheckedInLocProjectJson) {
|
||||
New-Item "$SourcesDirectory\eng\Localize\LocProject.json" -Force # Need this to make sure the Localize directory is created
|
||||
Set-Content "$SourcesDirectory\eng\Localize\LocProject.json" $json
|
||||
}
|
||||
else {
|
||||
New-Item "$SourcesDirectory\eng\Localize\LocProject-generated.json" -Force # Need this to make sure the Localize directory is created
|
||||
Set-Content "$SourcesDirectory\eng\Localize\LocProject-generated.json" $json
|
||||
|
||||
if ((Get-FileHash "$SourcesDirectory\eng\Localize\LocProject-generated.json").Hash -ne (Get-FileHash "$SourcesDirectory\eng\Localize\LocProject.json").Hash) {
|
||||
Write-PipelineTelemetryError -Category "OneLocBuild" -Message "Existing LocProject.json differs from generated LocProject.json. Download LocProject-generated.json and compare them."
|
||||
|
||||
exit 1
|
||||
}
|
||||
else {
|
||||
Write-Host "Generated LocProject.json and current LocProject.json are identical."
|
||||
}
|
||||
}
|
|
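For orientation, the $locJson object assembled by the script above serializes to JSON of roughly the following shape; the SourceFile and OutputPath values shown here are hypothetical placeholders for illustration, not output produced by this change.

{
  "Projects": [
    {
      "LanguageSet": "VS_Main_Languages",
      "LocItems": [
        {
          "SourceFile": ".\\src\\SomeProject\\xlf\\Resources.xlf",
          "CopyOption": "LangIDOnName",
          "OutputPath": ".\\src\\SomeProject\\xlf\\"
        }
      ]
    }
  ]
}
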
@@ -0,0 +1,21 @@
Param(
[Parameter(Mandatory=$true)][string] $ManifestDirPath # Manifest directory where sbom will be placed
)

. $PSScriptRoot\pipeline-logging-functions.ps1

Write-Host "Creating dir $ManifestDirPath"
# create directory for sbom manifest to be placed
if (!(Test-Path -path $ManifestDirPath))
{
New-Item -ItemType Directory -path $ManifestDirPath
Write-Host "Successfully created directory $ManifestDirPath"
}
else{
Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder."
}

Write-Host "Updating artifact name"
$artifact_name = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM" -replace '["/:<>\\|?@*"() ]', '_'
Write-Host "Artifact name $artifact_name"
Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$artifact_name"

@@ -0,0 +1,34 @@
#!/usr/bin/env bash

source="${BASH_SOURCE[0]}"

# resolve $SOURCE until the file is no longer a symlink
while [[ -h $source ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"

# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
. $scriptroot/pipeline-logging-functions.sh

manifest_dir=$1

if [ ! -d "$manifest_dir" ] ; then
mkdir -p "$manifest_dir"
echo "Sbom directory created." $manifest_dir
else
Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder."
fi

artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM"
echo "Artifact name before : "$artifact_name
# replace all special characters with _, some builds use special characters like : in Agent.Jobname, that is not a permissible name while uploading artifacts.
safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}"
echo "Artifact name after : "$safe_artifact_name
export ARTIFACT_NAME=$safe_artifact_name
echo "##vso[task.setvariable variable=ARTIFACT_NAME]$safe_artifact_name"

exit 0
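The parameter expansion above is what turns stage and job names into a safe artifact name. A minimal sketch of its effect, using hypothetical stage and job values that are not part of this change:

SYSTEM_STAGENAME="Build"
AGENT_JOBNAME="Windows NT x64:Release"
artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM"
safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}"
echo "$safe_artifact_name" # spaces, colons and the other listed characters become '_', e.g. Build_Windows_NT_x64_Release_SBOM
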
@@ -31,6 +31,10 @@ Wait time between retry attempts in seconds
.PARAMETER GlobalJsonFile
File path to global.json file

.PARAMETER PathPromotion
Optional switch to enable either promote native tools specified in the global.json to the path (in Azure Pipelines)
or break the build if a native tool is not found on the path (on a local dev machine)

.NOTES
#>
[CmdletBinding(PositionalBinding=$false)]

@@ -41,7 +45,8 @@ Param (
[switch] $Force = $False,
[int] $DownloadRetries = 5,
[int] $RetryWaitTimeInSeconds = 30,
[string] $GlobalJsonFile
[string] $GlobalJsonFile,
[switch] $PathPromotion
)

if (!$GlobalJsonFile) {

@@ -77,53 +82,100 @@ try {
|
|||
ConvertFrom-Json |
|
||||
Select-Object -Expand 'native-tools' -ErrorAction SilentlyContinue
|
||||
if ($NativeTools) {
|
||||
$NativeTools.PSObject.Properties | ForEach-Object {
|
||||
$ToolName = $_.Name
|
||||
$ToolVersion = $_.Value
|
||||
$LocalInstallerArguments = @{ ToolName = "$ToolName" }
|
||||
$LocalInstallerArguments += @{ InstallPath = "$InstallBin" }
|
||||
$LocalInstallerArguments += @{ BaseUri = "$BaseUri" }
|
||||
$LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" }
|
||||
$LocalInstallerArguments += @{ Version = "$ToolVersion" }
|
||||
if ($PathPromotion -eq $True) {
|
||||
if ($env:SYSTEM_TEAMPROJECT) { # check to see if we're in an Azure pipelines build
|
||||
$NativeTools.PSObject.Properties | ForEach-Object {
|
||||
$ToolName = $_.Name
|
||||
$ToolVersion = $_.Value
|
||||
$InstalledTools = @{}
|
||||
|
||||
if ($Verbose) {
|
||||
$LocalInstallerArguments += @{ Verbose = $True }
|
||||
if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) {
|
||||
if ($ToolVersion -eq "latest") {
|
||||
$ToolVersion = ""
|
||||
}
|
||||
$ArcadeToolsDirectory = "C:\arcade-tools"
|
||||
if (-not (Test-Path $ArcadeToolsDirectory)) {
|
||||
Write-Error "Arcade tools directory '$ArcadeToolsDirectory' was not found; artifacts were not properly installed."
|
||||
exit 1
|
||||
}
|
||||
$ToolDirectory = (Get-ChildItem -Path "$ArcadeToolsDirectory" -Filter "$ToolName-$ToolVersion*" | Sort-Object -Descending)[0]
|
||||
if ([string]::IsNullOrWhiteSpace($ToolDirectory)) {
|
||||
Write-Error "Unable to find directory for $ToolName $ToolVersion; please make sure the tool is installed on this image."
|
||||
exit 1
|
||||
}
|
||||
$BinPathFile = "$($ToolDirectory.FullName)\binpath.txt"
|
||||
if (-not (Test-Path -Path "$BinPathFile")) {
|
||||
Write-Error "Unable to find binpath.txt in '$($ToolDirectory.FullName)' ($ToolName $ToolVersion); artifact is either installed incorrectly or is not a bootstrappable tool."
|
||||
exit 1
|
||||
}
|
||||
$BinPath = Get-Content "$BinPathFile"
|
||||
$ToolPath = Convert-Path -Path $BinPath
|
||||
Write-Host "Adding $ToolName to the path ($ToolPath)..."
|
||||
Write-Host "##vso[task.prependpath]$ToolPath"
|
||||
$InstalledTools += @{ $ToolName = $ToolDirectory.FullName }
|
||||
}
|
||||
}
|
||||
return $InstalledTools
|
||||
} else {
|
||||
$NativeTools.PSObject.Properties | ForEach-Object {
|
||||
$ToolName = $_.Name
|
||||
$ToolVersion = $_.Value
|
||||
|
||||
if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) {
|
||||
Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message "$ToolName not found on path. Please install $ToolName $ToolVersion before proceeding."
|
||||
}
|
||||
}
|
||||
exit 0
|
||||
}
|
||||
if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') {
|
||||
if($Force) {
|
||||
$LocalInstallerArguments += @{ Force = $True }
|
||||
} else {
|
||||
$NativeTools.PSObject.Properties | ForEach-Object {
|
||||
$ToolName = $_.Name
|
||||
$ToolVersion = $_.Value
|
||||
$LocalInstallerArguments = @{ ToolName = "$ToolName" }
|
||||
$LocalInstallerArguments += @{ InstallPath = "$InstallBin" }
|
||||
$LocalInstallerArguments += @{ BaseUri = "$BaseUri" }
|
||||
$LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" }
|
||||
$LocalInstallerArguments += @{ Version = "$ToolVersion" }
|
||||
|
||||
if ($Verbose) {
|
||||
$LocalInstallerArguments += @{ Verbose = $True }
|
||||
}
|
||||
if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') {
|
||||
if($Force) {
|
||||
$LocalInstallerArguments += @{ Force = $True }
|
||||
}
|
||||
}
|
||||
if ($Clean) {
|
||||
$LocalInstallerArguments += @{ Clean = $True }
|
||||
}
|
||||
|
||||
Write-Verbose "Installing $ToolName version $ToolVersion"
|
||||
Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'"
|
||||
& $InstallerPath @LocalInstallerArguments
|
||||
if ($LASTEXITCODE -Ne "0") {
|
||||
$errMsg = "$ToolName installation failed"
|
||||
if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) {
|
||||
$showNativeToolsWarning = $true
|
||||
if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) {
|
||||
$showNativeToolsWarning = $false
|
||||
}
|
||||
if ($showNativeToolsWarning) {
|
||||
Write-Warning $errMsg
|
||||
}
|
||||
$toolInstallationFailure = $true
|
||||
} else {
|
||||
# We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
|
||||
Write-Host $errMsg
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
}
|
||||
if ($Clean) {
|
||||
$LocalInstallerArguments += @{ Clean = $True }
|
||||
|
||||
if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) {
|
||||
# We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
|
||||
Write-Host 'Native tools bootstrap failed'
|
||||
exit 1
|
||||
}
|
||||
|
||||
Write-Verbose "Installing $ToolName version $ToolVersion"
|
||||
Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'"
|
||||
& $InstallerPath @LocalInstallerArguments
|
||||
if ($LASTEXITCODE -Ne "0") {
|
||||
$errMsg = "$ToolName installation failed"
|
||||
if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) {
|
||||
$showNativeToolsWarning = $true
|
||||
if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) {
|
||||
$showNativeToolsWarning = $false
|
||||
}
|
||||
if ($showNativeToolsWarning) {
|
||||
Write-Warning $errMsg
|
||||
}
|
||||
$toolInstallationFailure = $true
|
||||
} else {
|
||||
# We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
|
||||
Write-Host $errMsg
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) {
|
||||
# We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
|
||||
Write-Host 'Native tools bootstrap failed'
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
else {
|
||||
|
@ -139,7 +191,7 @@ try {
|
|||
Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)"
|
||||
return $InstallBin
|
||||
}
|
||||
else {
|
||||
elseif (-not ($PathPromotion)) {
|
||||
Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message 'Native tools install directory does not exist, installation failed'
|
||||
exit 1
|
||||
}
|
||||
|
|
|
@@ -10,13 +10,13 @@ force=false
download_retries=5
retry_wait_time_seconds=30
global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json"
declare -A native_assets
declare -a native_assets

. $scriptroot/pipeline-logging-functions.sh
. $scriptroot/native/common-library.sh

while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
case $lowerI in
--baseuri)
base_uri=$2

@@ -76,24 +76,89 @@ while (($# > 0)); do
|
|||
done
|
||||
|
||||
function ReadGlobalJsonNativeTools {
|
||||
# Get the native-tools section from the global.json.
|
||||
local native_tools_section=$(cat $global_json_file | awk '/"native-tools"/,/}/')
|
||||
# Only extract the contents of the object.
|
||||
local native_tools_list=$(echo $native_tools_section | awk -F"[{}]" '{print $2}')
|
||||
native_tools_list=${native_tools_list//[\" ]/}
|
||||
native_tools_list=$( echo "$native_tools_list" | sed 's/\s//g' | sed 's/,/\n/g' )
|
||||
# happy path: we have a proper JSON parsing tool `jq(1)` in PATH!
|
||||
if command -v jq &> /dev/null; then
|
||||
|
||||
local old_IFS=$IFS
|
||||
while read -r line; do
|
||||
# Lines are of the form: 'tool:version'
|
||||
IFS=:
|
||||
while read -r key value; do
|
||||
native_assets[$key]=$value
|
||||
done <<< "$line"
|
||||
done <<< "$native_tools_list"
|
||||
IFS=$old_IFS
|
||||
# jq: read each key/value pair under "native-tools" entry and emit:
|
||||
# KEY="<entry-key>" VALUE="<entry-value>"
|
||||
# followed by a null byte.
|
||||
#
|
||||
# bash: read line with null byte delimiter and push to array (for later `eval`uation).
|
||||
|
||||
return 0;
|
||||
while IFS= read -rd '' line; do
|
||||
native_assets+=("$line")
|
||||
done < <(jq -r '. |
|
||||
select(has("native-tools")) |
|
||||
."native-tools" |
|
||||
keys[] as $k |
|
||||
@sh "KEY=\($k) VALUE=\(.[$k])\u0000"' "$global_json_file")
|
||||
|
||||
return
|
||||
fi
|
||||
|
||||
# Warning: falling back to manually parsing JSON, which is not recommended.
|
||||
|
||||
# Following routine matches the output and escaping logic of jq(1)'s @sh formatter used above.
|
||||
# It has been tested with several weird strings with escaped characters in entries (key and value)
|
||||
# and results were compared with the output of jq(1) in binary representation using xxd(1);
|
||||
# just before the assignment to 'native_assets' array (above and below).
|
||||
|
||||
# try to capture the section under "native-tools".
|
||||
if [[ ! "$(cat "$global_json_file")" =~ \"native-tools\"[[:space:]\:\{]*([^\}]+) ]]; then
|
||||
return
|
||||
fi
|
||||
|
||||
section="${BASH_REMATCH[1]}"
|
||||
|
||||
parseStarted=0
|
||||
possibleEnd=0
|
||||
escaping=0
|
||||
escaped=0
|
||||
isKey=1
|
||||
|
||||
for (( i=0; i<${#section}; i++ )); do
|
||||
char="${section:$i:1}"
|
||||
if ! ((parseStarted)) && [[ "$char" =~ [[:space:],:] ]]; then continue; fi
|
||||
|
||||
if ! ((escaping)) && [[ "$char" == "\\" ]]; then
|
||||
escaping=1
|
||||
elif ((escaping)) && ! ((escaped)); then
|
||||
escaped=1
|
||||
fi
|
||||
|
||||
if ! ((parseStarted)) && [[ "$char" == "\"" ]]; then
|
||||
parseStarted=1
|
||||
possibleEnd=0
|
||||
elif [[ "$char" == "'" ]]; then
|
||||
token="$token'\\\''"
|
||||
possibleEnd=0
|
||||
elif ((escaping)) || [[ "$char" != "\"" ]]; then
|
||||
token="$token$char"
|
||||
possibleEnd=1
|
||||
fi
|
||||
|
||||
if ((possibleEnd)) && ! ((escaping)) && [[ "$char" == "\"" ]]; then
|
||||
# Use printf to unescape token to match jq(1)'s @sh formatting rules.
|
||||
# do not use 'token="$(printf "$token")"' syntax, as $() eats the trailing linefeed.
|
||||
printf -v token "'$token'"
|
||||
|
||||
if ((isKey)); then
|
||||
KEY="$token"
|
||||
isKey=0
|
||||
else
|
||||
line="KEY=$KEY VALUE=$token"
|
||||
native_assets+=("$line")
|
||||
isKey=1
|
||||
fi
|
||||
|
||||
# reset for next token
|
||||
parseStarted=0
|
||||
token=
|
||||
elif ((escaping)) && ((escaped)); then
|
||||
escaping=0
|
||||
escaped=0
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
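
To make the record format concrete, here is a hedged, self-contained sketch of the jq branch above run against a hypothetical global.json; the temporary file name and the tool versions are made up for illustration. Each record ends in a NUL byte, so tr is used here only to make the output printable.

cat > /tmp/sample-global.json <<'EOF'
{ "native-tools": { "cmake": "3.16.4", "python": "3.7.1" } }
EOF
jq -r '. |
  select(has("native-tools")) |
  ."native-tools" |
  keys[] as $k |
  @sh "KEY=\($k) VALUE=\(.[$k])\u0000"' /tmp/sample-global.json | tr '\0' '\n'
# prints, roughly: KEY='cmake' VALUE='3.16.4' and KEY='python' VALUE='3.7.1'
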
native_base_dir=$install_directory
|
||||
|
@@ -111,14 +176,14 @@ if [[ ${#native_assets[@]} -eq 0 ]]; then
exit 0;
else
native_installer_dir="$scriptroot/native"
for tool in "${!native_assets[@]}"
do
tool_version=${native_assets[$tool]}
installer_path="$native_installer_dir/install-$tool.sh"
for index in "${!native_assets[@]}"; do
eval "${native_assets["$index"]}"

installer_path="$native_installer_dir/install-$KEY.sh"
installer_command="$installer_path"
installer_command+=" --baseuri $base_uri"
installer_command+=" --installpath $install_bin"
installer_command+=" --version $tool_version"
installer_command+=" --version $VALUE"
echo $installer_command

if [[ $force = true ]]; then

@@ -45,11 +45,11 @@ function SetupCredProvider {
# Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
# feeds successfully

$nugetConfigPath = "$RepoRoot\NuGet.config"
$nugetConfigPath = Join-Path $RepoRoot "NuGet.config"

if (-Not (Test-Path -Path $nugetConfigPath)) {
Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!'
ExitWithExitCode 1
ExitWithExitCode 1
}

$endpoints = New-Object System.Collections.ArrayList

@@ -63,8 +63,6 @@ function SetupCredProvider {
}

if (($endpoints | Measure-Object).Count -gt 0) {
# [SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Endpoint code example with no real credentials.")]
# Create the JSON object. It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}'
$endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress

# Create the environment variables the AzDo way

@@ -87,7 +85,7 @@ function SetupCredProvider {

#Workaround for https://github.com/microsoft/msbuild/issues/4430
function InstallDotNetSdkAndRestoreArcade {
$dotnetTempDir = "$RepoRoot\dotnet"
$dotnetTempDir = Join-Path $RepoRoot "dotnet"
$dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
$dotnet = "$dotnetTempDir\dotnet.exe"
$restoreProjPath = "$PSScriptRoot\restore.proj"

@@ -39,7 +39,7 @@ function SetupCredProvider {
# Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
# feeds successfully

local nugetConfigPath="$repo_root/NuGet.config"
local nugetConfigPath="{$repo_root}NuGet.config"

if [ ! "$nugetConfigPath" ]; then
Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!"

@@ -62,8 +62,6 @@ function SetupCredProvider {
endpoints+=']'

if [ ${#endpoints} -gt 2 ]; then
# [SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Endpoint code example with no real credentials.")]
# Create the JSON object. It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}'
local endpointCredentials="{\"endpointCredentials\": "$endpoints"}"

echo "##vso[task.setvariable variable=VSS_NUGET_EXTERNAL_FEED_ENDPOINTS]$endpointCredentials"

@@ -103,7 +101,7 @@ authToken=''
repoName=''

while [[ $# > 0 ]]; do
opt="$(echo "$1" | awk '{print tolower($0)}')"
opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
--operation)
operation=$2

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSources>
<clear />
<add key="dotnet-core-internal-tooling" value="https://pkgs.dev.azure.com/devdiv/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json" />
</packageSources>
</configuration>

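For local verification, a restore can be pointed at this feed configuration explicitly; a minimal sketch with hypothetical paths (the build pipeline wires this up through its own targets):

dotnet restore ./eng/common/internal/Tools.csproj --configfile ./eng/common/internal/NuGet.config
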
@@ -1,5 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. -->
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net472</TargetFramework>

@@ -9,6 +8,9 @@
<ItemGroup>
<!-- Clear references, the SDK may add some depending on UsingToolXxx settings, but we only want to restore the following -->
<PackageReference Remove="@(PackageReference)"/>
<PackageReference Include="Microsoft.ManifestTool.CrossPlatform" Version="$(MicrosoftManifestToolCrossPlatformVersion)" />
<PackageReference Include="Microsoft.VisualStudioEng.MicroBuild.Core" Version="$(MicrosoftVisualStudioEngMicroBuildCoreVersion)" />
<PackageReference Include="Microsoft.VisualStudioEng.MicroBuild.Plugins.SwixBuild" Version="$(MicrosoftVisualStudioEngMicroBuildPluginsSwixBuildVersion)" />
<PackageReference Include="Microsoft.DotNet.IBCMerge" Version="$(MicrosoftDotNetIBCMergeVersion)" Condition="'$(UsingToolIbcOptimization)' == 'true'" />
<PackageReference Include="Drop.App" Version="$(DropAppVersion)" ExcludeAssets="all" Condition="'$(UsingToolVisualStudioIbcTraining)' == 'true'"/>
</ItemGroup>

@@ -5,6 +5,8 @@ Param(
[bool] $nodeReuse = $true,
[switch] $ci,
[switch] $prepareMachine,
[switch] $excludePrereleaseVS,
[string] $msbuildEngine = $null,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs
)

@@ -19,7 +19,7 @@ prepare_machine=false
extra_args=''

while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
case $lowerI in
--verbosity)
verbosity=$2

@@ -48,7 +48,7 @@ function DownloadAndExtract {
-Verbose:$Verbose

if ($DownloadStatus -Eq $False) {
Write-Error "Download failed"
Write-Error "Download failed from $Uri"
return $False
}

@@ -276,7 +276,8 @@ function Get-MachineArchitecture {
}
if (($ProcessorArchitecture -Eq "AMD64") -Or
($ProcessorArchitecture -Eq "IA64") -Or
($ProcessorArchitecture -Eq "ARM64")) {
($ProcessorArchitecture -Eq "ARM64") -Or
($ProcessorArchitecture -Eq "LOONGARCH64")) {
return "x64"
}
return "x86"

@@ -148,8 +148,12 @@ function NewScriptShim {
fi

if [[ ! -f $tool_file_path ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist"
return 1
# try to see if the path is lower cased
tool_file_path="$(echo $tool_file_path | tr "[:upper:]" "[:lower:]")"
if [[ ! -f $tool_file_path ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist"
return 1
fi
fi

local shim_contents=$'#!/usr/bin/env bash\n'

@@ -1,121 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# This file locates the native compiler with the given name and version and sets the environment variables to locate it.
|
||||
#
|
||||
|
||||
source="${BASH_SOURCE[0]}"
|
||||
|
||||
# resolve $SOURCE until the file is no longer a symlink
|
||||
while [[ -h $source ]]; do
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
source="$(readlink "$source")"
|
||||
|
||||
# if $source was a relative symlink, we need to resolve it relative to the path where the
|
||||
# symlink file was located
|
||||
[[ $source != /* ]] && source="$scriptroot/$source"
|
||||
done
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
|
||||
if [ $# -lt 0 ]
|
||||
then
|
||||
echo "Usage..."
|
||||
echo "find-native-compiler.sh <compiler> <compiler major version> <compiler minor version>"
|
||||
echo "Specify the name of compiler (clang or gcc)."
|
||||
echo "Specify the major version of compiler."
|
||||
echo "Specify the minor version of compiler."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
. $scriptroot/../pipeline-logging-functions.sh
|
||||
|
||||
compiler="$1"
|
||||
cxxCompiler="$compiler++"
|
||||
majorVersion="$2"
|
||||
minorVersion="$3"
|
||||
|
||||
if [ "$compiler" = "gcc" ]; then cxxCompiler="g++"; fi
|
||||
|
||||
check_version_exists() {
|
||||
desired_version=-1
|
||||
|
||||
# Set up the environment to be used for building with the desired compiler.
|
||||
if command -v "$compiler-$1.$2" > /dev/null; then
|
||||
desired_version="-$1.$2"
|
||||
elif command -v "$compiler$1$2" > /dev/null; then
|
||||
desired_version="$1$2"
|
||||
elif command -v "$compiler-$1$2" > /dev/null; then
|
||||
desired_version="-$1$2"
|
||||
fi
|
||||
|
||||
echo "$desired_version"
|
||||
}
|
||||
|
||||
if [ -z "$CLR_CC" ]; then
|
||||
|
||||
# Set default versions
|
||||
if [ -z "$majorVersion" ]; then
|
||||
# note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero.
|
||||
if [ "$compiler" = "clang" ]; then versions=( 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 )
|
||||
elif [ "$compiler" = "gcc" ]; then versions=( 9 8 7 6 5 4.9 ); fi
|
||||
|
||||
for version in "${versions[@]}"; do
|
||||
parts=(${version//./ })
|
||||
desired_version="$(check_version_exists "${parts[0]}" "${parts[1]}")"
|
||||
if [ "$desired_version" != "-1" ]; then majorVersion="${parts[0]}"; break; fi
|
||||
done
|
||||
|
||||
if [ -z "$majorVersion" ]; then
|
||||
if command -v "$compiler" > /dev/null; then
|
||||
if [ "$(uname)" != "Darwin" ]; then
|
||||
Write-PipelineTelemetryError -category "Build" -type "warning" "Specific version of $compiler not found, falling back to use the one in PATH."
|
||||
fi
|
||||
export CC="$(command -v "$compiler")"
|
||||
export CXX="$(command -v "$cxxCompiler")"
|
||||
else
|
||||
Write-PipelineTelemetryError -category "Build" "No usable version of $compiler found."
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
if [ "$compiler" = "clang" ] && [ "$majorVersion" -lt 5 ]; then
|
||||
if [ "$build_arch" = "arm" ] || [ "$build_arch" = "armel" ]; then
|
||||
if command -v "$compiler" > /dev/null; then
|
||||
Write-PipelineTelemetryError -category "Build" -type "warning" "Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH."
|
||||
export CC="$(command -v "$compiler")"
|
||||
export CXX="$(command -v "$cxxCompiler")"
|
||||
else
|
||||
Write-PipelineTelemetryError -category "Build" "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
else
|
||||
desired_version="$(check_version_exists "$majorVersion" "$minorVersion")"
|
||||
if [ "$desired_version" = "-1" ]; then
|
||||
Write-PipelineTelemetryError -category "Build" "Could not find specific version of $compiler: $majorVersion $minorVersion."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$CC" ]; then
|
||||
export CC="$(command -v "$compiler$desired_version")"
|
||||
export CXX="$(command -v "$cxxCompiler$desired_version")"
|
||||
if [ -z "$CXX" ]; then export CXX="$(command -v "$cxxCompiler")"; fi
|
||||
fi
|
||||
else
|
||||
if [ ! -f "$CLR_CC" ]; then
|
||||
Write-PipelineTelemetryError -category "Build" "CLR_CC is set but path '$CLR_CC' does not exist"
|
||||
exit 1
|
||||
fi
|
||||
export CC="$CLR_CC"
|
||||
export CXX="$CLR_CXX"
|
||||
fi
|
||||
|
||||
if [ -z "$CC" ]; then
|
||||
Write-PipelineTelemetryError -category "Build" "Unable to find $compiler."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
export CCC_CC="$CC"
|
||||
export CCC_CXX="$CXX"
|
||||
export SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")"
|
|
@@ -0,0 +1,144 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# This file detects the C/C++ compiler and exports it to the CC/CXX environment variables
|
||||
#
|
||||
# NOTE: some scripts source this file and rely on stdout being empty, make sure to not output anything here!
|
||||
|
||||
if [[ "$#" -lt 3 ]]; then
|
||||
echo "Usage..."
|
||||
echo "init-compiler.sh <script directory> <Architecture> <compiler>"
|
||||
echo "Specify the script directory."
|
||||
echo "Specify the target architecture."
|
||||
echo "Specify the name of compiler (clang or gcc)."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
nativescriptroot="$1"
|
||||
build_arch="$2"
|
||||
compiler="$3"
|
||||
|
||||
case "$compiler" in
|
||||
clang*|-clang*|--clang*)
|
||||
# clangx.y or clang-x.y
|
||||
version="$(echo "$compiler" | tr -d '[:alpha:]-=')"
|
||||
parts=(${version//./ })
|
||||
majorVersion="${parts[0]}"
|
||||
minorVersion="${parts[1]}"
|
||||
if [[ -z "$minorVersion" && "$majorVersion" -le 6 ]]; then
|
||||
minorVersion=0;
|
||||
fi
|
||||
compiler=clang
|
||||
;;
|
||||
|
||||
gcc*|-gcc*|--gcc*)
|
||||
# gccx.y or gcc-x.y
|
||||
version="$(echo "$compiler" | tr -d '[:alpha:]-=')"
|
||||
parts=(${version//./ })
|
||||
majorVersion="${parts[0]}"
|
||||
minorVersion="${parts[1]}"
|
||||
compiler=gcc
|
||||
;;
|
||||
esac
|
||||
|
||||
cxxCompiler="$compiler++"
|
||||
|
||||
. "$nativescriptroot"/../pipeline-logging-functions.sh
|
||||
|
||||
# clear the existing CC and CXX from environment
|
||||
CC=
|
||||
CXX=
|
||||
LDFLAGS=
|
||||
|
||||
if [[ "$compiler" == "gcc" ]]; then cxxCompiler="g++"; fi
|
||||
|
||||
check_version_exists() {
|
||||
desired_version=-1
|
||||
|
||||
# Set up the environment to be used for building with the desired compiler.
|
||||
if command -v "$compiler-$1.$2" > /dev/null; then
|
||||
desired_version="-$1.$2"
|
||||
elif command -v "$compiler$1$2" > /dev/null; then
|
||||
desired_version="$1$2"
|
||||
elif command -v "$compiler-$1$2" > /dev/null; then
|
||||
desired_version="-$1$2"
|
||||
fi
|
||||
|
||||
echo "$desired_version"
|
||||
}
|
||||
|
||||
if [[ -z "$CLR_CC" ]]; then
|
||||
|
||||
# Set default versions
|
||||
if [[ -z "$majorVersion" ]]; then
|
||||
# note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero.
|
||||
if [[ "$compiler" == "clang" ]]; then versions=( 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 )
|
||||
elif [[ "$compiler" == "gcc" ]]; then versions=( 12 11 10 9 8 7 6 5 4.9 ); fi
|
||||
|
||||
for version in "${versions[@]}"; do
|
||||
parts=(${version//./ })
|
||||
desired_version="$(check_version_exists "${parts[0]}" "${parts[1]}")"
|
||||
if [[ "$desired_version" != "-1" ]]; then majorVersion="${parts[0]}"; break; fi
|
||||
done
|
||||
|
||||
if [[ -z "$majorVersion" ]]; then
|
||||
if command -v "$compiler" > /dev/null; then
|
||||
if [[ "$(uname)" != "Darwin" ]]; then
|
||||
Write-PipelineTelemetryError -category "Build" -type "warning" "Specific version of $compiler not found, falling back to use the one in PATH."
|
||||
fi
|
||||
CC="$(command -v "$compiler")"
|
||||
CXX="$(command -v "$cxxCompiler")"
|
||||
else
|
||||
Write-PipelineTelemetryError -category "Build" "No usable version of $compiler found."
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
if [[ "$compiler" == "clang" && "$majorVersion" -lt 5 ]]; then
|
||||
if [[ "$build_arch" == "arm" || "$build_arch" == "armel" ]]; then
|
||||
if command -v "$compiler" > /dev/null; then
|
||||
Write-PipelineTelemetryError -category "Build" -type "warning" "Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH."
|
||||
CC="$(command -v "$compiler")"
|
||||
CXX="$(command -v "$cxxCompiler")"
|
||||
else
|
||||
Write-PipelineTelemetryError -category "Build" "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
else
|
||||
desired_version="$(check_version_exists "$majorVersion" "$minorVersion")"
|
||||
if [[ "$desired_version" == "-1" ]]; then
|
||||
Write-PipelineTelemetryError -category "Build" "Could not find specific version of $compiler: $majorVersion $minorVersion."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -z "$CC" ]]; then
|
||||
CC="$(command -v "$compiler$desired_version")"
|
||||
CXX="$(command -v "$cxxCompiler$desired_version")"
|
||||
if [[ -z "$CXX" ]]; then CXX="$(command -v "$cxxCompiler")"; fi
|
||||
fi
|
||||
else
|
||||
if [[ ! -f "$CLR_CC" ]]; then
|
||||
Write-PipelineTelemetryError -category "Build" "CLR_CC is set but path '$CLR_CC' does not exist"
|
||||
exit 1
|
||||
fi
|
||||
CC="$CLR_CC"
|
||||
CXX="$CLR_CXX"
|
||||
fi
|
||||
|
||||
if [[ -z "$CC" ]]; then
|
||||
Write-PipelineTelemetryError -category "Build" "Unable to find $compiler."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Only lld version >= 9 can be considered stable
|
||||
if [[ "$compiler" == "clang" && "$majorVersion" -ge 9 ]]; then
|
||||
if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then
|
||||
LDFLAGS="-fuse-ld=lld"
|
||||
fi
|
||||
fi
|
||||
|
||||
SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")"
|
||||
|
||||
export CC CXX LDFLAGS SCAN_BUILD_COMMAND
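
A hedged usage sketch for this script; the paths and the clang12 argument are hypothetical and the real call sites live elsewhere in the repo. Because the variables are only exported from the script's own shell, callers dot-source it so that CC, CXX, LDFLAGS and SCAN_BUILD_COMMAND land in their environment.

# assumed layout: init-compiler.sh and the other native helpers under eng/common/native
. ./eng/common/native/init-compiler.sh ./eng/common/native x64 clang12
echo "CC=$CC CXX=$CXX LDFLAGS=$LDFLAGS"
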
@@ -14,7 +14,7 @@ download_retries=5
retry_wait_time_seconds=30

while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
case $lowerI in
--baseuri)
base_uri=$2

@@ -63,7 +63,7 @@ done

tool_name="cmake-test"
tool_os=$(GetCurrentOS)
tool_folder=$(echo $tool_os | awk '{print tolower($0)}')
tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")"
tool_arch="x86_64"
tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
tool_install_directory="$install_path/$tool_name/$version"

@@ -114,4 +114,4 @@ if [[ $? != 0 ]]; then
exit 1
fi

exit 0
exit 0

@@ -14,7 +14,7 @@ download_retries=5
retry_wait_time_seconds=30

while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
case $lowerI in
--baseuri)
base_uri=$2

@@ -63,7 +63,7 @@ done

tool_name="cmake"
tool_os=$(GetCurrentOS)
tool_folder=$(echo $tool_os | awk '{print tolower($0)}')
tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")"
tool_arch="x86_64"
tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
tool_install_directory="$install_path/$tool_name/$version"

@@ -114,4 +114,4 @@ if [[ $? != 0 ]]; then
exit 1
fi

exit 0
exit 0

@@ -105,7 +105,7 @@ try {
Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))"
exit 1
} elseif (@($ToolFilePath).Length -Lt 1) {
Write-Host "$ToolName was not found in $ToolFilePath."
Write-Host "$ToolName was not found in $ToolInstallDirectory."
exit 1
}

@@ -1,30 +0,0 @@
|
|||
<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' != 'Windows_NT'">
|
||||
<Python>python3</Python>
|
||||
<HelixPreCommands>$(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/SOD/SizeOnDisk</HelixPreCommands>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
|
||||
<PayloadDirectory>%(Identity)</PayloadDirectory>
|
||||
</HelixCorrelationPayload>
|
||||
</ItemGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' == 'Windows_NT'">
|
||||
<ScenarioDirectory>%HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\</ScenarioDirectory>
|
||||
<BlazorDirectory>$(ScenarioDirectory)blazor\</BlazorDirectory>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' != 'Windows_NT'">
|
||||
<ScenarioDirectory>$HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/</ScenarioDirectory>
|
||||
<BlazorDirectory>$(ScenarioDirectory)blazor/</BlazorDirectory>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<HelixWorkItem Include="SOD - New Blazor Template - Publish">
|
||||
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
|
||||
<PreCommands>cd $(BlazorDirectory);$(Python) pre.py publish --msbuild %27/p:_TrimmerDumpDependencies=true%27 --msbuild-static AdditionalMonoLinkerOptions=%27"%24(AdditionalMonoLinkerOptions) --dump-dependencies"%27 --binlog %27./traces/blazor_publish.binlog%27</PreCommands>
|
||||
<Command>$(Python) test.py sod --scenario-name "%(Identity)"</Command>
|
||||
<PostCommands>$(Python) post.py</PostCommands>
|
||||
</HelixWorkItem>
|
||||
</ItemGroup>
|
||||
</Project>
|
|
@@ -1,69 +0,0 @@
|
|||
<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">
|
||||
|
||||
<ItemGroup>
|
||||
<HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
|
||||
<PayloadDirectory>%(Identity)</PayloadDirectory>
|
||||
</HelixCorrelationPayload>
|
||||
</ItemGroup>
|
||||
|
||||
<!--
|
||||
Crossgen and Crossgen2 Scenario WorkItems
|
||||
-->
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' == 'Windows_NT'">
|
||||
<Python>py -3</Python>
|
||||
<HelixPreCommands>$(HelixPreCommands)</HelixPreCommands>
|
||||
<CoreRoot>%HELIX_CORRELATION_PAYLOAD%\Core_Root</CoreRoot>
|
||||
<ScenarioDirectory>%HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\</ScenarioDirectory>
|
||||
<CrossgenDirectory>$(ScenarioDirectory)crossgen\</CrossgenDirectory>
|
||||
<Crossgen2Directory>$(ScenarioDirectory)crossgen2\</Crossgen2Directory>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' != 'Windows_NT'">
|
||||
<Python>python3</Python>
|
||||
<HelixPreCommands>$(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/startup/Startup;chmod +x $HELIX_WORKITEM_PAYLOAD/startup/perfcollect;sudo apt update</HelixPreCommands>
|
||||
<CoreRoot>$HELIX_CORRELATION_PAYLOAD/Core_Root</CoreRoot>
|
||||
<ScenarioDirectory>$HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/</ScenarioDirectory>
|
||||
<CrossgenDirectory>$(ScenarioDirectory)crossgen/</CrossgenDirectory>
|
||||
<Crossgen2Directory>$(ScenarioDirectory)crossgen2/</Crossgen2Directory>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<SingleAssembly Include="System.Private.Xml.dll"/>
|
||||
<SingleAssembly Include="System.Linq.Expressions.dll"/>
|
||||
<SingleAssembly Include="Microsoft.CodeAnalysis.VisualBasic.dll"/>
|
||||
<SingleAssembly Include="Microsoft.CodeAnalysis.CSharp.dll"/>
|
||||
<SingleAssembly Include="System.Private.CoreLib.dll"/>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Composite Include="framework-r2r.dll.rsp"/>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<CrossgenWorkItem Include="@(SingleAssembly)">
|
||||
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
|
||||
<Command>$(Python) $(CrossgenDirectory)test.py crossgen --core-root $(CoreRoot) --test-name %(Identity)</Command>
|
||||
</CrossgenWorkItem>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Crossgen2WorkItem Include="@(SingleAssembly)">
|
||||
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
|
||||
<Command>$(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --single %(Identity)</Command>
|
||||
</Crossgen2WorkItem>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- Enable crossgen tests on Windows x64 and Windows x86 -->
|
||||
<HelixWorkItem Include="@(CrossgenWorkItem -> 'Crossgen %(Identity)')" Condition="'$(AGENT_OS)' == 'Windows_NT'">
|
||||
<Timeout>4:00</Timeout>
|
||||
</HelixWorkItem>
|
||||
<!-- Enable crossgen2 tests on Windows x64 and Linux x64 -->
|
||||
<HelixWorkItem Include="@(Crossgen2WorkItem -> 'Crossgen2 %(Identity)')" Condition="'$(Architecture)' == 'x64'">
|
||||
<Timeout>4:00</Timeout>
|
||||
</HelixWorkItem>
|
||||
<HelixWorkItem Include="Crossgen2 Composite Framework R2R" Condition="'$(Architecture)' == 'x64'">
|
||||
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
|
||||
<Command>$(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --composite $(Crossgen2Directory)framework-r2r.dll.rsp</Command>
|
||||
<Timeout>1:00</Timeout>
|
||||
</HelixWorkItem>
|
||||
</ItemGroup>
|
||||
</Project>
|
|
@@ -1,144 +0,0 @@
|
|||
<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">
|
||||
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' == 'Windows_NT'">
|
||||
<WorkItemCommand>%HELIX_CORRELATION_PAYLOAD%\performance\scripts\benchmarks_ci.py --csproj %HELIX_CORRELATION_PAYLOAD%\performance\$(TargetCsproj)</WorkItemCommand>
|
||||
<CliArguments>--dotnet-versions %DOTNET_VERSION% --cli-source-info args --cli-branch %PERFLAB_BRANCH% --cli-commit-sha %PERFLAB_HASH% --cli-repository https://github.com/%PERFLAB_REPO% --cli-source-timestamp %PERFLAB_BUILDTIMESTAMP%</CliArguments>
|
||||
<Python>py -3</Python>
|
||||
<CoreRun>%HELIX_CORRELATION_PAYLOAD%\Core_Root\CoreRun.exe</CoreRun>
|
||||
<BaselineCoreRun>%HELIX_CORRELATION_PAYLOAD%\Baseline_Core_Root\CoreRun.exe</BaselineCoreRun>
|
||||
|
||||
<HelixPreCommands>$(HelixPreCommands);call %HELIX_CORRELATION_PAYLOAD%\performance\tools\machine-setup.cmd;set PYTHONPATH=%HELIX_WORKITEM_PAYLOAD%\scripts%3B%HELIX_WORKITEM_PAYLOAD%</HelixPreCommands>
|
||||
<ArtifactsDirectory>%HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts</ArtifactsDirectory>
|
||||
<BaselineArtifactsDirectory>%HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts_Baseline</BaselineArtifactsDirectory>
|
||||
<ResultsComparer>%HELIX_CORRELATION_PAYLOAD%\performance\src\tools\ResultsComparer\ResultsComparer.csproj</ResultsComparer>
|
||||
<DotnetExe>%HELIX_CORRELATION_PAYLOAD%\performance\tools\dotnet\$(Architecture)\dotnet.exe</DotnetExe>
|
||||
<Percent>%25%25</Percent>
|
||||
<XMLResults>%HELIX_WORKITEM_ROOT%\testResults.xml</XMLResults>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' != 'Windows_NT' and '$(RunFromPerfRepo)' == 'false'">
|
||||
<BaseDirectory>$HELIX_CORRELATION_PAYLOAD</BaseDirectory>
|
||||
<PerformanceDirectory>$(BaseDirectory)/performance</PerformanceDirectory>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' != 'Windows_NT' and '$(RunFromPerfRepo)' == 'true'">
|
||||
<BaseDirectory>$HELIX_WORKITEM_PAYLOAD</BaseDirectory>
|
||||
<PerformanceDirectory>$(BaseDirectory)</PerformanceDirectory>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' != 'Windows_NT'">
|
||||
<WorkItemCommand>$(PerformanceDirectory)/scripts/benchmarks_ci.py --csproj $(PerformanceDirectory)/$(TargetCsproj)</WorkItemCommand>
|
||||
<CliArguments>--dotnet-versions $DOTNET_VERSION --cli-source-info args --cli-branch $PERFLAB_BRANCH --cli-commit-sha $PERFLAB_HASH --cli-repository https://github.com/$PERFLAB_REPO --cli-source-timestamp $PERFLAB_BUILDTIMESTAMP</CliArguments>
|
||||
<Python>python3</Python>
|
||||
<CoreRun>$(BaseDirectory)/Core_Root/corerun</CoreRun>
|
||||
<BaselineCoreRun>$(BaseDirectory)/Baseline_Core_Root/corerun</BaselineCoreRun>
|
||||
<HelixPreCommands>$(HelixPreCommands);chmod +x $(PerformanceDirectory)/tools/machine-setup.sh;. $(PerformanceDirectory)/tools/machine-setup.sh</HelixPreCommands>
|
||||
<ArtifactsDirectory>$(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts</ArtifactsDirectory>
|
||||
<BaselineArtifactsDirectory>$(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts_Baseline</BaselineArtifactsDirectory>
|
||||
<ResultsComparer>$(PerformanceDirectory)/src/tools/ResultsComparer/ResultsComparer.csproj</ResultsComparer>
|
||||
<DotnetExe>$(PerformanceDirectory)/tools/dotnet/$(Architecture)/dotnet</DotnetExe>
|
||||
<Percent>%25</Percent>
|
||||
<XMLResults>$HELIX_WORKITEM_ROOT/testResults.xml</XMLResults>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(WasmDotnet)' == 'true'">
|
||||
<CliArguments>$(CliArguments) --wasm</CliArguments>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(MonoDotnet)' == 'true' and '$(AGENT_OS)' == 'Windows_NT'">
|
||||
<CoreRunArgument>--corerun %HELIX_CORRELATION_PAYLOAD%\dotnet-mono\shared\Microsoft.NETCore.App\6.0.0\corerun.exe</CoreRunArgument>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(MonoDotnet)' == 'true' and '$(AGENT_OS)' != 'Windows_NT'">
|
||||
<CoreRunArgument>--corerun $(BaseDirectory)/dotnet-mono/shared/Microsoft.NETCore.App/6.0.0/corerun</CoreRunArgument>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(UseCoreRun)' == 'true'">
|
||||
<CoreRunArgument>--corerun $(CoreRun)</CoreRunArgument>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(UseBaselineCoreRun)' == 'true'">
|
||||
<BaselineCoreRunArgument>--corerun $(BaselineCoreRun)</BaselineCoreRunArgument>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(WorkItemCommand)' != ''">
|
||||
<WorkItemCommand>$(Python) $(WorkItemCommand) --incremental no --architecture $(Architecture) -f $(_Framework) $(PerfLabArguments)</WorkItemCommand>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(_Framework)' != 'net461'">
|
||||
<WorkItemCommand>$(WorkItemCommand) $(CliArguments)</WorkItemCommand>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<WorkItemTimeout>2:30</WorkItemTimeout>
|
||||
<WorkItemTimeout Condition="'$(HelixSourcePrefix)' != 'official'">0:15</WorkItemTimeout>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
|
||||
<PayloadDirectory>%(Identity)</PayloadDirectory>
|
||||
</HelixCorrelationPayload>
|
||||
</ItemGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<PartitionCount>30</PartitionCount>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<Partition Include="$(BuildConfig).Partition0" Index="0" />
|
||||
<Partition Include="$(BuildConfig).Partition1" Index="1" />
|
||||
<Partition Include="$(BuildConfig).Partition2" Index="2" />
|
||||
<Partition Include="$(BuildConfig).Partition3" Index="3" />
|
||||
<Partition Include="$(BuildConfig).Partition4" Index="4" />
|
||||
<Partition Include="$(BuildConfig).Partition5" Index="5" />
|
||||
<Partition Include="$(BuildConfig).Partition6" Index="6" />
|
||||
<Partition Include="$(BuildConfig).Partition7" Index="7" />
|
||||
<Partition Include="$(BuildConfig).Partition8" Index="8" />
|
||||
<Partition Include="$(BuildConfig).Partition9" Index="9" />
|
||||
<Partition Include="$(BuildConfig).Partition10" Index="10" />
|
||||
<Partition Include="$(BuildConfig).Partition11" Index="11" />
|
||||
<Partition Include="$(BuildConfig).Partition12" Index="12" />
|
||||
<Partition Include="$(BuildConfig).Partition13" Index="13" />
|
||||
<Partition Include="$(BuildConfig).Partition14" Index="14" />
|
||||
<Partition Include="$(BuildConfig).Partition15" Index="15" />
|
||||
<Partition Include="$(BuildConfig).Partition16" Index="16" />
|
||||
<Partition Include="$(BuildConfig).Partition17" Index="17" />
|
||||
<Partition Include="$(BuildConfig).Partition18" Index="18" />
|
||||
<Partition Include="$(BuildConfig).Partition19" Index="19" />
|
||||
<Partition Include="$(BuildConfig).Partition20" Index="20" />
|
||||
<Partition Include="$(BuildConfig).Partition21" Index="21" />
|
||||
<Partition Include="$(BuildConfig).Partition22" Index="22" />
|
||||
<Partition Include="$(BuildConfig).Partition23" Index="23" />
|
||||
<Partition Include="$(BuildConfig).Partition24" Index="24" />
|
||||
<Partition Include="$(BuildConfig).Partition25" Index="25" />
|
||||
<Partition Include="$(BuildConfig).Partition26" Index="26" />
|
||||
<Partition Include="$(BuildConfig).Partition27" Index="27" />
|
||||
<Partition Include="$(BuildConfig).Partition28" Index="28" />
|
||||
<Partition Include="$(BuildConfig).Partition29" Index="29" />
|
||||
</ItemGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(Compare)' == 'true'">
|
||||
<FailOnTestFailure>false</FailOnTestFailure>
|
||||
</PropertyGroup>
|
||||
|
||||
<!--
|
||||
Partition the Microbenchmarks project, but nothing else
|
||||
-->
|
||||
<ItemGroup Condition="$(TargetCsproj.Contains('MicroBenchmarks.csproj'))">
|
||||
<HelixWorkItem Include="@(Partition)">
|
||||
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
|
||||
<PreCommands Condition="'$(Compare)' == 'true'">$(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"</PreCommands>
|
||||
<Command>$(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"</Command>
|
||||
<PostCommands Condition="'$(Compare)' == 'true'">$(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults);$(FinalCommand)</PostCommands>
|
||||
<Timeout>$(WorkItemTimeout)</Timeout>
|
||||
</HelixWorkItem>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup Condition="!$(TargetCsproj.Contains('MicroBenchmarks.csproj'))">
|
||||
<HelixWorkItem Include="$(BuildConfig).WorkItem">
|
||||
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
|
||||
<PreCommands Condition="'$(Compare)' == 'true'">$(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument)"</PreCommands>
|
||||
<Command>$(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument)"</Command>
|
||||
<PostCommands Condition="'$(Compare)' == 'true'">$(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults)</PostCommands>
|
||||
<Timeout>4:00</Timeout>
|
||||
</HelixWorkItem>
|
||||
</ItemGroup>
|
||||
</Project>
|
|
@@ -1,147 +0,0 @@
|
|||
Param(
|
||||
[string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY,
|
||||
[string] $CoreRootDirectory,
|
||||
[string] $BaselineCoreRootDirectory,
|
||||
[string] $Architecture="x64",
|
||||
[string] $Framework="net5.0",
|
||||
[string] $CompilationMode="Tiered",
|
||||
[string] $Repository=$env:BUILD_REPOSITORY_NAME,
|
||||
[string] $Branch=$env:BUILD_SOURCEBRANCH,
|
||||
[string] $CommitSha=$env:BUILD_SOURCEVERSION,
|
||||
[string] $BuildNumber=$env:BUILD_BUILDNUMBER,
|
||||
[string] $RunCategories="Libraries Runtime",
|
||||
[string] $Csproj="src\benchmarks\micro\MicroBenchmarks.csproj",
|
||||
[string] $Kind="micro",
|
||||
[switch] $LLVM,
|
||||
[switch] $MonoInterpreter,
|
||||
[switch] $MonoAOT,
|
||||
[switch] $Internal,
|
||||
[switch] $Compare,
|
||||
[string] $MonoDotnet="",
|
||||
[string] $Configurations="CompilationMode=$CompilationMode RunKind=$Kind"
|
||||
)
|
||||
|
||||
$RunFromPerformanceRepo = ($Repository -eq "dotnet/performance") -or ($Repository -eq "dotnet-performance")
|
||||
$UseCoreRun = ($CoreRootDirectory -ne [string]::Empty)
|
||||
$UseBaselineCoreRun = ($BaselineCoreRootDirectory -ne [string]::Empty)
|
||||
|
||||
$PayloadDirectory = (Join-Path $SourceDirectory "Payload")
|
||||
$PerformanceDirectory = (Join-Path $PayloadDirectory "performance")
|
||||
$WorkItemDirectory = (Join-Path $SourceDirectory "workitem")
|
||||
$ExtraBenchmarkDotNetArguments = "--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
|
||||
$Creator = $env:BUILD_DEFINITIONNAME
|
||||
$PerfLabArguments = ""
|
||||
$HelixSourcePrefix = "pr"
|
||||
|
||||
$Queue = "Windows.10.Amd64.ClientRS4.DevEx.15.8.Open"
|
||||
|
||||
# TODO: Implement a better logic to determine if Framework is .NET Core or >= .NET 5.
|
||||
if ($Framework.StartsWith("netcoreapp") -or ($Framework -eq "net5.0")) {
|
||||
$Queue = "Windows.10.Amd64.ClientRS5.Open"
|
||||
}
|
||||
|
||||
if ($Compare) {
|
||||
$Queue = "Windows.10.Amd64.19H1.Tiger.Perf.Open"
|
||||
$PerfLabArguments = ""
|
||||
$ExtraBenchmarkDotNetArguments = ""
|
||||
}
|
||||
|
||||
if ($Internal) {
|
||||
$Queue = "Windows.10.Amd64.19H1.Tiger.Perf"
|
||||
$PerfLabArguments = "--upload-to-perflab-container"
|
||||
$ExtraBenchmarkDotNetArguments = ""
|
||||
$Creator = ""
|
||||
$HelixSourcePrefix = "official"
|
||||
}
|
||||
|
||||
if($MonoInterpreter)
|
||||
{
|
||||
$ExtraBenchmarkDotNetArguments = "--category-exclusion-filter NoInterpreter"
|
||||
}
|
||||
|
||||
if($MonoDotnet -ne "")
|
||||
{
|
||||
$Configurations += " LLVM=$LLVM MonoInterpreter=$MonoInterpreter MonoAOT=$MonoAOT"
|
||||
if($ExtraBenchmarkDotNetArguments -eq "")
|
||||
{
|
||||
#FIX ME: We need to block these tests as they don't run on mono for now
|
||||
$ExtraBenchmarkDotNetArguments = "--exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
|
||||
}
|
||||
else
|
||||
{
|
||||
#FIX ME: We need to block these tests as they don't run on mono for now
|
||||
$ExtraBenchmarkDotNetArguments += " --exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
|
||||
}
|
||||
}
|
||||
|
||||
# FIX ME: This is a workaround until we get this from the actual pipeline
|
||||
$CommonSetupArguments="--channel master --queue $Queue --build-number $BuildNumber --build-configs $Configurations --architecture $Architecture"
|
||||
$SetupArguments = "--repository https://github.com/$Repository --branch $Branch --get-perf-hash --commit-sha $CommitSha $CommonSetupArguments"
|
||||
|
||||
|
||||
#This grabs the LKG version number of dotnet and passes it to our scripts
|
||||
$VersionJSON = Get-Content global.json | ConvertFrom-Json
|
||||
$DotNetVersion = $VersionJSON.tools.dotnet
|
||||
$SetupArguments = "--dotnet-versions $DotNetVersion $SetupArguments"
|
||||
|
||||
|
||||
if ($RunFromPerformanceRepo) {
$SetupArguments = "--perf-hash $CommitSha $CommonSetupArguments"

robocopy $SourceDirectory $PerformanceDirectory /E /XD $PayloadDirectory $SourceDirectory\artifacts $SourceDirectory\.git
}
else {
git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $PerformanceDirectory
}

if($MonoDotnet -ne "")
{
$UsingMono = "true"
$MonoDotnetPath = (Join-Path $PayloadDirectory "dotnet-mono")
Move-Item -Path $MonoDotnet -Destination $MonoDotnetPath
}

if ($UseCoreRun) {
$NewCoreRoot = (Join-Path $PayloadDirectory "Core_Root")
Move-Item -Path $CoreRootDirectory -Destination $NewCoreRoot
}
if ($UseBaselineCoreRun) {
$NewBaselineCoreRoot = (Join-Path $PayloadDirectory "Baseline_Core_Root")
Move-Item -Path $BaselineCoreRootDirectory -Destination $NewBaselineCoreRoot
}

$DocsDir = (Join-Path $PerformanceDirectory "docs")
robocopy $DocsDir $WorkItemDirectory

# Set variables that we will need to have in future steps
$ci = $true

. "$PSScriptRoot\..\pipeline-logging-functions.ps1"

# Directories
Write-PipelineSetVariable -Name 'PayloadDirectory' -Value "$PayloadDirectory" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'PerformanceDirectory' -Value "$PerformanceDirectory" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'WorkItemDirectory' -Value "$WorkItemDirectory" -IsMultiJobVariable $false

# Script Arguments
Write-PipelineSetVariable -Name 'Python' -Value "py -3" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'ExtraBenchmarkDotNetArguments' -Value "$ExtraBenchmarkDotNetArguments" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'SetupArguments' -Value "$SetupArguments" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'PerfLabArguments' -Value "$PerfLabArguments" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'BDNCategories' -Value "$RunCategories" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'TargetCsproj' -Value "$Csproj" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'Kind' -Value "$Kind" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'Architecture' -Value "$Architecture" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'UseCoreRun' -Value "$UseCoreRun" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'UseBaselineCoreRun' -Value "$UseBaselineCoreRun" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'RunFromPerfRepo' -Value "$RunFromPerformanceRepo" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'Compare' -Value "$Compare" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'MonoDotnet' -Value "$UsingMono" -IsMultiJobVariable $false

# Helix Arguments
Write-PipelineSetVariable -Name 'Creator' -Value "$Creator" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'Queue' -Value "$Queue" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name 'HelixSourcePrefix' -Value "$HelixSourcePrefix" -IsMultiJobVariable $false
Write-PipelineSetVariable -Name '_BuildConfig' -Value "$Architecture.$Kind.$Framework" -IsMultiJobVariable $false

exit 0
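# With $ci set above, each Write-PipelineSetVariable call surfaces as an Azure Pipelines
# 'task.setvariable' logging command; a rough, illustrative sketch of the shape (values made up):
#   ##vso[task.setvariable variable=Kind;isSecret=False;isOutput=False]micro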
@ -1,289 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source_directory=$BUILD_SOURCESDIRECTORY
|
||||
core_root_directory=
|
||||
baseline_core_root_directory=
|
||||
architecture=x64
|
||||
framework=net5.0
|
||||
compilation_mode=tiered
|
||||
repository=$BUILD_REPOSITORY_NAME
|
||||
branch=$BUILD_SOURCEBRANCH
|
||||
commit_sha=$BUILD_SOURCEVERSION
|
||||
build_number=$BUILD_BUILDNUMBER
|
||||
internal=false
|
||||
compare=false
|
||||
mono_dotnet=
|
||||
kind="micro"
|
||||
llvm=false
|
||||
monointerpreter=false
|
||||
monoaot=false
|
||||
run_categories="Libraries Runtime"
|
||||
csproj="src\benchmarks\micro\MicroBenchmarks.csproj"
|
||||
configurations="CompliationMode=$compilation_mode RunKind=$kind"
|
||||
run_from_perf_repo=false
|
||||
use_core_run=true
|
||||
use_baseline_core_run=true
|
||||
using_mono=false
|
||||
wasm_runtime_loc=
|
||||
using_wasm=false
|
||||
use_latest_dotnet=false
|
||||
|
||||
while (($# > 0)); do
|
||||
lowerI="$(echo $1 | awk '{print tolower($0)}')"
|
||||
case $lowerI in
|
||||
--sourcedirectory)
|
||||
source_directory=$2
|
||||
shift 2
|
||||
;;
|
||||
--corerootdirectory)
|
||||
core_root_directory=$2
|
||||
shift 2
|
||||
;;
|
||||
--baselinecorerootdirectory)
|
||||
baseline_core_root_directory=$2
|
||||
shift 2
|
||||
;;
|
||||
--architecture)
|
||||
architecture=$2
|
||||
shift 2
|
||||
;;
|
||||
--framework)
|
||||
framework=$2
|
||||
shift 2
|
||||
;;
|
||||
--compilationmode)
|
||||
compilation_mode=$2
|
||||
shift 2
|
||||
;;
|
||||
--repository)
|
||||
repository=$2
|
||||
shift 2
|
||||
;;
|
||||
--branch)
|
||||
branch=$2
|
||||
shift 2
|
||||
;;
|
||||
--commitsha)
|
||||
commit_sha=$2
|
||||
shift 2
|
||||
;;
|
||||
--buildnumber)
|
||||
build_number=$2
|
||||
shift 2
|
||||
;;
|
||||
--kind)
|
||||
kind=$2
|
||||
configurations="CompilationMode=$compilation_mode RunKind=$kind"
|
||||
shift 2
|
||||
;;
|
||||
--runcategories)
|
||||
run_categories=$2
|
||||
shift 2
|
||||
;;
|
||||
--csproj)
|
||||
csproj=$2
|
||||
shift 2
|
||||
;;
|
||||
--internal)
|
||||
internal=true
|
||||
shift 1
|
||||
;;
|
||||
--llvm)
|
||||
llvm=true
|
||||
shift 1
|
||||
;;
|
||||
--monointerpreter)
|
||||
monointerpreter=true
|
||||
shift 1
|
||||
;;
|
||||
--monoaot)
|
||||
monoaot=true
|
||||
shift 1
|
||||
;;
|
||||
--monodotnet)
|
||||
mono_dotnet=$2
|
||||
shift 2
|
||||
;;
|
||||
--wasm)
|
||||
wasm_runtime_loc=$2
|
||||
shift 2
|
||||
;;
|
||||
--compare)
|
||||
compare=true
|
||||
shift 1
|
||||
;;
|
||||
--configurations)
|
||||
configurations=$2
|
||||
shift 2
|
||||
;;
|
||||
--latestdotnet)
|
||||
use_latest_dotnet=true
|
||||
shift 1
|
||||
;;
|
||||
*)
|
||||
echo "Common settings:"
|
||||
echo " --corerootdirectory <value> Directory where Core_Root exists, if running perf testing with --corerun"
|
||||
echo " --architecture <value> Architecture of the testing being run"
|
||||
echo " --configurations <value> List of key=value pairs that will be passed to perf testing infrastructure."
|
||||
echo " ex: --configurations \"CompilationMode=Tiered OptimzationLevel=PGO\""
|
||||
echo " --help Print help and exit"
|
||||
echo ""
|
||||
echo "Advanced settings:"
|
||||
echo " --framework <value> The framework to run, if not running in master"
|
||||
echo " --compliationmode <value> The compilation mode if not passing --configurations"
|
||||
echo " --sourcedirectory <value> The directory of the sources. Defaults to env:BUILD_SOURCESDIRECTORY"
|
||||
echo " --repository <value> The name of the repository in the <owner>/<repository name> format. Defaults to env:BUILD_REPOSITORY_NAME"
|
||||
echo " --branch <value> The name of the branch. Defaults to env:BUILD_SOURCEBRANCH"
|
||||
echo " --commitsha <value> The commit sha1 to run against. Defaults to env:BUILD_SOURCEVERSION"
|
||||
echo " --buildnumber <value> The build number currently running. Defaults to env:BUILD_BUILDNUMBER"
|
||||
echo " --csproj The relative path to the benchmark csproj whose tests should be run. Defaults to src\benchmarks\micro\MicroBenchmarks.csproj"
|
||||
echo " --kind <value> Related to csproj. The kind of benchmarks that should be run. Defaults to micro"
|
||||
echo " --runcategories <value> Related to csproj. Categories of benchmarks to run. Defaults to \"coreclr corefx\""
|
||||
echo " --internal If the benchmarks are running as an official job."
|
||||
echo " --monodotnet Pass the path to the mono dotnet for mono performance testing."
|
||||
echo " --wasm Path to the unpacked wasm runtime pack."
|
||||
echo " --latestdotnet --dotnet-versions will not be specified. --dotnet-versions defaults to LKG version in global.json "
|
||||
echo ""
|
||||
exit 0
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ "$repository" == "dotnet/performance" ] || [ "$repository" == "dotnet-performance" ]; then
|
||||
run_from_perf_repo=true
|
||||
fi
|
||||
|
||||
if [ -z "$configurations" ]; then
|
||||
configurations="CompilationMode=$compilation_mode"
|
||||
fi
|
||||
|
||||
if [ -z "$core_root_directory" ]; then
|
||||
use_core_run=false
|
||||
fi
|
||||
|
||||
if [ -z "$baseline_core_root_directory" ]; then
|
||||
use_baseline_core_run=false
|
||||
fi
|
||||
|
||||
payload_directory=$source_directory/Payload
|
||||
performance_directory=$payload_directory/performance
|
||||
workitem_directory=$source_directory/workitem
|
||||
extra_benchmark_dotnet_arguments="--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
|
||||
perflab_arguments=
|
||||
queue=Ubuntu.1804.Amd64.Open
|
||||
creator=$BUILD_DEFINITIONNAME
|
||||
helix_source_prefix="pr"
|
||||
|
||||
if [[ "$compare" == true ]]; then
|
||||
extra_benchmark_dotnet_arguments=
|
||||
perflab_arguments=
|
||||
|
||||
# No open queues for arm64
|
||||
if [[ "$architecture" = "arm64" ]]; then
|
||||
echo "Compare not available for arm64"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
queue=Ubuntu.1804.Amd64.Tiger.Perf.Open
|
||||
fi
|
||||
|
||||
if [[ "$internal" == true ]]; then
|
||||
perflab_arguments="--upload-to-perflab-container"
|
||||
helix_source_prefix="official"
|
||||
creator=
|
||||
extra_benchmark_dotnet_arguments=
|
||||
|
||||
if [[ "$architecture" = "arm64" ]]; then
|
||||
queue=Ubuntu.1804.Arm64.Perf
|
||||
else
|
||||
queue=Ubuntu.1804.Amd64.Tiger.Perf
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "false" ]]; then
|
||||
configurations="$configurations LLVM=$llvm MonoInterpreter=$monointerpreter MonoAOT=$monoaot"
|
||||
extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoMono"
|
||||
fi
|
||||
|
||||
if [[ "$wasm_runtime_loc" != "" ]]; then
|
||||
configurations="CompilationMode=wasm RunKind=$kind"
|
||||
extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoWASM NoMono"
|
||||
fi
|
||||
|
||||
if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "true" ]]; then
|
||||
extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoMono"
|
||||
fi
|
||||
|
||||
common_setup_arguments="--channel master --queue $queue --build-number $build_number --build-configs $configurations --architecture $architecture"
|
||||
setup_arguments="--repository https://github.com/$repository --branch $branch --get-perf-hash --commit-sha $commit_sha $common_setup_arguments"
|
||||
|
||||
|
||||
if [[ "$use_latest_dotnet" = false ]]; then
|
||||
# Get the tools section from the global.json.
|
||||
# This grabs the LKG version number of dotnet and passes it to our scripts
|
||||
dotnet_version=`cat global.json | python3 -c 'import json,sys;obj=json.load(sys.stdin);print(obj["tools"]["dotnet"])'`
|
||||
setup_arguments="--dotnet-versions $dotnet_version $setup_arguments"
|
||||
fi
|
||||
|
||||
if [[ "$run_from_perf_repo" = true ]]; then
|
||||
payload_directory=
|
||||
workitem_directory=$source_directory
|
||||
performance_directory=$workitem_directory
|
||||
setup_arguments="--perf-hash $commit_sha $common_setup_arguments"
|
||||
else
|
||||
git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $performance_directory
|
||||
|
||||
docs_directory=$performance_directory/docs
|
||||
mv $docs_directory $workitem_directory
|
||||
fi
|
||||
|
||||
if [[ "$wasm_runtime_loc" != "" ]]; then
|
||||
using_wasm=true
|
||||
wasm_dotnet_path=$payload_directory/dotnet-wasm
|
||||
mv $wasm_runtime_loc $wasm_dotnet_path
|
||||
extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --wasmMainJS \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm/runtime-test.js --wasmEngine /home/helixbot/.jsvu/v8 --customRuntimePack \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm"
|
||||
fi
|
||||
|
||||
if [[ "$mono_dotnet" != "" ]]; then
|
||||
using_mono=true
|
||||
mono_dotnet_path=$payload_directory/dotnet-mono
|
||||
mv $mono_dotnet $mono_dotnet_path
|
||||
fi
|
||||
|
||||
if [[ "$use_core_run" = true ]]; then
|
||||
new_core_root=$payload_directory/Core_Root
|
||||
mv $core_root_directory $new_core_root
|
||||
fi
|
||||
|
||||
if [[ "$use_baseline_core_run" = true ]]; then
|
||||
new_baseline_core_root=$payload_directory/Baseline_Core_Root
|
||||
mv $baseline_core_root_directory $new_baseline_core_root
|
||||
fi
|
||||
|
||||
ci=true
|
||||
|
||||
_script_dir=$(pwd)/eng/common
|
||||
. "$_script_dir/pipeline-logging-functions.sh"
|
||||
|
||||
# Make sure all of our variables are available for future steps
|
||||
Write-PipelineSetVariable -name "UseCoreRun" -value "$use_core_run" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "UseBaselineCoreRun" -value "$use_baseline_core_run" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "Architecture" -value "$architecture" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "PayloadDirectory" -value "$payload_directory" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "PerformanceDirectory" -value "$performance_directory" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "WorkItemDirectory" -value "$workitem_directory" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "Queue" -value "$queue" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "SetupArguments" -value "$setup_arguments" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "Python" -value "python3" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "PerfLabArguments" -value "$perflab_arguments" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "ExtraBenchmarkDotNetArguments" -value "$extra_benchmark_dotnet_arguments" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "BDNCategories" -value "$run_categories" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "TargetCsproj" -value "$csproj" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "RunFromPerfRepo" -value "$run_from_perf_repo" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "Creator" -value "$creator" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "HelixSourcePrefix" -value "$helix_source_prefix" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "Kind" -value "$kind" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "_BuildConfig" -value "$architecture.$kind.$framework" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "Compare" -value "$compare" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "MonoDotnet" -value "$using_mono" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "WasmDotnet" -value "$using_wasm" -is_multi_job_variable false
|
|
@ -29,14 +29,14 @@ function Write-PipelineTelemetryError {
|
|||
[switch]$AsOutput,
|
||||
[switch]$Force)
|
||||
|
||||
$PSBoundParameters.Remove('Category') | Out-Null
|
||||
$PSBoundParameters.Remove('Category') | Out-Null
|
||||
|
||||
if($Force -Or ((Test-Path variable:ci) -And $ci)) {
|
||||
$Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message"
|
||||
}
|
||||
$PSBoundParameters.Remove('Message') | Out-Null
|
||||
$PSBoundParameters.Add('Message', $Message)
|
||||
Write-PipelineTaskError @PSBoundParameters
|
||||
if ($Force -Or ((Test-Path variable:ci) -And $ci)) {
|
||||
$Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message"
|
||||
}
|
||||
$PSBoundParameters.Remove('Message') | Out-Null
|
||||
$PSBoundParameters.Add('Message', $Message)
|
||||
Write-PipelineTaskError @PSBoundParameters
|
||||
}
|
||||
|
||||
# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
|
||||
|
@ -55,8 +55,8 @@ function Write-PipelineTaskError {
|
|||
[switch]$Force
|
||||
)
|
||||
|
||||
if(!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) {
|
||||
if($Type -eq 'error') {
|
||||
if (!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) {
|
||||
if ($Type -eq 'error') {
|
||||
Write-Host $Message -ForegroundColor Red
|
||||
return
|
||||
}
|
||||
|
@ -66,47 +66,61 @@ function Write-PipelineTaskError {
|
|||
}
|
||||
}
|
||||
|
||||
if(($Type -ne 'error') -and ($Type -ne 'warning')) {
|
||||
if (($Type -ne 'error') -and ($Type -ne 'warning')) {
|
||||
Write-Host $Message
|
||||
return
|
||||
}
|
||||
$PSBoundParameters.Remove('Force') | Out-Null
|
||||
if(-not $PSBoundParameters.ContainsKey('Type')) {
|
||||
if (-not $PSBoundParameters.ContainsKey('Type')) {
|
||||
$PSBoundParameters.Add('Type', 'error')
|
||||
}
|
||||
Write-LogIssue @PSBoundParameters
|
||||
}
|
||||
}
|
||||
|
||||
function Write-PipelineSetVariable {
|
||||
function Write-PipelineSetVariable {
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$Name,
|
||||
[string]$Value,
|
||||
[switch]$Secret,
|
||||
[switch]$AsOutput,
|
||||
[bool]$IsMultiJobVariable=$true)
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$Name,
|
||||
[string]$Value,
|
||||
[switch]$Secret,
|
||||
[switch]$AsOutput,
|
||||
[bool]$IsMultiJobVariable = $true)
|
||||
|
||||
if((Test-Path variable:ci) -And $ci) {
|
||||
if ((Test-Path variable:ci) -And $ci) {
|
||||
Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties @{
|
||||
'variable' = $Name
|
||||
'isSecret' = $Secret
|
||||
'isOutput' = $IsMultiJobVariable
|
||||
'variable' = $Name
|
||||
'isSecret' = $Secret
|
||||
'isOutput' = $IsMultiJobVariable
|
||||
} -AsOutput:$AsOutput
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function Write-PipelinePrependPath {
|
||||
function Write-PipelinePrependPath {
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory=$true)]
|
||||
[string]$Path,
|
||||
[switch]$AsOutput)
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$Path,
|
||||
[switch]$AsOutput)
|
||||
|
||||
if((Test-Path variable:ci) -And $ci) {
|
||||
if ((Test-Path variable:ci) -And $ci) {
|
||||
Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function Write-PipelineSetResult {
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[ValidateSet("Succeeded", "SucceededWithIssues", "Failed", "Cancelled", "Skipped")]
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$Result,
|
||||
[string]$Message)
|
||||
if ((Test-Path variable:ci) -And $ci) {
|
||||
Write-LoggingCommand -Area 'task' -Event 'complete' -Data $Message -Properties @{
|
||||
'result' = $Result
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
<########################################
|
||||
# Private functions.
|
||||
|
@ -123,7 +137,8 @@ function Format-LoggingCommandData {
|
|||
foreach ($mapping in $script:loggingCommandEscapeMappings) {
|
||||
$Value = $Value.Replace($mapping.Token, $mapping.Replacement)
|
||||
}
|
||||
} else {
|
||||
}
|
||||
else {
|
||||
for ($i = $script:loggingCommandEscapeMappings.Length - 1 ; $i -ge 0 ; $i--) {
|
||||
$mapping = $script:loggingCommandEscapeMappings[$i]
|
||||
$Value = $Value.Replace($mapping.Replacement, $mapping.Token)
|
||||
|
@ -156,7 +171,8 @@ function Format-LoggingCommand {
|
|||
if ($first) {
|
||||
$null = $sb.Append(' ')
|
||||
$first = $false
|
||||
} else {
|
||||
}
|
||||
else {
|
||||
$null = $sb.Append(';')
|
||||
}
|
||||
|
||||
|
@ -193,7 +209,8 @@ function Write-LoggingCommand {
|
|||
$command = Format-LoggingCommand -Area $Area -Event $Event -Data $Data -Properties $Properties
|
||||
if ($AsOutput) {
|
||||
$command
|
||||
} else {
|
||||
}
|
||||
else {
|
||||
Write-Host $command
|
||||
}
|
||||
}
|
||||
|
@ -212,12 +229,12 @@ function Write-LogIssue {
|
|||
[switch]$AsOutput)
|
||||
|
||||
$command = Format-LoggingCommand -Area 'task' -Event 'logissue' -Data $Message -Properties @{
|
||||
'type' = $Type
|
||||
'code' = $ErrCode
|
||||
'sourcepath' = $SourcePath
|
||||
'linenumber' = $LineNumber
|
||||
'columnnumber' = $ColumnNumber
|
||||
}
|
||||
'type' = $Type
|
||||
'code' = $ErrCode
|
||||
'sourcepath' = $SourcePath
|
||||
'linenumber' = $LineNumber
|
||||
'columnnumber' = $ColumnNumber
|
||||
}
|
||||
if ($AsOutput) {
|
||||
return $command
|
||||
}
|
||||
|
@ -229,7 +246,8 @@ function Write-LogIssue {
|
|||
$foregroundColor = [System.ConsoleColor]::Red
|
||||
$backgroundColor = [System.ConsoleColor]::Black
|
||||
}
|
||||
} else {
|
||||
}
|
||||
else {
|
||||
$foregroundColor = $host.PrivateData.WarningForegroundColor
|
||||
$backgroundColor = $host.PrivateData.WarningBackgroundColor
|
||||
if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
|
||||
|
|
|
@@ -6,7 +6,7 @@ function Write-PipelineTelemetryError {
local function_args=()
local message=''
while [[ $# -gt 0 ]]; do
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
-category|-c)
telemetry_category=$2
@@ -48,7 +48,7 @@ function Write-PipelineTaskError {
local force=false

while [[ $# -gt 0 ]]; do
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
-type|-t)
message_type=$2
@@ -122,7 +122,7 @@ function Write-PipelineSetVariable {
local is_multi_job_variable=true

while [[ $# -gt 0 ]]; do
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
-name|-n)
name=$2
@@ -164,7 +164,7 @@ function Write-PipelinePrependPath {
local prepend_path=''

while [[ $# -gt 0 ]]; do
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
-path|-p)
prepend_path=$2
@@ -179,4 +179,28 @@ function Write-PipelinePrependPath {
if [[ "$ci" == true ]]; then
echo "##vso[task.prependpath]$prepend_path"
fi
}
}

function Write-PipelineSetResult {
local result=''
local message=''

while [[ $# -gt 0 ]]; do
opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
-result|-r)
result=$2
shift
;;
-message|-m)
message=$2
shift
;;
esac
shift
done

if [[ "$ci" == true ]]; then
echo "##vso[task.complete result=$result;]$message"
fi
}
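# Illustrative usage of the new Write-PipelineSetResult helper above (message text is made up);
# with $ci set to true it emits the logging command shown in the function body:
#   Write-PipelineSetResult -result 'SucceededWithIssues' -message 'completed with warnings'
#   -> ##vso[task.complete result=SucceededWithIssues;]completed with warnings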
@@ -69,9 +69,9 @@ function Trigger-Subscription([string]$SubscriptionId) {

function Validate-MaestroVars {
try {
Get-Variable MaestroApiEndPoint -Scope Global | Out-Null
Get-Variable MaestroApiVersion -Scope Global | Out-Null
Get-Variable MaestroApiAccessToken -Scope Global | Out-Null
Get-Variable MaestroApiEndPoint | Out-Null
Get-Variable MaestroApiVersion | Out-Null
Get-Variable MaestroApiAccessToken | Out-Null

if (!($MaestroApiEndPoint -Match '^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$')) {
Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'"
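# For reference, the endpoint pattern above accepts exactly these two hosts (over http or https):
#   https://maestro-int.westus2.cloudapp.azure.com
#   https://maestro-prod.westus2.cloudapp.azure.com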
@@ -5,56 +5,36 @@
[Parameter(Mandatory=$true)][string] $MaestroToken,
[Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
[Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
[Parameter(Mandatory=$false)][string] $EnableSourceLinkValidation,
[Parameter(Mandatory=$false)][string] $EnableSigningValidation,
[Parameter(Mandatory=$false)][string] $EnableNugetValidation,
[Parameter(Mandatory=$false)][string] $PublishInstallersAndChecksums,
[Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
[Parameter(Mandatory=$false)][string] $SigningValidationAdditionalParameters
[Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters
)

try {
. $PSScriptRoot\post-build-utils.ps1
# Hard coding darc version till the next arcade-services roll out, cos this version has required API changes for darc add-build-to-channel
$darc = Get-Darc "1.1.0-beta.20418.1"

$darc = Get-Darc

$optionalParams = [System.Collections.ArrayList]::new()

if ("" -ne $ArtifactsPublishingAdditionalParameters) {
$optionalParams.Add("artifact-publishing-parameters") | Out-Null
$optionalParams.Add("--artifact-publishing-parameters") | Out-Null
$optionalParams.Add($ArtifactsPublishingAdditionalParameters) | Out-Null
}

if ("" -ne $SymbolPublishingAdditionalParameters) {
$optionalParams.Add("--symbol-publishing-parameters") | Out-Null
$optionalParams.Add($SymbolPublishingAdditionalParameters) | Out-Null
}

if ("false" -eq $WaitPublishingFinish) {
$optionalParams.Add("--no-wait") | Out-Null
}

if ("false" -ne $PublishInstallersAndChecksums) {
$optionalParams.Add("--publish-installers-and-checksums") | Out-Null
}

if ("true" -eq $EnableNugetValidation) {
$optionalParams.Add("--validate-nuget") | Out-Null
}

if ("true" -eq $EnableSourceLinkValidation) {
$optionalParams.Add("--validate-sourcelinkchecksums") | Out-Null
}

if ("true" -eq $EnableSigningValidation) {
$optionalParams.Add("--validate-signingchecksums") | Out-Null

if ("" -ne $SigningValidationAdditionalParameters) {
$optionalParams.Add("--signing-validation-parameters") | Out-Null
$optionalParams.Add($SigningValidationAdditionalParameters) | Out-Null
}
}

& $darc add-build-to-channel `
--id $buildId `
--publishing-infra-version $PublishingInfraVersion `
--default-channels `
--source-branch master `
--source-branch main `
--azdev-pat $AzdoToken `
--bar-uri $MaestroApiEndPoint `
--password $MaestroToken `
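# Illustrative reading of the flag mapping above: calling this script with
# -WaitPublishingFinish 'false' and -EnableNugetValidation 'true' (all other optional switches
# left empty) leaves $optionalParams holding:
#   --no-wait --publish-installers-and-checksums --validate-nuget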
|
|
@ -14,11 +14,19 @@ param(
|
|||
$global:RepoFiles = @{}
|
||||
|
||||
# Maximum number of jobs to run in parallel
|
||||
$MaxParallelJobs = 6
|
||||
$MaxParallelJobs = 16
|
||||
|
||||
$MaxRetries = 5
|
||||
$RetryWaitTimeInSeconds = 30
|
||||
|
||||
# Wait time between check for system load
|
||||
$SecondsBetweenLoadChecks = 10
|
||||
|
||||
if (!$InputPath -or !(Test-Path $InputPath)){
|
||||
Write-Host "No files to validate."
|
||||
ExitWithExitCode 0
|
||||
}
|
||||
|
||||
$ValidatePackage = {
|
||||
param(
|
||||
[string] $PackagePath # Full path to a Symbols.NuGet package
|
||||
|
@ -29,7 +37,10 @@ $ValidatePackage = {
|
|||
# Ensure input file exist
|
||||
if (!(Test-Path $PackagePath)) {
|
||||
Write-Host "Input file does not exist: $PackagePath"
|
||||
return 1
|
||||
return [pscustomobject]@{
|
||||
result = 1
|
||||
packagePath = $PackagePath
|
||||
}
|
||||
}
|
||||
|
||||
# Extensions for which we'll look for SourceLink information
|
||||
|
@ -59,7 +70,10 @@ $ValidatePackage = {
|
|||
|
||||
# We ignore resource DLLs
|
||||
if ($FileName.EndsWith('.resources.dll')) {
|
||||
return
|
||||
return [pscustomobject]@{
|
||||
result = 0
|
||||
packagePath = $PackagePath
|
||||
}
|
||||
}
|
||||
|
||||
[System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
|
||||
|
@ -91,36 +105,59 @@ $ValidatePackage = {
|
|||
$Status = 200
|
||||
$Cache = $using:RepoFiles
|
||||
|
||||
if ( !($Cache.ContainsKey($FilePath)) ) {
|
||||
try {
|
||||
$Uri = $Link -as [System.URI]
|
||||
|
||||
# Only GitHub links are valid
|
||||
if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
|
||||
$Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
|
||||
$attempts = 0
|
||||
|
||||
while ($attempts -lt $using:MaxRetries) {
|
||||
if ( !($Cache.ContainsKey($FilePath)) ) {
|
||||
try {
|
||||
$Uri = $Link -as [System.URI]
|
||||
|
||||
if ($Link -match "submodules") {
|
||||
# Skip submodule links until sourcelink properly handles submodules
|
||||
$Status = 200
|
||||
}
|
||||
elseif ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
|
||||
# Only GitHub links are valid
|
||||
$Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
|
||||
}
|
||||
else {
|
||||
# If it's not a github link, we want to break out of the loop and not retry.
|
||||
$Status = 0
|
||||
$attempts = $using:MaxRetries
|
||||
}
|
||||
}
|
||||
else {
|
||||
catch {
|
||||
Write-Host $_
|
||||
$Status = 0
|
||||
}
|
||||
}
|
||||
catch {
|
||||
write-host $_
|
||||
$Status = 0
|
||||
}
|
||||
}
|
||||
|
||||
if ($Status -ne 200) {
|
||||
if ($NumFailedLinks -eq 0) {
|
||||
if ($FailedFiles.Value -eq 0) {
|
||||
Write-Host
|
||||
if ($Status -ne 200) {
|
||||
$attempts++
|
||||
|
||||
if ($attempts -lt $using:MaxRetries)
|
||||
{
|
||||
$attemptsLeft = $using:MaxRetries - $attempts
|
||||
Write-Warning "Download failed, $attemptsLeft attempts remaining, will retry in $using:RetryWaitTimeInSeconds seconds"
|
||||
Start-Sleep -Seconds $using:RetryWaitTimeInSeconds
|
||||
}
|
||||
else {
|
||||
if ($NumFailedLinks -eq 0) {
|
||||
if ($FailedFiles.Value -eq 0) {
|
||||
Write-Host
|
||||
}
|
||||
|
||||
Write-Host "`tFile $RealPath has broken links:"
|
||||
}
|
||||
|
||||
Write-Host "`t`tFailed to retrieve $Link"
|
||||
|
||||
$NumFailedLinks++
|
||||
}
|
||||
|
||||
Write-Host "`tFile $RealPath has broken links:"
|
||||
}
|
||||
|
||||
Write-Host "`t`tFailed to retrieve $Link"
|
||||
|
||||
$NumFailedLinks++
|
||||
else {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -136,7 +173,7 @@ $ValidatePackage = {
|
|||
}
|
||||
}
|
||||
catch {
|
||||
|
||||
Write-Host $_
|
||||
}
|
||||
finally {
|
||||
$zip.Dispose()
|
||||
|
@ -161,9 +198,12 @@ $ValidatePackage = {
|
|||
function CheckJobResult(
|
||||
$result,
|
||||
$packagePath,
|
||||
[ref]$ValidationFailures) {
|
||||
if ($jobResult.result -ne '0') {
|
||||
Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links."
|
||||
[ref]$ValidationFailures,
|
||||
[switch]$logErrors) {
|
||||
if ($result -ne '0') {
|
||||
if ($logErrors) {
|
||||
Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links."
|
||||
}
|
||||
$ValidationFailures.Value++
|
||||
}
|
||||
}
|
||||
|
@ -217,6 +257,7 @@ function ValidateSourceLinkLinks {
|
|||
# Process each NuGet package in parallel
|
||||
Get-ChildItem "$InputPath\*.symbols.nupkg" |
|
||||
ForEach-Object {
|
||||
Write-Host "Starting $($_.FullName)"
|
||||
Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null
|
||||
$NumJobs = @(Get-Job -State 'Running').Count
|
||||
|
||||
|
@ -228,16 +269,14 @@ function ValidateSourceLinkLinks {
|
|||
|
||||
foreach ($Job in @(Get-Job -State 'Completed')) {
|
||||
$jobResult = Wait-Job -Id $Job.Id | Receive-Job
|
||||
CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures)
|
||||
CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) -LogErrors
|
||||
Remove-Job -Id $Job.Id
|
||||
}
|
||||
}
|
||||
|
||||
foreach ($Job in @(Get-Job)) {
|
||||
$jobResult = Wait-Job -Id $Job.Id | Receive-Job
|
||||
if ($jobResult -ne '0') {
|
||||
$ValidationFailures++
|
||||
}
|
||||
CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures)
|
||||
Remove-Job -Id $Job.Id
|
||||
}
|
||||
if ($ValidationFailures -gt 0) {
|
||||
|
@ -266,6 +305,10 @@ function InstallSourcelinkCli {
|
|||
try {
|
||||
InstallSourcelinkCli
|
||||
|
||||
foreach ($Job in @(Get-Job)) {
|
||||
Remove-Job -Id $Job.Id
|
||||
}
|
||||
|
||||
ValidateSourceLinkLinks
|
||||
}
|
||||
catch {
|
||||
|
|
|
@ -1,30 +1,65 @@
|
|||
param(
|
||||
[Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
|
||||
[Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
|
||||
[Parameter(Mandatory=$true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use
|
||||
[Parameter(Mandatory=$false)][switch] $ContinueOnError, # If we should keep checking symbols after an error
|
||||
[Parameter(Mandatory=$false)][switch] $Clean # Clean extracted symbols directory after checking symbols
|
||||
[Parameter(Mandatory = $true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
|
||||
[Parameter(Mandatory = $true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
|
||||
[Parameter(Mandatory = $true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use
|
||||
[Parameter(Mandatory = $false)][switch] $CheckForWindowsPdbs, # If we should check for the existence of windows pdbs in addition to portable PDBs
|
||||
[Parameter(Mandatory = $false)][switch] $ContinueOnError, # If we should keep checking symbols after an error
|
||||
[Parameter(Mandatory = $false)][switch] $Clean, # Clean extracted symbols directory after checking symbols
|
||||
[Parameter(Mandatory = $false)][string] $SymbolExclusionFile # Exclude the symbols in the file from publishing to symbol server
|
||||
)
|
||||
|
||||
. $PSScriptRoot\..\tools.ps1
|
||||
# Maximum number of jobs to run in parallel
|
||||
$MaxParallelJobs = 6
|
||||
$MaxParallelJobs = 16
|
||||
|
||||
# Max number of retries
|
||||
$MaxRetry = 5
|
||||
|
||||
# Wait time between check for system load
|
||||
$SecondsBetweenLoadChecks = 10
|
||||
|
||||
# Set error codes
|
||||
Set-Variable -Name "ERROR_BADEXTRACT" -Option Constant -Value -1
|
||||
Set-Variable -Name "ERROR_FILEDOESNOTEXIST" -Option Constant -Value -2
|
||||
|
||||
$WindowsPdbVerificationParam = ""
|
||||
if ($CheckForWindowsPdbs) {
|
||||
$WindowsPdbVerificationParam = "--windows-pdbs"
|
||||
}
|
||||
|
||||
$ExclusionSet = New-Object System.Collections.Generic.HashSet[string];
|
||||
|
||||
if (!$InputPath -or !(Test-Path $InputPath)){
|
||||
Write-Host "No symbols to validate."
|
||||
ExitWithExitCode 0
|
||||
}
|
||||
|
||||
#Check if the path exists
|
||||
if ($SymbolExclusionFile -and (Test-Path $SymbolExclusionFile)){
|
||||
[string[]]$Exclusions = Get-Content "$SymbolExclusionFile"
|
||||
$Exclusions | foreach { if($_ -and $_.Trim()){$ExclusionSet.Add($_)} }
|
||||
}
|
||||
else{
|
||||
Write-Host "Symbol Exclusion file does not exists. No symbols to exclude."
|
||||
}
|
||||
|
||||
$CountMissingSymbols = {
|
||||
param(
|
||||
[string] $PackagePath # Path to a NuGet package
|
||||
[string] $PackagePath, # Path to a NuGet package
|
||||
[string] $WindowsPdbVerificationParam # If we should check for the existence of windows pdbs in addition to portable PDBs
|
||||
)
|
||||
|
||||
. $using:PSScriptRoot\..\tools.ps1
|
||||
|
||||
Add-Type -AssemblyName System.IO.Compression.FileSystem
|
||||
|
||||
Write-Host "Validating $PackagePath "
|
||||
|
||||
# Ensure input file exist
|
||||
if (!(Test-Path $PackagePath)) {
|
||||
Write-PipelineTaskError "Input file does not exist: $PackagePath"
|
||||
return -2
|
||||
return [pscustomobject]@{
|
||||
result = $using:ERROR_FILEDOESNOTEXIST
|
||||
packagePath = $PackagePath
|
||||
}
|
||||
}
|
||||
|
||||
# Extensions for which we'll look for symbols
|
||||
|
@ -45,24 +80,25 @@ $CountMissingSymbols = {
|
|||
Write-Host "Something went wrong extracting $PackagePath"
|
||||
Write-Host $_
|
||||
return [pscustomobject]@{
|
||||
result = -1
|
||||
result = $using:ERROR_BADEXTRACT
|
||||
packagePath = $PackagePath
|
||||
}
|
||||
}
|
||||
|
||||
Get-ChildItem -Recurse $ExtractPath |
|
||||
Where-Object {$RelevantExtensions -contains $_.Extension} |
|
||||
ForEach-Object {
|
||||
$FileName = $_.FullName
|
||||
if ($FileName -Match '\\ref\\') {
|
||||
Write-Host "`t Ignoring reference assembly file " $FileName
|
||||
return
|
||||
}
|
||||
Where-Object { $RelevantExtensions -contains $_.Extension } |
|
||||
ForEach-Object {
|
||||
$FileName = $_.FullName
|
||||
if ($FileName -Match '\\ref\\') {
|
||||
Write-Host "`t Ignoring reference assembly file " $FileName
|
||||
return
|
||||
}
|
||||
|
||||
$FirstMatchingSymbolDescriptionOrDefault = {
|
||||
$FirstMatchingSymbolDescriptionOrDefault = {
|
||||
param(
|
||||
[string] $FullPath, # Full path to the module that has to be checked
|
||||
[string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
|
||||
[string] $FullPath, # Full path to the module that has to be checked
|
||||
[string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
|
||||
[string] $WindowsPdbVerificationParam, # Parameter to pass to potential check for windows-pdbs.
|
||||
[string] $SymbolsPath
|
||||
)
|
||||
|
||||
|
@ -87,34 +123,60 @@ $CountMissingSymbols = {
|
|||
|
||||
# DWARF file for a .dylib
|
||||
$DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf')
|
||||
|
||||
|
||||
$dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools"
|
||||
$dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe"
|
||||
|
||||
& $dotnetSymbolExe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
|
||||
$totalRetries = 0
|
||||
|
||||
if (Test-Path $PdbPath) {
|
||||
return 'PDB'
|
||||
}
|
||||
elseif (Test-Path $NGenPdb) {
|
||||
return 'NGen PDB'
|
||||
}
|
||||
elseif (Test-Path $SODbg) {
|
||||
return 'DBG for SO'
|
||||
}
|
||||
elseif (Test-Path $DylibDwarf) {
|
||||
return 'Dwarf for Dylib'
|
||||
}
|
||||
elseif (Test-Path $SymbolPath) {
|
||||
return 'Module'
|
||||
}
|
||||
else {
|
||||
return $null
|
||||
while ($totalRetries -lt $using:MaxRetry) {
|
||||
|
||||
# Save the output and get diagnostic output
|
||||
$output = & $dotnetSymbolExe --symbols --modules $WindowsPdbVerificationParam $TargetServerParam $FullPath -o $SymbolsPath --diagnostics | Out-String
|
||||
|
||||
if ((Test-Path $PdbPath) -and (Test-path $SymbolPath)) {
|
||||
return 'Module and PDB for Module'
|
||||
}
|
||||
elseif ((Test-Path $NGenPdb) -and (Test-Path $PdbPath) -and (Test-Path $SymbolPath)) {
|
||||
return 'Dll, PDB and NGen PDB'
|
||||
}
|
||||
elseif ((Test-Path $SODbg) -and (Test-Path $SymbolPath)) {
|
||||
return 'So and DBG for SO'
|
||||
}
|
||||
elseif ((Test-Path $DylibDwarf) -and (Test-Path $SymbolPath)) {
|
||||
return 'Dylib and Dwarf for Dylib'
|
||||
}
|
||||
elseif (Test-Path $SymbolPath) {
|
||||
return 'Module'
|
||||
}
|
||||
else
|
||||
{
|
||||
$totalRetries++
|
||||
}
|
||||
}
|
||||
|
||||
return $null
|
||||
}
|
||||
|
||||
$SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--microsoft-symbol-server' $SymbolsPath
|
||||
$SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--internal-server' $SymbolsPath
|
||||
$FileRelativePath = $FileName.Replace("$ExtractPath\", "")
|
||||
if (($($using:ExclusionSet) -ne $null) -and ($($using:ExclusionSet).Contains($FileRelativePath) -or ($($using:ExclusionSet).Contains($FileRelativePath.Replace("\", "/"))))){
|
||||
Write-Host "Skipping $FileName from symbol validation"
|
||||
}
|
||||
|
||||
else {
|
||||
$FileGuid = New-Guid
|
||||
$ExpandedSymbolsPath = Join-Path -Path $SymbolsPath -ChildPath $FileGuid
|
||||
|
||||
$SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault `
|
||||
-FullPath $FileName `
|
||||
-TargetServerParam '--microsoft-symbol-server' `
|
||||
-SymbolsPath "$ExpandedSymbolsPath-msdl" `
|
||||
-WindowsPdbVerificationParam $WindowsPdbVerificationParam
|
||||
$SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault `
|
||||
-FullPath $FileName `
|
||||
-TargetServerParam '--internal-server' `
|
||||
-SymbolsPath "$ExpandedSymbolsPath-symweb" `
|
||||
-WindowsPdbVerificationParam $WindowsPdbVerificationParam
|
||||
|
||||
Write-Host -NoNewLine "`t Checking file " $FileName "... "
|
||||
|
||||
|
@ -137,6 +199,7 @@ $CountMissingSymbols = {
|
|||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ($using:Clean) {
|
||||
Remove-Item $ExtractPath -Recurse -Force
|
||||
|
@ -145,24 +208,31 @@ $CountMissingSymbols = {
|
|||
Pop-Location
|
||||
|
||||
return [pscustomobject]@{
|
||||
result = $MissingSymbols
|
||||
packagePath = $PackagePath
|
||||
}
|
||||
result = $MissingSymbols
|
||||
packagePath = $PackagePath
|
||||
}
|
||||
}
|
||||
|
||||
function CheckJobResult(
|
||||
$result,
|
||||
$packagePath,
|
||||
[ref]$DupedSymbols,
|
||||
[ref]$TotalFailures) {
|
||||
if ($result -eq '-1') {
|
||||
$result,
|
||||
$packagePath,
|
||||
[ref]$DupedSymbols,
|
||||
[ref]$TotalFailures) {
|
||||
if ($result -eq $ERROR_BADEXTRACT) {
|
||||
Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath has duplicated symbol files"
|
||||
$DupedSymbols.Value++
|
||||
}
|
||||
elseif ($jobResult.result -ne '0') {
|
||||
elseif ($result -eq $ERROR_FILEDOESNOTEXIST) {
|
||||
Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath does not exist"
|
||||
$TotalFailures.Value++
|
||||
}
|
||||
elseif ($result -gt '0') {
|
||||
Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $result modules in the package $packagePath"
|
||||
$TotalFailures.Value++
|
||||
}
|
||||
else {
|
||||
Write-Host "All symbols verified for package $packagePath"
|
||||
}
|
||||
}
|
||||
|
||||
function CheckSymbolsAvailable {
|
||||
|
@ -170,6 +240,7 @@ function CheckSymbolsAvailable {
|
|||
Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
|
||||
}
|
||||
|
||||
$TotalPackages = 0
|
||||
$TotalFailures = 0
|
||||
$DupedSymbols = 0
|
||||
|
||||
|
@ -192,9 +263,9 @@ function CheckSymbolsAvailable {
|
|||
return
|
||||
}
|
||||
|
||||
Write-Host "Validating $FileName "
|
||||
$TotalPackages++
|
||||
|
||||
Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList $FullName | Out-Null
|
||||
Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList @($FullName,$WindowsPdbVerificationParam) | Out-Null
|
||||
|
||||
$NumJobs = @(Get-Job -State 'Running').Count
|
||||
|
||||
|
@ -219,11 +290,11 @@ function CheckSymbolsAvailable {
|
|||
|
||||
if ($TotalFailures -gt 0 -or $DupedSymbols -gt 0) {
|
||||
if ($TotalFailures -gt 0) {
|
||||
Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures packages"
|
||||
Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures/$TotalPackages packages"
|
||||
}
|
||||
|
||||
if ($DupedSymbols -gt 0) {
|
||||
Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols packages had duplicated symbol files"
|
||||
Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols/$TotalPackages packages had duplicated symbol files and could not be extracted"
|
||||
}
|
||||
|
||||
ExitWithExitCode 1
|
||||
|
|
|
@@ -0,0 +1,45 @@

Param(
[Parameter(Mandatory=$true)][int] $buildId,
[Parameter(Mandatory=$true)][string] $azdoOrgUri,
[Parameter(Mandatory=$true)][string] $azdoProject,
[Parameter(Mandatory=$true)][string] $token
)

$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0

function Get-AzDOHeaders(
[string] $token)
{
$base64AuthInfo = [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes(":${token}"))
$headers = @{"Authorization"="Basic $base64AuthInfo"}
return $headers
}

function Update-BuildRetention(
[string] $azdoOrgUri,
[string] $azdoProject,
[int] $buildId,
[string] $token)
{
$headers = Get-AzDOHeaders -token $token
$requestBody = "{
`"keepForever`": `"true`"
}"

$requestUri = "${azdoOrgUri}/${azdoProject}/_apis/build/builds/${buildId}?api-version=6.0"
Write-Host "Attempting to retain build using the following URI: ${requestUri} ..."

try {
Invoke-RestMethod -Uri $requestUri -Method Patch -Body $requestBody -Header $headers -contentType "application/json"
Write-Host "Updated retention settings for build ${buildId}."
}
catch {
Write-Error "Failed to update retention settings for build: $($_.Exception.Response.StatusDescription)"
exit 1
}
}

Update-BuildRetention -azdoOrgUri $azdoOrgUri -azdoProject $azdoProject -buildId $buildId -token $token
exit 0
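# For reference, the retention call above boils down to a single REST request of this shape
# (placeholders as in the script; api-version 6.0 as hard-coded above):
#   PATCH ${azdoOrgUri}/${azdoProject}/_apis/build/builds/${buildId}?api-version=6.0
#   Body: { "keepForever": "true" }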
@@ -34,7 +34,7 @@ function Print-Usage() {
function Build([string]$target) {
$logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
$log = Join-Path $LogDir "$task$logSuffix.binlog"
$outputPath = Join-Path $ToolsetDir "$task\\"
$outputPath = Join-Path $ToolsetDir "$task\"

MSBuild $taskProject `
/bl:$log `
@@ -53,7 +53,7 @@ try {
}

if ($task -eq "") {
Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task <value>'" -ForegroundColor Red
Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task <value>'"
Print-Usage
ExitWithExitCode 1
}
@@ -64,7 +64,7 @@ try {
$GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
}
if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
$GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "16.5.0-alpha" -MemberType NoteProperty
$GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.1.0" -MemberType NoteProperty
}
if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
$xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
@@ -78,7 +78,7 @@ try {

$taskProject = GetSdkTaskProject $task
if (!(Test-Path $taskProject)) {
Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task" -ForegroundColor Red
Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task"
ExitWithExitCode 1
}
|
||||
|
|
|
@@ -0,0 +1,116 @@
Param(
[string] $GuardianCliLocation,
[string] $WorkingDirectory,
[string] $TargetDirectory,
[string] $GdnFolder,
# The list of Guardian tools to configure. For each object in the array:
# - If the item is a [hashtable], it must contain these entries:
# - Name = The tool name as Guardian knows it.
# - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique
# among all tool entries with the same Name.
# - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")'
# - If the item is a [string] $v, it is treated as '@{ Name="$v" }'
[object[]] $ToolsList,
[string] $GuardianLoggerLevel='Standard',
# Optional: Additional params to add to any tool using CredScan.
[string[]] $CrScanAdditionalRunConfigParams,
# Optional: Additional params to add to any tool using PoliCheck.
[string[]] $PoliCheckAdditionalRunConfigParams,
# Optional: Additional params to add to any tool using CodeQL/Semmle.
[string[]] $CodeQLAdditionalRunConfigParams
)

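# Illustrative only: a -ToolsList value in the two shapes the comment above describes
# (the tool names appear later in this script; the path in Args is made up):
$exampleToolsList = @(
    'credscan',
    @{ Name = 'policheck'; Scenario = 'artifacts'; Args = @('"Target < C:\temp\artifacts"') }
)
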
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
$disableConfigureToolsetImport = $true
$global:LASTEXITCODE = 0

try {
# `tools.ps1` checks $ci to perform some actions. Since the SDL
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
. $PSScriptRoot\..\tools.ps1

# Normalize tools list: all in [hashtable] form with defined values for each key.
$ToolsList = $ToolsList |
ForEach-Object {
if ($_ -is [string]) {
$_ = @{ Name = $_ }
}

if (-not ($_['Scenario'])) { $_.Scenario = "" }
if (-not ($_['Args'])) { $_.Args = @() }
$_
}

Write-Host "List of tools to configure:"
$ToolsList | ForEach-Object { $_ | Out-String | Write-Host }

# We store config files in the r directory of .gdn
$gdnConfigPath = Join-Path $GdnFolder 'r'
$ValidPath = Test-Path $GuardianCliLocation

if ($ValidPath -eq $False)
{
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
ExitWithExitCode 1
}

foreach ($tool in $ToolsList) {
# Put together the name and scenario to make a unique key.
$toolConfigName = $tool.Name
if ($tool.Scenario) {
$toolConfigName += "_" + $tool.Scenario
}

Write-Host "=== Configuring $toolConfigName..."

$gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig"

# For some tools, add default and automatic args.
if ($tool.Name -eq 'credscan') {
if ($targetDirectory) {
$tool.Args += "`"TargetDirectory < $TargetDirectory`""
}
$tool.Args += "`"OutputType < pre`""
$tool.Args += $CrScanAdditionalRunConfigParams
} elseif ($tool.Name -eq 'policheck') {
if ($targetDirectory) {
$tool.Args += "`"Target < $TargetDirectory`""
}
$tool.Args += $PoliCheckAdditionalRunConfigParams
} elseif ($tool.Name -eq 'semmle' -or $tool.Name -eq 'codeql') {
if ($targetDirectory) {
$tool.Args += "`"SourceCodeDirectory < $TargetDirectory`""
}
$tool.Args += $CodeQLAdditionalRunConfigParams
}

# Create variable pointing to the args array directly so we can use splat syntax later.
$toolArgs = $tool.Args

# Configure the tool. If args array is provided or the current tool has some default arguments
# defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}",
# one per parameter. Doc page for "guardian configure":
# https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure
Exec-BlockVerbosely {
& $GuardianCliLocation configure `
--working-directory $WorkingDirectory `
--tool $tool.Name `
--output-path $gdnConfigFile `
--logger-level $GuardianLoggerLevel `
--noninteractive `
--force `
$(if ($toolArgs) { "--args" }) @toolArgs
Exit-IfNZEC "Sdl"
}

Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile"
}
}
catch {
Write-Host $_.ScriptStackTrace
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
ExitWithExitCode 1
}
@ -7,8 +7,17 @@ Param(
|
|||
[string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located
|
||||
[string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located
|
||||
[string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault
|
||||
[string[]] $SourceToolsList, # Optional: list of SDL tools to run on source code
|
||||
[string[]] $ArtifactToolsList, # Optional: list of SDL tools to run on built artifacts
|
||||
|
||||
# Optional: list of SDL tools to run on source code. See 'configure-sdl-tool.ps1' for tools list
|
||||
# format.
|
||||
[object[]] $SourceToolsList,
|
||||
# Optional: list of SDL tools to run on built artifacts. See 'configure-sdl-tool.ps1' for tools
|
||||
# list format.
|
||||
[object[]] $ArtifactToolsList,
|
||||
# Optional: list of SDL tools to run without automatically specifying a target directory. See
|
||||
# 'configure-sdl-tool.ps1' for tools list format.
|
||||
[object[]] $CustomToolsList,
|
||||
|
||||
[bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
|
||||
[string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
|
||||
[string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
|
||||
|
@ -25,6 +34,7 @@ Param(
|
|||
[string] $GuardianLoggerLevel='Standard', # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error
|
||||
[string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
|
||||
[string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
|
||||
[string[]] $CodeQLAdditionalRunConfigParams, # Optional: Additional Params to custom build a Semmle/CodeQL run config in the format @("xyz < abc","sdf < 1")
|
||||
[bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run
|
||||
)
|
||||
|
||||
|
@ -32,7 +42,7 @@ try {
|
|||
$ErrorActionPreference = 'Stop'
|
||||
Set-StrictMode -Version 2.0
|
||||
$disableConfigureToolsetImport = $true
|
||||
$LASTEXITCODE = 0
|
||||
$global:LASTEXITCODE = 0
|
||||
|
||||
# `tools.ps1` checks $ci to perform some actions. Since the SDL
|
||||
# scripts don't necessarily execute in the same agent that run the
|
||||
|
@ -63,13 +73,16 @@ try {
|
|||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
& $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
|
||||
Exec-BlockVerbosely {
|
||||
& $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
|
||||
}
|
||||
$gdnFolder = Join-Path $workingDirectory '.gdn'
|
||||
|
||||
if ($TsaOnboard) {
|
||||
if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
|
||||
Write-Host "$guardianCliLocation tsa-onboard --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
|
||||
& $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
|
||||
Exec-BlockVerbosely {
|
||||
& $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
|
||||
}
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
|
||||
ExitWithExitCode $LASTEXITCODE
|
||||
|
@ -80,15 +93,42 @@ try {
|
|||
}
|
||||
}
|
||||
|
||||
if ($ArtifactToolsList -and $ArtifactToolsList.Count -gt 0) {
|
||||
& $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $ArtifactsDirectory -GdnFolder $gdnFolder -ToolsList $ArtifactToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
|
||||
}
|
||||
if ($SourceToolsList -and $SourceToolsList.Count -gt 0) {
|
||||
& $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
|
||||
# Configure a list of tools with a default target directory. Populates the ".gdn/r" directory.
|
||||
function Configure-ToolsList([object[]] $tools, [string] $targetDirectory) {
|
||||
if ($tools -and $tools.Count -gt 0) {
|
||||
Exec-BlockVerbosely {
|
||||
& $(Join-Path $PSScriptRoot 'configure-sdl-tool.ps1') `
|
||||
-GuardianCliLocation $guardianCliLocation `
|
||||
-WorkingDirectory $workingDirectory `
|
||||
-TargetDirectory $targetDirectory `
|
||||
-GdnFolder $gdnFolder `
|
||||
-ToolsList $tools `
|
||||
-AzureDevOpsAccessToken $AzureDevOpsAccessToken `
|
||||
-GuardianLoggerLevel $GuardianLoggerLevel `
|
||||
-CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams `
|
||||
-PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams `
|
||||
-CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams
|
||||
if ($BreakOnFailure) {
|
||||
Exit-IfNZEC "Sdl"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ($UpdateBaseline) {
|
||||
& (Join-Path $PSScriptRoot 'push-gdn.ps1') -Repository $RepoName -BranchName $BranchName -GdnFolder $GdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason 'Update baseline'
|
||||
# Configure Artifact and Source tools with default Target directories.
|
||||
Configure-ToolsList $ArtifactToolsList $ArtifactsDirectory
|
||||
Configure-ToolsList $SourceToolsList $SourceDirectory
|
||||
# Configure custom tools with no default Target directory.
|
||||
Configure-ToolsList $CustomToolsList $null
|
||||
|
||||
# At this point, all tools are configured in the ".gdn" directory. Run them all in a single call.
|
||||
# (If we used "run" multiple times, each run would overwrite data from earlier runs.)
|
||||
Exec-BlockVerbosely {
|
||||
& $(Join-Path $PSScriptRoot 'run-sdl.ps1') `
|
||||
-GuardianCliLocation $guardianCliLocation `
|
||||
-WorkingDirectory $SourceDirectory `
|
||||
-UpdateBaseline $UpdateBaseline `
|
||||
-GdnFolder $gdnFolder
|
||||
}
|
||||
|
||||
if ($TsaPublish) {
|
||||
|
@ -96,8 +136,9 @@ try {
|
|||
if (-not $TsaRepositoryName) {
|
||||
$TsaRepositoryName = "$($Repository)-$($BranchName)"
|
||||
}
|
||||
Write-Host "$guardianCliLocation tsa-publish --all-tools --repository-name `"$TsaRepositoryName`" --branch-name `"$TsaBranchName`" --build-number `"$BuildNumber`" --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
|
||||
& $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
|
||||
Exec-BlockVerbosely {
|
||||
& $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
|
||||
}
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE."
|
||||
ExitWithExitCode $LASTEXITCODE
|
||||
|
@ -110,7 +151,11 @@ try {
|
|||
|
||||
if ($BreakOnFailure) {
|
||||
Write-Host "Failing the build in case of breaking results..."
|
||||
& $guardianCliLocation break
|
||||
Exec-BlockVerbosely {
|
||||
& $guardianCliLocation break --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
|
||||
}
|
||||
} else {
|
||||
Write-Host "Letting the build pass even if there were breaking results..."
|
||||
}
|
||||
}
|
||||
catch {
|
||||
@ -0,0 +1,63 @@
|
|||
# This script looks for each archive file in a directory and extracts it into the target directory.
|
||||
# For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**".
|
||||
# Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip.
|
||||
param(
|
||||
# Full path to directory where archives are stored.
|
||||
[Parameter(Mandatory=$true)][string] $InputPath,
|
||||
# Full path to directory to extract archives into. May be the same as $InputPath.
|
||||
[Parameter(Mandatory=$true)][string] $ExtractPath
|
||||
)
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
Set-StrictMode -Version 2.0
|
||||
|
||||
$disableConfigureToolsetImport = $true
|
||||
|
||||
try {
|
||||
# `tools.ps1` checks $ci to perform some actions. Since the SDL
|
||||
# scripts don't necessarily execute in the same agent that runs the
# build.ps1/sh script, this variable isn't automatically set.
|
||||
$ci = $true
|
||||
. $PSScriptRoot\..\tools.ps1
|
||||
|
||||
Measure-Command {
|
||||
$jobs = @()
|
||||
|
||||
# Find archive files for non-Windows and Windows builds.
|
||||
$archiveFiles = @(
|
||||
Get-ChildItem (Join-Path $InputPath "*.tar.gz")
|
||||
Get-ChildItem (Join-Path $InputPath "*.zip")
|
||||
)
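# Each archive is extracted in its own background job so large artifact sets unpack in parallel;
# Receive-Job -Wait below blocks until every job finishes and surfaces its output (and any errors).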
|
||||
|
||||
foreach ($targzFile in $archiveFiles) {
|
||||
$jobs += Start-Job -ScriptBlock {
|
||||
$file = $using:targzFile
|
||||
$fileName = [System.IO.Path]::GetFileName($file)
|
||||
$extractDir = Join-Path $using:ExtractPath "$fileName.extracted"
|
||||
|
||||
New-Item $extractDir -ItemType Directory -Force | Out-Null
|
||||
|
||||
Write-Host "Extracting '$file' to '$extractDir'..."
|
||||
|
||||
# Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early.
|
||||
# This type of quit skips the catch, so we wouldn't be able to tell which file triggered the
|
||||
# error. Save output so it can be stored in the exception string along with context.
|
||||
$output = tar -xf $file -C $extractDir 2>&1
|
||||
# Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we
|
||||
# don't have access to the outer scope.
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'"
|
||||
}
|
||||
|
||||
Write-Host "Extracted to $extractDir"
|
||||
}
|
||||
}
|
||||
|
||||
Receive-Job $jobs -Wait
|
||||
}
|
||||
}
|
||||
catch {
|
||||
Write-Host $_
|
||||
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
|
||||
ExitWithExitCode 1
|
||||
}
|
|
@ -10,7 +10,7 @@ Param(
|
|||
$ErrorActionPreference = 'Stop'
|
||||
Set-StrictMode -Version 2.0
|
||||
$disableConfigureToolsetImport = $true
|
||||
$LASTEXITCODE = 0
|
||||
$global:LASTEXITCODE = 0
|
||||
|
||||
# `tools.ps1` checks $ci to perform some actions. Since the SDL
|
||||
# scripts don't necessarily execute in the same agent that runs the
|
||||
|
@ -29,18 +29,7 @@ $zipFile = "$WorkingDirectory/gdn.zip"
|
|||
|
||||
Add-Type -AssemblyName System.IO.Compression.FileSystem
|
||||
$gdnFolder = (Join-Path $WorkingDirectory '.gdn')
|
||||
try {
|
||||
# We try to download the zip; if the request fails (e.g. the file doesn't exist), we catch it and init guardian instead
|
||||
Write-Host 'Downloading gdn folder from internal config repository...'
|
||||
Invoke-WebRequest -Headers @{ "Accept"="application/zip"; "Authorization"="Basic $encodedPat" } -Uri $uri -OutFile $zipFile
|
||||
if (Test-Path $gdnFolder) {
|
||||
# Remove the gdn folder if it exists (it shouldn't unless there's too much caching; this is just in case)
|
||||
Remove-Item -Force -Recurse $gdnFolder
|
||||
}
|
||||
[System.IO.Compression.ZipFile]::ExtractToDirectory($zipFile, $WorkingDirectory)
|
||||
Write-Host $gdnFolder
|
||||
ExitWithExitCode 0
|
||||
} catch [System.Net.WebException] { } # Catch and ignore webexception
|
||||
|
||||
try {
|
||||
# if the folder does not exist, we'll do a guardian init and push it to the remote repository
|
||||
Write-Host 'Initializing Guardian...'
|
||||
|
@ -57,7 +46,6 @@ try {
|
|||
Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE."
|
||||
ExitWithExitCode $LASTEXITCODE
|
||||
}
|
||||
& $(Join-Path $PSScriptRoot 'push-gdn.ps1') -Repository $Repository -BranchName $BranchName -GdnFolder $gdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason 'Initialize gdn folder'
|
||||
ExitWithExitCode 0
|
||||
}
|
||||
catch {
|
||||
@ -1,4 +1,4 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<packages>
|
||||
<package id="Microsoft.Guardian.Cli.win10-x64" version="0.20.1"/>
|
||||
<package id="Microsoft.Guardian.Cli" version="0.109.0"/>
|
||||
</packages>
|
||||
@ -1,69 +0,0 @@
|
|||
Param(
|
||||
[string] $Repository,
|
||||
[string] $BranchName='master',
|
||||
[string] $GdnFolder,
|
||||
[string] $AzureDevOpsAccessToken,
|
||||
[string] $PushReason
|
||||
)
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
Set-StrictMode -Version 2.0
|
||||
$disableConfigureToolsetImport = $true
|
||||
$LASTEXITCODE = 0
|
||||
|
||||
try {
|
||||
# `tools.ps1` checks $ci to perform some actions. Since the SDL
|
||||
# scripts don't necessarily execute in the same agent that runs the
# build.ps1/sh script, this variable isn't automatically set.
|
||||
$ci = $true
|
||||
. $PSScriptRoot\..\tools.ps1
|
||||
|
||||
# We create the temp directory where we'll store the sdl-config repository
|
||||
$sdlDir = Join-Path $env:TEMP 'sdl'
|
||||
if (Test-Path $sdlDir) {
|
||||
Remove-Item -Force -Recurse $sdlDir
|
||||
}
|
||||
|
||||
Write-Host "git clone https://dnceng:`$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir"
|
||||
git clone https://dnceng:$AzureDevOpsAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git clone failed with exit code $LASTEXITCODE."
|
||||
ExitWithExitCode $LASTEXITCODE
|
||||
}
|
||||
# We copy the .gdn folder from our local run into the git repository so it can be committed
|
||||
$sdlRepositoryFolder = Join-Path (Join-Path (Join-Path $sdlDir $Repository) $BranchName) '.gdn'
|
||||
if (Get-Command Robocopy) {
|
||||
Robocopy /S $GdnFolder $sdlRepositoryFolder
|
||||
} else {
|
||||
rsync -r $GdnFolder $sdlRepositoryFolder
|
||||
}
|
||||
# cd to the sdl-config directory so we can run git there
|
||||
Push-Location $sdlDir
|
||||
# git add . --> git commit --> git push
|
||||
Write-Host 'git add .'
|
||||
git add .
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git add failed with exit code $LASTEXITCODE."
|
||||
ExitWithExitCode $LASTEXITCODE
|
||||
}
|
||||
Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`""
|
||||
git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName"
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git commit failed with exit code $LASTEXITCODE."
|
||||
ExitWithExitCode $LASTEXITCODE
|
||||
}
|
||||
Write-Host 'git push'
|
||||
git push
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Git push failed with exit code $LASTEXITCODE."
|
||||
ExitWithExitCode $LASTEXITCODE
|
||||
}
|
||||
|
||||
# Return to the original directory
|
||||
Pop-Location
|
||||
}
|
||||
catch {
|
||||
Write-Host $_.ScriptStackTrace
|
||||
Write-PipelineTelemetryError -Category 'Sdl' -Message $_
|
||||
ExitWithExitCode 1
|
||||
}
@ -1,19 +1,15 @@
|
|||
Param(
|
||||
[string] $GuardianCliLocation,
|
||||
[string] $WorkingDirectory,
|
||||
[string] $TargetDirectory,
|
||||
[string] $GdnFolder,
|
||||
[string[]] $ToolsList,
|
||||
[string] $UpdateBaseline,
|
||||
[string] $GuardianLoggerLevel='Standard',
|
||||
[string[]] $CrScanAdditionalRunConfigParams,
|
||||
[string[]] $PoliCheckAdditionalRunConfigParams
|
||||
[string] $GuardianLoggerLevel='Standard'
|
||||
)
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
Set-StrictMode -Version 2.0
|
||||
$disableConfigureToolsetImport = $true
|
||||
$LASTEXITCODE = 0
|
||||
$global:LASTEXITCODE = 0
|
||||
|
||||
try {
|
||||
# `tools.ps1` checks $ci to perform some actions. Since the SDL
|
||||
|
@ -23,7 +19,6 @@ try {
|
|||
. $PSScriptRoot\..\tools.ps1
|
||||
|
||||
# We store config files in the r directory of .gdn
|
||||
Write-Host $ToolsList
|
||||
$gdnConfigPath = Join-Path $GdnFolder 'r'
|
||||
$ValidPath = Test-Path $GuardianCliLocation
|
||||
|
||||
|
@ -33,37 +28,18 @@ try {
|
|||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
$configParam = @('--config')
|
||||
$gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig'
|
||||
Write-Host "Discovered Guardian config files:"
|
||||
$gdnConfigFiles | Out-String | Write-Host
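# Every discovered .gdnconfig file is passed to a single 'guardian run' below via --config,
# so all of the configured tools execute in one invocation.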
|
||||
|
||||
foreach ($tool in $ToolsList) {
|
||||
$gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig"
|
||||
Write-Host $tool
|
||||
# We have to manually configure tools that run on source to look at the source directory only
|
||||
if ($tool -eq 'credscan') {
|
||||
Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory < $TargetDirectory `" `" OutputType < pre `" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})"
|
||||
& $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory < $TargetDirectory " "OutputType < pre" $(If ($CrScanAdditionalRunConfigParams) {$CrScanAdditionalRunConfigParams})
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
|
||||
ExitWithExitCode $LASTEXITCODE
|
||||
}
|
||||
}
|
||||
if ($tool -eq 'policheck') {
|
||||
Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target < $TargetDirectory `" $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})"
|
||||
& $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target < $TargetDirectory " $(If ($PoliCheckAdditionalRunConfigParams) {$PoliCheckAdditionalRunConfigParams})
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian configure for $tool failed with exit code $LASTEXITCODE."
|
||||
ExitWithExitCode $LASTEXITCODE
|
||||
}
|
||||
}
|
||||
|
||||
$configParam+=$gdnConfigFile
|
||||
}
|
||||
|
||||
Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam"
|
||||
& $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel $configParam
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian run for $ToolsList using $configParam failed with exit code $LASTEXITCODE."
|
||||
ExitWithExitCode $LASTEXITCODE
|
||||
Exec-BlockVerbosely {
|
||||
& $GuardianCliLocation run `
|
||||
--working-directory $WorkingDirectory `
|
||||
--baseline mainbaseline `
|
||||
--update-baseline $UpdateBaseline `
|
||||
--logger-level $GuardianLoggerLevel `
|
||||
--config @gdnConfigFiles
|
||||
Exit-IfNZEC "Sdl"
|
||||
}
|
||||
}
|
||||
catch {
|
||||
@ -2,17 +2,33 @@ parameters:
|
|||
enable: 'false' # Whether the SDL validation job should execute or not
|
||||
overrideParameters: '' # Optional: to override values for parameters.
|
||||
additionalParameters: '' # Optional: parameters that need user-specific values, e.g. '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
|
||||
# Optional: if specified, restore and use this version of Guardian instead of the default.
|
||||
overrideGuardianVersion: ''
|
||||
# Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
|
||||
# diagnosis of problems with specific tool configurations.
|
||||
publishGuardianDirectoryToPipeline: false
|
||||
# The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
|
||||
# parameters rather than relying on YAML. It may be better to use a local script, because you can
|
||||
# reproduce results locally without piecing together a command based on the YAML.
|
||||
executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
|
||||
# There is a reported bug in Azure DevOps where, if this parameter is named
|
||||
# 'continueOnError', the parameter value is not correctly picked up.
|
||||
# This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
|
||||
sdlContinueOnError: false # optional: determines whether to continue the build if the step errors;
|
||||
downloadArtifacts: true # optional: determines if the artifacts should be downloaded
|
||||
# optional: determines if build artifacts should be downloaded.
|
||||
downloadArtifacts: true
|
||||
# optional: determines if this job should search the directory of downloaded artifacts for
|
||||
# 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks.
|
||||
extractArchiveArtifacts: false
|
||||
dependsOn: '' # Optional: dependencies of the job
|
||||
artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts
|
||||
# Usage:
|
||||
# artifactNames:
|
||||
# - 'BlobArtifacts'
|
||||
# - 'Artifacts_Windows_NT_Release'
|
||||
# Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts,
|
||||
# not pipeline artifacts, so doesn't affect the use of this parameter.
|
||||
pipelineArtifactNames: []
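# Usage (illustrative, mirroring the artifactNames example above):
# pipelineArtifactNames:
# - 'BlobArtifacts'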
|
||||
|
||||
jobs:
|
||||
- job: Run_SDL
|
||||
|
@ -22,16 +38,29 @@ jobs:
|
|||
variables:
|
||||
- group: DotNet-VSTS-Bot
|
||||
- name: AzDOProjectName
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
|
||||
value: ${{ parameters.AzDOProjectName }}
|
||||
- name: AzDOPipelineId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
|
||||
value: ${{ parameters.AzDOPipelineId }}
|
||||
- name: AzDOBuildId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
|
||||
value: ${{ parameters.AzDOBuildId }}
|
||||
- template: /eng/common/templates/variables/sdl-variables.yml
|
||||
- name: GuardianVersion
|
||||
value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
|
||||
pool:
|
||||
name: Hosted VS2017
|
||||
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
|
||||
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
|
||||
name: VSEngSS-MicroBuild2022-1ES
|
||||
demands: Cmd
|
||||
# If it's not devdiv, it's dnceng
|
||||
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
|
||||
name: NetCore1ESPool-Internal
|
||||
demands: ImageOverride -equals windows.vs2019.amd64
|
||||
steps:
|
||||
- checkout: self
|
||||
clean: true
|
||||
|
||||
- template: /eng/common/templates/post-build/setup-maestro-vars.yml
|
||||
|
||||
- ${{ if ne(parameters.downloadArtifacts, 'false')}}:
|
||||
- ${{ if ne(parameters.artifactNames, '') }}:
|
||||
- ${{ each artifactName in parameters.artifactNames }}:
|
||||
|
@ -45,6 +74,7 @@ jobs:
|
|||
buildId: $(AzDOBuildId)
|
||||
artifactName: ${{ artifactName }}
|
||||
downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
|
||||
checkDownloadedFiles: true
|
||||
- ${{ if eq(parameters.artifactNames, '') }}:
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: Download Build Artifacts
|
||||
|
@ -57,35 +87,45 @@ jobs:
|
|||
downloadType: specific files
|
||||
itemPattern: "**"
|
||||
downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
|
||||
checkDownloadedFiles: true
|
||||
|
||||
- ${{ each artifactName in parameters.pipelineArtifactNames }}:
|
||||
- task: DownloadPipelineArtifact@2
|
||||
displayName: Download Pipeline Artifacts
|
||||
inputs:
|
||||
buildType: specific
|
||||
buildVersionToDownload: specific
|
||||
project: $(AzDOProjectName)
|
||||
pipeline: $(AzDOPipelineId)
|
||||
buildId: $(AzDOBuildId)
|
||||
artifactName: ${{ artifactName }}
|
||||
downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
|
||||
checkDownloadedFiles: true
|
||||
|
||||
- powershell: eng/common/sdl/extract-artifact-packages.ps1
|
||||
-InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
|
||||
-ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
|
||||
displayName: Extract Blob Artifacts
|
||||
continueOnError: ${{ parameters.sdlContinueOnError }}
|
||||
|
||||
- powershell: eng/common/sdl/extract-artifact-packages.ps1
|
||||
-InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
|
||||
-ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
|
||||
displayName: Extract Package Artifacts
|
||||
continueOnError: ${{ parameters.sdlContinueOnError }}
|
||||
- task: NuGetToolInstaller@1
|
||||
displayName: 'Install NuGet.exe'
|
||||
- task: NuGetCommand@2
|
||||
displayName: 'Install Guardian'
|
||||
inputs:
|
||||
restoreSolution: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
|
||||
feedsToUse: config
|
||||
nugetConfigPath: $(Build.SourcesDirectory)\eng\common\sdl\NuGet.config
|
||||
externalFeedCredentials: GuardianConnect
|
||||
restoreDirectory: $(Build.SourcesDirectory)\.packages
|
||||
- ${{ if ne(parameters.overrideParameters, '') }}:
|
||||
- powershell: eng/common/sdl/execute-all-sdl-tools.ps1 ${{ parameters.overrideParameters }}
|
||||
displayName: Execute SDL
|
||||
continueOnError: ${{ parameters.sdlContinueOnError }}
|
||||
- ${{ if eq(parameters.overrideParameters, '') }}:
|
||||
- powershell: eng/common/sdl/execute-all-sdl-tools.ps1
|
||||
-GuardianPackageName Microsoft.Guardian.Cli.win10-x64.0.20.1
|
||||
-NugetPackageDirectory $(Build.SourcesDirectory)\.packages
|
||||
-AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
|
||||
${{ parameters.additionalParameters }}
|
||||
displayName: Execute SDL
|
||||
|
||||
- ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}:
|
||||
- powershell: eng/common/sdl/extract-artifact-archives.ps1
|
||||
-InputPath $(Build.ArtifactStagingDirectory)\artifacts
|
||||
-ExtractPath $(Build.ArtifactStagingDirectory)\artifacts
|
||||
displayName: Extract Archive Artifacts
|
||||
continueOnError: ${{ parameters.sdlContinueOnError }}
|
||||
|
||||
- template: /eng/common/templates/steps/execute-sdl.yml
|
||||
parameters:
|
||||
overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
|
||||
executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
|
||||
overrideParameters: ${{ parameters.overrideParameters }}
|
||||
additionalParameters: ${{ parameters.additionalParameters }}
|
||||
publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
|
||||
sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
|
||||
@ -1,48 +0,0 @@
|
|||
parameters:
|
||||
# Optional: dependencies of the job
|
||||
dependsOn: ''
|
||||
|
||||
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
|
||||
pool: {}
|
||||
|
||||
# Optional: Include toolset dependencies in the generated graph files
|
||||
includeToolset: false
|
||||
|
||||
jobs:
|
||||
- job: Generate_Graph_Files
|
||||
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
|
||||
displayName: Generate Graph Files
|
||||
|
||||
pool: ${{ parameters.pool }}
|
||||
|
||||
variables:
|
||||
# Publish-Build-Assets provides: MaestroAccessToken, BotAccount-dotnet-maestro-bot-PAT
|
||||
# DotNet-AllOrgs-Darc-Pats provides: dn-bot-devdiv-dnceng-rw-code-pat
|
||||
- group: Publish-Build-Assets
|
||||
- group: DotNet-AllOrgs-Darc-Pats
|
||||
- name: _GraphArguments
|
||||
value: -gitHubPat $(BotAccount-dotnet-maestro-bot-PAT)
|
||||
-azdoPat $(dn-bot-devdiv-dnceng-rw-code-pat)
|
||||
-barToken $(MaestroAccessToken)
|
||||
-outputFolder '$(Build.StagingDirectory)/GraphFiles/'
|
||||
- ${{ if ne(parameters.includeToolset, 'false') }}:
|
||||
- name: _GraphArguments
|
||||
value: ${{ variables._GraphArguments }} -includeToolset
|
||||
|
||||
steps:
|
||||
- task: PowerShell@2
|
||||
displayName: Generate Graph Files
|
||||
inputs:
|
||||
filePath: eng\common\generate-graph-files.ps1
|
||||
arguments: $(_GraphArguments)
|
||||
continueOnError: true
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Publish Graph to Artifacts
|
||||
inputs:
|
||||
PathtoPublish: '$(Build.StagingDirectory)/GraphFiles'
|
||||
PublishLocation: Container
|
||||
ArtifactName: GraphFiles
|
||||
continueOnError: true
|
||||
condition: always()
@ -24,12 +24,17 @@ parameters:
|
|||
enablePublishBuildAssets: false
|
||||
enablePublishTestResults: false
|
||||
enablePublishUsingPipelines: false
|
||||
useBuildManifest: false
|
||||
disableComponentGovernance: false
|
||||
mergeTestResults: false
|
||||
testRunTitle: ''
|
||||
testResultsFormat: ''
|
||||
name: ''
|
||||
preSteps: []
|
||||
runAsPublic: false
|
||||
# Sbom related params
|
||||
enableSbom: true
|
||||
PackageVersion: 7.0.0
|
||||
BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
|
||||
|
||||
jobs:
|
||||
- job: ${{ parameters.name }}
|
||||
|
@ -103,7 +108,7 @@ jobs:
|
|||
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
|
||||
- task: MicroBuildSigningPlugin@2
|
||||
- task: MicroBuildSigningPlugin@3
|
||||
displayName: Install MicroBuild plugin
|
||||
inputs:
|
||||
signType: $(_SignType)
|
||||
|
@ -114,6 +119,7 @@ jobs:
|
|||
continueOnError: ${{ parameters.continueOnError }}
|
||||
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
|
||||
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
|
||||
- task: NuGetAuthenticate@0
|
||||
|
||||
- ${{ if or(eq(parameters.artifacts.download, 'true'), ne(parameters.artifacts.download, '')) }}:
|
||||
|
@ -131,11 +137,15 @@ jobs:
|
|||
- task: RichCodeNavIndexer@0
|
||||
displayName: RichCodeNav Upload
|
||||
inputs:
|
||||
languages: 'csharp'
|
||||
environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'prod') }}
|
||||
languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
|
||||
environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }}
|
||||
richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
|
||||
continueOnError: true
|
||||
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), ne(parameters.disableComponentGovernance, 'true')) }}:
|
||||
- task: ComponentGovernanceComponentDetection@0
|
||||
continueOnError: true
|
||||
|
||||
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- task: MicroBuildCleanup@1
|
||||
|
@ -202,9 +212,9 @@ jobs:
|
|||
continueOnError: true
|
||||
condition: always()
|
||||
|
||||
- ${{ if eq(parameters.enablePublishTestResults, 'true') }}:
|
||||
- ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
|
||||
- task: PublishTestResults@2
|
||||
displayName: Publish Test Results
|
||||
displayName: Publish XUnit Test Results
|
||||
inputs:
|
||||
testResultsFormat: 'xUnit'
|
||||
testResultsFiles: '*.xml'
|
||||
|
@ -213,6 +223,17 @@ jobs:
|
|||
mergeTestResults: ${{ parameters.mergeTestResults }}
|
||||
continueOnError: true
|
||||
condition: always()
|
||||
- ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
|
||||
- task: PublishTestResults@2
|
||||
displayName: Publish TRX Test Results
|
||||
inputs:
|
||||
testResultsFormat: 'VSTest'
|
||||
testResultsFiles: '*.trx'
|
||||
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
|
||||
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
|
||||
mergeTestResults: ${{ parameters.mergeTestResults }}
|
||||
continueOnError: true
|
||||
condition: always()
|
||||
|
||||
- ${{ if and(eq(parameters.enablePublishBuildAssets, true), ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- task: CopyFiles@2
|
||||
|
@ -232,11 +253,9 @@ jobs:
|
|||
continueOnError: ${{ parameters.continueOnError }}
|
||||
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
|
||||
|
||||
- ${{ if eq(parameters.useBuildManifest, true) }}:
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Publish Build Manifest
|
||||
inputs:
|
||||
PathToPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/manifest.props'
|
||||
PublishLocation: Container
|
||||
ArtifactName: BuildManifests
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
|
||||
- template: /eng/common/templates/steps/generate-sbom.yml
|
||||
parameters:
|
||||
PackageVersion: ${{ parameters.packageVersion}}
|
||||
BuildDropPath: ${{ parameters.buildDropPath }}
|
||||
|
||||
@ -0,0 +1,106 @@
|
|||
parameters:
|
||||
# Optional: dependencies of the job
|
||||
dependsOn: ''
|
||||
|
||||
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
|
||||
pool: ''
|
||||
|
||||
CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
|
||||
GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
|
||||
|
||||
SourcesDirectory: $(Build.SourcesDirectory)
|
||||
CreatePr: true
|
||||
AutoCompletePr: false
|
||||
ReusePr: true
|
||||
UseLfLineEndings: true
|
||||
UseCheckedInLocProjectJson: false
|
||||
LanguageSet: VS_Main_Languages
|
||||
LclSource: lclFilesInRepo
|
||||
LclPackageId: ''
|
||||
RepoType: gitHub
|
||||
GitHubOrg: dotnet
|
||||
MirrorRepo: ''
|
||||
MirrorBranch: main
|
||||
condition: ''
|
||||
|
||||
jobs:
|
||||
- job: OneLocBuild
|
||||
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
|
||||
displayName: OneLocBuild
|
||||
|
||||
${{ if ne(parameters.pool, '') }}:
|
||||
pool: ${{ parameters.pool }}
|
||||
${{ if eq(parameters.pool, '') }}:
|
||||
pool:
|
||||
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
|
||||
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
|
||||
name: VSEngSS-MicroBuild2022-1ES
|
||||
demands: Cmd
|
||||
# If it's not devdiv, it's dnceng
|
||||
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
|
||||
name: NetCore1ESPool-Internal
|
||||
demands: ImageOverride -equals windows.vs2019.amd64
|
||||
|
||||
variables:
|
||||
- group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
|
||||
- name: _GenerateLocProjectArguments
|
||||
value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
|
||||
-LanguageSet "${{ parameters.LanguageSet }}"
|
||||
-CreateNeutralXlfs
|
||||
- ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
|
||||
- name: _GenerateLocProjectArguments
|
||||
value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
|
||||
|
||||
|
||||
steps:
|
||||
- task: Powershell@2
|
||||
inputs:
|
||||
filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
|
||||
arguments: $(_GenerateLocProjectArguments)
|
||||
displayName: Generate LocProject.json
|
||||
condition: ${{ parameters.condition }}
|
||||
|
||||
- task: OneLocBuild@2
|
||||
displayName: OneLocBuild
|
||||
env:
|
||||
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
|
||||
inputs:
|
||||
locProj: eng/Localize/LocProject.json
|
||||
outDir: $(Build.ArtifactStagingDirectory)
|
||||
lclSource: ${{ parameters.LclSource }}
|
||||
lclPackageId: ${{ parameters.LclPackageId }}
|
||||
isCreatePrSelected: ${{ parameters.CreatePr }}
|
||||
isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
|
||||
${{ if eq(parameters.CreatePr, true) }}:
|
||||
isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
|
||||
${{ if eq(parameters.RepoType, 'gitHub') }}:
|
||||
isShouldReusePrSelected: ${{ parameters.ReusePr }}
|
||||
packageSourceAuth: patAuth
|
||||
patVariable: ${{ parameters.CeapexPat }}
|
||||
${{ if eq(parameters.RepoType, 'gitHub') }}:
|
||||
repoType: ${{ parameters.RepoType }}
|
||||
gitHubPatVariable: "${{ parameters.GithubPat }}"
|
||||
${{ if ne(parameters.MirrorRepo, '') }}:
|
||||
isMirrorRepoSelected: true
|
||||
gitHubOrganization: ${{ parameters.GitHubOrg }}
|
||||
mirrorRepo: ${{ parameters.MirrorRepo }}
|
||||
mirrorBranch: ${{ parameters.MirrorBranch }}
|
||||
condition: ${{ parameters.condition }}
|
||||
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Publish Localization Files
|
||||
inputs:
|
||||
PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
|
||||
PublishLocation: Container
|
||||
ArtifactName: Loc
|
||||
condition: ${{ parameters.condition }}
|
||||
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Publish LocProject.json
|
||||
inputs:
|
||||
PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
|
||||
PublishLocation: Container
|
||||
ArtifactName: Loc
|
||||
condition: ${{ parameters.condition }}
@ -1,95 +0,0 @@
|
|||
parameters:
|
||||
steps: [] # optional -- any additional steps that need to happen before pulling down the performance repo and sending the performance benchmarks to helix (ie building your repo)
|
||||
variables: [] # optional -- list of additional variables to send to the template
|
||||
jobName: '' # required -- job name
|
||||
displayName: '' # optional -- display name for the job. Will use jobName if not passed
|
||||
pool: '' # required -- name of the Build pool
|
||||
container: '' # required -- name of the container
|
||||
osGroup: '' # required -- operating system for the job
|
||||
extraSetupParameters: '' # optional -- extra arguments to pass to the setup script
|
||||
frameworks: ['netcoreapp3.0'] # optional -- list of frameworks to run against
|
||||
continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
|
||||
dependsOn: '' # optional -- dependencies of the job
|
||||
timeoutInMinutes: 320 # optional -- timeout for the job
|
||||
enableTelemetry: false # optional -- enable for telemetry
|
||||
|
||||
jobs:
|
||||
- template: ../jobs/jobs.yml
|
||||
parameters:
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
enableTelemetry: ${{ parameters.enableTelemetry }}
|
||||
enablePublishBuildArtifacts: true
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
|
||||
jobs:
|
||||
- job: '${{ parameters.jobName }}'
|
||||
|
||||
${{ if ne(parameters.displayName, '') }}:
|
||||
displayName: '${{ parameters.displayName }}'
|
||||
${{ if eq(parameters.displayName, '') }}:
|
||||
displayName: '${{ parameters.jobName }}'
|
||||
|
||||
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
|
||||
|
||||
variables:
|
||||
|
||||
- ${{ each variable in parameters.variables }}:
|
||||
- ${{ if ne(variable.name, '') }}:
|
||||
- name: ${{ variable.name }}
|
||||
value: ${{ variable.value }}
|
||||
- ${{ if ne(variable.group, '') }}:
|
||||
- group: ${{ variable.group }}
|
||||
|
||||
- IsInternal: ''
|
||||
- HelixApiAccessToken: ''
|
||||
- HelixPreCommand: ''
|
||||
|
||||
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- ${{ if eq( parameters.osGroup, 'Windows_NT') }}:
|
||||
- HelixPreCommand: 'set "PERFLAB_UPLOAD_TOKEN=$(PerfCommandUploadToken)"'
|
||||
- IsInternal: -Internal
|
||||
- ${{ if ne(parameters.osGroup, 'Windows_NT') }}:
|
||||
- HelixPreCommand: 'export PERFLAB_UPLOAD_TOKEN="$(PerfCommandUploadTokenLinux)"'
|
||||
- IsInternal: --internal
|
||||
|
||||
- group: DotNet-HelixApi-Access
|
||||
- group: dotnet-benchview
|
||||
|
||||
workspace:
|
||||
clean: all
|
||||
pool:
|
||||
${{ parameters.pool }}
|
||||
container: ${{ parameters.container }}
|
||||
strategy:
|
||||
matrix:
|
||||
${{ each framework in parameters.frameworks }}:
|
||||
${{ framework }}:
|
||||
_Framework: ${{ framework }}
|
||||
steps:
|
||||
- checkout: self
|
||||
clean: true
|
||||
# Run all of the steps to setup repo
|
||||
- ${{ each step in parameters.steps }}:
|
||||
- ${{ step }}
|
||||
- powershell: $(Build.SourcesDirectory)\eng\common\performance\performance-setup.ps1 $(IsInternal) -Framework $(_Framework) ${{ parameters.extraSetupParameters }}
|
||||
displayName: Performance Setup (Windows)
|
||||
condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
- script: $(Build.SourcesDirectory)/eng/common/performance/performance-setup.sh $(IsInternal) --framework $(_Framework) ${{ parameters.extraSetupParameters }}
|
||||
displayName: Performance Setup (Unix)
|
||||
condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
- script: $(Python) $(PerformanceDirectory)/scripts/ci_setup.py $(SetupArguments)
|
||||
displayName: Run ci setup script
|
||||
# Run perf testing in helix
|
||||
- template: /eng/common/templates/steps/perf-send-to-helix.yml
|
||||
parameters:
|
||||
HelixSource: '$(HelixSourcePrefix)/$(Build.Repository.Name)/$(Build.SourceBranch)' # sources must start with pr/, official/, prodcon/, or agent/
|
||||
HelixType: 'test/performance/$(Kind)/$(_Framework)/$(Architecture)'
|
||||
HelixAccessToken: $(HelixApiAccessToken)
|
||||
HelixTargetQueues: $(Queue)
|
||||
HelixPreCommands: $(HelixPreCommand)
|
||||
Creator: $(Creator)
|
||||
WorkItemTimeout: 4:00 # 4 hours
|
||||
WorkItemDirectory: '$(WorkItemDirectory)' # WorkItemDirectory can not be empty, so we send it some docs to keep it happy
|
||||
CorrelationPayloadDirectory: '$(PayloadDirectory)' # it gets checked out to a folder with shorter path than WorkItemDirectory so we can avoid file name too long exceptions
@ -23,26 +23,33 @@ parameters:
|
|||
# Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
|
||||
publishUsingPipelines: false
|
||||
|
||||
# Optional: whether to publish the build's assets as soon as the publish to BAR stage completes, rather than in a separate stage
|
||||
publishAssetsImmediately: false
|
||||
|
||||
artifactsPublishingAdditionalParameters: ''
|
||||
|
||||
signingValidationAdditionalParameters: ''
|
||||
|
||||
jobs:
|
||||
- job: Asset_Registry_Publish
|
||||
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
|
||||
displayName: Publish to Build Asset Registry
|
||||
${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
|
||||
displayName: Publish Assets
|
||||
${{ else }}:
|
||||
displayName: Publish to Build Asset Registry
|
||||
|
||||
pool: ${{ parameters.pool }}
|
||||
|
||||
variables:
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- name: _BuildConfig
|
||||
value: ${{ parameters.configuration }}
|
||||
- group: Publish-Build-Assets
|
||||
# Skip component governance and codesign validation for SDL. These jobs
|
||||
# create no content.
|
||||
- name: skipComponentGovernanceDetection
|
||||
value: true
|
||||
- group: AzureDevOps-Artifact-Feeds-Pats
|
||||
- name: runCodesignValidationInjection
|
||||
value: false
|
||||
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
|
||||
- template: /eng/common/templates/post-build/common-variables.yml
|
||||
|
||||
steps:
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
|
@ -51,11 +58,17 @@ jobs:
|
|||
inputs:
|
||||
artifactName: AssetManifests
|
||||
downloadPath: '$(Build.StagingDirectory)/Download'
|
||||
checkDownloadedFiles: true
|
||||
condition: ${{ parameters.condition }}
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- task: NuGetAuthenticate@0
|
||||
- task: NuGetAuthenticate@0
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Enable cross-org NuGet feed authentication
|
||||
inputs:
|
||||
filePath: $(Build.SourcesDirectory)/eng/common/enable-cross-org-publishing.ps1
|
||||
arguments: -token $(dn-bot-all-orgs-artifact-feeds-rw)
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Publish Build Assets
|
||||
|
@ -66,7 +79,6 @@ jobs:
|
|||
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
|
||||
/p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
|
||||
/p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
|
||||
/p:Configuration=$(_BuildConfig)
|
||||
/p:OfficialBuildId=$(Build.BuildNumber)
|
||||
condition: ${{ parameters.condition }}
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
|
@ -86,7 +98,49 @@ jobs:
|
|||
PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
|
||||
PublishLocation: Container
|
||||
ArtifactName: ReleaseConfigs
|
||||
|
||||
|
||||
- task: powershell@2
|
||||
displayName: Check if SymbolPublishingExclusionsFile.txt exists
|
||||
inputs:
|
||||
targetType: inline
|
||||
script: |
|
||||
$symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
|
||||
if (Test-Path -Path $symbolExclusionfile)
{
Write-Host "SymbolExclusionFile exists"
Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
}
else
{
Write-Host "Symbols Exclusion file does not exist"
Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
}
|
||||
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Publish SymbolPublishingExclusionsFile Artifact
|
||||
condition: eq(variables['SymbolExclusionFile'], 'true')
|
||||
inputs:
|
||||
PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
|
||||
PublishLocation: Container
|
||||
ArtifactName: ReleaseConfigs
|
||||
|
||||
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
|
||||
- template: /eng/common/templates/post-build/setup-maestro-vars.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Publish Using Darc
|
||||
inputs:
|
||||
filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
|
||||
arguments: -BuildId $(BARBuildId)
|
||||
-PublishingInfraVersion 3
|
||||
-AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
|
||||
-MaestroToken '$(MaestroApiAccessToken)'
|
||||
-WaitPublishingFinish true
|
||||
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
|
||||
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
|
||||
|
||||
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
|
||||
- template: /eng/common/templates/steps/publish-logs.yml
|
||||
parameters:
|
||||
@ -0,0 +1,63 @@
|
|||
parameters:
|
||||
# This template adds arcade-powered source-build to CI. The template produces a server job with a
|
||||
# default ID 'Source_Build_Complete' to put in a dependency list if necessary.
|
||||
|
||||
# Specifies the prefix for source-build jobs added to the pipeline. Use this if disambiguation is needed.
|
||||
jobNamePrefix: 'Source_Build'
|
||||
|
||||
# Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
|
||||
# managed-only repositories. This is an object with these properties:
|
||||
#
|
||||
# name: ''
|
||||
# The name of the job. This is included in the job ID.
|
||||
# targetRID: ''
|
||||
# The name of the target RID to use, instead of the one auto-detected by Arcade.
|
||||
# nonPortable: false
|
||||
# Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
|
||||
# linux-x64), and compiling against distro-provided packages rather than portable ones.
|
||||
# skipPublishValidation: false
|
||||
# Disables publishing validation. By default, a check is performed to ensure no packages are
|
||||
# published by source-build.
|
||||
# container: ''
|
||||
# A container to use. Runs in docker.
|
||||
# pool: {}
|
||||
# A pool to use. Runs directly on an agent.
|
||||
# buildScript: ''
|
||||
# Specifies the build script to invoke to perform the build in the repo. The default
|
||||
# './build.sh' should work for typical Arcade repositories, but this is customizable for
|
||||
# difficult situations.
|
||||
# jobProperties: {}
|
||||
# A list of job properties to inject at the top level, for potential extensibility beyond
|
||||
# container and pool.
|
||||
platform: {}
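# For example (values are illustrative; the container shown is the defaultManagedPlatform default
# from jobs/source-build.yml):
#   platform:
#     name: 'Managed'
#     container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-3e800f1-20190501005343'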
|
||||
|
||||
jobs:
|
||||
- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
|
||||
displayName: Source-Build (${{ parameters.platform.name }})
|
||||
|
||||
${{ each property in parameters.platform.jobProperties }}:
|
||||
${{ property.key }}: ${{ property.value }}
|
||||
|
||||
${{ if ne(parameters.platform.container, '') }}:
|
||||
container: ${{ parameters.platform.container }}
|
||||
|
||||
${{ if eq(parameters.platform.pool, '') }}:
|
||||
# The default VM host AzDO pool. This should be capable of running Docker containers: almost all
|
||||
# source-build builds run in Docker, including the default managed platform.
|
||||
pool:
|
||||
${{ if eq(variables['System.TeamProject'], 'public') }}:
|
||||
name: NetCore-Public
|
||||
demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open
|
||||
${{ if eq(variables['System.TeamProject'], 'internal') }}:
|
||||
name: NetCore1ESPool-Internal
|
||||
demands: ImageOverride -equals Build.Ubuntu.1804.Amd64
|
||||
${{ if ne(parameters.platform.pool, '') }}:
|
||||
pool: ${{ parameters.platform.pool }}
|
||||
|
||||
workspace:
|
||||
clean: all
|
||||
|
||||
steps:
|
||||
- template: /eng/common/templates/steps/source-build.yml
|
||||
parameters:
|
||||
platform: ${{ parameters.platform }}
@ -0,0 +1,66 @@
|
|||
parameters:
|
||||
runAsPublic: false
|
||||
sourceIndexPackageVersion: 1.0.1-20220804.1
|
||||
sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
|
||||
sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
|
||||
preSteps: []
|
||||
binlogPath: artifacts/log/Debug/Build.binlog
|
||||
condition: ''
|
||||
dependsOn: ''
|
||||
pool: ''
|
||||
|
||||
jobs:
|
||||
- job: SourceIndexStage1
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
condition: ${{ parameters.condition }}
|
||||
variables:
|
||||
- name: SourceIndexPackageVersion
|
||||
value: ${{ parameters.sourceIndexPackageVersion }}
|
||||
- name: SourceIndexPackageSource
|
||||
value: ${{ parameters.sourceIndexPackageSource }}
|
||||
- name: BinlogPath
|
||||
value: ${{ parameters.binlogPath }}
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- group: source-dot-net stage1 variables
|
||||
|
||||
${{ if ne(parameters.pool, '') }}:
|
||||
pool: ${{ parameters.pool }}
|
||||
${{ if eq(parameters.pool, '') }}:
|
||||
pool:
|
||||
${{ if eq(variables['System.TeamProject'], 'public') }}:
|
||||
name: NetCore-Public
|
||||
demands: ImageOverride -equals windows.vs2019.amd64.open
|
||||
${{ if eq(variables['System.TeamProject'], 'internal') }}:
|
||||
name: NetCore1ESPool-Internal
|
||||
demands: ImageOverride -equals windows.vs2019.amd64
|
||||
|
||||
steps:
|
||||
- ${{ each preStep in parameters.preSteps }}:
|
||||
- ${{ preStep }}
|
||||
|
||||
- task: UseDotNet@2
|
||||
displayName: Use .NET Core sdk 3.1
|
||||
inputs:
|
||||
packageType: sdk
|
||||
version: 3.1.x
|
||||
installationPath: $(Agent.TempDirectory)/dotnet
|
||||
workingDirectory: $(Agent.TempDirectory)
|
||||
|
||||
- script: |
|
||||
$(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
|
||||
$(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
|
||||
displayName: Download Tools
|
||||
# Set the working directory to the temp directory so 'dotnet' doesn't pick up the repo's global.json and try to use the repo's SDK.
|
||||
workingDirectory: $(Agent.TempDirectory)
|
||||
|
||||
- script: ${{ parameters.sourceIndexBuildCommand }}
|
||||
displayName: Build Repository
|
||||
|
||||
- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
|
||||
displayName: Process Binlog into indexable sln
|
||||
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
|
||||
displayName: Upload stage1 artifacts to source index
|
||||
env:
|
||||
BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)
@ -0,0 +1,31 @@
|
|||
parameters:
|
||||
# See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
|
||||
continueOnError: false
|
||||
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
|
||||
jobs: []
|
||||
# Optional: if specified, restore and use this version of Guardian instead of the default.
|
||||
overrideGuardianVersion: ''
|
||||
|
||||
jobs:
|
||||
- template: /eng/common/templates/jobs/jobs.yml
|
||||
parameters:
|
||||
enableMicrobuild: false
|
||||
enablePublishBuildArtifacts: false
|
||||
enablePublishTestResults: false
|
||||
enablePublishBuildAssets: false
|
||||
enablePublishUsingPipelines: false
|
||||
enableTelemetry: true
|
||||
|
||||
variables:
|
||||
- group: Publish-Build-Assets
|
||||
# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
|
||||
# sync with the packages.config file.
|
||||
- name: DefaultGuardianVersion
|
||||
value: 0.109.0
|
||||
- name: GuardianPackagesConfigFile
|
||||
value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
|
||||
- name: GuardianVersion
|
||||
value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
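# coalesce resolves to overrideGuardianVersion when the caller supplies one; otherwise the
# $(DefaultGuardianVersion) macro defined above is used.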
|
||||
|
||||
jobs: ${{ parameters.jobs }}
|
||||
@ -7,7 +7,14 @@ parameters:
|
|||
|
||||
# Optional: Enable publishing using release pipelines
|
||||
enablePublishUsingPipelines: false
|
||||
|
||||
|
||||
# Optional: Enable running the source-build jobs to build repo from source
|
||||
enableSourceBuild: false
|
||||
|
||||
# Optional: Parameters for source-build template.
|
||||
# See /eng/common/templates/jobs/source-build.yml for options
|
||||
sourceBuildParameters: []
|
||||
|
||||
graphFileGeneration:
|
||||
# Optional: Enable generating the graph files at the end of the build
|
||||
enabled: false
|
||||
|
@ -20,10 +27,20 @@ parameters:
|
|||
# Optional: Override automatically derived dependsOn value for "publish build assets" job
|
||||
publishBuildAssetsDependsOn: ''
|
||||
|
||||
# Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
|
||||
publishAssetsImmediately: false
|
||||
|
||||
# Optional: when publishAssetsImmediately is used and additional parameters are needed, these pass along the parameters normally sent to post-build.yml
|
||||
artifactsPublishingAdditionalParameters: ''
|
||||
signingValidationAdditionalParameters: ''
|
||||
|
||||
# Optional: should run as a public build even in the internal project
|
||||
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
|
||||
runAsPublic: false
|
||||
|
||||
enableSourceIndex: false
|
||||
sourceIndexParams: {}
|
||||
|
||||
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
|
||||
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
|
||||
|
||||
|
@ -43,6 +60,20 @@ jobs:
|
|||
|
||||
name: ${{ job.job }}
|
||||
|
||||
- ${{ if eq(parameters.enableSourceBuild, true) }}:
|
||||
- template: /eng/common/templates/jobs/source-build.yml
|
||||
parameters:
|
||||
allCompletedJobId: Source_Build_Complete
|
||||
${{ each parameter in parameters.sourceBuildParameters }}:
|
||||
${{ parameter.key }}: ${{ parameter.value }}
|
||||
|
||||
- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
|
||||
- template: ../job/source-index-stage1.yml
|
||||
parameters:
|
||||
runAsPublic: ${{ parameters.runAsPublic }}
|
||||
${{ each parameter in parameters.sourceIndexParams }}:
|
||||
${{ parameter.key }}: ${{ parameter.value }}
|
||||
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
|
||||
- template: ../job/publish-build-assets.yml
|
||||
|
@ -55,18 +86,21 @@ jobs:
|
|||
- ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
|
||||
- ${{ each job in parameters.jobs }}:
|
||||
- ${{ job.job }}
|
||||
- ${{ if eq(parameters.enableSourceBuild, true) }}:
|
||||
- Source_Build_Complete
|
||||
pool:
|
||||
vmImage: vs2017-win2016
|
||||
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
|
||||
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
|
||||
name: VSEngSS-MicroBuild2022-1ES
|
||||
demands: Cmd
|
||||
# If it's not devdiv, it's dnceng
|
||||
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
|
||||
name: NetCore1ESPool-Internal
|
||||
demands: ImageOverride -equals windows.vs2019.amd64
|
||||
|
||||
runAsPublic: ${{ parameters.runAsPublic }}
|
||||
publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
|
||||
publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
|
||||
enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
|
||||
|
||||
- ${{ if eq(parameters.graphFileGeneration.enabled, true) }}:
|
||||
- template: ../job/generate-graph-files.yml
|
||||
parameters:
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
includeToolset: ${{ parameters.graphFileGeneration.includeToolset }}
|
||||
dependsOn:
|
||||
- Asset_Registry_Publish
|
||||
pool:
|
||||
vmImage: vs2017-win2016
|
||||
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
|
||||
signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
|
||||
@ -0,0 +1,46 @@
|
|||
parameters:
|
||||
# This template adds arcade-powered source-build to CI. A job is created for each platform, as
|
||||
# well as an optional server job that completes when all platform jobs complete.
|
||||
|
||||
# The name of the "join" job for all source-build platforms. If set to empty string, the job is
|
||||
# not included. Existing repo pipelines can use this job to depend on all source-build jobs
|
||||
# completing without maintaining a separate list of every single job ID: just depend on this one
|
||||
# server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
|
||||
allCompletedJobId: ''
|
||||
|
||||
# See /eng/common/templates/job/source-build.yml
|
||||
jobNamePrefix: 'Source_Build'
|
||||
|
||||
# This is the default platform provided by Arcade, intended for use by a managed-only repo.
|
||||
defaultManagedPlatform:
|
||||
name: 'Managed'
|
||||
container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-3e800f1-20190501005343'
|
||||
|
||||
# Defines the platforms on which to run build jobs. One job is created for each platform, and the
|
||||
# object in this array is sent to the job template as 'platform'. If no platforms are specified,
|
||||
# one job runs on 'defaultManagedPlatform'.
|
||||
platforms: []
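# Illustrative caller usage (values are examples only):
#   - template: /eng/common/templates/jobs/source-build.yml
#     parameters:
#       allCompletedJobId: Source_Build_Complete
#       platforms:
#       - name: 'Managed'
#         container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-3e800f1-20190501005343'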
|
||||
|
||||
jobs:
|
||||
|
||||
- ${{ if ne(parameters.allCompletedJobId, '') }}:
|
||||
- job: ${{ parameters.allCompletedJobId }}
|
||||
displayName: Source-Build Complete
|
||||
pool: server
|
||||
dependsOn:
|
||||
- ${{ each platform in parameters.platforms }}:
|
||||
- ${{ parameters.jobNamePrefix }}_${{ platform.name }}
|
||||
- ${{ if eq(length(parameters.platforms), 0) }}:
|
||||
- ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
|
||||
|
||||
- ${{ each platform in parameters.platforms }}:
|
||||
- template: /eng/common/templates/job/source-build.yml
|
||||
parameters:
|
||||
jobNamePrefix: ${{ parameters.jobNamePrefix }}
|
||||
platform: ${{ platform }}
|
||||
|
||||
- ${{ if eq(length(parameters.platforms), 0) }}:
|
||||
- template: /eng/common/templates/job/source-build.yml
|
||||
parameters:
|
||||
jobNamePrefix: ${{ parameters.jobNamePrefix }}
|
||||
platform: ${{ parameters.defaultManagedPlatform }}
|
|
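Taken together, the parameters above give a repo one source-build job per platform plus a single join job to depend on. A sketch of a consuming jobs list follows, assuming this new file lives at /eng/common/templates/jobs/source-build.yml; that path and the dependent job are assumptions for illustration, while the parameter shapes, the 'Source_Build_Complete' name, and the platform container come from the template itself.

jobs:
- template: /eng/common/templates/jobs/source-build.yml
  parameters:
    allCompletedJobId: Source_Build_Complete
    platforms:
    # each object in this list is handed to job/source-build.yml as 'platform'
    - name: Managed
      container: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-3e800f1-20190501005343
- job: After_Source_Build
  dependsOn: Source_Build_Complete   # one dependency instead of one per platform job
  steps:
  - script: echo all source-build platforms finished

The join job runs on the server pool, so it adds no agent time; it exists only so downstream jobs have a single, stable dependency name.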
@@ -1,130 +0,0 @@
parameters:
# Optional: Clean sources before building
clean: true

# Optional: Git fetch depth
fetchDepth: ''

# Optional: name of the phase (not specifying phase name may cause name collisions)
name: ''
# Optional: display name of the phase
displayName: ''

# Optional: condition for the job to run
condition: ''

# Optional: dependencies of the phase
dependsOn: ''

# Required: A defined YAML queue
queue: {}

# Required: build steps
steps: []

# Optional: variables
variables: {}

# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false

## Telemetry variables

# Optional: enable sending telemetry
# if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
# _HelixBuildConfig - differentiate between Debug, Release, other
# _HelixSource - Example: build/product
# _HelixType - Example: official/dotnet/arcade/$(Build.SourceBranch)
enableTelemetry: false

# Optional: Enable installing Microbuild plugin
# if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
# _TeamName - the name of your team
# _SignType - 'test' or 'real'
enableMicrobuild: false

# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds.

phases:
- phase: ${{ parameters.name }}

${{ if ne(parameters.displayName, '') }}:
displayName: ${{ parameters.displayName }}

${{ if ne(parameters.condition, '') }}:
condition: ${{ parameters.condition }}

${{ if ne(parameters.dependsOn, '') }}:
dependsOn: ${{ parameters.dependsOn }}

queue: ${{ parameters.queue }}

${{ if ne(parameters.variables, '') }}:
variables:
${{ insert }}: ${{ parameters.variables }}

steps:
- checkout: self
clean: ${{ parameters.clean }}
${{ if ne(parameters.fetchDepth, '') }}:
fetchDepth: ${{ parameters.fetchDepth }}

- ${{ if eq(parameters.enableTelemetry, 'true') }}:
- template: /eng/common/templates/steps/telemetry-start.yml
parameters:
buildConfig: $(_HelixBuildConfig)
helixSource: $(_HelixSource)
helixType: $(_HelixType)
runAsPublic: ${{ parameters.runAsPublic }}

- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
# Internal only resource, and Microbuild signing shouldn't be applied to PRs.
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildSigningPlugin@2
displayName: Install MicroBuild plugin
inputs:
signType: $(_SignType)
zipSources: false
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json

env:
TeamName: $(_TeamName)
continueOnError: false
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))

# Run provided build steps
- ${{ parameters.steps }}

- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
# Internal only resources
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildCleanup@1
displayName: Execute Microbuild cleanup tasks
condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
env:
TeamName: $(_TeamName)

- ${{ if eq(parameters.enableTelemetry, 'true') }}:
- template: /eng/common/templates/steps/telemetry-end.yml
parameters:
helixSource: $(_HelixSource)
helixType: $(_HelixType)

- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: CopyFiles@2
displayName: Gather Asset Manifests
inputs:
SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
continueOnError: false
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
- task: PublishBuildArtifacts@1
displayName: Push Asset Manifests
inputs:
PathtoPublish: '$(Build.StagingDirectory)/AssetManifests'
PublishLocation: Container
ArtifactName: AssetManifests
continueOnError: false
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
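The deleted template above documents its own contract: when enableTelemetry or enableMicrobuild is turned on, the _Helix*, _TeamName and _SignType values have to arrive through the variables object or the queue matrix. A hedged sketch of what a caller of this legacy phases template looked like follows; the template path, phase name, and queue name are assumptions, while the parameter and variable names are the ones listed in the comments above.

phases:
- template: /eng/common/templates/phases/base.yml   # assumed path of the removed template
  parameters:
    name: Windows_NT
    enableTelemetry: true
    enableMicrobuild: true
    queue:
      name: Hosted VS2017   # hypothetical queue
      matrix:
        debug:
          _BuildConfig: Debug
          _HelixBuildConfig: Debug
          _SignType: test
        release:
          _BuildConfig: Release
          _HelixBuildConfig: Release
          _SignType: real
    variables:
      _TeamName: SomeTeam   # hypothetical value
      _HelixSource: build/product
      _HelixType: official/dotnet/arcade/$(Build.SourceBranch)
    steps:
    - script: echo build steps go here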
@@ -1,51 +0,0 @@
parameters:
dependsOn: ''
queue: {}
configuration: 'Debug'
condition: succeeded()
continueOnError: false
runAsPublic: false
publishUsingPipelines: false
phases:
- phase: Asset_Registry_Publish
displayName: Publish to Build Asset Registry
dependsOn: ${{ parameters.dependsOn }}
queue: ${{ parameters.queue }}
variables:
_BuildConfig: ${{ parameters.configuration }}
steps:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: DownloadBuildArtifacts@0
displayName: Download artifact
inputs:
artifactName: AssetManifests
downloadPath: '$(Build.StagingDirectory)/Download'
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: AzureKeyVault@1
inputs:
azureSubscription: 'DotNet-Engineering-Services_KeyVault'
KeyVaultName: EngKeyVault
SecretsFilter: 'MaestroAccessToken'
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: PowerShell@2
displayName: Publish Build Assets
inputs:
filePath: eng\common\sdk-task.ps1
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
/p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
/p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
/p:Configuration=$(_BuildConfig)
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: PublishBuildArtifacts@1
displayName: Publish Logs to VSTS
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
PublishLocation: Container
ArtifactName: $(Agent.Os)_Asset_Registry_Publish
continueOnError: true
condition: always()
@@ -1,182 +0,0 @@
|
|||
parameters:
|
||||
BARBuildId: ''
|
||||
PromoteToChannelIds: ''
|
||||
artifactsPublishingAdditionalParameters: ''
|
||||
dependsOn:
|
||||
- Validate
|
||||
publishInstallersAndChecksums: true
|
||||
symbolPublishingAdditionalParameters: ''
|
||||
stageName: ''
|
||||
channelName: ''
|
||||
channelId: ''
|
||||
transportFeed: ''
|
||||
shippingFeed: ''
|
||||
symbolsFeed: ''
|
||||
|
||||
stages:
|
||||
- stage: ${{ parameters.stageName }}
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
variables:
|
||||
- template: ../common-variables.yml
|
||||
displayName: ${{ parameters.channelName }} Publishing
|
||||
jobs:
|
||||
- template: ../setup-maestro-vars.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
|
||||
- job: publish_symbols
|
||||
displayName: Symbol Publishing
|
||||
dependsOn: setupMaestroVars
|
||||
condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} ))
|
||||
variables:
|
||||
- group: DotNet-Symbol-Server-Pats
|
||||
- name: AzDOProjectName
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
|
||||
- name: AzDOPipelineId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
|
||||
- name: AzDOBuildId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
|
||||
pool:
|
||||
vmImage: 'windows-2019'
|
||||
steps:
|
||||
# This is necessary whenever we want to publish/restore to an AzDO private feed
|
||||
- task: NuGetAuthenticate@0
|
||||
displayName: 'Authenticate to AzDO Feeds'
|
||||
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: Download Build Assets
|
||||
continueOnError: true
|
||||
inputs:
|
||||
buildType: specific
|
||||
buildVersionToDownload: specific
|
||||
project: $(AzDOProjectName)
|
||||
pipeline: $(AzDOPipelineId)
|
||||
buildId: $(AzDOBuildId)
|
||||
downloadType: 'specific'
|
||||
itemPattern: |
|
||||
PdbArtifacts/**
|
||||
BlobArtifacts/**
|
||||
downloadPath: '$(Build.ArtifactStagingDirectory)'
|
||||
|
||||
# This is necessary whenever we want to publish/restore to an AzDO private feed
|
||||
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
|
||||
# otherwise it'll complain about accessing a private feed.
|
||||
- task: NuGetAuthenticate@0
|
||||
displayName: 'Authenticate to AzDO Feeds'
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Enable cross-org publishing
|
||||
inputs:
|
||||
filePath: eng\common\enable-cross-org-publishing.ps1
|
||||
arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Publish
|
||||
inputs:
|
||||
filePath: eng\common\sdk-task.ps1
|
||||
arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet
|
||||
/p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
|
||||
/p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
|
||||
/p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/'
|
||||
/p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
|
||||
/p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
|
||||
/p:Configuration=Release
|
||||
/p:PublishToMSDL=false
|
||||
${{ parameters.symbolPublishingAdditionalParameters }}
|
||||
|
||||
- template: ../../steps/publish-logs.yml
|
||||
parameters:
|
||||
StageLabel: '${{ parameters.stageName }}'
|
||||
JobLabel: 'SymbolPublishing'
|
||||
|
||||
- job: publish_assets
|
||||
displayName: Publish Assets
|
||||
dependsOn: setupMaestroVars
|
||||
timeoutInMinutes: 120
|
||||
variables:
|
||||
- name: BARBuildId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
|
||||
- name: IsStableBuild
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
|
||||
- name: AzDOProjectName
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
|
||||
- name: AzDOPipelineId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
|
||||
- name: AzDOBuildId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
|
||||
condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} ))
|
||||
pool:
|
||||
vmImage: 'windows-2019'
|
||||
steps:
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: Download Build Assets
|
||||
continueOnError: true
|
||||
inputs:
|
||||
buildType: specific
|
||||
buildVersionToDownload: specific
|
||||
project: $(AzDOProjectName)
|
||||
pipeline: $(AzDOPipelineId)
|
||||
buildId: $(AzDOBuildId)
|
||||
downloadType: 'specific'
|
||||
itemPattern: |
|
||||
PackageArtifacts/**
|
||||
BlobArtifacts/**
|
||||
AssetManifests/**
|
||||
downloadPath: '$(Build.ArtifactStagingDirectory)'
|
||||
|
||||
- task: NuGetToolInstaller@1
|
||||
displayName: 'Install NuGet.exe'
|
||||
|
||||
# This is necessary whenever we want to publish/restore to an AzDO private feed
|
||||
- task: NuGetAuthenticate@0
|
||||
displayName: 'Authenticate to AzDO Feeds'
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Enable cross-org publishing
|
||||
inputs:
|
||||
filePath: eng\common\enable-cross-org-publishing.ps1
|
||||
arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Publish Assets
|
||||
inputs:
|
||||
filePath: eng\common\sdk-task.ps1
|
||||
arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
|
||||
/p:PublishingInfraVersion=2
|
||||
/p:IsStableBuild=$(IsStableBuild)
|
||||
/p:IsInternalBuild=$(IsInternalBuild)
|
||||
/p:RepositoryName=$(Build.Repository.Name)
|
||||
/p:CommitSha=$(Build.SourceVersion)
|
||||
/p:NugetPath=$(NuGetExeToolPath)
|
||||
/p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)'
|
||||
/p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)'
|
||||
/p:BARBuildId=$(BARBuildId)
|
||||
/p:MaestroApiEndpoint='$(MaestroApiEndPoint)'
|
||||
/p:BuildAssetRegistryToken='$(MaestroApiAccessToken)'
|
||||
/p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/'
|
||||
/p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
|
||||
/p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
|
||||
/p:Configuration=Release
|
||||
/p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }}
|
||||
/p:ChecksumsTargetStaticFeed=$(InternalChecksumsBlobFeedUrl)
|
||||
/p:ChecksumsAzureAccountKey=$(InternalChecksumsBlobFeedKey)
|
||||
/p:InstallersTargetStaticFeed=$(InternalInstallersBlobFeedUrl)
|
||||
/p:InstallersAzureAccountKey=$(InternalInstallersBlobFeedKey)
|
||||
/p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}'
|
||||
/p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
|
||||
/p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}'
|
||||
/p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
|
||||
/p:AzureDevOpsStaticSymbolsFeed='${{ parameters.symbolsFeed }}'
|
||||
/p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
|
||||
/p:PublishToMSDL=false
|
||||
${{ parameters.artifactsPublishingAdditionalParameters }}
|
||||
|
||||
- template: ../../steps/publish-logs.yml
|
||||
parameters:
|
||||
StageLabel: '${{ parameters.stageName }}'
|
||||
JobLabel: 'AssetsPublishing'
|
||||
|
||||
- template: ../../steps/add-build-to-channel.yml
|
||||
parameters:
|
||||
ChannelId: ${{ parameters.channelId }}
|
|
@@ -1,184 +0,0 @@
|
|||
parameters:
|
||||
BARBuildId: ''
|
||||
PromoteToChannelIds: ''
|
||||
artifactsPublishingAdditionalParameters: ''
|
||||
dependsOn:
|
||||
- Validate
|
||||
publishInstallersAndChecksums: true
|
||||
symbolPublishingAdditionalParameters: ''
|
||||
stageName: ''
|
||||
channelName: ''
|
||||
channelId: ''
|
||||
transportFeed: ''
|
||||
shippingFeed: ''
|
||||
symbolsFeed: ''
|
||||
# If the channel name is empty, no links will be generated
|
||||
akaMSChannelName: ''
|
||||
|
||||
stages:
|
||||
- stage: ${{ parameters.stageName }}
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
variables:
|
||||
- template: ../common-variables.yml
|
||||
displayName: ${{ parameters.channelName }} Publishing
|
||||
jobs:
|
||||
- template: ../setup-maestro-vars.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
|
||||
- job: publish_symbols
|
||||
displayName: Symbol Publishing
|
||||
dependsOn: setupMaestroVars
|
||||
condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} ))
|
||||
variables:
|
||||
- group: DotNet-Symbol-Server-Pats
|
||||
- name: AzDOProjectName
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
|
||||
- name: AzDOPipelineId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
|
||||
- name: AzDOBuildId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
|
||||
pool:
|
||||
vmImage: 'windows-2019'
|
||||
steps:
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: Download Build Assets
|
||||
continueOnError: true
|
||||
inputs:
|
||||
buildType: specific
|
||||
buildVersionToDownload: specific
|
||||
project: $(AzDOProjectName)
|
||||
pipeline: $(AzDOPipelineId)
|
||||
buildId: $(AzDOBuildId)
|
||||
downloadType: 'specific'
|
||||
itemPattern: |
|
||||
PdbArtifacts/**
|
||||
BlobArtifacts/**
|
||||
downloadPath: '$(Build.ArtifactStagingDirectory)'
|
||||
|
||||
# This is necessary whenever we want to publish/restore to an AzDO private feed
|
||||
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
|
||||
# otherwise it'll complain about accessing a private feed.
|
||||
- task: NuGetAuthenticate@0
|
||||
displayName: 'Authenticate to AzDO Feeds'
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Enable cross-org publishing
|
||||
inputs:
|
||||
filePath: eng\common\enable-cross-org-publishing.ps1
|
||||
arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Publish
|
||||
inputs:
|
||||
filePath: eng\common\sdk-task.ps1
|
||||
arguments: -task PublishToSymbolServers -restore -msbuildEngine dotnet
|
||||
/p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
|
||||
/p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
|
||||
/p:PDBArtifactsDirectory='$(Build.ArtifactStagingDirectory)/PDBArtifacts/'
|
||||
/p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
|
||||
/p:SymbolPublishingExclusionsFile='$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
|
||||
/p:Configuration=Release
|
||||
${{ parameters.symbolPublishingAdditionalParameters }}
|
||||
|
||||
- template: ../../steps/publish-logs.yml
|
||||
parameters:
|
||||
StageLabel: '${{ parameters.stageName }}'
|
||||
JobLabel: 'SymbolPublishing'
|
||||
|
||||
- job: publish_assets
|
||||
displayName: Publish Assets
|
||||
dependsOn: setupMaestroVars
|
||||
timeoutInMinutes: 120
|
||||
variables:
|
||||
- name: BARBuildId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
|
||||
- name: IsStableBuild
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.IsStableBuild'] ]
|
||||
- name: AzDOProjectName
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
|
||||
- name: AzDOPipelineId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
|
||||
- name: AzDOBuildId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
|
||||
- name: ArtifactsCategory
|
||||
value: ${{ coalesce(variables._DotNetArtifactsCategory, '.NETCore') }}
|
||||
condition: contains(dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'], format('[{0}]', ${{ parameters.channelId }} ))
|
||||
pool:
|
||||
vmImage: 'windows-2019'
|
||||
steps:
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: Download Build Assets
|
||||
continueOnError: true
|
||||
inputs:
|
||||
buildType: specific
|
||||
buildVersionToDownload: specific
|
||||
project: $(AzDOProjectName)
|
||||
pipeline: $(AzDOPipelineId)
|
||||
buildId: $(AzDOBuildId)
|
||||
downloadType: 'specific'
|
||||
itemPattern: |
|
||||
PackageArtifacts/**
|
||||
BlobArtifacts/**
|
||||
AssetManifests/**
|
||||
downloadPath: '$(Build.ArtifactStagingDirectory)'
|
||||
|
||||
- task: NuGetToolInstaller@1
|
||||
displayName: 'Install NuGet.exe'
|
||||
|
||||
# This is necessary whenever we want to publish/restore to an AzDO private feed
|
||||
- task: NuGetAuthenticate@0
|
||||
displayName: 'Authenticate to AzDO Feeds'
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Enable cross-org publishing
|
||||
inputs:
|
||||
filePath: eng\common\enable-cross-org-publishing.ps1
|
||||
arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Publish Assets
|
||||
inputs:
|
||||
filePath: eng\common\sdk-task.ps1
|
||||
arguments: -task PublishArtifactsInManifest -restore -msbuildEngine dotnet
|
||||
/p:PublishingInfraVersion=2
|
||||
/p:ArtifactsCategory=$(ArtifactsCategory)
|
||||
/p:IsStableBuild=$(IsStableBuild)
|
||||
/p:IsInternalBuild=$(IsInternalBuild)
|
||||
/p:RepositoryName=$(Build.Repository.Name)
|
||||
/p:CommitSha=$(Build.SourceVersion)
|
||||
/p:NugetPath=$(NuGetExeToolPath)
|
||||
/p:AzdoTargetFeedPAT='$(dn-bot-dnceng-universal-packages-rw)'
|
||||
/p:AzureStorageTargetFeedPAT='$(dotnetfeed-storage-access-key-1)'
|
||||
/p:BARBuildId=$(BARBuildId)
|
||||
/p:MaestroApiEndpoint='$(MaestroApiEndPoint)'
|
||||
/p:BuildAssetRegistryToken='$(MaestroApiAccessToken)'
|
||||
/p:ManifestsBasePath='$(Build.ArtifactStagingDirectory)/AssetManifests/'
|
||||
/p:BlobBasePath='$(Build.ArtifactStagingDirectory)/BlobArtifacts/'
|
||||
/p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts/'
|
||||
/p:Configuration=Release
|
||||
/p:PublishInstallersAndChecksums=${{ parameters.publishInstallersAndChecksums }}
|
||||
/p:InstallersTargetStaticFeed=$(InstallersBlobFeedUrl)
|
||||
/p:InstallersAzureAccountKey=$(dotnetcli-storage-key)
|
||||
/p:ChecksumsTargetStaticFeed=$(ChecksumsBlobFeedUrl)
|
||||
/p:ChecksumsAzureAccountKey=$(dotnetclichecksums-storage-key)
|
||||
/p:AzureDevOpsStaticShippingFeed='${{ parameters.shippingFeed }}'
|
||||
/p:AzureDevOpsStaticShippingFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
|
||||
/p:AzureDevOpsStaticTransportFeed='${{ parameters.transportFeed }}'
|
||||
/p:AzureDevOpsStaticTransportFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
|
||||
/p:AzureDevOpsStaticSymbolsFeed='${{ parameters.symbolsFeed }}'
|
||||
/p:AzureDevOpsStaticSymbolsFeedKey='$(dn-bot-dnceng-artifact-feeds-rw)'
|
||||
/p:LatestLinkShortUrlPrefix=dotnet/'${{ parameters.akaMSChannelName }}'
|
||||
/p:AkaMSClientId=$(akams-client-id)
|
||||
/p:AkaMSClientSecret=$(akams-client-secret)
|
||||
${{ parameters.artifactsPublishingAdditionalParameters }}
|
||||
|
||||
- template: ../../steps/publish-logs.yml
|
||||
parameters:
|
||||
StageLabel: '${{ parameters.stageName }}'
|
||||
JobLabel: 'AssetsPublishing'
|
||||
|
||||
- template: ../../steps/add-build-to-channel.yml
|
||||
parameters:
|
||||
ChannelId: ${{ parameters.channelId }}
|
|
@@ -4,54 +4,6 @@ variables:
- group: DotNet-DotNetCli-Storage
- group: DotNet-MSRC-Storage
- group: Publish-Build-Assets

# .NET Core 3.1 Dev
- name: PublicDevRelease_31_Channel_Id
value: 128

# .NET 5 Dev
- name: Net_5_Dev_Channel_Id
value: 131

# .NET Eng - Validation
- name: Net_Eng_Validation_Channel_Id
value: 9

# .NET Eng - Latest
- name: Net_Eng_Latest_Channel_Id
value: 2

# .NET 3 Eng - Validation
- name: NET_3_Eng_Validation_Channel_Id
value: 390

# .NET 3 Eng
- name: NetCore_3_Tools_Channel_Id
value: 344

# .NET Core 3.0 Internal Servicing
- name: InternalServicing_30_Channel_Id
value: 184

# .NET Core 3.0 Release
- name: PublicRelease_30_Channel_Id
value: 19

# .NET Core 3.1 Release
- name: PublicRelease_31_Channel_Id
value: 129

# General Testing
- name: GeneralTesting_Channel_Id
value: 529

# .NET Core 3.1 Blazor Features
- name: NetCore_31_Blazor_Features_Channel_Id
value: 531

# .NET Core Experimental
- name: NetCore_Experimental_Channel_Id
value: 562

# Whether the build is internal or not
- name: IsInternalBuild
@@ -70,30 +22,5 @@ variables:
- name: SymbolToolVersion
value: 1.0.1

# Feed Configurations
# These should include the suffix "/index.json"

# Default locations for Installers and checksums
# Public Locations
- name: ChecksumsBlobFeedUrl
value: https://dotnetclichecksums.blob.core.windows.net/dotnet/index.json
- name: InstallersBlobFeedUrl
value: https://dotnetcli.blob.core.windows.net/dotnet/index.json

# Private Locations
- name: InternalChecksumsBlobFeedUrl
value: https://dotnetclichecksumsmsrc.blob.core.windows.net/dotnet/index.json
- name: InternalChecksumsBlobFeedKey
value: $(dotnetclichecksumsmsrc-storage-key)

- name: InternalInstallersBlobFeedUrl
value: https://dotnetclimsrc.blob.core.windows.net/dotnet/index.json
- name: InternalInstallersBlobFeedKey
value: $(dotnetclimsrc-access-key)

# Skip component governance and codesign validation for SDL. These jobs
# create no content.
- name: skipComponentGovernanceDetection
value: true
- name: runCodesignValidationInjection
value: false
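These values are meant to be pulled in wholesale by the publishing stage templates: a stage includes the file in its variables block (as the stages elsewhere in this diff do with "- template: ../common-variables.yml") and then reads individual entries. A small sketch, with the stage and job names being illustrative only:

stages:
- stage: Example_Publish
  variables:
  - template: common-variables.yml   # path is relative to the including template
  jobs:
  - job: show_feeds
    pool:
      vmImage: 'windows-2019'
    steps:
    - script: echo installers feed is $(InternalInstallersBlobFeedUrl), symbol tool version $(SymbolToolVersion)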
@@ -1,605 +1,285 @@
parameters:
# Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
# Publishing V2 accepts optionally outlining the publishing stages - default is inline.
# Publishing V3 DOES NOT accept inlining the publishing stages.
publishingInfraVersion: 2
# When set to true the publishing templates from the repo will be used
# otherwise Darc add-build-to-channel will be used to trigger the promotion pipeline
inline: true
# Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
# Publishing V1 is no longer supported
# Publishing V2 is no longer supported
# Publishing V3 is the default
- name: publishingInfraVersion
displayName: Which version of publishing should be used to promote the build definition?
type: number
default: 3
values:
- 3

# Only used if inline==false. When set to true will stall the current build until
# the Promotion Pipeline build finishes. Otherwise, the current build will continue
# execution concurrently with the promotion build.
waitPublishingFinish: true
- name: BARBuildId
displayName: BAR Build Id
type: number
default: 0

BARBuildId: ''
PromoteToChannelIds: ''
- name: PromoteToChannelIds
displayName: Channel to promote BARBuildId to
type: string
default: ''

enableSourceLinkValidation: false
enableSigningValidation: true
enableSymbolValidation: false
enableNugetValidation: true
publishInstallersAndChecksums: true
SDLValidationParameters:
enable: false
continueOnError: false
params: ''
artifactNames: ''
downloadArtifacts: true
- name: enableSourceLinkValidation
displayName: Enable SourceLink validation
type: boolean
default: false

- name: enableSigningValidation
displayName: Enable signing validation
type: boolean
default: true

- name: enableSymbolValidation
displayName: Enable symbol validation
type: boolean
default: false

- name: enableNugetValidation
displayName: Enable NuGet validation
type: boolean
default: true

- name: publishInstallersAndChecksums
displayName: Publish installers and checksums
type: boolean
default: true

- name: SDLValidationParameters
type: object
default:
enable: false
publishGdn: false
continueOnError: false
params: ''
artifactNames: ''
downloadArtifacts: true

# These parameters let the user customize the call to sdk-task.ps1 for publishing
# symbols & general artifacts as well as for signing validation
symbolPublishingAdditionalParameters: ''
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
useBuildManifest: false
- name: symbolPublishingAdditionalParameters
displayName: Symbol publishing additional parameters
type: string
default: ''

- name: artifactsPublishingAdditionalParameters
displayName: Artifact publishing additional parameters
type: string
default: ''

- name: signingValidationAdditionalParameters
displayName: Signing validation additional parameters
type: string
default: ''

# Which stages should finish execution before post-build stages start
validateDependsOn:
- build
publishDependsOn:
- Validate
- name: validateDependsOn
type: object
default:
- build

- name: publishDependsOn
type: object
default:
- Validate

# Optional: Call asset publishing rather than running in a separate stage
- name: publishAssetsImmediately
type: boolean
default: false

# Channel IDs instantiated in this file.
# When adding a new channel implementation the call to `check-channel-consistency.ps1`
# needs to be updated with the new channel ID
NetEngLatestChannelId: 2
NetEngValidationChannelId: 9
NetDev5ChannelId: 131
NetDev6ChannelId: 1296
GeneralTestingChannelId: 529
NETCoreToolingDevChannelId: 548
NETCoreToolingReleaseChannelId: 549
NETInternalToolingChannelId: 551
NETCoreExperimentalChannelId: 562
NetEngServicesIntChannelId: 678
NetEngServicesProdChannelId: 679
Net5Preview8ChannelId: 1155
Net5RC1ChannelId: 1157
Net5RC2ChannelId: 1329
NetCoreSDK313xxChannelId: 759
NetCoreSDK313xxInternalChannelId: 760
NetCoreSDK314xxChannelId: 921
NetCoreSDK314xxInternalChannelId: 922
VS166ChannelId: 1010
VS167ChannelId: 1011
VS168ChannelId: 1154
VSMasterChannelId: 1012
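With the new typed parameter shape above, a repo's pipeline opts into post-build validation and publishing by instantiating this template with explicit values, roughly as sketched below; the template path and the chosen values are assumptions for illustration, while the parameter names and defaults come from the block above.

stages:
- template: eng\common\templates\post-build\post-build.yml   # assumed location of this file
  parameters:
    publishingInfraVersion: 3
    enableSourceLinkValidation: true
    enableSigningValidation: true
    enableNugetValidation: true
    publishInstallersAndChecksums: true
    validateDependsOn:
    - build
    SDLValidationParameters:
      enable: true
      publishGdn: false
      continueOnError: false
      params: ''
      artifactNames: ''
      downloadArtifacts: true

Because publishingInfraVersion only allows the value 3, older V1/V2 inlined publishing paths are effectively unreachable from this entry point.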
stages:
|
||||
- stage: Validate
|
||||
dependsOn: ${{ parameters.validateDependsOn }}
|
||||
displayName: Validate Build Assets
|
||||
variables:
|
||||
- template: common-variables.yml
|
||||
jobs:
|
||||
- template: setup-maestro-vars.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
|
||||
- job:
|
||||
displayName: Post-build Checks
|
||||
dependsOn: setupMaestroVars
|
||||
variables:
|
||||
- name: TargetChannels
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.TargetChannels'] ]
|
||||
pool:
|
||||
vmImage: 'windows-2019'
|
||||
steps:
|
||||
- task: PowerShell@2
|
||||
displayName: Maestro Channels Consistency
|
||||
inputs:
|
||||
filePath: $(Build.SourcesDirectory)/eng/common/post-build/check-channel-consistency.ps1
|
||||
arguments: -PromoteToChannels "$(TargetChannels)"
|
||||
-AvailableChannelIds ${{parameters.NetEngLatestChannelId}},${{parameters.NetEngValidationChannelId}},${{parameters.NetDev5ChannelId}},${{parameters.NetDev6ChannelId}},${{parameters.GeneralTestingChannelId}},${{parameters.NETCoreToolingDevChannelId}},${{parameters.NETCoreToolingReleaseChannelId}},${{parameters.NETInternalToolingChannelId}},${{parameters.NETCoreExperimentalChannelId}},${{parameters.NetEngServicesIntChannelId}},${{parameters.NetEngServicesProdChannelId}},${{parameters.Net5Preview8ChannelId}},${{parameters.Net5RC1ChannelId}},${{parameters.Net5RC2ChannelId}},${{parameters.NetCoreSDK313xxChannelId}},${{parameters.NetCoreSDK313xxInternalChannelId}},${{parameters.NetCoreSDK314xxChannelId}},${{parameters.NetCoreSDK314xxInternalChannelId}},${{parameters.VS166ChannelId}},${{parameters.VS167ChannelId}},${{parameters.VS168ChannelId}},${{parameters.VSMasterChannelId}}
|
||||
|
||||
- job:
|
||||
displayName: NuGet Validation
|
||||
dependsOn: setupMaestroVars
|
||||
condition: eq( ${{ parameters.enableNugetValidation }}, 'true')
|
||||
pool:
|
||||
vmImage: 'windows-2019'
|
||||
variables:
|
||||
- name: AzDOProjectName
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
|
||||
- name: AzDOPipelineId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
|
||||
- name: AzDOBuildId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
|
||||
steps:
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: Download Package Artifacts
|
||||
inputs:
|
||||
buildType: specific
|
||||
buildVersionToDownload: specific
|
||||
project: $(AzDOProjectName)
|
||||
pipeline: $(AzDOPipelineId)
|
||||
buildId: $(AzDOBuildId)
|
||||
artifactName: PackageArtifacts
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Validate
|
||||
inputs:
|
||||
filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
|
||||
arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
|
||||
-ToolDestinationPath $(Agent.BuildDirectory)/Extract/
|
||||
|
||||
- job:
|
||||
displayName: Signing Validation
|
||||
dependsOn: setupMaestroVars
|
||||
condition: eq( ${{ parameters.enableSigningValidation }}, 'true')
|
||||
- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
|
||||
- stage: Validate
|
||||
dependsOn: ${{ parameters.validateDependsOn }}
|
||||
displayName: Validate Build Assets
|
||||
variables:
|
||||
- template: common-variables.yml
|
||||
- name: AzDOProjectName
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
|
||||
- name: AzDOPipelineId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
|
||||
- name: AzDOBuildId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
|
||||
pool:
|
||||
vmImage: 'windows-2019'
|
||||
steps:
|
||||
- ${{ if eq(parameters.useBuildManifest, true) }}:
|
||||
jobs:
|
||||
- job:
|
||||
displayName: NuGet Validation
|
||||
condition: eq( ${{ parameters.enableNugetValidation }}, 'true')
|
||||
pool:
|
||||
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
|
||||
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
|
||||
name: VSEngSS-MicroBuild2022-1ES
|
||||
demands: Cmd
|
||||
# If it's not devdiv, it's dnceng
|
||||
${{ else }}:
|
||||
name: NetCore1ESPool-Internal
|
||||
demands: ImageOverride -equals windows.vs2019.amd64
|
||||
|
||||
steps:
|
||||
- template: setup-maestro-vars.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: Download build manifest
|
||||
displayName: Download Package Artifacts
|
||||
inputs:
|
||||
buildType: specific
|
||||
buildVersionToDownload: specific
|
||||
project: $(AzDOProjectName)
|
||||
pipeline: $(AzDOPipelineId)
|
||||
buildId: $(AzDOBuildId)
|
||||
artifactName: BuildManifests
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: Download Package Artifacts
|
||||
inputs:
|
||||
buildType: specific
|
||||
buildVersionToDownload: specific
|
||||
project: $(AzDOProjectName)
|
||||
pipeline: $(AzDOPipelineId)
|
||||
buildId: $(AzDOBuildId)
|
||||
artifactName: PackageArtifacts
|
||||
artifactName: PackageArtifacts
|
||||
checkDownloadedFiles: true
|
||||
|
||||
# This is necessary whenever we want to publish/restore to an AzDO private feed
|
||||
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
|
||||
# otherwise it'll complain about accessing a private feed.
|
||||
- task: NuGetAuthenticate@0
|
||||
displayName: 'Authenticate to AzDO Feeds'
|
||||
- task: PowerShell@2
|
||||
displayName: Validate
|
||||
inputs:
|
||||
filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
|
||||
arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
|
||||
-ToolDestinationPath $(Agent.BuildDirectory)/Extract/
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Enable cross-org publishing
|
||||
inputs:
|
||||
filePath: eng\common\enable-cross-org-publishing.ps1
|
||||
arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
|
||||
- job:
|
||||
displayName: Signing Validation
|
||||
condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
|
||||
pool:
|
||||
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
|
||||
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
|
||||
name: VSEngSS-MicroBuild2022-1ES
|
||||
demands: Cmd
|
||||
# If it's not devdiv, it's dnceng
|
||||
${{ else }}:
|
||||
name: NetCore1ESPool-Internal
|
||||
demands: ImageOverride -equals windows.vs2019.amd64
|
||||
steps:
|
||||
- template: setup-maestro-vars.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
|
||||
# Signing validation will optionally work with the buildmanifest file which is downloaded from
|
||||
# Azure DevOps above.
|
||||
- task: PowerShell@2
|
||||
displayName: Validate
|
||||
inputs:
|
||||
filePath: eng\common\sdk-task.ps1
|
||||
arguments: -task SigningValidation -restore -msbuildEngine vs
|
||||
/p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
|
||||
/p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
|
||||
${{ parameters.signingValidationAdditionalParameters }}
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: Download Package Artifacts
|
||||
inputs:
|
||||
buildType: specific
|
||||
buildVersionToDownload: specific
|
||||
project: $(AzDOProjectName)
|
||||
pipeline: $(AzDOPipelineId)
|
||||
buildId: $(AzDOBuildId)
|
||||
artifactName: PackageArtifacts
|
||||
checkDownloadedFiles: true
|
||||
itemPattern: |
|
||||
**
|
||||
!**/Microsoft.SourceBuild.Intermediate.*.nupkg
|
||||
|
||||
- template: ../steps/publish-logs.yml
|
||||
parameters:
|
||||
StageLabel: 'Validation'
|
||||
JobLabel: 'Signing'
|
||||
# This is necessary whenever we want to publish/restore to an AzDO private feed
|
||||
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
|
||||
# otherwise it'll complain about accessing a private feed.
|
||||
- task: NuGetAuthenticate@0
|
||||
displayName: 'Authenticate to AzDO Feeds'
|
||||
|
||||
- job:
|
||||
displayName: SourceLink Validation
|
||||
dependsOn: setupMaestroVars
|
||||
condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
|
||||
variables:
|
||||
- template: common-variables.yml
|
||||
- name: AzDOProjectName
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOProjectName'] ]
|
||||
- name: AzDOPipelineId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOPipelineId'] ]
|
||||
- name: AzDOBuildId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.AzDOBuildId'] ]
|
||||
pool:
|
||||
vmImage: 'windows-2019'
|
||||
steps:
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: Download Blob Artifacts
|
||||
inputs:
|
||||
buildType: specific
|
||||
buildVersionToDownload: specific
|
||||
project: $(AzDOProjectName)
|
||||
pipeline: $(AzDOPipelineId)
|
||||
buildId: $(AzDOBuildId)
|
||||
artifactName: BlobArtifacts
|
||||
- task: PowerShell@2
|
||||
displayName: Enable cross-org publishing
|
||||
inputs:
|
||||
filePath: eng\common\enable-cross-org-publishing.ps1
|
||||
arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Validate
|
||||
inputs:
|
||||
filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
|
||||
arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
|
||||
-ExtractPath $(Agent.BuildDirectory)/Extract/
|
||||
-GHRepoName $(Build.Repository.Name)
|
||||
-GHCommit $(Build.SourceVersion)
|
||||
-SourcelinkCliVersion $(SourceLinkCLIVersion)
|
||||
continueOnError: true
|
||||
# Signing validation will optionally work with the buildmanifest file which is downloaded from
|
||||
# Azure DevOps above.
|
||||
- task: PowerShell@2
|
||||
displayName: Validate
|
||||
inputs:
|
||||
filePath: eng\common\sdk-task.ps1
|
||||
arguments: -task SigningValidation -restore -msbuildEngine vs
|
||||
/p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
|
||||
/p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
|
||||
${{ parameters.signingValidationAdditionalParameters }}
|
||||
|
||||
- template: /eng/common/templates/job/execute-sdl.yml
|
||||
parameters:
|
||||
enable: ${{ parameters.SDLValidationParameters.enable }}
|
||||
dependsOn: setupMaestroVars
|
||||
additionalParameters: ${{ parameters.SDLValidationParameters.params }}
|
||||
continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
|
||||
artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
|
||||
downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }}
|
||||
- template: ../steps/publish-logs.yml
|
||||
parameters:
|
||||
StageLabel: 'Validation'
|
||||
JobLabel: 'Signing'
|
||||
|
||||
- ${{ if or(ge(parameters.publishingInfraVersion, 3), eq(parameters.inline, 'false')) }}:
|
||||
- job:
|
||||
displayName: SourceLink Validation
|
||||
condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
|
||||
pool:
|
||||
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
|
||||
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
|
||||
name: VSEngSS-MicroBuild2022-1ES
|
||||
demands: Cmd
|
||||
# If it's not devdiv, it's dnceng
|
||||
${{ else }}:
|
||||
name: NetCore1ESPool-Internal
|
||||
demands: ImageOverride -equals windows.vs2019.amd64
|
||||
steps:
|
||||
- template: setup-maestro-vars.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: Download Blob Artifacts
|
||||
inputs:
|
||||
buildType: specific
|
||||
buildVersionToDownload: specific
|
||||
project: $(AzDOProjectName)
|
||||
pipeline: $(AzDOPipelineId)
|
||||
buildId: $(AzDOBuildId)
|
||||
artifactName: BlobArtifacts
|
||||
checkDownloadedFiles: true
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Validate
|
||||
inputs:
|
||||
filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
|
||||
arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
|
||||
-ExtractPath $(Agent.BuildDirectory)/Extract/
|
||||
-GHRepoName $(Build.Repository.Name)
|
||||
-GHCommit $(Build.SourceVersion)
|
||||
-SourcelinkCliVersion $(SourceLinkCLIVersion)
|
||||
continueOnError: true
|
||||
|
||||
- template: /eng/common/templates/job/execute-sdl.yml
|
||||
parameters:
|
||||
enable: ${{ parameters.SDLValidationParameters.enable }}
|
||||
publishGuardianDirectoryToPipeline: ${{ parameters.SDLValidationParameters.publishGdn }}
|
||||
additionalParameters: ${{ parameters.SDLValidationParameters.params }}
|
||||
continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
|
||||
artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
|
||||
downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }}
|
||||
|
||||
- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
|
||||
- stage: publish_using_darc
|
||||
dependsOn: Validate
|
||||
${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
|
||||
dependsOn: ${{ parameters.publishDependsOn }}
|
||||
${{ else }}:
|
||||
dependsOn: ${{ parameters.validateDependsOn }}
|
||||
displayName: Publish using Darc
|
||||
variables:
|
||||
- template: common-variables.yml
|
||||
jobs:
|
||||
- template: setup-maestro-vars.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
|
||||
- job:
|
||||
displayName: Publish Using Darc
|
||||
dependsOn: setupMaestroVars
|
||||
variables:
|
||||
- name: BARBuildId
|
||||
value: $[ dependencies.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
|
||||
timeoutInMinutes: 120
|
||||
pool:
|
||||
vmImage: 'windows-2019'
|
||||
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
|
||||
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
|
||||
name: VSEngSS-MicroBuild2022-1ES
|
||||
demands: Cmd
|
||||
# If it's not devdiv, it's dnceng
|
||||
${{ else }}:
|
||||
name: NetCore1ESPool-Internal
|
||||
demands: ImageOverride -equals windows.vs2019.amd64
|
||||
steps:
|
||||
- template: setup-maestro-vars.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
|
||||
- task: NuGetAuthenticate@0
|
||||
|
||||
- task: PowerShell@2
|
||||
displayName: Publish Using Darc
|
||||
inputs:
|
||||
filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
|
||||
arguments: -BuildId $(BARBuildId)
|
||||
-PublishingInfraVersion ${{ parameters.PublishingInfraVersion }}
|
||||
-PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
|
||||
-AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
|
||||
-MaestroToken '$(MaestroApiAccessToken)'
|
||||
-WaitPublishingFinish ${{ parameters.waitPublishingFinish }}
|
||||
-PublishInstallersAndChecksums ${{ parameters.publishInstallersAndChecksums }}
|
||||
|
||||
- ${{ if and(le(parameters.publishingInfraVersion, 2), eq(parameters.inline, 'true')) }}:
|
||||
- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
|
||||
dependsOn: ${{ parameters.publishDependsOn }}
|
||||
publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
|
||||
symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
|
||||
stageName: 'NetCore_Dev5_Publish'
|
||||
channelName: '.NET 5 Dev'
|
||||
akaMSChannelName: 'net5/dev'
|
||||
channelId: ${{ parameters.NetDev5ChannelId }}
|
||||
transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-transport/nuget/v3/index.json'
|
||||
shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json'
|
||||
symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-symbols/nuget/v3/index.json'
|
||||
|
||||
- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
|
||||
dependsOn: ${{ parameters.publishDependsOn }}
|
||||
publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
|
||||
symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
|
||||
stageName: 'NetCore_Dev6_Publish'
|
||||
channelName: '.NET 6 Dev'
|
||||
akaMSChannelName: 'net6/dev'
|
||||
channelId: ${{ parameters.NetDev6ChannelId }}
|
||||
transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet6-transport/nuget/v3/index.json'
|
||||
shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet6/nuget/v3/index.json'
|
||||
symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet6-symbols/nuget/v3/index.json'
|
||||
|
||||
- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
|
||||
dependsOn: ${{ parameters.publishDependsOn }}
|
||||
publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
|
||||
symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
|
||||
stageName: 'Net5_Preview8_Publish'
|
||||
channelName: '.NET 5 Preview 8'
|
||||
akaMSChannelName: 'net5/preview8'
|
||||
channelId: ${{ parameters.Net5Preview8ChannelId }}
|
||||
transportFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v3/index.json'
|
||||
shippingFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal/nuget/v3/index.json'
|
||||
symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-symbols/nuget/v3/index.json'
|
||||
|
||||
- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
|
||||
dependsOn: ${{ parameters.publishDependsOn }}
|
||||
publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
|
||||
symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
|
||||
stageName: 'Net5_RC1_Publish'
|
||||
channelName: '.NET 5 RC 1'
|
||||
akaMSChannelName: 'net5/rc1'
|
||||
channelId: ${{ parameters.Net5RC1ChannelId }}
|
||||
transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-transport/nuget/v3/index.json'
|
||||
shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json'
|
||||
symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-symbols/nuget/v3/index.json'
|
||||
|
||||
- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
|
||||
parameters:
|
||||
BARBuildId: ${{ parameters.BARBuildId }}
|
||||
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
|
||||
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
|
||||
dependsOn: ${{ parameters.publishDependsOn }}
|
||||
publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
|
||||
symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
|
||||
stageName: 'Net5_RC2_Publish'
|
||||
channelName: '.NET 5 RC 2'
|
||||
akaMSChannelName: 'net5/rc2'
|
||||
channelId: ${{ parameters.Net5RC2ChannelId }}
|
||||
transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-transport/nuget/v3/index.json'
|
||||
shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5/nuget/v3/index.json'
|
||||
symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet5-symbols/nuget/v3/index.json'
|
||||
|
||||
- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
|
||||
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'Net_Eng_Latest_Publish'
    channelName: '.NET Eng - Latest'
    akaMSChannelName: 'eng/daily'
    channelId: ${{ parameters.NetEngLatestChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'Net_Eng_Validation_Publish'
    channelName: '.NET Eng - Validation'
    akaMSChannelName: 'eng/validation'
    channelId: ${{ parameters.NetEngValidationChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'General_Testing_Publish'
    channelName: 'General Testing'
    akaMSChannelName: 'generaltesting'
    channelId: ${{ parameters.GeneralTestingChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/general-testing-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'NETCore_Tooling_Dev_Publishing'
    channelName: '.NET Core Tooling Dev'
    channelId: ${{ parameters.NETCoreToolingDevChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'NETCore_Tooling_Release_Publishing'
    channelName: '.NET Core Tooling Release'
    channelId: ${{ parameters.NETCoreToolingReleaseChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'NET_Internal_Tooling_Publishing'
    channelName: '.NET Internal Tooling'
    channelId: ${{ parameters.NETInternalToolingChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'NETCore_Experimental_Publishing'
    channelName: '.NET Core Experimental'
    channelId: ${{ parameters.NETCoreExperimentalChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-experimental-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'Net_Eng_Services_Int_Publish'
    channelName: '.NET Eng Services - Int'
    channelId: ${{ parameters.NetEngServicesIntChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'Net_Eng_Services_Prod_Publish'
    channelName: '.NET Eng Services - Prod'
    channelId: ${{ parameters.NetEngServicesProdChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'NETCore_SDK_314xx_Publishing'
    channelName: '.NET Core SDK 3.1.4xx'
    channelId: ${{ parameters.NetCoreSDK314xxChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'NETCore_SDK_314xx_Internal_Publishing'
    channelName: '.NET Core SDK 3.1.4xx Internal'
    channelId: ${{ parameters.NetCoreSDK314xxInternalChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'NETCore_SDK_313xx_Publishing'
    channelName: '.NET Core SDK 3.1.3xx'
    channelId: ${{ parameters.NetCoreSDK313xxChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-transport/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet3.1-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-internal-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'NETCore_SDK_313xx_Internal_Publishing'
    channelName: '.NET Core SDK 3.1.3xx Internal'
    channelId: ${{ parameters.NetCoreSDK313xxInternalChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'VS16_6_Publishing'
    channelName: 'VS 16.6'
    channelId: ${{ parameters.VS166ChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'VS16_7_Publishing'
    channelName: 'VS 16.7'
    channelId: ${{ parameters.VS167ChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'VS16_8_Publishing'
    channelName: 'VS 16.8'
    channelId: ${{ parameters.VS168ChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'

- template: \eng\common\templates\post-build\channels\generic-public-channel.yml
  parameters:
    BARBuildId: ${{ parameters.BARBuildId }}
    PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
    artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
    dependsOn: ${{ parameters.publishDependsOn }}
    publishInstallersAndChecksums: ${{ parameters.publishInstallersAndChecksums }}
    symbolPublishingAdditionalParameters: ${{ parameters.symbolPublishingAdditionalParameters }}
    stageName: 'VS_Master_Publishing'
    channelName: 'VS Master'
    channelId: ${{ parameters.VSMasterChannelId }}
    transportFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-transport/nuget/v3/index.json'
    shippingFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json'
    symbolsFeed: 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools-symbols/nuget/v3/index.json'

-WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'