Convert to latest arcade version (#319)

Convert to latest arcade version

Update signing step

Fix variable list definition

Enable microbuild

Add sdk version to global.json

Update dotnet-install.sh permissions.
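
For reference, the "Add sdk version to global.json" item above means pinning the .NET Core SDK next to the Arcade toolset entries that the build scripts read ($GlobalJson.tools.dotnet and the Microsoft.DotNet.Arcade.Sdk msbuild-sdk). A minimal sketch of such a global.json; the SDK version number here is illustrative and not taken from this commit, while the Arcade SDK version matches the Version.Details.xml change below:

{
  "sdk": {
    "version": "2.1.505"
  },
  "tools": {
    "dotnet": "2.1.505"
  },
  "msbuild-sdks": {
    "Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.19309.1"
  }
}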
This commit is contained in:
Mike McLaughlin 2019-06-10 17:11:26 -07:00 committed by GitHub
Parent a301c9c3e9
Commit 50c7134c70
No known key found for this signature
GPG key ID: 4AEE18F83AFDEB23
85 changed files with 5193 additions and 1016 deletions

View file

@@ -210,297 +210,277 @@ phases:
_BuildConfig: Debug
_BuildArch: x64
- phase: Sign_Package_Publish
dependsOn:
- Windows
- CentOS_7
- Alpine3_6
- MacOS
- Linux_cross
- Linux_cross64
condition: ne(variables['Build.Reason'], 'Schedule')
queue:
name: Hosted VS2017
variables:
- group: DotNet-Blob-Feed
- group: DotNet-MyGet-Publish
- name: _PublishBlobFeedUrl
value: https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json
- name: _SOSNETCorePath
value: $(Build.SourcesDirectory)/artifacts/bin/SOS.NETCore/Release/netstandard2.0/publish
- name: _TeamName
value: DotNetCore
- template: /eng/common/templates/job/job.yml
parameters:
name: Sign_Package_Publish
dependsOn:
- Windows
- CentOS_7
- Alpine3_6
- MacOS
- Linux_cross
- Linux_cross64
condition: ne(variables['Build.Reason'], 'Schedule')
pool:
name: NetCoreInternal-Pool
queue: BuildPool.Windows.10.Amd64.VS2017
variables:
- _PublishBlobFeedUrl: https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json
- _SOSNETCorePath: $(Build.SourcesDirectory)/artifacts/bin/SOS.NETCore/Release/netstandard2.0/publish
- _TeamName: DotNetCore
- _SignType: $(SignType)
enableMicrobuild: true
steps:
# Windows x64 download, copy and archive. Everything under "bin" is published for the Windows x64 build.
- task: DownloadBuildArtifacts@0
displayName: Download Windows x64 and Managed Artifacts
inputs:
artifactName: Windows_x64_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
steps:
- task: ms-vseng.MicroBuildTasks.30666190-6959-11e5-9f96-f56098202fef.MicroBuildSigningPlugin@1
displayName: Install MicroBuild plugin
inputs:
signType: $(SignType)
esrpSigning: true
zipSources: false
env:
TeamName: $(_TeamName)
continueOnError: false
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Windows Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/Windows_x64_Release'
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin'
condition: succeeded()
- task: AzureKeyVault@1
inputs:
azureSubscription: 'DncEng-VSTS'
keyVaultName: EngKeyVault
secretsFilter: 'dotnetfeed-storage-access-key-1,microsoft-symbol-server-pat,symweb-symbol-server-pat'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Windows x64 Artifacts
inputs:
sourceFolder: '$(Build.SourcesDirectory)/artifacts/bin/Windows_NT.x64.Release'
contents: 'sos.dll'
targetFolder: '$(Build.SourcesDirectory)/artifacts/Windows_NT.x64.Release'
condition: succeeded()
# Windows x64 download, copy and archive. Everything under "bin" is published for the Windows x64 build.
- task: CopyFiles@2
displayName: Copy Windows x64 Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/Windows_NT.x64.Release'
condition: succeeded()
- task: DownloadBuildArtifacts@0
displayName: Download Windows x64 and Managed Artifacts
inputs:
artifactName: Windows_x64_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
# Windows x86 download, copy and archive.
- task: CopyFiles@2
displayName: Copy Windows Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/Windows_x64_Release'
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin'
condition: succeeded()
- task: DownloadBuildArtifacts@0
displayName: Download Windows x86 Artifacts
inputs:
artifactName: Windows_x86_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Windows x64 Artifacts
inputs:
sourceFolder: '$(Build.SourcesDirectory)/artifacts/bin/Windows_NT.x64.Release'
contents: 'sos.dll'
targetFolder: '$(Build.SourcesDirectory)/artifacts/Windows_NT.x64.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Windows x86 Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/Windows_x86_Release'
contents: 'sos.dll'
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Windows_NT.x86.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Windows x64 Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/Windows_NT.x64.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Windows x86 Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Windows_NT.x86.Release'
condition: succeeded()
# Windows x86 download, copy and archive.
# Windows arm download, copy and archive.
- task: DownloadBuildArtifacts@0
displayName: Download Windows x86 Artifacts
inputs:
artifactName: Windows_x86_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
- task: DownloadBuildArtifacts@0
displayName: Download Windows Arm Artifacts
inputs:
artifactName: Windows_arm_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Windows x86 Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/Windows_x86_Release'
contents: 'sos.dll'
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Windows_NT.x86.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Windows Arm Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/Windows_arm_Release'
contents: 'sos.dll'
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Windows_NT.arm.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Windows x86 Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Windows_NT.x86.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Windows x86 Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Windows_NT.arm.Release'
condition: succeeded()
# Windows arm download, copy and archive.
# Copy and archive the files for windbg extension gallery.
- task: DownloadBuildArtifacts@0
displayName: Download Windows Arm Artifacts
inputs:
artifactName: Windows_arm_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
- powershell: $(Build.SourcesDirectory)\eng\create-gallery-zip.ps1 $(Build.SourcesDirectory)
displayName: Create Extension Gallery Zip File
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Windows Arm Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/Windows_arm_Release'
contents: 'sos.dll'
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Windows_NT.arm.Release'
condition: succeeded()
# Linux x64 download, copy and archive
- task: CopyFiles@2
displayName: Copy Windows x86 Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Windows_NT.arm.Release'
condition: succeeded()
- task: DownloadBuildArtifacts@0
displayName: Download Linux Artifacts
inputs:
artifactName: CentOS_7_x64_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
# Copy and archive the files for windbg extension gallery.
- task: CopyFiles@2
displayName: Copy Linux Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/CentOS_7_x64_Release'
contents: ?(libsos.so|libsosplugin.so|sosdocsunix.txt)
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux.x64.Release'
condition: succeeded()
- powershell: $(Build.SourcesDirectory)\eng\create-gallery-zip.ps1 $(Build.SourcesDirectory)
displayName: Create Extension Gallery Zip File
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux.x64.Release'
condition: succeeded()
# Linux x64 download, copy and archive
# Linux MUSL x64 download, copy and archive
- task: DownloadBuildArtifacts@0
displayName: Download Linux Artifacts
inputs:
artifactName: CentOS_7_x64_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
- task: DownloadBuildArtifacts@0
displayName: Download Linux Musl Artifacts
inputs:
artifactName: Alpine3_6_x64_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/CentOS_7_x64_Release'
contents: ?(libsos.so|libsosplugin.so|sosdocsunix.txt)
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux.x64.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Musl Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/Alpine3_6_x64_Release'
contents: ?(libsos.so|libsosplugin.so|sosdocsunix.txt)
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux-musl.x64.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux.x64.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Musl Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux-musl.x64.Release'
condition: succeeded()
# Linux MUSL x64 download, copy and archive
# Linux arm download, copy and archive
- task: DownloadBuildArtifacts@0
displayName: Download Linux Musl Artifacts
inputs:
artifactName: Alpine3_6_x64_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
- task: DownloadBuildArtifacts@0
displayName: Download Linux Arm Artifacts
inputs:
artifactName: Linux_cross_arm_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Musl Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/Alpine3_6_x64_Release'
contents: ?(libsos.so|libsosplugin.so|sosdocsunix.txt)
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux-musl.x64.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Arm Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/Linux_cross_arm_Release'
contents: ?(libsos.so|libsosplugin.so|sosdocsunix.txt)
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux.arm.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Musl Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux-musl.x64.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Arm Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux.arm.Release'
condition: succeeded()
# Linux arm download, copy and archive
# Linux arm64 download, copy and archive
- task: DownloadBuildArtifacts@0
displayName: Download Linux Arm Artifacts
inputs:
artifactName: Linux_cross_arm_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
- task: DownloadBuildArtifacts@0
displayName: Download Linux Arm64 Artifacts
inputs:
artifactName: Linux_cross64_arm64_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Arm Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/Linux_cross_arm_Release'
contents: ?(libsos.so|libsosplugin.so|sosdocsunix.txt)
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux.arm.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Arm64 Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/Linux_cross64_arm64_Release'
contents: ?(libsos.so|libsosplugin.so|sosdocsunix.txt)
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux.arm64.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Arm Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux.arm.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Arm64 Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux.arm64.Release'
# Linux arm64 download, copy and archive
condition: succeeded()
# MacOS download, copy and archive
- task: DownloadBuildArtifacts@0
displayName: Download Linux Arm64 Artifacts
inputs:
artifactName: Linux_cross64_arm64_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
- task: DownloadBuildArtifacts@0
displayName: Download MacOS Artifacts
inputs:
artifactName: MacOS_x64_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Arm64 Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/Linux_cross64_arm64_Release'
contents: ?(libsos.so|libsosplugin.so|sosdocsunix.txt)
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux.arm64.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy MacOS Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/MacOS_x64_Release'
contents: ?(libsos.dylib|libsosplugin.dylib|sosdocsunix.txt)
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/OSX.x64.Release'
condition: succeeded()
- task: CopyFiles@2
displayName: Copy Linux Arm64 Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/Linux.arm64.Release'
- task: CopyFiles@2
displayName: Copy MacOS Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/OSX.x64.Release'
condition: succeeded()
condition: succeeded()
# MacOS download, copy and archive
# Create nuget package and sign binaries
- task: DownloadBuildArtifacts@0
displayName: Download MacOS Artifacts
inputs:
artifactName: MacOS_x64_Release
downloadPath: '$(System.ArtifactsDirectory)'
condition: succeeded()
- script: $(Build.SourcesDirectory)\eng\cipack.cmd
-configuration Release
-prepareMachine
/p:TeamName=$(_TeamName)
/p:DotNetSignType=$(SignType)
/p:DotNetPublishBlobFeedKey=$(dotnetfeed-storage-access-key-1)
/p:DotNetPublishBlobFeedUrl=$(_PublishBlobFeedUrl)
/p:DotNetPublishToBlobFeed=true
/p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
/p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
/p:OfficialBuildId=$(BUILD.BUILDNUMBER)
displayName: Sign / Package
continueOnError: true
condition: succeeded()
- task: CopyFiles@2
displayName: Copy MacOS Artifacts
inputs:
sourceFolder: '$(System.ArtifactsDirectory)/MacOS_x64_Release'
contents: ?(libsos.dylib|libsosplugin.dylib|sosdocsunix.txt)
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/OSX.x64.Release'
condition: succeeded()
- task: PublishBuildArtifacts@1
displayName: Publish Package Artifacts
inputs:
publishLocation: Container
pathtoPublish: '$(Build.SourcesDirectory)/artifacts/packages'
artifactName: Packages
condition: succeeded()
- task: CopyFiles@2
displayName: Copy MacOS Artifacts
inputs:
sourceFolder: '$(_SOSNETCorePath)'
contents: '*.dll'
overWrite: true
targetFolder: '$(Build.SourcesDirectory)/artifacts/bin/OSX.x64.Release'
condition: succeeded()
# Create nuget package and sign binaries
- script: $(Build.SourcesDirectory)\eng\cipack.cmd
-configuration Release
-prepareMachine
/p:TeamName=$(_TeamName)
/p:DotNetSignType=$(SignType)
/p:DotNetPublishBlobFeedKey=$(dotnetfeed-storage-access-key-1)
/p:DotNetPublishBlobFeedUrl=$(_PublishBlobFeedUrl)
/p:DotNetPublishToBlobFeed=true
/p:DotNetSymbolServerTokenMsdl=$(microsoft-symbol-server-pat)
/p:DotNetSymbolServerTokenSymWeb=$(symweb-symbol-server-pat)
/p:OfficialBuildId=$(BUILD.BUILDNUMBER)
displayName: Sign / Package
continueOnError: true
condition: succeeded()
- task: PublishBuildArtifacts@1
displayName: Publish Package Artifacts
inputs:
publishLocation: Container
pathtoPublish: '$(Build.SourcesDirectory)/artifacts/packages'
artifactName: Packages
condition: succeeded()
- task: PublishBuildArtifacts@1
displayName: Publish Logs Artifacts
inputs:
publishLocation: Container
pathtoPublish: '$(Build.SourcesDirectory)/artifacts/log'
artifactName: Logs_Packaging_Signing
continueOnError: true
condition: always()
- task: PublishBuildArtifacts@1
displayName: Publish Logs Artifacts
inputs:
publishLocation: Container
pathtoPublish: '$(Build.SourcesDirectory)/artifacts/log'
artifactName: Logs_Packaging_Signing
continueOnError: true
condition: always()

View file

@@ -1,10 +1,5 @@
<!-- Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. -->
<Project>
<PropertyGroup>
<!-- Remove this when we update Arcade. -->
<ImportNetSdkFromRepoToolset>false</ImportNetSdkFromRepoToolset>
</PropertyGroup>
<PropertyGroup>
<ContinuousIntegrationBuild Condition="'$(OfficialBuildId)' != ''">true</ContinuousIntegrationBuild>
</PropertyGroup>
@@ -20,8 +15,7 @@
<PropertyGroup>
<PackageIconUrl>http://go.microsoft.com/fwlink/?LinkID=288859</PackageIconUrl>
<RepositoryType>git</RepositoryType>
<!-- Uncomment this when we bump arcade versions -->
<!-- <PackageLicenseExpression>MIT</PackageLicenseExpression> -->
<PackageLicenseExpression>MIT</PackageLicenseExpression>
<EnableSourceControlManagerQueries>true</EnableSourceControlManagerQueries>
</PropertyGroup>

View file

@@ -173,7 +173,6 @@ if not exist "%__DotNetCli%" (
exit /b 1
)
set __DotNetRuntimeVersion=2.1.9
REM =========================================================================================
REM ===
REM === Build Cross-Architecture Native Components (if applicable)
@@ -339,7 +338,7 @@ if /i "%__BuildArch%" == "arm64" goto Done
:: Test components
if %__Test% EQU 1 (
:: Install the other versions of .NET Core runtime we are going to test on
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%__ProjectDir%\eng\install-test-runtimes.ps1""" -DotNetDir %__ProjectDir%\.dotnet -RuntimeVersion21 %__DotNetRuntimeVersion% -TempDir %__IntermediatesDir% -BuildArch %__BuildArch%" %__DailyTest%
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%__ProjectDir%\eng\install-test-runtimes.ps1""" -DotNetDir %__ProjectDir%\.dotnet -TempDir %__IntermediatesDir% -BuildArch %__BuildArch%" %__DailyTest%
:: Run the xunit tests
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%__ProjectDir%\eng\common\Build.ps1""" -test -configuration %__BuildType% -verbosity %__Verbosity% %__TestArgs%"

View file

@@ -15,13 +15,10 @@
<Target Name="GenerateVersionHeader" DependsOnTargets="GetAssemblyVersion;AddSourceRevisionToInformationalVersion" Condition="'$(NativeVersionHeaderFile)' != '' and '$(GenerateVersionHeader)' == 'true' and !Exists($(NativeVersionHeaderFile))">
<PropertyGroup Condition="'$(VersionSuffixDateStamp)' == ''">
<_FileMajor>42</_FileMajor>
<_FileMinor>42</_FileMinor>
<_FilePatch>42</_FilePatch>
<_FileRevision>42424</_FileRevision>
<VersionSuffixDateStamp>42</VersionSuffixDateStamp>
<VersionSuffixBuildOfTheDay>42424</VersionSuffixBuildOfTheDay>
<Error Message="VersionSuffixDateStamp is missing" Condition="'$(VersionSuffixDateStamp)' == ''" />
<PropertyGroup>
<ProductVersion>$(FileVersion.Replace(".", ","))</ProductVersion>
</PropertyGroup>
<PropertyGroup>
@@ -34,11 +31,11 @@
#define VER_PRODUCTNAME_STR "Microsoft\xae .NET Framework"
#endif
#undef VER_PRODUCTVERSION
#define VER_PRODUCTVERSION $(_FileMajor),$(_FileMinor),$(VersionSuffixDateStamp),$(VersionSuffixBuildOfTheDay)
#define VER_PRODUCTVERSION $(ProductVersion)
#undef VER_PRODUCTVERSION_STR
#define VER_PRODUCTVERSION_STR "$(InformationalVersion)"
#undef VER_FILEVERSION
#define VER_FILEVERSION $(_FileMajor),$(_FileMinor),$(_FilePatch),$(_FileRevision)
#define VER_FILEVERSION $(ProductVersion)
#undef VER_FILEVERSION_STR
#define VER_FILEVERSION_STR "$(FileVersion)"
#ifndef VER_LEGALCOPYRIGHT_STR

View file

@@ -1,8 +1,4 @@
<Project>
<PropertyGroup>
<MSBuildAllProjects>$(MSBuildAllProjects);$(MSBuildThisFileFullPath)</MSBuildAllProjects>
</PropertyGroup>
<ItemGroup>
<ItemsToSign Include="$(ArtifactsPackagesDir)**\*.zip"/>
</ItemGroup>

12
eng/Tools.props Normal file
View file

@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. See the LICENSE file in the project root for more information. -->
<Project>
<PropertyGroup>
<!--When we are doing SDK validation we place the locally built packages in a custom path. This has to be propagated so the next
build can restore the local packages.-->
<RestoreSources Condition="'$(AdditionalRestoreSources)' != ''">
$(RestoreSources);
$(AdditionalRestoreSources)
</RestoreSources>
</PropertyGroup>
</Project>
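
The AdditionalRestoreSources value is expected to come from outside the repo (the SDK validation leg that places locally built packages in a custom path). A hypothetical way to supply it so that it flows through this Tools.props; the local feed path is a placeholder:

.\eng\common\Build.ps1 -restore -build /p:AdditionalRestoreSources=D:\packages\locally-built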

View file

@@ -6,9 +6,17 @@
</Dependency>
</ProductDependencies>
<ToolsetDependencies>
<Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="1.0.0-beta.18516.5">
<Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="1.0.0-beta.19309.1">
<Uri>https://github.com/dotnet/arcade</Uri>
<Sha>3f6ded0d4d606ed995f275a47c154ca6a22e8c7b</Sha>
<Sha>45361625125cdd3119050a852bd73163ca4b73f4</Sha>
</Dependency>
<Dependency Name="Microsoft.DotNet.Build.Tasks.Feed" Version="2.2.0-beta.19309.1">
<Uri>https://github.com/dotnet/arcade</Uri>
<Sha>45361625125cdd3119050a852bd73163ca4b73f4</Sha>
</Dependency>
<Dependency Name="Microsoft.DotNet.SignTool" Version="1.0.0-beta.19309.1">
<Uri>https://github.com/dotnet/arcade</Uri>
<Sha>45361625125cdd3119050a852bd73163ca4b73f4</Sha>
</Dependency>
</ToolsetDependencies>
</Dependencies>
</Dependencies>

View file

@@ -277,7 +277,7 @@ __ResultsDir=$__RootBinDir/TestResults/$__BuildType
__PackagesBinDir=$__RootBinDir/packages/$__BuildType/Shipping
__ExtraCmakeArgs="-DCLR_MANAGED_BINARY_DIR=$__RootBinDir/bin -DCLR_BUILD_TYPE=$__BuildType"
__DotNetCli=$__ProjectRoot/.dotnet/dotnet
__DotNetRuntimeVersion=2.1.9
__DotNetRuntimeVersion=2.1.11
if [ ! -e $__DotNetCli ]; then
echo "dotnet cli not installed $__DotNetCli"
@@ -466,7 +466,7 @@ if [ $__Test == true ]; then
echo "lldb: '$LLDB_PATH' gdb: '$GDB_PATH'"
# Run xunit SOS tests
"$__ProjectRoot/eng/common/build.sh" --test --configuration "$__BuildType" "$__TestArgs"
"$__ProjectRoot/eng/common/build.sh" --test --configuration "$__BuildType" $__TestArgs
if [ $? != 0 ]; then
exit 1
fi

View file

@@ -75,14 +75,6 @@ phases:
_RootFs: --rootfs ${{ parameters.crossrootfsDir }}
steps:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: AzureKeyVault@1
inputs:
azureSubscription: 'DncEng-VSTS'
KeyVaultName: EngKeyVault
SecretsFilter: 'dotnetfeed-storage-access-key-1,microsoft-symbol-server-pat,symweb-symbol-server-pat'
# Internal only build steps
- template: /eng/common/templates/steps/run-on-windows.yml
parameters:
agentOs: ${{ parameters.agentOs }}

View file

@@ -1,3 +1,2 @@
@echo off
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"
exit /b %ErrorLevel%
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"

158
eng/common/CheckSymbols.ps1 Normal file
View file

@@ -0,0 +1,158 @@
param(
[Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
[Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
[Parameter(Mandatory=$true)][string] $SymbolToolPath # Full path to directory where dotnet symbol-tool was installed
)
Add-Type -AssemblyName System.IO.Compression.FileSystem
function FirstMatchingSymbolDescriptionOrDefault {
param(
[string] $FullPath, # Full path to the module that has to be checked
[string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
[string] $SymbolsPath
)
$FileName = [System.IO.Path]::GetFileName($FullPath)
$Extension = [System.IO.Path]::GetExtension($FullPath)
# Those below are potential symbol files that the `dotnet symbol` might
# return. Which one will be returned depends on the type of file we are
# checking and which type of file was uploaded.
# The file itself is returned
$SymbolPath = $SymbolsPath + "\" + $FileName
# PDB file for the module
$PdbPath = $SymbolPath.Replace($Extension, ".pdb")
# PDB file for R2R module (created by crossgen)
$NGenPdb = $SymbolPath.Replace($Extension, ".ni.pdb")
# DBG file for a .so library
$SODbg = $SymbolPath.Replace($Extension, ".so.dbg")
# DWARF file for a .dylib
$DylibDwarf = $SymbolPath.Replace($Extension, ".dylib.dwarf")
.\dotnet-symbol.exe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null
if (Test-Path $PdbPath) {
return "PDB"
}
elseif (Test-Path $NGenPdb) {
return "NGen PDB"
}
elseif (Test-Path $SODbg) {
return "DBG for SO"
}
elseif (Test-Path $DylibDwarf) {
return "Dwarf for Dylib"
}
elseif (Test-Path $SymbolPath) {
return "Module"
}
else {
return $null
}
}
function CountMissingSymbols {
param(
[string] $PackagePath # Path to a NuGet package
)
# Ensure input file exist
if (!(Test-Path $PackagePath)) {
throw "Input file does not exist: $PackagePath"
}
# Extensions for which we'll look for symbols
$RelevantExtensions = @(".dll", ".exe", ".so", ".dylib")
# How many files are missing symbol information
$MissingSymbols = 0
$PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
$PackageGuid = New-Guid
$ExtractPath = Join-Path -Path $ExtractPath -ChildPath $PackageGuid
$SymbolsPath = Join-Path -Path $ExtractPath -ChildPath "Symbols"
[System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
# Makes it easier to reference `symbol tool`
Push-Location $SymbolToolPath
Get-ChildItem -Recurse $ExtractPath |
Where-Object {$RelevantExtensions -contains $_.Extension} |
ForEach-Object {
if ($_.FullName -Match "\\ref\\") {
Write-Host "`t Ignoring reference assembly file" $_.FullName
return
}
$SymbolsOnMSDL = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--microsoft-symbol-server" $SymbolsPath
$SymbolsOnSymWeb = FirstMatchingSymbolDescriptionOrDefault $_.FullName "--internal-server" $SymbolsPath
Write-Host -NoNewLine "`t Checking file" $_.FullName "... "
if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
Write-Host "Symbols found on MSDL (" $SymbolsOnMSDL ") and SymWeb (" $SymbolsOnSymWeb ")"
}
else {
$MissingSymbols++
if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
Write-Host "No symbols found on MSDL or SymWeb!"
}
else {
if ($SymbolsOnMSDL -eq $null) {
Write-Host "No symbols found on MSDL!"
}
else {
Write-Host "No symbols found on SymWeb!"
}
}
}
}
Pop-Location
return $MissingSymbols
}
function CheckSymbolsAvailable {
if (Test-Path $ExtractPath) {
Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
}
Get-ChildItem "$InputPath\*.nupkg" |
ForEach-Object {
$FileName = $_.Name
# These packages from Arcade-Services include some native libraries that
# our current symbol uploader can't handle. Below is a workaround until
# we get issue: https://github.com/dotnet/arcade/issues/2457 sorted.
if ($FileName -Match "Microsoft\.DotNet\.Darc\.") {
Write-Host "Ignoring Arcade-services file: $FileName"
Write-Host
return
}
elseif ($FileName -Match "Microsoft\.DotNet\.Maestro\.Tasks\.") {
Write-Host "Ignoring Arcade-services file: $FileName"
Write-Host
return
}
Write-Host "Validating $FileName "
$Status = CountMissingSymbols "$InputPath\$FileName"
if ($Status -ne 0) {
Write-Error "Missing symbols for $Status modules in the package $FileName"
}
Write-Host
}
}
CheckSymbolsAvailable
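
A hypothetical invocation of this script from a PowerShell prompt; all three paths are placeholders, not values used by this build:

.\eng\common\CheckSymbols.ps1 `
  -InputPath "D:\artifacts\packages\Release\Shipping" `
  -ExtractPath "D:\temp\symbol-check" `
  -SymbolToolPath "D:\tools\dotnet-symbol"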

View file

@@ -0,0 +1,146 @@
# Source for this file was taken from https://github.com/microsoft/azure-pipelines-task-lib/blob/11c9439d4af17e6475d9fe058e6b2e03914d17e6/powershell/VstsTaskSdk/LoggingCommandFunctions.ps1
# NOTE: You should not be calling these methods directly as they are likely to change. Instead you should be calling the Write-Pipeline* functions defined in tools.ps1
$script:loggingCommandPrefix = '##vso['
$script:loggingCommandEscapeMappings = @( # TODO: WHAT ABOUT "="? WHAT ABOUT "%"?
New-Object psobject -Property @{ Token = ';' ; Replacement = '%3B' }
New-Object psobject -Property @{ Token = "`r" ; Replacement = '%0D' }
New-Object psobject -Property @{ Token = "`n" ; Replacement = '%0A' }
New-Object psobject -Property @{ Token = "]" ; Replacement = '%5D' }
)
# TODO: BUG: Escape % ???
# TODO: Add test to verify don't need to escape "=".
<########################################
# Private functions.
########################################>
function Format-LoggingCommandData {
[CmdletBinding()]
param([string]$Value, [switch]$Reverse)
if (!$Value) {
return ''
}
if (!$Reverse) {
foreach ($mapping in $script:loggingCommandEscapeMappings) {
$Value = $Value.Replace($mapping.Token, $mapping.Replacement)
}
} else {
for ($i = $script:loggingCommandEscapeMappings.Length - 1 ; $i -ge 0 ; $i--) {
$mapping = $script:loggingCommandEscapeMappings[$i]
$Value = $Value.Replace($mapping.Replacement, $mapping.Token)
}
}
return $Value
}
function Format-LoggingCommand {
[CmdletBinding()]
param(
[Parameter(Mandatory = $true)]
[string]$Area,
[Parameter(Mandatory = $true)]
[string]$Event,
[string]$Data,
[hashtable]$Properties)
# Append the preamble.
[System.Text.StringBuilder]$sb = New-Object -TypeName System.Text.StringBuilder
$null = $sb.Append($script:loggingCommandPrefix).Append($Area).Append('.').Append($Event)
# Append the properties.
if ($Properties) {
$first = $true
foreach ($key in $Properties.Keys) {
[string]$value = Format-LoggingCommandData $Properties[$key]
if ($value) {
if ($first) {
$null = $sb.Append(' ')
$first = $false
} else {
$null = $sb.Append(';')
}
$null = $sb.Append("$key=$value")
}
}
}
# Append the tail and output the value.
$Data = Format-LoggingCommandData $Data
$sb.Append(']').Append($Data).ToString()
}
function Write-LoggingCommand {
[CmdletBinding(DefaultParameterSetName = 'Parameters')]
param(
[Parameter(Mandatory = $true, ParameterSetName = 'Parameters')]
[string]$Area,
[Parameter(Mandatory = $true, ParameterSetName = 'Parameters')]
[string]$Event,
[Parameter(ParameterSetName = 'Parameters')]
[string]$Data,
[Parameter(ParameterSetName = 'Parameters')]
[hashtable]$Properties,
[Parameter(Mandatory = $true, ParameterSetName = 'Object')]
$Command,
[switch]$AsOutput)
if ($PSCmdlet.ParameterSetName -eq 'Object') {
Write-LoggingCommand -Area $Command.Area -Event $Command.Event -Data $Command.Data -Properties $Command.Properties -AsOutput:$AsOutput
return
}
$command = Format-LoggingCommand -Area $Area -Event $Event -Data $Data -Properties $Properties
if ($AsOutput) {
$command
} else {
Write-Host $command
}
}
function Write-LogIssue {
[CmdletBinding()]
param(
[ValidateSet('warning', 'error')]
[Parameter(Mandatory = $true)]
[string]$Type,
[string]$Message,
[string]$ErrCode,
[string]$SourcePath,
[string]$LineNumber,
[string]$ColumnNumber,
[switch]$AsOutput)
$command = Format-LoggingCommand -Area 'task' -Event 'logissue' -Data $Message -Properties @{
'type' = $Type
'code' = $ErrCode
'sourcepath' = $SourcePath
'linenumber' = $LineNumber
'columnnumber' = $ColumnNumber
}
if ($AsOutput) {
return $command
}
if ($Type -eq 'error') {
$foregroundColor = $host.PrivateData.ErrorForegroundColor
$backgroundColor = $host.PrivateData.ErrorBackgroundColor
if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
$foregroundColor = [System.ConsoleColor]::Red
$backgroundColor = [System.ConsoleColor]::Black
}
} else {
$foregroundColor = $host.PrivateData.WarningForegroundColor
$backgroundColor = $host.PrivateData.WarningBackgroundColor
if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
$foregroundColor = [System.ConsoleColor]::Yellow
$backgroundColor = [System.ConsoleColor]::Black
}
}
Write-Host $command -ForegroundColor $foregroundColor -BackgroundColor $backgroundColor
}

View file

@@ -0,0 +1,83 @@
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. See the LICENSE file in the project root for more information. -->
<Project Sdk="Microsoft.NET.Sdk">
<!--
This MSBuild file is intended to be used as the body of the default
publishing release pipeline. The release pipeline will use this file
to invoke the PushToStaticFeed task that will read the build asset
manifest and publish the assets described in the manifest to
informed target feeds.
-->
<PropertyGroup>
<TargetFramework>netcoreapp2.1</TargetFramework>
</PropertyGroup>
<Import Project="$(MSBuildThisFileDirectory)DefaultVersions.props" Condition="Exists('$(MSBuildThisFileDirectory)DefaultVersions.props')" />
<!--
This won't be necessary once we solve this issue:
https://github.com/dotnet/arcade/issues/2266
-->
<Import Project="$(MSBuildThisFileDirectory)ArtifactsCategory.props" Condition="Exists('$(MSBuildThisFileDirectory)ArtifactsCategory.props')" />
<Import Project="$(NuGetPackageRoot)microsoft.dotnet.build.tasks.feed\$(MicrosoftDotNetBuildTasksFeedVersion)\build\Microsoft.DotNet.Build.Tasks.Feed.targets" />
<Target Name="PublishToFeed">
<Error Condition="'$(ArtifactsCategory)' == ''" Text="ArtifactsCategory: The artifacts' category produced by the build wasn't provided." />
<Error Condition="'$(AccountKeyToStaticFeed)' == ''" Text="AccountKeyToStaticFeed: Account key for target feed wasn't provided." />
<Error Condition="'$(ManifestsBasePath)' == ''" Text="Full path to asset manifests directory wasn't provided." />
<Error Condition="'$(BlobBasePath)' == '' AND '$(PackageBasePath)' == ''" Text="A valid full path to BlobBasePath of PackageBasePath is required." />
<ItemGroup>
<!-- Include all manifests found in the manifest folder. -->
<ManifestFiles Include="$(ManifestsBasePath)*.xml" />
</ItemGroup>
<Error Condition="'@(ManifestFiles)' == ''" Text="No manifest file was found in the provided path: $(ManifestsBasePath)" />
<!--
For now the type of packages being published will be informed for the whole build.
Eventually this will be specified on a per package basis:
TODO: https://github.com/dotnet/arcade/issues/2266
-->
<PropertyGroup>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == '.NETCORE'">https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json</TargetStaticFeed>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == '.NETCOREVALIDATION'">https://dotnetfeed.blob.core.windows.net/arcade-validation/index.json</TargetStaticFeed>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == 'ASPNETCORE'">https://dotnetfeed.blob.core.windows.net/aspnet-aspnetcore/index.json</TargetStaticFeed>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == 'ASPNETCORETOOLING'">https://dotnetfeed.blob.core.windows.net/aspnet-aspnetcore-tooling/index.json</TargetStaticFeed>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == 'ENTITYFRAMEWORKCORE'">https://dotnetfeed.blob.core.windows.net/aspnet-entityframeworkcore/index.json</TargetStaticFeed>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == 'ASPNETEXTENSIONS'">https://dotnetfeed.blob.core.windows.net/aspnet-extensions/index.json</TargetStaticFeed>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == 'CORECLR'">https://dotnetfeed.blob.core.windows.net/dotnet-coreclr/index.json</TargetStaticFeed>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == 'CORESDK'">https://dotnetfeed.blob.core.windows.net/dotnet-sdk/index.json</TargetStaticFeed>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == 'TOOLSINTERNAL'">https://dotnetfeed.blob.core.windows.net/dotnet-tools-internal/index.json</TargetStaticFeed>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == 'TOOLSET'">https://dotnetfeed.blob.core.windows.net/dotnet-toolset/index.json</TargetStaticFeed>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == 'WINDOWSDESKTOP'">https://dotnetfeed.blob.core.windows.net/dotnet-windowsdesktop/index.json</TargetStaticFeed>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == 'NUGETCLIENT'">https://dotnetfeed.blob.core.windows.net/nuget-nugetclient/index.json</TargetStaticFeed>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == 'ASPNETENTITYFRAMEWORK6'">https://dotnetfeed.blob.core.windows.net/aspnet-entityframework6/index.json</TargetStaticFeed>
<TargetStaticFeed Condition="'$(ArtifactsCategory.ToUpper())' == 'ASPNETBLAZOR'">https://dotnetfeed.blob.core.windows.net/aspnet-blazor/index.json</TargetStaticFeed>
</PropertyGroup>
<Error
Condition="'$(TargetStaticFeed)' == ''"
Text="'$(ArtifactsCategory)' wasn't recognized as a valid artifact category. Valid categories are: '.NetCore' and '.NetCoreValidation'" />
<!-- Iterate publishing assets from each manifest file. -->
<PushArtifactsInManifestToFeed
ExpectedFeedUrl="$(TargetStaticFeed)"
AccountKey="$(AccountKeyToStaticFeed)"
BARBuildId="$(BARBuildId)"
MaestroApiEndpoint="$(MaestroApiEndpoint)"
BuildAssetRegistryToken="$(BuildAssetRegistryToken)"
Overwrite="$(OverrideAssetsWithSameName)"
PassIfExistingItemIdentical="$(PassIfExistingItemIdentical)"
MaxClients="$(MaxParallelUploads)"
UploadTimeoutInMinutes="$(MaxUploadTimeoutInMinutes)"
AssetManifestPath="%(ManifestFiles.Identity)"
BlobAssetsBasePath="$(BlobBasePath)"
PackageAssetsBasePath="$(PackageBasePath)"/>
</Target>
<ItemGroup>
<PackageReference Include="Microsoft.DotNet.Build.Tasks.Feed" Version="$(MicrosoftDotNetBuildTasksFeedVersion)" />
</ItemGroup>
</Project>

View file

@@ -0,0 +1,82 @@
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. See the LICENSE file in the project root for more information. -->
<Project Sdk="Microsoft.NET.Sdk">
<!--
This MSBuild file is intended to be used as the body of the default
publishing release pipeline. The release pipeline will use this file
to invoke the PublishSymbols tasks to publish symbols to MSDL and SymWeb.
Parameters:
- PDBArtifactsDirectory : Full path to directory containing PDB files to be published.
- BlobBasePath : Full path containing *.symbols.nupkg packages to be published.
- DotNetSymbolServerTokenMsdl : PAT to access MSDL.
- DotNetSymbolServerTokenSymWeb : PAT to access SymWeb.
- DotNetSymbolExpirationInDays : Expiration days for published packages. Default is 3650.
-->
<PropertyGroup>
<TargetFramework>netcoreapp2.1</TargetFramework>
</PropertyGroup>
<Import Project="$(NuGetPackageRoot)microsoft.symboluploader.build.task\$(SymbolUploaderVersion)\build\PublishSymbols.targets" />
<Target Name="PublishSymbols">
<ItemGroup>
<FilesToPublishToSymbolServer Include="$(PDBArtifactsDirectory)\*.pdb"/>
<PackagesToPublishToSymbolServer Include="$(BlobBasePath)\*.symbols.nupkg"/>
<!--
These packages from Arcade-Services include some native libraries that
our current symbol uploader can't handle. Below is a workaround until
we get issue: https://github.com/dotnet/arcade/issues/2457 sorted.
-->
<PackagesToPublishToSymbolServer Remove="$(BlobBasePath)\Microsoft.DotNet.Darc.*" />
<PackagesToPublishToSymbolServer Remove="$(BlobBasePath)\Microsoft.DotNet.Maestro.Tasks.*" />
</ItemGroup>
<PropertyGroup>
<DotNetSymbolExpirationInDays Condition="'$(DotNetSymbolExpirationInDays)' == ''">3650</DotNetSymbolExpirationInDays>
<PublishToSymbolServer>true</PublishToSymbolServer>
<PublishToSymbolServer Condition="'@(FilesToPublishToSymbolServer)' == '' and '@(PackagesToPublishToSymbolServer)' == ''">false</PublishToSymbolServer>
</PropertyGroup>
<Message
Importance="High"
Text="No symbol package(s) were found to publish."
Condition="$(PublishToSymbolServer) == false" />
<!-- Symbol Uploader: MSDL -->
<Message Importance="High" Text="Publishing symbol packages to MSDL ..." Condition="$(PublishToSymbolServer)" />
<PublishSymbols PackagesToPublish="@(PackagesToPublishToSymbolServer)"
FilesToPublish="@(FilesToPublishToSymbolServer)"
PersonalAccessToken="$(DotNetSymbolServerTokenMsdl)"
SymbolServerPath="https://microsoftpublicsymbols.artifacts.visualstudio.com/DefaultCollection"
ExpirationInDays="$(DotNetSymbolExpirationInDays)"
VerboseLogging="true"
DryRun="false"
ConvertPortablePdbsToWindowsPdbs="false"
PdbConversionTreatAsWarning=""
Condition="$(PublishToSymbolServer)"/>
<!--
Symbol Uploader: SymWeb
Watson, VS insertion testing, and the typical internal dev usage require SymWeb.
Currently we need to call the task twice (https://github.com/dotnet/core-eng/issues/3489).
-->
<Message Importance="High" Text="Publishing symbol packages to SymWeb ..." Condition="$(PublishToSymbolServer)" />
<PublishSymbols PackagesToPublish="@(PackagesToPublishToSymbolServer)"
FilesToPublish="@(FilesToPublishToSymbolServer)"
PersonalAccessToken="$(DotNetSymbolServerTokenSymWeb)"
SymbolServerPath="https://microsoft.artifacts.visualstudio.com/DefaultCollection"
ExpirationInDays="$(DotNetSymbolExpirationInDays)"
VerboseLogging="true"
DryRun="false"
ConvertPortablePdbsToWindowsPdbs="false"
PdbConversionTreatAsWarning=""
Condition="$(PublishToSymbolServer)"/>
</Target>
<ItemGroup>
<PackageReference Include="Microsoft.SymbolUploader.Build.Task" Version="$(SymbolUploaderVersion)" />
</ItemGroup>
</Project>

28
eng/common/README.md Normal file
View file

@@ -0,0 +1,28 @@
# Don't touch this folder
uuuuuuuuuuuuuuuuuuuu
u" uuuuuuuuuuuuuuuuuu "u
u" u$$$$$$$$$$$$$$$$$$$$u "u
u" u$$$$$$$$$$$$$$$$$$$$$$$$u "u
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
$ $$$" ... "$... ...$" ... "$$$ ... "$$$ $
$ $$$u `"$$$$$$$ $$$ $$$$$ $$ $$$ $$$ $
$ $$$$$$uu "$$$$ $$$ $$$$$ $$ """ u$$$ $
$ $$$""$$$ $$$$ $$$u "$$$" u$$ $$$$$$$$ $
$ $$$$....,$$$$$..$$$$$....,$$$$..$$$$$$$$ $
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
"u "$$$$$$$$$$$$$$$$$$$$$$$$" u"
"u "$$$$$$$$$$$$$$$$$$$$" u"
"u """""""""""""""""" u"
""""""""""""""""""""
!!! Changes made in this directory are subject to being overwritten by automation !!!
The files in this directory are shared by all Arcade repos and managed by automation. If you need to make changes to these files, open an issue or submit a pull request to https://github.com/dotnet/arcade first.

View file

@@ -0,0 +1,83 @@
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. See the LICENSE file in the project root for more information. -->
<Project Sdk="Microsoft.NET.Sdk">
<!--
This MSBuild file is intended to be used as the body of the default
publishing release pipeline. The release pipeline will use this file
to invoke the SignCheck tool to validate that packages about to
be published are correctly signed.
Parameters:
- PackageBasePath : Directory containing all files that need to be validated.
- SignCheckVersion : Version of SignCheck package to be used.
- SignValidationExclusionList : ItemGroup containing exclusion list to be forwarded to SignCheck.
- EnableJarSigningCheck : Whether .jar files should be validated.
- EnableStrongNameCheck : Whether strong name check should be performed.
-->
<PropertyGroup>
<TargetFramework>netcoreapp2.1</TargetFramework>
</PropertyGroup>
<!--
From 'Signing.props' we import $(SignValidationExclusionList)
-->
<Import Project="$(MSBuildThisFileDirectory)Signing.props" Condition="Exists('$(MSBuildThisFileDirectory)Signing.props')" />
<Target Name="ValidateSigning">
<PropertyGroup>
<SignCheckToolPath>$(NuGetPackageRoot)Microsoft.DotNet.SignCheck\$(SignCheckVersion)\tools\Microsoft.DotNet.SignCheck.exe</SignCheckToolPath>
<SignCheckInputDir>$(PackageBasePath)</SignCheckInputDir>
<SignCheckLog>signcheck.log</SignCheckLog>
<SignCheckErrorLog>signcheck.errors.log</SignCheckErrorLog>
<SignCheckExclusionsFile>signcheck.exclusions.txt</SignCheckExclusionsFile>
</PropertyGroup>
<ItemGroup>
<!--
Documentation for these arguments is available here:
https://github.com/dotnet/arcade/tree/master/src/SignCheck
-->
<SignCheckArgs Include="--recursive" />
<SignCheckArgs Include="--traverse-subfolders" />
<SignCheckArgs Include="--file-status AllFiles" />
<SignCheckArgs Include="--log-file $(SignCheckLog)" />
<SignCheckArgs Include="--error-log-file $(SignCheckErrorLog)" />
<SignCheckArgs Include="--input-files $(SignCheckInputDir)" />
<SignCheckArgs Include="--exclusions-file $(SignCheckExclusionsFile)" Condition="'@(SignValidationExclusionList)' != ''" />
<SignCheckArgs Include="--verify-jar" Condition="'$(EnableJarSigningCheck)' == 'true'" />
<SignCheckArgs Include="--verify-strongname" Condition="'$(EnableStrongNameCheck)' == 'true'" />
</ItemGroup>
<WriteLinesToFile
File="$(SignCheckExclusionsFile)"
Lines="@(SignValidationExclusionList)"
Condition="'@(SignValidationExclusionList)' != ''"
Overwrite="true"
Encoding="Unicode"/>
<!--
IgnoreExitCode='true' because the tool doesn't return '0' on success.
-->
<Exec
Command="&quot;$(SignCheckToolPath)&quot; @(SignCheckArgs, ' ')"
IgnoreExitCode='true'
ConsoleToMsBuild="false"
StandardErrorImportance="high" />
<Error
Text="Signing validation failed. Check $(SignCheckErrorLog) for more information."
Condition="Exists($(SignCheckErrorLog)) and '$([System.IO.File]::ReadAllText($(SignCheckErrorLog)))' != ''" />
<Message
Text="##vso[artifact.upload containerfolder=LogFiles;artifactname=LogFiles]{SignCheckErrorLog}"
Condition="Exists($(SignCheckErrorLog)) and '$([System.IO.File]::ReadAllText($(SignCheckErrorLog)))' != ''" />
</Target>
<ItemGroup>
<PackageReference Include="Microsoft.DotNet.SignCheck" Version="$(SignCheckVersion)" />
</ItemGroup>
</Project>
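
This project is normally driven by the release pipeline, which restores it and invokes the ValidateSigning target with the parameters listed in the comment above. A rough, hypothetical local equivalent; the project file name, package path, and SignCheck version here are placeholders:

dotnet msbuild eng\common\SigningValidation.proj /restore /t:ValidateSigning /p:PackageBasePath=D:\artifacts\packages /p:SignCheckVersion=1.0.0-beta.19309.1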

View file

@@ -0,0 +1,184 @@
param(
[Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored
[Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
[Parameter(Mandatory=$true)][string] $SourceLinkToolPath, # Full path to directory where dotnet SourceLink CLI was installed
[Parameter(Mandatory=$true)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade
[Parameter(Mandatory=$true)][string] $GHCommit # GitHub commit SHA used to build the packages
)
# Cache/HashMap (File -> Exist flag) used to consult whether a file exist
# in the repository at a specific commit point. This is populated by inserting
# all files present in the repo at a specific commit point.
$global:RepoFiles = @{}
$ValidatePackage = {
param(
[string] $PackagePath # Full path to a Symbols.NuGet package
)
# Ensure input file exist
if (!(Test-Path $PackagePath)) {
throw "Input file does not exist: $PackagePath"
}
# Extensions for which we'll look for SourceLink information
# For now we'll only care about Portable & Embedded PDBs
$RelevantExtensions = @(".dll", ".exe", ".pdb")
Write-Host -NoNewLine "Validating" ([System.IO.Path]::GetFileName($PackagePath)) "... "
$PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
$ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
$FailedFiles = 0
Add-Type -AssemblyName System.IO.Compression.FileSystem
[System.IO.Directory]::CreateDirectory($ExtractPath);
$zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
$zip.Entries |
Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
ForEach-Object {
$FileName = $_.FullName
$Extension = [System.IO.Path]::GetExtension($_.Name)
$FakeName = -Join((New-Guid), $Extension)
$TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName
# We ignore resource DLLs
if ($FileName.EndsWith(".resources.dll")) {
return
}
[System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
$ValidateFile = {
param(
[string] $FullPath, # Full path to the module that has to be checked
[string] $RealPath,
[ref] $FailedFiles
)
# Makes it easier to reference `sourcelink cli`
Push-Location $using:SourceLinkToolPath
$SourceLinkInfos = .\sourcelink.exe print-urls $FullPath | Out-String
if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) {
$NumFailedLinks = 0
# We only care about Http addresses
$Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches
if ($Matches.Count -ne 0) {
$Matches.Value |
ForEach-Object {
$Link = $_
$CommitUrl = -Join("https://raw.githubusercontent.com/", $using:GHRepoName, "/", $using:GHCommit, "/")
$FilePath = $Link.Replace($CommitUrl, "")
$Status = 200
$Cache = $using:RepoFiles
if ( !($Cache.ContainsKey($FilePath)) ) {
try {
$Uri = $Link -as [System.URI]
# Only GitHub links are valid
if ($Uri.AbsoluteURI -ne $null -and $Uri.Host -match "github") {
$Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
}
else {
$Status = 0
}
}
catch {
$Status = 0
}
}
if ($Status -ne 200) {
if ($NumFailedLinks -eq 0) {
if ($FailedFiles.Value -eq 0) {
Write-Host
}
Write-Host "`tFile $RealPath has broken links:"
}
Write-Host "`t`tFailed to retrieve $Link"
$NumFailedLinks++
}
}
}
if ($NumFailedLinks -ne 0) {
$FailedFiles.value++
$global:LASTEXITCODE = 1
}
}
Pop-Location
}
&$ValidateFile $TargetFile $FileName ([ref]$FailedFiles)
}
$zip.Dispose()
if ($FailedFiles -eq 0) {
Write-Host "Passed."
}
}
function ValidateSourceLinkLinks {
if (!($GHRepoName -Match "^[^\s\/]+/[^\s\/]+$")) {
Write-Host "GHRepoName should be in the format <org>/<repo>"
$global:LASTEXITCODE = 1
return
}
if (!($GHCommit -Match "^[0-9a-fA-F]{40}$")) {
Write-Host "GHCommit should be a 40 chars hexadecimal string"
$global:LASTEXITCODE = 1
return
}
$RepoTreeURL = -Join("https://api.github.com/repos/", $GHRepoName, "/git/trees/", $GHCommit, "?recursive=1")
$CodeExtensions = @(".cs", ".vb", ".fs", ".fsi", ".fsx", ".fsscript")
try {
# Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash
$Data = Invoke-WebRequest $RepoTreeURL | ConvertFrom-Json | Select-Object -ExpandProperty tree
foreach ($file in $Data) {
$Extension = [System.IO.Path]::GetExtension($file.path)
if ($CodeExtensions.Contains($Extension)) {
$RepoFiles[$file.path] = 1
}
}
}
catch {
Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL"
$global:LASTEXITCODE = 1
return
}
if (Test-Path $ExtractPath) {
Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
}
# Process each NuGet package in parallel
$Jobs = @()
Get-ChildItem "$InputPath\*.symbols.nupkg" |
ForEach-Object {
$Jobs += Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName
}
foreach ($Job in $Jobs) {
Wait-Job -Id $Job.Id | Receive-Job
}
}
Measure-Command { ValidateSourceLinkLinks }
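
A hypothetical invocation of this SourceLink validation script; the script file name and every argument value are placeholders (the repo name mirrors the "dotnet/arcade" example from the parameter comment, and GHCommit must be the full 40-character SHA the symbol packages were built from):

.\eng\common\SourceLinkValidation.ps1 `
  -InputPath "D:\artifacts\packages\Release\Shipping" `
  -ExtractPath "D:\temp\sourcelink-check" `
  -SourceLinkToolPath "D:\tools\sourcelink" `
  -GHRepoName "dotnet/arcade" `
  -GHCommit "0123456789abcdef0123456789abcdef01234567"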

View file

@@ -1,253 +1,100 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
[string] $architecture = "<auto>",
[string] $configuration = "Debug",
[string] $projects = "",
[string] $verbosity = "minimal",
[switch] $restore,
[string] $architecture = "",
[string][Alias('c')]$configuration = "Debug",
[string]$platform = $null,
[string] $projects,
[string][Alias('v')]$verbosity = "minimal",
[string] $msbuildEngine = $null,
[bool] $warnAsError = $true,
[bool] $nodeReuse = $true,
[switch][Alias('r')]$restore,
[switch] $deployDeps,
[switch] $build,
[switch][Alias('b')]$build,
[switch] $rebuild,
[switch] $deploy,
[switch] $test,
[switch][Alias('t')]$test,
[switch] $integrationTest,
[switch] $performanceTest,
[switch] $sign,
[switch] $pack,
[switch] $publish,
[switch] $publishBuildAssets,
[switch][Alias('bl')]$binaryLog,
[switch] $ci,
[switch] $prepareMachine,
[switch] $help,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
)
set-strictmode -version 2.0
$ErrorActionPreference = "Stop"
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
. $PSScriptRoot\tools.ps1
function Print-Usage() {
Write-Host "Common settings:"
Write-Host " -configuration <value> Build configuration Debug, Release"
Write-Host " -configuration <value> Build configuration: 'Debug' or 'Release' (short: -c)"
Write-Host " -architecture <value> Build architecture x64, x86, arm, arm64"
Write-Host " -verbosity <value> Msbuild verbosity (q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic])"
Write-Host " -platform <value> Platform configuration: 'x86', 'x64' or any valid Platform value to pass to msbuild"
Write-Host " -verbosity <value> Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
Write-Host " -binaryLog Output binary log (short: -bl)"
Write-Host " -help Print help and exit"
Write-Host ""
Write-Host "Actions:"
Write-Host " -restore Restore dependencies"
Write-Host " -build Build solution"
Write-Host " -restore Restore dependencies (short: -r)"
Write-Host " -build Build solution (short: -b)"
Write-Host " -rebuild Rebuild solution"
Write-Host " -deploy Deploy built VSIXes"
Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)"
Write-Host " -test Run all unit tests in the solution"
Write-Host " -pack Package build outputs into NuGet packages and Willow components"
Write-Host " -test Run all unit tests in the solution (short: -t)"
Write-Host " -integrationTest Run all integration tests in the solution"
Write-Host " -performanceTest Run all performance tests in the solution"
Write-Host " -pack Package build outputs into NuGet packages and Willow components"
Write-Host " -sign Sign build outputs"
Write-Host " -publish Publish artifacts (e.g. symbols)"
Write-Host " -publishBuildAssets Push assets to BAR"
Write-Host ""
Write-Host "Advanced settings:"
Write-Host " -projects <value> Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)"
Write-Host " -ci Set when running on CI server"
Write-Host " -prepareMachine Prepare machine for CI run"
Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
Write-Host " -warnAsError <value> Sets warnaserror msbuild parameter ('true' or 'false')"
Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild."
Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)."
}
if ($help -or (($properties -ne $null) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) {
Print-Usage
exit 0
}
function Create-Directory([string[]] $path) {
if (!(Test-Path $path)) {
New-Item -path $path -force -itemType "Directory" | Out-Null
}
}
function InitializeDotNetCli {
# Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
$env:DOTNET_MULTILEVEL_LOOKUP=0
# Disable first run since we do not need all ASP.NET packages restored.
$env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
# Source Build uses DotNetCoreSdkDir variable
if ($env:DotNetCoreSdkDir -ne $null) {
$env:DOTNET_INSTALL_DIR = $env:DotNetCoreSdkDir
}
# Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
# otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
if (($env:DOTNET_INSTALL_DIR -ne $null) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$($GlobalJson.tools.dotnet)"))) {
$dotnetRoot = $env:DOTNET_INSTALL_DIR
} else {
$dotnetRoot = Join-Path $RepoRoot ".dotnet"
$env:DOTNET_INSTALL_DIR = $dotnetRoot
if ($restore) {
InstallDotNetSdk $dotnetRoot $GlobalJson.tools.dotnet
}
}
return $dotnetRoot
}
function GetDotNetInstallScript([string] $dotnetRoot) {
$installScript = "$dotnetRoot\dotnet-install.ps1"
if (!(Test-Path $installScript)) {
Create-Directory $dotnetRoot
Invoke-WebRequest "https://dot.net/v1/dotnet-install.ps1" -OutFile $installScript
}
return $installScript
}
function InstallDotNetSdk([string] $dotnetRoot, [string] $version) {
$installScript = GetDotNetInstallScript $dotnetRoot
if ($architecture -eq "arm") {
$architecture="x86"
}
if ($architecture -eq "arm64") {
$architecture="x64"
}
& $installScript -Version $version -InstallDir $dotnetRoot -Architecture $architecture
if ($lastExitCode -ne 0) {
Write-Host "Failed to install dotnet cli (exit code '$lastExitCode')." -ForegroundColor Red
exit $lastExitCode
}
}
function InitializeVisualStudioBuild {
$inVSEnvironment = !($env:VS150COMNTOOLS -eq $null) -and (Test-Path $env:VS150COMNTOOLS)
if ($inVSEnvironment) {
$vsInstallDir = Join-Path $env:VS150COMNTOOLS "..\.."
} else {
$vsInstallDir = LocateVisualStudio
$env:VS150COMNTOOLS = Join-Path $vsInstallDir "Common7\Tools\"
$env:VSSDK150Install = Join-Path $vsInstallDir "VSSDK\"
$env:VSSDKInstall = Join-Path $vsInstallDir "VSSDK\"
}
return $vsInstallDir;
}
function LocateVisualStudio {
$vswhereVersion = $GlobalJson.tools.vswhere
$toolsRoot = Join-Path $RepoRoot ".tools"
$vsWhereDir = Join-Path $toolsRoot "vswhere\$vswhereVersion"
$vsWhereExe = Join-Path $vsWhereDir "vswhere.exe"
if (!(Test-Path $vsWhereExe)) {
Create-Directory $vsWhereDir
Write-Host "Downloading vswhere"
Invoke-WebRequest "https://github.com/Microsoft/vswhere/releases/download/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
}
$vsInstallDir = & $vsWhereExe -latest -prerelease -property installationPath -requires Microsoft.Component.MSBuild -requires Microsoft.VisualStudio.Component.VSSDK -requires Microsoft.Net.Component.4.6.TargetingPack -requires Microsoft.VisualStudio.Component.Roslyn.Compiler -requires Microsoft.VisualStudio.Component.VSSDK
if ($lastExitCode -ne 0) {
Write-Host "Failed to locate Visual Studio (exit code '$lastExitCode')." -ForegroundColor Red
exit $lastExitCode
}
return $vsInstallDir
}
function GetBuildCommand() {
$tools = $GlobalJson.tools
if ((Get-Member -InputObject $tools -Name "dotnet") -ne $null) {
$dotnetRoot = InitializeDotNetCli
# by default build with dotnet cli:
$buildDriver = Join-Path $dotnetRoot "dotnet.exe"
$buildArgs = "msbuild"
}
if ((Get-Member -InputObject $tools -Name "vswhere") -ne $null) {
$vsInstallDir = InitializeVisualStudioBuild
# Presence of vswhere.version indicates the repo needs to build using VS msbuild:
$buildDriver = Join-Path $vsInstallDir "MSBuild\15.0\Bin\msbuild.exe"
$buildArgs = "/nodeReuse:$(!$ci)"
}
if ($buildDriver -eq $null) {
Write-Host "/global.json must either specify 'tools.dotnet' or 'tools.vswhere'." -ForegroundColor Red
exit 1
}
if ($ci) {
Write-Host "Using $buildDriver"
}
return $buildDriver, $buildArgs
}
function InitializeToolset([string] $buildDriver, [string]$buildArgs) {
$toolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
$toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt"
if (Test-Path $toolsetLocationFile) {
$path = Get-Content $toolsetLocationFile -TotalCount 1
if (Test-Path $path) {
$global:ToolsetBuildProj = $path
return
}
}
if (-not $restore) {
Write-Host "Toolset version $toolsetVersion has not been restored."
exit 1
}
$proj = Join-Path $ToolsetDir "restore.proj"
'<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' | Set-Content $proj
& $buildDriver $buildArgs $proj /t:__WriteToolsetLocation /m /nologo /clp:None /warnaserror /bl:$ToolsetRestoreLog /v:$verbosity /p:__ToolsetLocationOutputFile=$toolsetLocationFile
if ($lastExitCode -ne 0) {
Write-Host "Failed to restore toolset (exit code '$lastExitCode')." -ForegroundColor Red
Write-Host "Build log: $ToolsetRestoreLog" -ForegroundColor DarkGray
exit $lastExitCode
}
$path = Get-Content $toolsetLocationFile -TotalCount 1
if (!(Test-Path $path)) {
throw "Invalid toolset path: $path"
}
$global:ToolsetBuildProj = $path
}
function InitializeCustomToolset {
if (-not $restore) {
return
}
$script = Join-Path $EngRoot "RestoreToolset.ps1"
$script = Join-Path $EngRoot "restore-toolset.ps1"
if (Test-Path $script) {
. $script
}
}
function Build([string] $buildDriver, [string]$buildArgs) {
& $buildDriver $buildArgs $ToolsetBuildProj `
/m /nologo /clp:Summary /warnaserror `
/v:$verbosity `
/bl:$BuildLog `
function Build {
$toolsetBuildProj = InitializeToolset
InitializeCustomToolset
$bl = if ($binaryLog) { "/bl:" + (Join-Path $LogDir "Build.binlog") } else { "" }
$platformArg = if ($platform) { "/p:Platform=$platform" } else { "" }
if ($projects) {
# Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons.
# Explicitly set the type as string[] because otherwise PowerShell would make this char[] if $properties is empty.
[string[]] $msbuildArgs = $properties
$msbuildArgs += "/p:Projects=$projects"
$properties = $msbuildArgs
}
MSBuild $toolsetBuildProj `
$bl `
$platformArg `
/p:Configuration=$configuration `
/p:Projects=$projects `
/p:RepoRoot=$RepoRoot `
/p:Restore=$restore `
/p:DeployDeps=$deployDeps `
@ -260,70 +107,37 @@ function Build([string] $buildDriver, [string]$buildArgs) {
/p:PerformanceTest=$performanceTest `
/p:Sign=$sign `
/p:Publish=$publish `
/p:PublishBuildAssets=$publishBuildAssets `
/p:ContinuousIntegrationBuild=$ci `
/p:CIBuild=$ci `
$properties
if ($lastExitCode -ne 0) {
Write-Host "Build log: $BuildLog" -ForegroundColor DarkGray
exit $lastExitCode
}
}
function Stop-Processes() {
Write-Host "Killing running build processes..."
Get-Process -Name "msbuild" -ErrorAction SilentlyContinue | Stop-Process
Get-Process -Name "dotnet" -ErrorAction SilentlyContinue | Stop-Process
Get-Process -Name "vbcscompiler" -ErrorAction SilentlyContinue | Stop-Process
@properties
}
try {
$RepoRoot = Join-Path $PSScriptRoot "..\.."
$EngRoot = Join-Path $PSScriptRoot ".."
$ArtifactsDir = Join-Path $RepoRoot "artifacts"
$ToolsetDir = Join-Path $ArtifactsDir "toolset"
$LogDir = Join-Path (Join-Path $ArtifactsDir "log") $configuration
$BuildLog = Join-Path $LogDir "Build.binlog"
$ToolsetRestoreLog = Join-Path $LogDir "ToolsetRestore.binlog"
$TempDir = Join-Path (Join-Path $ArtifactsDir "tmp") $configuration
$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot "global.json") | ConvertFrom-Json
if ($projects -eq "") {
$projects = Join-Path $RepoRoot "*.sln"
if ($help -or (($null -ne $properties) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) {
Print-Usage
exit 0
}
if ($env:NUGET_PACKAGES -eq $null) {
# Use local cache on CI to ensure deterministic build,
# use global cache in dev builds to avoid cost of downloading packages.
$env:NUGET_PACKAGES = if ($ci) { Join-Path $RepoRoot ".packages" }
else { Join-Path $env:UserProfile ".nuget\packages" }
}
Create-Directory $ToolsetDir
Create-Directory $LogDir
if ($ci) {
Create-Directory $TempDir
$env:TEMP = $TempDir
$env:TMP = $TempDir
$binaryLog = $true
$nodeReuse = $false
}
$driver, $args = GetBuildCommand
InitializeToolset $driver $args
InitializeCustomToolset
Build $driver $args
# Import custom tools configuration, if present in the repo.
# Note: Import in global scope so that the script set top-level variables without qualification.
$configureToolsetScript = Join-Path $EngRoot "configure-toolset.ps1"
if (Test-Path $configureToolsetScript) {
. $configureToolsetScript
}
if (($restore) -and ($null -eq $env:DisableNativeToolsetInstalls)) {
InitializeNativeTools
}
Build
}
catch {
Write-Host $_
Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
exit 1
}
finally {
Pop-Location
if ($ci -and $prepareMachine) {
Stop-Processes
}
Write-PipelineTaskError -Message $_
ExitWithExitCode 1
}
ExitWithExitCode 0


@ -1,5 +1,44 @@
#!/usr/bin/env bash
# Stop script if unbound variable found (use ${var:-} if intentional)
set -u
# Stop script if command returns non-zero exit code.
# Prevents hidden errors caused by missing error code propagation.
set -e
usage()
{
echo "Common settings:"
echo " --configuration <value> Build configuration: 'Debug' or 'Release' (short: -c)"
echo " --verbosity <value> Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
echo " --binaryLog Create MSBuild binary log (short: -bl)"
echo " --help Print help and exit (short: -h)"
echo ""
echo "Actions:"
echo " --restore Restore dependencies (short: -r)"
echo " --build Build solution (short: -b)"
echo " --rebuild Rebuild solution"
echo " --test Run all unit tests in the solution (short: -t)"
echo " --integrationTest Run all integration tests in the solution"
echo " --performanceTest Run all performance tests in the solution"
echo " --pack Package build outputs into NuGet packages and Willow components"
echo " --sign Sign build outputs"
echo " --publish Publish artifacts (e.g. symbols)"
echo ""
echo "Advanced settings:"
echo " --projects <value> Project or solution file(s) to build"
echo " --ci Set when running on CI server"
echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
echo " --nodeReuse <value> Sets nodereuse msbuild parameter ('true' or 'false')"
echo " --warnAsError <value> Sets warnaserror msbuild parameter ('true' or 'false')"
echo ""
echo "Command line arguments not listed above are passed thru to msbuild."
echo "Arguments can also be passed in with a single hyphen."
}
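# For illustration only (script path assumed): a hypothetical local invocation
# combining the flags documented above; anything not recognized by the parser
# below, such as /p: properties, is passed through to msbuild:
#
#   ./build.sh --restore --build --test --configuration Release --binaryLog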
source="${BASH_SOURCE[0]}"
# resolve $source until the file is no longer a symlink
@ -10,251 +49,116 @@ while [[ -h "$source" ]]; do
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
repo_root="$( cd -P "$( dirname "$source" )/../.." && pwd )"
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
help=false
restore=false
build=false
rebuild=false
test=false
integration_test=false
performance_test=false
pack=false
publish=false
sign=false
public=false
ci=false
warn_as_error=true
node_reuse=true
binary_log=false
pipelines_log=false
projects=''
configuration='Debug'
prepare_machine=false
verbosity='minimal'
properties=''
while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
case $lowerI in
--build)
build=true
shift 1
;;
--ci)
ci=true
shift 1
;;
--configuration)
configuration=$2
shift 2
;;
--help)
echo "Common settings:"
echo " --configuration <value> Build configuration Debug, Release"
echo " --verbosity <value> Msbuild verbosity (q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic])"
echo " --help Print help and exit"
echo ""
echo "Actions:"
echo " --restore Restore dependencies"
echo " --build Build solution"
echo " --rebuild Rebuild solution"
echo " --test Run all unit tests in the solution"
echo " --sign Sign build outputs"
echo " --pack Package build outputs into NuGet packages and Willow components"
echo ""
echo "Advanced settings:"
echo " --solution <value> Path to solution to build"
echo " --ci Set when running on CI server"
echo " --prepareMachine Prepare machine for CI run"
echo ""
echo "Command line arguments not listed above are passed through to MSBuild."
while [[ $# > 0 ]]; do
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
case "$opt" in
-help|-h)
usage
exit 0
;;
--pack)
pack=true
shift 1
-configuration|-c)
configuration=$2
shift
;;
--preparemachine)
prepare_machine=true
shift 1
;;
--rebuild)
rebuild=true
shift 1
;;
--restore)
restore=true
shift 1
;;
--sign)
sign=true
shift 1
;;
--solution)
solution=$2
shift 2
;;
--test)
test=true
shift 1
;;
--integrationtest)
integration_test=true
shift 1
;;
--performancetest)
performance_test=true
shift 1
;;
--publish)
publish=true
shift 1
;;
--verbosity)
-verbosity|-v)
verbosity=$2
shift 2
shift
;;
-binarylog|-bl)
binary_log=true
;;
-pipelineslog|-pl)
pipelines_log=true
;;
-restore|-r)
restore=true
;;
-build|-b)
build=true
;;
-rebuild)
rebuild=true
;;
-pack)
pack=true
;;
-test|-t)
test=true
;;
-integrationtest)
integration_test=true
;;
-performancetest)
performance_test=true
;;
-sign)
sign=true
;;
-publish)
publish=true
;;
-preparemachine)
prepare_machine=true
;;
-projects)
projects=$2
shift
;;
-ci)
ci=true
;;
-warnaserror)
warn_as_error=$2
shift
;;
-nodereuse)
node_reuse=$2
shift
;;
*)
properties="$properties $1"
shift 1
;;
esac
shift
done
eng_root="$repo_root/eng"
artifacts_dir="$repo_root/artifacts"
artifacts_configuration_dir="$artifacts_dir/$configuration"
toolset_dir="$artifacts_dir/toolset"
log_dir="$artifacts_dir/log/$configuration"
build_log="$log_dir/Build.binlog"
toolset_restore_log="$log_dir/ToolsetRestore.binlog"
temp_dir="$artifacts_dir/tmp/$configuration"
if [[ "$ci" == true ]]; then
pipelines_log=true
binary_log=true
node_reuse=false
fi
global_json_file="$repo_root/global.json"
build_driver=""
toolset_build_proj=""
# ReadVersionFromJson [json key]
function ReadGlobalVersion {
local key=$1
local unamestr="$(uname)"
local sedextended='-r'
if [[ "$unamestr" == 'Darwin' ]]; then
sedextended='-E'
fi;
local version="$(grep -m 1 "\"$key\"" $global_json_file | sed $sedextended 's/^ *//;s/.*: *"//;s/",?//')"
if [[ ! "$version" ]]; then
echo "Error: Cannot find \"$key\" in $global_json_file" >&2;
ExitWithExitCode 1
fi;
# return value
echo "$version"
}
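# For illustration only: ReadGlobalVersion greps global.json for a quoted key.
# Assuming global.json contains a fragment shaped roughly like
#   { "tools": { "dotnet": "<sdk version>" } }
# the call used later in InitializeDotNetCli,
#   dotnet_sdk_version=`ReadGlobalVersion "dotnet"`
# echoes just the version string.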
function InitializeDotNetCli {
# Disable first run since we want to control all package sources
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
# Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
export DOTNET_MULTILEVEL_LOOKUP=0
# Source Build uses DotNetCoreSdkDir variable
if [[ -n "$DotNetCoreSdkDir" ]]; then
export DOTNET_INSTALL_DIR="$DotNetCoreSdkDir"
fi
local dotnet_sdk_version=`ReadGlobalVersion "dotnet"`
local dotnet_root=""
# Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
# otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
if [[ -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
dotnet_root="$DOTNET_INSTALL_DIR"
else
dotnet_root="$repo_root/.dotnet"
export DOTNET_INSTALL_DIR="$dotnet_root"
if [[ "$restore" == true ]]; then
InstallDotNetSdk $dotnet_root $dotnet_sdk_version
fi
fi
build_driver="$dotnet_root/dotnet"
}
function InstallDotNetSdk {
local root=$1
local version=$2
local install_script=`GetDotNetInstallScript $root`
bash "$install_script" --version $version --install-dir $root
local lastexitcode=$?
if [[ $lastexitcode != 0 ]]; then
echo "Failed to install dotnet SDK (exit code '$lastexitcode')."
ExitWithExitCode $lastexitcode
fi
}
function GetDotNetInstallScript {
local root=$1
local install_script="$root/dotnet-install.sh"
if [[ ! -a "$install_script" ]]; then
mkdir -p "$root"
# Use curl if available, otherwise use wget
if command -v curl > /dev/null; then
curl "https://dot.net/v1/dotnet-install.sh" -sSL --retry 10 --create-dirs -o "$install_script"
else
wget -q -O "$install_script" "https://dot.net/v1/dotnet-install.sh"
fi
fi
# return value
echo "$install_script"
}
function InitializeToolset {
local toolset_version=`ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk"`
local toolset_location_file="$toolset_dir/$toolset_version.txt"
if [[ -a "$toolset_location_file" ]]; then
local path=`cat $toolset_location_file`
if [[ -a "$path" ]]; then
toolset_build_proj=$path
return
fi
fi
if [[ "$restore" != true ]]; then
echo "Toolset version $toolset_version has not been restored."
ExitWithExitCode 2
fi
local proj="$toolset_dir/restore.proj"
echo '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' > $proj
"$build_driver" msbuild $proj /t:__WriteToolsetLocation /m /nologo /clp:None /warnaserror /bl:$toolset_restore_log /v:$verbosity /p:__ToolsetLocationOutputFile=$toolset_location_file
local lastexitcode=$?
if [[ $lastexitcode != 0 ]]; then
echo "Failed to restore toolset (exit code '$lastexitcode'). See log: $toolset_restore_log"
ExitWithExitCode $lastexitcode
fi
toolset_build_proj=`cat $toolset_location_file`
if [[ ! -a "$toolset_build_proj" ]]; then
echo "Invalid toolset path: $toolset_build_proj"
ExitWithExitCode 3
fi
}
. "$scriptroot/tools.sh"
function InitializeCustomToolset {
local script="$eng_root/RestoreToolset.sh"
local script="$eng_root/restore-toolset.sh"
if [[ -a "$script" ]]; then
. "$script"
@ -262,81 +166,51 @@ function InitializeCustomToolset {
}
function Build {
"$build_driver" msbuild $toolset_build_proj \
/m /nologo /clp:Summary /warnaserror \
/v:$verbosity \
/bl:$build_log \
InitializeToolset
InitializeCustomToolset
if [[ ! -z "$projects" ]]; then
properties="$properties /p:Projects=$projects"
fi
local bl=""
if [[ "$binary_log" == true ]]; then
bl="/bl:\"$log_dir/Build.binlog\""
fi
MSBuild $_InitializeToolset \
$bl \
/p:Configuration=$configuration \
/p:Projects=$projects \
/p:RepoRoot="$repo_root" \
/p:Restore=$restore \
/p:Build=$build \
/p:Rebuild=$rebuild \
/p:Deploy=$deploy \
/p:Test=$test \
/p:Pack=$pack \
/p:IntegrationTest=$integration_test \
/p:PerformanceTest=$performance_test \
/p:Sign=$sign \
/p:Publish=$publish \
/p:ContinuousIntegrationBuild=$ci \
/p:CIBuild=$ci \
$properties
local lastexitcode=$?
if [[ $lastexitcode != 0 ]]; then
echo "Failed to build $toolset_build_proj"
ExitWithExitCode $lastexitcode
fi
ExitWithExitCode 0
}
function ExitWithExitCode {
if [[ "$ci" == true && "$prepare_machine" == true ]]; then
StopProcesses
fi
exit $1
}
# Import custom tools configuration, if present in the repo.
configure_toolset_script="$eng_root/configure-toolset.sh"
if [[ -a "$configure_toolset_script" ]]; then
. "$configure_toolset_script"
fi
function StopProcesses {
echo "Killing running build processes..."
pkill -9 "dotnet"
pkill -9 "vbcscompiler"
}
# TODO: https://github.com/dotnet/arcade/issues/1468
# Temporary workaround to avoid breaking change.
# Remove once repos are updated.
if [[ -n "${useInstalledDotNetCli:-}" ]]; then
use_installed_dotnet_cli="$useInstalledDotNetCli"
fi
function Main {
# HOME may not be defined in some scenarios, but it is required by NuGet
if [[ -z $HOME ]]; then
export HOME="$repo_root/artifacts/.home/"
mkdir -p "$HOME"
fi
if [[ "$restore" == true && -z ${DisableNativeToolsetInstalls:-} ]]; then
InitializeNativeTools
fi
if [[ -z $projects ]]; then
projects="$repo_root/*.sln"
fi
if [[ -z $NUGET_PACKAGES ]]; then
if [[ $ci == true ]]; then
export NUGET_PACKAGES="$repo_root/.packages"
else
export NUGET_PACKAGES="$HOME/.nuget/packages"
fi
fi
mkdir -p "$toolset_dir"
mkdir -p "$log_dir"
if [[ $ci == true ]]; then
mkdir -p "$temp_dir"
export TEMP="$temp_dir"
export TMP="$temp_dir"
fi
InitializeDotNetCli
InitializeToolset
InitializeCustomToolset
Build
ExitWithExitCode $?
}
Main
Build


@ -13,4 +13,4 @@ while [[ -h $source ]]; do
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@


@ -0,0 +1,41 @@
set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../)
set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot)
set(CLR_CMAKE_PLATFORM_ANDROID "Android")
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_VERSION 1)
set(CMAKE_SYSTEM_PROCESSOR arm)
## Specify the toolchain
set(TOOLCHAIN "arm-linux-androideabi")
set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN})
set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-)
find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang)
find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++)
find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang)
find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar)
find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar)
find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy)
find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
add_compile_options(--sysroot=${CROSS_ROOTFS})
add_compile_options(-fPIE)
add_compile_options(-mfloat-abi=soft)
include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/)
include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/arm-linux-androideabi/)
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)


@ -0,0 +1,42 @@
set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../)
set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot)
set(CLR_CMAKE_PLATFORM_ANDROID "Android")
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_VERSION 1)
set(CMAKE_SYSTEM_PROCESSOR aarch64)
## Specify the toolchain
set(TOOLCHAIN "aarch64-linux-android")
set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN})
set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-)
find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang)
find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++)
find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang)
find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar)
find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar)
find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy)
find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
add_compile_options(--sysroot=${CROSS_ROOTFS})
add_compile_options(-fPIE)
## Needed for Android or bionic specific conditionals
add_compile_options(-D__ANDROID__)
add_compile_options(-D__BIONIC__)
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse


@ -0,0 +1,3 @@
# Debian (sid) # UNSTABLE
deb http://ftp.debian.org/debian/ sid main contrib non-free
deb-src http://ftp.debian.org/debian/ sid main contrib non-free


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse


@ -0,0 +1,71 @@
From e72c9d7ead60e3317bd6d1fade995c07021c947b Mon Sep 17 00:00:00 2001
From: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
Date: Thu, 7 May 2015 13:25:04 -0400
Subject: [PATCH] Fix: building probe providers with C++ compiler
Robert Daniels wrote:
> > I'm attempting to use lttng userspace tracing with a C++ application
> > on an ARM platform. I'm using GCC 4.8.4 on Linux 3.14 with the 2.6
> > release of lttng. I've compiled lttng-modules, lttng-ust, and
> > lttng-tools and have been able to get a simple test working with C
> > code. When I attempt to run the hello.cxx test on my target it will
> > segfault.
>
>
> I spent a little time digging into this issue and finally discovered the
> cause of my segfault with ARM C++ tracepoints.
>
> There is a struct called 'lttng_event' in ust-events.h which contains an
> empty union 'u'. This was the cause of my issue. Under C, this empty union
> compiles to a zero byte member while under C++ it compiles to a one byte
> member, and in my case was four-byte aligned which caused my C++ code to
> have the 'cds_list_head node' offset incorrectly by four bytes. This lead
> to an incorrect linked list structure which caused my issue.
>
> Since this union is empty, I simply removed it from the struct and everything
> worked correctly.
>
> I don't know the history or purpose behind this empty union so I'd like to
> know if this is a safe fix. If it is I can submit a patch with the union
> removed.
That's a very nice catch!
We do not support building tracepoint probe provider with
g++ yet, as stated in lttng-ust(3):
"- Note for C++ support: although an application instrumented with
tracepoints can be compiled with g++, tracepoint probes should be
compiled with gcc (only tested with gcc so far)."
However, if it works fine with this fix, then I'm tempted to take it,
especially because removing the empty union does not appear to affect
the layout of struct lttng_event as seen from liblttng-ust, which must
be compiled with a C compiler, and from probe providers compiled with
a C compiler. So all we are changing is the layout of a probe provider
compiled with a C++ compiler, which is anyway buggy at the moment,
because it is not compatible with the layout expected by liblttng-ust
compiled with a C compiler.
Reported-by: Robert Daniels <robert.daniels@vantagecontrols.com>
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
---
include/lttng/ust-events.h | 2 --
1 file changed, 2 deletions(-)
diff --git a/usr/include/lttng/ust-events.h b/usr/include/lttng/ust-events.h
index 328a875..3d7a274 100644
--- a/usr/include/lttng/ust-events.h
+++ b/usr/include/lttng/ust-events.h
@@ -407,8 +407,6 @@ struct lttng_event {
void *_deprecated1;
struct lttng_ctx *ctx;
enum lttng_ust_instrumentation instrumentation;
- union {
- } u;
struct cds_list_head node; /* Event list in session */
struct cds_list_head _deprecated2;
void *_deprecated3;
--
2.7.4


@ -0,0 +1,97 @@
diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
--- a/usr/include/urcu/uatomic/generic.h 2014-03-28 06:04:42.000000000 +0900
+++ b/usr/include/urcu/uatomic/generic.h 2017-02-13 10:35:21.189927116 +0900
@@ -65,17 +65,17 @@
switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
case 1:
- return __sync_val_compare_and_swap_1(addr, old, _new);
+ return __sync_val_compare_and_swap_1((uint8_t *) addr, old, _new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
case 2:
- return __sync_val_compare_and_swap_2(addr, old, _new);
+ return __sync_val_compare_and_swap_2((uint16_t *) addr, old, _new);
#endif
case 4:
- return __sync_val_compare_and_swap_4(addr, old, _new);
+ return __sync_val_compare_and_swap_4((uint32_t *) addr, old, _new);
#if (CAA_BITS_PER_LONG == 64)
case 8:
- return __sync_val_compare_and_swap_8(addr, old, _new);
+ return __sync_val_compare_and_swap_8((uint64_t *) addr, old, _new);
#endif
}
_uatomic_link_error();
@@ -100,20 +100,20 @@
switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
case 1:
- __sync_and_and_fetch_1(addr, val);
+ __sync_and_and_fetch_1((uint8_t *) addr, val);
return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
case 2:
- __sync_and_and_fetch_2(addr, val);
+ __sync_and_and_fetch_2((uint16_t *) addr, val);
return;
#endif
case 4:
- __sync_and_and_fetch_4(addr, val);
+ __sync_and_and_fetch_4((uint32_t *) addr, val);
return;
#if (CAA_BITS_PER_LONG == 64)
case 8:
- __sync_and_and_fetch_8(addr, val);
+ __sync_and_and_fetch_8((uint64_t *) addr, val);
return;
#endif
}
@@ -139,20 +139,20 @@
switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
case 1:
- __sync_or_and_fetch_1(addr, val);
+ __sync_or_and_fetch_1((uint8_t *) addr, val);
return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
case 2:
- __sync_or_and_fetch_2(addr, val);
+ __sync_or_and_fetch_2((uint16_t *) addr, val);
return;
#endif
case 4:
- __sync_or_and_fetch_4(addr, val);
+ __sync_or_and_fetch_4((uint32_t *) addr, val);
return;
#if (CAA_BITS_PER_LONG == 64)
case 8:
- __sync_or_and_fetch_8(addr, val);
+ __sync_or_and_fetch_8((uint64_t *) addr, val);
return;
#endif
}
@@ -180,17 +180,17 @@
switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
case 1:
- return __sync_add_and_fetch_1(addr, val);
+ return __sync_add_and_fetch_1((uint8_t *) addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
case 2:
- return __sync_add_and_fetch_2(addr, val);
+ return __sync_add_and_fetch_2((uint16_t *) addr, val);
#endif
case 4:
- return __sync_add_and_fetch_4(addr, val);
+ return __sync_add_and_fetch_4((uint32_t *) addr, val);
#if (CAA_BITS_PER_LONG == 64)
case 8:
- return __sync_add_and_fetch_8(addr, val);
+ return __sync_add_and_fetch_8((uint64_t *) addr, val);
#endif
}
_uatomic_link_error();


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://deb.debian.org/debian buster main
deb-src http://deb.debian.org/debian buster main
deb http://deb.debian.org/debian-security/ buster/updates main
deb-src http://deb.debian.org/debian-security/ buster/updates main
deb http://deb.debian.org/debian buster-updates main
deb-src http://deb.debian.org/debian buster-updates main
deb http://deb.debian.org/debian buster-backports main contrib non-free
deb-src http://deb.debian.org/debian buster-backports main contrib non-free


@ -0,0 +1,12 @@
deb http://deb.debian.org/debian stretch main
deb-src http://deb.debian.org/debian stretch main
deb http://deb.debian.org/debian-security/ stretch/updates main
deb-src http://deb.debian.org/debian-security/ stretch/updates main
deb http://deb.debian.org/debian stretch-updates main
deb-src http://deb.debian.org/debian stretch-updates main
deb http://deb.debian.org/debian stretch-backports main contrib non-free
deb-src http://deb.debian.org/debian stretch-backports main contrib non-free


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse


@ -0,0 +1,3 @@
# Debian (jessie) # Stable
deb http://ftp.debian.org/debian/ jessie main contrib non-free
deb-src http://ftp.debian.org/debian/ jessie main contrib non-free


@ -0,0 +1,44 @@
#!/usr/bin/env bash
set -e
__ARM_SOFTFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__TIZEN_CROSSDIR="$__ARM_SOFTFP_CrossDir/tizen"
if [[ -z "$ROOTFS_DIR" ]]; then
echo "ROOTFS_DIR is not defined."
exit 1;
fi
# Clean-up (TODO-Cleanup: We may already delete $ROOTFS_DIR at ./cross/build-rootfs.sh.)
# hk0110
if [ -d "$ROOTFS_DIR" ]; then
umount $ROOTFS_DIR/*
rm -rf $ROOTFS_DIR
fi
TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
mkdir -p $TIZEN_TMP_DIR
# Download files
echo ">>Start downloading files"
VERBOSE=1 $__ARM_SOFTFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
echo "<<Finish downloading files"
echo ">>Start constructing Tizen rootfs"
TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
cd $ROOTFS_DIR
for f in $TIZEN_RPM_FILES; do
rpm2cpio $f | cpio -idm --quiet
done
echo "<<Finish constructing Tizen rootfs"
# Cleanup tmp
rm -rf $TIZEN_TMP_DIR
# Configure Tizen rootfs
echo ">>Start configuring Tizen rootfs"
rm ./usr/lib/libunwind.so
ln -s libunwind.so.8 ./usr/lib/libunwind.so
ln -sfn asm-arm ./usr/include/asm
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
echo "<<Finish configuring Tizen rootfs"
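# For illustration only (paths assumed): this script is normally driven by
# build-rootfs.sh after the armel/tizen configuration is selected, which runs it
# roughly as:
#   ROOTFS_DIR=/path/to/armel/rootfs ./tizen-build-rootfs.sh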


@ -0,0 +1,171 @@
#!/usr/bin/env bash
set -e
if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
VERBOSE=0
fi
Log()
{
if [ $VERBOSE -ge $1 ]; then
echo ${@:2}
fi
}
Inform()
{
Log 1 -e "\x1B[0;34m$@\x1B[m"
}
Debug()
{
Log 2 -e "\x1B[0;32m$@\x1B[m"
}
Error()
{
>&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
}
Fetch()
{
URL=$1
FILE=$2
PROGRESS=$3
if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
CURL_OPT="--progress-bar"
else
CURL_OPT="--silent"
fi
curl $CURL_OPT $URL > $FILE
}
hash curl 2> /dev/null || { Error "'curl' is required. Aborting."; exit 1; }
hash xmllint 2> /dev/null || { Error "'xmllint' is required. Aborting."; exit 1; }
hash sha256sum 2> /dev/null || { Error "'sha256sum' is required. Aborting."; exit 1; }
TMPDIR=$1
if [ ! -d $TMPDIR ]; then
TMPDIR=./tizen_tmp
Debug "Create temporary directory : $TMPDIR"
mkdir -p $TMPDIR
fi
TIZEN_URL=http://download.tizen.org/releases/milestone/tizen
BUILD_XML=build.xml
REPOMD_XML=repomd.xml
PRIMARY_XML=primary.xml
TARGET_URL="http://__not_initialized"
Xpath_get()
{
XPATH_RESULT=''
XPATH=$1
XML_FILE=$2
RESULT=$(xmllint --xpath $XPATH $XML_FILE)
if [[ -z ${RESULT// } ]]; then
Error "Cannot find target in $XML_FILE"
Debug "Xpath = $XPATH"
exit 1
fi
XPATH_RESULT=$RESULT
}
fetch_tizen_pkgs_init()
{
TARGET=$1
PROFILE=$2
Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
mkdir -p $TMP_PKG_DIR
PKG_URL=$TIZEN_URL/$PROFILE/latest
BUILD_XML_URL=$PKG_URL/$BUILD_XML
TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
Fetch $BUILD_XML_URL $TMP_BUILD
Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
Xpath_get $TARGET_XPATH $TMP_BUILD
TARGET_PATH=$XPATH_RESULT
TARGET_URL=$PKG_URL/$TARGET_PATH
REPOMD_URL=$TARGET_URL/repodata/repomd.xml
PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
Fetch $REPOMD_URL $TMP_REPOMD
Debug "fetch $REPOMD_URL to $TMP_REPOMD"
Xpath_get $PRIMARY_XPATH $TMP_REPOMD
PRIMARY_XML_PATH=$XPATH_RESULT
PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
Fetch $PRIMARY_URL $TMP_PRIMARYGZ
Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
gunzip $TMP_PRIMARYGZ
Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
}
fetch_tizen_pkgs()
{
ARCH=$1
PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
for pkg in ${@:2}
do
Inform "Fetching... $pkg"
XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
XPATH=${XPATH/_ARCH_/$ARCH}
Xpath_get $XPATH $TMP_PRIMARY
PKG_PATH=$XPATH_RESULT
XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
XPATH=${XPATH/_ARCH_/$ARCH}
Xpath_get $XPATH $TMP_PRIMARY
CHECKSUM=$XPATH_RESULT
PKG_URL=$TARGET_URL/$PKG_PATH
PKG_FILE=$(basename $PKG_PATH)
PKG_PATH=$TMPDIR/$PKG_FILE
Debug "Download $PKG_URL to $PKG_PATH"
Fetch $PKG_URL $PKG_PATH true
echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
if [ $? -ne 0 ]; then
Error "Fail to fetch $PKG_URL to $PKG_PATH"
Debug "Checksum = $CHECKSUM"
exit 1
fi
done
}
Inform "Initialize arm base"
fetch_tizen_pkgs_init standard base
Inform "fetch common packages"
fetch_tizen_pkgs armv7l gcc glibc glibc-devel libicu libicu-devel libatomic
fetch_tizen_pkgs noarch linux-glibc-devel
Inform "fetch coreclr packages"
fetch_tizen_pkgs armv7l lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
Inform "fetch corefx packages"
fetch_tizen_pkgs armv7l libcom_err libcom_err-devel zlib zlib-devel libopenssl libopenssl-devel krb5 krb5-devel libcurl libcurl-devel
Inform "Initialize standard unified"
fetch_tizen_pkgs_init standard unified
Inform "fetch corefx packages"
fetch_tizen_pkgs armv7l gssdp gssdp-devel tizen-release
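# For illustration only (path assumed): tizen-build-rootfs.sh above invokes this
# script with a scratch directory as its single argument, e.g.
#   VERBOSE=1 ./tizen-fetch.sh /path/to/tizen_tmp
# VERBOSE=1 enables the Inform messages; VERBOSE=2 also enables Debug output.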


@ -0,0 +1,50 @@
lang en_US.UTF-8
keyboard us
timezone --utc Asia/Seoul
part / --fstype="ext4" --size=3500 --ondisk=mmcblk0 --label rootfs --fsoptions=defaults,noatime
rootpw tizen
desktop --autologinuser=root
user --name root --groups audio,video --password 'tizen'
repo --name=standard --baseurl=http://download.tizen.org/releases/milestone/tizen/unified/latest/repos/standard/packages/ --ssl_verify=no
repo --name=base --baseurl=http://download.tizen.org/releases/milestone/tizen/base/latest/repos/standard/packages/ --ssl_verify=no
%packages
tar
gzip
sed
grep
gawk
perl
binutils
findutils
util-linux
lttng-ust
userspace-rcu
procps-ng
tzdata
ca-certificates
### Core FX
libicu
libunwind
iputils
zlib
krb5
libcurl
libopenssl
%end
%post
### Update /tmp privilege
chmod 777 /tmp
####################################
%end


@ -0,0 +1,18 @@
diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
+++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
@@ -2,4 +2,4 @@
Use the shared library, but some functions are only in
the static library, so try that secondarily. */
OUTPUT_FORMAT(elf32-littlearm)
-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) )
diff -u -r a/usr/lib/libpthread.so b/usr/lib/libpthread.so
--- a/usr/lib/libpthread.so 2016-12-30 23:00:19.408951841 +0900
+++ b/usr/lib/libpthread.so 2016-12-30 23:00:39.068951801 +0900
@@ -2,4 +2,4 @@
Use the shared library, but some functions are only in
the static library, so try that secondarily. */
OUTPUT_FORMAT(elf32-littlearm)
-GROUP ( /lib/libpthread.so.0 /usr/lib/libpthread_nonshared.a )
+GROUP ( libpthread.so.0 libpthread_nonshared.a )


@ -0,0 +1,137 @@
#!/usr/bin/env bash
set -e
__NDK_Version=r14
usage()
{
echo "Creates a toolchain and sysroot used for cross-compiling for Android."
echo ""
echo "Usage: $0 [BuildArch] [ApiLevel]"
echo ""
echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
echo ""
echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
echo "by setting the TOOLCHAIN_DIR environment variable"
echo ""
echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation,"
echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK."
echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.21-arm64. This file is to replace '/etc/os-release', which is not available for Android."
exit 1
}
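# Example (hypothetical; script path assumed) invocations using the arguments described above:
#   ./build-android-rootfs.sh            # arm64 toolchain for the default API level 21
#   ./build-android-rootfs.sh arm64 24   # arm64 toolchain for API level 24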
__ApiLevel=21 # The minimum platform for arm64 is API level 21
__BuildArch=arm64
__AndroidArch=aarch64
__AndroidToolchain=aarch64-linux-android
for i in "$@"
do
lowerI="$(echo $i | awk '{print tolower($0)}')"
case $lowerI in
-?|-h|--help)
usage
exit 1
;;
arm64)
__BuildArch=arm64
__AndroidArch=aarch64
__AndroidToolchain=aarch64-linux-android
;;
arm)
__BuildArch=arm
__AndroidArch=arm
__AndroidToolchain=arm-linux-androideabi
;;
*[0-9])
__ApiLevel=$i
;;
*)
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
;;
esac
done
# Obtain the location of the bash script to figure out where the root of the repo is.
__CrossDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
__Android_Cross_Dir="$__CrossDir/android-rootfs"
__NDK_Dir="$__Android_Cross_Dir/android-ndk-$__NDK_Version"
__libunwind_Dir="$__Android_Cross_Dir/libunwind"
__lldb_Dir="$__Android_Cross_Dir/lldb"
__ToolchainDir="$__Android_Cross_Dir/toolchain/$__BuildArch"
if [[ -n "$TOOLCHAIN_DIR" ]]; then
__ToolchainDir=$TOOLCHAIN_DIR
fi
if [[ -n "$NDK_DIR" ]]; then
__NDK_Dir=$NDK_DIR
fi
echo "Target API level: $__ApiLevel"
echo "Target architecture: $__BuildArch"
echo "NDK location: $__NDK_Dir"
echo "Target Toolchain location: $__ToolchainDir"
# Download the NDK if required
if [ ! -d $__NDK_Dir ]; then
echo Downloading the NDK into $__NDK_Dir
mkdir -p $__NDK_Dir
wget -nv -nc --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__Android_Cross_Dir/android-ndk-$__NDK_Version-linux-x86_64.zip
unzip -q $__Android_Cross_Dir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__Android_Cross_Dir
fi
if [ ! -d $__lldb_Dir ]; then
mkdir -p $__lldb_Dir
echo Downloading LLDB into $__lldb_Dir
wget -nv -nc --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O $__Android_Cross_Dir/lldb-2.3.3614996-linux-x86_64.zip
unzip -q $__Android_Cross_Dir/lldb-2.3.3614996-linux-x86_64.zip -d $__lldb_Dir
fi
# Create the RootFS for both arm64 as well as aarch
rm -rf $__Android_Cross_Dir/toolchain
echo Generating the $__BuildArch toolchain
$__NDK_Dir/build/tools/make_standalone_toolchain.py --arch $__BuildArch --api $__ApiLevel --install-dir $__ToolchainDir
# Install the required packages into the toolchain
# TODO: Add logic to get latest pkg version instead of specific version number
rm -rf $__Android_Cross_Dir/deb/
rm -rf $__Android_Cross_Dir/tmp
mkdir -p $__Android_Cross_Dir/deb/
mkdir -p $__Android_Cross_Dir/tmp/$arch/
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libicu_60.2_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libicu_60.2_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libicu-dev_60.2_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libicu-dev_60.2_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-glob-dev_0.4_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-glob-dev_0.4_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-glob_0.4_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-glob_0.4_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-support-dev_22_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-support-dev_22_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-support_22_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-support_22_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/liblzma-dev_5.2.3_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/liblzma-dev_5.2.3_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/liblzma_5.2.3_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/liblzma_5.2.3_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libunwind-dev_1.2.20170304_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libunwind-dev_1.2.20170304_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libunwind_1.2.20170304_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libunwind_1.2.20170304_$__AndroidArch.deb
echo Unpacking Termux packages
dpkg -x $__Android_Cross_Dir/deb/libicu_60.2_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libicu-dev_60.2_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libandroid-glob-dev_0.4_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libandroid-glob_0.4_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libandroid-support-dev_22_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libandroid-support_22_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/liblzma-dev_5.2.3_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/liblzma_5.2.3_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libunwind-dev_1.2.20170304_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libunwind_1.2.20170304_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
cp -R $__Android_Cross_Dir/tmp/$__AndroidArch/data/data/com.termux/files/usr/* $__ToolchainDir/sysroot/usr/
# Generate platform file for build.sh script to assign to __DistroRid
echo "Generating platform file..."
echo "RID=android.21-arm64" > $__ToolchainDir/sysroot/android_platform
echo Now run:
echo CONFIG_DIR=\`realpath cross/android/$__BuildArch\` ROOTFS_DIR=\`realpath $__ToolchainDir/sysroot\` ./build.sh cross $__BuildArch skipgenerateversion skipnuget cmakeargs -DENABLE_LLDBPLUGIN=0


@ -0,0 +1,233 @@
#!/usr/bin/env bash
usage()
{
echo "Usage: $0 [BuildArch] [LinuxCodeName] [lldbx.y] [--skipunmount] [--rootfsdir <directory>]"
echo "BuildArch can be: arm(default), armel, arm64, x86"
echo "LinuxCodeName - optional, code name for Linux, can be: trusty, xenial(default), zesty, bionic, alpine. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0, no-lldb. Ignored for alpine."
echo "--skipunmount - optional, skips unmounting the rootfs folder."
exit 1
}
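# Example (hypothetical; script path assumed) invocations combining the arguments above:
#   ./build-rootfs.sh arm64 bionic lldb6.0 --rootfsdir /path/to/rootfs
#   ./build-rootfs.sh armel tizen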
__LinuxCodeName=xenial
__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__InitialDir=$PWD
__BuildArch=arm
__UbuntuArch=armhf
__UbuntuRepo="http://ports.ubuntu.com/"
__LLDB_Package="liblldb-3.9-dev"
__SkipUnmount=0
# base development support
__UbuntuPackages="build-essential"
__AlpinePackages="alpine-base"
__AlpinePackages+=" build-base"
__AlpinePackages+=" linux-headers"
__AlpinePackages+=" lldb-dev"
__AlpinePackages+=" llvm-dev"
# symlinks fixer
__UbuntuPackages+=" symlinks"
# CoreCLR and CoreFX dependencies
__UbuntuPackages+=" libicu-dev"
__UbuntuPackages+=" liblttng-ust-dev"
__UbuntuPackages+=" libunwind8-dev"
__AlpinePackages+=" gettext-dev"
__AlpinePackages+=" icu-dev"
__AlpinePackages+=" libunwind-dev"
__AlpinePackages+=" lttng-ust-dev"
# CoreFX dependencies
__UbuntuPackages+=" libcurl4-openssl-dev"
__UbuntuPackages+=" libkrb5-dev"
__UbuntuPackages+=" libssl-dev"
__UbuntuPackages+=" zlib1g-dev"
__AlpinePackages+=" curl-dev"
__AlpinePackages+=" krb5-dev"
__AlpinePackages+=" openssl-dev"
__AlpinePackages+=" zlib-dev"
__UnprocessedBuildArgs=
while :; do
if [ $# -le 0 ]; then
break
fi
lowerI="$(echo $1 | awk '{print tolower($0)}')"
case $lowerI in
-?|-h|--help)
usage
exit 1
;;
arm)
__BuildArch=arm
__UbuntuArch=armhf
__AlpineArch=armhf
__QEMUArch=arm
;;
arm64)
__BuildArch=arm64
__UbuntuArch=arm64
__AlpineArch=aarch64
__QEMUArch=aarch64
;;
armel)
__BuildArch=armel
__UbuntuArch=armel
__UbuntuRepo="http://ftp.debian.org/debian/"
__LinuxCodeName=jessie
;;
x86)
__BuildArch=x86
__UbuntuArch=i386
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
;;
lldb3.6)
__LLDB_Package="lldb-3.6-dev"
;;
lldb3.8)
__LLDB_Package="lldb-3.8-dev"
;;
lldb3.9)
__LLDB_Package="liblldb-3.9-dev"
;;
lldb4.0)
__LLDB_Package="liblldb-4.0-dev"
;;
lldb5.0)
__LLDB_Package="liblldb-5.0-dev"
;;
lldb6.0)
__LLDB_Package="liblldb-6.0-dev"
;;
no-lldb)
unset __LLDB_Package
;;
trusty) # Ubuntu 14.04
if [ "$__LinuxCodeName" != "jessie" ]; then
__LinuxCodeName=trusty
fi
;;
xenial) # Ubuntu 16.04
if [ "$__LinuxCodeName" != "jessie" ]; then
__LinuxCodeName=xenial
fi
;;
zesty) # Ubuntu 17.04
if [ "$__LinuxCodeName" != "jessie" ]; then
__LinuxCodeName=zesty
fi
;;
bionic) # Ubuntu 18.04
if [ "$__LinuxCodeName" != "jessie" ]; then
__LinuxCodeName=bionic
fi
;;
jessie) # Debian 8
__LinuxCodeName=jessie
__UbuntuRepo="http://ftp.debian.org/debian/"
;;
stretch) # Debian 9
__LinuxCodeName=stretch
__UbuntuRepo="http://ftp.debian.org/debian/"
__LLDB_Package="liblldb-6.0-dev"
;;
buster) # Debian 10
__LinuxCodeName=buster
__UbuntuRepo="http://ftp.debian.org/debian/"
__LLDB_Package="liblldb-6.0-dev"
;;
tizen)
if [ "$__BuildArch" != "armel" ]; then
echo "Tizen is available only for armel."
usage;
exit 1;
fi
__LinuxCodeName=
__UbuntuRepo=
__Tizen=tizen
;;
alpine)
__LinuxCodeName=alpine
__UbuntuRepo=
;;
--skipunmount)
__SkipUnmount=1
;;
--rootfsdir|-rootfsdir)
shift
__RootfsDir=$1
;;
*)
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
;;
esac
shift
done
if [ "$__BuildArch" == "armel" ]; then
__LLDB_Package="lldb-3.5-dev"
fi
__UbuntuPackages+=" ${__LLDB_Package:-}"
if [ -z "$__RootfsDir" ] && [ ! -z "$ROOTFS_DIR" ]; then
__RootfsDir=$ROOTFS_DIR
fi
if [ -z "$__RootfsDir" ]; then
__RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch"
fi
if [ -d "$__RootfsDir" ]; then
if [ $__SkipUnmount == 0 ]; then
umount $__RootfsDir/*
fi
rm -rf $__RootfsDir
fi
if [[ "$__LinuxCodeName" == "alpine" ]]; then
__ApkToolsVersion=2.9.1
__AlpineVersion=3.7
__ApkToolsDir=$(mktemp -d)
wget https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -P $__ApkToolsDir
tar -xf $__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -C $__ApkToolsDir
mkdir -p $__RootfsDir/usr/bin
cp -v /usr/bin/qemu-$__QEMUArch-static $__RootfsDir/usr/bin
$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main \
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community \
-X http://dl-cdn.alpinelinux.org/alpine/edge/testing \
-U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
add $__AlpinePackages
rm -r $__ApkToolsDir
elif [[ -n $__LinuxCodeName ]]; then
qemu-debootstrap --arch $__UbuntuArch $__LinuxCodeName $__RootfsDir $__UbuntuRepo
cp $__CrossDir/$__BuildArch/sources.list.$__LinuxCodeName $__RootfsDir/etc/apt/sources.list
chroot $__RootfsDir apt-get update
chroot $__RootfsDir apt-get -f -y install
chroot $__RootfsDir apt-get -y install $__UbuntuPackages
chroot $__RootfsDir symlinks -cr /usr
if [ $__SkipUnmount == 0 ]; then
umount $__RootfsDir/*
fi
if [[ "$__BuildArch" == "arm" && "$__LinuxCodeName" == "trusty" ]]; then
pushd $__RootfsDir
patch -p1 < $__CrossDir/$__BuildArch/trusty.patch
patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch
popd
fi
elif [ "$__Tizen" == "tizen" ]; then
ROOTFS_DIR=$__RootfsDir $__CrossDir/$__BuildArch/tizen-build-rootfs.sh
else
echo "Unsupported target platform."
usage;
exit 1
fi


@ -0,0 +1,138 @@
set(CROSS_ROOTFS $ENV{ROOTFS_DIR})
set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH})
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_VERSION 1)
if(TARGET_ARCH_NAME STREQUAL "armel")
set(CMAKE_SYSTEM_PROCESSOR armv7l)
set(TOOLCHAIN "arm-linux-gnueabi")
if("$ENV{__DistroRid}" MATCHES "tizen.*")
set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/6.2.1")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "arm")
set(CMAKE_SYSTEM_PROCESSOR armv7l)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
set(TOOLCHAIN "armv6-alpine-linux-musleabihf")
else()
set(TOOLCHAIN "arm-linux-gnueabihf")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "arm64")
set(CMAKE_SYSTEM_PROCESSOR aarch64)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl)
set(TOOLCHAIN "aarch64-alpine-linux-musl")
else()
set(TOOLCHAIN "aarch64-linux-gnu")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "x86")
set(CMAKE_SYSTEM_PROCESSOR i686)
set(TOOLCHAIN "i686-linux-gnu")
else()
message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only armel, arm, arm64 and x86 are supported!")
endif()
# Specify include paths
if(TARGET_ARCH_NAME STREQUAL "armel")
if(DEFINED TIZEN_TOOLCHAIN)
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi)
endif()
endif()
# add_compile_param - adds only new options, avoiding duplicates.
# arg0 - list that receives the resulting options, arg1 - list of new options.
# arg2 - optional quick-summary string; when given, the result is stored in CACHE FORCE mode.
macro(add_compile_param)
if(NOT ${ARGC} MATCHES "^(2|3)$")
message(FATAL_ERROR "Incorrect use of add_compile_param! Two or three parameters must be given! See the add_compile_param description.")
endif()
foreach(OPTION ${ARGV1})
if(NOT ${ARGV0} MATCHES "${OPTION}($| )")
set(${ARGV0} "${${ARGV0}} ${OPTION}")
if(${ARGC} EQUAL "3") # CACHE FORCE mode
set(${ARGV0} "${${ARGV0}}" CACHE STRING "${ARGV2}" FORCE)
endif()
endif()
endforeach()
endmacro()
# Specify link flags
add_compile_param(CROSS_LINK_FLAGS "--sysroot=${CROSS_ROOTFS}")
add_compile_param(CROSS_LINK_FLAGS "--gcc-toolchain=${CROSS_ROOTFS}/usr")
add_compile_param(CROSS_LINK_FLAGS "--target=${TOOLCHAIN}")
add_compile_param(CROSS_LINK_FLAGS "-fuse-ld=gold")
if(TARGET_ARCH_NAME STREQUAL "armel")
if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only
add_compile_param(CROSS_LINK_FLAGS "-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/lib")
add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib")
add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "x86")
add_compile_param(CROSS_LINK_FLAGS "-m32")
endif()
add_compile_param(CMAKE_EXE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
add_compile_param(CMAKE_SHARED_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
add_compile_param(CMAKE_MODULE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
# Specify compile options
add_compile_options("--sysroot=${CROSS_ROOTFS}")
add_compile_options("--target=${TOOLCHAIN}")
add_compile_options("--gcc-toolchain=${CROSS_ROOTFS}/usr")
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$")
set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
endif()
if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
add_compile_options(-mthumb)
add_compile_options(-mfpu=vfpv3)
if(TARGET_ARCH_NAME STREQUAL "armel")
add_compile_options(-mfloat-abi=softfp)
if(DEFINED TIZEN_TOOLCHAIN)
add_compile_options(-Wno-deprecated-declarations) # compile-time option
add_compile_options(-D__extern_always_inline=inline) # compile-time option
endif()
endif()
elseif(TARGET_ARCH_NAME STREQUAL "x86")
add_compile_options(-m32)
add_compile_options(-Wno-error=unused-command-line-argument)
endif()
# Set LLDB include and library paths
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$")
if(TARGET_ARCH_NAME STREQUAL "x86")
set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}")
else() # arm/armel case
set(LLVM_CROSS_DIR "$ENV{LLVM_ARM_HOME}")
endif()
if(LLVM_CROSS_DIR)
set(WITH_LLDB_LIBS "${LLVM_CROSS_DIR}/lib/" CACHE STRING "")
set(WITH_LLDB_INCLUDES "${LLVM_CROSS_DIR}/include" CACHE STRING "")
set(LLDB_H "${WITH_LLDB_INCLUDES}" CACHE STRING "")
set(LLDB "${LLVM_CROSS_DIR}/lib/liblldb.so" CACHE STRING "")
else()
if(TARGET_ARCH_NAME STREQUAL "x86")
set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/i386-linux-gnu" CACHE STRING "")
set(CHECK_LLVM_DIR "${CROSS_ROOTFS}/usr/lib/llvm-3.8/include")
if(EXISTS "${CHECK_LLVM_DIR}" AND IS_DIRECTORY "${CHECK_LLVM_DIR}")
set(WITH_LLDB_INCLUDES "${CHECK_LLVM_DIR}")
else()
set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include")
endif()
else() # arm/armel case
set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}" CACHE STRING "")
set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include" CACHE STRING "")
endif()
endif()
endif()
set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
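As a hedged illustration of how a toolchain file like this is consumed (not part of this change), the PowerShell sketch below sets the two environment variables the file reads and points CMake at it; the rootfs path, architecture value, and file paths are placeholders.
# Illustrative cross-build configure step; every path below is a placeholder
$env:ROOTFS_DIR = "/crossrootfs/arm"          # rootfs produced by a build-rootfs run like the one above
$env:TARGET_BUILD_ARCH = "arm"                # selects the arm / arm-linux-gnueabihf branch of the file above
$toolchainFile = "<path to the toolchain file shown above>"
cmake -DCMAKE_TOOLCHAIN_FILE="$toolchainFile" -DCMAKE_BUILD_TYPE=Release "<path to the native source tree>"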

@ -0,0 +1,11 @@
deb http://archive.ubuntu.com/ubuntu/ bionic main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ bionic main restricted universe
deb http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe
deb http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted
deb-src http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted
deb http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse
deb-src http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse

@ -0,0 +1,11 @@
deb http://archive.ubuntu.com/ubuntu/ trusty main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ trusty main restricted universe
deb http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe
deb http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted
deb-src http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted
deb http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse
deb-src http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse

@ -0,0 +1,11 @@
deb http://archive.ubuntu.com/ubuntu/ xenial main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ xenial main restricted universe
deb http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe
deb http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted
deb-src http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted
deb http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse
deb-src http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse

eng/common/darc-init.ps1 Normal file
@ -0,0 +1,33 @@
param (
$darcVersion = $null,
$versionEndpoint = "https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16"
)
$verbosity = "m"
. $PSScriptRoot\tools.ps1
function InstallDarcCli ($darcVersion) {
$darcCliPackageName = "microsoft.dotnet.darc"
$dotnetRoot = InitializeDotNetCli -install:$true
$dotnet = "$dotnetRoot\dotnet.exe"
$toolList = & "$dotnet" tool list -g
if ($toolList -like "*$darcCliPackageName*") {
& "$dotnet" tool uninstall $darcCliPackageName -g
}
# If the user didn't explicitly specify the darc version,
# query the Maestro API for the correct version of darc to install.
if (-not $darcVersion) {
$darcVersion = $(Invoke-WebRequest -Uri $versionEndpoint -UseBasicParsing).Content
}
$arcadeServicesSource = 'https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json'
Write-Host "Installing Darc CLI version $darcVersion..."
Write-Host "You may need to restart your command window if this is the first dotnet tool you have installed."
& "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g
}
InstallDarcCli $darcVersion
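A minimal usage sketch, assuming it is run from the repository root; the version shown is illustrative (it matches the darc default used by generate-graph-files.ps1 later in this change), and omitting -darcVersion falls back to the Maestro version endpoint above.
# Install darc as a global dotnet tool, then confirm it is on the PATH
.\eng\common\darc-init.ps1 -darcVersion "1.1.0-beta.19175.6"
darc --help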

eng/common/darc-init.sh Normal file
@ -0,0 +1,64 @@
#!/usr/bin/env bash
source="${BASH_SOURCE[0]}"
darcVersion=''
versionEndpoint="https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16"
while [[ $# > 0 ]]; do
opt="$(echo "$1" | awk '{print tolower($0)}')"
case "$opt" in
--darcversion)
darcVersion=$2
shift
;;
--versionendpoint)
versionEndpoint=$2
shift
;;
*)
echo "Invalid argument: $1"
usage
exit 1
;;
esac
shift
done
# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
verbosity=m
. "$scriptroot/tools.sh"
if [ -z "$darcVersion" ]; then
darcVersion=$(curl -X GET "$versionEndpoint" -H "accept: text/plain")
fi
function InstallDarcCli {
local darc_cli_package_name="microsoft.dotnet.darc"
InitializeDotNetCli
local dotnet_root=$_InitializeDotNetCli
local uninstall_command=`$dotnet_root/dotnet tool uninstall $darc_cli_package_name -g`
local tool_list=$($dotnet_root/dotnet tool list -g)
if [[ $tool_list = *$darc_cli_package_name* ]]; then
echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name -g)
fi
local arcadeServicesSource="https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json"
echo "Installing Darc CLI version $darcVersion..."
echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g)
}
InstallDarcCli

@ -1,3 +1,2 @@
@echo off
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -publishBuildAssets %*"
exit /b %ErrorLevel%
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet-install.ps1""" %*"

eng/common/dotnet-install.ps1 vendored Normal file
@ -0,0 +1,27 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
[string] $verbosity = "minimal",
[string] $architecture = "",
[string] $version = "Latest",
[string] $runtime = "dotnet"
)
. $PSScriptRoot\tools.ps1
$dotnetRoot = Join-Path $RepoRoot ".dotnet"
$installdir = $dotnetRoot
try {
if ($architecture -and $architecture.Trim() -eq "x86") {
$installdir = Join-Path $installdir "x86"
}
InstallDotNet $installdir $version $architecture $runtime $true
}
catch {
Write-Host $_
Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
ExitWithExitCode 1
}
ExitWithExitCode 0
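A minimal usage sketch, assuming it is run from the repository root:
# Install the latest x86 dotnet runtime under the repo-local .dotnet\x86 directory
.\eng\common\dotnet-install.ps1 -architecture x86 -version Latest -runtime dotnet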

eng/common/dotnet-install.sh vendored Executable file
@ -0,0 +1,49 @@
#!/usr/bin/env bash
source="${BASH_SOURCE[0]}"
# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
version='Latest'
architecture=''
runtime='dotnet'
while [[ $# > 0 ]]; do
opt="$(echo "$1" | awk '{print tolower($0)}')"
case "$opt" in
-version|-v)
shift
version="$1"
;;
-architecture|-a)
shift
architecture="$1"
;;
-runtime|-r)
shift
runtime="$1"
;;
*)
echo "Invalid argument: $1"
usage
exit 1
;;
esac
shift
done
. "$scriptroot/tools.sh"
dotnetRoot="$repo_root/.dotnet"
InstallDotNet $dotnetRoot $version "$architecture" $runtime true || {
exit_code=$?
echo "dotnet-install.sh failed (exit code '$exit_code')." >&2
ExitWithExitCode $exit_code
}
ExitWithExitCode 0

@ -0,0 +1,87 @@
Param(
[Parameter(Mandatory=$true)][string] $barToken, # Token generated at https://maestro-prod.westus2.cloudapp.azure.com/Account/Tokens
[Parameter(Mandatory=$true)][string] $gitHubPat, # GitHub personal access token from https://github.com/settings/tokens (no auth scopes needed)
[Parameter(Mandatory=$true)][string] $azdoPat, # Azure Dev Ops tokens from https://dev.azure.com/dnceng/_details/security/tokens (code read scope needed)
[Parameter(Mandatory=$true)][string] $outputFolder, # Where the graphviz.txt file will be created
[string] $darcVersion = '1.1.0-beta.19175.6', # darc's version
[string] $graphvizVersion = '2.38', # GraphViz version
[switch] $includeToolset # Whether the graph should include toolset dependencies or not. i.e. arcade, optimization. For more about
# toolset dependencies see https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#toolset-vs-product-dependencies
)
$ErrorActionPreference = "Stop"
. $PSScriptRoot\tools.ps1
Import-Module -Name (Join-Path $PSScriptRoot "native\CommonLibrary.psm1")
function CheckExitCode ([string]$stage)
{
$exitCode = $LASTEXITCODE
if ($exitCode -ne 0) {
Write-Host "Something failed in stage: '$stage'. Check for errors above. Exiting now..."
ExitWithExitCode $exitCode
}
}
try {
Push-Location $PSScriptRoot
Write-Host "Installing darc..."
. .\darc-init.ps1 -darcVersion $darcVersion
CheckExitCode "Running darc-init"
$engCommonBaseDir = Join-Path $PSScriptRoot "native\"
$graphvizInstallDir = CommonLibrary\Get-NativeInstallDirectory
$nativeToolBaseUri = "https://netcorenativeassets.blob.core.windows.net/resource-packages/external"
$installBin = Join-Path $graphvizInstallDir "bin"
Write-Host "Installing dot..."
.\native\install-tool.ps1 -ToolName graphviz -InstallPath $installBin -BaseUri $nativeToolBaseUri -CommonLibraryDirectory $engCommonBaseDir -Version $graphvizVersion -Verbose
$darcExe = "$env:USERPROFILE\.dotnet\tools"
$darcExe = Resolve-Path "$darcExe\darc.exe"
Create-Directory $outputFolder
# Generate 3 graph descriptions:
# 1. Flat with coherency information
# 2. Graphviz (dot) file
# 3. Standard dependency graph
$graphVizFilePath = "$outputFolder\graphviz.txt"
$graphVizImageFilePath = "$outputFolder\graph.png"
$normalGraphFilePath = "$outputFolder\graph-full.txt"
$flatGraphFilePath = "$outputFolder\graph-flat.txt"
$baseOptions = @( "--github-pat", "$gitHubPat", "--azdev-pat", "$azdoPat", "--password", "$barToken" )
if ($includeToolset) {
Write-Host "Toolsets will be included in the graph..."
$baseOptions += @( "--include-toolset" )
}
Write-Host "Generating standard dependency graph..."
& "$darcExe" get-dependency-graph @baseOptions --output-file $normalGraphFilePath
CheckExitCode "Generating normal dependency graph"
Write-Host "Generating flat dependency graph and graphviz file..."
& "$darcExe" get-dependency-graph @baseOptions --flat --coherency --graphviz $graphVizFilePath --output-file $flatGraphFilePath
CheckExitCode "Generating flat and graphviz dependency graph"
Write-Host "Generating graph image $graphVizFilePath"
$dotFilePath = Join-Path $installBin "graphviz\$graphvizVersion\release\bin\dot.exe"
& "$dotFilePath" -Tpng -o"$graphVizImageFilePath" "$graphVizFilePath"
CheckExitCode "Generating graphviz image"
Write-Host "'$graphVizFilePath', '$flatGraphFilePath', '$normalGraphFilePath' and '$graphVizImageFilePath' created!"
}
catch {
if (!$includeToolset) {
Write-Host "This might be a toolset repo which includes only toolset dependencies. " -NoNewline -ForegroundColor Yellow
Write-Host "Since -includeToolset is not set there is no graph to create. Include -includeToolset and try again..." -ForegroundColor Yellow
}
Write-Host $_
Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
ExitWithExitCode 1
} finally {
Pop-Location
}
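A usage sketch with placeholder tokens; real values come from the services referenced in the parameter comments above.
# Produces graph-full.txt, graph-flat.txt, graphviz.txt and graph.png under the output folder
.\eng\common\generate-graph-files.ps1 `
  -barToken $env:MAESTRO_TOKEN -gitHubPat $env:GITHUB_PAT -azdoPat $env:AZDO_PAT `
  -outputFolder "$PWD\GraphFiles" -includeToolset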

@ -1,9 +1,26 @@
<Project Sdk="Microsoft.DotNet.Helix.Sdk">
<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">
<PropertyGroup>
<Language>msbuild</Language>
</PropertyGroup>
<ItemGroup>
<HelixWorkItem Include="WorkItem">
<HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
<PayloadDirectory>%(Identity)</PayloadDirectory>
</HelixCorrelationPayload>
</ItemGroup>
<ItemGroup>
<HelixWorkItem Include="WorkItem" Condition="'$(WorkItemDirectory)' != ''">
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
<Command>$(WorkItemCommand)</Command>
<Timeout Condition="'$(WorkItemTimeout)' != ''">$(WorkItemTimeout)</Timeout>
</HelixWorkItem>
</ItemGroup>
<ItemGroup>
<XUnitProject Include="$(XUnitProjects.Split(';'))">
<Arguments />
</XUnitProject>
</ItemGroup>
</Project>
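A hedged sketch of driving this project with a single ad-hoc work item; the .proj path is a placeholder because the file name is not shown in this hunk, and the Helix SDK also expects queue/source/type settings that are configured outside this snippet.
# Illustrative only; the property names match the item groups above
$helixProj = "<path to the Helix project shown above>"
dotnet msbuild $helixProj /t:Test `
  /p:WorkItemDirectory=artifacts\helix\payload `
  /p:WorkItemCommand="run-tests.cmd" `
  /p:WorkItemTimeout=00:30:00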

@ -41,9 +41,13 @@ Param (
[switch] $Force = $False,
[int] $DownloadRetries = 5,
[int] $RetryWaitTimeInSeconds = 30,
[string] $GlobalJsonFile = "$PSScriptRoot\..\..\global.json"
[string] $GlobalJsonFile
)
if (!$GlobalJsonFile) {
$GlobalJsonFile = Join-Path (Get-Item $PSScriptRoot).Parent.Parent.FullName "global.json"
}
Set-StrictMode -version 2.0
$ErrorActionPreference="Stop"
@ -60,6 +64,7 @@ try {
}
$Env:CommonLibrary_NativeInstallDir = $NativeBaseDir
$InstallBin = Join-Path $NativeBaseDir "bin"
$InstallerPath = Join-Path $EngCommonBaseDir "install-tool.ps1"
# Process tools list
Write-Host "Processing $GlobalJsonFile"
@ -74,33 +79,42 @@ try {
$NativeTools.PSObject.Properties | ForEach-Object {
$ToolName = $_.Name
$ToolVersion = $_.Value
$InstallerFilename = "install-$ToolName.ps1"
$LocalInstallerCommand = Join-Path $EngCommonBaseDir $InstallerFilename
$LocalInstallerCommand += " -InstallPath $InstallBin"
$LocalInstallerCommand += " -BaseUri $BaseUri"
$LocalInstallerCommand += " -CommonLibraryDirectory $EngCommonBaseDir"
$LocalInstallerCommand += " -Version $ToolVersion"
$LocalInstallerArguments = @{ ToolName = "$ToolName" }
$LocalInstallerArguments += @{ InstallPath = "$InstallBin" }
$LocalInstallerArguments += @{ BaseUri = "$BaseUri" }
$LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" }
$LocalInstallerArguments += @{ Version = "$ToolVersion" }
if ($Verbose) {
$LocalInstallerCommand += " -Verbose"
$LocalInstallerArguments += @{ Verbose = $True }
}
if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') {
if($Force) {
$LocalInstallerCommand += " -Force"
$LocalInstallerArguments += @{ Force = $True }
}
}
if ($Clean) {
$LocalInstallerCommand += " -Clean"
$LocalInstallerArguments += @{ Clean = $True }
}
Write-Verbose "Installing $ToolName version $ToolVersion"
Write-Verbose "Executing '$LocalInstallerCommand'"
Invoke-Expression "$LocalInstallerCommand"
Write-Verbose "Executing '$InstallerPath $LocalInstallerArguments'"
& $InstallerPath @LocalInstallerArguments
if ($LASTEXITCODE -Ne "0") {
Write-Error "Execution failed"
exit 1
$errMsg = "$ToolName installation failed"
if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) {
Write-Warning $errMsg
$toolInstallationFailure = $true
} else {
Write-Error $errMsg
exit 1
}
}
}
if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) {
exit 1
}
}
else {
Write-Host "No native tools defined in global.json"
@ -112,9 +126,7 @@ try {
}
if (Test-Path $InstallBin) {
Write-Host "Native tools are available from" (Convert-Path -Path $InstallBin)
if ($env:BUILD_BUILDNUMBER) {
Write-Host "##vso[task.prependpath]" (Convert-Path -Path $InstallBin)
}
Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)"
}
else {
Write-Error "Native tools install directory does not exist, installation failed"

@ -9,7 +9,7 @@ clean=false
force=false
download_retries=5
retry_wait_time_seconds=30
global_json_file="${scriptroot}/../../global.json"
global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json"
declare -A native_assets
. $scriptroot/native/common-library.sh
@ -71,6 +71,7 @@ function ReadGlobalJsonNativeTools {
local native_tools_list=$(echo $native_tools_section | awk -F"[{}]" '{print $2}')
native_tools_list=${native_tools_list//[\" ]/}
native_tools_list=${native_tools_list//,/$'\n'}
native_tools_list="$(echo -e "${native_tools_list}" | tr -d '[:space:]')"
local old_IFS=$IFS
while read -r line; do
@ -116,8 +117,6 @@ else
installer_command+=" --clean"
fi
echo "Installing $tool version $tool_version"
echo "Executing '$installer_command'"
$installer_command
if [[ $? != 0 ]]; then
@ -127,19 +126,16 @@ else
done
fi
if [[ ! -z $clean ]]; then
if [[ $clean = true ]]; then
exit 0
fi
if [[ -d $install_bin ]]; then
echo "Native tools are available from $install_bin"
if [[ !-z BUILD_BUILDNUMBER ]]; then
echo "##vso[task.prependpath]$install_bin"
fi
echo "##vso[task.prependpath]$install_bin"
else
echo "Native tools install directory does not exist, installation failed" >&2
exit 1
fi
exit 0

@ -0,0 +1,4 @@
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. See the LICENSE file in the project root for more information. -->
<Project>
<Import Project="Sdk.props" Sdk="Microsoft.DotNet.Arcade.Sdk" />
</Project>

@ -0,0 +1,27 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net472</TargetFramework>
<ImportDirectoryBuildTargets>false</ImportDirectoryBuildTargets>
</PropertyGroup>
<ItemGroup>
<!-- Clear references; the SDK may add some depending on UsingToolXxx settings, but we only want to restore the following -->
<PackageReference Remove="@(PackageReference)"/>
<PackageReference Include="Microsoft.DotNet.IBCMerge" Version="$(MicrosoftDotNetIBCMergeVersion)" Condition="'$(UsingToolIbcOptimization)' == 'true'" />
<PackageReference Include="Drop.App" Version="$(DropAppVersion)" ExcludeAssets="all" Condition="'$(UsingToolVisualStudioIbcTraining)' == 'true'"/>
</ItemGroup>
<PropertyGroup>
<RestoreSources></RestoreSources>
<RestoreSources Condition="'$(UsingToolIbcOptimization)' == 'true'">
https://devdiv.pkgs.visualstudio.com/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json;
</RestoreSources>
<RestoreSources Condition="'$(UsingToolVisualStudioIbcTraining)' == 'true'">
$(RestoreSources);
https://devdiv.pkgs.visualstudio.com/_packaging/VS/nuget/v3/index.json;
</RestoreSources>
</PropertyGroup>
<!-- Repository extensibility point -->
<Import Project="$(RepositoryEngineeringDir)InternalTools.props" Condition="Exists('$(RepositoryEngineeringDir)InternalTools.props')" />
</Project>

eng/common/msbuild.ps1 Normal file
@ -0,0 +1,27 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
[string] $verbosity = "minimal",
[bool] $warnAsError = $true,
[bool] $nodeReuse = $true,
[switch] $ci,
[switch] $prepareMachine,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs
)
. $PSScriptRoot\tools.ps1
try {
if ($ci) {
$nodeReuse = $false
}
MSBuild @extraArgs
}
catch {
Write-Host $_
Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
ExitWithExitCode 1
}
ExitWithExitCode 0
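A usage sketch; the switches consumed here are stripped and everything else is forwarded to MSBuild through the Arcade tools script.
# Build one project with CI settings (the project path is a placeholder)
$project = "<path to a project in this repository>"
.\eng\common\msbuild.ps1 -ci -verbosity minimal $project /t:Build /p:Configuration=Release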

eng/common/msbuild.sh Normal file
@ -0,0 +1,58 @@
#!/usr/bin/env bash
source="${BASH_SOURCE[0]}"
# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
verbosity='minimal'
warn_as_error=true
node_reuse=true
prepare_machine=false
extra_args=''
while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
case $lowerI in
--verbosity)
verbosity=$2
shift 2
;;
--warnaserror)
warn_as_error=$2
shift 2
;;
--nodereuse)
node_reuse=$2
shift 2
;;
--ci)
ci=true
shift 1
;;
--preparemachine)
prepare_machine=true
shift 1
;;
*)
extra_args="$extra_args $1"
shift 1
;;
esac
done
. "$scriptroot/tools.sh"
if [[ "$ci" == true ]]; then
node_reuse=false
fi
MSBuild $extra_args
ExitWithExitCode 0

@ -36,7 +36,7 @@ function DownloadAndExtract {
)
# Define verbose switch if undefined
$Verbose = $VerbosePreference -Eq "Continue"
$TempToolPath = CommonLibrary\Get-TempPathFilename -Path $Uri
# Download native tool
@ -57,7 +57,7 @@ function DownloadAndExtract {
-OutputDirectory $InstallDirectory `
-Force:$Force `
-Verbose:$Verbose
if ($UnzipStatus -Eq $False) {
Write-Error "Unzip failed"
return $False
@ -155,8 +155,11 @@ Generate a shim for a native tool
.DESCRIPTION
Creates a wrapper script (shim) that passes arguments forward to native tool assembly
.PARAMETER ShimPath
Path to shim file
.PARAMETER ShimName
The name of the shim
.PARAMETER ShimDirectory
The directory where shims are stored
.PARAMETER ToolFilePath
Path to file that shim forwards to
@ -171,38 +174,42 @@ function New-ScriptShim {
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $ShimPath,
[string] $ShimName,
[Parameter(Mandatory=$True)]
[string] $ShimDirectory,
[Parameter(Mandatory=$True)]
[string] $ToolFilePath,
[Parameter(Mandatory=$True)]
[string] $BaseUri,
[switch] $Force
)
try {
Write-Verbose "Generating '$ShimPath' shim"
if ((Test-Path $ShimPath) -And (-Not $Force)) {
Write-Error "$ShimPath already exists"
return $False
}
Write-Verbose "Generating '$ShimName' shim"
if (-Not (Test-Path $ToolFilePath)){
Write-Error "Specified tool file path '$ToolFilePath' does not exist"
return $False
}
$ShimContents = "@echo off`n"
$ShimContents += "setlocal enableextensions enabledelayedexpansion`n"
$ShimContents += "set SHIMARGS=`n"
$ShimContents += "for %%x in (%*) do (set SHIMARGS=!SHIMARGS! `"%%~x`")`n"
$ShimContents += "`"$ToolFilePath`" %SHIMARGS%`n"
$ShimContents += "endlocal"
# Write shim file
$ShimContents | Out-File $ShimPath -Encoding "ASCII"
if (-Not $?) {
Write-Error "Failed to generate shim"
return $False
# WinShimmer is a small .NET Framework program that creates .exe shims to bootstrapped programs
# Many of the checks for installed programs expect a .exe extension for Windows tools, rather
# than a .bat or .cmd file.
# Source: https://github.com/dotnet/arcade/tree/master/src/WinShimmer
if (-Not (Test-Path "$ShimDirectory\WinShimmer\winshimmer.exe")) {
$InstallStatus = DownloadAndExtract -Uri "$BaseUri/windows/winshimmer/WinShimmer.zip" `
-InstallDirectory $ShimDirectory\WinShimmer `
-Force:$Force `
-DownloadRetries 2 `
-RetryWaitTimeInSeconds 5 `
-Verbose:$Verbose
}
if ((Test-Path (Join-Path $ShimDirectory "$ShimName.exe"))) {
Write-Host "$ShimName.exe already exists; replacing..."
Remove-Item (Join-Path $ShimDirectory "$ShimName.exe")
}
Invoke-Expression "$ShimDirectory\WinShimmer\winshimmer.exe $ShimName $ToolFilePath $ShimDirectory"
return $True
}
catch {

@ -1,6 +1,6 @@
<#
.SYNOPSIS
Install cmake native tool
Install native tool
.DESCRIPTION
Install cmake native tool from Azure blob storage
@ -31,6 +31,8 @@ Returns 0 if install succeeds, 1 otherwise
#>
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $ToolName,
[Parameter(Mandatory=$True)]
[string] $InstallPath,
[Parameter(Mandatory=$True)]
@ -50,8 +52,6 @@ Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1")
try {
# Define verbose switch if undefined
$Verbose = $VerbosePreference -Eq "Continue"
$ToolName = "cmake"
$Arch = CommonLibrary\Get-MachineArchitecture
$ToolOs = "win64"
@ -60,9 +60,8 @@ try {
}
$ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch"
$ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\"
$ToolFilePath = Join-Path $ToolInstallDirectory "$ToolNameMoniker\bin\$ToolName.exe"
$ShimPath = Join-Path $InstallPath "$ToolName.cmd"
$Uri = "$BaseUri/windows/$Toolname/$ToolNameMoniker.zip"
$Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip"
$ShimPath = Join-Path $InstallPath "$ToolName.exe"
if ($Clean) {
Write-Host "Cleaning $ToolInstallDirectory"
@ -82,7 +81,7 @@ try {
}
# Install tool
if ((Test-Path $ToolFilePath) -And (-Not $Force)) {
if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) {
Write-Verbose "$ToolName ($Version) already exists, skipping install"
}
else {
@ -98,22 +97,34 @@ try {
exit 1
}
}
$ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName }
if (@($ToolFilePath).Length -Gt 1) {
Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))"
exit 1
} elseif (@($ToolFilePath).Length -Lt 1) {
Write-Error "$ToolName was not found in $ToolFilePath."
exit 1
}
# Generate shim
# Always rewrite shims so that we are referencing the expected version
$GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimPath $ShimPath `
-ToolFilePath $ToolFilePath `
-Force `
$GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName `
-ShimDirectory $InstallPath `
-ToolFilePath "$ToolFilePath" `
-BaseUri $BaseUri `
-Force:$Force `
-Verbose:$Verbose
if ($GenerateShimStatus -Eq $False) {
Write-Error "Generate shim failed"
return 1
}
exit 0
}
catch {
Write-Host $_
Write-Host $_.Exception
exit 1
}
exit 1
}

eng/common/sdk-task.ps1 Normal file
@ -0,0 +1,79 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
[string] $configuration = "Debug",
[string] $task,
[string] $verbosity = "minimal",
[string] $msbuildEngine = $null,
[switch] $restore,
[switch] $prepareMachine,
[switch] $help,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
)
$ci = $true
$binaryLog = $true
$warnAsError = $true
. $PSScriptRoot\tools.ps1
function Print-Usage() {
Write-Host "Common settings:"
Write-Host " -task <value> Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
Write-Host " -restore Restore dependencies"
Write-Host " -verbosity <value> Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
Write-Host " -help Print help and exit"
Write-Host ""
Write-Host "Advanced settings:"
Write-Host " -prepareMachine Prepare machine for CI run"
Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild."
}
function Build([string]$target) {
$logSuffix = if ($target -eq "Execute") { "" } else { ".$target" }
$log = Join-Path $LogDir "$task$logSuffix.binlog"
$outputPath = Join-Path $ToolsetDir "$task\\"
MSBuild $taskProject `
/bl:$log `
/t:$target `
/p:Configuration=$configuration `
/p:RepoRoot=$RepoRoot `
/p:BaseIntermediateOutputPath=$outputPath `
@properties
}
try {
if ($help -or (($null -ne $properties) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) {
Print-Usage
exit 0
}
if ($task -eq "") {
Write-Host "Missing required parameter '-task <value>'" -ForegroundColor Red
Print-Usage
ExitWithExitCode 1
}
$taskProject = GetSdkTaskProject $task
if (!(Test-Path $taskProject)) {
Write-Host "Unknown task: $task" -ForegroundColor Red
ExitWithExitCode 1
}
if ($restore) {
Build "Restore"
}
Build "Execute"
}
catch {
Write-Host $_
Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
ExitWithExitCode 1
}
ExitWithExitCode 0
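A usage sketch along the lines of what the publish-build-assets template later in this change passes to the script (its extra /p: properties are omitted here):
# Restore and run the PublishBuildAssets task from the Arcade SDK's SdkTasks directory
.\eng\common\sdk-task.ps1 -task PublishBuildAssets -restore -msbuildEngine dotnet /p:Configuration=Release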

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<solution>
<add key="disableSourceControlIntegration" value="true" />
</solution>
<packageSources>
<clear />
<add key="guardian" value="https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json" />
</packageSources>
<disabledPackageSources>
<clear />
</disabledPackageSources>
</configuration>

@ -0,0 +1,97 @@
Param(
[string] $GuardianPackageName, # Required: the name of the Guardian CLI package (not needed if GuardianCliLocation is specified)
[string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified)
[string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified
[string] $Repository, # Required: the name of the repository (e.g. dotnet/arcade)
[string] $BranchName="master", # Optional: name of branch or version of gdn settings; defaults to master
[string] $SourceDirectory, # Required: the directory where source files are located
[string] $ArtifactsDirectory, # Required: the directory where build artifacts are located
[string] $DncEngAccessToken, # Required: access token for dnceng; should be provided via KeyVault
[string[]] $SourceToolsList, # Optional: list of SDL tools to run on source code
[string[]] $ArtifactToolsList, # Optional: list of SDL tools to run on built artifacts
[bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
[string] $TsaBranchName=$env:BUILD_SOURCEBRANCHNAME, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
[string] $TsaRepositoryName, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
[string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber)
[bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed
[bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs.
[string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs.
[string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs.
[string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs.
[string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs.
[string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs.
[string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
[string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
[string] $GuardianLoggerLevel="Standard" # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error
)
$ErrorActionPreference = "Stop"
Set-StrictMode -Version 2.0
$LASTEXITCODE = 0
# Normalize repository names to the org/repo format
if (!($Repository.contains('/'))) {
$RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2';
}
else{
$RepoName = $Repository;
}
if ($GuardianPackageName) {
$guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path "tools" "guardian.cmd"))
} else {
$guardianCliLocation = $GuardianCliLocation
}
$ValidPath = Test-Path $guardianCliLocation
if ($ValidPath -eq $False)
{
Write-Host "Invalid Guardian CLI Location."
exit 1
}
& $(Join-Path $PSScriptRoot "init-sdl.ps1") -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $ArtifactsDirectory -DncEngAccessToken $DncEngAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
$gdnFolder = Join-Path $ArtifactsDirectory ".gdn"
if ($TsaOnboard) {
if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
Write-Host "$guardianCliLocation tsa-onboard --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $ArtifactsDirectory --logger-level $GuardianLoggerLevel"
& $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $ArtifactsDirectory --logger-level $GuardianLoggerLevel
if ($LASTEXITCODE -ne 0) {
Write-Host "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
exit $LASTEXITCODE
}
} else {
Write-Host "Could not onboard to TSA -- not all required values ($$TsaCodebaseName, $$TsaNotificationEmail, $$TsaCodebaseAdmin, $$TsaBugAreaPath) were specified."
exit 1
}
}
if ($ArtifactToolsList -and $ArtifactToolsList.Count -gt 0) {
& $(Join-Path $PSScriptRoot "run-sdl.ps1") -GuardianCliLocation $guardianCliLocation -WorkingDirectory $ArtifactsDirectory -TargetDirectory $ArtifactsDirectory -GdnFolder $gdnFolder -ToolsList $ArtifactToolsList -DncEngAccessToken $DncEngAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel
}
if ($SourceToolsList -and $SourceToolsList.Count -gt 0) {
& $(Join-Path $PSScriptRoot "run-sdl.ps1") -GuardianCliLocation $guardianCliLocation -WorkingDirectory $ArtifactsDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -DncEngAccessToken $DncEngAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel
}
if ($UpdateBaseline) {
& (Join-Path $PSScriptRoot "push-gdn.ps1") -Repository $RepoName -BranchName $BranchName -GdnFolder $GdnFolder -DncEngAccessToken $DncEngAccessToken -PushReason "Update baseline"
}
if ($TsaPublish) {
if ($TsaBranchName -and $BuildNumber) {
if (-not $TsaRepositoryName) {
$TsaRepositoryName = "$($Repository)-$($BranchName)"
}
Write-Host "$guardianCliLocation tsa-publish --all-tools --repository-name `"$TsaRepositoryName`" --branch-name `"$TsaBranchName`" --build-number `"$BuildNumber`" --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $SourceDirectory --logger-level $GuardianLoggerLevel"
& $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $ArtifactsDirectory --logger-level $GuardianLoggerLevel
if ($LASTEXITCODE -ne 0) {
Write-Host "Guardian tsa-publish failed with exit code $LASTEXITCODE."
exit $LASTEXITCODE
}
} else {
Write-Host "Could not publish to TSA -- not all required values ($$TsaBranchName, $$BuildNumber) were specified."
exit 1
}
}
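A hedged usage sketch with placeholder paths and token; the driver script's file name is not shown in this hunk, so it is referenced generically, and the tool names follow the credscan/policheck cases handled by run-sdl.ps1 below.
# Illustrative only; run CredScan over artifacts and PoliCheck over sources without TSA publishing
$sdlDriver = "<path to the SDL driver script shown above>"
& $sdlDriver -GuardianCliLocation "<path to guardian.cmd>" `
  -Repository dotnet/arcade -BranchName master `
  -SourceDirectory "$PWD\src" -ArtifactsDirectory "$PWD\artifacts" `
  -DncEngAccessToken $env:DNCENG_PAT `
  -ArtifactToolsList @("credscan") -SourceToolsList @("policheck")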

@ -0,0 +1,48 @@
Param(
[string] $GuardianCliLocation,
[string] $Repository,
[string] $BranchName="master",
[string] $WorkingDirectory,
[string] $DncEngAccessToken,
[string] $GuardianLoggerLevel="Standard"
)
$ErrorActionPreference = "Stop"
Set-StrictMode -Version 2.0
$LASTEXITCODE = 0
# Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file
$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$DncEngAccessToken"))
$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn")
$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0-preview.1"
$zipFile = "$WorkingDirectory/gdn.zip"
Add-Type -AssemblyName System.IO.Compression.FileSystem
$gdnFolder = (Join-Path $WorkingDirectory ".gdn")
Try
{
# We try to download the zip; if the request fails (e.g. the file doesn't exist), we catch it and init guardian instead
Write-Host "Downloading gdn folder from internal config repostiory..."
Invoke-WebRequest -Headers @{ "Accept"="application/zip"; "Authorization"="Basic $encodedPat" } -Uri $uri -OutFile $zipFile
if (Test-Path $gdnFolder) {
# Remove the gdn folder if it exists (it shouldn't unless there's too much caching; this is just in case)
Remove-Item -Force -Recurse $gdnFolder
}
[System.IO.Compression.ZipFile]::ExtractToDirectory($zipFile, $WorkingDirectory)
Write-Host $gdnFolder
} Catch [System.Net.WebException] {
# if the folder does not exist, we'll do a guardian init and push it to the remote repository
Write-Host "Initializing Guardian..."
Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel"
& $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel
if ($LASTEXITCODE -ne 0) {
Write-Error "Guardian init failed with exit code $LASTEXITCODE."
}
# We create the mainbaseline so it can be edited later
Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline"
& $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline
if ($LASTEXITCODE -ne 0) {
Write-Error "Guardian baseline failed with exit code $LASTEXITCODE."
}
& $(Join-Path $PSScriptRoot "push-gdn.ps1") -Repository $Repository -BranchName $BranchName -GdnFolder $gdnFolder -DncEngAccessToken $DncEngAccessToken -PushReason "Initialize gdn folder"
}

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Microsoft.Guardian.Cli" version="0.3.2"/>
</packages>

@ -0,0 +1,51 @@
Param(
[string] $Repository,
[string] $BranchName="master",
[string] $GdnFolder,
[string] $DncEngAccessToken,
[string] $PushReason
)
$ErrorActionPreference = "Stop"
Set-StrictMode -Version 2.0
$LASTEXITCODE = 0
# We create the temp directory where we'll store the sdl-config repository
$sdlDir = Join-Path $env:TEMP "sdl"
if (Test-Path $sdlDir) {
Remove-Item -Force -Recurse $sdlDir
}
Write-Host "git clone https://dnceng:`$DncEngAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir"
git clone https://dnceng:$DncEngAccessToken@dev.azure.com/dnceng/internal/_git/sdl-tool-cfg $sdlDir
if ($LASTEXITCODE -ne 0) {
Write-Error "Git clone failed with exit code $LASTEXITCODE."
}
# We copy the .gdn folder from our local run into the git repository so it can be committed
$sdlRepositoryFolder = Join-Path (Join-Path (Join-Path $sdlDir $Repository) $BranchName) ".gdn"
if (Get-Command Robocopy) {
Robocopy /S $GdnFolder $sdlRepositoryFolder
} else {
rsync -r $GdnFolder $sdlRepositoryFolder
}
# cd to the sdl-config directory so we can run git there
Push-Location $sdlDir
# git add . --> git commit --> git push
Write-Host "git add ."
git add .
if ($LASTEXITCODE -ne 0) {
Write-Error "Git add failed with exit code $LASTEXITCODE."
}
Write-Host "git -c user.email=`"dn-bot@microsoft.com`" -c user.name=`"Dotnet Bot`" commit -m `"$PushReason for $Repository/$BranchName`""
git -c user.email="dn-bot@microsoft.com" -c user.name="Dotnet Bot" commit -m "$PushReason for $Repository/$BranchName"
if ($LASTEXITCODE -ne 0) {
Write-Error "Git commit failed with exit code $LASTEXITCODE."
}
Write-Host "git push"
git push
if ($LASTEXITCODE -ne 0) {
Write-Error "Git push failed with exit code $LASTEXITCODE."
}
# Return to the original directory
Pop-Location
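A hedged sketch of a direct invocation, run from the folder that contains these SDL scripts; the values are placeholders, and in this change the script is normally driven by init-sdl.ps1 or the SDL driver above.
# Illustrative only; pushes a locally updated .gdn folder back to the sdl-tool-cfg repository
.\push-gdn.ps1 -Repository dotnet/arcade -BranchName master `
  -GdnFolder "$PWD\artifacts\.gdn" -DncEngAccessToken $env:DNCENG_PAT `
  -PushReason "Update baseline"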

@ -0,0 +1,65 @@
Param(
[string] $GuardianCliLocation,
[string] $WorkingDirectory,
[string] $TargetDirectory,
[string] $GdnFolder,
[string[]] $ToolsList,
[string] $UpdateBaseline,
[string] $GuardianLoggerLevel="Standard"
)
$ErrorActionPreference = "Stop"
Set-StrictMode -Version 2.0
$LASTEXITCODE = 0
# We store config files in the r directory of .gdn
Write-Host $ToolsList
$gdnConfigPath = Join-Path $GdnFolder "r"
$ValidPath = Test-Path $GuardianCliLocation
if ($ValidPath -eq $False)
{
Write-Host "Invalid Guardian CLI Location."
exit 1
}
foreach ($tool in $ToolsList) {
$gdnConfigFile = Join-Path $gdnConfigPath "$tool-configure.gdnconfig"
$config = $False
Write-Host $tool
# We have to manually configure tools that run on source to look at the source directory only
if ($tool -eq "credscan") {
Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" TargetDirectory : $TargetDirectory `""
& $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " TargetDirectory : $TargetDirectory "
if ($LASTEXITCODE -ne 0) {
Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE."
exit $LASTEXITCODE
}
$config = $True
}
if ($tool -eq "policheck") {
Write-Host "$GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args `" Target : $TargetDirectory `""
& $GuardianCliLocation configure --working-directory $WorkingDirectory --tool $tool --output-path $gdnConfigFile --logger-level $GuardianLoggerLevel --noninteractive --force --args " Target : $TargetDirectory "
if ($LASTEXITCODE -ne 0) {
Write-Host "Guardian configure for $tool failed with exit code $LASTEXITCODE."
exit $LASTEXITCODE
}
$config = $True
}
Write-Host "$GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel --config $gdnConfigFile $config"
if ($config) {
& $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel --config $gdnConfigFile
if ($LASTEXITCODE -ne 0) {
Write-Host "Guardian run for $tool using $gdnConfigFile failed with exit code $LASTEXITCODE."
exit $LASTEXITCODE
}
} else {
& $GuardianCliLocation run --working-directory $WorkingDirectory --tool $tool --baseline mainbaseline --update-baseline $UpdateBaseline --logger-level $GuardianLoggerLevel
if ($LASTEXITCODE -ne 0) {
Write-Host "Guardian run for $tool failed with exit code $LASTEXITCODE."
exit $LASTEXITCODE
}
}
}
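A hedged sketch of a direct invocation, run from the folder that contains these SDL scripts; placeholders throughout, since the driver script above normally supplies these values.
# Illustrative only; runs CredScan against the artifacts directory using the mainbaseline created by init-sdl.ps1
.\run-sdl.ps1 -GuardianCliLocation "<path to guardian.cmd>" `
  -WorkingDirectory "$PWD\artifacts" -TargetDirectory "$PWD\artifacts" `
  -GdnFolder "$PWD\artifacts\.gdn" -ToolsList @("credscan") `
  -UpdateBaseline $False -GuardianLoggerLevel Standard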

@ -0,0 +1,48 @@
parameters:
# Optional: dependencies of the job
dependsOn: ''
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
pool: {}
# Optional: Include toolset dependencies in the generated graph files
includeToolset: false
jobs:
- job: Generate_Graph_Files
dependsOn: ${{ parameters.dependsOn }}
displayName: Generate Graph Files
pool: ${{ parameters.pool }}
variables:
# Publish-Build-Assets provides: MaestroAccessToken, BotAccount-dotnet-maestro-bot-PAT
# DotNet-AllOrgs-Darc-Pats provides: dn-bot-devdiv-dnceng-rw-code-pat
- group: Publish-Build-Assets
- group: DotNet-AllOrgs-Darc-Pats
- name: _GraphArguments
value: -gitHubPat $(BotAccount-dotnet-maestro-bot-PAT)
-azdoPat $(dn-bot-devdiv-dnceng-rw-code-pat)
-barToken $(MaestroAccessToken)
-outputFolder '$(Build.StagingDirectory)/GraphFiles/'
- ${{ if ne(parameters.includeToolset, 'false') }}:
- name: _GraphArguments
value: ${{ variables._GraphArguments }} -includeToolset
steps:
- task: PowerShell@2
displayName: Generate Graph Files
inputs:
filePath: eng\common\generate-graph-files.ps1
arguments: $(_GraphArguments)
continueOnError: true
- task: PublishBuildArtifacts@1
displayName: Publish Graph to Artifacts
inputs:
PathtoPublish: '$(Build.StagingDirectory)/GraphFiles'
PublishLocation: Container
ArtifactName: GraphFiles
continueOnError: true
condition: always()

@ -0,0 +1,205 @@
parameters:
# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
cancelTimeoutInMinutes: ''
condition: ''
continueOnError: false
container: ''
dependsOn: ''
displayName: ''
steps: []
pool: ''
strategy: ''
timeoutInMinutes: ''
variables: []
workspace: ''
# Job base template specific parameters
# Optional: Enable installing Microbuild plugin
# if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
# _TeamName - the name of your team
# _SignType - 'test' or 'real'
enableMicrobuild: false
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
# Optional: Enable publishing to the build asset registry
enablePublishBuildAssets: false
# Optional: Include PublishTestResults task
enablePublishTestResults: false
# Optional: enable sending telemetry
enableTelemetry: false
# Optional: define the helix repo for telemetry (example: 'dotnet/arcade')
helixRepo: ''
# Optional: define the helix type for telemetry (example: 'build/product/')
helixType: ''
# Required: name of the job
name: ''
# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
jobs:
- job: ${{ parameters.name }}
${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
${{ if ne(parameters.condition, '') }}:
condition: ${{ parameters.condition }}
${{ if ne(parameters.container, '') }}:
container: ${{ parameters.container }}
${{ if ne(parameters.continueOnError, '') }}:
continueOnError: ${{ parameters.continueOnError }}
${{ if ne(parameters.dependsOn, '') }}:
dependsOn: ${{ parameters.dependsOn }}
${{ if ne(parameters.displayName, '') }}:
displayName: ${{ parameters.displayName }}
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if ne(parameters.strategy, '') }}:
strategy: ${{ parameters.strategy }}
${{ if ne(parameters.timeoutInMinutes, '') }}:
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
variables:
- ${{ if eq(parameters.enableTelemetry, 'true') }}:
- name: DOTNET_CLI_TELEMETRY_PROFILE
value: '$(Build.Repository.Uri)'
- ${{ each variable in parameters.variables }}:
# handle name-value variable syntax
# example:
# - name: [key]
# value: [value]
- ${{ if ne(variable.name, '') }}:
- name: ${{ variable.name }}
value: ${{ variable.value }}
# handle variable groups
- ${{ if ne(variable.group, '') }}:
- group: ${{ variable.group }}
# handle key-value variable syntax.
# example:
# - [key]: [value]
- ${{ if and(eq(variable.name, ''), eq(variable.group, '')) }}:
- ${{ each pair in variable }}:
- name: ${{ pair.key }}
value: ${{ pair.value }}
# DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- group: DotNet-HelixApi-Access
${{ if ne(parameters.workspace, '') }}:
workspace: ${{ parameters.workspace }}
steps:
- ${{ if eq(parameters.enableTelemetry, 'true') }}:
# Telemetry tasks are built from https://github.com/dotnet/arcade-extensions
- task: sendStartTelemetry@0
displayName: 'Send Helix Start Telemetry'
inputs:
helixRepo: ${{ parameters.helixRepo }}
${{ if ne(parameters.helixType, '') }}:
helixType: ${{ parameters.helixType }}
buildConfig: $(_BuildConfig)
runAsPublic: ${{ parameters.runAsPublic }}
continueOnError: ${{ parameters.continueOnError }}
condition: always()
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildSigningPlugin@2
displayName: Install MicroBuild plugin
inputs:
signType: $(_SignType)
zipSources: false
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
env:
TeamName: $(_TeamName)
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- ${{ each step in parameters.steps }}:
- ${{ step }}
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildCleanup@1
displayName: Execute Microbuild cleanup tasks
condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
env:
TeamName: $(_TeamName)
- ${{ if eq(parameters.enableTelemetry, 'true') }}:
# Telemetry tasks are built from https://github.com/dotnet/arcade-extensions
- task: sendEndTelemetry@0
displayName: 'Send Helix End Telemetry'
continueOnError: ${{ parameters.continueOnError }}
condition: always()
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- task: PublishBuildArtifacts@1
displayName: Publish Logs
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
PublishLocation: Container
ArtifactName: $(Agent.Os)_$(Agent.JobName)
continueOnError: true
condition: always()
- ${{ if eq(parameters.enablePublishTestResults, 'true') }}:
- task: PublishTestResults@2
displayName: Publish Test Results
inputs:
testResultsFormat: 'xUnit'
testResultsFiles: '*.xml'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
continueOnError: true
condition: always()
- ${{ if and(eq(parameters.enablePublishBuildAssets, true), ne(variables['_PublishUsingPipelines'], 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: CopyFiles@2
displayName: Gather Asset Manifests
inputs:
SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
- task: PublishBuildArtifacts@1
displayName: Push Asset Manifests
inputs:
PathtoPublish: '$(Build.StagingDirectory)/AssetManifests'
PublishLocation: Container
ArtifactName: AssetManifests
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))

@ -0,0 +1,70 @@
parameters:
configuration: 'Debug'
# Optional: condition for the job to run
condition: ''
# Optional: 'true' if future jobs should run even if this job fails
continueOnError: false
# Optional: dependencies of the job
dependsOn: ''
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
pool: {}
# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
# Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
publishUsingPipelines: false
jobs:
- job: Asset_Registry_Publish
dependsOn: ${{ parameters.dependsOn }}
displayName: Publish to Build Asset Registry
pool: ${{ parameters.pool }}
variables:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- name: _BuildConfig
value: ${{ parameters.configuration }}
- group: Publish-Build-Assets
steps:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: DownloadBuildArtifacts@0
displayName: Download artifact
inputs:
artifactName: AssetManifests
downloadPath: '$(Build.StagingDirectory)/Download'
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: PowerShell@2
displayName: Publish Build Assets
inputs:
filePath: eng\common\sdk-task.ps1
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
/p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
/p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
/p:Configuration=$(_BuildConfig)
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- task: PublishBuildArtifacts@1
displayName: Publish Logs to VSTS
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
PublishLocation: Container
ArtifactName: $(Agent.Os)_PublishBuildAssets
continueOnError: true
condition: always()

@ -0,0 +1,90 @@
parameters:
# Optional: 'true' if failures in job.yml job should not fail the job
continueOnError: false
# Optional: Enable installing Microbuild plugin
# if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
# _TeamName - the name of your team
# _SignType - 'test' or 'real'
enableMicrobuild: false
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
# Optional: Enable publishing to the build asset registry
enablePublishBuildAssets: false
# Optional: Enable publishing using release pipelines
enablePublishUsingPipelines: false
graphFileGeneration:
# Optional: Enable generating the graph files at the end of the build
enabled: false
# Optional: Include toolset dependencies in the generated graph files
includeToolset: false
# Optional: Include PublishTestResults task
enablePublishTestResults: false
# Optional: enable sending telemetry
# if enabled then the 'helixRepo' parameter should also be specified
enableTelemetry: false
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
jobs: []
# Optional: define the helix repo for telemetry (example: 'dotnet/arcade')
helixRepo: ''
# Optional: Override automatically derived dependsOn value for "publish build assets" job
publishBuildAssetsDependsOn: ''
# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
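# Example usage from a repository pipeline (illustrative only; the job shown below is a
# placeholder, not something this template defines; with enableMicrobuild the _TeamName
# and _SignType variables described above must also be provided):
#   jobs:
#   - template: /eng/common/templates/jobs/jobs.yml
#     parameters:
#       enableMicrobuild: true
#       enablePublishBuildAssets: true
#       jobs:
#       - job: Windows_NT
#         pool:
#           vmImage: vs2017-win2016
#         steps:
#         - script: eng\common\cibuild.cmd -configuration $(_BuildConfig)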
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
jobs:
- ${{ each job in parameters.jobs }}:
- template: ../job/job.yml
parameters:
# pass along parameters
${{ each parameter in parameters }}:
${{ if ne(parameter.key, 'jobs') }}:
${{ parameter.key }}: ${{ parameter.value }}
# pass along job properties
${{ each property in job }}:
${{ if ne(property.key, 'job') }}:
${{ property.key }}: ${{ property.value }}
name: ${{ job.job }}
- ${{ if and(eq(parameters.enablePublishBuildAssets, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- template: ../job/publish-build-assets.yml
parameters:
continueOnError: ${{ parameters.continueOnError }}
dependsOn:
- ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- ${{ job.job }}
- ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.jobs }}:
- ${{ job.job }}
pool:
vmImage: vs2017-win2016
runAsPublic: ${{ parameters.runAsPublic }}
publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
- ${{ if and(eq(parameters.graphFileGeneration.enabled, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- template: ../job/generate-graph-files.yml
parameters:
continueOnError: ${{ parameters.continueOnError }}
includeToolset: ${{ parameters.graphFileGeneration.includeToolset }}
dependsOn:
- Asset_Registry_Publish
pool:
vmImage: vs2017-win2016

View file

@ -7,8 +7,13 @@ parameters:
# Optional: name of the phase (not specifying phase name may cause name collisions)
name: ''
# Optional: display name of the phase
displayName: ''
# Phase dependencies
# Optional: condition for the job to run
condition: ''
# Optional: dependencies of the phase
dependsOn: ''
# Required: A defined YAML queue
@ -20,6 +25,10 @@ parameters:
# Optional: variables
variables: {}
# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
## Telemetry variables
# Optional: enable sending telemetry
@ -41,12 +50,20 @@ parameters:
phases:
- phase: ${{ parameters.name }}
dependsOn: ${{ parameters.dependsOn }}
${{ if ne(parameters.displayName, '') }}:
displayName: ${{ parameters.displayName }}
${{ if ne(parameters.condition, '') }}:
condition: ${{ parameters.condition }}
${{ if ne(parameters.dependsOn, '') }}:
dependsOn: ${{ parameters.dependsOn }}
queue: ${{ parameters.queue }}
${{ if ne(parameters.variables, '') }}:
variables: ${{ parameters.variables }}
variables:
${{ insert }}: ${{ parameters.variables }}
steps:
- checkout: self
@ -60,11 +77,12 @@ phases:
buildConfig: $(_HelixBuildConfig)
helixSource: $(_HelixSource)
helixType: $(_HelixType)
runAsPublic: ${{ parameters.runAsPublic }}
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
# Internal only resource, and Microbuild signing shouldn't be applied to PRs.
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildSigningPlugin@1
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildSigningPlugin@2
displayName: Install MicroBuild plugin
inputs:
signType: $(_SignType)
@ -81,7 +99,7 @@ phases:
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
# Internal only resources
- ${{ if ne(variables['System.TeamProject'], 'public') }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildCleanup@1
displayName: Execute Microbuild cleanup tasks
condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
@ -94,7 +112,7 @@ phases:
helixSource: $(_HelixSource)
helixType: $(_HelixType)
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: CopyFiles@2
displayName: Gather Asset Manifests
inputs:
@ -109,4 +127,4 @@ phases:
PublishLocation: Container
ArtifactName: AssetManifests
continueOnError: false
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))

View file

@ -1,27 +1,51 @@
parameters:
dependsOn: ''
queue: {}
configuration: 'Debug'
condition: succeeded()
continueOnError: false
runAsPublic: false
publishUsingPipelines: false
phases:
- phase: Asset_Registry_Publish
displayName: Publish to Build Asset Registry
dependsOn: ${{ parameters.dependsOn }}
queue: ${{ parameters.queue }}
variables:
_BuildConfig: ${{ parameters.configuration }}
steps:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: DownloadBuildArtifacts@0
displayName: Download artifact
inputs:
artifactName: AssetManifests
artifactName: AssetManifests
downloadPath: '$(Build.StagingDirectory)/Download'
condition: succeeded()
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: AzureKeyVault@1
inputs:
azureSubscription: 'DotNet-Engineering-Services_KeyVault'
KeyVaultName: EngKeyVault
SecretsFilter: 'MaestroAccessToken'
condition: succeeded()
- script: eng\common\publishbuildassets.cmd
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
/p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
displayName: Publish Build Assets
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: PowerShell@2
displayName: Publish Build Assets
inputs:
filePath: eng\common\sdk-task.ps1
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
/p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
/p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
/p:Configuration=$(_BuildConfig)
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: PublishBuildArtifacts@1
displayName: Publish Logs to VSTS
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
PublishLocation: Container
ArtifactName: $(Agent.Os)_Asset_Registry_Publish
continueOnError: true
condition: always()

View file

@ -1,33 +0,0 @@
parameters:
HelixSource: 'pr/dotnet-github-anon-kaonashi-bot'
HelixType: 'tests/default'
HelixBuild: $(Build.BuildNumber)
HelixTargetQueues: ''
HelixAccessToken: ''
HelixPreCommands: ''
HelixPostCommands: ''
WorkItemDirectory: ''
WorkItemCommand: ''
EnableXUnitReporter: false
WaitForWorkItemCompletion: true
steps:
- task: DotNetCoreCLI@2
inputs:
command: custom
projects: eng/common/helixpublish.proj
custom: msbuild
arguments: '/t:test /p:Language=msbuild'
displayName: Send tests job to Helix
env:
HelixSource: ${{ parameters.HelixSource }}
HelixType: ${{ parameters.HelixType }}
HelixBuild: ${{ parameters.HelixBuild }}
HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
HelixAccessToken: ${{ parameters.HelixAccessToken }}
HelixPreCommands: ${{ parameters.HelixPreCommands }}
HelixPostCommands: ${{ parameters.HelixPostCommands }}
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
WorkItemCommand: ${{ parameters.WorkItemCommand }}

View file

@ -0,0 +1,91 @@
# Please remember to update the documentation if you make changes to these parameters!
parameters:
HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
HelixConfiguration: '' # optional -- additional property attached to a job
HelixPreCommands: '' # optional -- commands to run before Helix work item execution
HelixPostCommands: '' # optional -- commands to run after Helix work item execution
WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
WorkItemTimeout: '' # optional -- a timeout in seconds for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
XUnitProjects: '' # optional -- semicolon delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
DotNetCliPackageType: '' # optional -- either 'sdk' or 'runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
Creator: '' # optional -- if the build is external, use this to specify who is sending the job
DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
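# Example usage from a job's steps (illustrative only; the template path, queue names,
# project path and versions below are placeholders, not values defined by this template):
#   - template: /eng/common/templates/steps/send-to-helix.yml
#     parameters:
#       HelixSource: pr/dotnet/someRepo/$(Build.SourceBranch)
#       HelixType: test/product/
#       HelixTargetQueues: Windows.10.Amd64.Open;Ubuntu.1604.Amd64.Open
#       Creator: $(Creator)
#       XUnitProjects: $(Build.SourcesDirectory)/src/SomeTests/SomeTests.csproj
#       XUnitPublishTargetFramework: netcoreapp2.1
#       XUnitRuntimeTargetFramework: netcoreapp2.0
#       XUnitRunnerVersion: 2.4.1
#       IncludeDotNetCli: true
#       DotNetCliPackageType: sdk
#       DotNetCliVersion: 2.1.505
#       EnableXUnitReporter: true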
steps:
- powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
env:
BuildConfig: $(_BuildConfig)
HelixSource: ${{ parameters.HelixSource }}
HelixType: ${{ parameters.HelixType }}
HelixBuild: ${{ parameters.HelixBuild }}
HelixConfiguration: ${{ parameters.HelixConfiguration }}
HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
HelixAccessToken: ${{ parameters.HelixAccessToken }}
HelixPreCommands: ${{ parameters.HelixPreCommands }}
HelixPostCommands: ${{ parameters.HelixPostCommands }}
WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
WorkItemCommand: ${{ parameters.WorkItemCommand }}
WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
XUnitProjects: ${{ parameters.XUnitProjects }}
XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
- script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
env:
BuildConfig: $(_BuildConfig)
HelixSource: ${{ parameters.HelixSource }}
HelixType: ${{ parameters.HelixType }}
HelixBuild: ${{ parameters.HelixBuild }}
HelixConfiguration: ${{ parameters.HelixConfiguration }}
HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
HelixAccessToken: ${{ parameters.HelixAccessToken }}
HelixPreCommands: ${{ parameters.HelixPreCommands }}
HelixPostCommands: ${{ parameters.HelixPostCommands }}
WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
WorkItemCommand: ${{ parameters.WorkItemCommand }}
WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
XUnitProjects: ${{ parameters.XUnitProjects }}
XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
Creator: ${{ parameters.Creator }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}

View file

@ -1,6 +1,6 @@
parameters:
helixSource: 'undefined_defaulted_in_telemetry.yml'
helixType: 'undefined_defaulted_in_telemetry.yml'
maxRetries: 5
retryDelay: 10 # in seconds
steps:
- bash: |
@ -11,27 +11,41 @@ steps:
fi
warningCount=0
# create a temporary file for curl output
res=`mktemp`
curlResult=`
curl --verbose --output $res --write-out "%{http_code}"\
-H 'Content-Type: application/json' \
-H "X-Helix-Job-Token: $Helix_JobToken" \
-H 'Content-Length: 0' \
-X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \
--data-urlencode "errorCount=$errorCount" \
--data-urlencode "warningCount=$warningCount"`
curlStatus=$?
if [ $curlStatus -eq 0 ]; then
if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
curlStatus=$curlResult
curlStatus=1
retryCount=0
# retry loop to harden against spotty telemetry connections
# we don't retry successes and 4xx client errors
until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
do
if [ $retryCount -gt 0 ]; then
echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
sleep $RetryDelay
fi
fi
# create a temporary file for curl output
res=`mktemp`
curlResult=`
curl --verbose --output $res --write-out "%{http_code}"\
-H 'Content-Type: application/json' \
-H "X-Helix-Job-Token: $Helix_JobToken" \
-H 'Content-Length: 0' \
-X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \
--data-urlencode "errorCount=$errorCount" \
--data-urlencode "warningCount=$warningCount"`
curlStatus=$?
if [ $curlStatus -eq 0 ]; then
if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
curlStatus=$curlResult
fi
fi
let retryCount++
done
if [ $curlStatus -ne 0 ]; then
echo "Failed to Send Build Finish information"
echo "Failed to Send Build Finish information after $retryCount retries"
vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus"
echo "##$vstsLogOutput"
exit 1
@ -41,6 +55,8 @@ steps:
# defined via VSTS variables in start-job.sh
Helix_JobToken: $(Helix_JobToken)
Helix_WorkItemId: $(Helix_WorkItemId)
MaxRetries: ${{ parameters.maxRetries }}
RetryDelay: ${{ parameters.retryDelay }}
condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
- powershell: |
if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) {
@ -50,13 +66,30 @@ steps:
}
$WarningCount = 0
try {
Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" `
-Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
# Basic retry loop to harden against server flakiness
$retryCount = 0
while ($retryCount -lt $env:MaxRetries) {
try {
Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" `
-Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
break
}
catch {
$statusCode = $_.Exception.Response.StatusCode.value__
if ($statusCode -ge 400 -and $statusCode -le 499) {
Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
exit 1
}
Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
$retryCount++
sleep $env:RetryDelay
continue
}
}
catch {
Write-Error $_
Write-Error $_.Exception
if ($retryCount -ge $env:MaxRetries) {
Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
exit 1
}
displayName: Send Windows Build End Telemetry
@ -64,4 +97,6 @@ steps:
# defined via VSTS variables in start-job.ps1
Helix_JobToken: $(Helix_JobToken)
Helix_WorkItemId: $(Helix_WorkItemId)
MaxRetries: ${{ parameters.maxRetries }}
RetryDelay: ${{ parameters.retryDelay }}
condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT'))

View file

@ -2,9 +2,12 @@ parameters:
helixSource: 'undefined_defaulted_in_telemetry.yml'
helixType: 'undefined_defaulted_in_telemetry.yml'
buildConfig: ''
runAsPublic: false
maxRetries: 5
retryDelay: 10 # in seconds
steps:
- ${{ if not(eq(variables['System.TeamProject'], 'public')) }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), not(eq(variables['System.TeamProject'], 'public'))) }}:
- task: AzureKeyVault@1
inputs:
azureSubscription: 'HelixProd_KeyVault'
@ -29,39 +32,52 @@ steps:
}
}
JobListStuff
cat $jobInfo
# create a temporary file for curl output
res=`mktemp`
accessTokenParameter=''
if [[ ! "$HelixApiAccessToken" == "" ]]; then
accessTokenParameter="?access_token=$HelixApiAccessToken"
fi
accessTokenParameter="?access_token=$HelixApiAccessToken"
curlResult=`
cat $jobInfo |\
curl --verbose --output $res --write-out "%{http_code}" \
-H 'Content-Type: application/json' \
-X POST "https://helix.dot.net/api/2018-03-14/telemetry/job$accessTokenParameter" -d @-`
curlStatus=$?
if [ $curlStatus -eq 0 ]; then
if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
curlStatus=$curlResult
curlStatus=1
retryCount=0
# retry loop to harden against spotty telemetry connections
# we don't retry successes and 4xx client errors
until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
do
if [ $retryCount -gt 0 ]; then
echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
sleep $RetryDelay
fi
fi
curlResult=`
cat $jobInfo |\
curl --trace - --verbose --output $res --write-out "%{http_code}" \
-H 'Content-Type: application/json' \
-X POST "https://helix.dot.net/api/2018-03-14/telemetry/job$accessTokenParameter" -d @-`
curlStatus=$?
if [ $curlStatus -eq 0 ]; then
if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
curlStatus=$curlResult
fi
fi
let retryCount++
done
curlResult=`cat $res`
# validate status of curl command
if [ $curlStatus -ne 0 ]; then
echo "Failed To Send Job Start information"
echo "Failed To Send Job Start information after $retryCount retries"
# We have to append the ## vso prefix or vso will pick up the command when it dumps the inline script into the shell
vstsLogOutput="vso[task.logissue type=error;sourcepath=telemetry/start-job.sh;code=1;]Failed to Send Job Start information: $curlStatus"
echo "##$vstsLogOutput"
exit 1
fi
# Set the Helix_JobToken variable
export Helix_JobToken=`echo $curlResult | xargs echo` # Strip Quotes
echo "##vso[task.setvariable variable=Helix_JobToken;issecret=true;]$Helix_JobToken"
@ -75,29 +91,44 @@ steps:
Attempt: 1
OperatingSystem: $(Agent.Os)
Configuration: ${{ parameters.buildConfig }}
MaxRetries: ${{ parameters.maxRetries }}
RetryDelay: ${{ parameters.retryDelay }}
condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
- bash: |
res=`mktemp`
curlResult=`
curl --verbose --output $res --write-out "%{http_code}"\
-H 'Content-Type: application/json' \
-H "X-Helix-Job-Token: $Helix_JobToken" \
-H 'Content-Length: 0' \
-X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build" \
--data-urlencode "buildUri=$BuildUri"`
curlStatus=$?
if [ $curlStatus -eq 0 ]; then
if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
curlStatus=$curlResult
curlStatus=1
retryCount=0
# retry loop to harden against spotty telemetry connections
# we don't retry successes and 4xx client errors
until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
do
if [ $retryCount -gt 0 ]; then
echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
sleep $RetryDelay
fi
fi
curlResult=`cat $res`
res=`mktemp`
curlResult=`
curl --verbose --output $res --write-out "%{http_code}"\
-H 'Content-Type: application/json' \
-H "X-Helix-Job-Token: $Helix_JobToken" \
-H 'Content-Length: 0' \
-X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build" \
--data-urlencode "buildUri=$BuildUri"`
curlStatus=$?
if [ $curlStatus -eq 0 ]; then
if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
curlStatus=$curlResult
fi
fi
curlResult=`cat $res`
let retryCount++
done
# validate status of curl command
if [ $curlStatus -ne 0 ]; then
echo "Failed to Send Build Start information"
echo "Failed to Send Build Start information after $retryCount retries"
vstsLogOutput="vso[task.logissue type=error;sourcepath=telemetry/build/start.sh;code=1;]Failed to Send Build Start information: $curlStatus"
echo "##$vstsLogOutput"
exit 1
@ -109,8 +140,10 @@ steps:
env:
BuildUri: $(System.TaskDefinitionsUri)$(System.TeamProject)/_build/index?buildId=$(Build.BuildId)&_a=summary
Helix_JobToken: $(Helix_JobToken)
MaxRetries: ${{ parameters.maxRetries }}
RetryDelay: ${{ parameters.retryDelay }}
condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
- powershell: |
$jobInfo = [pscustomobject]@{
QueueId=$env:QueueId;
@ -120,17 +153,42 @@ steps:
Attempt=$env:Attempt;
Properties=[pscustomobject]@{ operatingSystem=$env:OperatingSystem; configuration=$env:Configuration };
}
$jobInfoJson = $jobInfo | ConvertTo-Json
if ($env:HelixApiAccessToken) {
$accessTokenParameter="?access_token=$($env:HelixApiAccessToken)"
}
Write-Host "Job Info: $jobInfoJson"
$jobToken = Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job$($accessTokenParameter)" -Method Post -ContentType "application/json" -Body $jobInfoJson
# Basic retry loop to harden against server flakiness
$retryCount = 0
while ($retryCount -lt $env:MaxRetries) {
try {
$jobToken = Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job$($accessTokenParameter)" -Method Post -ContentType "application/json" -Body $jobInfoJson
break
}
catch {
$statusCode = $_.Exception.Response.StatusCode.value__
if ($statusCode -ge 400 -and $statusCode -le 499) {
Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
exit 1
}
Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
$retryCount++
sleep $env:RetryDelay
continue
}
}
if ($retryCount -ge $env:MaxRetries) {
Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
exit 1
}
$env:Helix_JobToken = $jobToken
Write-Host "##vso[task.setvariable variable=Helix_JobToken;issecret=true;]$env:Helix_JobToken"
displayName: Send Windows Job Start Telemetry
env:
HelixApiAccessToken: $(HelixApiAccessToken)
Source: ${{ parameters.helixSource }}
@ -140,15 +198,44 @@ steps:
Attempt: 1
OperatingSystem: $(Agent.Os)
Configuration: ${{ parameters.buildConfig }}
MaxRetries: ${{ parameters.maxRetries }}
RetryDelay: ${{ parameters.retryDelay }}
condition: and(always(), eq(variables['Agent.Os'], 'Windows_NT'))
displayName: Send Windows Job Start Telemetry
- powershell: |
$workItemId = Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build?buildUri=$([Net.WebUtility]::UrlEncode($env:BuildUri))" -Method Post -ContentType "application/json" -Body "" `
-Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
# Basic retry loop to harden against server flakiness
$retryCount = 0
while ($retryCount -lt $env:MaxRetries) {
try {
$workItemId = Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build?buildUri=$([Net.WebUtility]::UrlEncode($env:BuildUri))" -Method Post -ContentType "application/json" -Body "" `
-Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
break
}
catch {
$statusCode = $_.Exception.Response.StatusCode.value__
if ($statusCode -ge 400 -and $statusCode -le 499) {
Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
exit 1
}
Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
$retryCount++
sleep $env:RetryDelay
continue
}
}
if ($retryCount -ge $env:MaxRetries) {
Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
exit 1
}
$env:Helix_WorkItemId = $workItemId
Write-Host "##vso[task.setvariable variable=Helix_WorkItemId]$env:Helix_WorkItemId"
displayName: Send Windows Build Start Telemetry
env:
BuildUri: $(System.TaskDefinitionsUri)$(System.TeamProject)/_build/index?buildId=$(Build.BuildId)&_a=summary
Helix_JobToken: $(Helix_JobToken)
MaxRetries: ${{ parameters.maxRetries }}
RetryDelay: ${{ parameters.retryDelay }}
condition: and(always(), eq(variables['Agent.Os'], 'Windows_NT'))

652
eng/common/tools.ps1 Normal file
View file

@ -0,0 +1,652 @@
# Initialize variables if they aren't already defined.
# These may be defined as parameters of the importing script, or set after importing this script.
[string]$architecture = if (Test-Path variable:architecture) { $architecture } else { "" }
# CI mode - set to true on CI server for PR validation build or official build.
[bool]$ci = if (Test-Path variable:ci) { $ci } else { $false }
# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { "Debug" }
# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
# Binary log must be enabled on CI.
[bool]$binaryLog = if (Test-Path variable:binaryLog) { $binaryLog } else { $ci }
# Set to true to use the pipelines logger which will enable Azure logging output.
# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
# This flag is meant as a temporary opt-out for the feature while we validate it across
# our consumers. It will be deleted in the future.
[bool]$pipelinesLog = if (Test-Path variable:pipelinesLog) { $pipelinesLog } else { $ci }
# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes).
[bool]$prepareMachine = if (Test-Path variable:prepareMachine) { $prepareMachine } else { $false }
# True to restore toolsets and dependencies.
[bool]$restore = if (Test-Path variable:restore) { $restore } else { $true }
# Adjusts msbuild verbosity level.
[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { "minimal" }
# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
[bool]$nodeReuse = if (Test-Path variable:nodeReuse) { $nodeReuse } else { !$ci }
# Configures warning treatment in msbuild.
[bool]$warnAsError = if (Test-Path variable:warnAsError) { $warnAsError } else { $true }
# Specifies which msbuild engine to use for build: 'vs', 'dotnet' or unspecified (determined based on presence of tools.vs in global.json).
[string]$msbuildEngine = if (Test-Path variable:msbuildEngine) { $msbuildEngine } else { $null }
# True to attempt using a .NET Core SDK already installed on the machine that meets the
# requirements specified in global.json, instead of downloading one.
[bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true }
# True to use global NuGet cache instead of restoring packages to repository-local directory.
[bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci }
# An array of names of processes to stop on script exit if prepareMachine is true.
$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @("msbuild", "dotnet", "vbcscompiler") }
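# Example (illustrative): a repository build script can set any of these before dot-sourcing
# this file; anything left undefined falls back to the defaults above.
#   $ci = $true
#   $configuration = 'Release'
#   . $PSScriptRoot\tools.ps1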
set-strictmode -version 2.0
$ErrorActionPreference = "Stop"
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
function Create-Directory([string[]] $path) {
if (!(Test-Path $path)) {
New-Item -path $path -force -itemType "Directory" | Out-Null
}
}
function Unzip([string]$zipfile, [string]$outpath) {
Add-Type -AssemblyName System.IO.Compression.FileSystem
[System.IO.Compression.ZipFile]::ExtractToDirectory($zipfile, $outpath)
}
# This will exec a process using the console and return its exit code.
# This will not throw when the process fails.
# Returns process exit code.
function Exec-Process([string]$command, [string]$commandArgs) {
$startInfo = New-Object System.Diagnostics.ProcessStartInfo
$startInfo.FileName = $command
$startInfo.Arguments = $commandArgs
$startInfo.UseShellExecute = $false
$startInfo.WorkingDirectory = Get-Location
$process = New-Object System.Diagnostics.Process
$process.StartInfo = $startInfo
$process.Start() | Out-Null
$finished = $false
try {
while (-not $process.WaitForExit(100)) {
# Non-blocking loop done to allow ctrl-c interrupts
}
$finished = $true
return $global:LASTEXITCODE = $process.ExitCode
}
finally {
# If we didn't finish then an error occurred or the user hit ctrl-c. Either
# way kill the process
if (-not $finished) {
$process.Kill()
}
}
}
function Write-PipelineTaskError {
[CmdletBinding()]
param(
[Parameter(Mandatory = $true)]
[string]$Message,
[Parameter(Mandatory = $false)]
[string]$Type = 'error',
[string]$ErrCode,
[string]$SourcePath,
[string]$LineNumber,
[string]$ColumnNumber,
[switch]$AsOutput)
if(!$ci) {
if($Type -eq 'error') {
Write-Host $Message -ForegroundColor Red
return
}
elseif ($Type -eq 'warning') {
Write-Host $Message -ForegroundColor Yellow
return
}
}
if(($Type -ne 'error') -and ($Type -ne 'warning')) {
Write-Host $Message
return
}
if(-not $PSBoundParameters.ContainsKey('Type')) {
$PSBoundParameters.Add('Type', 'error')
}
Write-LogIssue @PSBoundParameters
}
function Write-PipelineSetVariable {
[CmdletBinding()]
param(
[Parameter(Mandatory = $true)]
[string]$Name,
[string]$Value,
[switch]$Secret,
[switch]$AsOutput)
if($ci) {
Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties @{
'variable' = $Name
'issecret' = $Secret
} -AsOutput:$AsOutput
}
}
function Write-PipelinePrependPath {
[CmdletBinding()]
param(
[Parameter(Mandatory=$true)]
[string]$Path,
[switch]$AsOutput)
if($ci) {
Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput
}
}
function InitializeDotNetCli([bool]$install) {
if (Test-Path variable:global:_DotNetInstallDir) {
return $global:_DotNetInstallDir
}
# Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
$env:DOTNET_MULTILEVEL_LOOKUP=0
# Disable first run since we do not need all ASP.NET packages restored.
$env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
# Disable telemetry on CI.
if ($ci) {
$env:DOTNET_CLI_TELEMETRY_OPTOUT=1
}
# Source Build uses DotNetCoreSdkDir variable
if ($env:DotNetCoreSdkDir -ne $null) {
$env:DOTNET_INSTALL_DIR = $env:DotNetCoreSdkDir
}
# Find the first path on %PATH% that contains the dotnet.exe
if ($useInstalledDotNetCli -and (-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -eq $null)) {
$dotnetCmd = Get-Command "dotnet.exe" -ErrorAction SilentlyContinue
if ($dotnetCmd -ne $null) {
$env:DOTNET_INSTALL_DIR = Split-Path $dotnetCmd.Path -Parent
}
}
$dotnetSdkVersion = $GlobalJson.tools.dotnet
# Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
# otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
if ((-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -ne $null) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
$dotnetRoot = $env:DOTNET_INSTALL_DIR
} else {
$dotnetRoot = Join-Path $RepoRoot ".dotnet"
if (-not (Test-Path(Join-Path $dotnetRoot "sdk\$dotnetSdkVersion"))) {
if ($install) {
InstallDotNetSdk $dotnetRoot $dotnetSdkVersion $architecture
} else {
Write-PipelineTaskError "Unable to find dotnet with SDK version '$dotnetSdkVersion'"
ExitWithExitCode 1
}
}
$env:DOTNET_INSTALL_DIR = $dotnetRoot
}
# Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
# build steps from using anything other than what we've downloaded.
# It also ensures that VS msbuild will use the downloaded sdk targets.
$env:PATH = "$dotnetRoot;$env:PATH"
# Make sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build
Write-PipelinePrependPath -Path $dotnetRoot
Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0'
Write-PipelineSetVariable -Name 'DOTNET_SKIP_FIRST_TIME_EXPERIENCE' -Value '1'
return $global:_DotNetInstallDir = $dotnetRoot
}
function GetDotNetInstallScript([string] $dotnetRoot) {
$installScript = Join-Path $dotnetRoot "dotnet-install.ps1"
if (!(Test-Path $installScript)) {
Create-Directory $dotnetRoot
Invoke-WebRequest "https://dot.net/v1/dotnet-install.ps1" -OutFile $installScript
}
return $installScript
}
function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = "") {
InstallDotNet $dotnetRoot $version $architecture
}
function InstallDotNet([string] $dotnetRoot, [string] $version, [string] $architecture = "", [string] $runtime = "", [bool] $skipNonVersionedFiles = $false) {
$installScript = GetDotNetInstallScript $dotnetRoot
$installParameters = @{
Version = $version
InstallDir = $dotnetRoot
}
if ($architecture) {
if ($architecture -eq "arm") {
$architecture="x86"
}
if ($architecture -eq "arm64") {
$architecture="x64"
}
$installParameters.Architecture = $architecture
}
if ($runtime) { $installParameters.Runtime = $runtime }
if ($skipNonVersionedFiles) { $installParameters.SkipNonVersionedFiles = $skipNonVersionedFiles }
& $installScript @installParameters
if ($lastExitCode -ne 0) {
Write-PipelineTaskError -Message "Failed to install dotnet cli (exit code '$lastExitCode')."
ExitWithExitCode $lastExitCode
}
}
#
# Locates Visual Studio MSBuild installation.
# The preference order for MSBuild to use is as follows:
#
# 1. MSBuild from an active VS command prompt
# 2. MSBuild from a compatible VS installation
# 3. MSBuild from the xcopy tool package
#
# Returns full path to msbuild.exe.
# Throws on failure.
#
function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = $null) {
if (Test-Path variable:global:_MSBuildExe) {
return $global:_MSBuildExe
}
if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
$vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { "15.9" }
$vsMinVersion = [Version]::new($vsMinVersionStr)
# Try msbuild command available in the environment.
if ($env:VSINSTALLDIR -ne $null) {
$msbuildCmd = Get-Command "msbuild.exe" -ErrorAction SilentlyContinue
if ($msbuildCmd -ne $null) {
# Workaround for https://github.com/dotnet/roslyn/issues/35793
# Due to this issue $msbuildCmd.Version returns 0.0.0.0 for msbuild.exe 16.2+
$msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split(@('-', '+'))[0])
if ($msbuildVersion -ge $vsMinVersion) {
return $global:_MSBuildExe = $msbuildCmd.Path
}
# Report error - the developer environment is initialized with incompatible VS version.
throw "Developer Command Prompt for VS $($env:VisualStudioVersion) is not recent enough. Please upgrade to $vsMinVersionStr or build from a plain CMD window"
}
}
# Locate Visual Studio installation or download x-copy msbuild.
$vsInfo = LocateVisualStudio $vsRequirements
if ($vsInfo -ne $null) {
$vsInstallDir = $vsInfo.installationPath
$vsMajorVersion = $vsInfo.installationVersion.Split('.')[0]
InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion
} else {
if (Get-Member -InputObject $GlobalJson.tools -Name "xcopy-msbuild") {
$xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild'
$vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
} else {
$vsMajorVersion = $vsMinVersion.Major
$xcopyMSBuildVersion = "$vsMajorVersion.$($vsMinVersion.Minor).0-alpha"
}
$vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
if ($vsInstallDir -eq $null) {
throw "Unable to find Visual Studio that has required version and components installed"
}
}
$msbuildVersionDir = if ([int]$vsMajorVersion -lt 16) { "$vsMajorVersion.0" } else { "Current" }
return $global:_MSBuildExe = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin\msbuild.exe"
}
function InitializeVisualStudioEnvironmentVariables([string] $vsInstallDir, [string] $vsMajorVersion) {
$env:VSINSTALLDIR = $vsInstallDir
Set-Item "env:VS$($vsMajorVersion)0COMNTOOLS" (Join-Path $vsInstallDir "Common7\Tools\")
$vsSdkInstallDir = Join-Path $vsInstallDir "VSSDK\"
if (Test-Path $vsSdkInstallDir) {
Set-Item "env:VSSDK$($vsMajorVersion)0Install" $vsSdkInstallDir
$env:VSSDKInstall = $vsSdkInstallDir
}
}
function InstallXCopyMSBuild([string]$packageVersion) {
return InitializeXCopyMSBuild $packageVersion -install $true
}
function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
$packageName = "RoslynTools.MSBuild"
$packageDir = Join-Path $ToolsDir "msbuild\$packageVersion"
$packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg"
if (!(Test-Path $packageDir)) {
if (!$install) {
return $null
}
Create-Directory $packageDir
Write-Host "Downloading $packageName $packageVersion"
Invoke-WebRequest "https://dotnet.myget.org/F/roslyn-tools/api/v2/package/$packageName/$packageVersion/" -OutFile $packagePath
Unzip $packagePath $packageDir
}
return Join-Path $packageDir "tools"
}
#
# Locates Visual Studio instance that meets the minimal requirements specified by tools.vs object in global.json.
#
# The following properties of tools.vs are recognized:
# "version": "{major}.{minor}"
# Two part minimal VS version, e.g. "15.9", "16.0", etc.
# "components": ["componentId1", "componentId2", ...]
# Array of ids of workload components that must be available in the VS instance.
# See e.g. https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-enterprise?view=vs-2017
#
# Returns JSON describing the located VS instance (same format as returned by vswhere),
# or $null if no instance meeting the requirements is found on the machine.
#
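# Example global.json fragment matching the schema above (values are illustrative):
#   {
#     "tools": {
#       "vs": {
#         "version": "15.9",
#         "components": ["Microsoft.VisualStudio.Component.VSSDK"]
#       }
#     }
#   }
#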
function LocateVisualStudio([object]$vsRequirements = $null){
if (Get-Member -InputObject $GlobalJson.tools -Name "vswhere") {
$vswhereVersion = $GlobalJson.tools.vswhere
} else {
$vswhereVersion = "2.5.2"
}
$vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion"
$vsWhereExe = Join-Path $vsWhereDir "vswhere.exe"
if (!(Test-Path $vsWhereExe)) {
Create-Directory $vsWhereDir
Write-Host "Downloading vswhere"
Invoke-WebRequest "https://github.com/Microsoft/vswhere/releases/download/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
}
if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
$args = @("-latest", "-prerelease", "-format", "json", "-requires", "Microsoft.Component.MSBuild")
if (Get-Member -InputObject $vsRequirements -Name "version") {
$args += "-version"
$args += $vsRequirements.version
}
if (Get-Member -InputObject $vsRequirements -Name "components") {
foreach ($component in $vsRequirements.components) {
$args += "-requires"
$args += $component
}
}
$vsInfo =& $vsWhereExe $args | ConvertFrom-Json
if ($lastExitCode -ne 0) {
return $null
}
# use first matching instance
return $vsInfo[0]
}
function InitializeBuildTool() {
if (Test-Path variable:global:_BuildTool) {
return $global:_BuildTool
}
if (-not $msbuildEngine) {
$msbuildEngine = GetDefaultMSBuildEngine
}
# Initialize dotnet cli if listed in 'tools'
$dotnetRoot = $null
if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") {
$dotnetRoot = InitializeDotNetCli -install:$restore
}
if ($msbuildEngine -eq "dotnet") {
if (!$dotnetRoot) {
Write-PipelineTaskError "/global.json must specify 'tools.dotnet'."
ExitWithExitCode 1
}
$buildTool = @{ Path = Join-Path $dotnetRoot "dotnet.exe"; Command = "msbuild"; Tool = "dotnet"; Framework = "netcoreapp2.1" }
} elseif ($msbuildEngine -eq "vs") {
try {
$msbuildPath = InitializeVisualStudioMSBuild -install:$restore
} catch {
Write-PipelineTaskError $_
ExitWithExitCode 1
}
$buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472" }
} else {
Write-PipelineTaskError "Unexpected value of -msbuildEngine: '$msbuildEngine'."
ExitWithExitCode 1
}
return $global:_BuildTool = $buildTool
}
function GetDefaultMSBuildEngine() {
# Presence of tools.vs indicates the repo needs to build using VS msbuild on Windows.
if (Get-Member -InputObject $GlobalJson.tools -Name "vs") {
return "vs"
}
if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") {
return "dotnet"
}
Write-PipelineTaskError "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'."
ExitWithExitCode 1
}
function GetNuGetPackageCachePath() {
if ($env:NUGET_PACKAGES -eq $null) {
# Use local cache on CI to ensure deterministic build,
# use global cache in dev builds to avoid cost of downloading packages.
if ($useGlobalNuGetCache) {
$env:NUGET_PACKAGES = Join-Path $env:UserProfile ".nuget\packages"
} else {
$env:NUGET_PACKAGES = Join-Path $RepoRoot ".packages"
}
}
return $env:NUGET_PACKAGES
}
# Returns a full path to an Arcade SDK task project file.
function GetSdkTaskProject([string]$taskName) {
return Join-Path (Split-Path (InitializeToolset) -Parent) "SdkTasks\$taskName.proj"
}
function InitializeNativeTools() {
if (Get-Member -InputObject $GlobalJson -Name "native-tools") {
$nativeArgs= @{}
if ($ci) {
$nativeArgs = @{
InstallDirectory = "$ToolsDir"
}
}
& "$PSScriptRoot/init-tools-native.ps1" @nativeArgs
}
}
function InitializeToolset() {
if (Test-Path variable:global:_ToolsetBuildProj) {
return $global:_ToolsetBuildProj
}
$nugetCache = GetNuGetPackageCachePath
$toolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
$toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt"
if (Test-Path $toolsetLocationFile) {
$path = Get-Content $toolsetLocationFile -TotalCount 1
if (Test-Path $path) {
return $global:_ToolsetBuildProj = $path
}
}
if (-not $restore) {
Write-PipelineTaskError "Toolset version $toolsetVersion has not been restored."
ExitWithExitCode 1
}
$buildTool = InitializeBuildTool
$proj = Join-Path $ToolsetDir "restore.proj"
$bl = if ($binaryLog) { "/bl:" + (Join-Path $LogDir "ToolsetRestore.binlog") } else { "" }
'<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' | Set-Content $proj
MSBuild-Core $proj $bl /t:__WriteToolsetLocation /clp:ErrorsOnly`;NoSummary /p:__ToolsetLocationOutputFile=$toolsetLocationFile
$path = Get-Content $toolsetLocationFile -TotalCount 1
if (!(Test-Path $path)) {
throw "Invalid toolset path: $path"
}
return $global:_ToolsetBuildProj = $path
}
function ExitWithExitCode([int] $exitCode) {
if ($ci -and $prepareMachine) {
Stop-Processes
}
exit $exitCode
}
function Stop-Processes() {
Write-Host "Killing running build processes..."
foreach ($processName in $processesToStopOnExit) {
Get-Process -Name $processName -ErrorAction SilentlyContinue | Stop-Process
}
}
#
# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function.
# The arguments are automatically quoted.
# Terminates the script if the build fails.
#
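# Example (illustrative):
#   MSBuild $projectPath /p:Configuration=$configuration /bl:$binlogPath
# where $projectPath and $binlogPath are values supplied by the calling script.
#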
function MSBuild() {
if ($pipelinesLog) {
$buildTool = InitializeBuildTool
$toolsetBuildProject = InitializeToolset
$path = Split-Path -parent $toolsetBuildProject
$path = Join-Path $path (Join-Path $buildTool.Framework "Microsoft.DotNet.Arcade.Sdk.dll")
$args += "/logger:$path"
}
MSBuild-Core @args
}
#
# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function.
# The arguments are automatically quoted.
# Terminates the script if the build fails.
#
function MSBuild-Core() {
if ($ci) {
if (!$binaryLog) {
throw "Binary log must be enabled in CI build."
}
if ($nodeReuse) {
throw "Node reuse must be disabled in CI build."
}
}
$buildTool = InitializeBuildTool
$cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"
if ($warnAsError) {
$cmdArgs += " /warnaserror /p:TreatWarningsAsErrors=true"
}
foreach ($arg in $args) {
if ($arg -ne $null -and $arg.Trim() -ne "") {
$cmdArgs += " `"$arg`""
}
}
$exitCode = Exec-Process $buildTool.Path $cmdArgs
if ($exitCode -ne 0) {
Write-PipelineTaskError "Build failed."
$buildLog = GetMSBuildBinaryLogCommandLineArgument $args
if ($buildLog -ne $null) {
Write-Host "See log: $buildLog" -ForegroundColor DarkGray
}
ExitWithExitCode $exitCode
}
}
function GetMSBuildBinaryLogCommandLineArgument($arguments) {
foreach ($argument in $arguments) {
if ($argument -ne $null) {
$arg = $argument.Trim()
if ($arg.StartsWith("/bl:", "OrdinalIgnoreCase")) {
return $arg.Substring("/bl:".Length)
}
if ($arg.StartsWith("/binaryLogger:", "OrdinalIgnoreCase")) {
return $arg.Substring("/binaryLogger:".Length)
}
}
}
return $null
}
. $PSScriptRoot\LoggingCommandFunctions.ps1
$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot "..\..")
$EngRoot = Resolve-Path (Join-Path $PSScriptRoot "..")
$ArtifactsDir = Join-Path $RepoRoot "artifacts"
$ToolsetDir = Join-Path $ArtifactsDir "toolset"
$ToolsDir = Join-Path $RepoRoot ".tools"
$LogDir = Join-Path (Join-Path $ArtifactsDir "log") $configuration
$TempDir = Join-Path (Join-Path $ArtifactsDir "tmp") $configuration
$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot "global.json") | ConvertFrom-Json
# true if global.json contains a "runtimes" section
$globalJsonHasRuntimes = if ($GlobalJson.tools.PSObject.Properties.Name -Match 'runtimes') { $true } else { $false }
Create-Directory $ToolsetDir
Create-Directory $TempDir
Create-Directory $LogDir
Write-PipelineSetVariable -Name 'Artifacts' -Value $ArtifactsDir
Write-PipelineSetVariable -Name 'Artifacts.Toolset' -Value $ToolsetDir
Write-PipelineSetVariable -Name 'Artifacts.Log' -Value $LogDir
Write-PipelineSetVariable -Name 'TEMP' -Value $TempDir
Write-PipelineSetVariable -Name 'TMP' -Value $TempDir

463
eng/common/tools.sh Normal file
View file

@ -0,0 +1,463 @@
#!/usr/bin/env bash
# Initialize variables if they aren't already defined.
# CI mode - set to true on CI server for PR validation build or official build.
ci=${ci:-false}
# Set to true to use the pipelines logger which will enable Azure logging output.
# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
# This flag is meant as a temporary opt-out for the feature while we validate it across
# our consumers. It will be deleted in the future.
if [[ "$ci" == true ]]; then
pipelines_log=${pipelines_log:-true}
else
pipelines_log=${pipelines_log:-false}
fi
# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
configuration=${configuration:-'Debug'}
# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
# Binary log must be enabled on CI.
binary_log=${binary_log:-$ci}
# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes).
prepare_machine=${prepare_machine:-false}
# True to restore toolsets and dependencies.
restore=${restore:-true}
# Adjusts msbuild verbosity level.
verbosity=${verbosity:-'minimal'}
# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
if [[ "$ci" == true ]]; then
node_reuse=${node_reuse:-false}
else
node_reuse=${node_reuse:-true}
fi
# Configures warning treatment in msbuild.
warn_as_error=${warn_as_error:-true}
# True to attempt using a .NET Core SDK already installed on the machine that meets the
# requirements specified in global.json, instead of downloading one.
use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
# True to use global NuGet cache instead of restoring packages to repository-local directory.
if [[ "$ci" == true ]]; then
use_global_nuget_cache=${use_global_nuget_cache:-false}
else
use_global_nuget_cache=${use_global_nuget_cache:-true}
fi
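# Example (illustrative): a repository build script can set any of these before sourcing
# this file; anything left unset falls back to the defaults above.
#   ci=true
#   configuration=Release
#   . "<repo root>/eng/common/tools.sh"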
function EmitError {
if [[ "$ci" != true ]]; then
echo "$@" >&2
return
fi
message_type="error"
sourcepath=''
linenumber=''
columnnumber=''
error_code=''
while [[ $# -gt 0 ]]; do
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
case "$opt" in
-type|-t)
message_type=$2
shift
;;
-sourcepath|-s)
sourcepath=$2
shift
;;
-linenumber|-l)
linenumber=$2
shift
;;
-columnnumber|-col)
columnnumber=$2
shift
;;
-code|-c)
error_code=$2
shift
;;
*)
break
;;
esac
shift
done
message='##vso[task.logissue'
message="$message type=$message_type"
if [ -n "$sourcepath" ]; then
message="$message;sourcepath=$sourcepath"
else
message="$message;sourcepath=${BASH_SOURCE[1]}"
fi
if [ -n "$linenumber" ]; then
message="$message;linenumber=$linenumber"
else
message="$message;linenumber=${BASH_LINENO[0]}"
fi
if [ -n "$columnnumber" ]; then
message="$message;columnnumber=$columnnumber"
fi
if [ -n "$error_code" ]; then
message="$message;code=$error_code"
fi
message="$message]$*"
echo "$message"
}
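# Example (illustrative) invocations:
#   EmitError "Toolset restore failed"              # plain message (echoed to stderr outside CI)
#   EmitError -type warning -code 2 "Deprecated"    # logged as a VSTS warning issue on CI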
# Resolve any symlinks in the given path.
function ResolvePath {
local path=$1
while [[ -h $path ]]; do
local dir="$( cd -P "$( dirname "$path" )" && pwd )"
path="$(readlink "$path")"
# if $path was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $path != /* ]] && path="$dir/$path"
done
# return value
_ResolvePath="$path"
}
# ReadVersionFromJson [json key]
function ReadGlobalVersion {
local key=$1
local line=`grep -m 1 "$key" "$global_json_file"`
local pattern="\"$key\" *: *\"(.*)\""
if [[ ! $line =~ $pattern ]]; then
EmitError "Error: Cannot find \"$key\" in $global_json_file"
ExitWithExitCode 1
fi
# return value
_ReadGlobalVersion=${BASH_REMATCH[1]}
}
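# Example (illustrative): read the pinned SDK version from global.json.
#   ReadGlobalVersion "dotnet"
#   echo "Using .NET SDK $_ReadGlobalVersion"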
function InitializeDotNetCli {
if [[ -n "${_InitializeDotNetCli:-}" ]]; then
return
fi
local install=$1
# Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
export DOTNET_MULTILEVEL_LOOKUP=0
# Disable first run since we want to control all package sources
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
# Disable telemetry on CI
if [[ $ci == true ]]; then
export DOTNET_CLI_TELEMETRY_OPTOUT=1
fi
# LTTNG is the logging infrastructure used by Core CLR. Need this variable set
# so it doesn't output warnings to the console.
export LTTNG_HOME="$HOME"
# Source Build uses DotNetCoreSdkDir variable
if [[ -n "${DotNetCoreSdkDir:-}" ]]; then
export DOTNET_INSTALL_DIR="$DotNetCoreSdkDir"
fi
# Find the first path on $PATH that contains the dotnet.exe
if [[ "$use_installed_dotnet_cli" == true && $global_json_has_runtimes == false && -z "${DOTNET_INSTALL_DIR:-}" ]]; then
local dotnet_path=`command -v dotnet`
if [[ -n "$dotnet_path" ]]; then
ResolvePath "$dotnet_path"
export DOTNET_INSTALL_DIR=`dirname "$_ResolvePath"`
fi
fi
ReadGlobalVersion "dotnet"
local dotnet_sdk_version=$_ReadGlobalVersion
local dotnet_root=""
# Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
# otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
if [[ $global_json_has_runtimes == false && -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
dotnet_root="$DOTNET_INSTALL_DIR"
else
dotnet_root="$repo_root/.dotnet"
export DOTNET_INSTALL_DIR="$dotnet_root"
if [[ ! -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
if [[ "$install" == true ]]; then
InstallDotNetSdk "$dotnet_root" "$dotnet_sdk_version"
else
EmitError "Unable to find dotnet with SDK version '$dotnet_sdk_version'"
ExitWithExitCode 1
fi
fi
fi
# Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
# build steps from using anything other than what we've downloaded.
export PATH="$dotnet_root:$PATH"
if [[ $ci == true ]]; then
# Make sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build
echo "##vso[task.prependpath]$dotnet_root"
echo "##vso[task.setvariable variable=DOTNET_MULTILEVEL_LOOKUP]0"
echo "##vso[task.setvariable variable=DOTNET_SKIP_FIRST_TIME_EXPERIENCE]1"
fi
# return value
_InitializeDotNetCli="$dotnet_root"
}
function InstallDotNetSdk {
local root=$1
local version=$2
local architecture=""
if [[ $# == 3 ]]; then
architecture=$3
fi
InstallDotNet "$root" "$version" $architecture
}
function InstallDotNet {
local root=$1
local version=$2
GetDotNetInstallScript "$root"
local install_script=$_GetDotNetInstallScript
local archArg=''
if [[ -n "${3:-}" ]]; then
archArg="--architecture $3"
fi
local runtimeArg=''
if [[ -n "${4:-}" ]]; then
runtimeArg="--runtime $4"
fi
local skipNonVersionedFilesArg=""
if [[ "$#" -ge "5" ]]; then
skipNonVersionedFilesArg="--skip-non-versioned-files"
fi
bash "$install_script" --version $version --install-dir "$root" $archArg $runtimeArg $skipNonVersionedFilesArg || {
local exit_code=$?
EmitError "Failed to install dotnet SDK (exit code '$exit_code')."
ExitWithExitCode $exit_code
}
}
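# Illustrative call shapes (paths and versions are examples only). The optional
# 3rd-5th arguments map to --architecture, --runtime and
# --skip-non-versioned-files on dotnet-install.sh:
#   InstallDotNetSdk "$repo_root/.dotnet" "$dotnet_sdk_version"
#   InstallDotNet "$repo_root/.dotnet" "2.1.11" "x64" "dotnet" true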
function GetDotNetInstallScript {
local root=$1
local install_script="$root/dotnet-install.sh"
local install_script_url="https://dot.net/v1/dotnet-install.sh"
if [[ ! -a "$install_script" ]]; then
mkdir -p "$root"
echo "Downloading '$install_script_url'"
# Use curl if available, otherwise use wget
if command -v curl > /dev/null; then
curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script"
else
wget -q -O "$install_script" "$install_script_url"
fi
fi
# return value
_GetDotNetInstallScript="$install_script"
}
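# Illustrative usage (downloads the script on first use, then reuses it):
#   GetDotNetInstallScript "$repo_root/.dotnet"
#   bash "$_GetDotNetInstallScript" --help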
function InitializeBuildTool {
if [[ -n "${_InitializeBuildTool:-}" ]]; then
return
fi
InitializeDotNetCli $restore
# return values
_InitializeBuildTool="$_InitializeDotNetCli/dotnet"
_InitializeBuildToolCommand="msbuild"
_InitializeBuildToolFramework="netcoreapp2.1"
}
function GetNuGetPackageCachePath {
if [[ -z ${NUGET_PACKAGES:-} ]]; then
if [[ "$use_global_nuget_cache" == true ]]; then
export NUGET_PACKAGES="$HOME/.nuget/packages"
else
export NUGET_PACKAGES="$repo_root/.packages"
fi
fi
# return value
_GetNuGetPackageCachePath=$NUGET_PACKAGES
}
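# Illustrative usage:
#   GetNuGetPackageCachePath
#   echo "NuGet cache: $_GetNuGetPackageCachePath"   # same value as $NUGET_PACKAGES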
function InitializeNativeTools() {
if grep -Fq "native-tools" "$global_json_file"; then
local nativeArgs=""
if [[ "$ci" == true ]]; then
nativeArgs="-InstallDirectory $tools_dir"
fi
"$_script_dir/init-tools-native.sh" $nativeArgs
fi
}
function InitializeToolset {
if [[ -n "${_InitializeToolset:-}" ]]; then
return
fi
GetNuGetPackageCachePath
ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk"
local toolset_version=$_ReadGlobalVersion
local toolset_location_file="$toolset_dir/$toolset_version.txt"
if [[ -a "$toolset_location_file" ]]; then
local path=`cat "$toolset_location_file"`
if [[ -a "$path" ]]; then
# return value
_InitializeToolset="$path"
return
fi
fi
if [[ "$restore" != true ]]; then
EmitError "Toolset version $toolsetVersion has not been restored."
ExitWithExitCode 2
fi
local proj="$toolset_dir/restore.proj"
local bl=""
if [[ "$binary_log" == true ]]; then
bl="/bl:$log_dir/ToolsetRestore.binlog"
fi
echo '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' > "$proj"
MSBuild-Core "$proj" $bl /t:__WriteToolsetLocation /clp:ErrorsOnly\;NoSummary /p:__ToolsetLocationOutputFile="$toolset_location_file"
local toolset_build_proj=`cat "$toolset_location_file"`
if [[ ! -a "$toolset_build_proj" ]]; then
EmitError "Invalid toolset path: $toolset_build_proj"
ExitWithExitCode 3
fi
# return value
_InitializeToolset="$toolset_build_proj"
}
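# Illustrative flow (an assumption about how the resolved toolset project is
# consumed, modeled on eng/common/build.sh):
#   InitializeToolset
#   MSBuild "$_InitializeToolset" /p:Configuration=Debug /p:Restore=true /p:Build=true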
function ExitWithExitCode {
if [[ "$ci" == true && "$prepare_machine" == true ]]; then
StopProcesses
fi
exit $1
}
function StopProcesses {
echo "Killing running build processes..."
pkill -9 "dotnet" || true
pkill -9 "vbcscompiler" || true
return 0
}
function MSBuild {
local args=( "$@" )
if [[ "$pipelines_log" == true ]]; then
InitializeBuildTool
InitializeToolset
local toolset_dir="${_InitializeToolset%/*}"
local logger_path="$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll"
args+=( "-logger:$logger_path" )
fi
MSBuild-Core "${args[@]}"
}
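# Illustrative invocation (project path and switches are examples only; all
# arguments are forwarded to "dotnet msbuild" by MSBuild-Core below):
#   MSBuild "$repo_root/some.proj" /p:Configuration=Debug /bl:"$log_dir/Build.binlog"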
function MSBuild-Core {
if [[ "$ci" == true ]]; then
if [[ "$binary_log" != true ]]; then
EmitError "Binary log must be enabled in CI build."
ExitWithExitCode 1
fi
if [[ "$node_reuse" == true ]]; then
EmitError "Node reuse must be disabled in CI build."
ExitWithExitCode 1
fi
fi
InitializeBuildTool
local warnaserror_switch=""
if [[ $warn_as_error == true ]]; then
warnaserror_switch="/warnaserror"
fi
"$_InitializeBuildTool" "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@" || {
local exit_code=$?
EmitError "Build failed (exit code '$exit_code')."
ExitWithExitCode $exit_code
}
}
ResolvePath "${BASH_SOURCE[0]}"
_script_dir=`dirname "$_ResolvePath"`
eng_root=`cd -P "$_script_dir/.." && pwd`
repo_root=`cd -P "$_script_dir/../.." && pwd`
artifacts_dir="$repo_root/artifacts"
toolset_dir="$artifacts_dir/toolset"
tools_dir="$repo_root/.tools"
log_dir="$artifacts_dir/log/$configuration"
temp_dir="$artifacts_dir/tmp/$configuration"
global_json_file="$repo_root/global.json"
# determine if global.json contains a "runtimes" entry
global_json_has_runtimes=false
dotnetlocal_key=`grep -m 1 "runtimes" "$global_json_file"` || true
if [[ -n "$dotnetlocal_key" ]]; then
global_json_has_runtimes=true
fi
# HOME may not be defined in some scenarios, but it is required by NuGet
if [[ -z $HOME ]]; then
export HOME="$repo_root/artifacts/.home/"
mkdir -p "$HOME"
fi
mkdir -p "$toolset_dir"
mkdir -p "$temp_dir"
mkdir -p "$log_dir"
if [[ $ci == true ]]; then
export TEMP="$temp_dir"
export TMP="$temp_dir"
fi

12
eng/configure-toolset.ps1 Normal file
View file

@@ -0,0 +1,12 @@
# We depend on a local cli for a number of our buildtool
# commands like init-tools so for now we need to disable
# using the globally installed dotnet
$script:useInstalledDotNetCli = $false
# Always use the local repo packages directory instead of
# the user's NuGet cache
#$script:useGlobalNuGetCache = $false
# Working around issue https://github.com/dotnet/arcade/issues/2673
$script:DisableNativeToolsetInstalls = $true

12
eng/configure-toolset.sh Normal file
View file

@@ -0,0 +1,12 @@
# We depend on a local cli for a number of our buildtool
# commands like init-tools so for now we need to disable
# using the globally installed dotnet
use_installed_dotnet_cli=false
# Always use the local repo packages directory instead of
# the user's NuGet cache
#use_global_nuget_cache=false
# Working around issue https://github.com/dotnet/arcade/issues/2673
DisableNativeToolsetInstalls=true
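# Note: the settings above override defaults consumed by eng/common/tools.sh;
# use_installed_dotnet_cli is checked in InitializeDotNetCli and
# use_global_nuget_cache in GetNuGetPackageCachePath.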

View file

@@ -1,7 +1,6 @@
[cmdletbinding()]
param(
[string]$DotNetDir,
[string]$RuntimeVersion21,
[string]$TempDir,
[string]$BuildArch,
[switch]$DailyTest,
@@ -12,15 +11,18 @@ param(
Set-StrictMode -Version Latest
$ErrorActionPreference="Stop"
$RuntimeVersion11="1.1.11"
$RuntimeVersion22="2.2.2"
$RuntimeVersion11="1.1.13"
$RuntimeVersion21="2.1.11"
$RuntimeVersion22="2.2.5"
$DailyTestText="true"
# Install the other versions of .NET Core runtime we are going to test. 1.1.x, 2.1.x (installed with the CLI), 2.2.x
# and latest. Only install the latest master for daily jobs and leave the RuntimeVersion* config properties blank.
# Install the other versions of .NET Core runtime we are going to test. 1.1.x, 2.1.x, 2.2.x
# and latest. Only install the latest master for daily jobs and leave the RuntimeVersion*
# config properties blank.
if (!$DailyTest) {
$DailyTestText="false"
. $DotNetDir\dotnet-install.ps1 -Version $RuntimeVersion11 -Architecture $BuildArch -SkipNonVersionedFiles -Runtime dotnet -InstallDir $DotNetDir
. $DotNetDir\dotnet-install.ps1 -Version $RuntimeVersion21 -Architecture $BuildArch -SkipNonVersionedFiles -Runtime dotnet -InstallDir $DotNetDir
. $DotNetDir\dotnet-install.ps1 -Version $RuntimeVersion22 -Architecture $BuildArch -SkipNonVersionedFiles -Runtime dotnet -InstallDir $DotNetDir
}

View file

@@ -9,9 +9,9 @@ daily_test=0
branch="master"
uncached_feed="https://dotnetcli.blob.core.windows.net/dotnet"
runtime_version_11="1.1.11"
runtime_version_22="2.2.2"
runtime_version_11="1.1.13"
runtime_version_21=
runtime_version_22="2.2.5"
while [ $# -ne 0 ]; do
name=$1
@@ -48,11 +48,13 @@ done
daily_test_text="true"
# Install the other versions of .NET Core runtime we are going to test. 1.1.x, 2.1.x (installed with the CLI), 2.2.x
# and latest. Only install the latest master for daily jobs and leave the RuntimeVersion* config properties blank.
# Install the other versions of .NET Core runtime we are going to test. 1.1.x, 2.1.x, 2.2.x
# and latest. Only install the latest master for daily jobs and leave the RuntimeVersion*
# config properties blank.
if [ $daily_test == 0 ]; then
daily_test_text="false"
bash "$dotnet_dir/dotnet-install.sh" --version "$runtime_version_11" --architecture "$build_arch" --skip-non-versioned-files --runtime dotnet --install-dir "$dotnet_dir"
bash "$dotnet_dir/dotnet-install.sh" --version "$runtime_version_21" --architecture "$build_arch" --skip-non-versioned-files --runtime dotnet --install-dir "$dotnet_dir"
bash "$dotnet_dir/dotnet-install.sh" --version "$runtime_version_22" --architecture "$build_arch" --skip-non-versioned-files --runtime dotnet --install-dir "$dotnet_dir"
fi

View file

@@ -1,8 +1,11 @@
{
"sdk": {
"version": "3.0.100-preview5-011568"
},
"tools": {
"dotnet": "2.1.505"
"dotnet": "3.0.100-preview5-011568"
},
"msbuild-sdks": {
"Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.18516.5"
"Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.19309.1"
}
}