move to arcade

Parent: e2a658de3b
Commit: dc64148b5e
@@ -1,17 +0,0 @@
<!-- Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. -->
<Project>
  <Import Project="build\NuGet.props"/>
  <Import Project="build\Versions.props"/>

  <PropertyGroup>
    <Configuration Condition="'$(Configuration)' == ''">Debug</Configuration>

    <RepoRoot>$(MSBuildThisFileDirectory)</RepoRoot>
    <RepoToolsetDir>$(NuGetPackageRoot)roslyntools.repotoolset\$(RoslynToolsRepoToolsetVersion)\tools\</RepoToolsetDir>

    <RepositoryUrl>https://github.com/dotnet/platform-compat</RepositoryUrl>
    <PackageProjectUrl>$(RepositoryUrl)</PackageProjectUrl>

    <VSSDKTargetPlatformRegRootSuffix>Exp</VSSDKTargetPlatformRegRootSuffix>
  </PropertyGroup>
</Project>
@@ -1,4 +1,4 @@
 @echo off
-powershell -ExecutionPolicy ByPass %~dp0build\Build.ps1 -restore -build -test -deploy -pack -log %*
+powershell -ExecutionPolicy ByPass %~dp0eng\common\Build.ps1 -restore -build -test -deploy -pack -binaryLog %*
 exit /b %ErrorLevel%
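With this change the Windows wrapper forwards to the Arcade entry point at eng\common\Build.ps1, and extra switches or MSBuild properties still pass straight through to it. A minimal usage sketch (the -configuration switch is defined by eng\common\Build.ps1 later in this diff; the property name here is purely illustrative):

    Build.cmd -configuration Release /p:ExampleProperty=Value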
@@ -1,3 +0,0 @@
@echo off
powershell -ExecutionPolicy ByPass %~dp0Build.ps1 -restore -build -test -sign -pack -ci %*
exit /b %ErrorLevel%
@@ -1,10 +0,0 @@
<!-- Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. -->
<Project>
  <PropertyGroup>
    <!-- Respect environment variable for the NuGet Packages Root if set; otherwise, use the current default location -->
    <NuGetPackageRoot Condition="'$(NuGetPackageRoot)' == ''">$(NUGET_PACKAGES)</NuGetPackageRoot>
    <NuGetPackageRoot Condition="'$(NuGetPackageRoot)' == '' AND '$(OS)' == 'Windows_NT'">$(UserProfile)\.nuget\packages\</NuGetPackageRoot>
    <NuGetPackageRoot Condition="'$(NuGetPackageRoot)' == '' AND '$(OS)' != 'Windows_NT'">$([System.Environment]::GetFolderPath(SpecialFolder.Personal))\.nuget\packages\</NuGetPackageRoot>
    <NuGetPackageRoot Condition="!HasTrailingSlash('$(NuGetPackageRoot)')">$(NuGetPackageRoot)\</NuGetPackageRoot>
  </PropertyGroup>
</Project>
@@ -1,20 +0,0 @@
{
  "sign": [
    {
      "certificate": "MicrosoftSHA2",
      "strongName": "MsSharedLib72",
      "values": [
        "bin/Microsoft.DotNet.Analyzers.Compatibility/netstandard1.3/Microsoft.DotNet.Analyzers.Compatibility.dll",
        "bin/Microsoft.DotNet.Analyzers.Compatibility/netstandard1.3/*/Microsoft.DotNet.Analyzers.Compatibility.resources.dll",
        "bin/Microsoft.DotNet.Csv/netstandard1.0/Microsoft.DotNet.Csv.dll"
      ]
    },
    {
      "certificate": "NuGet",
      "strongName": null,
      "values": [
        "bin/Microsoft.DotNet.Analyzers.Compatibility/*.nupkg"
      ]
    }
  ]
}
@@ -1,10 +0,0 @@
<!-- Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. -->
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net462</TargetFramework>
    <RestoreSources>https://dotnet.myget.org/F/roslyn-tools/api/v3/index.json</RestoreSources>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="RoslynTools.RepoToolset" Version="$(RoslynToolsRepoToolsetVersion)" />
  </ItemGroup>
</Project>
@@ -1,33 +0,0 @@
<Project DefaultTargets="Build" TreatAsLocalProperty="SolutionPath">
  <!--
    Optional parameters:
      SolutionPath      Path to the solution to build
      Configuration     Build configuration: "Debug", "Release", etc.
      CIBuild           "true" if building on CI server
      Restore           "true" to restore toolset and solution
      Build             "true" to build solution
      Rebuild           "true" to rebuild solution
      Deploy            "true" to deploy assets (e.g. VSIXes) built in this repo
      DeployDeps        "true" to deploy assets (e.g. VSIXes) this repo depeends on.
      Test              "true" to run tests
      IntegrationTest   "true" to run integration tests
      Sign              "true" to sign built binaries
      Pack              "true" to build NuGet packages
      Properties        List of properties to pass to each build phase ("Name=Value;Name=Value;...")
  -->
  <PropertyGroup>
    <SolutionPath Condition="'$(SolutionPath)' == ''">$(MSBuildThisFileDirectory)..\platform-compat.sln</SolutionPath>
  </PropertyGroup>

  <Import Project="..\Directory.build.props"/>

  <Target Name="Build">
    <MSBuild Projects="Toolset.proj"
             Targets="Restore"
             Properties="BaseIntermediateOutputPath=$(MSBuildThisFileDirectory)..\artifacts\toolset\;ExcludeRestorePackageImports=true;DeployDeps=$(DeployDeps)"
             Condition="'$(Restore)' == 'true'"/>

    <MSBuild Projects="$(RepoToolsetDir)Build.proj"
             Properties="SolutionPath=$(SolutionPath);Configuration=$(Configuration);CIBuild=$(CIBuild);Restore=$(Restore);Build=$(Build);Rebuild=$(Rebuild);Deploy=$(Deploy);Test=$(Test);IntegrationTest=$(IntegrationTest);Sign=$(Sign);Pack=$(Pack);Properties=$(Properties)" />
  </Target>
</Project>
build/build.ps1 (196 lines)
@@ -1,196 +0,0 @@
[CmdletBinding(PositionalBinding=$false)]
|
||||
Param(
|
||||
[string] $configuration = "Debug",
|
||||
[string] $solution = "",
|
||||
[string] $verbosity = "minimal",
|
||||
[switch] $restore,
|
||||
[switch] $deployDeps,
|
||||
[switch] $build,
|
||||
[switch] $rebuild,
|
||||
[switch] $deploy,
|
||||
[switch] $test,
|
||||
[switch] $sign,
|
||||
[switch] $pack,
|
||||
[switch] $ci,
|
||||
[switch] $prepareMachine,
|
||||
[switch] $log,
|
||||
[switch] $help,
|
||||
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
|
||||
)
|
||||
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
|
||||
set-strictmode -version 2.0
|
||||
$ErrorActionPreference = "Stop"
|
||||
|
||||
function Print-Usage() {
|
||||
Write-Host "Common settings:"
|
||||
Write-Host " -configuration <value> Build configuration Debug, Release"
|
||||
Write-Host " -verbosity <value> Msbuild verbosity (q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic])"
|
||||
Write-Host " -help Print help and exit"
|
||||
Write-Host ""
|
||||
|
||||
Write-Host "Actions:"
|
||||
Write-Host " -restore Restore dependencies"
|
||||
Write-Host " -build Build solution"
|
||||
Write-Host " -rebuild Rebuild solution"
|
||||
Write-Host " -deploy Deploy built VSIXes"
|
||||
Write-Host " -deployDeps Deploy dependencies (Roslyn VSIXes for integration tests)"
|
||||
Write-Host " -test Run all unit tests in the solution"
|
||||
Write-Host " -sign Sign build outputs"
|
||||
Write-Host " -pack Package build outputs into NuGet packages and Willow components"
|
||||
Write-Host ""
|
||||
|
||||
Write-Host "Advanced settings:"
|
||||
Write-Host " -solution <value> Path to solution to build"
|
||||
Write-Host " -ci Set when running on CI server"
|
||||
Write-Host " -log Enable logging (by default on CI)"
|
||||
Write-Host " -prepareMachine Prepare machine for CI run"
|
||||
Write-Host ""
|
||||
Write-Host "Command line arguments not listed above are passed thru to msbuild."
|
||||
Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)."
|
||||
}
|
||||
|
||||
if ($help -or (($properties -ne $null) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) {
|
||||
Print-Usage
|
||||
exit 0
|
||||
}
|
||||
|
||||
function Create-Directory([string[]] $path) {
|
||||
if (!(Test-Path $path)) {
|
||||
New-Item -path $path -force -itemType "Directory" | Out-Null
|
||||
}
|
||||
}
|
||||
|
||||
function GetVersion([string] $name) {
|
||||
foreach ($propertyGroup in $VersionsXml.Project.PropertyGroup) {
|
||||
if (Get-Member -inputObject $propertyGroup -name $name) {
|
||||
return $propertyGroup.$name
|
||||
}
|
||||
}
|
||||
|
||||
throw "Failed to find $name in Versions.props"
|
||||
}
|
||||
|
||||
function LocateVisualStudio {
|
||||
if ($InVSEnvironment) {
|
||||
return Join-Path $env:VS150COMNTOOLS "..\.."
|
||||
}
|
||||
|
||||
$vswhereVersion = GetVersion("VSWhereVersion")
|
||||
$vsWhereDir = Join-Path $ToolsRoot "vswhere\$vswhereVersion"
|
||||
$vsWhereExe = Join-Path $vsWhereDir "vswhere.exe"
|
||||
|
||||
if (!(Test-Path $vsWhereExe)) {
|
||||
Create-Directory $vsWhereDir
|
||||
Write-Host "Downloading vswhere"
|
||||
Invoke-WebRequest "http://github.com/Microsoft/vswhere/releases/download/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
|
||||
}
|
||||
|
||||
$vsInstallDir = & $vsWhereExe -latest -all -prerelease -property installationPath -requires Microsoft.Component.MSBuild -requires Microsoft.VisualStudio.Component.VSSDK -requires Microsoft.Net.Component.4.6.TargetingPack -requires Microsoft.VisualStudio.Component.Roslyn.Compiler
|
||||
|
||||
Write-Host "VSWhere returned '$vsInstallDir'"
|
||||
if (!(Test-Path $vsInstallDir)) {
|
||||
throw "Failed to locate Visual Studio (exit code '$lastExitCode')."
|
||||
}
|
||||
|
||||
return $vsInstallDir
|
||||
}
|
||||
|
||||
function InstallToolset {
|
||||
if (!(Test-Path $ToolsetBuildProj)) {
|
||||
& $MsbuildExe $ToolsetRestoreProj /t:restore /m /nologo /clp:None /warnaserror /v:quiet /p:DeployDeps=$deployDeps /p:NuGetPackageRoot=$NuGetPackageRoot /p:BaseIntermediateOutputPath=$ToolsetDir /p:ExcludeRestorePackageImports=true
|
||||
}
|
||||
}
|
||||
|
||||
function Build {
|
||||
if ($ci -or $log) {
|
||||
Create-Directory($logDir)
|
||||
$logCmd = "/bl:" + (Join-Path $LogDir "Build.binlog")
|
||||
} else {
|
||||
$logCmd = ""
|
||||
}
|
||||
|
||||
$nodeReuse = !$ci
|
||||
|
||||
& $MsbuildExe $ToolsetBuildProj /m /nologo /clp:Summary /nodeReuse:$nodeReuse /warnaserror /v:$verbosity $logCmd /p:Configuration=$configuration /p:SolutionPath=$solution /p:Restore=$restore /p:DeployDeps=$deployDeps /p:Build=$build /p:Rebuild=$rebuild /p:Deploy=$deploy /p:Test=$test /p:Sign=$sign /p:Pack=$pack /p:CIBuild=$ci /p:NuGetPackageRoot=$NuGetPackageRoot $properties
|
||||
}
|
||||
|
||||
function Stop-Processes() {
|
||||
Write-Host "Killing running build processes..."
|
||||
Get-Process -Name "msbuild" -ErrorAction SilentlyContinue | Stop-Process
|
||||
Get-Process -Name "vbcscompiler" -ErrorAction SilentlyContinue | Stop-Process
|
||||
}
|
||||
|
||||
function Clear-NuGetCache() {
|
||||
# clean nuget packages -- necessary to avoid mismatching versions of swix microbuild build plugin and VSSDK on Jenkins
|
||||
$nugetRoot = (Join-Path $env:USERPROFILE ".nuget\packages")
|
||||
if (Test-Path $nugetRoot) {
|
||||
Remove-Item $nugetRoot -Recurse -Force
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
$InVSEnvironment = !($env:VS150COMNTOOLS -eq $null) -and (Test-Path $env:VS150COMNTOOLS)
|
||||
$RepoRoot = Join-Path $PSScriptRoot "..\"
|
||||
$ToolsRoot = Join-Path $RepoRoot ".tools"
|
||||
$ToolsetRestoreProj = Join-Path $PSScriptRoot "Toolset.proj"
|
||||
$ArtifactsDir = Join-Path $RepoRoot "artifacts"
|
||||
$ToolsetDir = Join-Path $ArtifactsDir "toolset"
|
||||
$LogDir = Join-Path (Join-Path $ArtifactsDir $configuration) "log"
|
||||
$TempDir = Join-Path (Join-Path $ArtifactsDir $configuration) "tmp"
|
||||
[xml]$VersionsXml = Get-Content(Join-Path $PSScriptRoot "Versions.props")
|
||||
|
||||
if ($solution -eq "") {
|
||||
$solution = @(gci(Join-Path $RepoRoot "*.sln"))[0]
|
||||
}
|
||||
|
||||
if ($env:NUGET_PACKAGES -ne $null) {
|
||||
$NuGetPackageRoot = $env:NUGET_PACKAGES.TrimEnd("\") + "\"
|
||||
} else {
|
||||
$NuGetPackageRoot = Join-Path $env:UserProfile ".nuget\packages\"
|
||||
}
|
||||
|
||||
$ToolsetVersion = GetVersion("RoslynToolsRepoToolsetVersion")
|
||||
$ToolsetBuildProj = Join-Path $NuGetPackageRoot "RoslynTools.RepoToolset\$ToolsetVersion\tools\Build.proj"
|
||||
|
||||
$vsInstallDir = LocateVisualStudio
|
||||
$MsbuildExe = Join-Path $vsInstallDir "MSBuild\15.0\Bin\msbuild.exe"
|
||||
|
||||
if ($ci) {
|
||||
Create-Directory $TempDir
|
||||
$env:TEMP = $TempDir
|
||||
$env:TMP = $TempDir
|
||||
|
||||
Write-Host "Using $MsbuildExe"
|
||||
}
|
||||
|
||||
if (!$InVSEnvironment) {
|
||||
$env:VS150COMNTOOLS = Join-Path $vsInstallDir "Common7\Tools\"
|
||||
$env:VSSDK150Install = Join-Path $vsInstallDir "VSSDK\"
|
||||
$env:VSSDKInstall = Join-Path $vsInstallDir "VSSDK\"
|
||||
}
|
||||
|
||||
# Preparation of a CI machine
|
||||
if ($prepareMachine) {
|
||||
Clear-NuGetCache
|
||||
}
|
||||
|
||||
if ($restore) {
|
||||
InstallToolset
|
||||
}
|
||||
|
||||
Build
|
||||
exit $lastExitCode
|
||||
}
|
||||
catch {
|
||||
Write-Host $_
|
||||
Write-Host $_.Exception
|
||||
Write-Host $_.ScriptStackTrace
|
||||
exit 1
|
||||
}
|
||||
finally {
|
||||
Pop-Location
|
||||
if ($ci -and $prepareMachine) {
|
||||
Stop-Processes
|
||||
}
|
||||
}
@@ -1,58 +0,0 @@
# Publishes our build assets to nuget, myget, dotnet/versions, etc ..
|
||||
#
|
||||
# The publish operation is best visioned as an optional yet repeatable post build operation. It can be
|
||||
# run anytime after build or automatically as a post build step. But it is an operation that focuses on
|
||||
# build outputs and hence can't rely on source code from the build being available
|
||||
#
|
||||
# Repeatable is important here because we have to assume that publishes can and will fail with some
|
||||
# degree of regularity.
|
||||
[CmdletBinding(PositionalBinding = $false)]
|
||||
Param(
|
||||
# Standard options
|
||||
[string]$nugetPath = "",
|
||||
[string]$uploadUrl = "",
|
||||
[switch]$test,
|
||||
|
||||
# Credentials
|
||||
[string]$apiKey = ""
|
||||
)
|
||||
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
|
||||
Set-StrictMode -version 2.0
|
||||
$ErrorActionPreference = "Stop"
|
||||
|
||||
# Publish the nuget packages to the specified URL
|
||||
function Publish-Nuget() {
|
||||
Write-Host "Publishing nuget to $uploadUrl"
|
||||
if (-not (Test-Path $nugetPath)) {
|
||||
throw "nuget $nugetPath does not exist"
|
||||
}
|
||||
|
||||
Write-Host " Publishing '$nugetPath'"
|
||||
if (-not $test) {
|
||||
$response = Invoke-WebRequest -Uri $uploadUrl -Headers @{"X-NuGet-ApiKey" = $apiKey} -ContentType 'multipart/form-data' -InFile $nugetPath -Method Post -UseBasicParsing
|
||||
if ($response.StatusCode -ne 201) {
|
||||
throw "Failed to upload nuget package: $nugetPath. Upload failed with Status code: $response.StatusCode"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
if ($nugetPath -eq "") {
|
||||
Write-Host "Must provide the path to the nuget package with -nugetPath"
|
||||
exit 1
|
||||
}
|
||||
|
||||
if ($uploadUrl -eq "") {
|
||||
Write-Host "Must provide the URL to upload the nuget package to -uploadUrl"
|
||||
exit 1
|
||||
}
|
||||
|
||||
Publish-Nuget
|
||||
}
|
||||
catch {
|
||||
Write-Host $_
|
||||
Write-Host $_.Exception
|
||||
Write-Host $_.ScriptStackTrace
|
||||
exit 1
|
||||
}
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<Dependencies>
  <ProductDependencies></ProductDependencies>
  <ToolsetDependencies>
    <Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="1.0.0-beta.19157.4">
      <Uri>https://github.com/dotnet/arcade</Uri>
      <Sha>951b378f6b173d58c6d93ef33ce7ca83c2ce5ec5</Sha>
    </Dependency>
  </ToolsetDependencies>
</Dependencies>
@@ -1,14 +1,13 @@
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <PropertyGroup>
    <VersionBase>0.2.13</VersionBase>
    <VersionPrefix>0.2.13</VersionPrefix>
    <PreReleaseVersionLabel>alpha</PreReleaseVersionLabel>

    <!-- Opt-in repo features -->
    <UsingToolVSSDK>true</UsingToolVSSDK>

    <!-- Toolset -->
    <RoslynToolsRepoToolsetVersion>1.0.0-beta-62705-01</RoslynToolsRepoToolsetVersion>
    <VSWhereVersion>2.3.7</VSWhereVersion>
    <UsingToolNuGetRepack>true</UsingToolNuGetRepack>
    <UsingToolSymbolUploader>true</UsingToolSymbolUploader>

    <!-- MSBuild -->
    <MicrosoftBuildVersion>15.3.409</MicrosoftBuildVersion>
@@ -36,9 +35,8 @@
    <MicrosoftWin32RegistryVersion>4.4.0-preview1-25205-01</MicrosoftWin32RegistryVersion>
    <NETStandardLibraryVersion>2.0.0-preview1-25207-01</NETStandardLibraryVersion>
    <SystemSecurityCryptographyCngVersion>4.4.0-beta-24913-02</SystemSecurityCryptographyCngVersion>

  </PropertyGroup>

  <PropertyGroup>
    <!-- Feeds to use to restore dependent packages from. -->
    <RestoreSources>
@@ -0,0 +1,2 @@
@echo off
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"
@@ -0,0 +1,50 @@
<!--
|
||||
This MSBuild file is intended to be used as the body of the default
|
||||
publishing release pipeline. The release pipeline will use this file
|
||||
to invoke the PushToStaticFeed task that will read the build asset
|
||||
manifest and publish the assets described in the manifest to
|
||||
informed target feeds.
|
||||
-->
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>netcoreapp2.1</TargetFramework>
|
||||
</PropertyGroup>
|
||||
|
||||
<Import Project="$(MSBuildThisFileDirectory)DefaultVersions.props" Condition="Exists('$(MSBuildThisFileDirectory)DefaultVersions.props')" />
|
||||
<Import Project="$(MSBuildThisFileDirectory)Versions.props" Condition="Exists('$(MSBuildThisFileDirectory)Versions.props')" />
|
||||
|
||||
<Import Project="$(NuGetPackageRoot)microsoft.dotnet.build.tasks.feed\$(MicrosoftDotNetBuildTasksFeedVersion)\build\Microsoft.DotNet.Build.Tasks.Feed.targets" />
|
||||
|
||||
<Target Name="PublishToFeed">
|
||||
<Error Condition="'$(TargetStaticFeed)' == ''" Text="TargetStaticFeed: Target feed for publishing assets wasn't provided." />
|
||||
<Error Condition="'$(AccountKeyToStaticFeed)' == ''" Text="AccountKeyToStaticFeed: Account key for target feed wasn't provided." />
|
||||
<Error Condition="'$(ManifestsBasePath)' == ''" Text="Full path to asset manifests directory wasn't provided." />
|
||||
<Error Condition="'$(BlobBasePath)' == '' AND '$(PackageBasePath)' == ''" Text="A valid full path to BlobBasePath of PackageBasePath is required." />
|
||||
|
||||
<ItemGroup>
|
||||
<!-- Include all manifests found in the manifest folder. -->
|
||||
<ManifestFiles Include="$(ManifestsBasePath)*.xml" />
|
||||
</ItemGroup>
|
||||
|
||||
<Error Condition="'@(ManifestFiles)' == ''" Text="No manifest file was found in the provided path: $(ManifestsBasePath)" />
|
||||
|
||||
<!-- Iterate publishing assets from each manifest file. -->
|
||||
<PushArtifactsInManifestToFeed
|
||||
ExpectedFeedUrl="$(TargetStaticFeed)"
|
||||
AccountKey="$(AccountKeyToStaticFeed)"
|
||||
BARBuildId="$(BARBuildId)"
|
||||
MaestroApiEndpoint="$(MaestroApiEndpoint)"
|
||||
BuildAssetRegistryToken="$(BuildAssetRegistryToken)"
|
||||
Overwrite="$(OverrideAssetsWithSameName)"
|
||||
PassIfExistingItemIdentical="$(PassIfExistingItemIdentical)"
|
||||
MaxClients="$(MaxParallelUploads)"
|
||||
UploadTimeoutInMinutes="$(MaxUploadTimeoutInMinutes)"
|
||||
AssetManifestPath="%(ManifestFiles.Identity)"
|
||||
BlobAssetsBasePath="$(BlobBasePath)"
|
||||
PackageAssetsBasePath="$(PackageBasePath)" />
|
||||
</Target>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.DotNet.Build.Tasks.Feed" Version="$(MicrosoftDotNetBuildTasksFeedVersion)" />
|
||||
</ItemGroup>
|
||||
</Project>
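Per the Error checks in the PublishToFeed target above, the release pipeline is expected to pass the target feed, the account key, and the asset-manifest/package paths as MSBuild properties. A minimal sketch with placeholder values (the project's file name is not visible in this view):

    dotnet msbuild <path-to-this-project> /t:PublishToFeed /p:TargetStaticFeed=<feed-url> /p:AccountKeyToStaticFeed=<key> /p:ManifestsBasePath=<manifests-dir>/ /p:PackageBasePath=<packages-dir>/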
@@ -0,0 +1,28 @@
# Don't touch this folder
|
||||
|
||||
uuuuuuuuuuuuuuuuuuuu
|
||||
u" uuuuuuuuuuuuuuuuuu "u
|
||||
u" u$$$$$$$$$$$$$$$$$$$$u "u
|
||||
u" u$$$$$$$$$$$$$$$$$$$$$$$$u "u
|
||||
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
|
||||
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
|
||||
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
|
||||
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
|
||||
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
|
||||
$ $$$" ... "$... ...$" ... "$$$ ... "$$$ $
|
||||
$ $$$u `"$$$$$$$ $$$ $$$$$ $$ $$$ $$$ $
|
||||
$ $$$$$$uu "$$$$ $$$ $$$$$ $$ """ u$$$ $
|
||||
$ $$$""$$$ $$$$ $$$u "$$$" u$$ $$$$$$$$ $
|
||||
$ $$$$....,$$$$$..$$$$$....,$$$$..$$$$$$$$ $
|
||||
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
|
||||
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
|
||||
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
|
||||
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
|
||||
"u "$$$$$$$$$$$$$$$$$$$$$$$$" u"
|
||||
"u "$$$$$$$$$$$$$$$$$$$$" u"
|
||||
"u """""""""""""""""" u"
|
||||
""""""""""""""""""""
|
||||
|
||||
!!! Changes made in this directory are subject to being overwritten by automation !!!
|
||||
|
||||
The files in this directory are shared by all Arcade repos and managed by automation. If you need to make changes to these files, open an issue or submit a pull request to https://github.com/dotnet/arcade first.
@@ -0,0 +1,134 @@
[CmdletBinding(PositionalBinding=$false)]
|
||||
Param(
|
||||
[string][Alias('c')]$configuration = "Debug",
|
||||
[string] $projects,
|
||||
[string][Alias('v')]$verbosity = "minimal",
|
||||
[string] $msbuildEngine = $null,
|
||||
[bool] $warnAsError = $true,
|
||||
[bool] $nodeReuse = $true,
|
||||
[switch][Alias('r')]$restore,
|
||||
[switch] $deployDeps,
|
||||
[switch][Alias('b')]$build,
|
||||
[switch] $rebuild,
|
||||
[switch] $deploy,
|
||||
[switch][Alias('t')]$test,
|
||||
[switch] $integrationTest,
|
||||
[switch] $performanceTest,
|
||||
[switch] $sign,
|
||||
[switch] $pack,
|
||||
[switch] $publish,
|
||||
[switch][Alias('bl')]$binaryLog,
|
||||
[switch] $ci,
|
||||
[switch] $prepareMachine,
|
||||
[switch] $help,
|
||||
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
|
||||
)
|
||||
|
||||
. $PSScriptRoot\tools.ps1
|
||||
|
||||
function Print-Usage() {
|
||||
Write-Host "Common settings:"
|
||||
Write-Host " -configuration <value> Build configuration: 'Debug' or 'Release' (short: -c)"
|
||||
Write-Host " -verbosity <value> Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
|
||||
Write-Host " -binaryLog Output binary log (short: -bl)"
|
||||
Write-Host " -help Print help and exit"
|
||||
Write-Host ""
|
||||
|
||||
Write-Host "Actions:"
|
||||
Write-Host " -restore Restore dependencies (short: -r)"
|
||||
Write-Host " -build Build solution (short: -b)"
|
||||
Write-Host " -rebuild Rebuild solution"
|
||||
Write-Host " -deploy Deploy built VSIXes"
|
||||
Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)"
|
||||
Write-Host " -test Run all unit tests in the solution (short: -t)"
|
||||
Write-Host " -integrationTest Run all integration tests in the solution"
|
||||
Write-Host " -performanceTest Run all performance tests in the solution"
|
||||
Write-Host " -pack Package build outputs into NuGet packages and Willow components"
|
||||
Write-Host " -sign Sign build outputs"
|
||||
Write-Host " -publish Publish artifacts (e.g. symbols)"
|
||||
Write-Host ""
|
||||
|
||||
Write-Host "Advanced settings:"
|
||||
Write-Host " -projects <value> Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)"
|
||||
Write-Host " -ci Set when running on CI server"
|
||||
Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
|
||||
Write-Host " -warnAsError <value> Sets warnaserror msbuild parameter ('true' or 'false')"
|
||||
Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
|
||||
Write-Host ""
|
||||
|
||||
Write-Host "Command line arguments not listed above are passed thru to msbuild."
|
||||
Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)."
|
||||
}
|
||||
|
||||
function InitializeCustomToolset {
|
||||
if (-not $restore) {
|
||||
return
|
||||
}
|
||||
|
||||
$script = Join-Path $EngRoot "restore-toolset.ps1"
|
||||
|
||||
if (Test-Path $script) {
|
||||
. $script
|
||||
}
|
||||
}
|
||||
|
||||
function Build {
|
||||
$toolsetBuildProj = InitializeToolset
|
||||
InitializeCustomToolset
|
||||
|
||||
$bl = if ($binaryLog) { "/bl:" + (Join-Path $LogDir "Build.binlog") } else { "" }
|
||||
|
||||
if ($projects) {
|
||||
# Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons.
|
||||
# Explicitly set the type as string[] because otherwise PowerShell would make this char[] if $properties is empty.
|
||||
[string[]] $msbuildArgs = $properties
|
||||
$msbuildArgs += "/p:Projects=$projects"
|
||||
$properties = $msbuildArgs
|
||||
}
|
||||
|
||||
MSBuild $toolsetBuildProj `
|
||||
$bl `
|
||||
/p:Configuration=$configuration `
|
||||
/p:RepoRoot=$RepoRoot `
|
||||
/p:Restore=$restore `
|
||||
/p:DeployDeps=$deployDeps `
|
||||
/p:Build=$build `
|
||||
/p:Rebuild=$rebuild `
|
||||
/p:Deploy=$deploy `
|
||||
/p:Test=$test `
|
||||
/p:Pack=$pack `
|
||||
/p:IntegrationTest=$integrationTest `
|
||||
/p:PerformanceTest=$performanceTest `
|
||||
/p:Sign=$sign `
|
||||
/p:Publish=$publish `
|
||||
@properties
|
||||
}
|
||||
|
||||
try {
|
||||
if ($help -or (($null -ne $properties) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) {
|
||||
Print-Usage
|
||||
exit 0
|
||||
}
|
||||
|
||||
if ($ci) {
|
||||
$binaryLog = $true
|
||||
$nodeReuse = $false
|
||||
}
|
||||
|
||||
# Import custom tools configuration, if present in the repo.
|
||||
# Note: Import in global scope so that the script set top-level variables without qualification.
|
||||
$configureToolsetScript = Join-Path $EngRoot "configure-toolset.ps1"
|
||||
if (Test-Path $configureToolsetScript) {
|
||||
. $configureToolsetScript
|
||||
}
|
||||
|
||||
Build
|
||||
}
|
||||
catch {
|
||||
Write-Host $_
|
||||
Write-Host $_.Exception
|
||||
Write-Host $_.ScriptStackTrace
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
ExitWithExitCode 0
@@ -0,0 +1,218 @@
#!/usr/bin/env bash
|
||||
|
||||
# Stop script if unbound variable found (use ${var:-} if intentional)
|
||||
set -u
|
||||
|
||||
# Stop script if command returns non-zero exit code.
|
||||
# Prevents hidden errors caused by missing error code propagation.
|
||||
set -e
|
||||
|
||||
usage()
|
||||
{
|
||||
echo "Common settings:"
|
||||
echo " --configuration <value> Build configuration: 'Debug' or 'Release' (short: -c)"
|
||||
echo " --verbosity <value> Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
|
||||
echo " --binaryLog Create MSBuild binary log (short: -bl)"
|
||||
echo " --help Print help and exit (short: -h)"
|
||||
echo ""
|
||||
|
||||
echo "Actions:"
|
||||
echo " --restore Restore dependencies (short: -r)"
|
||||
echo " --build Build solution (short: -b)"
|
||||
echo " --rebuild Rebuild solution"
|
||||
echo " --test Run all unit tests in the solution (short: -t)"
|
||||
echo " --integrationTest Run all integration tests in the solution"
|
||||
echo " --performanceTest Run all performance tests in the solution"
|
||||
echo " --pack Package build outputs into NuGet packages and Willow components"
|
||||
echo " --sign Sign build outputs"
|
||||
echo " --publish Publish artifacts (e.g. symbols)"
|
||||
echo ""
|
||||
|
||||
echo "Advanced settings:"
|
||||
echo " --projects <value> Project or solution file(s) to build"
|
||||
echo " --ci Set when running on CI server"
|
||||
echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
|
||||
echo " --nodeReuse <value> Sets nodereuse msbuild parameter ('true' or 'false')"
|
||||
echo " --warnAsError <value> Sets warnaserror msbuild parameter ('true' or 'false')"
|
||||
echo ""
|
||||
echo "Command line arguments starting with '/p:' are passed through to MSBuild."
|
||||
echo "Arguments can also be passed in with a single hyphen."
|
||||
}
|
||||
|
||||
source="${BASH_SOURCE[0]}"
|
||||
|
||||
# resolve $source until the file is no longer a symlink
|
||||
while [[ -h "$source" ]]; do
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
source="$(readlink "$source")"
|
||||
# if $source was a relative symlink, we need to resolve it relative to the path where the
|
||||
# symlink file was located
|
||||
[[ $source != /* ]] && source="$scriptroot/$source"
|
||||
done
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
|
||||
restore=false
|
||||
build=false
|
||||
rebuild=false
|
||||
test=false
|
||||
integration_test=false
|
||||
performance_test=false
|
||||
pack=false
|
||||
publish=false
|
||||
sign=false
|
||||
public=false
|
||||
ci=false
|
||||
|
||||
warn_as_error=true
|
||||
node_reuse=true
|
||||
binary_log=false
|
||||
|
||||
projects=''
|
||||
configuration='Debug'
|
||||
prepare_machine=false
|
||||
verbosity='minimal'
|
||||
|
||||
properties=''
|
||||
|
||||
while [[ $# > 0 ]]; do
|
||||
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
|
||||
case "$opt" in
|
||||
-help|-h)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
-configuration|-c)
|
||||
configuration=$2
|
||||
shift
|
||||
;;
|
||||
-verbosity|-v)
|
||||
verbosity=$2
|
||||
shift
|
||||
;;
|
||||
-binarylog|-bl)
|
||||
binary_log=true
|
||||
;;
|
||||
-restore|-r)
|
||||
restore=true
|
||||
;;
|
||||
-build|-b)
|
||||
build=true
|
||||
;;
|
||||
-rebuild)
|
||||
rebuild=true
|
||||
;;
|
||||
-pack)
|
||||
pack=true
|
||||
;;
|
||||
-test|-t)
|
||||
test=true
|
||||
;;
|
||||
-integrationtest)
|
||||
integration_test=true
|
||||
;;
|
||||
-performancetest)
|
||||
performance_test=true
|
||||
;;
|
||||
-sign)
|
||||
sign=true
|
||||
;;
|
||||
-publish)
|
||||
publish=true
|
||||
;;
|
||||
-preparemachine)
|
||||
prepare_machine=true
|
||||
;;
|
||||
-projects)
|
||||
projects=$2
|
||||
shift
|
||||
;;
|
||||
-ci)
|
||||
ci=true
|
||||
;;
|
||||
-warnaserror)
|
||||
warn_as_error=$2
|
||||
shift
|
||||
;;
|
||||
-nodereuse)
|
||||
node_reuse=$2
|
||||
shift
|
||||
;;
|
||||
/p:*)
|
||||
properties="$properties $1"
|
||||
;;
|
||||
/m:*)
|
||||
properties="$properties $1"
|
||||
;;
|
||||
/bl:*)
|
||||
properties="$properties $1"
|
||||
;;
|
||||
*)
|
||||
echo "Invalid argument: $1"
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
shift
|
||||
done
|
||||
|
||||
if [[ "$ci" == true ]]; then
|
||||
binary_log=true
|
||||
node_reuse=false
|
||||
fi
|
||||
|
||||
. "$scriptroot/tools.sh"
|
||||
|
||||
function InitializeCustomToolset {
|
||||
local script="$eng_root/restore-toolset.sh"
|
||||
|
||||
if [[ -a "$script" ]]; then
|
||||
. "$script"
|
||||
fi
|
||||
}
|
||||
|
||||
function Build {
|
||||
InitializeToolset
|
||||
InitializeCustomToolset
|
||||
|
||||
if [[ ! -z "$projects" ]]; then
|
||||
properties="$properties /p:Projects=$projects"
|
||||
fi
|
||||
|
||||
local bl=""
|
||||
if [[ "$binary_log" == true ]]; then
|
||||
bl="/bl:\"$log_dir/Build.binlog\""
|
||||
fi
|
||||
|
||||
MSBuild $_InitializeToolset \
|
||||
$bl \
|
||||
/p:Configuration=$configuration \
|
||||
/p:RepoRoot="$repo_root" \
|
||||
/p:Restore=$restore \
|
||||
/p:Build=$build \
|
||||
/p:Rebuild=$rebuild \
|
||||
/p:Test=$test \
|
||||
/p:Pack=$pack \
|
||||
/p:IntegrationTest=$integration_test \
|
||||
/p:PerformanceTest=$performance_test \
|
||||
/p:Sign=$sign \
|
||||
/p:Publish=$publish \
|
||||
$properties
|
||||
|
||||
ExitWithExitCode 0
|
||||
}
|
||||
|
||||
# Import custom tools configuration, if present in the repo.
|
||||
configure_toolset_script="$eng_root/configure-toolset.sh"
|
||||
if [[ -a "$configure_toolset_script" ]]; then
|
||||
. "$configure_toolset_script"
|
||||
fi
|
||||
|
||||
# TODO: https://github.com/dotnet/arcade/issues/1468
|
||||
# Temporary workaround to avoid breaking change.
|
||||
# Remove once repos are updated.
|
||||
if [[ -n "${useInstalledDotNetCli:-}" ]]; then
|
||||
use_installed_dotnet_cli="$useInstalledDotNetCli"
|
||||
fi
|
||||
|
||||
Build
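The bash entry point mirrors the PowerShell one, and the 16-line wrapper added in the next hunk simply chains the common CI actions through it. For local use, a subset of actions can be run directly; a sketch, assuming the script lands at eng/common/build.sh alongside Build.ps1:

    ./eng/common/build.sh --restore --build --test --configuration Release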
@@ -0,0 +1,16 @@
#!/usr/bin/env bash
|
||||
|
||||
source="${BASH_SOURCE[0]}"
|
||||
|
||||
# resolve $SOURCE until the file is no longer a symlink
|
||||
while [[ -h $source ]]; do
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
source="$(readlink "$source")"
|
||||
|
||||
# if $source was a relative symlink, we need to resolve it relative to the path where
|
||||
# the symlink file was located
|
||||
[[ $source != /* ]] && source="$scriptroot/$source"
|
||||
done
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
|
||||
. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@
@@ -0,0 +1,41 @@
set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../)
|
||||
set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot)
|
||||
set(CLR_CMAKE_PLATFORM_ANDROID "Android")
|
||||
|
||||
set(CMAKE_SYSTEM_NAME Linux)
|
||||
set(CMAKE_SYSTEM_VERSION 1)
|
||||
set(CMAKE_SYSTEM_PROCESSOR arm)
|
||||
|
||||
## Specify the toolchain
|
||||
set(TOOLCHAIN "arm-linux-androideabi")
|
||||
set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN})
|
||||
set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-)
|
||||
|
||||
find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang)
|
||||
find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++)
|
||||
find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang)
|
||||
find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar)
|
||||
find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar)
|
||||
find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy)
|
||||
find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
|
||||
|
||||
add_compile_options(--sysroot=${CROSS_ROOTFS})
|
||||
add_compile_options(-fPIE)
|
||||
add_compile_options(-mfloat-abi=soft)
|
||||
include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/)
|
||||
include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/arm-linux-androideabi/)
|
||||
|
||||
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
|
||||
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
|
||||
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
|
||||
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie")
|
||||
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
|
||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
|
||||
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
|
||||
|
||||
set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
@@ -0,0 +1,42 @@
set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../)
|
||||
set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot)
|
||||
set(CLR_CMAKE_PLATFORM_ANDROID "Android")
|
||||
|
||||
set(CMAKE_SYSTEM_NAME Linux)
|
||||
set(CMAKE_SYSTEM_VERSION 1)
|
||||
set(CMAKE_SYSTEM_PROCESSOR aarch64)
|
||||
|
||||
## Specify the toolchain
|
||||
set(TOOLCHAIN "aarch64-linux-android")
|
||||
set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN})
|
||||
set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-)
|
||||
|
||||
find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang)
|
||||
find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++)
|
||||
find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang)
|
||||
find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar)
|
||||
find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar)
|
||||
find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy)
|
||||
find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
|
||||
|
||||
add_compile_options(--sysroot=${CROSS_ROOTFS})
|
||||
add_compile_options(-fPIE)
|
||||
|
||||
## Needed for Android or bionic specific conditionals
|
||||
add_compile_options(-D__ANDROID__)
|
||||
add_compile_options(-D__BIONIC__)
|
||||
|
||||
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
|
||||
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
|
||||
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
|
||||
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie")
|
||||
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
|
||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
|
||||
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
|
||||
|
||||
set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
|
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
|
@@ -0,0 +1,3 @@
# Debian (sid) # UNSTABLE
|
||||
deb http://ftp.debian.org/debian/ sid main contrib non-free
|
||||
deb-src http://ftp.debian.org/debian/ sid main contrib non-free
|
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
|
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ vivid main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ vivid-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ vivid-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ vivid-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-security main restricted universe multiverse
|
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ wily main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ wily-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ wily-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ wily-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-security main restricted universe multiverse
|
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
|
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
|
@@ -0,0 +1,71 @@
From e72c9d7ead60e3317bd6d1fade995c07021c947b Mon Sep 17 00:00:00 2001
|
||||
From: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
|
||||
Date: Thu, 7 May 2015 13:25:04 -0400
|
||||
Subject: [PATCH] Fix: building probe providers with C++ compiler
|
||||
|
||||
Robert Daniels wrote:
|
||||
> > I'm attempting to use lttng userspace tracing with a C++ application
|
||||
> > on an ARM platform. I'm using GCC 4.8.4 on Linux 3.14 with the 2.6
|
||||
> > release of lttng. I've compiled lttng-modules, lttng-ust, and
|
||||
> > lttng-tools and have been able to get a simple test working with C
|
||||
> > code. When I attempt to run the hello.cxx test on my target it will
|
||||
> > segfault.
|
||||
>
|
||||
>
|
||||
> I spent a little time digging into this issue and finally discovered the
|
||||
> cause of my segfault with ARM C++ tracepoints.
|
||||
>
|
||||
> There is a struct called 'lttng_event' in ust-events.h which contains an
|
||||
> empty union 'u'. This was the cause of my issue. Under C, this empty union
|
||||
> compiles to a zero byte member while under C++ it compiles to a one byte
|
||||
> member, and in my case was four-byte aligned which caused my C++ code to
|
||||
> have the 'cds_list_head node' offset incorrectly by four bytes. This lead
|
||||
> to an incorrect linked list structure which caused my issue.
|
||||
>
|
||||
> Since this union is empty, I simply removed it from the struct and everything
|
||||
> worked correctly.
|
||||
>
|
||||
> I don't know the history or purpose behind this empty union so I'd like to
|
||||
> know if this is a safe fix. If it is I can submit a patch with the union
|
||||
> removed.
|
||||
|
||||
That's a very nice catch!
|
||||
|
||||
We do not support building tracepoint probe provider with
|
||||
g++ yet, as stated in lttng-ust(3):
|
||||
|
||||
"- Note for C++ support: although an application instrumented with
|
||||
tracepoints can be compiled with g++, tracepoint probes should be
|
||||
compiled with gcc (only tested with gcc so far)."
|
||||
|
||||
However, if it works fine with this fix, then I'm tempted to take it,
|
||||
especially because removing the empty union does not appear to affect
|
||||
the layout of struct lttng_event as seen from liblttng-ust, which must
|
||||
be compiled with a C compiler, and from probe providers compiled with
|
||||
a C compiler. So all we are changing is the layout of a probe provider
|
||||
compiled with a C++ compiler, which is anyway buggy at the moment,
|
||||
because it is not compatible with the layout expected by liblttng-ust
|
||||
compiled with a C compiler.
|
||||
|
||||
Reported-by: Robert Daniels <robert.daniels@vantagecontrols.com>
|
||||
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
|
||||
---
|
||||
include/lttng/ust-events.h | 2 --
|
||||
1 file changed, 2 deletions(-)
|
||||
|
||||
diff --git a/usr/include/lttng/ust-events.h b/usr/include/lttng/ust-events.h
|
||||
index 328a875..3d7a274 100644
|
||||
--- a/usr/include/lttng/ust-events.h
|
||||
+++ b/usr/include/lttng/ust-events.h
|
||||
@@ -407,8 +407,6 @@ struct lttng_event {
|
||||
void *_deprecated1;
|
||||
struct lttng_ctx *ctx;
|
||||
enum lttng_ust_instrumentation instrumentation;
|
||||
- union {
|
||||
- } u;
|
||||
struct cds_list_head node; /* Event list in session */
|
||||
struct cds_list_head _deprecated2;
|
||||
void *_deprecated3;
|
||||
--
|
||||
2.7.4
|
||||
@@ -0,0 +1,97 @@
diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
|
||||
--- a/usr/include/urcu/uatomic/generic.h 2014-03-28 06:04:42.000000000 +0900
|
||||
+++ b/usr/include/urcu/uatomic/generic.h 2017-02-13 10:35:21.189927116 +0900
|
||||
@@ -65,17 +65,17 @@
|
||||
switch (len) {
|
||||
#ifdef UATOMIC_HAS_ATOMIC_BYTE
|
||||
case 1:
|
||||
- return __sync_val_compare_and_swap_1(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_1((uint8_t *) addr, old, _new);
|
||||
#endif
|
||||
#ifdef UATOMIC_HAS_ATOMIC_SHORT
|
||||
case 2:
|
||||
- return __sync_val_compare_and_swap_2(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_2((uint16_t *) addr, old, _new);
|
||||
#endif
|
||||
case 4:
|
||||
- return __sync_val_compare_and_swap_4(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_4((uint32_t *) addr, old, _new);
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
- return __sync_val_compare_and_swap_8(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_8((uint64_t *) addr, old, _new);
|
||||
#endif
|
||||
}
|
||||
_uatomic_link_error();
|
||||
@@ -100,20 +100,20 @@
|
||||
switch (len) {
|
||||
#ifdef UATOMIC_HAS_ATOMIC_BYTE
|
||||
case 1:
|
||||
- __sync_and_and_fetch_1(addr, val);
|
||||
+ __sync_and_and_fetch_1((uint8_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
#ifdef UATOMIC_HAS_ATOMIC_SHORT
|
||||
case 2:
|
||||
- __sync_and_and_fetch_2(addr, val);
|
||||
+ __sync_and_and_fetch_2((uint16_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
case 4:
|
||||
- __sync_and_and_fetch_4(addr, val);
|
||||
+ __sync_and_and_fetch_4((uint32_t *) addr, val);
|
||||
return;
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
- __sync_and_and_fetch_8(addr, val);
|
||||
+ __sync_and_and_fetch_8((uint64_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
}
|
||||
@@ -139,20 +139,20 @@
|
||||
switch (len) {
|
||||
#ifdef UATOMIC_HAS_ATOMIC_BYTE
|
||||
case 1:
|
||||
- __sync_or_and_fetch_1(addr, val);
|
||||
+ __sync_or_and_fetch_1((uint8_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
#ifdef UATOMIC_HAS_ATOMIC_SHORT
|
||||
case 2:
|
||||
- __sync_or_and_fetch_2(addr, val);
|
||||
+ __sync_or_and_fetch_2((uint16_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
case 4:
|
||||
- __sync_or_and_fetch_4(addr, val);
|
||||
+ __sync_or_and_fetch_4((uint32_t *) addr, val);
|
||||
return;
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
- __sync_or_and_fetch_8(addr, val);
|
||||
+ __sync_or_and_fetch_8((uint64_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
}
|
||||
@@ -180,17 +180,17 @@
|
||||
switch (len) {
|
||||
#ifdef UATOMIC_HAS_ATOMIC_BYTE
|
||||
case 1:
|
||||
- return __sync_add_and_fetch_1(addr, val);
|
||||
+ return __sync_add_and_fetch_1((uint8_t *) addr, val);
|
||||
#endif
|
||||
#ifdef UATOMIC_HAS_ATOMIC_SHORT
|
||||
case 2:
|
||||
- return __sync_add_and_fetch_2(addr, val);
|
||||
+ return __sync_add_and_fetch_2((uint16_t *) addr, val);
|
||||
#endif
|
||||
case 4:
|
||||
- return __sync_add_and_fetch_4(addr, val);
|
||||
+ return __sync_add_and_fetch_4((uint32_t *) addr, val);
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
- return __sync_add_and_fetch_8(addr, val);
|
||||
+ return __sync_add_and_fetch_8((uint64_t *) addr, val);
|
||||
#endif
|
||||
}
|
||||
_uatomic_link_error();
|
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
|
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
|
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ vivid main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ vivid-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ vivid-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ vivid-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-security main restricted universe multiverse
|
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ wily main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ wily-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ wily-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ wily-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-security main restricted universe multiverse
|
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
|
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
|
||||
|
||||
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
|
||||
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
|
@@ -0,0 +1,3 @@
# Debian (jessie) # Stable
|
||||
deb http://ftp.debian.org/debian/ jessie main contrib non-free
|
||||
deb-src http://ftp.debian.org/debian/ jessie main contrib non-free
|
@@ -0,0 +1,44 @@
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
__ARM_SOFTFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
|
||||
__TIZEN_CROSSDIR="$__ARM_SOFTFP_CrossDir/tizen"
|
||||
|
||||
if [[ -z "$ROOTFS_DIR" ]]; then
|
||||
echo "ROOTFS_DIR is not defined."
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
# Clean-up (TODO-Cleanup: We may already delete $ROOTFS_DIR at ./cross/build-rootfs.sh.)
|
||||
# hk0110
|
||||
if [ -d "$ROOTFS_DIR" ]; then
|
||||
umount $ROOTFS_DIR/*
|
||||
rm -rf $ROOTFS_DIR
|
||||
fi
|
||||
|
||||
TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
|
||||
mkdir -p $TIZEN_TMP_DIR
|
||||
|
||||
# Download files
|
||||
echo ">>Start downloading files"
|
||||
VERBOSE=1 $__ARM_SOFTFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
|
||||
echo "<<Finish downloading files"
|
||||
|
||||
echo ">>Start constructing Tizen rootfs"
|
||||
TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
|
||||
cd $ROOTFS_DIR
|
||||
for f in $TIZEN_RPM_FILES; do
|
||||
rpm2cpio $f | cpio -idm --quiet
|
||||
done
|
||||
echo "<<Finish constructing Tizen rootfs"
|
||||
|
||||
# Cleanup tmp
|
||||
rm -rf $TIZEN_TMP_DIR
|
||||
|
||||
# Configure Tizen rootfs
|
||||
echo ">>Start configuring Tizen rootfs"
|
||||
rm ./usr/lib/libunwind.so
|
||||
ln -s libunwind.so.8 ./usr/lib/libunwind.so
|
||||
ln -sfn asm-arm ./usr/include/asm
|
||||
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
|
||||
echo "<<Finish configuring Tizen rootfs"
|
@@ -0,0 +1,171 @@
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
|
||||
VERBOSE=0
|
||||
fi
|
||||
|
||||
Log()
|
||||
{
|
||||
if [ $VERBOSE -ge $1 ]; then
|
||||
echo ${@:2}
|
||||
fi
|
||||
}
|
||||
|
||||
Inform()
|
||||
{
|
||||
Log 1 -e "\x1B[0;34m$@\x1B[m"
|
||||
}
|
||||
|
||||
Debug()
|
||||
{
|
||||
Log 2 -e "\x1B[0;32m$@\x1B[m"
|
||||
}
|
||||
|
||||
Error()
|
||||
{
|
||||
>&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
|
||||
}
|
||||
|
||||
Fetch()
|
||||
{
|
||||
URL=$1
|
||||
FILE=$2
|
||||
PROGRESS=$3
|
||||
if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
|
||||
CURL_OPT="--progress-bar"
|
||||
else
|
||||
CURL_OPT="--silent"
|
||||
fi
|
||||
curl $CURL_OPT $URL > $FILE
|
||||
}
|
||||
|
||||
hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
|
||||
hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
|
||||
hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
|
||||
|
||||
TMPDIR=$1
|
||||
if [ ! -d $TMPDIR ]; then
|
||||
TMPDIR=./tizen_tmp
|
||||
Debug "Create temporary directory : $TMPDIR"
|
||||
mkdir -p $TMPDIR
|
||||
fi
|
||||
|
||||
TIZEN_URL=http://download.tizen.org/releases/milestone/tizen
|
||||
BUILD_XML=build.xml
|
||||
REPOMD_XML=repomd.xml
|
||||
PRIMARY_XML=primary.xml
|
||||
TARGET_URL="http://__not_initialized"
|
||||
|
||||
Xpath_get()
|
||||
{
|
||||
XPATH_RESULT=''
|
||||
XPATH=$1
|
||||
XML_FILE=$2
|
||||
RESULT=$(xmllint --xpath $XPATH $XML_FILE)
|
||||
if [[ -z ${RESULT// } ]]; then
|
||||
Error "Can not find target from $XML_FILE"
|
||||
Debug "Xpath = $XPATH"
|
||||
exit 1
|
||||
fi
|
||||
XPATH_RESULT=$RESULT
|
||||
}
|
||||
|
||||
fetch_tizen_pkgs_init()
|
||||
{
|
||||
TARGET=$1
|
||||
PROFILE=$2
|
||||
Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
|
||||
|
||||
TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
|
||||
if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
|
||||
mkdir -p $TMP_PKG_DIR
|
||||
|
||||
PKG_URL=$TIZEN_URL/$PROFILE/latest
|
||||
|
||||
BUILD_XML_URL=$PKG_URL/$BUILD_XML
|
||||
TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
|
||||
TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
|
||||
TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
|
||||
TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
|
||||
|
||||
Fetch $BUILD_XML_URL $TMP_BUILD
|
||||
|
||||
Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
|
||||
|
||||
TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
|
||||
Xpath_get $TARGET_XPATH $TMP_BUILD
|
||||
TARGET_PATH=$XPATH_RESULT
|
||||
TARGET_URL=$PKG_URL/$TARGET_PATH
|
||||
|
||||
REPOMD_URL=$TARGET_URL/repodata/repomd.xml
|
||||
PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
|
||||
|
||||
Fetch $REPOMD_URL $TMP_REPOMD
|
||||
|
||||
Debug "fetch $REPOMD_URL to $TMP_REPOMD"
|
||||
|
||||
Xpath_get $PRIMARY_XPATH $TMP_REPOMD
|
||||
PRIMARY_XML_PATH=$XPATH_RESULT
|
||||
PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
|
||||
|
||||
Fetch $PRIMARY_URL $TMP_PRIMARYGZ
|
||||
|
||||
Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
|
||||
|
||||
gunzip $TMP_PRIMARYGZ
|
||||
|
||||
Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
|
||||
}
|
||||
|
||||
fetch_tizen_pkgs()
|
||||
{
|
||||
ARCH=$1
|
||||
PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
|
||||
|
||||
PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
|
||||
|
||||
for pkg in ${@:2}
|
||||
do
|
||||
Inform "Fetching... $pkg"
|
||||
XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
|
||||
XPATH=${XPATH/_ARCH_/$ARCH}
|
||||
Xpath_get $XPATH $TMP_PRIMARY
|
||||
PKG_PATH=$XPATH_RESULT
|
||||
|
||||
XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
|
||||
XPATH=${XPATH/_ARCH_/$ARCH}
|
||||
Xpath_get $XPATH $TMP_PRIMARY
|
||||
CHECKSUM=$XPATH_RESULT
|
||||
|
||||
PKG_URL=$TARGET_URL/$PKG_PATH
|
||||
PKG_FILE=$(basename $PKG_PATH)
|
||||
PKG_PATH=$TMPDIR/$PKG_FILE
|
||||
|
||||
Debug "Download $PKG_URL to $PKG_PATH"
|
||||
Fetch $PKG_URL $PKG_PATH true
|
||||
|
||||
echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
|
||||
if [ $? -ne 0 ]; then
|
||||
Error "Fail to fetch $PKG_URL to $PKG_PATH"
|
||||
Debug "Checksum = $CHECKSUM"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
Inform "Initialize arm base"
|
||||
fetch_tizen_pkgs_init standard base
|
||||
Inform "fetch common packages"
|
||||
fetch_tizen_pkgs armv7l gcc glibc glibc-devel libicu libicu-devel
|
||||
fetch_tizen_pkgs noarch linux-glibc-devel
|
||||
Inform "fetch coreclr packages"
|
||||
fetch_tizen_pkgs armv7l lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel tizen-release lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
|
||||
Inform "fetch corefx packages"
|
||||
fetch_tizen_pkgs armv7l libcom_err libcom_err-devel zlib zlib-devel libopenssl libopenssl-devel krb5 krb5-devel libcurl libcurl-devel
|
||||
|
||||
Inform "Initialize standard unified"
|
||||
fetch_tizen_pkgs_init standard unified
|
||||
Inform "fetch corefx packages"
|
||||
fetch_tizen_pkgs armv7l gssdp gssdp-devel
|
||||
|
|
@@ -0,0 +1,50 @@
lang en_US.UTF-8
keyboard us
timezone --utc Asia/Seoul

part / --fstype="ext4" --size=3500 --ondisk=mmcblk0 --label rootfs --fsoptions=defaults,noatime

rootpw tizen
desktop --autologinuser=root
user --name root --groups audio,video --password 'tizen'

repo --name=standard --baseurl=http://download.tizen.org/releases/milestone/tizen/unified/latest/repos/standard/packages/ --ssl_verify=no
repo --name=base --baseurl=http://download.tizen.org/releases/milestone/tizen/base/latest/repos/standard/packages/ --ssl_verify=no

%packages
tar
gzip

sed
grep
gawk
perl

binutils
findutils
util-linux
lttng-ust
userspace-rcu
procps-ng
tzdata
ca-certificates


### Core FX
libicu
libunwind
iputils
zlib
krb5
libcurl
libopenssl

%end

%post

### Update /tmp privilege
chmod 777 /tmp
####################################

%end
@@ -0,0 +1,18 @@
diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
+++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
@@ -2,4 +2,4 @@
 Use the shared library, but some functions are only in
 the static library, so try that secondarily. */
 OUTPUT_FORMAT(elf32-littlearm)
-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) )
diff -u -r a/usr/lib/libpthread.so b/usr/lib/libpthread.so
--- a/usr/lib/libpthread.so 2016-12-30 23:00:19.408951841 +0900
+++ b/usr/lib/libpthread.so 2016-12-30 23:00:39.068951801 +0900
@@ -2,4 +2,4 @@
 Use the shared library, but some functions are only in
 the static library, so try that secondarily. */
 OUTPUT_FORMAT(elf32-littlearm)
-GROUP ( /lib/libpthread.so.0 /usr/lib/libpthread_nonshared.a )
+GROUP ( libpthread.so.0 libpthread_nonshared.a )
@@ -0,0 +1,137 @@
|
|||
#!/usr/bin/env bash
|
||||
set -e
|
||||
__NDK_Version=r14
|
||||
|
||||
usage()
|
||||
{
|
||||
echo "Creates a toolchain and sysroot used for cross-compiling for Android."
|
||||
echo ""
|
||||
echo "Usage: $0 [BuildArch] [ApiLevel]"
|
||||
echo ""
|
||||
echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
|
||||
echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
|
||||
echo ""
|
||||
echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
|
||||
echo "by setting the TOOLCHAIN_DIR environment variable"
|
||||
echo ""
|
||||
echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation,"
|
||||
echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK."
|
||||
echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.21-arm64. This file is to replace '/etc/os-release', which is not available for Android."
|
||||
exit 1
|
||||
}
|
||||
|
||||
__ApiLevel=21 # The minimum platform for arm64 is API level 21
|
||||
__BuildArch=arm64
|
||||
__AndroidArch=aarch64
|
||||
__AndroidToolchain=aarch64-linux-android
|
||||
|
||||
for i in "$@"
|
||||
do
|
||||
lowerI="$(echo $i | awk '{print tolower($0)}')"
|
||||
case $lowerI in
|
||||
-?|-h|--help)
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
arm64)
|
||||
__BuildArch=arm64
|
||||
__AndroidArch=aarch64
|
||||
__AndroidToolchain=aarch64-linux-android
|
||||
;;
|
||||
arm)
|
||||
__BuildArch=arm
|
||||
__AndroidArch=arm
|
||||
__AndroidToolchain=arm-linux-androideabi
|
||||
;;
|
||||
*[0-9])
|
||||
__ApiLevel=$i
|
||||
;;
|
||||
*)
|
||||
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Obtain the location of the bash script to figure out where the root of the repo is.
|
||||
__CrossDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
__Android_Cross_Dir="$__CrossDir/android-rootfs"
|
||||
__NDK_Dir="$__Android_Cross_Dir/android-ndk-$__NDK_Version"
|
||||
__libunwind_Dir="$__Android_Cross_Dir/libunwind"
|
||||
__lldb_Dir="$__Android_Cross_Dir/lldb"
|
||||
__ToolchainDir="$__Android_Cross_Dir/toolchain/$__BuildArch"
|
||||
|
||||
if [[ -n "$TOOLCHAIN_DIR" ]]; then
|
||||
__ToolchainDir=$TOOLCHAIN_DIR
|
||||
fi
|
||||
|
||||
if [[ -n "$NDK_DIR" ]]; then
|
||||
__NDK_Dir=$NDK_DIR
|
||||
fi
|
||||
|
||||
echo "Target API level: $__ApiLevel"
|
||||
echo "Target architecture: $__BuildArch"
|
||||
echo "NDK location: $__NDK_Dir"
|
||||
echo "Target Toolchain location: $__ToolchainDir"
|
||||
|
||||
# Download the NDK if required
|
||||
if [ ! -d $__NDK_Dir ]; then
|
||||
echo Downloading the NDK into $__NDK_Dir
|
||||
mkdir -p $__NDK_Dir
|
||||
wget -nv -nc --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__Android_Cross_Dir/android-ndk-$__NDK_Version-linux-x86_64.zip
|
||||
unzip -q $__Android_Cross_Dir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__Android_Cross_Dir
|
||||
fi
|
||||
|
||||
if [ ! -d $__lldb_Dir ]; then
|
||||
mkdir -p $__lldb_Dir
|
||||
echo Downloading LLDB into $__lldb_Dir
|
||||
wget -nv -nc --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O $__Android_Cross_Dir/lldb-2.3.3614996-linux-x86_64.zip
|
||||
unzip -q $__Android_Cross_Dir/lldb-2.3.3614996-linux-x86_64.zip -d $__lldb_Dir
|
||||
fi
|
||||
|
||||
# Create the RootFS for both arm64 and arm
|
||||
rm -rf $__Android_Cross_Dir/toolchain
|
||||
|
||||
echo Generating the $__BuildArch toolchain
|
||||
$__NDK_Dir/build/tools/make_standalone_toolchain.py --arch $__BuildArch --api $__ApiLevel --install-dir $__ToolchainDir
|
||||
|
||||
# Install the required packages into the toolchain
|
||||
# TODO: Add logic to get latest pkg version instead of specific version number
|
||||
rm -rf $__Android_Cross_Dir/deb/
|
||||
rm -rf $__Android_Cross_Dir/tmp
|
||||
|
||||
mkdir -p $__Android_Cross_Dir/deb/
|
||||
mkdir -p $__Android_Cross_Dir/tmp/$__AndroidArch/
|
||||
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libicu_60.2_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libicu_60.2_$__AndroidArch.deb
|
||||
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libicu-dev_60.2_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libicu-dev_60.2_$__AndroidArch.deb
|
||||
|
||||
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-glob-dev_0.4_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-glob-dev_0.4_$__AndroidArch.deb
|
||||
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-glob_0.4_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-glob_0.4_$__AndroidArch.deb
|
||||
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-support-dev_22_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-support-dev_22_$__AndroidArch.deb
|
||||
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-support_22_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-support_22_$__AndroidArch.deb
|
||||
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/liblzma-dev_5.2.3_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/liblzma-dev_5.2.3_$__AndroidArch.deb
|
||||
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/liblzma_5.2.3_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/liblzma_5.2.3_$__AndroidArch.deb
|
||||
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libunwind-dev_1.2.20170304_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libunwind-dev_1.2.20170304_$__AndroidArch.deb
|
||||
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libunwind_1.2.20170304_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libunwind_1.2.20170304_$__AndroidArch.deb
|
||||
|
||||
echo Unpacking Termux packages
|
||||
dpkg -x $__Android_Cross_Dir/deb/libicu_60.2_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
|
||||
dpkg -x $__Android_Cross_Dir/deb/libicu-dev_60.2_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
|
||||
dpkg -x $__Android_Cross_Dir/deb/libandroid-glob-dev_0.4_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
|
||||
dpkg -x $__Android_Cross_Dir/deb/libandroid-glob_0.4_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
|
||||
dpkg -x $__Android_Cross_Dir/deb/libandroid-support-dev_22_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
|
||||
dpkg -x $__Android_Cross_Dir/deb/libandroid-support_22_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
|
||||
dpkg -x $__Android_Cross_Dir/deb/liblzma-dev_5.2.3_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
|
||||
dpkg -x $__Android_Cross_Dir/deb/liblzma_5.2.3_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
|
||||
dpkg -x $__Android_Cross_Dir/deb/libunwind-dev_1.2.20170304_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
|
||||
dpkg -x $__Android_Cross_Dir/deb/libunwind_1.2.20170304_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
|
||||
|
||||
cp -R $__Android_Cross_Dir/tmp/$__AndroidArch/data/data/com.termux/files/usr/* $__ToolchainDir/sysroot/usr/
|
||||
|
||||
# Generate platform file for build.sh script to assign to __DistroRid
|
||||
echo "Generating platform file..."
|
||||
|
||||
echo "RID=android.21-arm64" > $__ToolchainDir/sysroot/android_platform
|
||||
echo Now run:
|
||||
echo CONFIG_DIR=\`realpath cross/android/$__BuildArch\` ROOTFS_DIR=\`realpath $__ToolchainDir/sysroot\` ./build.sh cross $__BuildArch skipgenerateversion skipnuget cmakeargs -DENABLE_LLDBPLUGIN=0
|
||||
|
|
@@ -0,0 +1,210 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
usage()
|
||||
{
|
||||
echo "Usage: $0 [BuildArch] [LinuxCodeName] [lldbx.y] [--skipunmount]"
|
||||
echo "BuildArch can be: arm(default), armel, arm64, x86"
|
||||
echo "LinuxCodeName - optional, Code name for Linux, can be: trusty(default), vivid, wily, xenial, zesty, bionic, alpine. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
|
||||
echo "lldbx.y - optional, LLDB version, can be: lldb3.6(default), lldb3.8, lldb3.9, lldb4.0, no-lldb. Ignored for alpine"
|
||||
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
|
||||
exit 1
|
||||
}
|
||||
|
||||
__LinuxCodeName=trusty
|
||||
__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
|
||||
__InitialDir=$PWD
|
||||
__BuildArch=arm
|
||||
__UbuntuArch=armhf
|
||||
__UbuntuRepo="http://ports.ubuntu.com/"
|
||||
__LLDB_Package="lldb-3.6-dev"
|
||||
__SkipUnmount=0
|
||||
|
||||
# base development support
|
||||
__UbuntuPackages="build-essential"
|
||||
|
||||
__AlpinePackages="alpine-base"
|
||||
__AlpinePackages+=" build-base"
|
||||
__AlpinePackages+=" linux-headers"
|
||||
__AlpinePackages+=" lldb-dev"
|
||||
__AlpinePackages+=" llvm-dev"
|
||||
|
||||
# symlinks fixer
|
||||
__UbuntuPackages+=" symlinks"
|
||||
|
||||
# CoreCLR and CoreFX dependencies
|
||||
__UbuntuPackages+=" libicu-dev"
|
||||
__UbuntuPackages+=" liblttng-ust-dev"
|
||||
__UbuntuPackages+=" libunwind8-dev"
|
||||
|
||||
__AlpinePackages+=" gettext-dev"
|
||||
__AlpinePackages+=" icu-dev"
|
||||
__AlpinePackages+=" libunwind-dev"
|
||||
__AlpinePackages+=" lttng-ust-dev"
|
||||
|
||||
# CoreFX dependencies
|
||||
__UbuntuPackages+=" libcurl4-openssl-dev"
|
||||
__UbuntuPackages+=" libkrb5-dev"
|
||||
__UbuntuPackages+=" libssl-dev"
|
||||
__UbuntuPackages+=" zlib1g-dev"
|
||||
|
||||
__AlpinePackages+=" curl-dev"
|
||||
__AlpinePackages+=" krb5-dev"
|
||||
__AlpinePackages+=" openssl-dev"
|
||||
__AlpinePackages+=" zlib-dev"
|
||||
|
||||
__UnprocessedBuildArgs=
|
||||
for i in "$@" ; do
|
||||
lowerI="$(echo $i | awk '{print tolower($0)}')"
|
||||
case $lowerI in
|
||||
-?|-h|--help)
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
arm)
|
||||
__BuildArch=arm
|
||||
__UbuntuArch=armhf
|
||||
__AlpineArch=armhf
|
||||
__QEMUArch=arm
|
||||
;;
|
||||
arm64)
|
||||
__BuildArch=arm64
|
||||
__UbuntuArch=arm64
|
||||
__AlpineArch=aarch64
|
||||
__QEMUArch=aarch64
|
||||
;;
|
||||
armel)
|
||||
__BuildArch=armel
|
||||
__UbuntuArch=armel
|
||||
__UbuntuRepo="http://ftp.debian.org/debian/"
|
||||
__LinuxCodeName=jessie
|
||||
;;
|
||||
x86)
|
||||
__BuildArch=x86
|
||||
__UbuntuArch=i386
|
||||
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
|
||||
;;
|
||||
lldb3.6)
|
||||
__LLDB_Package="lldb-3.6-dev"
|
||||
;;
|
||||
lldb3.8)
|
||||
__LLDB_Package="lldb-3.8-dev"
|
||||
;;
|
||||
lldb3.9)
|
||||
__LLDB_Package="liblldb-3.9-dev"
|
||||
;;
|
||||
lldb4.0)
|
||||
__LLDB_Package="liblldb-4.0-dev"
|
||||
;;
|
||||
no-lldb)
|
||||
unset __LLDB_Package
|
||||
;;
|
||||
vivid)
|
||||
if [ "$__LinuxCodeName" != "jessie" ]; then
|
||||
__LinuxCodeName=vivid
|
||||
fi
|
||||
;;
|
||||
wily)
|
||||
if [ "$__LinuxCodeName" != "jessie" ]; then
|
||||
__LinuxCodeName=wily
|
||||
fi
|
||||
;;
|
||||
xenial)
|
||||
if [ "$__LinuxCodeName" != "jessie" ]; then
|
||||
__LinuxCodeName=xenial
|
||||
fi
|
||||
;;
|
||||
zesty)
|
||||
if [ "$__LinuxCodeName" != "jessie" ]; then
|
||||
__LinuxCodeName=zesty
|
||||
fi
|
||||
;;
|
||||
bionic)
|
||||
if [ "$__LinuxCodeName" != "jessie" ]; then
|
||||
__LinuxCodeName=bionic
|
||||
fi
|
||||
;;
|
||||
jessie)
|
||||
__LinuxCodeName=jessie
|
||||
__UbuntuRepo="http://ftp.debian.org/debian/"
|
||||
;;
|
||||
tizen)
|
||||
if [ "$__BuildArch" != "armel" ]; then
|
||||
echo "Tizen is available only for armel."
|
||||
usage;
|
||||
exit 1;
|
||||
fi
|
||||
__LinuxCodeName=
|
||||
__UbuntuRepo=
|
||||
__Tizen=tizen
|
||||
;;
|
||||
alpine)
|
||||
__LinuxCodeName=alpine
|
||||
__UbuntuRepo=
|
||||
;;
|
||||
--skipunmount)
|
||||
__SkipUnmount=1
|
||||
;;
|
||||
*)
|
||||
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ "$__BuildArch" == "armel" ]; then
|
||||
__LLDB_Package="lldb-3.5-dev"
|
||||
fi
|
||||
__UbuntuPackages+=" ${__LLDB_Package:-}"
|
||||
|
||||
__RootfsDir="$__CrossDir/rootfs/$__BuildArch"
|
||||
|
||||
if [[ -n "$ROOTFS_DIR" ]]; then
|
||||
__RootfsDir=$ROOTFS_DIR
|
||||
fi
|
||||
|
||||
if [ -d "$__RootfsDir" ]; then
|
||||
if [ $__SkipUnmount == 0 ]; then
|
||||
umount $__RootfsDir/*
|
||||
fi
|
||||
rm -rf $__RootfsDir
|
||||
fi
|
||||
|
||||
if [[ "$__LinuxCodeName" == "alpine" ]]; then
|
||||
__ApkToolsVersion=2.9.1
|
||||
__AlpineVersion=3.7
|
||||
__ApkToolsDir=$(mktemp -d)
|
||||
wget https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -P $__ApkToolsDir
|
||||
tar -xf $__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -C $__ApkToolsDir
|
||||
mkdir -p $__RootfsDir/usr/bin
|
||||
cp -v /usr/bin/qemu-$__QEMUArch-static $__RootfsDir/usr/bin
|
||||
$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
|
||||
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main \
|
||||
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community \
|
||||
-X http://dl-cdn.alpinelinux.org/alpine/edge/testing \
|
||||
-U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
|
||||
add $__AlpinePackages
|
||||
rm -r $__ApkToolsDir
|
||||
elif [[ -n $__LinuxCodeName ]]; then
|
||||
qemu-debootstrap --arch $__UbuntuArch $__LinuxCodeName $__RootfsDir $__UbuntuRepo
|
||||
cp $__CrossDir/$__BuildArch/sources.list.$__LinuxCodeName $__RootfsDir/etc/apt/sources.list
|
||||
chroot $__RootfsDir apt-get update
|
||||
chroot $__RootfsDir apt-get -f -y install
|
||||
chroot $__RootfsDir apt-get -y install $__UbuntuPackages
|
||||
chroot $__RootfsDir symlinks -cr /usr
|
||||
|
||||
if [ $__SkipUnmount == 0 ]; then
|
||||
umount $__RootfsDir/*
|
||||
fi
|
||||
|
||||
if [[ "$__BuildArch" == "arm" && "$__LinuxCodeName" == "trusty" ]]; then
|
||||
pushd $__RootfsDir
|
||||
patch -p1 < $__CrossDir/$__BuildArch/trusty.patch
|
||||
patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch
|
||||
popd
|
||||
fi
|
||||
elif [ "$__Tizen" == "tizen" ]; then
|
||||
ROOTFS_DIR=$__RootfsDir $__CrossDir/$__BuildArch/tizen-build-rootfs.sh
|
||||
else
|
||||
echo "Unsupported target platform."
|
||||
usage;
|
||||
exit 1
|
||||
fi
|
|
@@ -0,0 +1,138 @@
|
|||
set(CROSS_ROOTFS $ENV{ROOTFS_DIR})
|
||||
|
||||
set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH})
|
||||
set(CMAKE_SYSTEM_NAME Linux)
|
||||
set(CMAKE_SYSTEM_VERSION 1)
|
||||
|
||||
if(TARGET_ARCH_NAME STREQUAL "armel")
|
||||
set(CMAKE_SYSTEM_PROCESSOR armv7l)
|
||||
set(TOOLCHAIN "arm-linux-gnueabi")
|
||||
if("$ENV{__DistroRid}" MATCHES "tizen.*")
|
||||
set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/6.2.1")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "arm")
|
||||
set(CMAKE_SYSTEM_PROCESSOR armv7l)
|
||||
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
|
||||
set(TOOLCHAIN "armv6-alpine-linux-musleabihf")
|
||||
else()
|
||||
set(TOOLCHAIN "arm-linux-gnueabihf")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "arm64")
|
||||
set(CMAKE_SYSTEM_PROCESSOR aarch64)
|
||||
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl)
|
||||
set(TOOLCHAIN "aarch64-alpine-linux-musl")
|
||||
else()
|
||||
set(TOOLCHAIN "aarch64-linux-gnu")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "x86")
|
||||
set(CMAKE_SYSTEM_PROCESSOR i686)
|
||||
set(TOOLCHAIN "i686-linux-gnu")
|
||||
else()
|
||||
message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only armel, arm, arm64 and x86 are supported!")
|
||||
endif()
|
||||
|
||||
# Specify include paths
|
||||
if(TARGET_ARCH_NAME STREQUAL "armel")
|
||||
if(DEFINED TIZEN_TOOLCHAIN)
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# add_compile_param - adds only new options without duplicates.
|
||||
# arg0 - list with result options, arg1 - list with new options.
|
||||
# arg2 - optional argument, quick summary string for optional using CACHE FORCE mode.
|
||||
macro(add_compile_param)
|
||||
if(NOT ${ARGC} MATCHES "^(2|3)$")
|
||||
message(FATAL_ERROR "Wrong using add_compile_param! Two or three parameters must be given! See add_compile_param description.")
|
||||
endif()
|
||||
foreach(OPTION ${ARGV1})
|
||||
if(NOT ${ARGV0} MATCHES "${OPTION}($| )")
|
||||
set(${ARGV0} "${${ARGV0}} ${OPTION}")
|
||||
if(${ARGC} EQUAL "3") # CACHE FORCE mode
|
||||
set(${ARGV0} "${${ARGV0}}" CACHE STRING "${ARGV2}" FORCE)
|
||||
endif()
|
||||
endif()
|
||||
endforeach()
|
||||
endmacro()
|
||||
|
||||
# Specify link flags
|
||||
add_compile_param(CROSS_LINK_FLAGS "--sysroot=${CROSS_ROOTFS}")
|
||||
add_compile_param(CROSS_LINK_FLAGS "--gcc-toolchain=${CROSS_ROOTFS}/usr")
|
||||
add_compile_param(CROSS_LINK_FLAGS "--target=${TOOLCHAIN}")
|
||||
add_compile_param(CROSS_LINK_FLAGS "-fuse-ld=gold")
|
||||
|
||||
if(TARGET_ARCH_NAME STREQUAL "armel")
|
||||
if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only
|
||||
add_compile_param(CROSS_LINK_FLAGS "-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
|
||||
add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/lib")
|
||||
add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib")
|
||||
add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "x86")
|
||||
add_compile_param(CROSS_LINK_FLAGS "-m32")
|
||||
endif()
|
||||
|
||||
add_compile_param(CMAKE_EXE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
|
||||
add_compile_param(CMAKE_SHARED_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
|
||||
add_compile_param(CMAKE_MODULE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
|
||||
|
||||
# Specify compile options
|
||||
add_compile_options("--sysroot=${CROSS_ROOTFS}")
|
||||
add_compile_options("--target=${TOOLCHAIN}")
|
||||
add_compile_options("--gcc-toolchain=${CROSS_ROOTFS}/usr")
|
||||
|
||||
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$")
|
||||
set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
|
||||
set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
|
||||
set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
|
||||
endif()
|
||||
|
||||
if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
|
||||
add_compile_options(-mthumb)
|
||||
add_compile_options(-mfpu=vfpv3)
|
||||
if(TARGET_ARCH_NAME STREQUAL "armel")
|
||||
add_compile_options(-mfloat-abi=softfp)
|
||||
if(DEFINED TIZEN_TOOLCHAIN)
|
||||
add_compile_options(-Wno-deprecated-declarations) # compile-time option
|
||||
add_compile_options(-D__extern_always_inline=inline) # compile-time option
|
||||
endif()
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "x86")
|
||||
add_compile_options(-m32)
|
||||
add_compile_options(-Wno-error=unused-command-line-argument)
|
||||
endif()
|
||||
|
||||
# Set LLDB include and library paths
|
||||
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$")
|
||||
if(TARGET_ARCH_NAME STREQUAL "x86")
|
||||
set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}")
|
||||
else() # arm/armel case
|
||||
set(LLVM_CROSS_DIR "$ENV{LLVM_ARM_HOME}")
|
||||
endif()
|
||||
if(LLVM_CROSS_DIR)
|
||||
set(WITH_LLDB_LIBS "${LLVM_CROSS_DIR}/lib/" CACHE STRING "")
|
||||
set(WITH_LLDB_INCLUDES "${LLVM_CROSS_DIR}/include" CACHE STRING "")
|
||||
set(LLDB_H "${WITH_LLDB_INCLUDES}" CACHE STRING "")
|
||||
set(LLDB "${LLVM_CROSS_DIR}/lib/liblldb.so" CACHE STRING "")
|
||||
else()
|
||||
if(TARGET_ARCH_NAME STREQUAL "x86")
|
||||
set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/i386-linux-gnu" CACHE STRING "")
|
||||
set(CHECK_LLVM_DIR "${CROSS_ROOTFS}/usr/lib/llvm-3.8/include")
|
||||
if(EXISTS "${CHECK_LLVM_DIR}" AND IS_DIRECTORY "${CHECK_LLVM_DIR}")
|
||||
set(WITH_LLDB_INCLUDES "${CHECK_LLVM_DIR}")
|
||||
else()
|
||||
set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include")
|
||||
endif()
|
||||
else() # arm/armel case
|
||||
set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}" CACHE STRING "")
|
||||
set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include" CACHE STRING "")
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
|
|
@@ -0,0 +1,32 @@
param (
  $darcVersion = $null
)

$verbosity = "m"
. $PSScriptRoot\tools.ps1

function InstallDarcCli ($darcVersion) {
  $darcCliPackageName = "microsoft.dotnet.darc"

  $dotnetRoot = InitializeDotNetCli -install:$true
  $dotnet = "$dotnetRoot\dotnet.exe"
  $toolList = Invoke-Expression "& `"$dotnet`" tool list -g"

  if ($toolList -like "*$darcCliPackageName*") {
    Invoke-Expression "& `"$dotnet`" tool uninstall $darcCliPackageName -g"
  }

  # Until we can anonymously query the BAR API for the latest arcade-services
  # build applied to the PROD channel, this is hardcoded.
  if (-not $darcVersion) {
    $darcVersion = '1.1.0-beta.19151.3'
  }

  $arcadeServicesSource = 'https://dotnetfeed.blob.core.windows.net/dotnet-arcade/index.json'

  Write-Host "Installing Darc CLI version $darcVersion..."
  Write-Host "You may need to restart your command window if this is the first dotnet tool you have installed."
  Invoke-Expression "& `"$dotnet`" tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity -g"
}

InstallDarcCli $darcVersion
@@ -0,0 +1,55 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source="${BASH_SOURCE[0]}"
|
||||
darcVersion="1.1.0-beta.19151.3"
|
||||
|
||||
while [[ $# > 0 ]]; do
|
||||
opt="$(echo "$1" | awk '{print tolower($0)}')"
|
||||
case "$opt" in
|
||||
--darcversion)
|
||||
darcVersion=$2
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
echo "Invalid argument: $1"
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
shift
|
||||
done
|
||||
|
||||
# resolve $source until the file is no longer a symlink
|
||||
while [[ -h "$source" ]]; do
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
source="$(readlink "$source")"
|
||||
# if $source was a relative symlink, we need to resolve it relative to the path where the
|
||||
# symlink file was located
|
||||
[[ $source != /* ]] && source="$scriptroot/$source"
|
||||
done
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
verbosity=m
|
||||
|
||||
. "$scriptroot/tools.sh"
|
||||
|
||||
function InstallDarcCli {
|
||||
local darc_cli_package_name="microsoft.dotnet.darc"
|
||||
|
||||
InitializeDotNetCli
|
||||
local dotnet_root=$_InitializeDotNetCli
|
||||
|
||||
local uninstall_command=`$dotnet_root/dotnet tool uninstall $darc_cli_package_name -g`
|
||||
local tool_list=$($dotnet_root/dotnet tool list -g)
|
||||
if [[ $tool_list = *$darc_cli_package_name* ]]; then
|
||||
echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name -g)
|
||||
fi
|
||||
|
||||
local arcadeServicesSource="https://dotnetfeed.blob.core.windows.net/dotnet-arcade/index.json"
|
||||
|
||||
echo "Installing Darc CLI version $darcVersion..."
|
||||
echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
|
||||
echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g)
|
||||
}
|
||||
|
||||
InstallDarcCli
|
|
@@ -0,0 +1,61 @@
|
|||
Param(
|
||||
[Parameter(Mandatory=$true)][string] $barToken, # Token generated at https://maestro-prod.westus2.cloudapp.azure.com/Account/Tokens
|
||||
[Parameter(Mandatory=$true)][string] $gitHubPat, # GitHub personal access token from https://github.com/settings/tokens (no auth scopes needed)
|
||||
[Parameter(Mandatory=$true)][string] $azdoPat, # Azure Dev Ops tokens from https://dev.azure.com/dnceng/_details/security/tokens (code read scope needed)
|
||||
[Parameter(Mandatory=$true)][string] $outputFolder, # Where the graphviz.txt file will be created
|
||||
[string] $darcVersion = '1.1.0-beta.19156.4', # darc's version
|
||||
[switch] $includeToolset # Whether the graph should include toolset dependencies or not. i.e. arcade, optimization. For more about
|
||||
# toolset dependencies see https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#toolset-vs-product-dependencies
|
||||
)
|
||||
|
||||
$ErrorActionPreference = "Stop"
|
||||
. $PSScriptRoot\tools.ps1
|
||||
|
||||
function CheckExitCode ([string]$stage)
|
||||
{
|
||||
$exitCode = $LASTEXITCODE
|
||||
if ($exitCode -ne 0) {
|
||||
Write-Host "Something failed in stage: '$stage'. Check for errors above. Exiting now..."
|
||||
ExitWithExitCode $exitCode
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
Push-Location $PSScriptRoot
|
||||
|
||||
Write-Host "Installing darc..."
|
||||
. .\darc-init.ps1 -darcVersion $darcVersion
|
||||
CheckExitCode "Running darc-init"
|
||||
|
||||
$darcExe = "$env:USERPROFILE\.dotnet\tools"
|
||||
$darcExe = Resolve-Path "$darcExe\darc.exe"
|
||||
|
||||
Create-Directory $outputFolder
|
||||
|
||||
$graphVizFilePath = "$outputFolder\graphviz.txt"
|
||||
$graphFilePath = "$outputFolder\graph.txt"
|
||||
$options = "get-dependency-graph --graphviz '$graphVizFilePath' --github-pat $gitHubPat --azdev-pat $azdoPat --password $barToken --output-file $graphFilePath"
|
||||
|
||||
if ($includeToolset) {
|
||||
Write-Host "Toolsets will be included in the graph..."
|
||||
$options += " --include-toolset"
|
||||
}
|
||||
|
||||
Write-Host "Generating dependency graph..."
|
||||
$darc = Invoke-Expression "& `"$darcExe`" $options"
|
||||
CheckExitCode "Generating dependency graph"
|
||||
|
||||
$graph = Get-Content $graphVizFilePath
|
||||
Set-Content $graphVizFilePath -Value "Paste the following digraph object in http://www.webgraphviz.com `r`n", $graph
|
||||
Write-Host "'$graphVizFilePath' and '$graphFilePath' created!"
|
||||
}
|
||||
catch {
|
||||
if (!$includeToolset) {
|
||||
Write-Host "This might be a toolset repo which includes only toolset dependencies. " -NoNewline -ForegroundColor Yellow
|
||||
Write-Host "Since -includeToolset is not set there is no graph to create. Include -includeToolset and try again..." -ForegroundColor Yellow
|
||||
}
|
||||
Write-Host $_
|
||||
Write-Host $_.Exception
|
||||
Write-Host $_.ScriptStackTrace
|
||||
ExitWithExitCode 1
|
||||
}
|
|
@@ -0,0 +1,26 @@
<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">

  <PropertyGroup>
    <Language>msbuild</Language>
  </PropertyGroup>

  <ItemGroup>
    <HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
      <PayloadDirectory>%(Identity)</PayloadDirectory>
    </HelixCorrelationPayload>
  </ItemGroup>

  <ItemGroup>
    <HelixWorkItem Include="WorkItem" Condition="'$(WorkItemDirectory)' != ''">
      <PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
      <Command>$(WorkItemCommand)</Command>
      <Timeout Condition="'$(WorkItemTimeout)' != ''">$(WorkItemTimeout)</Timeout>
    </HelixWorkItem>
  </ItemGroup>

  <ItemGroup>
    <XUnitProject Include="$(XUnitProjects.Split(';'))">
      <Arguments />
    </XUnitProject>
  </ItemGroup>
</Project>
@@ -0,0 +1,3 @@
@echo off
powershell -NoProfile -NoLogo -ExecutionPolicy ByPass -command "& """%~dp0init-tools-native.ps1""" %*"
exit /b %ErrorLevel%
@@ -0,0 +1,128 @@
|
|||
<#
|
||||
.SYNOPSIS
|
||||
Entry point script for installing native tools
|
||||
|
||||
.DESCRIPTION
|
||||
Reads $RepoRoot\global.json file to determine native assets to install
|
||||
and executes installers for those tools
|
||||
|
||||
.PARAMETER BaseUri
|
||||
Base file directory or Url from which to acquire tool archives
|
||||
|
||||
.PARAMETER InstallDirectory
|
||||
Directory to install native toolset. This is a command-line override for the default
|
||||
Install directory precedence order:
|
||||
- InstallDirectory command-line override
|
||||
- NETCOREENG_INSTALL_DIRECTORY environment variable
|
||||
- (default) %USERPROFILE%/.netcoreeng/native
|
||||
|
||||
.PARAMETER Clean
|
||||
Switch specifying to not install anything, but cleanup native asset folders
|
||||
|
||||
.PARAMETER Force
|
||||
Clean and then install tools
|
||||
|
||||
.PARAMETER DownloadRetries
|
||||
Total number of retry attempts
|
||||
|
||||
.PARAMETER RetryWaitTimeInSeconds
|
||||
Wait time between retry attempts in seconds
|
||||
|
||||
.PARAMETER GlobalJsonFile
|
||||
File path to global.json file
|
||||
|
||||
.NOTES
|
||||
#>
|
||||
[CmdletBinding(PositionalBinding=$false)]
|
||||
Param (
|
||||
[string] $BaseUri = "https://netcorenativeassets.blob.core.windows.net/resource-packages/external",
|
||||
[string] $InstallDirectory,
|
||||
[switch] $Clean = $False,
|
||||
[switch] $Force = $False,
|
||||
[int] $DownloadRetries = 5,
|
||||
[int] $RetryWaitTimeInSeconds = 30,
|
||||
[string] $GlobalJsonFile = "$PSScriptRoot\..\..\global.json"
|
||||
)
|
||||
|
||||
Set-StrictMode -version 2.0
|
||||
$ErrorActionPreference="Stop"
|
||||
|
||||
Import-Module -Name (Join-Path $PSScriptRoot "native\CommonLibrary.psm1")
|
||||
|
||||
try {
|
||||
# Define verbose switch if undefined
|
||||
$Verbose = $VerbosePreference -Eq "Continue"
|
||||
|
||||
$EngCommonBaseDir = Join-Path $PSScriptRoot "native\"
|
||||
$NativeBaseDir = $InstallDirectory
|
||||
if (!$NativeBaseDir) {
|
||||
$NativeBaseDir = CommonLibrary\Get-NativeInstallDirectory
|
||||
}
|
||||
$Env:CommonLibrary_NativeInstallDir = $NativeBaseDir
|
||||
$InstallBin = Join-Path $NativeBaseDir "bin"
|
||||
$InstallerPath = Join-Path $EngCommonBaseDir "install-tool.ps1"
|
||||
|
||||
# Process tools list
|
||||
Write-Host "Processing $GlobalJsonFile"
|
||||
If (-Not (Test-Path $GlobalJsonFile)) {
|
||||
Write-Host "Unable to find '$GlobalJsonFile'"
|
||||
exit 0
|
||||
}
|
||||
$NativeTools = Get-Content($GlobalJsonFile) -Raw |
|
||||
ConvertFrom-Json |
|
||||
Select-Object -Expand "native-tools" -ErrorAction SilentlyContinue
|
||||
if ($NativeTools) {
|
||||
$NativeTools.PSObject.Properties | ForEach-Object {
|
||||
$ToolName = $_.Name
|
||||
$ToolVersion = $_.Value
|
||||
$LocalInstallerCommand = $InstallerPath
|
||||
$LocalInstallerCommand += " -ToolName $ToolName"
|
||||
$LocalInstallerCommand += " -InstallPath $InstallBin"
|
||||
$LocalInstallerCommand += " -BaseUri $BaseUri"
|
||||
$LocalInstallerCommand += " -CommonLibraryDirectory $EngCommonBaseDir"
|
||||
$LocalInstallerCommand += " -Version $ToolVersion"
|
||||
|
||||
if ($Verbose) {
|
||||
$LocalInstallerCommand += " -Verbose"
|
||||
}
|
||||
if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') {
|
||||
if($Force) {
|
||||
$LocalInstallerCommand += " -Force"
|
||||
}
|
||||
}
|
||||
if ($Clean) {
|
||||
$LocalInstallerCommand += " -Clean"
|
||||
}
|
||||
|
||||
Write-Verbose "Installing $ToolName version $ToolVersion"
|
||||
Write-Verbose "Executing '$LocalInstallerCommand'"
|
||||
Invoke-Expression "$LocalInstallerCommand"
|
||||
if ($LASTEXITCODE -Ne "0") {
|
||||
Write-Error "Execution failed"
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
Write-Host "No native tools defined in global.json"
|
||||
exit 0
|
||||
}
|
||||
|
||||
if ($Clean) {
|
||||
exit 0
|
||||
}
|
||||
if (Test-Path $InstallBin) {
|
||||
Write-Host "Native tools are available from" (Convert-Path -Path $InstallBin)
|
||||
Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)"
|
||||
}
|
||||
else {
|
||||
Write-Error "Native tools install directory does not exist, installation failed"
|
||||
exit 1
|
||||
}
|
||||
exit 0
|
||||
}
|
||||
catch {
|
||||
Write-Host $_
|
||||
Write-Host $_.Exception
|
||||
exit 1
|
||||
}
|
|
@@ -0,0 +1,145 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source="${BASH_SOURCE[0]}"
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
|
||||
base_uri='https://netcorenativeassets.blob.core.windows.net/resource-packages/external'
|
||||
install_directory=''
|
||||
clean=false
|
||||
force=false
|
||||
download_retries=5
|
||||
retry_wait_time_seconds=30
|
||||
global_json_file="${scriptroot}/../../global.json"
|
||||
declare -A native_assets
|
||||
|
||||
. $scriptroot/native/common-library.sh
|
||||
|
||||
while (($# > 0)); do
|
||||
lowerI="$(echo $1 | awk '{print tolower($0)}')"
|
||||
case $lowerI in
|
||||
--baseuri)
|
||||
base_uri=$2
|
||||
shift 2
|
||||
;;
|
||||
--installdirectory)
|
||||
install_directory=$2
|
||||
shift 2
|
||||
;;
|
||||
--clean)
|
||||
clean=true
|
||||
shift 1
|
||||
;;
|
||||
--force)
|
||||
force=true
|
||||
shift 1
|
||||
;;
|
||||
--downloadretries)
|
||||
download_retries=$2
|
||||
shift 2
|
||||
;;
|
||||
--retrywaittimeseconds)
|
||||
retry_wait_time_seconds=$2
|
||||
shift 2
|
||||
;;
|
||||
--help)
|
||||
echo "Common settings:"
|
||||
echo " --installdirectory Directory to install native toolset."
|
||||
echo " This is a command-line override for the default"
|
||||
echo " Install directory precedence order:"
|
||||
echo " - InstallDirectory command-line override"
|
||||
echo " - NETCOREENG_INSTALL_DIRECTORY environment variable"
|
||||
echo " - (default) %USERPROFILE%/.netcoreeng/native"
|
||||
echo ""
|
||||
echo " --clean Switch specifying not to install anything, but cleanup native asset folders"
|
||||
echo " --force Clean and then install tools"
|
||||
echo " --help Print help and exit"
|
||||
echo ""
|
||||
echo "Advanced settings:"
|
||||
echo " --baseuri <value> Base URI for where to download native tools from"
|
||||
echo " --downloadretries <value> Number of times a download should be attempted"
|
||||
echo " --retrywaittimeseconds <value> Wait time between download attempts"
|
||||
echo ""
|
||||
exit 0
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
function ReadGlobalJsonNativeTools {
|
||||
# Get the native-tools section from the global.json.
|
||||
local native_tools_section=$(cat $global_json_file | awk '/"native-tools"/,/}/')
|
||||
# Only extract the contents of the object.
|
||||
local native_tools_list=$(echo $native_tools_section | awk -F"[{}]" '{print $2}')
|
||||
native_tools_list=${native_tools_list//[\" ]/}
|
||||
native_tools_list=${native_tools_list//,/$'\n'}
|
||||
|
||||
local old_IFS=$IFS
|
||||
while read -r line; do
|
||||
# Lines are of the form: 'tool:version'
|
||||
IFS=:
|
||||
while read -r key value; do
|
||||
native_assets[$key]=$value
|
||||
done <<< "$line"
|
||||
done <<< "$native_tools_list"
|
||||
IFS=$old_IFS
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
native_base_dir=$install_directory
|
||||
if [[ -z $install_directory ]]; then
|
||||
native_base_dir=$(GetNativeInstallDirectory)
|
||||
fi
|
||||
|
||||
install_bin="${native_base_dir}/bin"
|
||||
|
||||
ReadGlobalJsonNativeTools
|
||||
|
||||
if [[ ${#native_assets[@]} -eq 0 ]]; then
|
||||
echo "No native tools defined in global.json"
|
||||
exit 0;
|
||||
else
|
||||
native_installer_dir="$scriptroot/native"
|
||||
for tool in "${!native_assets[@]}"
|
||||
do
|
||||
tool_version=${native_assets[$tool]}
|
||||
installer_name="install-$tool.sh"
|
||||
installer_command="$native_installer_dir/$installer_name"
|
||||
installer_command+=" --baseuri $base_uri"
|
||||
installer_command+=" --installpath $install_bin"
|
||||
installer_command+=" --version $tool_version"
|
||||
|
||||
if [[ $force = true ]]; then
|
||||
installer_command+=" --force"
|
||||
fi
|
||||
|
||||
if [[ $clean = true ]]; then
|
||||
installer_command+=" --clean"
|
||||
fi
|
||||
|
||||
echo "Installing $tool version $tool_version"
|
||||
echo "Executing '$installer_command'"
|
||||
$installer_command
|
||||
|
||||
if [[ $? != 0 ]]; then
|
||||
echo "Execution Failed" >&2
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
if [[ "$clean" = true ]]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ -d $install_bin ]]; then
|
||||
echo "Native tools are available from $install_bin"
|
||||
if [[ ! -z "$BUILD_BUILDNUMBER" ]]; then
|
||||
echo "##vso[task.prependpath]$install_bin"
|
||||
fi
|
||||
else
|
||||
echo "Native tools install directory does not exist, installation failed" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
exit 0
|
||||
|
|
@@ -0,0 +1,4 @@
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. See the LICENSE file in the project root for more information. -->
<Project>
  <Import Project="Sdk.props" Sdk="Microsoft.DotNet.Arcade.Sdk" />
</Project>
@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. -->
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net472</TargetFramework>
    <ImportDirectoryBuildTargets>false</ImportDirectoryBuildTargets>
  </PropertyGroup>
  <ItemGroup>
    <!-- Clear references; the SDK may add some depending on UsingToolXxx settings, but we only want to restore the following -->
    <PackageReference Remove="@(PackageReference)"/>
    <PackageReference Include="Microsoft.DotNet.IBCMerge" Version="$(MicrosoftDotNetIBCMergeVersion)" Condition="'$(UsingToolIbcOptimization)' == 'true'" />
    <PackageReference Include="Drop.App" Version="$(DropAppVersion)" ExcludeAssets="all" Condition="'$(UsingToolVisualStudioIbcTraining)' == 'true'"/>
  </ItemGroup>
  <PropertyGroup>
    <RestoreSources>
      https://devdiv.pkgs.visualstudio.com/_packaging/8f470c7e-ac49-4afe-a6ee-cf784e438b93/nuget/v3/index.json;
      https://devdiv.pkgs.visualstudio.com/_packaging/VS/nuget/v3/index.json;
    </RestoreSources>
  </PropertyGroup>

  <!-- Repository extensibility point -->
  <Import Project="$(RepositoryEngineeringDir)InternalTools.props" Condition="Exists('$(RepositoryEngineeringDir)InternalTools.props')" />
</Project>
@@ -0,0 +1,27 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
  [string] $verbosity = "minimal",
  [bool] $warnAsError = $true,
  [bool] $nodeReuse = $true,
  [switch] $ci,
  [switch] $prepareMachine,
  [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs
)

. $PSScriptRoot\tools.ps1

try {
  if ($ci) {
    $nodeReuse = $false
  }

  MSBuild @extraArgs
}
catch {
  Write-Host $_
  Write-Host $_.Exception
  Write-Host $_.ScriptStackTrace
  ExitWithExitCode 1
}

ExitWithExitCode 0
@@ -0,0 +1,58 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source="${BASH_SOURCE[0]}"
|
||||
|
||||
# resolve $source until the file is no longer a symlink
|
||||
while [[ -h "$source" ]]; do
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
source="$(readlink "$source")"
|
||||
# if $source was a relative symlink, we need to resolve it relative to the path where the
|
||||
# symlink file was located
|
||||
[[ $source != /* ]] && source="$scriptroot/$source"
|
||||
done
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
|
||||
verbosity='minimal'
|
||||
warn_as_error=true
|
||||
node_reuse=true
|
||||
prepare_machine=false
|
||||
extra_args=''
|
||||
|
||||
while (($# > 0)); do
|
||||
lowerI="$(echo $1 | awk '{print tolower($0)}')"
|
||||
case $lowerI in
|
||||
--verbosity)
|
||||
verbosity=$2
|
||||
shift 2
|
||||
;;
|
||||
--warnaserror)
|
||||
warn_as_error=$2
|
||||
shift 2
|
||||
;;
|
||||
--nodereuse)
|
||||
node_reuse=$2
|
||||
shift 2
|
||||
;;
|
||||
--ci)
|
||||
ci=true
|
||||
shift 1
|
||||
;;
|
||||
--preparemachine)
|
||||
prepare_machine=true
|
||||
shift 1
|
||||
;;
|
||||
*)
|
||||
extra_args="$extra_args $1"
|
||||
shift 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
. "$scriptroot/tools.sh"
|
||||
|
||||
if [[ "$ci" == true ]]; then
|
||||
node_reuse=false
|
||||
fi
|
||||
|
||||
MSBuild $extra_args
|
||||
ExitWithExitCode 0
|
|
@@ -0,0 +1,358 @@
|
|||
<#
|
||||
.SYNOPSIS
|
||||
Helper module to install an archive to a directory
|
||||
|
||||
.DESCRIPTION
|
||||
Helper module to download and extract an archive to a specified directory
|
||||
|
||||
.PARAMETER Uri
|
||||
Uri of artifact to download
|
||||
|
||||
.PARAMETER InstallDirectory
|
||||
Directory to extract artifact contents to
|
||||
|
||||
.PARAMETER Force
|
||||
Force download / extraction if file or contents already exist. Default = False
|
||||
|
||||
.PARAMETER DownloadRetries
|
||||
Total number of retry attempts. Default = 5
|
||||
|
||||
.PARAMETER RetryWaitTimeInSeconds
|
||||
Wait time between retry attempts in seconds. Default = 30
|
||||
|
||||
.NOTES
|
||||
Returns False if download or extraction fail, True otherwise
|
||||
#>
|
||||
function DownloadAndExtract {
|
||||
[CmdletBinding(PositionalBinding=$false)]
|
||||
Param (
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $Uri,
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $InstallDirectory,
|
||||
[switch] $Force = $False,
|
||||
[int] $DownloadRetries = 5,
|
||||
[int] $RetryWaitTimeInSeconds = 30
|
||||
)
|
||||
# Define verbose switch if undefined
|
||||
$Verbose = $VerbosePreference -Eq "Continue"
|
||||
|
||||
$TempToolPath = CommonLibrary\Get-TempPathFilename -Path $Uri
|
||||
|
||||
# Download native tool
|
||||
$DownloadStatus = CommonLibrary\Get-File -Uri $Uri `
|
||||
-Path $TempToolPath `
|
||||
-DownloadRetries $DownloadRetries `
|
||||
-RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
|
||||
-Force:$Force `
|
||||
-Verbose:$Verbose
|
||||
|
||||
if ($DownloadStatus -Eq $False) {
|
||||
Write-Error "Download failed"
|
||||
return $False
|
||||
}
|
||||
|
||||
# Extract native tool
|
||||
$UnzipStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath `
|
||||
-OutputDirectory $InstallDirectory `
|
||||
-Force:$Force `
|
||||
-Verbose:$Verbose
|
||||
|
||||
if ($UnzipStatus -Eq $False) {
|
||||
Write-Error "Unzip failed"
|
||||
return $False
|
||||
}
|
||||
return $True
|
||||
}
|
||||
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Download a file, retry on failure
|
||||
|
||||
.DESCRIPTION
|
||||
Download specified file and retry if attempt fails
|
||||
|
||||
.PARAMETER Uri
|
||||
Uri of file to download. If Uri is a local path, the file will be copied instead of downloaded
|
||||
|
||||
.PARAMETER Path
|
||||
Path to download or copy uri file to
|
||||
|
||||
.PARAMETER Force
|
||||
Overwrite existing file if present. Default = False
|
||||
|
||||
.PARAMETER DownloadRetries
|
||||
Total number of retry attempts. Default = 5
|
||||
|
||||
.PARAMETER RetryWaitTimeInSeconds
|
||||
Wait time between retry attempts in seconds Default = 30
|
||||
|
||||
#>
|
||||
function Get-File {
|
||||
[CmdletBinding(PositionalBinding=$false)]
|
||||
Param (
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $Uri,
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $Path,
|
||||
[int] $DownloadRetries = 5,
|
||||
[int] $RetryWaitTimeInSeconds = 30,
|
||||
[switch] $Force = $False
|
||||
)
|
||||
$Attempt = 0
|
||||
|
||||
if ($Force) {
|
||||
if (Test-Path $Path) {
|
||||
Remove-Item $Path -Force
|
||||
}
|
||||
}
|
||||
if (Test-Path $Path) {
|
||||
Write-Host "File '$Path' already exists, skipping download"
|
||||
return $True
|
||||
}
|
||||
|
||||
$DownloadDirectory = Split-Path -ErrorAction Ignore -Path "$Path" -Parent
|
||||
if (-Not (Test-Path $DownloadDirectory)) {
|
||||
New-Item -path $DownloadDirectory -force -itemType "Directory" | Out-Null
|
||||
}
|
||||
|
||||
if (Test-Path -IsValid -Path $Uri) {
|
||||
Write-Verbose "'$Uri' is a file path, copying file to '$Path'"
|
||||
Copy-Item -Path $Uri -Destination $Path
|
||||
return $?
|
||||
}
|
||||
else {
|
||||
Write-Verbose "Downloading $Uri"
|
||||
while($Attempt -Lt $DownloadRetries)
|
||||
{
|
||||
try {
|
||||
Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $Path
|
||||
Write-Verbose "Downloaded to '$Path'"
|
||||
return $True
|
||||
}
|
||||
catch {
|
||||
$Attempt++
|
||||
if ($Attempt -Lt $DownloadRetries) {
|
||||
$AttemptsLeft = $DownloadRetries - $Attempt
|
||||
Write-Warning "Download failed, $AttemptsLeft attempts remaining, will retry in $RetryWaitTimeInSeconds seconds"
|
||||
Start-Sleep -Seconds $RetryWaitTimeInSeconds
|
||||
}
|
||||
else {
|
||||
Write-Error $_
|
||||
Write-Error $_.Exception
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return $False
|
||||
}
|
||||
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Generate a shim for a native tool
|
||||
|
||||
.DESCRIPTION
|
||||
Creates a wrapper script (shim) that passes arguments forward to native tool assembly
|
||||
|
||||
.PARAMETER ShimName
|
||||
The name of the shim
|
||||
|
||||
.PARAMETER ShimDirectory
|
||||
The directory where shims are stored
|
||||
|
||||
.PARAMETER ToolFilePath
|
||||
Path to file that shim forwards to
|
||||
|
||||
.PARAMETER Force
|
||||
Replace shim if already present. Default = False
|
||||
|
||||
.NOTES
|
||||
Returns $True if generating shim succeeds, $False otherwise
|
||||
#>
|
||||
function New-ScriptShim {
|
||||
[CmdletBinding(PositionalBinding=$false)]
|
||||
Param (
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $ShimName,
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $ShimDirectory,
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $ToolFilePath,
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $BaseUri,
|
||||
[switch] $Force
|
||||
)
|
||||
try {
|
||||
Write-Verbose "Generating '$ShimName' shim"
|
||||
|
||||
if (-Not (Test-Path $ToolFilePath)){
|
||||
Write-Error "Specified tool file path '$ToolFilePath' does not exist"
|
||||
return $False
|
||||
}
|
||||
|
||||
# WinShimmer is a small .NET Framework program that creates .exe shims to bootstrapped programs
|
||||
# Many of the checks for installed programs expect a .exe extension for Windows tools, rather
|
||||
# than a .bat or .cmd file.
|
||||
# Source: https://github.com/dotnet/arcade/tree/master/src/WinShimmer
|
||||
if (-Not (Test-Path "$ShimDirectory\WinShimmer\winshimmer.exe")) {
|
||||
$InstallStatus = DownloadAndExtract -Uri "$BaseUri/windows/winshimmer/WinShimmer.zip" `
|
||||
-InstallDirectory $ShimDirectory\WinShimmer `
|
||||
-Force:$Force `
|
||||
-DownloadRetries 2 `
|
||||
-RetryWaitTimeInSeconds 5 `
|
||||
-Verbose:$Verbose
|
||||
}
|
||||
|
||||
if ((Test-Path (Join-Path $ShimDirectory "$ShimName.exe"))) {
|
||||
Write-Host "$ShimName.exe already exists; replacing..."
|
||||
Remove-Item (Join-Path $ShimDirectory "$ShimName.exe")
|
||||
}
|
||||
|
||||
Invoke-Expression "$ShimDirectory\WinShimmer\winshimmer.exe $ShimName $ToolFilePath $ShimDirectory"
|
||||
return $True
|
||||
}
|
||||
catch {
|
||||
Write-Host $_
|
||||
Write-Host $_.Exception
|
||||
return $False
|
||||
}
|
||||
}
|
||||
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Returns the machine architecture of the host machine
|
||||
|
||||
.NOTES
|
||||
Returns 'x64' on 64 bit machines
|
||||
Returns 'x86' on 32 bit machines
|
||||
#>
|
||||
function Get-MachineArchitecture {
|
||||
$ProcessorArchitecture = $Env:PROCESSOR_ARCHITECTURE
|
||||
$ProcessorArchitectureW6432 = $Env:PROCESSOR_ARCHITEW6432
|
||||
if($ProcessorArchitecture -Eq "X86")
|
||||
{
|
||||
if(($ProcessorArchitectureW6432 -Eq "") -Or
|
||||
($ProcessorArchitectureW6432 -Eq "X86")) {
|
||||
return "x86"
|
||||
}
|
||||
$ProcessorArchitecture = $ProcessorArchitectureW6432
|
||||
}
|
||||
if (($ProcessorArchitecture -Eq "AMD64") -Or
|
||||
($ProcessorArchitecture -Eq "IA64") -Or
|
||||
($ProcessorArchitecture -Eq "ARM64")) {
|
||||
return "x64"
|
||||
}
|
||||
return "x86"
|
||||
}
|
||||
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Get the name of a temporary folder under the native install directory
|
||||
#>
|
||||
function Get-TempDirectory {
|
||||
return Join-Path (Get-NativeInstallDirectory) "temp/"
|
||||
}
|
||||
|
||||
function Get-TempPathFilename {
|
||||
[CmdletBinding(PositionalBinding=$false)]
|
||||
Param (
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $Path
|
||||
)
|
||||
$TempDir = CommonLibrary\Get-TempDirectory
|
||||
$TempFilename = Split-Path $Path -leaf
|
||||
$TempPath = Join-Path $TempDir $TempFilename
|
||||
return $TempPath
|
||||
}
|
||||
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Returns the base directory to use for native tool installation
|
||||
|
||||
.NOTES
|
||||
Returns the value of the NETCOREENG_INSTALL_DIRECTORY if that environment variable
|
||||
is set, or otherwise returns an install directory under the %USERPROFILE%
|
||||
#>
|
||||
function Get-NativeInstallDirectory {
|
||||
$InstallDir = $Env:NETCOREENG_INSTALL_DIRECTORY
|
||||
if (!$InstallDir) {
|
||||
$InstallDir = Join-Path $Env:USERPROFILE ".netcoreeng/native/"
|
||||
}
|
||||
return $InstallDir
|
||||
}
|
||||
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Unzip an archive
|
||||
|
||||
.DESCRIPTION
|
||||
Powershell module to unzip an archive to a specified directory
|
||||
|
||||
.PARAMETER ZipPath (Required)
|
||||
Path to archive to unzip
|
||||
|
||||
.PARAMETER OutputDirectory (Required)
|
||||
Output directory for archive contents
|
||||
|
||||
.PARAMETER Force
|
||||
Overwrite output directory contents if they already exist
|
||||
|
||||
.NOTES
|
||||
- Returns True and does not perform an extraction if output directory already exists but Overwrite is not True.
|
||||
- Returns True if unzip operation is successful
|
||||
- Returns False if Overwrite is True and it is unable to remove contents of OutputDirectory
|
||||
- Returns False if unable to extract zip archive
|
||||
#>
|
||||
function Expand-Zip {
|
||||
[CmdletBinding(PositionalBinding=$false)]
|
||||
Param (
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $ZipPath,
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $OutputDirectory,
|
||||
[switch] $Force
|
||||
)
|
||||
|
||||
Write-Verbose "Extracting '$ZipPath' to '$OutputDirectory'"
|
||||
try {
|
||||
if ((Test-Path $OutputDirectory) -And (-Not $Force)) {
|
||||
Write-Host "Directory '$OutputDirectory' already exists, skipping extract"
|
||||
return $True
|
||||
}
|
||||
if (Test-Path $OutputDirectory) {
|
||||
Write-Verbose "'Force' is 'True', but '$OutputDirectory' exists, removing directory"
|
||||
Remove-Item $OutputDirectory -Force -Recurse
|
||||
if ($? -Eq $False) {
|
||||
Write-Error "Unable to remove '$OutputDirectory'"
|
||||
return $False
|
||||
}
|
||||
}
|
||||
if (-Not (Test-Path $OutputDirectory)) {
|
||||
New-Item -path $OutputDirectory -Force -itemType "Directory" | Out-Null
|
||||
}
|
||||
|
||||
Add-Type -assembly "system.io.compression.filesystem"
|
||||
[io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$OutputDirectory")
|
||||
if ($? -Eq $False) {
|
||||
Write-Error "Unable to extract '$ZipPath'"
|
||||
return $False
|
||||
}
|
||||
}
|
||||
catch {
|
||||
Write-Host $_
|
||||
Write-Host $_.Exception
|
||||
|
||||
return $False
|
||||
}
|
||||
return $True
|
||||
}
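# Illustrative usage of Expand-Zip (paths are examples, not part of this repo):
#   CommonLibrary\Expand-Zip -ZipPath "C:\temp\cmake.zip" -OutputDirectory "C:\native\cmake" -Force -Verbose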
|
||||
|
||||
export-modulemember -function DownloadAndExtract
|
||||
export-modulemember -function Expand-Zip
|
||||
export-modulemember -function Get-File
|
||||
export-modulemember -function Get-MachineArchitecture
|
||||
export-modulemember -function Get-NativeInstallDirectory
|
||||
export-modulemember -function Get-TempDirectory
|
||||
export-modulemember -function Get-TempPathFilename
|
||||
export-modulemember -function New-ScriptShim
|
|
@ -0,0 +1,168 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
function GetNativeInstallDirectory {
|
||||
local install_dir
|
||||
|
||||
if [[ -z $NETCOREENG_INSTALL_DIRECTORY ]]; then
|
||||
install_dir=$HOME/.netcoreeng/native/
|
||||
else
|
||||
install_dir=$NETCOREENG_INSTALL_DIRECTORY
|
||||
fi
|
||||
|
||||
echo $install_dir
|
||||
return 0
|
||||
}
|
||||
|
||||
function GetTempDirectory {
|
||||
|
||||
echo $(GetNativeInstallDirectory)temp/
|
||||
return 0
|
||||
}
|
||||
|
||||
function ExpandZip {
|
||||
local zip_path=$1
|
||||
local output_directory=$2
|
||||
local force=${3:-false}
|
||||
|
||||
echo "Extracting $zip_path to $output_directory"
|
||||
if [[ -d $output_directory ]] && [[ $force = false ]]; then
|
||||
echo "Directory '$output_directory' already exists, skipping extract"
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [[ -d $output_directory ]]; then
|
||||
echo "'Force flag enabled, but '$output_directory' exists. Removing directory"
|
||||
rm -rf $output_directory
|
||||
if [[ $? != 0 ]]; then
|
||||
echo "Unable to remove '$output_directory'" >&2
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Creating directory: '$output_directory'"
|
||||
mkdir -p $output_directory
|
||||
|
||||
echo "Extracting archive"
|
||||
tar -xf $zip_path -C $output_directory
|
||||
if [[ $? != 0 ]]; then
|
||||
echo "Unable to extract '$zip_path'" >&2
|
||||
return 1
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
function GetCurrentOS {
|
||||
local unameOut="$(uname -s)"
|
||||
case $unameOut in
|
||||
Linux*) echo "Linux";;
|
||||
Darwin*) echo "MacOS";;
|
||||
esac
|
||||
return 0
|
||||
}
|
||||
|
||||
function GetFile {
|
||||
local uri=$1
|
||||
local path=$2
|
||||
local force=${3:-false}
|
||||
local download_retries=${4:-5}
|
||||
local retry_wait_time_seconds=${5:-30}
|
||||
|
||||
if [[ -f $path ]]; then
|
||||
if [[ $force = false ]]; then
|
||||
echo "File '$path' already exists. Skipping download"
|
||||
return 0
|
||||
else
|
||||
rm -rf $path
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -f $uri ]]; then
|
||||
echo "'$uri' is a file path, copying file to '$path'"
|
||||
cp $uri $path
|
||||
return $?
|
||||
fi
|
||||
|
||||
echo "Downloading $uri"
|
||||
# Use curl if available, otherwise use wget
|
||||
if command -v curl > /dev/null; then
|
||||
curl "$uri" -sSL --retry $download_retries --retry-delay $retry_wait_time_seconds --create-dirs -o "$path" --fail
|
||||
else
|
||||
wget -q -O "$path" "$uri" --tries="$download_retries"
|
||||
fi
|
||||
|
||||
return $?
|
||||
}
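# Illustrative usage of GetFile (uri, destination path, force, retries, and retry wait are example values):
#   GetFile "https://example.com/tools/cmake.tar.gz" "/tmp/cmake.tar.gz" false 5 30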
|
||||
|
||||
function GetTempPathFileName {
|
||||
local path=$1
|
||||
|
||||
local temp_dir=$(GetTempDirectory)
|
||||
local temp_file_name=$(basename $path)
|
||||
echo $temp_dir$temp_file_name
|
||||
return 0
|
||||
}
|
||||
|
||||
function DownloadAndExtract {
|
||||
local uri=$1
|
||||
local installDir=$2
|
||||
local force=${3:-false}
|
||||
local download_retries=${4:-5}
|
||||
local retry_wait_time_seconds=${5:-30}
|
||||
|
||||
local temp_tool_path=$(GetTempPathFileName $uri)
|
||||
|
||||
echo "downloading to: $temp_tool_path"
|
||||
|
||||
# Download file
|
||||
GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds
|
||||
if [[ $? != 0 ]]; then
|
||||
echo "Failed to download '$uri' to '$temp_tool_path'." >&2
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Extract File
|
||||
echo "extracting from $temp_tool_path to $installDir"
|
||||
ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds
|
||||
if [[ $? != 0 ]]; then
|
||||
echo "Failed to extract '$temp_tool_path' to '$installDir'." >&2
|
||||
return 1
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
function NewScriptShim {
|
||||
local shimpath=$1
|
||||
local tool_file_path=$2
|
||||
local force=${3:-false}
|
||||
|
||||
echo "Generating '$shimpath' shim"
|
||||
if [[ -f $shimpath ]]; then
|
||||
if [[ $force = false ]]; then
|
||||
echo "File '$shimpath' already exists." >&2
|
||||
return 1
|
||||
else
|
||||
rm -rf $shimpath
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ ! -f $tool_file_path ]]; then
|
||||
echo "Specified tool file path:'$tool_file_path' does not exist" >&2
|
||||
return 1
|
||||
fi
|
||||
|
||||
local shim_contents=$'#!/usr/bin/env bash\n'
|
||||
shim_contents+="SHIMARGS="$'$1\n'
|
||||
shim_contents+="$tool_file_path"$' $SHIMARGS\n'
|
||||
|
||||
# Write shim file
|
||||
echo "$shim_contents" > $shimpath
|
||||
|
||||
chmod +x $shimpath
|
||||
|
||||
echo "Finished generating shim '$shimpath'"
|
||||
|
||||
return $?
|
||||
}
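# For illustration (paths are examples), NewScriptShim "/usr/local/bin/cmake.sh" "/opt/cmake/bin/cmake" true
# writes a shim of the form:
#   #!/usr/bin/env bash
#   SHIMARGS=$1
#   /opt/cmake/bin/cmake $SHIMARGS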
|
||||
|
|
@ -0,0 +1,117 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source="${BASH_SOURCE[0]}"
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
|
||||
. $scriptroot/common-library.sh
|
||||
|
||||
base_uri=
|
||||
install_path=
|
||||
version=
|
||||
clean=false
|
||||
force=false
|
||||
download_retries=5
|
||||
retry_wait_time_seconds=30
|
||||
|
||||
while (($# > 0)); do
|
||||
lowerI="$(echo $1 | awk '{print tolower($0)}')"
|
||||
case $lowerI in
|
||||
--baseuri)
|
||||
base_uri=$2
|
||||
shift 2
|
||||
;;
|
||||
--installpath)
|
||||
install_path=$2
|
||||
shift 2
|
||||
;;
|
||||
--version)
|
||||
version=$2
|
||||
shift 2
|
||||
;;
|
||||
--clean)
|
||||
clean=true
|
||||
shift 1
|
||||
;;
|
||||
--force)
|
||||
force=true
|
||||
shift 1
|
||||
;;
|
||||
--downloadretries)
|
||||
download_retries=$2
|
||||
shift 2
|
||||
;;
|
||||
--retrywaittimeseconds)
|
||||
retry_wait_time_seconds=$2
|
||||
shift 2
|
||||
;;
|
||||
--help)
|
||||
echo "Common settings:"
|
||||
echo " --baseuri <value> Base file directory or Url wrom which to acquire tool archives"
|
||||
echo " --installpath <value> Base directory to install native tool to"
|
||||
echo " --clean Don't install the tool, just clean up the current install of the tool"
|
||||
echo " --force Force install of tools even if they previously exist"
|
||||
echo " --help Print help and exit"
|
||||
echo ""
|
||||
echo "Advanced settings:"
|
||||
echo " --downloadretries Total number of retry attempts"
|
||||
echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
|
||||
echo ""
|
||||
exit 0
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
tool_name="cmake"
|
||||
tool_os=$(GetCurrentOS)
|
||||
tool_folder=$(echo $tool_os | awk '{print tolower($0)}')
|
||||
tool_arch="x86_64"
|
||||
tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
|
||||
tool_install_directory="$install_path/$tool_name/$version"
|
||||
tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
|
||||
shim_path="$install_path/$tool_name.sh"
|
||||
uri="${base_uri}/$tool_folder/cmake/$tool_name_moniker.tar.gz"
|
||||
|
||||
# Clean up tool and installers
|
||||
if [[ $clean = true ]]; then
|
||||
echo "Cleaning $tool_install_directory"
|
||||
if [[ -d $tool_install_directory ]]; then
|
||||
rm -rf $tool_install_directory
|
||||
fi
|
||||
|
||||
echo "Cleaning $shim_path"
|
||||
if [[ -f $shim_path ]]; then
|
||||
rm -rf $shim_path
|
||||
fi
|
||||
|
||||
tool_temp_path=$(GetTempPathFileName $uri)
|
||||
echo "Cleaning $tool_temp_path"
|
||||
if [[ -f $tool_temp_path ]]; then
|
||||
rm -rf $tool_temp_path
|
||||
fi
|
||||
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Install tool
|
||||
if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
|
||||
echo "$tool_name ($version) already exists, skipping install"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds
|
||||
|
||||
if [[ $? != 0 ]]; then
|
||||
echo "Installation failed" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Generate Shim
|
||||
# Always rewrite shims so that we are referencing the expected version
|
||||
NewScriptShim $shim_path $tool_file_path true
|
||||
|
||||
if [[ $? != 0 ]]; then
|
||||
echo "Shim generation failed" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
exit 0
|
|
@ -0,0 +1,130 @@
|
|||
<#
|
||||
.SYNOPSIS
|
||||
Install native tool
|
||||
|
||||
.DESCRIPTION
|
||||
Install cmake native tool from Azure blob storage
|
||||
|
||||
.PARAMETER InstallPath
|
||||
Base directory to install native tool to
|
||||
|
||||
.PARAMETER BaseUri
|
||||
Base file directory or Url from which to acquire tool archives
|
||||
|
||||
.PARAMETER CommonLibraryDirectory
|
||||
Path to folder containing common library modules
|
||||
|
||||
.PARAMETER Force
|
||||
Force install of tools even if they previously exist
|
||||
|
||||
.PARAMETER Clean
|
||||
Don't install the tool, just clean up the current install of the tool
|
||||
|
||||
.PARAMETER DownloadRetries
|
||||
Total number of retry attempts
|
||||
|
||||
.PARAMETER RetryWaitTimeInSeconds
|
||||
Wait time between retry attempts in seconds
|
||||
|
||||
.NOTES
|
||||
Returns 0 if install succeeds, 1 otherwise
|
||||
#>
|
||||
[CmdletBinding(PositionalBinding=$false)]
|
||||
Param (
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $ToolName,
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $InstallPath,
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $BaseUri,
|
||||
[Parameter(Mandatory=$True)]
|
||||
[string] $Version,
|
||||
[string] $CommonLibraryDirectory = $PSScriptRoot,
|
||||
[switch] $Force = $False,
|
||||
[switch] $Clean = $False,
|
||||
[int] $DownloadRetries = 5,
|
||||
[int] $RetryWaitTimeInSeconds = 30
|
||||
)
|
||||
|
||||
# Import common library modules
|
||||
Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1")
|
||||
|
||||
try {
|
||||
# Define verbose switch if undefined
|
||||
$Verbose = $VerbosePreference -Eq "Continue"
|
||||
|
||||
$Arch = CommonLibrary\Get-MachineArchitecture
|
||||
$ToolOs = "win64"
|
||||
if($Arch -Eq "x32") {
|
||||
$ToolOs = "win32"
|
||||
}
|
||||
$ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch"
|
||||
$ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\"
|
||||
$Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip"
|
||||
$ShimPath = Join-Path $InstallPath "$ToolName.exe"
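# For illustration, installing cmake 3.11.1 on an x64 machine with -InstallPath C:\native resolves to:
#   $ToolNameMoniker      = cmake-3.11.1-win64-x64
#   $ToolInstallDirectory = C:\native\cmake\3.11.1\
#   $Uri                  = $BaseUri/windows/cmake/cmake-3.11.1-win64-x64.zip
#   $ShimPath             = C:\native\cmake.exe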
|
||||
|
||||
if ($Clean) {
|
||||
Write-Host "Cleaning $ToolInstallDirectory"
|
||||
if (Test-Path $ToolInstallDirectory) {
|
||||
Remove-Item $ToolInstallDirectory -Force -Recurse
|
||||
}
|
||||
Write-Host "Cleaning $ShimPath"
|
||||
if (Test-Path $ShimPath) {
|
||||
Remove-Item $ShimPath -Force
|
||||
}
|
||||
$ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri
|
||||
Write-Host "Cleaning $ToolTempPath"
|
||||
if (Test-Path $ToolTempPath) {
|
||||
Remove-Item $ToolTempPath -Force
|
||||
}
|
||||
exit 0
|
||||
}
|
||||
|
||||
# Install tool
|
||||
if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) {
|
||||
Write-Verbose "$ToolName ($Version) already exists, skipping install"
|
||||
}
|
||||
else {
|
||||
$InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri `
|
||||
-InstallDirectory $ToolInstallDirectory `
|
||||
-Force:$Force `
|
||||
-DownloadRetries $DownloadRetries `
|
||||
-RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
|
||||
-Verbose:$Verbose
|
||||
|
||||
if ($InstallStatus -Eq $False) {
|
||||
Write-Error "Installation failed"
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
|
||||
$ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName }
|
||||
if (@($ToolFilePath).Length -Gt 1) {
|
||||
Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))"
|
||||
exit 1
|
||||
} elseif (@($ToolFilePath).Length -Lt 1) {
|
||||
Write-Error "$ToolName was not found in $ToolFilePath."
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Generate shim
|
||||
# Always rewrite shims so that we are referencing the expected version
|
||||
$GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName `
|
||||
-ShimDirectory $InstallPath `
|
||||
-ToolFilePath "$ToolFilePath" `
|
||||
-BaseUri $BaseUri `
|
||||
-Force:$Force `
|
||||
-Verbose:$Verbose
|
||||
|
||||
if ($GenerateShimStatus -Eq $False) {
|
||||
Write-Error "Generate shim failed"
|
||||
exit 1
|
||||
}
|
||||
|
||||
exit 0
|
||||
}
|
||||
catch {
|
||||
Write-Host $_
|
||||
Write-Host $_.Exception
|
||||
exit 1
|
||||
}
|
|
@ -0,0 +1,79 @@
|
|||
[CmdletBinding(PositionalBinding=$false)]
|
||||
Param(
|
||||
[string] $configuration = "Debug",
|
||||
[string] $task,
|
||||
[string] $verbosity = "minimal",
|
||||
[string] $msbuildEngine = $null,
|
||||
[switch] $restore,
|
||||
[switch] $prepareMachine,
|
||||
[switch] $help,
|
||||
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
|
||||
)
|
||||
|
||||
$ci = $true
|
||||
$binaryLog = $true
|
||||
$warnAsError = $true
|
||||
|
||||
. $PSScriptRoot\tools.ps1
|
||||
|
||||
function Print-Usage() {
|
||||
Write-Host "Common settings:"
|
||||
Write-Host " -task <value> Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
|
||||
Write-Host " -restore Restore dependencies"
|
||||
Write-Host " -verbosity <value> Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
|
||||
Write-Host " -help Print help and exit"
|
||||
Write-Host ""
|
||||
|
||||
Write-Host "Advanced settings:"
|
||||
Write-Host " -prepareMachine Prepare machine for CI run"
|
||||
Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
|
||||
Write-Host ""
|
||||
Write-Host "Command line arguments not listed above are passed thru to msbuild."
|
||||
}
|
||||
|
||||
function Build([string]$target) {
|
||||
$logSuffix = if ($target -eq "Execute") { "" } else { ".$target" }
|
||||
$log = Join-Path $LogDir "$task$logSuffix.binlog"
|
||||
$outputPath = Join-Path $ToolsetDir "$task\\"
|
||||
|
||||
MSBuild $taskProject `
|
||||
/bl:$log `
|
||||
/t:$target `
|
||||
/p:Configuration=$configuration `
|
||||
/p:RepoRoot=$RepoRoot `
|
||||
/p:BaseIntermediateOutputPath=$outputPath `
|
||||
@properties
|
||||
}
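# Example invocation (mirrors how the publish-build-assets templates below call this script):
#   eng\common\sdk-task.ps1 -task PublishBuildAssets -restore -msbuildEngine dotnet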
|
||||
|
||||
try {
|
||||
if ($help -or (($null -ne $properties) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) {
|
||||
Print-Usage
|
||||
exit 0
|
||||
}
|
||||
|
||||
if ($task -eq "") {
|
||||
Write-Host "Missing required parameter '-task <value>'" -ForegroundColor Red
|
||||
Print-Usage
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
$taskProject = GetSdkTaskProject $task
|
||||
if (!(Test-Path $taskProject)) {
|
||||
Write-Host "Unknown task: $task" -ForegroundColor Red
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
if ($restore) {
|
||||
Build "Restore"
|
||||
}
|
||||
|
||||
Build "Execute"
|
||||
}
|
||||
catch {
|
||||
Write-Host $_
|
||||
Write-Host $_.Exception
|
||||
Write-Host $_.ScriptStackTrace
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
ExitWithExitCode 0
|
|
@ -0,0 +1,197 @@
|
|||
parameters:
|
||||
# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
|
||||
cancelTimeoutInMinutes: ''
|
||||
|
||||
condition: ''
|
||||
|
||||
continueOnError: false
|
||||
|
||||
container: ''
|
||||
|
||||
dependsOn: ''
|
||||
|
||||
displayName: ''
|
||||
|
||||
steps: []
|
||||
|
||||
pool: ''
|
||||
|
||||
strategy: ''
|
||||
|
||||
timeoutInMinutes: ''
|
||||
|
||||
variables: []
|
||||
|
||||
workspace: ''
|
||||
|
||||
# Job base template specific parameters
|
||||
# Optional: Enable installing Microbuild plugin
|
||||
# if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
|
||||
# _TeamName - the name of your team
|
||||
# _SignType - 'test' or 'real'
|
||||
enableMicrobuild: false
|
||||
|
||||
# Optional: Include PublishBuildArtifacts task
|
||||
enablePublishBuildArtifacts: false
|
||||
|
||||
# Optional: Enable publishing to the build asset registry
|
||||
enablePublishBuildAssets: false
|
||||
|
||||
# Optional: Include PublishTestResults task
|
||||
enablePublishTestResults: false
|
||||
|
||||
# Optional: enable sending telemetry
|
||||
enableTelemetry: false
|
||||
|
||||
# Optional: define the helix repo for telemetry (example: 'dotnet/arcade')
|
||||
helixRepo: ''
|
||||
|
||||
# Required: name of the job
|
||||
name: ''
|
||||
|
||||
# Optional: should run as a public build even in the internal project
|
||||
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
|
||||
runAsPublic: false
|
||||
|
||||
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
|
||||
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
|
||||
|
||||
jobs:
|
||||
- job: ${{ parameters.name }}
|
||||
|
||||
${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
|
||||
cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
|
||||
|
||||
${{ if ne(parameters.condition, '') }}:
|
||||
condition: ${{ parameters.condition }}
|
||||
|
||||
${{ if ne(parameters.container, '') }}:
|
||||
container: ${{ parameters.container }}
|
||||
|
||||
${{ if ne(parameters.continueOnError, '') }}:
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
|
||||
${{ if ne(parameters.dependsOn, '') }}:
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
|
||||
${{ if ne(parameters.displayName, '') }}:
|
||||
displayName: ${{ parameters.displayName }}
|
||||
|
||||
${{ if ne(parameters.pool, '') }}:
|
||||
pool: ${{ parameters.pool }}
|
||||
|
||||
${{ if ne(parameters.strategy, '') }}:
|
||||
strategy: ${{ parameters.strategy }}
|
||||
|
||||
${{ if ne(parameters.timeoutInMinutes, '') }}:
|
||||
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
|
||||
|
||||
variables:
|
||||
- ${{ each variable in parameters.variables }}:
|
||||
# handle name-value variable syntax
|
||||
# example:
|
||||
# - name: [key]
|
||||
# value: [value]
|
||||
- ${{ if ne(variable.name, '') }}:
|
||||
- name: ${{ variable.name }}
|
||||
value: ${{ variable.value }}
|
||||
|
||||
# handle variable groups
|
||||
- ${{ if ne(variable.group, '') }}:
|
||||
- group: ${{ variable.group }}
|
||||
|
||||
# handle key-value variable syntax.
|
||||
# example:
|
||||
# - [key]: [value]
|
||||
- ${{ if and(eq(variable.name, ''), eq(variable.group, '')) }}:
|
||||
- ${{ each pair in variable }}:
|
||||
- name: ${{ pair.key }}
|
||||
value: ${{ pair.value }}
|
||||
|
||||
# DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
|
||||
- ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- group: DotNet-HelixApi-Access
|
||||
|
||||
${{ if ne(parameters.workspace, '') }}:
|
||||
workspace: ${{ parameters.workspace }}
|
||||
|
||||
steps:
|
||||
- ${{ if eq(parameters.enableTelemetry, 'true') }}:
|
||||
# Telemetry tasks are built from https://github.com/dotnet/arcade-extensions
|
||||
- task: sendStartTelemetry@0
|
||||
displayName: 'Send Helix Start Telemetry'
|
||||
inputs:
|
||||
helixRepo: ${{ parameters.helixRepo }}
|
||||
buildConfig: $(_BuildConfig)
|
||||
runAsPublic: ${{ parameters.runAsPublic }}
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
condition: always()
|
||||
|
||||
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- task: MicroBuildSigningPlugin@2
|
||||
displayName: Install MicroBuild plugin
|
||||
inputs:
|
||||
signType: $(_SignType)
|
||||
zipSources: false
|
||||
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
|
||||
env:
|
||||
TeamName: $(_TeamName)
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
|
||||
|
||||
- ${{ each step in parameters.steps }}:
|
||||
- ${{ step }}
|
||||
|
||||
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- task: MicroBuildCleanup@1
|
||||
displayName: Execute Microbuild cleanup tasks
|
||||
condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
env:
|
||||
TeamName: $(_TeamName)
|
||||
|
||||
- ${{ if eq(parameters.enableTelemetry, 'true') }}:
|
||||
# Telemetry tasks are built from https://github.com/dotnet/arcade-extensions
|
||||
- task: sendEndTelemetry@0
|
||||
displayName: 'Send Helix End Telemetry'
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
condition: always()
|
||||
|
||||
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Publish Logs
|
||||
inputs:
|
||||
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
|
||||
PublishLocation: Container
|
||||
ArtifactName: $(Agent.Os)_$(Agent.JobName)
|
||||
continueOnError: true
|
||||
condition: always()
|
||||
|
||||
- ${{ if eq(parameters.enablePublishTestResults, 'true') }}:
|
||||
- task: PublishTestResults@2
|
||||
displayName: Publish Test Results
|
||||
inputs:
|
||||
testResultsFormat: 'xUnit'
|
||||
testResultsFiles: '*.xml'
|
||||
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
|
||||
continueOnError: true
|
||||
condition: always()
|
||||
|
||||
- ${{ if and(eq(parameters.enablePublishBuildAssets, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- task: CopyFiles@2
|
||||
displayName: Gather Asset Manifests
|
||||
inputs:
|
||||
SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
|
||||
TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Push Asset Manifests
|
||||
inputs:
|
||||
PathtoPublish: '$(Build.StagingDirectory)/AssetManifests'
|
||||
PublishLocation: Container
|
||||
ArtifactName: AssetManifests
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
|
|
@ -0,0 +1,66 @@
|
|||
parameters:
|
||||
configuration: 'Debug'
|
||||
|
||||
# Optional: condition for the job to run
|
||||
condition: ''
|
||||
|
||||
# Optional: 'true' if future jobs should run even if this job fails
|
||||
continueOnError: false
|
||||
|
||||
# Optional: dependencies of the job
|
||||
dependsOn: ''
|
||||
|
||||
# Optional: Include PublishBuildArtifacts task
|
||||
enablePublishBuildArtifacts: false
|
||||
|
||||
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
|
||||
pool: {}
|
||||
|
||||
# Optional: should run as a public build even in the internal project
|
||||
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
|
||||
runAsPublic: false
|
||||
|
||||
jobs:
|
||||
- job: Asset_Registry_Publish
|
||||
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
|
||||
displayName: Publish to Build Asset Registry
|
||||
|
||||
pool: ${{ parameters.pool }}
|
||||
|
||||
variables:
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- name: _BuildConfig
|
||||
value: ${{ parameters.configuration }}
|
||||
- group: Publish-Build-Assets
|
||||
|
||||
steps:
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: Download artifact
|
||||
inputs:
|
||||
artifactName: AssetManifests
|
||||
downloadPath: '$(Build.StagingDirectory)/Download'
|
||||
condition: ${{ parameters.condition }}
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
- task: PowerShell@2
|
||||
displayName: Publish Build Assets
|
||||
inputs:
|
||||
filePath: eng\common\sdk-task.ps1
|
||||
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
|
||||
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
|
||||
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
|
||||
/p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
|
||||
/p:Configuration=$(_BuildConfig)
|
||||
condition: ${{ parameters.condition }}
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Publish Logs to VSTS
|
||||
inputs:
|
||||
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
|
||||
PublishLocation: Container
|
||||
ArtifactName: $(Agent.Os)_PublishBuildAssets
|
||||
continueOnError: true
|
||||
condition: always()
|
|
@ -0,0 +1,71 @@
|
|||
parameters:
|
||||
# Optional: 'true' if failures in job.yml job should not fail the job
|
||||
continueOnError: false
|
||||
|
||||
# Optional: Enable installing Microbuild plugin
|
||||
# if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
|
||||
# _TeamName - the name of your team
|
||||
# _SignType - 'test' or 'real'
|
||||
enableMicrobuild: false
|
||||
|
||||
# Optional: Include PublishBuildArtifacts task
|
||||
enablePublishBuildArtifacts: false
|
||||
|
||||
# Optional: Enable publishing to the build asset registry
|
||||
enablePublishBuildAssets: false
|
||||
|
||||
# Optional: Include PublishTestResults task
|
||||
enablePublishTestResults: false
|
||||
|
||||
# Optional: enable sending telemetry
|
||||
# if enabled then the 'helixRepo' parameter should also be specified
|
||||
enableTelemetry: false
|
||||
|
||||
# Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
|
||||
jobs: []
|
||||
|
||||
# Optional: define the helix repo for telemetry (example: 'dotnet/arcade')
|
||||
helixRepo: ''
|
||||
|
||||
# Optional: Override automatically derived dependsOn value for "publish build assets" job
|
||||
publishBuildAssetsDependsOn: ''
|
||||
|
||||
# Optional: should run as a public build even in the internal project
|
||||
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
|
||||
runAsPublic: false
|
||||
|
||||
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
|
||||
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
|
||||
|
||||
jobs:
|
||||
- ${{ each job in parameters.jobs }}:
|
||||
- template: ../job/job.yml
|
||||
parameters:
|
||||
# pass along parameters
|
||||
${{ each parameter in parameters }}:
|
||||
${{ if ne(parameter.key, 'jobs') }}:
|
||||
${{ parameter.key }}: ${{ parameter.value }}
|
||||
|
||||
# pass along job properties
|
||||
${{ each property in job }}:
|
||||
${{ if ne(property.key, 'job') }}:
|
||||
${{ property.key }}: ${{ property.value }}
|
||||
|
||||
name: ${{ job.job }}
|
||||
|
||||
- ${{ if and(eq(parameters.enablePublishBuildAssets, true), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- template: ../job/publish-build-assets.yml
|
||||
parameters:
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
dependsOn:
|
||||
- ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
|
||||
- ${{ each job in parameters.publishBuildAssetsDependsOn }}:
|
||||
- ${{ job.job }}
|
||||
- ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
|
||||
- ${{ each job in parameters.jobs }}:
|
||||
- ${{ job.job }}
|
||||
pool:
|
||||
vmImage: vs2017-win2016
|
||||
runAsPublic: ${{ parameters.runAsPublic }}
|
||||
enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
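# Illustrative usage from a consuming pipeline (template path and values are examples):
# jobs:
# - template: /eng/common/templates/jobs/jobs.yml
#   parameters:
#     enableTelemetry: true
#     helixRepo: dotnet/example-repo
#     jobs:
#     - job: Windows_NT
#       pool:
#         vmImage: vs2017-win2016
#       steps:
#       - script: echo hello from $(Agent.Os)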
|
||||
|
|
@ -0,0 +1,130 @@
|
|||
parameters:
|
||||
# Optional: Clean sources before building
|
||||
clean: true
|
||||
|
||||
# Optional: Git fetch depth
|
||||
fetchDepth: ''
|
||||
|
||||
# Optional: name of the phase (not specifying phase name may cause name collisions)
|
||||
name: ''
|
||||
# Optional: display name of the phase
|
||||
displayName: ''
|
||||
|
||||
# Optional: condition for the job to run
|
||||
condition: ''
|
||||
|
||||
# Optional: dependencies of the phase
|
||||
dependsOn: ''
|
||||
|
||||
# Required: A defined YAML queue
|
||||
queue: {}
|
||||
|
||||
# Required: build steps
|
||||
steps: []
|
||||
|
||||
# Optional: variables
|
||||
variables: {}
|
||||
|
||||
# Optional: should run as a public build even in the internal project
|
||||
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
|
||||
runAsPublic: false
|
||||
|
||||
## Telemetry variables
|
||||
|
||||
# Optional: enable sending telemetry
|
||||
# if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
|
||||
# _HelixBuildConfig - differentiate between Debug, Release, other
|
||||
# _HelixSource - Example: build/product
|
||||
# _HelixType - Example: official/dotnet/arcade/$(Build.SourceBranch)
|
||||
enableTelemetry: false
|
||||
|
||||
# Optional: Enable installing Microbuild plugin
|
||||
# if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
|
||||
# _TeamName - the name of your team
|
||||
# _SignType - 'test' or 'real'
|
||||
enableMicrobuild: false
|
||||
|
||||
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
|
||||
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
|
||||
|
||||
phases:
|
||||
- phase: ${{ parameters.name }}
|
||||
|
||||
${{ if ne(parameters.displayName, '') }}:
|
||||
displayName: ${{ parameters.displayName }}
|
||||
|
||||
${{ if ne(parameters.condition, '') }}:
|
||||
condition: ${{ parameters.condition }}
|
||||
|
||||
${{ if ne(parameters.dependsOn, '') }}:
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
|
||||
queue: ${{ parameters.queue }}
|
||||
|
||||
${{ if ne(parameters.variables, '') }}:
|
||||
variables:
|
||||
${{ insert }}: ${{ parameters.variables }}
|
||||
|
||||
steps:
|
||||
- checkout: self
|
||||
clean: ${{ parameters.clean }}
|
||||
${{ if ne(parameters.fetchDepth, '') }}:
|
||||
fetchDepth: ${{ parameters.fetchDepth }}
|
||||
|
||||
- ${{ if eq(parameters.enableTelemetry, 'true') }}:
|
||||
- template: /eng/common/templates/steps/telemetry-start.yml
|
||||
parameters:
|
||||
buildConfig: $(_HelixBuildConfig)
|
||||
helixSource: $(_HelixSource)
|
||||
helixType: $(_HelixType)
|
||||
runAsPublic: ${{ parameters.runAsPublic }}
|
||||
|
||||
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
|
||||
# Internal only resource, and Microbuild signing shouldn't be applied to PRs.
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- task: MicroBuildSigningPlugin@2
|
||||
displayName: Install MicroBuild plugin
|
||||
inputs:
|
||||
signType: $(_SignType)
|
||||
zipSources: false
|
||||
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
|
||||
|
||||
env:
|
||||
TeamName: $(_TeamName)
|
||||
continueOnError: false
|
||||
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
|
||||
|
||||
# Run provided build steps
|
||||
- ${{ parameters.steps }}
|
||||
|
||||
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
|
||||
# Internal only resources
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- task: MicroBuildCleanup@1
|
||||
displayName: Execute Microbuild cleanup tasks
|
||||
condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
|
||||
env:
|
||||
TeamName: $(_TeamName)
|
||||
|
||||
- ${{ if eq(parameters.enableTelemetry, 'true') }}:
|
||||
- template: /eng/common/templates/steps/telemetry-end.yml
|
||||
parameters:
|
||||
helixSource: $(_HelixSource)
|
||||
helixType: $(_HelixType)
|
||||
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- task: CopyFiles@2
|
||||
displayName: Gather Asset Manifests
|
||||
inputs:
|
||||
SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
|
||||
TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
|
||||
continueOnError: false
|
||||
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Push Asset Manifests
|
||||
inputs:
|
||||
PathtoPublish: '$(Build.StagingDirectory)/AssetManifests'
|
||||
PublishLocation: Container
|
||||
ArtifactName: AssetManifests
|
||||
continueOnError: false
|
||||
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
|
|
@ -0,0 +1,49 @@
|
|||
parameters:
|
||||
dependsOn: ''
|
||||
queue: {}
|
||||
configuration: 'Debug'
|
||||
condition: succeeded()
|
||||
continueOnError: false
|
||||
runAsPublic: false
|
||||
phases:
|
||||
- phase: Asset_Registry_Publish
|
||||
displayName: Publish to Build Asset Registry
|
||||
dependsOn: ${{ parameters.dependsOn }}
|
||||
queue: ${{ parameters.queue }}
|
||||
variables:
|
||||
_BuildConfig: ${{ parameters.configuration }}
|
||||
steps:
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
|
||||
- task: DownloadBuildArtifacts@0
|
||||
displayName: Download artifact
|
||||
inputs:
|
||||
artifactName: AssetManifests
|
||||
downloadPath: '$(Build.StagingDirectory)/Download'
|
||||
condition: ${{ parameters.condition }}
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
- task: AzureKeyVault@1
|
||||
inputs:
|
||||
azureSubscription: 'DotNet-Engineering-Services_KeyVault'
|
||||
KeyVaultName: EngKeyVault
|
||||
SecretsFilter: 'MaestroAccessToken'
|
||||
condition: ${{ parameters.condition }}
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
- task: PowerShell@2
|
||||
displayName: Publish Build Assets
|
||||
inputs:
|
||||
filePath: eng\common\sdk-task.ps1
|
||||
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
|
||||
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
|
||||
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
|
||||
/p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
|
||||
/p:Configuration=$(_BuildConfig)
|
||||
condition: ${{ parameters.condition }}
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Publish Logs to VSTS
|
||||
inputs:
|
||||
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
|
||||
PublishLocation: Container
|
||||
ArtifactName: $(Agent.Os)_Asset_Registry_Publish
|
||||
continueOnError: true
|
||||
condition: always()
|
|
@ -0,0 +1,12 @@
|
|||
# build-reason.yml
|
||||
# Description: runs the provided steps when the current Build.Reason matches 'conditions',
# a comma-separated string of build reasons. Prefix the string with 'not' to invert the match.
|
||||
parameters:
|
||||
conditions: ''
|
||||
steps: []
|
||||
|
||||
steps:
|
||||
- ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
|
||||
- ${{ parameters.steps }}
|
||||
- ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
|
||||
- ${{ parameters.steps }}
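# Illustrative usage (template path and values are examples):
# steps:
# - template: /eng/common/templates/steps/build-reason.yml
#   parameters:
#     conditions: PullRequest,IndividualCI
#     steps:
#     - script: echo "Runs only when Build.Reason is PullRequest or IndividualCI"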
|
|
@ -0,0 +1,51 @@
|
|||
parameters:
|
||||
HelixSource: 'pr/dotnet-github-anon-kaonashi-bot'
|
||||
HelixType: 'tests/default'
|
||||
HelixBuild: $(Build.BuildNumber)
|
||||
HelixTargetQueues: ''
|
||||
HelixAccessToken: ''
|
||||
HelixPreCommands: ''
|
||||
HelixPostCommands: ''
|
||||
WorkItemDirectory: ''
|
||||
WorkItemCommand: ''
|
||||
CorrelationPayloadDirectory: ''
|
||||
XUnitProjects: ''
|
||||
XUnitTargetFramework: ''
|
||||
XUnitRunnerVersion: ''
|
||||
IncludeDotNetCli: false
|
||||
DotNetCliPackageType: ''
|
||||
DotNetCliVersion: ''
|
||||
EnableXUnitReporter: false
|
||||
WaitForWorkItemCompletion: true
|
||||
condition: succeeded()
|
||||
continueOnError: false
|
||||
|
||||
steps:
|
||||
- task: DotNetCoreCLI@2
|
||||
inputs:
|
||||
command: custom
|
||||
projects: eng/common/helixpublish.proj
|
||||
custom: msbuild
|
||||
arguments: '/bl:$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/SendToHelix.binlog'
|
||||
displayName: Send job to Helix
|
||||
env:
|
||||
HelixSource: ${{ parameters.HelixSource }}
|
||||
HelixType: ${{ parameters.HelixType }}
|
||||
HelixBuild: ${{ parameters.HelixBuild }}
|
||||
HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
|
||||
HelixAccessToken: ${{ parameters.HelixAccessToken }}
|
||||
HelixPreCommands: ${{ parameters.HelixPreCommands }}
|
||||
HelixPostCommands: ${{ parameters.HelixPostCommands }}
|
||||
WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
|
||||
WorkItemCommand: ${{ parameters.WorkItemCommand }}
|
||||
CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
|
||||
XUnitProjects: ${{ parameters.XUnitProjects }}
|
||||
XUnitRuntimeTargetFramework: ${{ parameters.XUnitTargetFramework }}
|
||||
XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
|
||||
IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
|
||||
DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
|
||||
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
|
||||
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
|
||||
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
|
||||
condition: ${{ parameters.condition }}
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
|
@ -0,0 +1,7 @@
|
|||
parameters:
|
||||
agentOs: ''
|
||||
steps: []
|
||||
|
||||
steps:
|
||||
- ${{ if ne(parameters.agentOs, 'Windows_NT') }}:
|
||||
- ${{ parameters.steps }}
|
|
@ -0,0 +1,7 @@
|
|||
parameters:
|
||||
agentOs: ''
|
||||
steps: []
|
||||
|
||||
steps:
|
||||
- ${{ if eq(parameters.agentOs, 'Windows_NT') }}:
|
||||
- ${{ parameters.steps }}
|
|
@ -0,0 +1,33 @@
|
|||
parameters:
|
||||
# if parameter1 equals parameter2, run the 'ifScript' command, else run the 'elseScript' command
|
||||
parameter1: ''
|
||||
parameter2: ''
|
||||
ifScript: ''
|
||||
elseScript: ''
|
||||
|
||||
# name of script step
|
||||
name: Script
|
||||
|
||||
# display name of script step
|
||||
displayName: If-Equal-Else Script
|
||||
|
||||
# environment
|
||||
env: {}
|
||||
|
||||
# conditional expression for step execution
|
||||
condition: ''
|
||||
|
||||
steps:
|
||||
- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}:
|
||||
- script: ${{ parameters.ifScript }}
|
||||
name: ${{ parameters.name }}
|
||||
displayName: ${{ parameters.displayName }}
|
||||
env: ${{ parameters.env }}
|
||||
condition: ${{ parameters.condition }}
|
||||
|
||||
- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}:
|
||||
- script: ${{ parameters.elseScript }}
|
||||
name: ${{ parameters.name }}
|
||||
displayName: ${{ parameters.displayName }}
|
||||
env: ${{ parameters.env }}
|
||||
condition: ${{ parameters.condition }}
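# Illustrative usage (template path and values are examples):
# steps:
# - template: /eng/common/templates/steps/run-script-ifequalelse.yml
#   parameters:
#     parameter1: $(Agent.Os)
#     parameter2: Windows_NT
#     ifScript: echo "Windows agent"
#     elseScript: echo "non-Windows agent"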
|
|
@ -0,0 +1,83 @@
|
|||
parameters:
|
||||
HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
|
||||
HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
|
||||
HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
|
||||
HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/api/2018-03-14/info/queues for a list of queues
|
||||
HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
|
||||
HelixPreCommands: '' # optional -- commands to run before Helix work item execution
|
||||
HelixPostCommands: '' # optional -- commands to run after Helix work item execution
|
||||
WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
|
||||
WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
|
||||
WorkItemTimeout: '' # optional -- a timeout in seconds for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
|
||||
CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
|
||||
XUnitProjects: '' # optional -- semicolon delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
|
||||
XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
|
||||
XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
|
||||
XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
|
||||
IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
|
||||
DotNetCliPackageType: '' # optional -- either 'sdk' or 'runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
|
||||
DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/master/release-notes/releases.json
|
||||
EnableXUnitReporter: false # optional -- true enables XUnit result reporting to Mission Control
|
||||
WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
|
||||
IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
|
||||
Creator: '' # optional -- if the build is external, use this to specify who is sending the job
|
||||
condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
|
||||
continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
|
||||
|
||||
steps:
|
||||
- powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
|
||||
displayName: Send job to Helix (Windows)
|
||||
env:
|
||||
BuildConfig: $(_BuildConfig)
|
||||
HelixSource: ${{ parameters.HelixSource }}
|
||||
HelixType: ${{ parameters.HelixType }}
|
||||
HelixBuild: ${{ parameters.HelixBuild }}
|
||||
HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
|
||||
HelixAccessToken: ${{ parameters.HelixAccessToken }}
|
||||
HelixPreCommands: ${{ parameters.HelixPreCommands }}
|
||||
HelixPostCommands: ${{ parameters.HelixPostCommands }}
|
||||
WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
|
||||
WorkItemCommand: ${{ parameters.WorkItemCommand }}
|
||||
WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
|
||||
CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
|
||||
XUnitProjects: ${{ parameters.XUnitProjects }}
|
||||
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
|
||||
XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
|
||||
XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
|
||||
IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
|
||||
DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
|
||||
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
|
||||
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
|
||||
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
|
||||
Creator: ${{ parameters.Creator }}
|
||||
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
|
||||
condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
||||
- script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
|
||||
displayName: Send job to Helix (Unix)
|
||||
env:
|
||||
BuildConfig: $(_BuildConfig)
|
||||
HelixSource: ${{ parameters.HelixSource }}
|
||||
HelixType: ${{ parameters.HelixType }}
|
||||
HelixBuild: ${{ parameters.HelixBuild }}
|
||||
HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
|
||||
HelixAccessToken: ${{ parameters.HelixAccessToken }}
|
||||
HelixPreCommands: ${{ parameters.HelixPreCommands }}
|
||||
HelixPostCommands: ${{ parameters.HelixPostCommands }}
|
||||
WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
|
||||
WorkItemCommand: ${{ parameters.WorkItemCommand }}
|
||||
WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
|
||||
CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
|
||||
XUnitProjects: ${{ parameters.XUnitProjects }}
|
||||
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
|
||||
XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
|
||||
XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
|
||||
IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
|
||||
DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
|
||||
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
|
||||
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
|
||||
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
|
||||
Creator: ${{ parameters.Creator }}
|
||||
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
|
||||
condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
|
||||
continueOnError: ${{ parameters.continueOnError }}
|
|
@ -0,0 +1,102 @@
|
|||
parameters:
|
||||
maxRetries: 5
|
||||
retryDelay: 10 # in seconds
|
||||
|
||||
steps:
|
||||
- bash: |
|
||||
if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then
|
||||
errorCount=0
|
||||
else
|
||||
errorCount=1
|
||||
fi
|
||||
warningCount=0
|
||||
|
||||
curlStatus=1
|
||||
retryCount=0
|
||||
# retry loop to harden against spotty telemetry connections
|
||||
# we don't retry successes and 4xx client errors
|
||||
until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
|
||||
do
|
||||
if [ $retryCount -gt 0 ]; then
|
||||
echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
|
||||
sleep $RetryDelay
|
||||
fi
|
||||
|
||||
# create a temporary file for curl output
|
||||
res=`mktemp`
|
||||
|
||||
curlResult=`
|
||||
curl --verbose --output $res --write-out "%{http_code}"\
|
||||
-H 'Content-Type: application/json' \
|
||||
-H "X-Helix-Job-Token: $Helix_JobToken" \
|
||||
-H 'Content-Length: 0' \
|
||||
-X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \
|
||||
--data-urlencode "errorCount=$errorCount" \
|
||||
--data-urlencode "warningCount=$warningCount"`
|
||||
curlStatus=$?
|
||||
|
||||
if [ $curlStatus -eq 0 ]; then
|
||||
if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
|
||||
curlStatus=$curlResult
|
||||
fi
|
||||
fi
|
||||
|
||||
let retryCount++
|
||||
done
|
||||
|
||||
if [ $curlStatus -ne 0 ]; then
|
||||
echo "Failed to Send Build Finish information after $retryCount retries"
|
||||
vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus"
|
||||
echo "##$vstsLogOutput"
|
||||
exit 1
|
||||
fi
|
||||
displayName: Send Unix Build End Telemetry
|
||||
env:
|
||||
# defined via VSTS variables in start-job.sh
|
||||
Helix_JobToken: $(Helix_JobToken)
|
||||
Helix_WorkItemId: $(Helix_WorkItemId)
|
||||
MaxRetries: ${{ parameters.maxRetries }}
|
||||
RetryDelay: ${{ parameters.retryDelay }}
|
||||
condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
|
||||
- powershell: |
|
||||
if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) {
|
||||
$ErrorCount = 0
|
||||
} else {
|
||||
$ErrorCount = 1
|
||||
}
|
||||
$WarningCount = 0
|
||||
|
||||
# Basic retry loop to harden against server flakiness
|
||||
$retryCount = 0
|
||||
while ($retryCount -lt $env:MaxRetries) {
|
||||
try {
|
||||
Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" `
|
||||
-Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
|
||||
break
|
||||
}
|
||||
catch {
|
||||
$statusCode = $_.Exception.Response.StatusCode.value__
|
||||
if ($statusCode -ge 400 -and $statusCode -le 499) {
|
||||
Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
|
||||
Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
|
||||
exit 1
|
||||
}
|
||||
Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
|
||||
$retryCount++
|
||||
sleep $env:RetryDelay
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if ($retryCount -ge $env:MaxRetries) {
|
||||
Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
|
||||
exit 1
|
||||
}
|
||||
displayName: Send Windows Build End Telemetry
|
||||
env:
|
||||
# defined via VSTS variables in start-job.ps1
|
||||
Helix_JobToken: $(Helix_JobToken)
|
||||
Helix_WorkItemId: $(Helix_WorkItemId)
|
||||
MaxRetries: ${{ parameters.maxRetries }}
|
||||
RetryDelay: ${{ parameters.retryDelay }}
|
||||
condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT'))
|
|
@ -0,0 +1,241 @@
|
|||
parameters:
|
||||
helixSource: 'undefined_defaulted_in_telemetry.yml'
|
||||
helixType: 'undefined_defaulted_in_telemetry.yml'
|
||||
buildConfig: ''
|
||||
runAsPublic: false
|
||||
maxRetries: 5
|
||||
retryDelay: 10 # in seconds
|
||||
|
||||
steps:
|
||||
- ${{ if and(eq(parameters.runAsPublic, 'false'), not(eq(variables['System.TeamProject'], 'public'))) }}:
|
||||
- task: AzureKeyVault@1
|
||||
inputs:
|
||||
azureSubscription: 'HelixProd_KeyVault'
|
||||
KeyVaultName: HelixProdKV
|
||||
SecretsFilter: 'HelixApiAccessToken'
|
||||
condition: always()
|
||||
- bash: |
|
||||
# create a temporary file
|
||||
jobInfo=`mktemp`
|
||||
|
||||
# write job info content to temporary file
|
||||
cat > $jobInfo <<JobListStuff
|
||||
{
|
||||
"QueueId": "$QueueId",
|
||||
"Source": "$Source",
|
||||
"Type": "$Type",
|
||||
"Build": "$Build",
|
||||
"Attempt": "$Attempt",
|
||||
"Properties": {
|
||||
"operatingSystem": "$OperatingSystem",
|
||||
"configuration": "$Configuration"
|
||||
}
|
||||
}
|
||||
JobListStuff
|
||||
|
||||
cat $jobInfo
|
||||
|
||||
# create a temporary file for curl output
|
||||
res=`mktemp`
|
||||
|
||||
accessTokenParameter="?access_token=$HelixApiAccessToken"
|
||||
|
||||
curlStatus=1
|
||||
retryCount=0
|
||||
# retry loop to harden against spotty telemetry connections
|
||||
# we don't retry successes and 4xx client errors
|
||||
until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
|
||||
do
|
||||
      if [ $retryCount -gt 0 ]; then
        echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
        sleep $RetryDelay
      fi

      curlResult=`
        cat $jobInfo |\
        curl --trace - --verbose --output $res --write-out "%{http_code}" \
        -H 'Content-Type: application/json' \
        -X POST "https://helix.dot.net/api/2018-03-14/telemetry/job$accessTokenParameter" -d @-`
      curlStatus=$?

      if [ $curlStatus -eq 0 ]; then
        if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
          curlStatus=$curlResult
        fi
      fi

      let retryCount++
    done

    curlResult=`cat $res`

    # validate status of curl command
    if [ $curlStatus -ne 0 ]; then
      echo "Failed To Send Job Start information after $retryCount retries"
      # We have to append the ## vso prefix or vso will pick up the command when it dumps the inline script into the shell
      vstsLogOutput="vso[task.logissue type=error;sourcepath=telemetry/start-job.sh;code=1;]Failed to Send Job Start information: $curlStatus"
      echo "##$vstsLogOutput"
      exit 1
    fi

    # Set the Helix_JobToken variable
    export Helix_JobToken=`echo $curlResult | xargs echo` # Strip Quotes
    echo "##vso[task.setvariable variable=Helix_JobToken;issecret=true;]$Helix_JobToken"
  displayName: Send Unix Job Start Telemetry
  env:
    HelixApiAccessToken: $(HelixApiAccessToken)
    Source: ${{ parameters.helixSource }}
    Type: ${{ parameters.helixType }}
    Build: $(Build.BuildNumber)
    QueueId: $(Agent.Os)
    Attempt: 1
    OperatingSystem: $(Agent.Os)
    Configuration: ${{ parameters.buildConfig }}
    MaxRetries: ${{ parameters.maxRetries }}
    RetryDelay: ${{ parameters.retryDelay }}
  condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
- bash: |
    curlStatus=1
    retryCount=0
    # retry loop to harden against spotty telemetry connections
    # we don't retry successes and 4xx client errors
    until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
    do
      if [ $retryCount -gt 0 ]; then
        echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
        sleep $RetryDelay
      fi

      res=`mktemp`
      curlResult=`
        curl --verbose --output $res --write-out "%{http_code}" \
        -H 'Content-Type: application/json' \
        -H "X-Helix-Job-Token: $Helix_JobToken" \
        -H 'Content-Length: 0' \
        -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build" \
        --data-urlencode "buildUri=$BuildUri"`
      curlStatus=$?

      if [ $curlStatus -eq 0 ]; then
        if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
          curlStatus=$curlResult
        fi
      fi

      curlResult=`cat $res`
      let retryCount++
    done

    # validate status of curl command
    if [ $curlStatus -ne 0 ]; then
      echo "Failed to Send Build Start information after $retryCount retries"
      vstsLogOutput="vso[task.logissue type=error;sourcepath=telemetry/build/start.sh;code=1;]Failed to Send Build Start information: $curlStatus"
      echo "##$vstsLogOutput"
      exit 1
    fi

    export Helix_WorkItemId=`echo $curlResult | xargs echo` # Strip Quotes
    echo "##vso[task.setvariable variable=Helix_WorkItemId]$Helix_WorkItemId"
  displayName: Send Unix Build Start Telemetry
  env:
    BuildUri: $(System.TaskDefinitionsUri)$(System.TeamProject)/_build/index?buildId=$(Build.BuildId)&_a=summary
    Helix_JobToken: $(Helix_JobToken)
    MaxRetries: ${{ parameters.maxRetries }}
    RetryDelay: ${{ parameters.retryDelay }}
  condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))

- powershell: |
    $jobInfo = [pscustomobject]@{
      QueueId=$env:QueueId;
      Source=$env:Source;
      Type=$env:Type;
      Build=$env:Build;
      Attempt=$env:Attempt;
      Properties=[pscustomobject]@{ operatingSystem=$env:OperatingSystem; configuration=$env:Configuration };
    }

    $jobInfoJson = $jobInfo | ConvertTo-Json

    if ($env:HelixApiAccessToken) {
      $accessTokenParameter="?access_token=$($env:HelixApiAccessToken)"
    }
    Write-Host "Job Info: $jobInfoJson"

    # Basic retry loop to harden against server flakiness
    $retryCount = 0
    while ($retryCount -lt $env:MaxRetries) {
      try {
        $jobToken = Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job$($accessTokenParameter)" -Method Post -ContentType "application/json" -Body $jobInfoJson
        break
      }
      catch {
        $statusCode = $_.Exception.Response.StatusCode.value__
        if ($statusCode -ge 400 -and $statusCode -le 499) {
          Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
          Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
          exit 1
        }
        Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
        $retryCount++
        sleep $env:RetryDelay
        continue
      }
    }

    if ($retryCount -ge $env:MaxRetries) {
      Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
      exit 1
    }

    $env:Helix_JobToken = $jobToken
    Write-Host "##vso[task.setvariable variable=Helix_JobToken;issecret=true;]$env:Helix_JobToken"
  env:
    HelixApiAccessToken: $(HelixApiAccessToken)
    Source: ${{ parameters.helixSource }}
    Type: ${{ parameters.helixType }}
    Build: $(Build.BuildNumber)
    QueueId: $(Agent.Os)
    Attempt: 1
    OperatingSystem: $(Agent.Os)
    Configuration: ${{ parameters.buildConfig }}
    MaxRetries: ${{ parameters.maxRetries }}
    RetryDelay: ${{ parameters.retryDelay }}
  condition: and(always(), eq(variables['Agent.Os'], 'Windows_NT'))
  displayName: Send Windows Job Start Telemetry
- powershell: |
    # Basic retry loop to harden against server flakiness
    $retryCount = 0
    while ($retryCount -lt $env:MaxRetries) {
      try {
        $workItemId = Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build?buildUri=$([Net.WebUtility]::UrlEncode($env:BuildUri))" -Method Post -ContentType "application/json" -Body "" `
          -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
        break
      }
      catch {
        $statusCode = $_.Exception.Response.StatusCode.value__
        if ($statusCode -ge 400 -and $statusCode -le 499) {
          Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
          Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
          exit 1
        }
        Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
        $retryCount++
        sleep $env:RetryDelay
        continue
      }
    }

    if ($retryCount -ge $env:MaxRetries) {
      Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
      exit 1
    }

    $env:Helix_WorkItemId = $workItemId
    Write-Host "##vso[task.setvariable variable=Helix_WorkItemId]$env:Helix_WorkItemId"
  displayName: Send Windows Build Start Telemetry
  env:
    BuildUri: $(System.TaskDefinitionsUri)$(System.TeamProject)/_build/index?buildId=$(Build.BuildId)&_a=summary
    Helix_JobToken: $(Helix_JobToken)
    MaxRetries: ${{ parameters.maxRetries }}
    RetryDelay: ${{ parameters.retryDelay }}
  condition: and(always(), eq(variables['Agent.Os'], 'Windows_NT'))
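
Both the bash and PowerShell steps above hand-roll the same policy: retry up to MaxRetries times, wait RetryDelay seconds between attempts, and give up immediately on 4xx responses, since a client error will not succeed on retry. A minimal PowerShell sketch of that shared pattern (a hypothetical helper, not part of this change; names and values are illustrative):

# Retry an HTTP call, failing fast on 4xx client errors.
function Invoke-WithRetry([scriptblock]$Action, [int]$MaxRetries = 5, [int]$RetryDelaySeconds = 10) {
  for ($attempt = 1; $attempt -le $MaxRetries; $attempt++) {
    try {
      return & $Action
    }
    catch {
      $statusCode = $_.Exception.Response.StatusCode.value__
      if ($statusCode -ge 400 -and $statusCode -le 499) { throw }   # don't retry client errors
      Write-Host "Attempt $attempt failed (status $statusCode); retrying in $RetryDelaySeconds seconds..."
      Start-Sleep -Seconds $RetryDelaySeconds
    }
  }
  throw "Operation failed after $MaxRetries attempts."
}

# e.g. the job-start POST shown above:
# $jobToken = Invoke-WithRetry { Invoke-RestMethod -Uri $uri -Method Post -ContentType "application/json" -Body $jobInfoJson }
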
@@ -0,0 +1,523 @@
# Initialize variables if they aren't already defined.
# These may be defined as parameters of the importing script, or set after importing this script.

# CI mode - set to true on CI server for PR validation build or official build.
[bool]$ci = if (Test-Path variable:ci) { $ci } else { $false }

# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { "Debug" }

# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
# Binary log must be enabled on CI.
[bool]$binaryLog = if (Test-Path variable:binaryLog) { $binaryLog } else { $ci }

# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes).
[bool]$prepareMachine = if (Test-Path variable:prepareMachine) { $prepareMachine } else { $false }

# True to restore toolsets and dependencies.
[bool]$restore = if (Test-Path variable:restore) { $restore } else { $true }

# Adjusts msbuild verbosity level.
[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { "minimal" }

# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
[bool]$nodeReuse = if (Test-Path variable:nodeReuse) { $nodeReuse } else { !$ci }

# Configures warning treatment in msbuild.
[bool]$warnAsError = if (Test-Path variable:warnAsError) { $warnAsError } else { $true }

# Specifies which msbuild engine to use for build: 'vs', 'dotnet' or unspecified (determined based on presence of tools.vs in global.json).
[string]$msbuildEngine = if (Test-Path variable:msbuildEngine) { $msbuildEngine } else { $null }

# True to attempt using a .NET Core SDK already installed on the machine that meets the requirements
# specified in global.json, instead of downloading one.
[bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true }

# True to use global NuGet cache instead of restoring packages to repository-local directory.
[bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci }

# An array of names of processes to stop on script exit if prepareMachine is true.
$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @("msbuild", "dotnet", "vbcscompiler") }
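
A sketch of how an importing script might set these knobs before dot-sourcing the file, so the defaults above only fill in what the caller left unset (the wrapper shape and parameter values are illustrative, not part of this change):

# Hypothetical wrapper script living next to tools.ps1:
[CmdletBinding(PositionalBinding=$false)]
param(
  [string]$configuration = "Release",
  [switch]$ci,
  [switch]$prepareMachine
)

$binaryLog = $ci          # CI builds must emit a binlog (enforced in MSBuild below)
$nodeReuse = !$ci         # node reuse is disallowed on CI

. (Join-Path $PSScriptRoot "tools.ps1")   # the Test-Path variable: checks above now see these values
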
set-strictmode -version 2.0
$ErrorActionPreference = "Stop"
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12

function Create-Directory([string[]] $path) {
  if (!(Test-Path $path)) {
    New-Item -path $path -force -itemType "Directory" | Out-Null
  }
}

function Unzip([string]$zipfile, [string]$outpath) {
  Add-Type -AssemblyName System.IO.Compression.FileSystem
  [System.IO.Compression.ZipFile]::ExtractToDirectory($zipfile, $outpath)
}

# This will exec a process using the console and return its exit code.
# This will not throw when the process fails.
# Returns process exit code.
function Exec-Process([string]$command, [string]$commandArgs) {
  $startInfo = New-Object System.Diagnostics.ProcessStartInfo
  $startInfo.FileName = $command
  $startInfo.Arguments = $commandArgs
  $startInfo.UseShellExecute = $false
  $startInfo.WorkingDirectory = Get-Location

  $process = New-Object System.Diagnostics.Process
  $process.StartInfo = $startInfo
  $process.Start() | Out-Null

  $finished = $false
  try {
    while (-not $process.WaitForExit(100)) {
      # Non-blocking loop done to allow ctrl-c interrupts
    }

    $finished = $true
    return $global:LASTEXITCODE = $process.ExitCode
  }
  finally {
    # If we didn't finish then an error occurred or the user hit ctrl-c. Either
    # way kill the process
    if (-not $finished) {
      $process.Kill()
    }
  }
}
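
For example, a caller that wants the exit code without an exception might use it like this (tool and arguments are illustrative):

# Run a tool without throwing on failure and inspect the exit code instead.
$exitCode = Exec-Process "dotnet" "--list-sdks"
if ($exitCode -ne 0) {
  Write-Host "dotnet --list-sdks exited with code $exitCode" -ForegroundColor Yellow
}
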
function InitializeDotNetCli([bool]$install) {
  if (Test-Path variable:global:_DotNetInstallDir) {
    return $global:_DotNetInstallDir
  }

  # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
  $env:DOTNET_MULTILEVEL_LOOKUP=0

  # Disable first run since we do not need all ASP.NET packages restored.
  $env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1

  # Disable telemetry on CI.
  if ($ci) {
    $env:DOTNET_CLI_TELEMETRY_OPTOUT=1
  }

  # Source Build uses DotNetCoreSdkDir variable
  if ($env:DotNetCoreSdkDir -ne $null) {
    $env:DOTNET_INSTALL_DIR = $env:DotNetCoreSdkDir
  }

  # Find the first path on %PATH% that contains the dotnet.exe
  if ($useInstalledDotNetCli -and ($env:DOTNET_INSTALL_DIR -eq $null)) {
    $dotnetCmd = Get-Command "dotnet.exe" -ErrorAction SilentlyContinue
    if ($dotnetCmd -ne $null) {
      $env:DOTNET_INSTALL_DIR = Split-Path $dotnetCmd.Path -Parent
    }
  }

  $dotnetSdkVersion = $GlobalJson.tools.dotnet

  # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
  # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
  if (($env:DOTNET_INSTALL_DIR -ne $null) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
    $dotnetRoot = $env:DOTNET_INSTALL_DIR
  } else {
    $dotnetRoot = Join-Path $RepoRoot ".dotnet"

    if (-not (Test-Path(Join-Path $dotnetRoot "sdk\$dotnetSdkVersion"))) {
      if ($install) {
        InstallDotNetSdk $dotnetRoot $dotnetSdkVersion
      } else {
        Write-Host "Unable to find dotnet with SDK version '$dotnetSdkVersion'" -ForegroundColor Red
        ExitWithExitCode 1
      }
    }

    $env:DOTNET_INSTALL_DIR = $dotnetRoot
  }

  # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
  # build steps from using anything other than what we've downloaded.
  # It also ensures that VS msbuild will use the downloaded sdk targets.
  $env:PATH = "$dotnetRoot;$env:PATH"

  if ($ci) {
    # Make sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build
    Write-Host "##vso[task.prependpath]$dotnetRoot"
    Write-Host "##vso[task.setvariable variable=DOTNET_MULTILEVEL_LOOKUP]0"
    Write-Host "##vso[task.setvariable variable=DOTNET_SKIP_FIRST_TIME_EXPERIENCE]1"
  }

  return $global:_DotNetInstallDir = $dotnetRoot
}
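
A usage sketch: restore entry points typically call this with -install so the SDK pinned in global.json is guaranteed to exist, then run the pinned dotnet from the returned root (the invocation is illustrative):

# Ensure the SDK from global.json is available, installing under <repo>\.dotnet if needed.
$dotnetRoot = InitializeDotNetCli -install:$true
$dotnetExe = Join-Path $dotnetRoot "dotnet.exe"
& $dotnetExe --version    # resolves to the SDK version pinned in global.json
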
function GetDotNetInstallScript([string] $dotnetRoot) {
  $installScript = "$dotnetRoot\dotnet-install.ps1"
  if (!(Test-Path $installScript)) {
    Create-Directory $dotnetRoot
    Invoke-WebRequest "https://dot.net/v1/dotnet-install.ps1" -OutFile $installScript
  }

  return $installScript
}

function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = "") {
  $installScript = GetDotNetInstallScript $dotnetRoot
  $archArg = if ($architecture) { $architecture } else { "<auto>" }
  & $installScript -Version $version -InstallDir $dotnetRoot -Architecture $archArg
  if ($lastExitCode -ne 0) {
    Write-Host "Failed to install dotnet cli (exit code '$lastExitCode')." -ForegroundColor Red
    ExitWithExitCode $lastExitCode
  }
}

#
# Locates Visual Studio MSBuild installation.
# The preference order for MSBuild to use is as follows:
#
#   1. MSBuild from an active VS command prompt
#   2. MSBuild from a compatible VS installation
#   3. MSBuild from the xcopy tool package
#
# Returns full path to msbuild.exe.
# Throws on failure.
#
function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = $null) {
  if (Test-Path variable:global:_MSBuildExe) {
    return $global:_MSBuildExe
  }

  if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
  $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { "15.9" }
  $vsMinVersion = [Version]::new($vsMinVersionStr)

  # Try msbuild command available in the environment.
  if ($env:VSINSTALLDIR -ne $null) {
    $msbuildCmd = Get-Command "msbuild.exe" -ErrorAction SilentlyContinue
    if ($msbuildCmd -ne $null) {
      if ($msbuildCmd.Version -ge $vsMinVersion) {
        return $global:_MSBuildExe = $msbuildCmd.Path
      }

      # Report error - the developer environment is initialized with incompatible VS version.
      throw "Developer Command Prompt for VS $($env:VisualStudioVersion) is not recent enough. Please upgrade to $vsMinVersionStr or build from a plain CMD window"
    }
  }

  # Locate Visual Studio installation or download x-copy msbuild.
  $vsInfo = LocateVisualStudio $vsRequirements
  if ($vsInfo -ne $null) {
    $vsInstallDir = $vsInfo.installationPath
    $vsMajorVersion = $vsInfo.installationVersion.Split('.')[0]

    InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion
  } else {

    if (Get-Member -InputObject $GlobalJson.tools -Name "xcopy-msbuild") {
      $xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild'
      $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
    } else {
      $vsMajorVersion = $vsMinVersion.Major
      $xcopyMSBuildVersion = "$vsMajorVersion.$($vsMinVersion.Minor).0-alpha"
    }

    $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
    if ($vsInstallDir -eq $null) {
      throw "Unable to find Visual Studio that has required version and components installed"
    }
  }

  $msbuildVersionDir = if ([int]$vsMajorVersion -lt 16) { "$vsMajorVersion.0" } else { "Current" }
  return $global:_MSBuildExe = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin\msbuild.exe"
}

function InitializeVisualStudioEnvironmentVariables([string] $vsInstallDir, [string] $vsMajorVersion) {
  $env:VSINSTALLDIR = $vsInstallDir
  Set-Item "env:VS$($vsMajorVersion)0COMNTOOLS" (Join-Path $vsInstallDir "Common7\Tools\")

  $vsSdkInstallDir = Join-Path $vsInstallDir "VSSDK\"
  if (Test-Path $vsSdkInstallDir) {
    Set-Item "env:VSSDK$($vsMajorVersion)0Install" $vsSdkInstallDir
    $env:VSSDKInstall = $vsSdkInstallDir
  }
}

function InstallXCopyMSBuild([string]$packageVersion) {
  return InitializeXCopyMSBuild $packageVersion -install $true
}

function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
  $packageName = "RoslynTools.MSBuild"
  $packageDir = Join-Path $ToolsDir "msbuild\$packageVersion"
  $packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg"

  if (!(Test-Path $packageDir)) {
    if (!$install) {
      return $null
    }

    Create-Directory $packageDir
    Write-Host "Downloading $packageName $packageVersion"
    Invoke-WebRequest "https://dotnet.myget.org/F/roslyn-tools/api/v2/package/$packageName/$packageVersion/" -OutFile $packagePath
    Unzip $packagePath $packageDir
  }

  return Join-Path $packageDir "tools"
}

#
# Locates Visual Studio instance that meets the minimal requirements specified by tools.vs object in global.json.
#
# The following properties of tools.vs are recognized:
#   "version": "{major}.{minor}"
#       Two part minimal VS version, e.g. "15.9", "16.0", etc.
#   "components": ["componentId1", "componentId2", ...]
#       Array of ids of workload components that must be available in the VS instance.
#       See e.g. https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-enterprise?view=vs-2017
#
# Returns JSON describing the located VS instance (same format as returned by vswhere),
# or $null if no instance meeting the requirements is found on the machine.
#
function LocateVisualStudio([object]$vsRequirements = $null){
  if (Get-Member -InputObject $GlobalJson.tools -Name "vswhere") {
    $vswhereVersion = $GlobalJson.tools.vswhere
  } else {
    $vswhereVersion = "2.5.2"
  }

  $vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion"
  $vsWhereExe = Join-Path $vsWhereDir "vswhere.exe"

  if (!(Test-Path $vsWhereExe)) {
    Create-Directory $vsWhereDir
    Write-Host "Downloading vswhere"
    Invoke-WebRequest "https://github.com/Microsoft/vswhere/releases/download/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
  }

  if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
  $args = @("-latest", "-prerelease", "-format", "json", "-requires", "Microsoft.Component.MSBuild")

  if (Get-Member -InputObject $vsRequirements -Name "version") {
    $args += "-version"
    $args += $vsRequirements.version
  }

  if (Get-Member -InputObject $vsRequirements -Name "components") {
    foreach ($component in $vsRequirements.components) {
      $args += "-requires"
      $args += $component
    }
  }

  $vsInfo = & $vsWhereExe $args | ConvertFrom-Json

  if ($lastExitCode -ne 0) {
    return $null
  }

  # use first matching instance
  return $vsInfo[0]
}
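
The requirements object mirrors the tools.vs shape documented above; for example, a caller could ask for an instance that also carries the VS SDK workload component (the component id is illustrative):

# Equivalent to a global.json "tools": { "vs": { ... } } entry.
$requirements = [pscustomobject]@{
  version    = "15.9"
  components = @("Microsoft.VisualStudio.Component.VSSDK")
}

$vs = LocateVisualStudio $requirements
if ($vs -ne $null) {
  Write-Host "Found VS $($vs.installationVersion) at $($vs.installationPath)"
}
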
function InitializeBuildTool() {
  if (Test-Path variable:global:_BuildTool) {
    return $global:_BuildTool
  }

  if (-not $msbuildEngine) {
    $msbuildEngine = GetDefaultMSBuildEngine
  }

  # Initialize dotnet cli if listed in 'tools'
  $dotnetRoot = $null
  if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") {
    $dotnetRoot = InitializeDotNetCli -install:$restore
  }

  if ($msbuildEngine -eq "dotnet") {
    if (!$dotnetRoot) {
      Write-Host "/global.json must specify 'tools.dotnet'." -ForegroundColor Red
      ExitWithExitCode 1
    }

    $buildTool = @{ Path = Join-Path $dotnetRoot "dotnet.exe"; Command = "msbuild" }
  } elseif ($msbuildEngine -eq "vs") {
    try {
      $msbuildPath = InitializeVisualStudioMSBuild -install:$restore
    } catch {
      Write-Host $_ -ForegroundColor Red
      ExitWithExitCode 1
    }

    $buildTool = @{ Path = $msbuildPath; Command = "" }
  } else {
    Write-Host "Unexpected value of -msbuildEngine: '$msbuildEngine'." -ForegroundColor Red
    ExitWithExitCode 1
  }

  return $global:_BuildTool = $buildTool
}
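
The returned table abstracts over the two engines, so a caller can compose an invocation without knowing which one was picked (target and project name are illustrative):

# For the 'dotnet' engine this runs "dotnet.exe msbuild ...", for 'vs' it runs msbuild.exe directly.
$buildTool = InitializeBuildTool
$invokeArgs = @()
if ($buildTool.Command) { $invokeArgs += $buildTool.Command }
$invokeArgs += "/nologo", "/t:Restore", "MyProject.csproj"
& $buildTool.Path @invokeArgs
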
function GetDefaultMSBuildEngine() {
  # Presence of tools.vs indicates the repo needs to build using VS msbuild on Windows.
  if (Get-Member -InputObject $GlobalJson.tools -Name "vs") {
    return "vs"
  }

  if (Get-Member -InputObject $GlobalJson.tools -Name "dotnet") {
    return "dotnet"
  }

  Write-Host "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'." -ForegroundColor Red
  ExitWithExitCode 1
}

function GetNuGetPackageCachePath() {
  if ($env:NUGET_PACKAGES -eq $null) {
    # Use local cache on CI to ensure deterministic build,
    # use global cache in dev builds to avoid cost of downloading packages.
    if ($useGlobalNuGetCache) {
      $env:NUGET_PACKAGES = Join-Path $env:UserProfile ".nuget\packages"
    } else {
      $env:NUGET_PACKAGES = Join-Path $RepoRoot ".packages"
    }
  }

  return $env:NUGET_PACKAGES
}

# Returns a full path to an Arcade SDK task project file.
function GetSdkTaskProject([string]$taskName) {
  return Join-Path (Split-Path (InitializeToolset) -Parent) "SdkTasks\$taskName.proj"
}
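
For instance, a signing or publishing step could locate its entry-point project this way (the task name is illustrative and assumes such a project ships in the restored Arcade SDK):

# Resolves to <toolset>\SdkTasks\SignTool.proj under the restored Microsoft.DotNet.Arcade.Sdk package.
$signToolProject = GetSdkTaskProject "SignTool"
MSBuild $signToolProject /p:Configuration=$configuration
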
function InitializeToolset() {
  if (Test-Path variable:global:_ToolsetBuildProj) {
    return $global:_ToolsetBuildProj
  }

  $nugetCache = GetNuGetPackageCachePath

  $toolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
  $toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt"

  if (Test-Path $toolsetLocationFile) {
    $path = Get-Content $toolsetLocationFile -TotalCount 1
    if (Test-Path $path) {
      return $global:_ToolsetBuildProj = $path
    }
  }

  if (-not $restore) {
    Write-Host "Toolset version $toolsetVersion has not been restored." -ForegroundColor Red
    ExitWithExitCode 1
  }

  $buildTool = InitializeBuildTool

  $proj = Join-Path $ToolsetDir "restore.proj"
  $bl = if ($binaryLog) { "/bl:" + (Join-Path $LogDir "ToolsetRestore.binlog") } else { "" }

  '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' | Set-Content $proj
  MSBuild $proj $bl /t:__WriteToolsetLocation /clp:ErrorsOnly`;NoSummary /p:__ToolsetLocationOutputFile=$toolsetLocationFile

  $path = Get-Content $toolsetLocationFile -TotalCount 1
  if (!(Test-Path $path)) {
    throw "Invalid toolset path: $path"
  }

  return $global:_ToolsetBuildProj = $path
}
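
Taken together, a repository entry point can restore the toolset once and then hand the real build over to it; a rough sketch (the property names follow common Arcade usage but are illustrative here):

# Restore the Arcade toolset if needed and run its build entry point.
$toolsetBuildProj = InitializeToolset
$bl = if ($binaryLog) { "/bl:" + (Join-Path $LogDir "Build.binlog") } else { "" }

MSBuild $toolsetBuildProj $bl `
  /p:Configuration=$configuration `
  /p:Restore=$restore `
  /p:Build=$true `
  /p:Test=$false
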
function ExitWithExitCode([int] $exitCode) {
  if ($ci -and $prepareMachine) {
    Stop-Processes
  }
  exit $exitCode
}

function Stop-Processes() {
  Write-Host "Killing running build processes..."
  foreach ($processName in $processesToStopOnExit) {
    Get-Process -Name $processName -ErrorAction SilentlyContinue | Stop-Process
  }
}

#
# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function.
# The arguments are automatically quoted.
# Terminates the script if the build fails.
#
function MSBuild() {
  if ($ci) {
    if (!$binaryLog) {
      throw "Binary log must be enabled in CI build."
    }

    if ($nodeReuse) {
      throw "Node reuse must be disabled in CI build."
    }
  }

  $buildTool = InitializeBuildTool

  $cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"

  if ($warnAsError) {
    $cmdArgs += " /warnaserror /p:TreatWarningsAsErrors=true"
  }

  foreach ($arg in $args) {
    if ($arg -ne $null -and $arg.Trim() -ne "") {
      $cmdArgs += " `"$arg`""
    }
  }

  $exitCode = Exec-Process $buildTool.Path $cmdArgs

  if ($exitCode -ne 0) {
    Write-Host "Build failed." -ForegroundColor Red

    $buildLog = GetMSBuildBinaryLogCommandLineArgument $args
    if ($buildLog -ne $null) {
      Write-Host "See log: $buildLog" -ForegroundColor DarkGray
    }

    ExitWithExitCode $exitCode
  }
}

function GetMSBuildBinaryLogCommandLineArgument($arguments) {
  foreach ($argument in $arguments) {
    if ($argument -ne $null) {
      $arg = $argument.Trim()
      if ($arg.StartsWith("/bl:", "OrdinalIgnoreCase")) {
        return $arg.Substring("/bl:".Length)
      }

      if ($arg.StartsWith("/binaryLogger:", "OrdinalIgnoreCase")) {
        return $arg.Substring("/binaryLogger:".Length)
      }
    }
  }

  return $null
}
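
For example (the path is illustrative), the helper recovers whatever /bl: argument the caller passed so the failure message can point at the binlog:

# Returns 'artifacts\log\Debug\Build.binlog'.
GetMSBuildBinaryLogCommandLineArgument @('/m', '/bl:artifacts\log\Debug\Build.binlog')
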
$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot "..\..")
$EngRoot = Resolve-Path (Join-Path $PSScriptRoot "..")
$ArtifactsDir = Join-Path $RepoRoot "artifacts"
$ToolsetDir = Join-Path $ArtifactsDir "toolset"
$ToolsDir = Join-Path $RepoRoot ".tools"
$LogDir = Join-Path (Join-Path $ArtifactsDir "log") $configuration
$TempDir = Join-Path (Join-Path $ArtifactsDir "tmp") $configuration
$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot "global.json") | ConvertFrom-Json

Create-Directory $ToolsetDir
Create-Directory $TempDir
Create-Directory $LogDir

if ($ci) {
  $env:TEMP = $TempDir
  $env:TMP = $TempDir
}

@@ -0,0 +1,328 @@
# Initialize variables if they aren't already defined.

# CI mode - set to true on CI server for PR validation build or official build.
ci=${ci:-false}

# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
configuration=${configuration:-'Debug'}

# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
# Binary log must be enabled on CI.
binary_log=${binary_log:-$ci}

# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes).
prepare_machine=${prepare_machine:-false}

# True to restore toolsets and dependencies.
restore=${restore:-true}

# Adjusts msbuild verbosity level.
verbosity=${verbosity:-'minimal'}

# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
if [[ "$ci" == true ]]; then
  node_reuse=${node_reuse:-false}
else
  node_reuse=${node_reuse:-true}
fi

# Configures warning treatment in msbuild.
warn_as_error=${warn_as_error:-true}

# True to attempt using a .NET Core SDK already installed on the machine that meets the requirements
# specified in global.json, instead of downloading one.
use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}

# True to use global NuGet cache instead of restoring packages to repository-local directory.
if [[ "$ci" == true ]]; then
  use_global_nuget_cache=${use_global_nuget_cache:-false}
else
  use_global_nuget_cache=${use_global_nuget_cache:-true}
fi

# Resolve any symlinks in the given path.
function ResolvePath {
  local path=$1

  while [[ -h $path ]]; do
    local dir="$( cd -P "$( dirname "$path" )" && pwd )"
    path="$(readlink "$path")"

    # if $path was a relative symlink, we need to resolve it relative to the path where the
    # symlink file was located
    [[ $path != /* ]] && path="$dir/$path"
  done

  # return value
  _ResolvePath="$path"
}

# ReadGlobalVersion [json key]
function ReadGlobalVersion {
  local key=$1

  local line=`grep -m 1 "$key" "$global_json_file"`
  local pattern="\"$key\" *: *\"(.*)\""

  if [[ ! $line =~ $pattern ]]; then
    echo "Error: Cannot find \"$key\" in $global_json_file" >&2
    ExitWithExitCode 1
  fi

  # return value
  _ReadGlobalVersion=${BASH_REMATCH[1]}
}

function InitializeDotNetCli {
  if [[ -n "${_InitializeDotNetCli:-}" ]]; then
    return
  fi

  local install=$1

  # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
  export DOTNET_MULTILEVEL_LOOKUP=0

  # Disable first run since we want to control all package sources
  export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1

  # Disable telemetry on CI
  if [[ $ci == true ]]; then
    export DOTNET_CLI_TELEMETRY_OPTOUT=1
  fi

  # LTTNG is the logging infrastructure used by Core CLR. Need this variable set
  # so it doesn't output warnings to the console.
  export LTTNG_HOME="$HOME"

  # Source Build uses DotNetCoreSdkDir variable
  if [[ -n "${DotNetCoreSdkDir:-}" ]]; then
    export DOTNET_INSTALL_DIR="$DotNetCoreSdkDir"
  fi

  # Find the first path on $PATH that contains the dotnet.exe
  if [[ "$use_installed_dotnet_cli" == true && -z "${DOTNET_INSTALL_DIR:-}" ]]; then
    local dotnet_path=`command -v dotnet`
    if [[ -n "$dotnet_path" ]]; then
      ResolvePath "$dotnet_path"
      export DOTNET_INSTALL_DIR=`dirname "$_ResolvePath"`
    fi
  fi

  ReadGlobalVersion "dotnet"
  local dotnet_sdk_version=$_ReadGlobalVersion
  local dotnet_root=""

  # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
  # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
  if [[ -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
    dotnet_root="$DOTNET_INSTALL_DIR"
  else
    dotnet_root="$repo_root/.dotnet"
    export DOTNET_INSTALL_DIR="$dotnet_root"

    if [[ ! -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
      if [[ "$install" == true ]]; then
        InstallDotNetSdk "$dotnet_root" "$dotnet_sdk_version"
      else
        echo "Unable to find dotnet with SDK version '$dotnet_sdk_version'" >&2
        ExitWithExitCode 1
      fi
    fi
  fi

  # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
  # build steps from using anything other than what we've downloaded.
  export PATH="$dotnet_root:$PATH"

  if [[ $ci == true ]]; then
    # Make sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build
    echo "##vso[task.prependpath]$dotnet_root"
    echo "##vso[task.setvariable variable=DOTNET_MULTILEVEL_LOOKUP]0"
    echo "##vso[task.setvariable variable=DOTNET_SKIP_FIRST_TIME_EXPERIENCE]1"
  fi

  # return value
  _InitializeDotNetCli="$dotnet_root"
}

function InstallDotNetSdk {
  local root=$1
  local version=$2

  GetDotNetInstallScript "$root"
  local install_script=$_GetDotNetInstallScript

  local arch_arg=""
  if [[ $# == 3 ]]; then
    arch_arg="--architecture $3"
  fi

  bash "$install_script" --version $version --install-dir "$root" $arch_arg || {
    local exit_code=$?
    echo "Failed to install dotnet SDK (exit code '$exit_code')." >&2
    ExitWithExitCode $exit_code
  }
}

function GetDotNetInstallScript {
  local root=$1
  local install_script="$root/dotnet-install.sh"
  local install_script_url="https://dot.net/v1/dotnet-install.sh"

  if [[ ! -a "$install_script" ]]; then
    mkdir -p "$root"

    echo "Downloading '$install_script_url'"

    # Use curl if available, otherwise use wget
    if command -v curl > /dev/null; then
      curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script"
    else
      wget -q -O "$install_script" "$install_script_url"
    fi
  fi

  # return value
  _GetDotNetInstallScript="$install_script"
}

function InitializeBuildTool {
  if [[ -n "${_InitializeBuildTool:-}" ]]; then
    return
  fi

  InitializeDotNetCli $restore

  # return values
  _InitializeBuildTool="$_InitializeDotNetCli/dotnet"
  _InitializeBuildToolCommand="msbuild"
}

function GetNuGetPackageCachePath {
  if [[ -z ${NUGET_PACKAGES:-} ]]; then
    if [[ "$use_global_nuget_cache" == true ]]; then
      export NUGET_PACKAGES="$HOME/.nuget/packages"
    else
      export NUGET_PACKAGES="$repo_root/.packages"
    fi
  fi

  # return value
  _GetNuGetPackageCachePath=$NUGET_PACKAGES
}

function InitializeToolset {
  if [[ -n "${_InitializeToolset:-}" ]]; then
    return
  fi

  GetNuGetPackageCachePath

  ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk"

  local toolset_version=$_ReadGlobalVersion
  local toolset_location_file="$toolset_dir/$toolset_version.txt"

  if [[ -a "$toolset_location_file" ]]; then
    local path=`cat "$toolset_location_file"`
    if [[ -a "$path" ]]; then
      # return value
      _InitializeToolset="$path"
      return
    fi
  fi

  if [[ "$restore" != true ]]; then
    echo "Toolset version $toolset_version has not been restored." >&2
    ExitWithExitCode 2
  fi

  local proj="$toolset_dir/restore.proj"

  local bl=""
  if [[ "$binary_log" == true ]]; then
    bl="/bl:$log_dir/ToolsetRestore.binlog"
  fi

  echo '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' > "$proj"
  MSBuild "$proj" $bl /t:__WriteToolsetLocation /clp:ErrorsOnly\;NoSummary /p:__ToolsetLocationOutputFile="$toolset_location_file"

  local toolset_build_proj=`cat "$toolset_location_file"`

  if [[ ! -a "$toolset_build_proj" ]]; then
    echo "Invalid toolset path: $toolset_build_proj" >&2
    ExitWithExitCode 3
  fi

  # return value
  _InitializeToolset="$toolset_build_proj"
}

function ExitWithExitCode {
  if [[ "$ci" == true && "$prepare_machine" == true ]]; then
    StopProcesses
  fi
  exit $1
}

function StopProcesses {
  echo "Killing running build processes..."
  pkill -9 "dotnet" || true
  pkill -9 "vbcscompiler" || true
  return 0
}

function MSBuild {
  if [[ "$ci" == true ]]; then
    if [[ "$binary_log" != true ]]; then
      echo "Binary log must be enabled in CI build." >&2
      ExitWithExitCode 1
    fi

    if [[ "$node_reuse" == true ]]; then
      echo "Node reuse must be disabled in CI build." >&2
      ExitWithExitCode 1
    fi
  fi

  InitializeBuildTool

  local warnaserror_switch=""
  if [[ $warn_as_error == true ]]; then
    warnaserror_switch="/warnaserror"
  fi

  "$_InitializeBuildTool" "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@" || {
    local exit_code=$?
    echo "Build failed (exit code '$exit_code')." >&2
    ExitWithExitCode $exit_code
  }
}

ResolvePath "${BASH_SOURCE[0]}"
_script_dir=`dirname "$_ResolvePath"`

eng_root=`cd -P "$_script_dir/.." && pwd`
repo_root=`cd -P "$_script_dir/../.." && pwd`
artifacts_dir="$repo_root/artifacts"
toolset_dir="$artifacts_dir/toolset"
log_dir="$artifacts_dir/log/$configuration"
temp_dir="$artifacts_dir/tmp/$configuration"

global_json_file="$repo_root/global.json"

# HOME may not be defined in some scenarios, but it is required by NuGet
if [[ -z $HOME ]]; then
  export HOME="$repo_root/artifacts/.home/"
  mkdir -p "$HOME"
fi

mkdir -p "$toolset_dir"
mkdir -p "$temp_dir"
mkdir -p "$log_dir"

if [[ $ci == true ]]; then
  export TEMP="$temp_dir"
  export TMP="$temp_dir"
fi

@@ -0,0 +1,10 @@
{
  "tools": {
    "vs": {
      "version": "15.9"
    }
  },
  "msbuild-sdks": {
    "Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.19157.4"
  }
}
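
For reference, this is how the new scripts consume these values; tools.ps1 above does the equivalent of the following sketch:

$GlobalJson = Get-Content -Raw -Path "global.json" | ConvertFrom-Json
$GlobalJson.tools.vs.version                                # -> 15.9 (minimum VS for InitializeVisualStudioMSBuild)
$GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'    # -> 1.0.0-beta.19157.4 (toolset version restored by InitializeToolset)
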
netci.groovy
@@ -1,60 +0,0 @@
// Groovy Script: http://www.groovy-lang.org/syntax.html
// Jenkins DSL: https://github.com/jenkinsci/job-dsl-plugin/wiki

import jobs.generation.Utilities;
import jobs.generation.ArchivalSettings;

static addArchival(def job, def configName) {
  def archivalSettings = new ArchivalSettings()
  archivalSettings.addFiles("**/artifacts/**")
  archivalSettings.excludeFiles("**/artifacts/${configName}/obj/**")
  archivalSettings.excludeFiles("**/artifacts/${configName}/tmp/**")
  archivalSettings.excludeFiles("**/artifacts/${configName}/VSSetup.obj/**")
  archivalSettings.setFailIfNothingArchived()
  archivalSettings.setArchiveOnFailure()

  Utilities.addArchival(job, archivalSettings)
}

static addGithubTrigger(def job, def isPR, def branchName, def jobName) {
  if (isPR) {
    def prContext = "prtest/${jobName.replace('_', '/')}"
    def triggerPhrase = "(?i)^\\s*(@?dotnet-bot\\s+)?(re)?test\\s+(${prContext})(\\s+please)?\\s*\$"
    def triggerOnPhraseOnly = false

    Utilities.addGithubPRTriggerForBranch(job, branchName, prContext, triggerPhrase, triggerOnPhraseOnly)
  } else {
    Utilities.addGithubPushTrigger(job)
  }
}

def createJob(def platform, def configName, def isPR) {
  def projectName = GithubProject
  def branchName = GithubBranchName
  def jobName = "${platform}_${configName}"
  def newJob = job(Utilities.getFullJobName(projectName, jobName, isPR))

  Utilities.standardJobSetup(newJob, projectName, isPR, "*/${branchName}")

  addGithubTrigger(newJob, isPR, branchName, jobName)
  addArchival(newJob, configName)

  return newJob
}

[true, false].each { isPR ->
  ['windows'].each { platform ->
    ['debug', 'release'].each { configName ->
      def newJob = createJob(platform, configName, isPR)

      Utilities.setMachineAffinity(newJob, 'Windows_NT', 'latest-or-auto')
      Utilities.addXUnitDotNETResults(newJob, "**/artifacts/${configName}/TestResults/*.xml")

      newJob.with {
        steps {
          batchFile(".\\build\\CIBuild.cmd -configuration ${configName} -prepareMachine")
        }
      }
    }
  }
}

@@ -2,6 +2,7 @@
<configuration>
  <packageSources>
    <clear />
    <add key="arcade" value="https://dotnetfeed.blob.core.windows.net/dotnet-tools-internal/index.json" />
    <add key="BuildTools" value="https://dotnet.myget.org/F/dotnet-buildtools/api/v3/index.json" />
    <add key="CoreNightlies" value="https://dotnet.myget.org/F/dotnet-core/api/v3/index.json" />
    <add key="nuget.org" value="https://api.nuget.org/v3/index.json" />

@@ -1,3 +1,3 @@
@echo off
powershell -ExecutionPolicy ByPass %~dp0build\Build.ps1 -restore %*
powershell -ExecutionPolicy ByPass %~dp0eng\common\Build.ps1 -restore %*
exit /b %ErrorLevel%

@@ -1,8 +1,8 @@
<Project>
  <Import Project="..\Directory.Build.props"/>
  <Import Project="$(RepoToolsetDir)Settings.props" />
  <Import Project="Sdk.props" Sdk="Microsoft.DotNet.Arcade.Sdk" />
  <PropertyGroup>
    <PublicRelease Condition="'$(PublicRelease)' == '' AND '$(APPVEYOR)' != 'True'">True</PublicRelease>
    <VSSDKTargetPlatformRegRootSuffix>Exp</VSSDKTargetPlatformRegRootSuffix>
    <Configuration Condition="'$(Configuration)' == ''">Debug</Configuration>
    <DebugType Condition="'$(Configuration)' == 'Debug'">Full</DebugType>
  </PropertyGroup>
</Project>

@@ -1,3 +1,3 @@
<Project>
  <Import Project="$(RepoToolsetDir)Imports.targets" />
  <Import Project="Sdk.targets" Sdk="Microsoft.DotNet.Arcade.Sdk" />
</Project>

@@ -24,11 +24,6 @@
    <ProjectReference Include="..\Microsoft.DotNet.Analyzers.Compatibility\Microsoft.DotNet.Analyzers.Compatibility.csproj" />
  </ItemGroup>

  <ItemGroup>
    <PackageReference Include="xunit" Version="2.3.1" />
    <PackageReference Include="xunit.runner.console" Version="2.3.1" />
  </ItemGroup>

  <ItemGroup>
    <Service Include="{82a7f48d-3b50-4b1e-b82e-3ada8210c358}" />
  </ItemGroup>

@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<PackageManifest Version="2.0.0" xmlns="http://schemas.microsoft.com/developer/vsx-schema/2011" xmlns:d="http://schemas.microsoft.com/developer/vsx-schema-design/2011">
  <Metadata>
    <Identity Id="Microsoft.DotNet.Analyzers.Compatibility.Vsix..fc1bb5fb-358c-445b-b11a-60edf03b81e1" Version="|Microsoft.DotNet.Analyzers.Compatibility;GetBuildVersion|" Language="en-US" Publisher="Microsoft"/>
    <Identity Id="Microsoft.DotNet.Analyzers.Compatibility.Vsix..fc1bb5fb-358c-445b-b11a-60edf03b81e1" Version="|%CurrentProject%;GetVsixVersion|" Language="en-US" Publisher="Microsoft"/>
    <DisplayName>Microsoft .NET Compatibility Analyzers</DisplayName>
    <Description xml:space="preserve">This analyzer checks your code for usage of APIs that aren't implemented on all operating systems.</Description>
  </Metadata>

test.cmd
@@ -1,3 +1,3 @@
@echo off
powershell -ExecutionPolicy ByPass %~dp0build\Build.ps1 -test %*
powershell -ExecutionPolicy ByPass %~dp0eng\common\Build.ps1 -test %*
exit /b %ErrorLevel%