This commit is contained in:
Andrew Stanton-Nurse 2018-11-27 18:06:41 -08:00 committed by GitHub
Parent 1cd2943088
Commit aabd0fa271
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
101 changed files: 4857 additions and 111 deletions

157
.gitignore vendored

@ -1,29 +1,132 @@
[Oo]bj/
[Bb]in/
TestResults/
.nuget/
*.sln.ide/
_ReSharper.*/
packages/
shared/
artifacts/
PublishProfiles/
.vs/
node_modules/
*.user
syntax: glob
### VisualStudio ###
# Tools directory
/[Tt]ools/
.dotnet/
.packages/
# User-specific files
*.suo
*.cache
*.docstates
*.exe
_ReSharper.*
*.psess
*.vsp
*.pidb
*.userprefs
*.*sdf
*.ipch
.settings
*.orig
*.user
*.userprefs
*DS_Store
*.user
*.userosscache
*.sln.docstates
launchSettings.json
# Build results
artifacts/
[Dd]ebug/
[Rr]elease/
x64/
x86/
!eng/common/cross/x86/
[Bb]in/
[Oo]bj/
msbuild.log
msbuild.err
msbuild.wrn
msbuild.binlog
# Visual Studio 2015
.vs/
# Visual Studio 2015 Pre-CTP6
*.sln.ide
*.ide/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
#NUNIT
*.VisualState.xml
TestResult.xml
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# DotCover is a Code Coverage Tool
*.dotCover
# NuGet Packages
*.nuget.props
*.nuget.targets
*.nupkg
**/packages/*
### Windows ###
# Windows image file caches
Thumbs.db
ehthumbs.db
# Folder config file
Desktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msm
*.msp
# Windows shortcuts
*.lnk
### Linux ###
*~
# KDE directory preferences
.directory
### OSX ###
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear on external disk
.Spotlight-V100
.Trashes
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
# vim temporary files
[._]*.s[a-w][a-z]
[._]s[a-w][a-z]
*.un~
Session.vim
.netrwhist
*~
# Visual Studio Code
.vscode/
# Private test configuration and binaries.
config.ps1
**/IISApplications
# Node.js modules
node_modules/
# Python Compile Outputs
*.pyc

16
.vscode/launch.json vendored Normal file

@ -0,0 +1,16 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "PowerShell",
"request": "launch",
"name": "PowerShell ./eng/common/build.ps1",
"script": "${workspaceRoot}/eng/common/build.ps1",
"args": [],
"cwd": "${workspaceRoot}"
},
]
}


@ -1,25 +1,32 @@
<Project>
<Import Project="version.props" />
<?xml version="1.0" encoding="utf-8"?>
<Project>
<!-- Leave this file here, even if it's empty. It stops chaining imports. -->
<PropertyGroup>
<Product>Microsoft ASP.NET Core</Product>
<RepositoryUrl>https://github.com/aspnet/AspLabs</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<RepositoryRoot>$(MSBuildThisFileDirectory)</RepositoryRoot>
<ImportNetSdkFromRepoToolset>false</ImportNetSdkFromRepoToolset>
<AssemblyOriginatorKeyFile>$(MSBuildThisFileDirectory)eng\AspNetCore.snk</AssemblyOriginatorKeyFile>
<SignAssembly>true</SignAssembly>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<LangVersion>7.3</LangVersion>
<!-- Suppress the message about using a preview version of the .NET Core SDK -->
<SuppressNETCoreSdkPreviewMessage>true</SuppressNETCoreSdkPreviewMessage>
<!-- By default, disable packing all .csproj files into .nupkgs. Projects which should be packed opt-in by setting this to true. -->
<IsPackable>false</IsPackable>
<ArtifactsDir>$(RepositoryRoot)artifacts\</ArtifactsDir>
<PackageOutputPath>$(ArtifactsDir)packages\</PackageOutputPath>
<BaseIntermediateOutputPath>$(ArtifactsDir)obj\$(MSBuildProjectName)\</BaseIntermediateOutputPath>
<BaseOutputPath>$(ArtifactsDir)bin\$(MSBuildProjectName)\</BaseOutputPath>
</PropertyGroup>
<PropertyGroup>
<!-- <RestoreSources>$(RestoreSources);http://api.nuget.org/v3/index.json;</RestoreSources> -->
<!-- aspnetcore-dev feed for internal packages like Microsoft.Extensions.Logging.Testing -->
<RestoreSources>$(RestoreSources);https://dotnet.myget.org/F/aspnetcore-dev/api/v3/index.json</RestoreSources>
<RestoreSources>$(RestoreSources);https://dotnet.myget.org/F/dotnet-core/api/v3/index.json</RestoreSources>
</PropertyGroup>
<PropertyGroup Label="Package and Assembly Metadata">
<Product>Microsoft ASP.NET Core</Product>
</PropertyGroup>
<PropertyGroup Label="Warning Suppressions">
<!--
Suppress a warning about upcoming deprecation of PackageLicenseUrl. When embedding licenses are supported,
replace PackageLicenseUrl with PackageLicenseExpression.
-->
<NoWarn>$(NoWarn);NU5125</NoWarn>
</PropertyGroup>
<Import Project="Sdk.props" Sdk="Microsoft.DotNet.Arcade.Sdk" />
</Project>


@ -1,2 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<Project>
</Project>
<!-- Leave this file here, even if it's empty. It stops chaining imports. -->
<Import Project="Sdk.targets" Sdk="Microsoft.DotNet.Arcade.Sdk" />
</Project>


@ -2,9 +2,8 @@
<configuration>
<packageSources>
<clear />
<!-- <add key="myget.org dotnet-core" value="https://dotnet.myget.org/F/dotnet-core/api/v3/index.json" />
<add key="myget.org aspnetcore-dev" value="https://dotnet.myget.org/F/aspnetcore-dev/api/v3/index.json" />
<add key="myget.org aspnetcore-tools" value="https://dotnet.myget.org/F/aspnetcore-tools/api/v3/index.json" /> -->
<add key="arcade" value="https://dotnetfeed.blob.core.windows.net/dotnet-tools-internal/index.json" />
<add key="dotnet-core" value="https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json" />
<add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
</packageSources>
</configuration>
</configuration>


@ -1,4 +1,10 @@
AspLabs
=======
[![Build Status](https://dev.azure.com/dnceng/public/_apis/build/status/aspnet/AspLabs/aspnet-AspLabs-github)](https://dev.azure.com/dnceng/public/_build/latest?definitionId=256)
Repo for ASP.NET experiments that are not ready for a production release.
## Building
To build this repo, run the `build.cmd` or `build.sh` in the root of this repo. This repo uses the .NET [Arcade toolset](https://github.com/dotnet/arcade).
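
For example, on Linux or macOS a full local build of everything referenced from eng/Build.props can be kicked off like this (illustrative sketch; `build.cmd` is the equivalent entry point on Windows, and extra arguments are passed through to the underlying Arcade script):

./build.sh                                                                # restore, build, test and pack (what the root wrapper passes by default)
./eng/common/build.sh --configuration Release --restore --build --test   # call the inner Arcade script with explicit options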

158
azure-pipelines.yml Normal file

@ -0,0 +1,158 @@
variables:
Build.Repository.Clean: true
_TeamName: DotNetCore
# Variables for public PR builds
${{ if or(eq(variables['System.TeamProject'], 'public'), in(variables['Build.Reason'], 'PullRequest'), contains(variables['Build.DefinitionName'], 'github')) }}:
_HelixType: build/product
_HelixSource: pr/aspnet/AspLabs/$(Build.SourceBranch)
# These are needed to suppress a warning in the cibuild invocation since AzDO leaves the `$(_SignArgs)` in place and it fails to resolve.
_SignArgs: ''
_OfficialBuildIdArgs: ''
# Variables for internal Official builds
${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), contains(variables['Build.DefinitionName'], 'official')) }}:
_HelixType: build/product
_HelixSource: official/aspnet/AspLabs/$(Build.SourceBranch)
_SignType: test
_SignArgs: /p:DotNetSignType=$(_SignType) /p:TeamName=$(_TeamName)
_OfficialBuildIdArgs: /p:OfficialBuildId=$(Build.BuildNumber)
resources:
containers:
- container: LinuxContainer
image: microsoft/dotnet-buildtools-prereqs:ubuntu-14.04-cross-0cd4667-20170319080304
trigger:
- master
pr:
- master
# Three phases for each of the three OSes we want to run on
phases:
- template: /eng/common/templates/phases/base.yml
parameters:
name: Windows
enableTelemetry: true
# enableMicrobuild can't be read from a user-defined variable (Azure DevOps limitation)
${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), contains(variables['Build.DefinitionName'], 'official')) }}:
enableMicrobuild: true
queue:
# agent pool can't be read from a user-defined variable (Azure DevOps limitation)
${{ if or(eq(variables['System.TeamProject'], 'public'), in(variables['Build.Reason'], 'PullRequest'), contains(variables['Build.DefinitionName'], 'github')) }}:
name: dotnet-external-temp
${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), contains(variables['Build.DefinitionName'], 'official')) }}:
name: dotnet-internal-temp
matrix:
debug:
_BuildConfig: Debug
release:
_BuildConfig: Release
steps:
- script: eng\common\cibuild.cmd
-configuration $(_BuildConfig)
-prepareMachine
$(_SignArgs)
$(_OfficialBuildIdArgs)
name: Build
displayName: Build
condition: succeeded()
- task: PublishTestResults@2
displayName: Publish xUnit Test Results
condition: always()
continueOnError: true
inputs:
testRunner: xunit
testResultsFiles: 'artifacts/TestResults/$(_BuildConfig)/*.xml'
- task: PublishBuildArtifacts@1
displayName: Publish Packages
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/packages/$(_BuildConfig)'
PublishLocation: Container
ArtifactName: Packages_$(Agent.Os)_$(Agent.JobName)
continueOnError: true
condition: always()
- task: PublishBuildArtifacts@1
displayName: Publish Logs
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
PublishLocation: Container
ArtifactName: Logs_$(Agent.Os)_$(Agent.JobName)
continueOnError: true
condition: always()
variables:
_HelixBuildConfig: $(_BuildConfig)
- template: /eng/common/templates/phases/base.yml
parameters:
name: macOS
enableTelemetry: true
queue:
name: Hosted macOS
matrix:
debug:
_BuildConfig: Debug
release:
_BuildConfig: Release
steps:
- script: eng/common/cibuild.sh
--configuration $(_BuildConfig)
--prepareMachine
name: Build
displayName: Build
condition: succeeded()
- task: PublishTestResults@2
displayName: Publish xUnit Test Results
condition: always()
continueOnError: true
inputs:
testRunner: xunit
testResultsFiles: 'artifacts/TestResults/$(_BuildConfig)/*.xml'
- task: PublishBuildArtifacts@1
displayName: Publish Logs
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
PublishLocation: Container
ArtifactName: Logs_$(Agent.Os)_$(Agent.JobName)
continueOnError: true
condition: always()
variables:
_HelixBuildConfig: $(_BuildConfig)
- template: /eng/common/templates/phases/base.yml
parameters:
name: Linux
enableTelemetry: true
queue:
name: Hosted Ubuntu 1604
container: LinuxContainer
matrix:
debug:
_BuildConfig: Debug
release:
_BuildConfig: Release
steps:
- script: eng/common/cibuild.sh
--configuration $(_BuildConfig)
--prepareMachine
name: Build
displayName: Build
condition: succeeded()
- task: PublishTestResults@2
displayName: Publish xUnit Test Results
condition: always()
continueOnError: true
inputs:
testRunner: xunit
testResultsFiles: 'artifacts/TestResults/$(_BuildConfig)/*.xml'
- task: PublishBuildArtifacts@1
displayName: Publish Logs
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
PublishLocation: Container
ArtifactName: Logs_$(Agent.Os)_$(Agent.JobName)
continueOnError: true
condition: always()
variables:
_HelixBuildConfig: $(_BuildConfig)

3
build.cmd Normal file

@ -0,0 +1,3 @@
@echo off
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0eng\common\Build.ps1""" -build -restore -pack -test %*"
exit /b %ErrorLevel%

16
build.sh Executable file

@ -0,0 +1,16 @@
#!/usr/bin/env bash
source="${BASH_SOURCE[0]}"
# resolve $SOURCE until the file is no longer a symlink
while [[ -h $source ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
"$scriptroot/eng/common/build.sh" --pack --build --restore --test $@

10
eng/Build.props Normal file

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<!-- Specify the solutions to build. Add all new solutions/projects here as necessary or the main build won't build them! -->
<ItemGroup>
<ProjectToBuild Include="$(MSBuildThisFileDirectory)..\src\DotNetDiagnostics\dotnet-monitor.sln" />
<ProjectToBuild Include="$(MSBuildThisFileDirectory)..\src\HttpRepl\HttpRepl.sln" />
<ProjectToBuild Include="$(MSBuildThisFileDirectory)..\src\Proxy\Proxy.sln" />
<ProjectToBuild Include="$(MSBuildThisFileDirectory)..\src\WebHooks\WebHooks.sln" />
</ItemGroup>
</Project>

10
eng/Signing.props Normal file

@ -0,0 +1,10 @@
<Project>
<!--
These are third party libraries that we use in this repo. We need to sign them even if they
are already signed. However, they must be signed with a 3rd party certificate.
-->
<ItemGroup>
<FileSignInfo Include="Newtonsoft.Json.dll" CertificateName="3PartySHA2" />
<FileSignInfo Include="Newtonsoft.Json.Bson.dll" CertificateName="3PartySHA2" />
</ItemGroup>
</Project>

10
eng/Version.Details.xml Normal file

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<Dependencies>
<ProductDependencies></ProductDependencies>
<ToolsetDependencies>
<Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="1.0.0-beta.18526.8">
<Uri>https://github.com/dotnet/arcade</Uri>
<Sha>ef208f75c0494f47bf79a317773adcb398c35f92</Sha>
</Dependency>
</ToolsetDependencies>
</Dependencies>

13
eng/Versions.props Normal file

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<!-- Opt out of certain Arcade features -->
<PropertyGroup>
<UsingToolXliff>false</UsingToolXliff>
</PropertyGroup>
<PropertyGroup>
<MajorVersion>0</MajorVersion>
<MinorVersion>1</MinorVersion>
<PreReleaseVersionLabel>alpha</PreReleaseVersionLabel>
</PropertyGroup>
</Project>

3
eng/common/CIBuild.cmd Normal file

@ -0,0 +1,3 @@
@echo off
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"
exit /b %ErrorLevel%


@ -0,0 +1,3 @@
@echo off
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -publishBuildAssets %*"
exit /b %ErrorLevel%


@ -0,0 +1,3 @@
@echo off
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -pushBuildAssets %*"
exit /b %ErrorLevel%

107
eng/common/build.ps1 Normal file

@ -0,0 +1,107 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
[string] $configuration = "Debug",
[string] $projects = "",
[string] $verbosity = "minimal",
[string] $msbuildEngine = $null,
[bool] $warnaserror = $true,
[bool] $nodereuse = $true,
[switch] $restore,
[switch] $deployDeps,
[switch] $build,
[switch] $rebuild,
[switch] $deploy,
[switch] $test,
[switch] $integrationTest,
[switch] $performanceTest,
[switch] $sign,
[switch] $pack,
[switch] $publish,
[switch] $publishBuildAssets,
[switch] $ci,
[switch] $prepareMachine,
[switch] $help,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
)
. $PSScriptRoot\tools.ps1
function Print-Usage() {
Write-Host "Common settings:"
Write-Host " -configuration <value> Build configuration Debug, Release"
Write-Host " -verbosity <value> Msbuild verbosity (q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic])"
Write-Host " -help Print help and exit"
Write-Host ""
Write-Host "Actions:"
Write-Host " -restore Restore dependencies"
Write-Host " -build Build solution"
Write-Host " -rebuild Rebuild solution"
Write-Host " -deploy Deploy built VSIXes"
Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)"
Write-Host " -test Run all unit tests in the solution"
Write-Host " -pack Package build outputs into NuGet packages and Willow components"
Write-Host " -integrationTest Run all integration tests in the solution"
Write-Host " -performanceTest Run all performance tests in the solution"
Write-Host " -sign Sign build outputs"
Write-Host " -publish Publish artifacts (e.g. symbols)"
Write-Host " -publishBuildAssets Push assets to BAR"
Write-Host ""
Write-Host "Advanced settings:"
Write-Host " -projects <value> Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)"
Write-Host " -ci Set when running on CI server"
Write-Host " -prepareMachine Prepare machine for CI run"
Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild."
Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)."
}
if ($help -or (($properties -ne $null) -and ($properties.Contains("/help") -or $properties.Contains("/?")))) {
Print-Usage
exit 0
}
try {
if ($projects -eq "") {
$projects = Join-Path $RepoRoot "*.sln"
}
InitializeTools
$BuildLog = Join-Path $LogDir "Build.binlog"
MSBuild $ToolsetBuildProj `
/bl:$BuildLog `
/p:Configuration=$configuration `
/p:Projects=$projects `
/p:RepoRoot=$RepoRoot `
/p:Restore=$restore `
/p:DeployDeps=$deployDeps `
/p:Build=$build `
/p:Rebuild=$rebuild `
/p:Deploy=$deploy `
/p:Test=$test `
/p:Pack=$pack `
/p:IntegrationTest=$integrationTest `
/p:PerformanceTest=$performanceTest `
/p:Sign=$sign `
/p:Publish=$publish `
/p:PublishBuildAssets=$publishBuildAssets `
/p:ContinuousIntegrationBuild=$ci `
@properties
if ($lastExitCode -ne 0) {
Write-Host "Build Failed (exit code '$lastExitCode'). See log: $BuildLog" -ForegroundColor Red
ExitWithExitCode $lastExitCode
}
ExitWithExitCode $lastExitCode
}
catch {
Write-Host $_
Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
ExitWithExitCode 1
}
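
For reference, an invocation of this script exercising the options documented by Print-Usage might look like the following (illustrative sketch, assuming PowerShell Core is available as pwsh; on Windows the repo's build.cmd wraps the same call):

pwsh ./eng/common/build.ps1 -restore -build -test -pack -configuration Release
pwsh ./eng/common/build.ps1 -restore -build -t -co Release   # per the help text, unambiguous prefixes such as -t and -co are accepted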

171
eng/common/build.sh Executable file

@ -0,0 +1,171 @@
#!/usr/bin/env bash
source="${BASH_SOURCE[0]}"
# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
help=false
restore=false
build=false
rebuild=false
test=false
pack=false
publish=false
integration_test=false
performance_test=false
sign=false
public=false
ci=false
warnaserror=true
nodereuse=true
projects=''
configuration='Debug'
prepare_machine=false
verbosity='minimal'
properties=''
while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
case $lowerI in
--build)
build=true
shift 1
;;
--ci)
ci=true
shift 1
;;
--configuration)
configuration=$2
shift 2
;;
--help)
echo "Common settings:"
echo " --configuration <value> Build configuration Debug, Release"
echo " --verbosity <value> Msbuild verbosity (q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic])"
echo " --help Print help and exit"
echo ""
echo "Actions:"
echo " --restore Restore dependencies"
echo " --build Build solution"
echo " --rebuild Rebuild solution"
echo " --test Run all unit tests in the solution"
echo " --sign Sign build outputs"
echo " --publish Publish artifacts (e.g. symbols)"
echo " --pack Package build outputs into NuGet packages and Willow components"
echo ""
echo "Advanced settings:"
echo " --solution <value> Path to solution to build"
echo " --ci Set when running on CI server"
echo " --prepareMachine Prepare machine for CI run"
echo ""
echo "Command line arguments not listed above are passed through to MSBuild."
exit 0
;;
--pack)
pack=true
shift 1
;;
--preparemachine)
prepare_machine=true
shift 1
;;
--rebuild)
rebuild=true
shift 1
;;
--restore)
restore=true
shift 1
;;
--sign)
sign=true
shift 1
;;
--solution)
solution=$2
shift 2
;;
--projects)
projects=$2
shift 2
;;
--test)
test=true
shift 1
;;
--integrationtest)
integration_test=true
shift 1
;;
--performancetest)
performance_test=true
shift 1
;;
--publish)
publish=true
shift 1
;;
--verbosity)
verbosity=$2
shift 2
;;
--warnaserror)
warnaserror=$2
shift 2
;;
--nodereuse)
nodereuse=$2
shift 2
;;
*)
properties="$properties $1"
shift 1
;;
esac
done
. "$scriptroot/tools.sh"
if [[ -z $projects ]]; then
projects="$repo_root/*.sln"
fi
InitializeTools
build_log="$log_dir/Build.binlog"
MSBuild "$toolset_build_proj" \
/bl:"$build_log" \
/p:Configuration=$configuration \
/p:Projects="$projects" \
/p:RepoRoot="$repo_root" \
/p:Restore=$restore \
/p:Build=$build \
/p:Rebuild=$rebuild \
/p:Test=$test \
/p:Pack=$pack \
/p:IntegrationTest=$integration_test \
/p:PerformanceTest=$performance_test \
/p:Sign=$sign \
/p:Publish=$publish \
/p:ContinuousIntegrationBuild=$ci \
$properties
lastexitcode=$?
if [[ $lastexitcode != 0 ]]; then
echo "Build failed (exit code '$lastexitcode'). See log: $build_log"
fi
ExitWithExitCode $lastexitcode
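
Arguments that the case statement above does not recognize are accumulated in $properties and forwarded to MSBuild unchanged, so a call can mix script options with MSBuild properties, e.g. (illustrative sketch; /p:MyCustomProperty is a hypothetical pass-through property):

./eng/common/build.sh --restore --build --configuration Debug --warnaserror false /p:MyCustomProperty=true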

16
eng/common/cibuild.sh Executable file

@ -0,0 +1,16 @@
#!/usr/bin/env bash
source="${BASH_SOURCE[0]}"
# resolve $SOURCE until the file is no longer a symlink
while [[ -h $source ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where
# the symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@


@ -0,0 +1 @@
This folder will be mirrored by the Git-TFS Mirror recursively.


@ -0,0 +1,41 @@
set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../)
set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot)
set(CLR_CMAKE_PLATFORM_ANDROID "Android")
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_VERSION 1)
set(CMAKE_SYSTEM_PROCESSOR arm)
## Specify the toolchain
set(TOOLCHAIN "arm-linux-androideabi")
set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN})
set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-)
find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang)
find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++)
find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang)
find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar)
find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar)
find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy)
find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
add_compile_options(--sysroot=${CROSS_ROOTFS})
add_compile_options(-fPIE)
add_compile_options(-mfloat-abi=soft)
include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/)
include_directories(SYSTEM ${CROSS_NDK_TOOLCHAIN}/include/c++/4.9.x/arm-linux-androideabi/)
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)


@ -0,0 +1,42 @@
set(CROSS_NDK_TOOLCHAIN $ENV{ROOTFS_DIR}/../)
set(CROSS_ROOTFS ${CROSS_NDK_TOOLCHAIN}/sysroot)
set(CLR_CMAKE_PLATFORM_ANDROID "Android")
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_VERSION 1)
set(CMAKE_SYSTEM_PROCESSOR aarch64)
## Specify the toolchain
set(TOOLCHAIN "aarch64-linux-android")
set(CMAKE_PREFIX_PATH ${CROSS_NDK_TOOLCHAIN})
set(TOOLCHAIN_PREFIX ${TOOLCHAIN}-)
find_program(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}clang)
find_program(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}clang++)
find_program(CMAKE_ASM_COMPILER ${TOOLCHAIN_PREFIX}clang)
find_program(CMAKE_AR ${TOOLCHAIN_PREFIX}ar)
find_program(CMAKE_LD ${TOOLCHAIN_PREFIX}ar)
find_program(CMAKE_OBJCOPY ${TOOLCHAIN_PREFIX}objcopy)
find_program(CMAKE_OBJDUMP ${TOOLCHAIN_PREFIX}objdump)
add_compile_options(--sysroot=${CROSS_ROOTFS})
add_compile_options(-fPIE)
## Needed for Android or bionic specific conditionals
add_compile_options(-D__ANDROID__)
add_compile_options(-D__BIONIC__)
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -B ${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -L${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} --sysroot=${CROSS_ROOTFS}")
set(CROSS_LINK_FLAGS "${CROSS_LINK_FLAGS} -fPIE -pie")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${CROSS_LINK_FLAGS}" CACHE STRING "" FORCE)
set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse


@ -0,0 +1,3 @@
# Debian (sid) # UNSTABLE
deb http://ftp.debian.org/debian/ sid main contrib non-free
deb-src http://ftp.debian.org/debian/ sid main contrib non-free


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ vivid main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ vivid-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ vivid-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ vivid-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ wily main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ wily-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ wily-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ wily-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse


@ -0,0 +1,71 @@
From e72c9d7ead60e3317bd6d1fade995c07021c947b Mon Sep 17 00:00:00 2001
From: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
Date: Thu, 7 May 2015 13:25:04 -0400
Subject: [PATCH] Fix: building probe providers with C++ compiler
Robert Daniels wrote:
> > I'm attempting to use lttng userspace tracing with a C++ application
> > on an ARM platform. I'm using GCC 4.8.4 on Linux 3.14 with the 2.6
> > release of lttng. I've compiled lttng-modules, lttng-ust, and
> > lttng-tools and have been able to get a simple test working with C
> > code. When I attempt to run the hello.cxx test on my target it will
> > segfault.
>
>
> I spent a little time digging into this issue and finally discovered the
> cause of my segfault with ARM C++ tracepoints.
>
> There is a struct called 'lttng_event' in ust-events.h which contains an
> empty union 'u'. This was the cause of my issue. Under C, this empty union
> compiles to a zero byte member while under C++ it compiles to a one byte
> member, and in my case was four-byte aligned which caused my C++ code to
> have the 'cds_list_head node' offset incorrectly by four bytes. This lead
> to an incorrect linked list structure which caused my issue.
>
> Since this union is empty, I simply removed it from the struct and everything
> worked correctly.
>
> I don't know the history or purpose behind this empty union so I'd like to
> know if this is a safe fix. If it is I can submit a patch with the union
> removed.
That's a very nice catch!
We do not support building tracepoint probe provider with
g++ yet, as stated in lttng-ust(3):
"- Note for C++ support: although an application instrumented with
tracepoints can be compiled with g++, tracepoint probes should be
compiled with gcc (only tested with gcc so far)."
However, if it works fine with this fix, then I'm tempted to take it,
especially because removing the empty union does not appear to affect
the layout of struct lttng_event as seen from liblttng-ust, which must
be compiled with a C compiler, and from probe providers compiled with
a C compiler. So all we are changing is the layout of a probe provider
compiled with a C++ compiler, which is anyway buggy at the moment,
because it is not compatible with the layout expected by liblttng-ust
compiled with a C compiler.
Reported-by: Robert Daniels <robert.daniels@vantagecontrols.com>
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
---
include/lttng/ust-events.h | 2 --
1 file changed, 2 deletions(-)
diff --git a/usr/include/lttng/ust-events.h b/usr/include/lttng/ust-events.h
index 328a875..3d7a274 100644
--- a/usr/include/lttng/ust-events.h
+++ b/usr/include/lttng/ust-events.h
@@ -407,8 +407,6 @@ struct lttng_event {
void *_deprecated1;
struct lttng_ctx *ctx;
enum lttng_ust_instrumentation instrumentation;
- union {
- } u;
struct cds_list_head node; /* Event list in session */
struct cds_list_head _deprecated2;
void *_deprecated3;
--
2.7.4
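
The layout difference described in the message above is easy to reproduce outside of lttng-ust. The following is a small standalone check (an illustrative sketch, not part of this patch; the file and struct names are made up): the same source is compiled once as C and once as C++, and the empty union contributes zero bytes in the first case but at least one byte in the second, shifting the offset of the following member.

cat > /tmp/empty_union_demo.c <<'EOF'
#include <stdio.h>
#include <stddef.h>
/* Mirrors the problematic pattern: an empty union sitting between two members. */
struct demo {
    char before;
    union { } u;   /* zero-size in C (GNU extension), at least one byte in C++ */
    char after;
};
int main(void) {
    printf("sizeof(struct demo)=%zu offsetof(after)=%zu\n",
           sizeof(struct demo), offsetof(struct demo, after));
    return 0;
}
EOF
gcc /tmp/empty_union_demo.c -o /tmp/demo_c && /tmp/demo_c              # typically prints sizeof=2 offsetof=1
g++ -x c++ /tmp/empty_union_demo.c -o /tmp/demo_cpp && /tmp/demo_cpp   # typically prints sizeof=3 offsetof=2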


@ -0,0 +1,97 @@
diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
--- a/usr/include/urcu/uatomic/generic.h 2014-03-28 06:04:42.000000000 +0900
+++ b/usr/include/urcu/uatomic/generic.h 2017-02-13 10:35:21.189927116 +0900
@@ -65,17 +65,17 @@
switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
case 1:
- return __sync_val_compare_and_swap_1(addr, old, _new);
+ return __sync_val_compare_and_swap_1((uint8_t *) addr, old, _new);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
case 2:
- return __sync_val_compare_and_swap_2(addr, old, _new);
+ return __sync_val_compare_and_swap_2((uint16_t *) addr, old, _new);
#endif
case 4:
- return __sync_val_compare_and_swap_4(addr, old, _new);
+ return __sync_val_compare_and_swap_4((uint32_t *) addr, old, _new);
#if (CAA_BITS_PER_LONG == 64)
case 8:
- return __sync_val_compare_and_swap_8(addr, old, _new);
+ return __sync_val_compare_and_swap_8((uint64_t *) addr, old, _new);
#endif
}
_uatomic_link_error();
@@ -100,20 +100,20 @@
switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
case 1:
- __sync_and_and_fetch_1(addr, val);
+ __sync_and_and_fetch_1((uint8_t *) addr, val);
return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
case 2:
- __sync_and_and_fetch_2(addr, val);
+ __sync_and_and_fetch_2((uint16_t *) addr, val);
return;
#endif
case 4:
- __sync_and_and_fetch_4(addr, val);
+ __sync_and_and_fetch_4((uint32_t *) addr, val);
return;
#if (CAA_BITS_PER_LONG == 64)
case 8:
- __sync_and_and_fetch_8(addr, val);
+ __sync_and_and_fetch_8((uint64_t *) addr, val);
return;
#endif
}
@@ -139,20 +139,20 @@
switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
case 1:
- __sync_or_and_fetch_1(addr, val);
+ __sync_or_and_fetch_1((uint8_t *) addr, val);
return;
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
case 2:
- __sync_or_and_fetch_2(addr, val);
+ __sync_or_and_fetch_2((uint16_t *) addr, val);
return;
#endif
case 4:
- __sync_or_and_fetch_4(addr, val);
+ __sync_or_and_fetch_4((uint32_t *) addr, val);
return;
#if (CAA_BITS_PER_LONG == 64)
case 8:
- __sync_or_and_fetch_8(addr, val);
+ __sync_or_and_fetch_8((uint64_t *) addr, val);
return;
#endif
}
@@ -180,17 +180,17 @@
switch (len) {
#ifdef UATOMIC_HAS_ATOMIC_BYTE
case 1:
- return __sync_add_and_fetch_1(addr, val);
+ return __sync_add_and_fetch_1((uint8_t *) addr, val);
#endif
#ifdef UATOMIC_HAS_ATOMIC_SHORT
case 2:
- return __sync_add_and_fetch_2(addr, val);
+ return __sync_add_and_fetch_2((uint16_t *) addr, val);
#endif
case 4:
- return __sync_add_and_fetch_4(addr, val);
+ return __sync_add_and_fetch_4((uint32_t *) addr, val);
#if (CAA_BITS_PER_LONG == 64)
case 8:
- return __sync_add_and_fetch_8(addr, val);
+ return __sync_add_and_fetch_8((uint64_t *) addr, val);
#endif
}
_uatomic_link_error();


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ vivid main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ vivid-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ vivid-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ vivid-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ vivid-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ wily main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ wily-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ wily-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ wily-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ wily-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse


@ -0,0 +1,3 @@
# Debian (jessie) # Stable
deb http://ftp.debian.org/debian/ jessie main contrib non-free
deb-src http://ftp.debian.org/debian/ jessie main contrib non-free


@ -0,0 +1,44 @@
#!/usr/bin/env bash
set -e
__ARM_SOFTFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__TIZEN_CROSSDIR="$__ARM_SOFTFP_CrossDir/tizen"
if [[ -z "$ROOTFS_DIR" ]]; then
echo "ROOTFS_DIR is not defined."
exit 1;
fi
# Clean-up (TODO-Cleanup: We may already delete $ROOTFS_DIR at ./cross/build-rootfs.sh.)
# hk0110
if [ -d "$ROOTFS_DIR" ]; then
umount $ROOTFS_DIR/*
rm -rf $ROOTFS_DIR
fi
TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
mkdir -p $TIZEN_TMP_DIR
# Download files
echo ">>Start downloading files"
VERBOSE=1 $__ARM_SOFTFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
echo "<<Finish downloading files"
echo ">>Start constructing Tizen rootfs"
TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
cd $ROOTFS_DIR
for f in $TIZEN_RPM_FILES; do
rpm2cpio $f | cpio -idm --quiet
done
echo "<<Finish constructing Tizen rootfs"
# Cleanup tmp
rm -rf $TIZEN_TMP_DIR
# Configure Tizen rootfs
echo ">>Start configuring Tizen rootfs"
rm ./usr/lib/libunwind.so
ln -s libunwind.so.8 ./usr/lib/libunwind.so
ln -sfn asm-arm ./usr/include/asm
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
echo "<<Finish configuring Tizen rootfs"


@ -0,0 +1,171 @@
#!/usr/bin/env bash
set -e
if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
VERBOSE=0
fi
Log()
{
if [ $VERBOSE -ge $1 ]; then
echo ${@:2}
fi
}
Inform()
{
Log 1 -e "\x1B[0;34m$@\x1B[m"
}
Debug()
{
Log 2 -e "\x1B[0;32m$@\x1B[m"
}
Error()
{
>&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
}
Fetch()
{
URL=$1
FILE=$2
PROGRESS=$3
if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
CURL_OPT="--progress-bar"
else
CURL_OPT="--silent"
fi
curl $CURL_OPT $URL > $FILE
}
hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
TMPDIR=$1
if [ ! -d $TMPDIR ]; then
TMPDIR=./tizen_tmp
Debug "Create temporary directory : $TMPDIR"
mkdir -p $TMPDIR
fi
TIZEN_URL=http://download.tizen.org/releases/milestone/tizen
BUILD_XML=build.xml
REPOMD_XML=repomd.xml
PRIMARY_XML=primary.xml
TARGET_URL="http://__not_initialized"
Xpath_get()
{
XPATH_RESULT=''
XPATH=$1
XML_FILE=$2
RESULT=$(xmllint --xpath $XPATH $XML_FILE)
if [[ -z ${RESULT// } ]]; then
Error "Can not find target from $XML_FILE"
Debug "Xpath = $XPATH"
exit 1
fi
XPATH_RESULT=$RESULT
}
fetch_tizen_pkgs_init()
{
TARGET=$1
PROFILE=$2
Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
mkdir -p $TMP_PKG_DIR
PKG_URL=$TIZEN_URL/$PROFILE/latest
BUILD_XML_URL=$PKG_URL/$BUILD_XML
TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
Fetch $BUILD_XML_URL $TMP_BUILD
Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
Xpath_get $TARGET_XPATH $TMP_BUILD
TARGET_PATH=$XPATH_RESULT
TARGET_URL=$PKG_URL/$TARGET_PATH
REPOMD_URL=$TARGET_URL/repodata/repomd.xml
PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
Fetch $REPOMD_URL $TMP_REPOMD
Debug "fetch $REPOMD_URL to $TMP_REPOMD"
Xpath_get $PRIMARY_XPATH $TMP_REPOMD
PRIMARY_XML_PATH=$XPATH_RESULT
PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
Fetch $PRIMARY_URL $TMP_PRIMARYGZ
Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
gunzip $TMP_PRIMARYGZ
Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
}
fetch_tizen_pkgs()
{
ARCH=$1
PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
for pkg in ${@:2}
do
Inform "Fetching... $pkg"
XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
XPATH=${XPATH/_ARCH_/$ARCH}
Xpath_get $XPATH $TMP_PRIMARY
PKG_PATH=$XPATH_RESULT
XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
XPATH=${XPATH/_ARCH_/$ARCH}
Xpath_get $XPATH $TMP_PRIMARY
CHECKSUM=$XPATH_RESULT
PKG_URL=$TARGET_URL/$PKG_PATH
PKG_FILE=$(basename $PKG_PATH)
PKG_PATH=$TMPDIR/$PKG_FILE
Debug "Download $PKG_URL to $PKG_PATH"
Fetch $PKG_URL $PKG_PATH true
echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
if [ $? -ne 0 ]; then
Error "Fail to fetch $PKG_URL to $PKG_PATH"
Debug "Checksum = $CHECKSUM"
exit 1
fi
done
}
Inform "Initialize arm base"
fetch_tizen_pkgs_init standard base
Inform "fetch common packages"
fetch_tizen_pkgs armv7l gcc glibc glibc-devel libicu libicu-devel
fetch_tizen_pkgs noarch linux-glibc-devel
Inform "fetch coreclr packages"
fetch_tizen_pkgs armv7l lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel tizen-release lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
Inform "fetch corefx packages"
fetch_tizen_pkgs armv7l libcom_err libcom_err-devel zlib zlib-devel libopenssl libopenssl-devel krb5 krb5-devel libcurl libcurl-devel
Inform "Initialize standard unified"
fetch_tizen_pkgs_init standard unified
Inform "fetch corefx packages"
fetch_tizen_pkgs armv7l gssdp gssdp-devel


@ -0,0 +1,50 @@
lang en_US.UTF-8
keyboard us
timezone --utc Asia/Seoul
part / --fstype="ext4" --size=3500 --ondisk=mmcblk0 --label rootfs --fsoptions=defaults,noatime
rootpw tizen
desktop --autologinuser=root
user --name root --groups audio,video --password 'tizen'
repo --name=standard --baseurl=http://download.tizen.org/releases/milestone/tizen/unified/latest/repos/standard/packages/ --ssl_verify=no
repo --name=base --baseurl=http://download.tizen.org/releases/milestone/tizen/base/latest/repos/standard/packages/ --ssl_verify=no
%packages
tar
gzip
sed
grep
gawk
perl
binutils
findutils
util-linux
lttng-ust
userspace-rcu
procps-ng
tzdata
ca-certificates
### Core FX
libicu
libunwind
iputils
zlib
krb5
libcurl
libopenssl
%end
%post
### Update /tmp privilege
chmod 777 /tmp
####################################
%end


@ -0,0 +1,18 @@
diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
+++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
@@ -2,4 +2,4 @@
Use the shared library, but some functions are only in
the static library, so try that secondarily. */
OUTPUT_FORMAT(elf32-littlearm)
-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) )
diff -u -r a/usr/lib/libpthread.so b/usr/lib/libpthread.so
--- a/usr/lib/libpthread.so 2016-12-30 23:00:19.408951841 +0900
+++ b/usr/lib/libpthread.so 2016-12-30 23:00:39.068951801 +0900
@@ -2,4 +2,4 @@
Use the shared library, but some functions are only in
the static library, so try that secondarily. */
OUTPUT_FORMAT(elf32-littlearm)
-GROUP ( /lib/libpthread.so.0 /usr/lib/libpthread_nonshared.a )
+GROUP ( libpthread.so.0 libpthread_nonshared.a )


@ -0,0 +1,137 @@
#!/usr/bin/env bash
set -e
__NDK_Version=r14
usage()
{
echo "Creates a toolchain and sysroot used for cross-compiling for Android."
echo ""
echo "Usage: $0 [BuildArch] [ApiLevel]"
echo ""
echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
echo ""
echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
echo "by setting the TOOLCHAIN_DIR environment variable"
echo ""
echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation,"
echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK."
echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.21-arm64. This file is to replace '/etc/os-release', which is not available for Android."
exit 1
}
__ApiLevel=21 # The minimum platform for arm64 is API level 21
__BuildArch=arm64
__AndroidArch=aarch64
__AndroidToolchain=aarch64-linux-android
for i in "$@"
do
lowerI="$(echo $i | awk '{print tolower($0)}')"
case $lowerI in
-?|-h|--help)
usage
exit 1
;;
arm64)
__BuildArch=arm64
__AndroidArch=aarch64
__AndroidToolchain=aarch64-linux-android
;;
arm)
__BuildArch=arm
__AndroidArch=arm
__AndroidToolchain=arm-linux-androideabi
;;
*[0-9])
__ApiLevel=$i
;;
*)
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
;;
esac
done
# Obtain the location of the bash script to figure out where the root of the repo is.
__CrossDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
__Android_Cross_Dir="$__CrossDir/android-rootfs"
__NDK_Dir="$__Android_Cross_Dir/android-ndk-$__NDK_Version"
__libunwind_Dir="$__Android_Cross_Dir/libunwind"
__lldb_Dir="$__Android_Cross_Dir/lldb"
__ToolchainDir="$__Android_Cross_Dir/toolchain/$__BuildArch"
if [[ -n "$TOOLCHAIN_DIR" ]]; then
__ToolchainDir=$TOOLCHAIN_DIR
fi
if [[ -n "$NDK_DIR" ]]; then
__NDK_Dir=$NDK_DIR
fi
echo "Target API level: $__ApiLevel"
echo "Target architecture: $__BuildArch"
echo "NDK location: $__NDK_Dir"
echo "Target Toolchain location: $__ToolchainDir"
# Download the NDK if required
if [ ! -d $__NDK_Dir ]; then
echo Downloading the NDK into $__NDK_Dir
mkdir -p $__NDK_Dir
wget -nv -nc --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__Android_Cross_Dir/android-ndk-$__NDK_Version-linux-x86_64.zip
unzip -q $__Android_Cross_Dir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__Android_Cross_Dir
fi
if [ ! -d $__lldb_Dir ]; then
mkdir -p $__lldb_Dir
echo Downloading LLDB into $__lldb_Dir
wget -nv -nc --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O $__Android_Cross_Dir/lldb-2.3.3614996-linux-x86_64.zip
unzip -q $__Android_Cross_Dir/lldb-2.3.3614996-linux-x86_64.zip -d $__lldb_Dir
fi
# Create the RootFS for both arm64 as well as aarch
rm -rf $__Android_Cross_Dir/toolchain
echo Generating the $__BuildArch toolchain
$__NDK_Dir/build/tools/make_standalone_toolchain.py --arch $__BuildArch --api $__ApiLevel --install-dir $__ToolchainDir
# Install the required packages into the toolchain
# TODO: Add logic to get latest pkg version instead of specific version number
rm -rf $__Android_Cross_Dir/deb/
rm -rf $__Android_Cross_Dir/tmp
mkdir -p $__Android_Cross_Dir/deb/
mkdir -p $__Android_Cross_Dir/tmp/$arch/
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libicu_60.2_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libicu_60.2_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libicu-dev_60.2_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libicu-dev_60.2_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-glob-dev_0.4_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-glob-dev_0.4_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-glob_0.4_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-glob_0.4_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-support-dev_22_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-support-dev_22_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libandroid-support_22_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libandroid-support_22_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/liblzma-dev_5.2.3_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/liblzma-dev_5.2.3_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/liblzma_5.2.3_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/liblzma_5.2.3_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libunwind-dev_1.2.20170304_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libunwind-dev_1.2.20170304_$__AndroidArch.deb
wget -nv -nc http://termux.net/dists/stable/main/binary-$__AndroidArch/libunwind_1.2.20170304_$__AndroidArch.deb -O $__Android_Cross_Dir/deb/libunwind_1.2.20170304_$__AndroidArch.deb
echo Unpacking Termux packages
dpkg -x $__Android_Cross_Dir/deb/libicu_60.2_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libicu-dev_60.2_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libandroid-glob-dev_0.4_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libandroid-glob_0.4_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libandroid-support-dev_22_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libandroid-support_22_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/liblzma-dev_5.2.3_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/liblzma_5.2.3_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libunwind-dev_1.2.20170304_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
dpkg -x $__Android_Cross_Dir/deb/libunwind_1.2.20170304_$__AndroidArch.deb $__Android_Cross_Dir/tmp/$__AndroidArch/
cp -R $__Android_Cross_Dir/tmp/$__AndroidArch/data/data/com.termux/files/usr/* $__ToolchainDir/sysroot/usr/
# Generate platform file for build.sh script to assign to __DistroRid
echo "Generating platform file..."
echo "RID=android.21-arm64" > $__ToolchainDir/sysroot/android_platform
echo Now run:
echo CONFIG_DIR=\`realpath cross/android/$__BuildArch\` ROOTFS_DIR=\`realpath $__ToolchainDir/sysroot\` ./build.sh cross $__BuildArch skipgenerateversion skipnuget cmakeargs -DENABLE_LLDBPLUGIN=0
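
A typical run of this script might look like the following (illustrative sketch only; the script's own path is not shown in this diff view, so a placeholder is used, and NDK_DIR/TOOLCHAIN_DIR are the optional overrides checked above):

NDK_DIR=$HOME/android-ndk-r14 TOOLCHAIN_DIR=$HOME/android-toolchain/arm64 <path-to-this-script> arm64 21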

210
eng/common/cross/build-rootfs.sh Executable file

@ -0,0 +1,210 @@
#!/usr/bin/env bash
usage()
{
echo "Usage: $0 [BuildArch] [LinuxCodeName] [lldbx.y] [--skipunmount]"
echo "BuildArch can be: arm(default), armel, arm64, x86"
echo "LinuxCodeName - optional, Code name for Linux, can be: trusty(default), vivid, wily, xenial, zesty, bionic, alpine. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
echo "lldbx.y - optional, LLDB version, can be: lldb3.6(default), lldb3.8, lldb3.9, lldb4.0, no-lldb. Ignored for alpine"
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
exit 1
}
__LinuxCodeName=trusty
__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__InitialDir=$PWD
__BuildArch=arm
__UbuntuArch=armhf
__UbuntuRepo="http://ports.ubuntu.com/"
__LLDB_Package="lldb-3.6-dev"
__SkipUnmount=0
# base development support
__UbuntuPackages="build-essential"
__AlpinePackages="alpine-base"
__AlpinePackages+=" build-base"
__AlpinePackages+=" linux-headers"
__AlpinePackages+=" lldb-dev"
__AlpinePackages+=" llvm-dev"
# symlinks fixer
__UbuntuPackages+=" symlinks"
# CoreCLR and CoreFX dependencies
__UbuntuPackages+=" libicu-dev"
__UbuntuPackages+=" liblttng-ust-dev"
__UbuntuPackages+=" libunwind8-dev"
__AlpinePackages+=" gettext-dev"
__AlpinePackages+=" icu-dev"
__AlpinePackages+=" libunwind-dev"
__AlpinePackages+=" lttng-ust-dev"
# CoreFX dependencies
__UbuntuPackages+=" libcurl4-openssl-dev"
__UbuntuPackages+=" libkrb5-dev"
__UbuntuPackages+=" libssl-dev"
__UbuntuPackages+=" zlib1g-dev"
__AlpinePackages+=" curl-dev"
__AlpinePackages+=" krb5-dev"
__AlpinePackages+=" openssl-dev"
__AlpinePackages+=" zlib-dev"
__UnprocessedBuildArgs=
for i in "$@" ; do
lowerI="$(echo $i | awk '{print tolower($0)}')"
case $lowerI in
-?|-h|--help)
usage
exit 1
;;
arm)
__BuildArch=arm
__UbuntuArch=armhf
__AlpineArch=armhf
__QEMUArch=arm
;;
arm64)
__BuildArch=arm64
__UbuntuArch=arm64
__AlpineArch=aarch64
__QEMUArch=aarch64
;;
armel)
__BuildArch=armel
__UbuntuArch=armel
__UbuntuRepo="http://ftp.debian.org/debian/"
__LinuxCodeName=jessie
;;
x86)
__BuildArch=x86
__UbuntuArch=i386
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
;;
lldb3.6)
__LLDB_Package="lldb-3.6-dev"
;;
lldb3.8)
__LLDB_Package="lldb-3.8-dev"
;;
lldb3.9)
__LLDB_Package="liblldb-3.9-dev"
;;
lldb4.0)
__LLDB_Package="liblldb-4.0-dev"
;;
no-lldb)
unset __LLDB_Package
;;
vivid)
if [ "$__LinuxCodeName" != "jessie" ]; then
__LinuxCodeName=vivid
fi
;;
wily)
if [ "$__LinuxCodeName" != "jessie" ]; then
__LinuxCodeName=wily
fi
;;
xenial)
if [ "$__LinuxCodeName" != "jessie" ]; then
__LinuxCodeName=xenial
fi
;;
zesty)
if [ "$__LinuxCodeName" != "jessie" ]; then
__LinuxCodeName=zesty
fi
;;
bionic)
if [ "$__LinuxCodeName" != "jessie" ]; then
__LinuxCodeName=bionic
fi
;;
jessie)
__LinuxCodeName=jessie
__UbuntuRepo="http://ftp.debian.org/debian/"
;;
tizen)
if [ "$__BuildArch" != "armel" ]; then
echo "Tizen is available only for armel."
usage;
exit 1;
fi
__LinuxCodeName=
__UbuntuRepo=
__Tizen=tizen
;;
alpine)
__LinuxCodeName=alpine
__UbuntuRepo=
;;
--skipunmount)
__SkipUnmount=1
;;
*)
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
;;
esac
done
if [ "$__BuildArch" == "armel" ]; then
__LLDB_Package="lldb-3.5-dev"
fi
__UbuntuPackages+=" ${__LLDB_Package:-}"
__RootfsDir="$__CrossDir/rootfs/$__BuildArch"
if [[ -n "$ROOTFS_DIR" ]]; then
__RootfsDir=$ROOTFS_DIR
fi
if [ -d "$__RootfsDir" ]; then
if [ $__SkipUnmount == 0 ]; then
umount $__RootfsDir/*
fi
rm -rf $__RootfsDir
fi
if [[ "$__LinuxCodeName" == "alpine" ]]; then
__ApkToolsVersion=2.9.1
__AlpineVersion=3.7
__ApkToolsDir=$(mktemp -d)
wget https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -P $__ApkToolsDir
tar -xf $__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -C $__ApkToolsDir
mkdir -p $__RootfsDir/usr/bin
cp -v /usr/bin/qemu-$__QEMUArch-static $__RootfsDir/usr/bin
$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main \
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community \
-X http://dl-cdn.alpinelinux.org/alpine/edge/testing \
-U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
add $__AlpinePackages
rm -r $__ApkToolsDir
elif [[ -n $__LinuxCodeName ]]; then
qemu-debootstrap --arch $__UbuntuArch $__LinuxCodeName $__RootfsDir $__UbuntuRepo
cp $__CrossDir/$__BuildArch/sources.list.$__LinuxCodeName $__RootfsDir/etc/apt/sources.list
chroot $__RootfsDir apt-get update
chroot $__RootfsDir apt-get -f -y install
chroot $__RootfsDir apt-get -y install $__UbuntuPackages
chroot $__RootfsDir symlinks -cr /usr
if [ $__SkipUnmount == 0 ]; then
umount $__RootfsDir/*
fi
if [[ "$__BuildArch" == "arm" && "$__LinuxCodeName" == "trusty" ]]; then
pushd $__RootfsDir
patch -p1 < $__CrossDir/$__BuildArch/trusty.patch
patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch
popd
fi
elif [ "$__Tizen" == "tizen" ]; then
ROOTFS_DIR=$__RootfsDir $__CrossDir/$__BuildArch/tizen-build-rootfs.sh
else
echo "Unsupported target platform."
usage;
exit 1
fi
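For orientation, a couple of hedged invocation sketches for the rootfs script above; the use of sudo and the ROOTFS_DIR value are assumptions based on the usage text, not part of this commit.
# Build an Ubuntu 16.04 (xenial) rootfs for arm64 with the default LLDB package (root privileges assumed for debootstrap/chroot):
sudo ./eng/common/cross/build-rootfs.sh arm64 xenial
# Build an Alpine rootfs for arm64; --skipunmount skips unmounting an existing rootfs folder first:
sudo ./eng/common/cross/build-rootfs.sh arm64 alpine --skipunmount
# Place the rootfs somewhere other than eng/common/cross/rootfs/<arch>:
ROOTFS_DIR=/opt/rootfs/arm64 sudo -E ./eng/common/cross/build-rootfs.sh arm64 xenial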


@ -0,0 +1,138 @@
set(CROSS_ROOTFS $ENV{ROOTFS_DIR})
set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH})
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_VERSION 1)
if(TARGET_ARCH_NAME STREQUAL "armel")
set(CMAKE_SYSTEM_PROCESSOR armv7l)
set(TOOLCHAIN "arm-linux-gnueabi")
if("$ENV{__DistroRid}" MATCHES "tizen.*")
set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/6.2.1")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "arm")
set(CMAKE_SYSTEM_PROCESSOR armv7l)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
set(TOOLCHAIN "armv6-alpine-linux-musleabihf")
else()
set(TOOLCHAIN "arm-linux-gnueabihf")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "arm64")
set(CMAKE_SYSTEM_PROCESSOR aarch64)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl)
set(TOOLCHAIN "aarch64-alpine-linux-musl")
else()
set(TOOLCHAIN "aarch64-linux-gnu")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "x86")
set(CMAKE_SYSTEM_PROCESSOR i686)
set(TOOLCHAIN "i686-linux-gnu")
else()
message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only armel, arm, arm64 and x86 are supported!")
endif()
# Specify include paths
if(TARGET_ARCH_NAME STREQUAL "armel")
if(DEFINED TIZEN_TOOLCHAIN)
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi)
endif()
endif()
# add_compile_param - adds only new options without duplicates.
# arg0 - list with result options, arg1 - list with new options.
# arg2 - optional argument, a short summary string used when the result is stored with CACHE ... FORCE.
macro(add_compile_param)
if(NOT ${ARGC} MATCHES "^(2|3)$")
message(FATAL_ERROR "Wrong using add_compile_param! Two or three parameters must be given! See add_compile_param description.")
endif()
foreach(OPTION ${ARGV1})
if(NOT ${ARGV0} MATCHES "${OPTION}($| )")
set(${ARGV0} "${${ARGV0}} ${OPTION}")
if(${ARGC} EQUAL "3") # CACHE FORCE mode
set(${ARGV0} "${${ARGV0}}" CACHE STRING "${ARGV2}" FORCE)
endif()
endif()
endforeach()
endmacro()
# Specify link flags
add_compile_param(CROSS_LINK_FLAGS "--sysroot=${CROSS_ROOTFS}")
add_compile_param(CROSS_LINK_FLAGS "--gcc-toolchain=${CROSS_ROOTFS}/usr")
add_compile_param(CROSS_LINK_FLAGS "--target=${TOOLCHAIN}")
add_compile_param(CROSS_LINK_FLAGS "-fuse-ld=gold")
if(TARGET_ARCH_NAME STREQUAL "armel")
if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only
add_compile_param(CROSS_LINK_FLAGS "-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/lib")
add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib")
add_compile_param(CROSS_LINK_FLAGS "-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "x86")
add_compile_param(CROSS_LINK_FLAGS "-m32")
endif()
add_compile_param(CMAKE_EXE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
add_compile_param(CMAKE_SHARED_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
add_compile_param(CMAKE_MODULE_LINKER_FLAGS "${CROSS_LINK_FLAGS}" "TOOLCHAIN_EXE_LINKER_FLAGS")
# Specify compile options
add_compile_options("--sysroot=${CROSS_ROOTFS}")
add_compile_options("--target=${TOOLCHAIN}")
add_compile_options("--gcc-toolchain=${CROSS_ROOTFS}/usr")
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$")
set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
endif()
if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
add_compile_options(-mthumb)
add_compile_options(-mfpu=vfpv3)
if(TARGET_ARCH_NAME STREQUAL "armel")
add_compile_options(-mfloat-abi=softfp)
if(DEFINED TIZEN_TOOLCHAIN)
add_compile_options(-Wno-deprecated-declarations) # compile-time option
add_compile_options(-D__extern_always_inline=inline) # compile-time option
endif()
endif()
elseif(TARGET_ARCH_NAME STREQUAL "x86")
add_compile_options(-m32)
add_compile_options(-Wno-error=unused-command-line-argument)
endif()
# Set LLDB include and library paths
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$")
if(TARGET_ARCH_NAME STREQUAL "x86")
set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}")
else() # arm/armel case
set(LLVM_CROSS_DIR "$ENV{LLVM_ARM_HOME}")
endif()
if(LLVM_CROSS_DIR)
set(WITH_LLDB_LIBS "${LLVM_CROSS_DIR}/lib/" CACHE STRING "")
set(WITH_LLDB_INCLUDES "${LLVM_CROSS_DIR}/include" CACHE STRING "")
set(LLDB_H "${WITH_LLDB_INCLUDES}" CACHE STRING "")
set(LLDB "${LLVM_CROSS_DIR}/lib/liblldb.so" CACHE STRING "")
else()
if(TARGET_ARCH_NAME STREQUAL "x86")
set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/i386-linux-gnu" CACHE STRING "")
set(CHECK_LLVM_DIR "${CROSS_ROOTFS}/usr/lib/llvm-3.8/include")
if(EXISTS "${CHECK_LLVM_DIR}" AND IS_DIRECTORY "${CHECK_LLVM_DIR}")
set(WITH_LLDB_INCLUDES "${CHECK_LLVM_DIR}")
else()
set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include")
endif()
else() # arm/armel case
set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}" CACHE STRING "")
set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include" CACHE STRING "")
endif()
endif()
endif()
set(CMAKE_FIND_ROOT_PATH "${CROSS_ROOTFS}")
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
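A hedged sketch of consuming the CMake toolchain file above; the toolchain file path and project layout are assumptions, while ROOTFS_DIR and TARGET_BUILD_ARCH are the environment variables the file actually reads.
# Point CROSS_ROOTFS at a rootfs produced by build-rootfs.sh and pick the target architecture:
export ROOTFS_DIR="$(realpath eng/common/cross/rootfs/arm64)"
export TARGET_BUILD_ARCH=arm64
# Configure and build out-of-tree with the cross toolchain (hypothetical path to the file):
cmake -S . -B artifacts/arm64 -DCMAKE_TOOLCHAIN_FILE=eng/common/cross/toolchain.cmake
cmake --build artifacts/arm64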


@ -0,0 +1,11 @@
deb http://archive.ubuntu.com/ubuntu/ bionic main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ bionic main restricted universe
deb http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe
deb http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted
deb-src http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted
deb http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse
deb-src http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://archive.ubuntu.com/ubuntu/ trusty main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ trusty main restricted universe
deb http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ trusty-updates main restricted universe
deb http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted
deb-src http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted
deb http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse
deb-src http://archive.ubuntu.com/ubuntu/ trusty-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://archive.ubuntu.com/ubuntu/ vivid main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ vivid main restricted universe
deb http://archive.ubuntu.com/ubuntu/ vivid-updates main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ vivid-updates main restricted universe
deb http://archive.ubuntu.com/ubuntu/ vivid-backports main restricted
deb-src http://archive.ubuntu.com/ubuntu/ vivid-backports main restricted
deb http://archive.ubuntu.com/ubuntu/ vivid-security main restricted universe multiverse
deb-src http://archive.ubuntu.com/ubuntu/ vivid-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://archive.ubuntu.com/ubuntu/ wily main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ wily main restricted universe
deb http://archive.ubuntu.com/ubuntu/ wily-updates main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ wily-updates main restricted universe
deb http://archive.ubuntu.com/ubuntu/ wily-backports main restricted
deb-src http://archive.ubuntu.com/ubuntu/ wily-backports main restricted
deb http://archive.ubuntu.com/ubuntu/ wily-security main restricted universe multiverse
deb-src http://archive.ubuntu.com/ubuntu/ wily-security main restricted universe multiverse


@ -0,0 +1,11 @@
deb http://archive.ubuntu.com/ubuntu/ xenial main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ xenial main restricted universe
deb http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe
deb http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted
deb-src http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted
deb http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse
deb-src http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse

eng/common/darc-init.ps1 Normal file

@ -0,0 +1,21 @@
$verbosity = "m"
. $PSScriptRoot\tools.ps1
function InstallDarcCli {
$darcCliPackageName = "microsoft.dotnet.darc"
$dotnet = "$env:DOTNET_INSTALL_DIR\dotnet.exe"
$toolList = Invoke-Expression "& `"$dotnet`" tool list -g"
if ($toolList -like "*$darcCliPackageName*") {
Invoke-Expression "& `"$dotnet`" tool uninstall $darcCliPackageName -g"
}
$toolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
Write-Host "Installing Darc CLI version $toolsetVersion..."
Write-Host "You may need to restart your command window if this is the first dotnet tool you have installed."
Invoke-Expression "& `"$dotnet`" tool install $darcCliPackageName --version $toolsetVersion -v $verbosity -g"
}
InitializeTools
InstallDarcCli

eng/common/darc-init.sh Executable file

@ -0,0 +1,35 @@
#!/usr/bin/env bash
source="${BASH_SOURCE[0]}"
# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
verbosity=m
. "$scriptroot/tools.sh"
function InstallDarcCli {
local darc_cli_package_name="microsoft.dotnet.darc"
local uninstall_command=`$DOTNET_INSTALL_DIR/dotnet tool uninstall $darc_cli_package_name -g`
local tool_list=$($DOTNET_INSTALL_DIR/dotnet tool list -g)
if [[ $tool_list = *$darc_cli_package_name* ]]; then
echo $($DOTNET_INSTALL_DIR/dotnet tool uninstall $darc_cli_package_name -g)
fi
ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk"
local toolset_version=$_ReadGlobalVersion
echo "Installing Darc CLI version $toolset_version..."
echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
echo $($DOTNET_INSTALL_DIR/dotnet tool install $darc_cli_package_name --version $toolset_version -v $verbosity -g)
}
InitializeTools
InstallDarcCli
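For completeness, a hedged sketch of running the Unix darc bootstrapper; the follow-up command assumes the installed global tool exposes a darc entry point on PATH.
# Install the darc CLI as a global dotnet tool at the Arcade SDK version pinned in global.json:
./eng/common/darc-init.sh
# A new shell may be required the first time a global dotnet tool is installed:
darc --help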


@ -0,0 +1,15 @@
<Project Sdk="Microsoft.DotNet.Helix.Sdk">
<ItemGroup>
<HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
<PayloadDirectory>%(Identity)</PayloadDirectory>
</HelixCorrelationPayload>
</ItemGroup>
<ItemGroup>
<HelixWorkItem Include="WorkItem">
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
<Command>$(WorkItemCommand)</Command>
</HelixWorkItem>
</ItemGroup>
</Project>
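A hedged sketch of driving the Helix project above; the file name helix.proj, the /t:Test entry point, and the omission of the queue/source settings the Helix SDK normally needs are all assumptions for illustration.
dotnet msbuild helix.proj /t:Test \
  /p:CorrelationPayloadDirectory="$PWD/artifacts/payload" \
  /p:WorkItemDirectory="$PWD/artifacts/workitem" \
  /p:WorkItemCommand="echo hello from a Helix work item"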


@ -0,0 +1,3 @@
@echo off
powershell -NoProfile -NoLogo -ExecutionPolicy ByPass -command "& """%~dp0init-tools-native.ps1""" %*"
exit /b %ErrorLevel%


@ -0,0 +1,128 @@
<#
.SYNOPSIS
Entry point script for installing native tools
.DESCRIPTION
Reads $RepoRoot\global.json file to determine native assets to install
and executes installers for those tools
.PARAMETER BaseUri
Base file directory or Url from which to acquire tool archives
.PARAMETER InstallDirectory
Directory to install native toolset. This is a command-line override for the default
Install directory precedence order:
- InstallDirectory command-line override
- NETCOREENG_INSTALL_DIRECTORY environment variable
- (default) %USERPROFILE%/.netcoreeng/native
.PARAMETER Clean
Switch specifying not to install anything, but to clean up native asset folders
.PARAMETER Force
Clean and then install tools
.PARAMETER DownloadRetries
Total number of retry attempts
.PARAMETER RetryWaitTimeInSeconds
Wait time between retry attempts in seconds
.PARAMETER GlobalJsonFile
File path to global.json file
.NOTES
#>
[CmdletBinding(PositionalBinding=$false)]
Param (
[string] $BaseUri = "https://netcorenativeassets.blob.core.windows.net/resource-packages/external",
[string] $InstallDirectory,
[switch] $Clean = $False,
[switch] $Force = $False,
[int] $DownloadRetries = 5,
[int] $RetryWaitTimeInSeconds = 30,
[string] $GlobalJsonFile = "$PSScriptRoot\..\..\global.json"
)
Set-StrictMode -version 2.0
$ErrorActionPreference="Stop"
Import-Module -Name (Join-Path $PSScriptRoot "native\CommonLibrary.psm1")
try {
# Define verbose switch if undefined
$Verbose = $VerbosePreference -Eq "Continue"
$EngCommonBaseDir = Join-Path $PSScriptRoot "native\"
$NativeBaseDir = $InstallDirectory
if (!$NativeBaseDir) {
$NativeBaseDir = CommonLibrary\Get-NativeInstallDirectory
}
$Env:CommonLibrary_NativeInstallDir = $NativeBaseDir
$InstallBin = Join-Path $NativeBaseDir "bin"
$InstallerPath = Join-Path $EngCommonBaseDir "install-tool.ps1"
# Process tools list
Write-Host "Processing $GlobalJsonFile"
If (-Not (Test-Path $GlobalJsonFile)) {
Write-Host "Unable to find '$GlobalJsonFile'"
exit 0
}
$NativeTools = Get-Content($GlobalJsonFile) -Raw |
ConvertFrom-Json |
Select-Object -Expand "native-tools" -ErrorAction SilentlyContinue
if ($NativeTools) {
$NativeTools.PSObject.Properties | ForEach-Object {
$ToolName = $_.Name
$ToolVersion = $_.Value
$LocalInstallerCommand = $InstallerPath
$LocalInstallerCommand += " -ToolName $ToolName"
$LocalInstallerCommand += " -InstallPath $InstallBin"
$LocalInstallerCommand += " -BaseUri $BaseUri"
$LocalInstallerCommand += " -CommonLibraryDirectory $EngCommonBaseDir"
$LocalInstallerCommand += " -Version $ToolVersion"
if ($Verbose) {
$LocalInstallerCommand += " -Verbose"
}
if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') {
if($Force) {
$LocalInstallerCommand += " -Force"
}
}
if ($Clean) {
$LocalInstallerCommand += " -Clean"
}
Write-Verbose "Installing $ToolName version $ToolVersion"
Write-Verbose "Executing '$LocalInstallerCommand'"
Invoke-Expression "$LocalInstallerCommand"
if ($LASTEXITCODE -Ne "0") {
Write-Error "Execution failed"
exit 1
}
}
}
else {
Write-Host "No native tools defined in global.json"
exit 0
}
if ($Clean) {
exit 0
}
if (Test-Path $InstallBin) {
Write-Host "Native tools are available from" (Convert-Path -Path $InstallBin)
Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)"
}
else {
Write-Error "Native tools install directory does not exist, installation failed"
exit 1
}
exit 0
}
catch {
Write-Host $_
Write-Host $_.Exception
exit 1
}
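To make the global.json contract concrete, a hedged example of the native-tools section the script looks for, plus a Windows invocation that mirrors the .cmd wrapper shown earlier; the cmake version number and paths are illustrative only.
Fragment expected in the repository's global.json (tool name mapped to version):
  "native-tools": {
    "cmake": "3.11.1"
  }
Invocation from a Windows shell (mirroring the wrapper above):
  powershell -NoProfile -ExecutionPolicy ByPass -File eng\common\init-tools-native.ps1 -InstallDirectory C:\tools\native -Force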

eng/common/init-tools-native.sh Executable file

@ -0,0 +1,145 @@
#!/usr/bin/env bash
source="${BASH_SOURCE[0]}"
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
base_uri='https://netcorenativeassets.blob.core.windows.net/resource-packages/external'
install_directory=''
clean=false
force=false
download_retries=5
retry_wait_time_seconds=30
global_json_file="${scriptroot}/../../global.json"
declare -A native_assets
. $scriptroot/native/common-library.sh
while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
case $lowerI in
--baseuri)
base_uri=$2
shift 2
;;
--installdirectory)
install_directory=$2
shift 2
;;
--clean)
clean=true
shift 1
;;
--force)
force=true
shift 1
;;
--downloadretries)
download_retries=$2
shift 2
;;
--retrywaittimeseconds)
retry_wait_time_seconds=$2
shift 2
;;
--help)
echo "Common settings:"
echo " --installdirectory Directory to install native toolset."
echo " This is a command-line override for the default"
echo " Install directory precedence order:"
echo " - InstallDirectory command-line override"
echo " - NETCOREENG_INSTALL_DIRECTORY environment variable"
echo " - (default) %USERPROFILE%/.netcoreeng/native"
echo ""
echo " --clean Switch specifying not to install anything, but cleanup native asset folders"
echo " --force Clean and then install tools"
echo " --help Print help and exit"
echo ""
echo "Advanced settings:"
echo " --baseuri <value> Base URI for where to download native tools from"
echo " --downloadretries <value> Number of times a download should be attempted"
echo " --retrywaittimeseconds <value> Wait time between download attempts"
echo ""
exit 0
;;
esac
done
function ReadGlobalJsonNativeTools {
# Get the native-tools section from the global.json.
local native_tools_section=$(cat $global_json_file | awk '/"native-tools"/,/}/')
# Only extract the contents of the object.
local native_tools_list=$(echo $native_tools_section | awk -F"[{}]" '{print $2}')
native_tools_list=${native_tools_list//[\" ]/}
native_tools_list=${native_tools_list//,/$'\n'}
local old_IFS=$IFS
while read -r line; do
# Lines are of the form: 'tool:version'
IFS=:
while read -r key value; do
native_assets[$key]=$value
done <<< "$line"
done <<< "$native_tools_list"
IFS=$old_IFS
return 0;
}
native_base_dir=$install_directory
if [[ -z $install_directory ]]; then
native_base_dir=$(GetNativeInstallDirectory)
fi
install_bin="${native_base_dir}/bin"
ReadGlobalJsonNativeTools
if [[ ${#native_assets[@]} -eq 0 ]]; then
echo "No native tools defined in global.json"
exit 0;
else
native_installer_dir="$scriptroot/native"
for tool in "${!native_assets[@]}"
do
tool_version=${native_assets[$tool]}
installer_name="install-$tool.sh"
installer_command="$native_installer_dir/$installer_name"
installer_command+=" --baseuri $base_uri"
installer_command+=" --installpath $install_bin"
installer_command+=" --version $tool_version"
if [[ $force = true ]]; then
installer_command+=" --force"
fi
if [[ $clean = true ]]; then
installer_command+=" --clean"
fi
echo "Installing $tool version $tool_version"
echo "Executing '$installer_command'"
$installer_command
if [[ $? != 0 ]]; then
echo "Execution Failed" >&2
exit 1
fi
done
fi
if [[ $clean = true ]]; then
exit 0
fi
if [[ -d $install_bin ]]; then
echo "Native tools are available from $install_bin"
if [[ -n "$BUILD_BUILDNUMBER" ]]; then
echo "##vso[task.prependpath]$install_bin"
fi
else
echo "Native tools install directory does not exist, installation failed" >&2
exit 1
fi
exit 0
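A few hedged invocation sketches for the shell variant above; the flag names come from its argument parser, and the custom directory is illustrative.
# Default install under $HOME/.netcoreeng/native/bin:
./eng/common/init-tools-native.sh
# Force a clean re-install into a custom location:
./eng/common/init-tools-native.sh --installdirectory "$HOME/toolcache/native" --force
# Remove previously installed native assets without installing anything:
./eng/common/init-tools-native.sh --clean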

eng/common/init-tools.ps1 Normal file

@ -0,0 +1,258 @@
# Initialize variables if they aren't already defined
$ci = if (Test-Path variable:ci) { $ci } else { $false }
$configuration = if (Test-Path variable:configuration) { $configuration } else { "Debug" }
$nodereuse = if (Test-Path variable:nodereuse) { $nodereuse } else { $true }
$prepareMachine = if (Test-Path variable:prepareMachine) { $prepareMachine } else { $false }
$restore = if (Test-Path variable:restore) { $restore } else { $true }
$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { "minimal" }
$warnaserror = if (Test-Path variable:warnaserror) { $warnaserror } else { $true }
set-strictmode -version 2.0
$ErrorActionPreference = "Stop"
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
function Create-Directory([string[]] $path) {
if (!(Test-Path $path)) {
New-Item -path $path -force -itemType "Directory" | Out-Null
}
}
function InitializeDotNetCli {
# Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
$env:DOTNET_MULTILEVEL_LOOKUP=0
# Disable first run since we do not need all ASP.NET packages restored.
$env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
# Source Build uses DotNetCoreSdkDir variable
if ($env:DotNetCoreSdkDir -ne $null) {
$env:DOTNET_INSTALL_DIR = $env:DotNetCoreSdkDir
}
# Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
# otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
if (($env:DOTNET_INSTALL_DIR -ne $null) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$($GlobalJson.tools.dotnet)"))) {
$dotnetRoot = $env:DOTNET_INSTALL_DIR
} else {
$dotnetRoot = Join-Path $RepoRoot ".dotnet"
$env:DOTNET_INSTALL_DIR = $dotnetRoot
if ($restore) {
InstallDotNetSdk $dotnetRoot $GlobalJson.tools.dotnet
}
}
return $dotnetRoot
}
function GetDotNetInstallScript([string] $dotnetRoot) {
$installScript = "$dotnetRoot\dotnet-install.ps1"
if (!(Test-Path $installScript)) {
Create-Directory $dotnetRoot
Invoke-WebRequest "https://dot.net/v1/dotnet-install.ps1" -OutFile $installScript
}
return $installScript
}
function InstallDotNetSdk([string] $dotnetRoot, [string] $version) {
$installScript = GetDotNetInstallScript $dotnetRoot
& $installScript -Version $version -InstallDir $dotnetRoot
if ($lastExitCode -ne 0) {
Write-Host "Failed to install dotnet cli (exit code '$lastExitCode')." -ForegroundColor Red
ExitWithExitCode $lastExitCode
}
}
function InitializeVisualStudioBuild {
$inVSEnvironment = !($env:VS150COMNTOOLS -eq $null) -and (Test-Path $env:VS150COMNTOOLS)
if ($inVSEnvironment) {
$vsInstallDir = Join-Path $env:VS150COMNTOOLS "..\.."
} else {
$vsInstallDir = LocateVisualStudio
$env:VS150COMNTOOLS = Join-Path $vsInstallDir "Common7\Tools\"
$env:VSSDK150Install = Join-Path $vsInstallDir "VSSDK\"
$env:VSSDKInstall = Join-Path $vsInstallDir "VSSDK\"
}
return $vsInstallDir;
}
function LocateVisualStudio {
$vswhereVersion = $GlobalJson.tools.vswhere
$toolsRoot = Join-Path $RepoRoot ".tools"
$vsWhereDir = Join-Path $toolsRoot "vswhere\$vswhereVersion"
$vsWhereExe = Join-Path $vsWhereDir "vswhere.exe"
if (!(Test-Path $vsWhereExe)) {
Create-Directory $vsWhereDir
Write-Host "Downloading vswhere"
Invoke-WebRequest "https://github.com/Microsoft/vswhere/releases/download/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
}
$vsInstallDir = & $vsWhereExe -latest -prerelease -property installationPath -requires Microsoft.Component.MSBuild -requires Microsoft.VisualStudio.Component.VSSDK -requires Microsoft.Net.Component.4.6.TargetingPack -requires Microsoft.VisualStudio.Component.Roslyn.Compiler -requires Microsoft.VisualStudio.Component.VSSDK
if ($lastExitCode -ne 0) {
Write-Host "Failed to locate Visual Studio (exit code '$lastExitCode')." -ForegroundColor Red
ExitWithExitCode $lastExitCode
}
return $vsInstallDir
}
function InitializeTools() {
$tools = $GlobalJson.tools
if ((Get-Member -InputObject $tools -Name "dotnet") -ne $null) {
$dotnetRoot = InitializeDotNetCli
# by default build with dotnet cli:
$script:buildDriver = Join-Path $dotnetRoot "dotnet.exe"
$script:buildArgs = "msbuild"
}
if ((Get-Member -InputObject $tools -Name "vswhere") -ne $null) {
$vsInstallDir = InitializeVisualStudioBuild
# Presence of vswhere.version indicates the repo needs to build using VS msbuild:
$script:buildDriver = Join-Path $vsInstallDir "MSBuild\15.0\Bin\msbuild.exe"
if ($ci) { $nodereuse = $false }
}
if ($buildDriver -eq $null) {
Write-Host "/global.json must either specify 'tools.dotnet' or 'tools.vswhere'." -ForegroundColor Red
ExitWithExitCode 1
}
InitializeToolSet $script:buildDriver $script:buildArgs
InitializeCustomToolset
}
function InitializeToolset([string] $buildDriver, [string]$buildArgs) {
$toolsetLocationFile = Join-Path $ToolsetDir "$ToolsetVersion.txt"
if (Test-Path $toolsetLocationFile) {
$path = Get-Content $toolsetLocationFile -TotalCount 1
if (Test-Path $path) {
$script:ToolsetBuildProj = $path
return
}
}
if (-not $restore) {
Write-Host "Toolset version $ToolsetVersion has not been restored."
ExitWithExitCode 1
}
$ToolsetRestoreLog = Join-Path $LogDir "ToolsetRestore.binlog"
$proj = Join-Path $ToolsetDir "restore.proj"
'<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' | Set-Content $proj
MSBuild $proj /t:__WriteToolsetLocation /clp:None /bl:$ToolsetRestoreLog /p:__ToolsetLocationOutputFile=$toolsetLocationFile
if ($lastExitCode -ne 0) {
Write-Host "Failed to restore toolset (exit code '$lastExitCode'). See log: $ToolsetRestoreLog" -ForegroundColor Red
ExitWithExitCode $lastExitCode
}
$path = Get-Content $toolsetLocationFile -TotalCount 1
if (!(Test-Path $path)) {
throw "Invalid toolset path: $path"
}
$script:ToolsetBuildProj = $path
}
function InitializeCustomToolset {
if (-not $restore) {
return
}
$script = Join-Path $EngRoot "RestoreToolset.ps1"
if (Test-Path $script) {
. $script
}
}
function ExitWithExitCode([int] $exitCode) {
if ($ci -and $prepareMachine) {
Stop-Processes
}
exit $exitCode
}
function Stop-Processes() {
Write-Host "Killing running build processes..."
Get-Process -Name "msbuild" -ErrorAction SilentlyContinue | Stop-Process
Get-Process -Name "dotnet" -ErrorAction SilentlyContinue | Stop-Process
Get-Process -Name "vbcscompiler" -ErrorAction SilentlyContinue | Stop-Process
}
function MsBuild() {
$msbuildArgs = "$buildArgs /m /nologo /clp:Summary /v:$verbosity"
$extraArgs = "$args"
if ($warnaserror) {
$msbuildArgs += " /warnaserror"
}
$msbuildArgs += " /nr:$nodereuse"
Write-Debug "`"$buildDriver`" $msbuildArgs $extraArgs"
Invoke-Expression "& `"$buildDriver`" $msbuildArgs $extraArgs"
return $lastExitCode
}
function InstallDarcCli {
$DarcCliPackageName = "microsoft.dotnet.darc"
$ToolList = Invoke-Expression "$buildDriver tool list -g"
if ($ToolList -like "*$DarcCliPackageName*") {
Invoke-Expression "$buildDriver tool uninstall $DarcCliPackageName -g"
}
Write-Host "Installing Darc CLI version $toolsetVersion..."
Write-Host "You may need to restart your command window if this is the first dotnet tool you have installed."
Invoke-Expression "$buildDriver tool install $DarcCliPackageName --version $toolsetVersion -v $verbosity -g"
}
try {
$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot "..\..")
$EngRoot = Resolve-Path (Join-Path $PSScriptRoot "..")
$ArtifactsDir = Join-Path $RepoRoot "artifacts"
$ToolsetDir = Join-Path $ArtifactsDir "toolset"
$LogDir = Join-Path (Join-Path $ArtifactsDir "log") $configuration
$TempDir = Join-Path (Join-Path $ArtifactsDir "tmp") $configuration
$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot "global.json") | ConvertFrom-Json
$ToolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
if ($env:NUGET_PACKAGES -eq $null) {
# Use local cache on CI to ensure deterministic build,
# use global cache in dev builds to avoid cost of downloading packages.
$env:NUGET_PACKAGES = if ($ci) { Join-Path $RepoRoot ".packages" }
else { Join-Path $env:UserProfile ".nuget\packages" }
}
Create-Directory $ToolsetDir
Create-Directory $LogDir
if ($ci) {
Create-Directory $TempDir
$env:TEMP = $TempDir
$env:TMP = $TempDir
}
InitializeTools
}
catch {
Write-Host $_
Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
ExitWithExitCode 1
}

eng/common/init-tools.sh Executable file

@ -0,0 +1,225 @@
#!/usr/bin/env bash
ci=${ci:-false}
configuration=${configuration:-'Debug'}
nodereuse=${nodereuse:-true}
prepare_machine=${prepare_machine:-false}
restore=${restore:-true}
verbosity=${verbosity:-'minimal'}
warnaserror=${warnaserror:-true}
repo_root="$scriptroot/../.."
eng_root="$scriptroot/.."
artifacts_dir="$repo_root/artifacts"
toolset_dir="$artifacts_dir/toolset"
log_dir="$artifacts_dir/log/$configuration"
temp_dir="$artifacts_dir/tmp/$configuration"
global_json_file="$repo_root/global.json"
build_driver=""
toolset_build_proj=""
# ReadVersionFromJson [json key]
function ReadGlobalVersion {
local key=$1
local unamestr="$(uname)"
local sedextended='-r'
if [[ "$unamestr" == 'Darwin' ]]; then
sedextended='-E'
fi;
local version="$(grep -m 1 "\"$key\"" $global_json_file | sed $sedextended 's/^ *//;s/.*: *"//;s/",?//')"
if [[ ! "$version" ]]; then
echo "Error: Cannot find \"$key\" in $global_json_file" >&2;
ExitWithExitCode 1
fi;
# return value
echo "$version"
}
toolset_version=`ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk"`
function InitializeDotNetCli {
# Disable first run since we want to control all package sources
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
# Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
export DOTNET_MULTILEVEL_LOOKUP=0
# Source Build uses DotNetCoreSdkDir variable
if [[ -n "$DotNetCoreSdkDir" ]]; then
export DOTNET_INSTALL_DIR="$DotNetCoreSdkDir"
fi
local dotnet_sdk_version=`ReadGlobalVersion "dotnet"`
local dotnet_root=""
# Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
# otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
if [[ -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
dotnet_root="$DOTNET_INSTALL_DIR"
else
dotnet_root="$repo_root/.dotnet"
export DOTNET_INSTALL_DIR="$dotnet_root"
if [[ "$restore" == true ]]; then
InstallDotNetSdk $dotnet_root $dotnet_sdk_version
fi
fi
build_driver="$dotnet_root/dotnet"
}
function InstallDotNetSdk {
local root=$1
local version=$2
local install_script=`GetDotNetInstallScript $root`
bash "$install_script" --version $version --install-dir $root
local lastexitcode=$?
if [[ $lastexitcode != 0 ]]; then
echo "Failed to install dotnet SDK (exit code '$lastexitcode')."
ExitWithExitCode $lastexitcode
fi
}
function GetDotNetInstallScript {
local root=$1
local install_script="$root/dotnet-install.sh"
if [[ ! -a "$install_script" ]]; then
mkdir -p "$root"
# Use curl if available, otherwise use wget
if command -v curl > /dev/null; then
curl "https://dot.net/v1/dotnet-install.sh" -sSL --retry 10 --create-dirs -o "$install_script"
else
wget -q -O "$install_script" "https://dot.net/v1/dotnet-install.sh"
fi
fi
# return value
echo "$install_script"
}
function InitializeToolset {
local toolset_location_file="$toolset_dir/$toolset_version.txt"
if [[ -a "$toolset_location_file" ]]; then
local path=`cat $toolset_location_file`
if [[ -a "$path" ]]; then
toolset_build_proj=$path
return
fi
fi
if [[ "$restore" != true ]]; then
echo "Toolset version $toolsetVersion has not been restored."
ExitWithExitCode 2
fi
local toolset_restore_log="$log_dir/ToolsetRestore.binlog"
local proj="$toolset_dir/restore.proj"
echo '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' > $proj
MSBuild "$proj /t:__WriteToolsetLocation /clp:None /bl:$toolset_restore_log /p:__ToolsetLocationOutputFile=$toolset_location_file"
local lastexitcode=$?
if [[ $lastexitcode != 0 ]]; then
echo "Failed to restore toolset (exit code '$lastexitcode'). See log: $toolset_restore_log"
ExitWithExitCode $lastexitcode
fi
toolset_build_proj=`cat $toolset_location_file`
if [[ ! -a "$toolset_build_proj" ]]; then
echo "Invalid toolset path: $toolset_build_proj"
ExitWithExitCode 3
fi
}
function InitializeCustomToolset {
local script="$eng_root/RestoreToolset.sh"
if [[ -a "$script" ]]; then
. "$script"
fi
}
function InitializeTools {
InitializeDotNetCli
InitializeToolset
InitializeCustomToolset
}
function ExitWithExitCode {
if [[ "$ci" == true && "$prepare_machine" == true ]]; then
StopProcesses
fi
exit $1
}
function StopProcesses {
echo "Killing running build processes..."
pkill -9 "dotnet"
pkill -9 "vbcscompiler"
}
function MSBuild {
local msbuildArgs="msbuild /m /nologo /clp:Summary /v:$verbosity"
local extraArgs="$@"
if [[ $warnaserror == true ]]; then
msbuildArgs="$msbuildArgs /warnaserror"
fi
msbuildArgs="$msbuildArgs /nr:$nodereuse"
#echo "$build_driver $msbuildArgs $extraArgs"
"$build_driver" $msbuildArgs $extraArgs
return $?
}
function InstallDarcCli {
local darc_cli_package_name="microsoft.dotnet.darc"
local uninstall_command=`$DOTNET_INSTALL_DIR/dotnet tool uninstall $darc_cli_package_name -g`
local tool_list=$($DOTNET_INSTALL_DIR/dotnet tool list -g)
if [[ $tool_list = *$darc_cli_package_name* ]]; then
echo $($DOTNET_INSTALL_DIR/dotnet tool uninstall $darc_cli_package_name -g)
fi
echo "Installing Darc CLI version $toolset_version..."
echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
echo $($DOTNET_INSTALL_DIR/dotnet tool install $darc_cli_package_name --version $toolset_version -v $verbosity -g)
}
# HOME may not be defined in some scenarios, but it is required by NuGet
if [[ -z $HOME ]]; then
export HOME="$repo_root/artifacts/.home/"
mkdir -p "$HOME"
fi
if [[ -z $NUGET_PACKAGES ]]; then
if [[ $ci == true ]]; then
export NUGET_PACKAGES="$repo_root/.packages"
else
export NUGET_PACKAGES="$HOME/.nuget/packages"
fi
fi
mkdir -p "$toolset_dir"
mkdir -p "$log_dir"
if [[ $ci == true ]]; then
mkdir -p "$temp_dir"
export TEMP="$temp_dir"
export TMP="$temp_dir"
fi
InitializeTools
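A hedged sketch of how a thin wrapper might consume init-tools.sh; it assumes the wrapper lives next to the script in eng/common and that MyProject.csproj is a placeholder, since the script expects $scriptroot to be set before it is sourced and runs InitializeTools itself as its last step.
#!/usr/bin/env bash
scriptroot="$( cd -P "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
. "$scriptroot/init-tools.sh"
# After sourcing, the restored toolset and the MSBuild helper are available:
MSBuild "MyProject.csproj /t:Build /p:Configuration=$configuration"
ExitWithExitCode $?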

eng/common/msbuild.ps1 Normal file

@ -0,0 +1,23 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
[string] $verbosity = "minimal",
[bool] $warnaserror = $true,
[bool] $nodereuse = $true,
[switch] $ci,
[switch] $prepareMachine,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs
)
. $PSScriptRoot\tools.ps1
try {
InitializeTools
MSBuild @extraArgs
ExitWithExitCode $lastExitCode
}
catch {
Write-Host $_
Write-Host $_.Exception
Write-Host $_.ScriptStackTrace
ExitWithExitCode 1
}

eng/common/msbuild.sh Executable file

@ -0,0 +1,55 @@
#!/usr/bin/env bash
source="${BASH_SOURCE[0]}"
# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
verbosity='minimal'
warnaserror=true
nodereuse=true
prepare_machine=false
extraargs=''
while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
case $lowerI in
--verbosity)
verbosity=$2
shift 2
;;
--warnaserror)
warnaserror=$2
shift 2
;;
--nodereuse)
nodereuse=$2
shift 2
;;
--ci)
ci=true
shift 1
;;
--preparemachine)
prepare_machine=true
shift 1
;;
*)
extraargs="$extraargs $1"
shift 1
;;
esac
done
. "$scriptroot/tools.sh"
InitializeTools
MSBuild $extraargs
ExitWithExitCode $?
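A hedged usage sketch for the wrapper above; the project file name is a placeholder.
# Restore tools and run MSBuild with warnings-as-errors disabled; everything after the known flags is passed through:
./eng/common/msbuild.sh --verbosity minimal --warnaserror false MyProject.csproj /t:Pack /p:Configuration=Release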


@ -0,0 +1,358 @@
<#
.SYNOPSIS
Helper module to install an archive to a directory
.DESCRIPTION
Helper module to download and extract an archive to a specified directory
.PARAMETER Uri
Uri of artifact to download
.PARAMETER InstallDirectory
Directory to extract artifact contents to
.PARAMETER Force
Force download / extraction if file or contents already exist. Default = False
.PARAMETER DownloadRetries
Total number of retry attempts. Default = 5
.PARAMETER RetryWaitTimeInSeconds
Wait time between retry attempts in seconds. Default = 30
.NOTES
Returns False if download or extraction fail, True otherwise
#>
function DownloadAndExtract {
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $Uri,
[Parameter(Mandatory=$True)]
[string] $InstallDirectory,
[switch] $Force = $False,
[int] $DownloadRetries = 5,
[int] $RetryWaitTimeInSeconds = 30
)
# Define verbose switch if undefined
$Verbose = $VerbosePreference -Eq "Continue"
$TempToolPath = CommonLibrary\Get-TempPathFilename -Path $Uri
# Download native tool
$DownloadStatus = CommonLibrary\Get-File -Uri $Uri `
-Path $TempToolPath `
-DownloadRetries $DownloadRetries `
-RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
-Force:$Force `
-Verbose:$Verbose
if ($DownloadStatus -Eq $False) {
Write-Error "Download failed"
return $False
}
# Extract native tool
$UnzipStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath `
-OutputDirectory $InstallDirectory `
-Force:$Force `
-Verbose:$Verbose
if ($UnzipStatus -Eq $False) {
Write-Error "Unzip failed"
return $False
}
return $True
}
<#
.SYNOPSIS
Download a file, retry on failure
.DESCRIPTION
Download specified file and retry if attempt fails
.PARAMETER Uri
Uri of file to download. If Uri is a local path, the file will be copied instead of downloaded
.PARAMETER Path
Path to download or copy uri file to
.PARAMETER Force
Overwrite existing file if present. Default = False
.PARAMETER DownloadRetries
Total number of retry attempts. Default = 5
.PARAMETER RetryWaitTimeInSeconds
Wait time between retry attempts in seconds Default = 30
#>
function Get-File {
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $Uri,
[Parameter(Mandatory=$True)]
[string] $Path,
[int] $DownloadRetries = 5,
[int] $RetryWaitTimeInSeconds = 30,
[switch] $Force = $False
)
$Attempt = 0
if ($Force) {
if (Test-Path $Path) {
Remove-Item $Path -Force
}
}
if (Test-Path $Path) {
Write-Host "File '$Path' already exists, skipping download"
return $True
}
$DownloadDirectory = Split-Path -ErrorAction Ignore -Path "$Path" -Parent
if (-Not (Test-Path $DownloadDirectory)) {
New-Item -path $DownloadDirectory -force -itemType "Directory" | Out-Null
}
if (Test-Path -IsValid -Path $Uri) {
Write-Verbose "'$Uri' is a file path, copying file to '$Path'"
Copy-Item -Path $Uri -Destination $Path
return $?
}
else {
Write-Verbose "Downloading $Uri"
while($Attempt -Lt $DownloadRetries)
{
try {
Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $Path
Write-Verbose "Downloaded to '$Path'"
return $True
}
catch {
$Attempt++
if ($Attempt -Lt $DownloadRetries) {
$AttemptsLeft = $DownloadRetries - $Attempt
Write-Warning "Download failed, $AttemptsLeft attempts remaining, will retry in $RetryWaitTimeInSeconds seconds"
Start-Sleep -Seconds $RetryWaitTimeInSeconds
}
else {
Write-Error $_
Write-Error $_.Exception
}
}
}
}
return $False
}
<#
.SYNOPSIS
Generate a shim for a native tool
.DESCRIPTION
Creates a wrapper script (shim) that passes arguments forward to native tool assembly
.PARAMETER ShimName
The name of the shim
.PARAMETER ShimDirectory
The directory where shims are stored
.PARAMETER ToolFilePath
Path to file that shim forwards to
.PARAMETER Force
Replace shim if already present. Default = False
.NOTES
Returns $True if generating shim succeeds, $False otherwise
#>
function New-ScriptShim {
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $ShimName,
[Parameter(Mandatory=$True)]
[string] $ShimDirectory,
[Parameter(Mandatory=$True)]
[string] $ToolFilePath,
[Parameter(Mandatory=$True)]
[string] $BaseUri,
[switch] $Force
)
try {
Write-Verbose "Generating '$ShimName' shim"
if (-Not (Test-Path $ToolFilePath)){
Write-Error "Specified tool file path '$ToolFilePath' does not exist"
return $False
}
# WinShimmer is a small .NET Framework program that creates .exe shims to bootstrapped programs
# Many of the checks for installed programs expect a .exe extension for Windows tools, rather
# than a .bat or .cmd file.
# Source: https://github.com/dotnet/arcade/tree/master/src/WinShimmer
if (-Not (Test-Path "$ShimDirectory\WinShimmer\winshimmer.exe")) {
$InstallStatus = DownloadAndExtract -Uri "$BaseUri/windows/winshimmer/WinShimmer.zip" `
-InstallDirectory $ShimDirectory\WinShimmer `
-Force:$Force `
-DownloadRetries 2 `
-RetryWaitTimeInSeconds 5 `
-Verbose:$Verbose
}
if ((Test-Path (Join-Path $ShimDirectory "$ShimName.exe"))) {
Write-Host "$ShimName.exe already exists; replacing..."
Remove-Item (Join-Path $ShimDirectory "$ShimName.exe")
}
Invoke-Expression "$ShimDirectory\WinShimmer\winshimmer.exe $ShimName $ToolFilePath $ShimDirectory"
return $True
}
catch {
Write-Host $_
Write-Host $_.Exception
return $False
}
}
<#
.SYNOPSIS
Returns the machine architecture of the host machine
.NOTES
Returns 'x64' on 64 bit machines
Returns 'x86' on 32 bit machines
#>
function Get-MachineArchitecture {
$ProcessorArchitecture = $Env:PROCESSOR_ARCHITECTURE
$ProcessorArchitectureW6432 = $Env:PROCESSOR_ARCHITEW6432
if($ProcessorArchitecture -Eq "X86")
{
if(($ProcessorArchitectureW6432 -Eq "") -Or
($ProcessorArchitectureW6432 -Eq "X86")) {
return "x86"
}
$ProcessorArchitecture = $ProcessorArchitectureW6432
}
if (($ProcessorArchitecture -Eq "AMD64") -Or
($ProcessorArchitecture -Eq "IA64") -Or
($ProcessorArchitecture -Eq "ARM64")) {
return "x64"
}
return "x86"
}
<#
.SYNOPSIS
Get the name of a temporary folder under the native install directory
#>
function Get-TempDirectory {
return Join-Path (Get-NativeInstallDirectory) "temp/"
}
function Get-TempPathFilename {
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $Path
)
$TempDir = CommonLibrary\Get-TempDirectory
$TempFilename = Split-Path $Path -leaf
$TempPath = Join-Path $TempDir $TempFilename
return $TempPath
}
<#
.SYNOPSIS
Returns the base directory to use for native tool installation
.NOTES
Returns the value of the NETCOREENG_INSTALL_DIRECTORY if that environment variable
is set, or otherwise returns an install directory under the %USERPROFILE%
#>
function Get-NativeInstallDirectory {
$InstallDir = $Env:NETCOREENG_INSTALL_DIRECTORY
if (!$InstallDir) {
$InstallDir = Join-Path $Env:USERPROFILE ".netcoreeng/native/"
}
return $InstallDir
}
<#
.SYNOPSIS
Unzip an archive
.DESCRIPTION
Powershell module to unzip an archive to a specified directory
.PARAMETER ZipPath (Required)
Path to archive to unzip
.PARAMETER OutputDirectory (Required)
Output directory for archive contents
.PARAMETER Force
Overwrite output directory contents if they already exist
.NOTES
- Returns True and does not perform an extraction if the output directory already exists and Force is not set.
- Returns True if the unzip operation is successful
- Returns False if Force is set and the contents of OutputDirectory cannot be removed
- Returns False if unable to extract zip archive
#>
function Expand-Zip {
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $ZipPath,
[Parameter(Mandatory=$True)]
[string] $OutputDirectory,
[switch] $Force
)
Write-Verbose "Extracting '$ZipPath' to '$OutputDirectory'"
try {
if ((Test-Path $OutputDirectory) -And (-Not $Force)) {
Write-Host "Directory '$OutputDirectory' already exists, skipping extract"
return $True
}
if (Test-Path $OutputDirectory) {
Write-Verbose "'Force' is 'True', but '$OutputDirectory' exists, removing directory"
Remove-Item $OutputDirectory -Force -Recurse
if ($? -Eq $False) {
Write-Error "Unable to remove '$OutputDirectory'"
return $False
}
}
if (-Not (Test-Path $OutputDirectory)) {
New-Item -path $OutputDirectory -Force -itemType "Directory" | Out-Null
}
Add-Type -assembly "system.io.compression.filesystem"
[io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$OutputDirectory")
if ($? -Eq $False) {
Write-Error "Unable to extract '$ZipPath'"
return $False
}
}
catch {
Write-Host $_
Write-Host $_.Exception
return $False
}
return $True
}
export-modulemember -function DownloadAndExtract
export-modulemember -function Expand-Zip
export-modulemember -function Get-File
export-modulemember -function Get-MachineArchitecture
export-modulemember -function Get-NativeInstallDirectory
export-modulemember -function Get-TempDirectory
export-modulemember -function Get-TempPathFilename
export-modulemember -function New-ScriptShim


@ -0,0 +1,168 @@
#!/usr/bin/env bash
function GetNativeInstallDirectory {
local install_dir
if [[ -z $NETCOREENG_INSTALL_DIRECTORY ]]; then
install_dir=$HOME/.netcoreeng/native/
else
install_dir=$NETCOREENG_INSTALL_DIRECTORY
fi
echo $install_dir
return 0
}
function GetTempDirectory {
echo $(GetNativeInstallDirectory)temp/
return 0
}
function ExpandZip {
local zip_path=$1
local output_directory=$2
local force=${3:-false}
echo "Extracting $zip_path to $output_directory"
if [[ -d $output_directory ]] && [[ $force = false ]]; then
echo "Directory '$output_directory' already exists, skipping extract"
return 0
fi
if [[ -d $output_directory ]]; then
echo "'Force flag enabled, but '$output_directory' exists. Removing directory"
rm -rf $output_directory
if [[ $? != 0 ]]; then
echo "Unable to remove '$output_directory'" >&2
return 1
fi
fi
echo "Creating directory: '$output_directory'"
mkdir -p $output_directory
echo "Extracting archive"
tar -xf $zip_path -C $output_directory
if [[ $? != 0 ]]; then
echo "Unable to extract '$zip_path'" >&2
return 1
fi
return 0
}
function GetCurrentOS {
local unameOut="$(uname -s)"
case $unameOut in
Linux*) echo "Linux";;
Darwin*) echo "MacOS";;
esac
return 0
}
function GetFile {
local uri=$1
local path=$2
local force=${3:-false}
local download_retries=${4:-5}
local retry_wait_time_seconds=${5:-30}
if [[ -f $path ]]; then
if [[ $force = false ]]; then
echo "File '$path' already exists. Skipping download"
return 0
else
rm -rf $path
fi
fi
if [[ -f $uri ]]; then
echo "'$uri' is a file path, copying file to '$path'"
cp $uri $path
return $?
fi
echo "Downloading $uri"
# Use curl if available, otherwise use wget
if command -v curl > /dev/null; then
curl "$uri" -sSL --retry $download_retries --retry-delay $retry_wait_time_seconds --create-dirs -o "$path" --fail
else
wget -q -O "$path" "$uri" --tries="$download_retries"
fi
return $?
}
function GetTempPathFileName {
local path=$1
local temp_dir=$(GetTempDirectory)
local temp_file_name=$(basename $path)
echo $temp_dir$temp_file_name
return 0
}
function DownloadAndExtract {
local uri=$1
local installDir=$2
local force=${3:-false}
local download_retries=${4:-5}
local retry_wait_time_seconds=${5:-30}
local temp_tool_path=$(GetTempPathFileName $uri)
echo "downloading to: $temp_tool_path"
# Download file
GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds
if [[ $? != 0 ]]; then
echo "Failed to download '$uri' to '$temp_tool_path'." >&2
return 1
fi
# Extract File
echo "extracting from $temp_tool_path to $installDir"
ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds
if [[ $? != 0 ]]; then
echo "Failed to extract '$temp_tool_path' to '$installDir'." >&2
return 1
fi
return 0
}
function NewScriptShim {
local shimpath=$1
local tool_file_path=$2
local force=${3:-false}
echo "Generating '$shimpath' shim"
if [[ -f $shimpath ]]; then
if [[ $force = false ]]; then
echo "File '$shimpath' already exists." >&2
return 1
else
rm -rf $shimpath
fi
fi
if [[ ! -f $tool_file_path ]]; then
echo "Specified tool file path:'$tool_file_path' does not exist" >&2
return 1
fi
local shim_contents=$'#!/usr/bin/env bash\n'
shim_contents+="SHIMARGS="$'$1\n'
shim_contents+="$tool_file_path"$' $SHIMARGS\n'
# Write shim file
echo "$shim_contents" > $shimpath
chmod +x $shimpath
echo "Finished generating shim '$shimpath'"
return $?
}
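A hedged sketch of using the helpers above from another script under eng/common; the download URL and tool layout are illustrative only.
#!/usr/bin/env bash
# Source the shared helpers (path assumes the caller sits in eng/common):
. "$(dirname "${BASH_SOURCE[0]}")/native/common-library.sh"
install_root="$(GetNativeInstallDirectory)"
mkdir -p "${install_root}bin"
# Download and unpack an archive into the native tool cache, then shim its entry point:
DownloadAndExtract "https://example.org/downloads/sometool-1.0.0.tar.gz" "${install_root}sometool/1.0.0" false
NewScriptShim "${install_root}bin/sometool" "${install_root}sometool/1.0.0/bin/sometool" true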


@ -0,0 +1,119 @@
<#
.SYNOPSIS
Install cmake native tool
.DESCRIPTION
Install cmake native tool from Azure blob storage
.PARAMETER InstallPath
Base directory to install native tool to
.PARAMETER BaseUri
Base file directory or Url from which to acquire tool archives
.PARAMETER CommonLibraryDirectory
Path to folder containing common library modules
.PARAMETER Force
Force install of tools even if they previously exist
.PARAMETER Clean
Don't install the tool, just clean up the current install of the tool
.PARAMETER DownloadRetries
Total number of retry attempts
.PARAMETER RetryWaitTimeInSeconds
Wait time between retry attempts in seconds
.NOTES
Returns 0 if install succeeds, 1 otherwise
#>
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $InstallPath,
[Parameter(Mandatory=$True)]
[string] $BaseUri,
[Parameter(Mandatory=$True)]
[string] $Version,
[string] $CommonLibraryDirectory = $PSScriptRoot,
[switch] $Force = $False,
[switch] $Clean = $False,
[int] $DownloadRetries = 5,
[int] $RetryWaitTimeInSeconds = 30
)
# Import common library modules
Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1")
try {
# Define verbose switch if undefined
$Verbose = $VerbosePreference -Eq "Continue"
$ToolName = "cmake"
$Arch = CommonLibrary\Get-MachineArchitecture
$ToolOs = "win64"
if($Arch -Eq "x32") {
$ToolOs = "win32"
}
$ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch"
$ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\"
$ToolFilePath = Join-Path $ToolInstallDirectory "$ToolNameMoniker\bin\$ToolName.exe"
$ShimPath = Join-Path $InstallPath "$ToolName.cmd"
$Uri = "$BaseUri/windows/$Toolname/$ToolNameMoniker.zip"
if ($Clean) {
Write-Host "Cleaning $ToolInstallDirectory"
if (Test-Path $ToolInstallDirectory) {
Remove-Item $ToolInstallDirectory -Force -Recurse
}
Write-Host "Cleaning $ShimPath"
if (Test-Path $ShimPath) {
Remove-Item $ShimPath -Force
}
$ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri
Write-Host "Cleaning $ToolTempPath"
if (Test-Path $ToolTempPath) {
Remove-Item $ToolTempPath -Force
}
exit 0
}
# Install tool
if ((Test-Path $ToolFilePath) -And (-Not $Force)) {
Write-Verbose "$ToolName ($Version) already exists, skipping install"
}
else {
$InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri `
-InstallDirectory $ToolInstallDirectory `
-Force:$Force `
-DownloadRetries $DownloadRetries `
-RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
-Verbose:$Verbose
if ($InstallStatus -Eq $False) {
Write-Error "Installation failed"
exit 1
}
}
# Generate shim
# Always rewrite shims so that we are referencing the expected version
$GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimPath $ShimPath `
-ToolFilePath $ToolFilePath `
-Force `
-Verbose:$Verbose
if ($GenerateShimStatus -Eq $False) {
Write-Error "Generate shim failed"
return 1
}
exit 0
}
catch {
Write-Host $_
Write-Host $_.Exception
exit 1
}


@ -0,0 +1,117 @@
#!/usr/bin/env bash
source="${BASH_SOURCE[0]}"
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
. $scriptroot/common-library.sh
base_uri=
install_path=
version=
clean=false
force=false
download_retries=5
retry_wait_time_seconds=30
while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
case $lowerI in
--baseuri)
base_uri=$2
shift 2
;;
--installpath)
install_path=$2
shift 2
;;
--version)
version=$2
shift 2
;;
--clean)
clean=true
shift 1
;;
--force)
force=true
shift 1
;;
--downloadretries)
download_retries=$2
shift 2
;;
--retrywaittimeseconds)
retry_wait_time_seconds=$2
shift 2
;;
--help)
echo "Common settings:"
echo " --baseuri <value> Base file directory or Url wrom which to acquire tool archives"
echo " --installpath <value> Base directory to install native tool to"
echo " --clean Don't install the tool, just clean up the current install of the tool"
echo " --force Force install of tools even if they previously exist"
echo " --help Print help and exit"
echo ""
echo "Advanced settings:"
echo " --downloadretries Total number of retry attempts"
echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
echo ""
exit 0
;;
esac
done
tool_name="cmake"
tool_os=$(GetCurrentOS)
tool_folder=$(echo $tool_os | awk '{print tolower($0)}')
tool_arch="x86_64"
tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
tool_install_directory="$install_path/$tool_name/$version"
tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
shim_path="$install_path/$tool_name.sh"
uri="${base_uri}/$tool_folder/cmake/$tool_name_moniker.tar.gz"
# Clean up tool and installers
if [[ $clean = true ]]; then
echo "Cleaning $tool_install_directory"
if [[ -d $tool_install_directory ]]; then
rm -rf $tool_install_directory
fi
echo "Cleaning $shim_path"
if [[ -f $shim_path ]]; then
rm -rf $shim_path
fi
tool_temp_path=$(GetTempPathFileName $uri)
echo "Cleaning $tool_temp_path"
if [[ -f $tool_temp_path ]]; then
rm -rf $tool_temp_path
fi
exit 0
fi
# Install tool
if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
echo "$tool_name ($version) already exists, skipping install"
exit 0
fi
DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds
if [[ $? != 0 ]]; then
echo "Installation failed" >&2
exit 1
fi
# Generate Shim
# Always rewrite shims so that we are referencing the expected version
NewScriptShim $shim_path $tool_file_path true
if [[ $? != 0 ]]; then
echo "Shim generation failed" >&2
exit 1
fi
exit 0
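A hedged invocation sketch for the cmake installer above; the version is illustrative, and the base URI matches the default used by init-tools-native.sh.
./eng/common/native/install-cmake.sh \
  --baseuri https://netcorenativeassets.blob.core.windows.net/resource-packages/external \
  --installpath "$HOME/.netcoreeng/native/bin" \
  --version 3.11.1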


@ -0,0 +1,130 @@
<#
.SYNOPSIS
Install native tool
.DESCRIPTION
Install cmake native tool from Azure blob storage
.PARAMETER InstallPath
Base directory to install native tool to
.PARAMETER BaseUri
Base file directory or Url from which to acquire tool archives
.PARAMETER CommonLibraryDirectory
Path to folder containing common library modules
.PARAMETER Force
Force install of tools even if they previously exist
.PARAMETER Clean
Don't install the tool, just clean up the current install of the tool
.PARAMETER DownloadRetries
Total number of retry attempts
.PARAMETER RetryWaitTimeInSeconds
Wait time between retry attempts in seconds
.NOTES
Returns 0 if install succeeds, 1 otherwise
#>
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $ToolName,
[Parameter(Mandatory=$True)]
[string] $InstallPath,
[Parameter(Mandatory=$True)]
[string] $BaseUri,
[Parameter(Mandatory=$True)]
[string] $Version,
[string] $CommonLibraryDirectory = $PSScriptRoot,
[switch] $Force = $False,
[switch] $Clean = $False,
[int] $DownloadRetries = 5,
[int] $RetryWaitTimeInSeconds = 30
)
# Import common library modules
Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1")
try {
# Define verbose switch if undefined
$Verbose = $VerbosePreference -Eq "Continue"
$Arch = CommonLibrary\Get-MachineArchitecture
$ToolOs = "win64"
if($Arch -Eq "x32") {
$ToolOs = "win32"
}
$ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch"
$ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\"
$Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip"
$ShimPath = Join-Path $InstallPath "$ToolName.exe"
if ($Clean) {
Write-Host "Cleaning $ToolInstallDirectory"
if (Test-Path $ToolInstallDirectory) {
Remove-Item $ToolInstallDirectory -Force -Recurse
}
Write-Host "Cleaning $ShimPath"
if (Test-Path $ShimPath) {
Remove-Item $ShimPath -Force
}
$ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri
Write-Host "Cleaning $ToolTempPath"
if (Test-Path $ToolTempPath) {
Remove-Item $ToolTempPath -Force
}
exit 0
}
# Install tool
if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) {
Write-Verbose "$ToolName ($Version) already exists, skipping install"
}
else {
$InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri `
-InstallDirectory $ToolInstallDirectory `
-Force:$Force `
-DownloadRetries $DownloadRetries `
-RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
-Verbose:$Verbose
if ($InstallStatus -Eq $False) {
Write-Error "Installation failed"
exit 1
}
}
$ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName }
if (@($ToolFilePath).Length -Gt 1) {
Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))"
exit 1
} elseif (@($ToolFilePath).Length -Lt 1) {
Write-Error "$ToolName was not found in $ToolFilePath."
exit 1
}
# Generate shim
# Always rewrite shims so that we are referencing the expected version
$GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName `
-ShimDirectory $InstallPath `
-ToolFilePath "$ToolFilePath" `
-BaseUri $BaseUri `
-Force:$Force `
-Verbose:$Verbose
if ($GenerateShimStatus -Eq $False) {
Write-Error "Generate shim failed"
return 1
}
exit 0
}
catch {
Write-Host $_
Write-Host $_.Exception
exit 1
}
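For illustration, a hypothetical invocation of the script above; the tool name, version, install path, and base URI below are placeholder values, not taken from this repository, and the blob layout is assumed to match the $Uri pattern the script builds:

# Install a native tool into a repo-local folder and generate a shim next to it.
# All four of these parameters are mandatory; -Force re-downloads even if the
# version is already present, -Verbose surfaces verbose logging from the CommonLibrary helpers.
.\install-tool.ps1 `
    -ToolName "cmake" `
    -InstallPath "C:\src\myrepo\.tools\native" `
    -BaseUri "https://example.blob.core.windows.net/native-tools" `
    -Version "3.11.1" `
    -Verbose

# Remove that install and its shim again (-Clean short-circuits before the install step).
.\install-tool.ps1 -ToolName "cmake" -InstallPath "C:\src\myrepo\.tools\native" `
    -BaseUri "https://example.blob.core.windows.net/native-tools" -Version "3.11.1" -Clean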


@ -0,0 +1,108 @@
parameters:
# Optional: Clean sources before building
clean: true
# Optional: Git fetch depth
fetchDepth: ''
# Optional: name of the phase (not specifying phase name may cause name collisions)
name: ''
# Required: A defined YAML queue
queue: {}
# Required: build steps
steps: []
# Optional: variables
variables: {}
## Telemetry variables
# Optional: enable sending telemetry
# if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
# _HelixBuildConfig - differentiate between Debug, Release, other
# _HelixSource - Example: build/product
# _HelixType - Example: official/dotnet/arcade/$(Build.SourceBranch)
enableTelemetry: false
# Optional: Enable installing Microbuild plugin
# if 'true', these "variables" must be specified in the variables object or as part of the queue matrix
# _TeamName - the name of your team
# _SignType - 'test' or 'real'
enableMicrobuild: false
# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
# and some (Microbuild) should only be applied to non-PR cases for internal builds.
phases:
- phase: ${{ parameters.name }}
queue: ${{ parameters.queue }}
${{ if ne(parameters.variables, '') }}:
variables:
${{ insert }}: ${{ parameters.variables }}
steps:
- checkout: self
clean: ${{ parameters.clean }}
${{ if ne(parameters.fetchDepth, '') }}:
fetchDepth: ${{ parameters.fetchDepth }}
- ${{ if eq(parameters.enableTelemetry, 'true') }}:
- template: /eng/common/templates/steps/telemetry-start.yml
parameters:
buildConfig: $(_HelixBuildConfig)
helixSource: $(_HelixSource)
helixType: $(_HelixType)
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
# Internal only resource, and Microbuild signing shouldn't be applied to PRs.
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildSigningPlugin@2
displayName: Install MicroBuild plugin
inputs:
signType: $(_SignType)
zipSources: false
feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
env:
TeamName: $(_TeamName)
continueOnError: false
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
# Run provided build steps
- ${{ parameters.steps }}
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
# Internal only resources
- ${{ if ne(variables['System.TeamProject'], 'public') }}:
- task: MicroBuildCleanup@1
displayName: Execute Microbuild cleanup tasks
condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
env:
TeamName: $(_TeamName)
- ${{ if eq(parameters.enableTelemetry, 'true') }}:
- template: /eng/common/templates/steps/telemetry-end.yml
parameters:
helixSource: $(_HelixSource)
helixType: $(_HelixType)
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: CopyFiles@2
displayName: Gather Asset Manifests
inputs:
SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
continueOnError: false
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
- task: PublishBuildArtifacts@1
displayName: Push Asset Manifests
inputs:
PathtoPublish: '$(Build.StagingDirectory)/AssetManifests'
PublishLocation: Container
ArtifactName: AssetManifests
continueOnError: false
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))


@ -0,0 +1,26 @@
parameters:
dependsOn: ''
queue: {}
phases:
- phase: Push to B.A.R.
dependsOn: ${{ parameters.dependsOn }}
queue: ${{ parameters.queue }}
steps:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: DownloadBuildArtifacts@0
displayName: Download artifact
inputs:
artifactName: AssetManifests
downloadPath: '$(Build.StagingDirectory)/Download'
condition: succeeded()
- task: AzureKeyVault@1
inputs:
azureSubscription: 'DotNet-Engineering-Services_KeyVault'
KeyVaultName: EngKeyVault
SecretsFilter: 'MaestroAccessToken'
condition: succeeded()
- script: eng\common\publishbuildassets.cmd
/p:ManifestZipFilePath='$(Build.StagingDirectory)/Download/AssetManifests'
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
/p:MaestroApiEndpoint=https://maestro-int.westus2.cloudapp.azure.com
displayName: Push Build Assets


@ -0,0 +1,37 @@
parameters:
dependsOn: ''
queue: {}
configuration: 'Debug'
condition: succeeded()
continueOnError: false
phases:
- phase: Asset_Registry_Publish
displayName: Publish to Build Asset Registry
dependsOn: ${{ parameters.dependsOn }}
queue: ${{ parameters.queue }}
variables:
config: ${{ parameters.configuration }}
steps:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: DownloadBuildArtifacts@0
displayName: Download artifact
inputs:
artifactName: AssetManifests
downloadPath: '$(Build.StagingDirectory)/Download'
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- task: AzureKeyVault@1
inputs:
azureSubscription: 'DotNet-Engineering-Services_KeyVault'
KeyVaultName: EngKeyVault
SecretsFilter: 'MaestroAccessToken'
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- script: eng\common\publishbuildassets.cmd
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
/p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
/p:Configuration=$(config)
displayName: Publish Build Assets
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}


@ -0,0 +1,26 @@
parameters:
dependsOn: ''
queue: {}
phases:
- phase: Push to B.A.R.
dependsOn: ${{ parameters.dependsOn }}
queue: ${{ parameters.queue }}
steps:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: DownloadBuildArtifacts@0
displayName: Download artifact
inputs:
artifactName: AssetManifests
downloadPath: '$(Build.StagingDirectory)/Download'
condition: succeeded()
- task: AzureKeyVault@1
inputs:
azureSubscription: 'DotNet-Engineering-Services_KeyVault'
KeyVaultName: EngKeyVault
SecretsFilter: 'MaestroAccessToken'
condition: succeeded()
- script: eng\common\pushbuildassets.cmd
/p:ManifestZipFilePath='$(Build.StagingDirectory)/Download/AssetManifests'
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
/p:MaestroApiEndpoint=https://maestro-int.westus2.cloudapp.azure.com
displayName: Push Build Assets


@ -0,0 +1,12 @@
# build-reason.yml
# Description: runs the given steps only when the current build reason matches the 'conditions' parameter.
# 'conditions' is a comma-separated string of build reasons for which the steps should run; prefix it with 'not' to invert the match.
parameters:
conditions: ''
steps: []
steps:
- ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
- ${{ parameters.steps }}
- ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
- ${{ parameters.steps }}


@ -0,0 +1,44 @@
parameters:
HelixSource: 'pr/dotnet-github-anon-kaonashi-bot'
HelixType: 'tests/default'
HelixBuild: $(Build.BuildNumber)
HelixTargetQueues: ''
HelixAccessToken: ''
HelixPreCommands: ''
HelixPostCommands: ''
WorkItemDirectory: ''
WorkItemCommand: ''
CorrelationPayloadDirectory: ''
IncludeDotNetCli: false
DotNetCliPackageType: ''
DotNetCliVersion: ''
EnableXUnitReporter: false
WaitForWorkItemCompletion: true
condition: succeeded()
continueOnError: false
steps:
- task: DotNetCoreCLI@2
inputs:
command: custom
projects: eng/common/helixpublish.proj
custom: msbuild
arguments: '/t:test /p:Language=msbuild'
displayName: Send job to Helix
env:
HelixSource: ${{ parameters.HelixSource }}
HelixType: ${{ parameters.HelixType }}
HelixBuild: ${{ parameters.HelixBuild }}
HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
HelixAccessToken: ${{ parameters.HelixAccessToken }}
HelixPreCommands: ${{ parameters.HelixPreCommands }}
HelixPostCommands: ${{ parameters.HelixPostCommands }}
WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
WorkItemCommand: ${{ parameters.WorkItemCommand }}
IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
EnableXUnitReporter: ${{ parameters.EnableXUnitReporter }}
WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}


@ -0,0 +1,7 @@
parameters:
agentOs: ''
steps: []
steps:
- ${{ if ne(parameters.agentOs, 'Windows_NT') }}:
- ${{ parameters.steps }}


@ -0,0 +1,7 @@
parameters:
agentOs: ''
steps: []
steps:
- ${{ if eq(parameters.agentOs, 'Windows_NT') }}:
- ${{ parameters.steps }}


@ -0,0 +1,33 @@
parameters:
# if parameter1 equals parameter 2, run 'ifScript' command, else run 'elsescript' command
parameter1: ''
parameter2: ''
ifScript: ''
elseScript: ''
# name of script step
name: Script
# display name of script step
displayName: If-Equal-Else Script
# environment
env: {}
# conditional expression for step execution
condition: ''
steps:
- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}:
- script: ${{ parameters.ifScript }}
name: ${{ parameters.name }}
displayName: ${{ parameters.displayName }}
env: ${{ parameters.env }}
condition: ${{ parameters.condition }}
- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}:
- script: ${{ parameters.elseScript }}
name: ${{ parameters.name }}
displayName: ${{ parameters.displayName }}
env: ${{ parameters.env }}
condition: ${{ parameters.condition }}


@ -0,0 +1,67 @@
parameters:
helixSource: 'undefined_defaulted_in_telemetry.yml'
helixType: 'undefined_defaulted_in_telemetry.yml'
steps:
- bash: |
if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then
errorCount=0
else
errorCount=1
fi
warningCount=0
# create a temporary file for curl output
res=`mktemp`
curlResult=`
curl --verbose --output $res --write-out "%{http_code}"\
-H 'Content-Type: application/json' \
-H "X-Helix-Job-Token: $Helix_JobToken" \
-H 'Content-Length: 0' \
-X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \
--data-urlencode "errorCount=$errorCount" \
--data-urlencode "warningCount=$warningCount"`
curlStatus=$?
if [ $curlStatus -eq 0 ]; then
if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
curlStatus=$curlResult
fi
fi
if [ $curlStatus -ne 0 ]; then
echo "Failed to Send Build Finish information"
vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus"
echo "##$vstsLogOutput"
exit 1
fi
displayName: Send Unix Build End Telemetry
env:
# defined via VSTS variables in start-job.sh
Helix_JobToken: $(Helix_JobToken)
Helix_WorkItemId: $(Helix_WorkItemId)
condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
- powershell: |
if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) {
$ErrorCount = 0
} else {
$ErrorCount = 1
}
$WarningCount = 0
try {
Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" `
-Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
}
catch {
Write-Error $_
Write-Error $_.Exception
exit 1
}
displayName: Send Windows Build End Telemetry
env:
# defined via VSTS variables in start-job.ps1
Helix_JobToken: $(Helix_JobToken)
Helix_WorkItemId: $(Helix_WorkItemId)
condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT'))


@ -0,0 +1,154 @@
parameters:
helixSource: 'undefined_defaulted_in_telemetry.yml'
helixType: 'undefined_defaulted_in_telemetry.yml'
buildConfig: ''
steps:
- ${{ if not(eq(variables['System.TeamProject'], 'public')) }}:
- task: AzureKeyVault@1
inputs:
azureSubscription: 'HelixProd_KeyVault'
KeyVaultName: HelixProdKV
SecretsFilter: 'HelixApiAccessToken'
condition: always()
- bash: |
# create a temporary file
jobInfo=`mktemp`
# write job info content to temporary file
cat > $jobInfo <<JobListStuff
{
"QueueId": "$QueueId",
"Source": "$Source",
"Type": "$Type",
"Build": "$Build",
"Attempt": "$Attempt",
"Properties": {
"operatingSystem": "$OperatingSystem",
"configuration": "$Configuration"
}
}
JobListStuff
# create a temporary file for curl output
res=`mktemp`
accessTokenParameter=''
if [[ ! "$HelixApiAccessToken" == "" ]]; then
accessTokenParameter="?access_token=$HelixApiAccessToken"
fi
curlResult=`
cat $jobInfo |\
curl --verbose --output $res --write-out "%{http_code}" \
-H 'Content-Type: application/json' \
-X POST "https://helix.dot.net/api/2018-03-14/telemetry/job$accessTokenParameter" -d @-`
curlStatus=$?
if [ $curlStatus -eq 0 ]; then
if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
curlStatus=$curlResult
fi
fi
curlResult=`cat $res`
# validate status of curl command
if [ $curlStatus -ne 0 ]; then
echo "Failed To Send Job Start information"
# The ## prefix is appended separately so that VSTS does not pick up the logging command when it dumps the inline script into the shell
vstsLogOutput="vso[task.logissue type=error;sourcepath=telemetry/start-job.sh;code=1;]Failed to Send Job Start information: $curlStatus"
echo "##$vstsLogOutput"
exit 1
fi
# Set the Helix_JobToken variable
export Helix_JobToken=`echo $curlResult | xargs echo` # Strip Quotes
echo "##vso[task.setvariable variable=Helix_JobToken;issecret=true;]$Helix_JobToken"
displayName: Send Unix Job Start Telemetry
env:
HelixApiAccessToken: $(HelixApiAccessToken)
Source: ${{ parameters.helixSource }}
Type: ${{ parameters.helixType }}
Build: $(Build.BuildNumber)
QueueId: $(Agent.Os)
Attempt: 1
OperatingSystem: $(Agent.Os)
Configuration: ${{ parameters.buildConfig }}
condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
- bash: |
res=`mktemp`
curlResult=`
curl --verbose --output $res --write-out "%{http_code}"\
-H 'Content-Type: application/json' \
-H "X-Helix-Job-Token: $Helix_JobToken" \
-H 'Content-Length: 0' \
-X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build" \
--data-urlencode "buildUri=$BuildUri"`
curlStatus=$?
if [ $curlStatus -eq 0 ]; then
if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
curlStatus=$curlResult
fi
fi
curlResult=`cat $res`
# validate status of curl command
if [ $curlStatus -ne 0 ]; then
echo "Failed to Send Build Start information"
vstsLogOutput="vso[task.logissue type=error;sourcepath=telemetry/build/start.sh;code=1;]Failed to Send Build Start information: $curlStatus"
echo "##$vstsLogOutput"
exit 1
fi
export Helix_WorkItemId=`echo $curlResult | xargs echo` # Strip Quotes
echo "##vso[task.setvariable variable=Helix_WorkItemId]$Helix_WorkItemId"
displayName: Send Unix Build Start Telemetry
env:
BuildUri: $(System.TaskDefinitionsUri)$(System.TeamProject)/_build/index?buildId=$(Build.BuildId)&_a=summary
Helix_JobToken: $(Helix_JobToken)
condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
- powershell: |
$jobInfo = [pscustomobject]@{
QueueId=$env:QueueId;
Source=$env:Source;
Type=$env:Type;
Build=$env:Build;
Attempt=$env:Attempt;
Properties=[pscustomobject]@{ operatingSystem=$env:OperatingSystem; configuration=$env:Configuration };
}
$jobInfoJson = $jobInfo | ConvertTo-Json
if ($env:HelixApiAccessToken) {
$accessTokenParameter="?access_token=$($env:HelixApiAccessToken)"
}
Write-Host "Job Info: $jobInfoJson"
$jobToken = Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job$($accessTokenParameter)" -Method Post -ContentType "application/json" -Body $jobInfoJson
$env:Helix_JobToken = $jobToken
Write-Host "##vso[task.setvariable variable=Helix_JobToken;issecret=true;]$env:Helix_JobToken"
displayName: Send Windows Job Start Telemetry
env:
HelixApiAccessToken: $(HelixApiAccessToken)
Source: ${{ parameters.helixSource }}
Type: ${{ parameters.helixType }}
Build: $(Build.BuildNumber)
QueueId: $(Agent.Os)
Attempt: 1
OperatingSystem: $(Agent.Os)
Configuration: ${{ parameters.buildConfig }}
condition: and(always(), eq(variables['Agent.Os'], 'Windows_NT'))
- powershell: |
$workItemId = Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build?buildUri=$([Net.WebUtility]::UrlEncode($env:BuildUri))" -Method Post -ContentType "application/json" -Body "" `
-Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
$env:Helix_WorkItemId = $workItemId
Write-Host "##vso[task.setvariable variable=Helix_WorkItemId]$env:Helix_WorkItemId"
displayName: Send Windows Build Start Telemetry
env:
BuildUri: $(System.TaskDefinitionsUri)$(System.TeamProject)/_build/index?buildId=$(Build.BuildId)&_a=summary
Helix_JobToken: $(Helix_JobToken)
condition: and(always(), eq(variables['Agent.Os'], 'Windows_NT'))

284
eng/common/tools.ps1 Normal file

@ -0,0 +1,284 @@
# Initialize variables if they aren't already defined
$ci = if (Test-Path variable:ci) { $ci } else { $false }
$configuration = if (Test-Path variable:configuration) { $configuration } else { "Debug" }
$nodereuse = if (Test-Path variable:nodereuse) { $nodereuse } else { !$ci }
$prepareMachine = if (Test-Path variable:prepareMachine) { $prepareMachine } else { $false }
$restore = if (Test-Path variable:restore) { $restore } else { $true }
$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { "minimal" }
$warnaserror = if (Test-Path variable:warnaserror) { $warnaserror } else { $true }
$msbuildEngine = if (Test-Path variable:msbuildEngine) { $msbuildEngine } else { $null }
$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true }
set-strictmode -version 2.0
$ErrorActionPreference = "Stop"
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
function Create-Directory([string[]] $path) {
if (!(Test-Path $path)) {
New-Item -path $path -force -itemType "Directory" | Out-Null
}
}
function InitializeDotNetCli([bool]$install) {
# Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
$env:DOTNET_MULTILEVEL_LOOKUP=0
# Disable first run since we do not need all ASP.NET packages restored.
$env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
# Source Build uses DotNetCoreSdkDir variable
if ($env:DotNetCoreSdkDir -ne $null) {
$env:DOTNET_INSTALL_DIR = $env:DotNetCoreSdkDir
}
# Find the first path on %PATH% that contains the dotnet.exe
if ($useInstalledDotNetCli -and ($env:DOTNET_INSTALL_DIR -eq $null)) {
# PATCH: Fix https://github.com/dotnet/arcade/issues/1251
$env:DOTNET_INSTALL_DIR = ${env:PATH}.Split(';') | where { ($_ -ne "") -and (Test-Path (Join-Path $_ "dotnet.exe")) } | select -first 1
}
$dotnetSdkVersion = $GlobalJson.tools.dotnet
# Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
# otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
if (($env:DOTNET_INSTALL_DIR -ne $null) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
$dotnetRoot = $env:DOTNET_INSTALL_DIR
} else {
$dotnetRoot = Join-Path $RepoRoot ".dotnet"
$env:DOTNET_INSTALL_DIR = $dotnetRoot
if (-not (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
if ($install) {
InstallDotNetSdk $dotnetRoot $dotnetSdkVersion
} else {
Write-Host "Unable to find dotnet with SDK version '$dotnetSdkVersion'" -ForegroundColor Red
ExitWithExitCode 1
}
}
}
return $dotnetRoot
}
function GetDotNetInstallScript([string] $dotnetRoot) {
$installScript = "$dotnetRoot\dotnet-install.ps1"
if (!(Test-Path $installScript)) {
Create-Directory $dotnetRoot
Invoke-WebRequest "https://dot.net/v1/dotnet-install.ps1" -OutFile $installScript
}
return $installScript
}
function InstallDotNetSdk([string] $dotnetRoot, [string] $version) {
$installScript = GetDotNetInstallScript $dotnetRoot
& $installScript -Version $version -InstallDir $dotnetRoot
if ($lastExitCode -ne 0) {
Write-Host "Failed to install dotnet cli (exit code '$lastExitCode')." -ForegroundColor Red
ExitWithExitCode $lastExitCode
}
}
function InitializeVisualStudioBuild {
$vsToolsPath = $env:VS150COMNTOOLS
if ($vsToolsPath -eq $null) {
$vsToolsPath = $env:VS160COMNTOOLS
}
if (($vsToolsPath -ne $null) -and (Test-Path $vsToolsPath)) {
$vsInstallDir = [System.IO.Path]::GetFullPath((Join-Path $vsToolsPath "..\.."))
} else {
$vsInfo = LocateVisualStudio
$vsInstallDir = $vsInfo.installationPath
$vsSdkInstallDir = Join-Path $vsInstallDir "VSSDK\"
$vsVersion = $vsInfo.installationVersion.Split('.')[0] + "0"
Set-Item "env:VS$($vsVersion)COMNTOOLS" (Join-Path $vsInstallDir "Common7\Tools\")
Set-Item "env:VSSDK$($vsVersion)Install" $vsSdkInstallDir
$env:VSSDKInstall = $vsSdkInstallDir
}
return $vsInstallDir
}
function LocateVisualStudio {
$vswhereVersion = $GlobalJson.tools.vswhere
if (!$vsWhereVersion) {
Write-Host "vswhere version must be specified in /global.json." -ForegroundColor Red
ExitWithExitCode 1
}
$toolsRoot = Join-Path $RepoRoot ".tools"
$vsWhereDir = Join-Path $toolsRoot "vswhere\$vswhereVersion"
$vsWhereExe = Join-Path $vsWhereDir "vswhere.exe"
if (!(Test-Path $vsWhereExe)) {
Create-Directory $vsWhereDir
Write-Host "Downloading vswhere"
Invoke-WebRequest "https://github.com/Microsoft/vswhere/releases/download/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
}
$vsInfo = & $vsWhereExe `
-latest `
-prerelease `
-format json `
-requires Microsoft.Component.MSBuild `
-requires Microsoft.VisualStudio.Component.VSSDK `
-requires Microsoft.VisualStudio.Component.Roslyn.Compiler | ConvertFrom-Json
if ($lastExitCode -ne 0) {
Write-Host "Failed to locate Visual Studio (exit code '$lastExitCode')." -ForegroundColor Red
ExitWithExitCode $lastExitCode
}
# use first matching instance
return $vsInfo[0]
}
function ConfigureTools {
# Include custom tools configuration
$script = Join-Path $EngRoot "configure-toolset.ps1"
if (Test-Path $script) {
. $script
}
}
function InitializeTools() {
ConfigureTools
$tools = $GlobalJson.tools
# Initialize dotnet cli if listed in 'tools'
$dotnetRoot = $null
if ((Get-Member -InputObject $tools -Name "dotnet") -ne $null) {
$dotnetRoot = InitializeDotNetCli -install:$restore
}
if (-not $msbuildEngine) {
# Presence of vswhere.version indicates the repo needs to build using VS msbuild.
if ((Get-Member -InputObject $tools -Name "vswhere") -ne $null) {
$msbuildEngine = "vs"
} elseif ($dotnetRoot -ne $null) {
$msbuildEngine = "dotnet"
} else {
Write-Host "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vswhere'." -ForegroundColor Red
ExitWithExitCode 1
}
}
if ($msbuildEngine -eq "dotnet") {
if (!$dotnetRoot) {
Write-Host "/global.json must specify 'tools.dotnet'." -ForegroundColor Red
ExitWithExitCode 1
}
$script:buildDriver = Join-Path $dotnetRoot "dotnet.exe"
$script:buildArgs = "msbuild"
} elseif ($msbuildEngine -eq "vs") {
$vsInstallDir = InitializeVisualStudioBuild
$script:buildDriver = Join-Path $vsInstallDir "MSBuild\15.0\Bin\msbuild.exe"
$script:buildArgs = ""
} else {
Write-Host "Unexpected value of -msbuildEngine: '$msbuildEngine'." -ForegroundColor Red
ExitWithExitCode 1
}
InitializeToolSet $script:buildDriver $script:buildArgs
InitializeCustomToolset
}
function InitializeToolset([string] $buildDriver, [string]$buildArgs) {
$toolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
$toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt"
if (Test-Path $toolsetLocationFile) {
$path = Get-Content $toolsetLocationFile -TotalCount 1
if (Test-Path $path) {
$script:ToolsetBuildProj = $path
return
}
}
if (-not $restore) {
Write-Host "Toolset version $toolsetVersion has not been restored."
ExitWithExitCode 1
}
$ToolsetRestoreLog = Join-Path $LogDir "ToolsetRestore.binlog"
$proj = Join-Path $ToolsetDir "restore.proj"
'<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' | Set-Content $proj
MSBuild $proj /t:__WriteToolsetLocation /clp:None /bl:$ToolsetRestoreLog /p:__ToolsetLocationOutputFile=$toolsetLocationFile
if ($lastExitCode -ne 0) {
Write-Host "Failed to restore toolset (exit code '$lastExitCode'). See log: $ToolsetRestoreLog" -ForegroundColor Red
ExitWithExitCode $lastExitCode
}
$path = Get-Content $toolsetLocationFile -TotalCount 1
if (!(Test-Path $path)) {
throw "Invalid toolset path: $path"
}
$script:ToolsetBuildProj = $path
}
function InitializeCustomToolset {
if (-not $restore) {
return
}
$script = Join-Path $EngRoot "restore-toolset.ps1"
if (Test-Path $script) {
. $script
}
}
function ExitWithExitCode([int] $exitCode) {
if ($ci -and $prepareMachine) {
Stop-Processes
}
exit $exitCode
}
function Stop-Processes() {
Write-Host "Killing running build processes..."
Get-Process -Name "msbuild" -ErrorAction SilentlyContinue | Stop-Process
Get-Process -Name "dotnet" -ErrorAction SilentlyContinue | Stop-Process
Get-Process -Name "vbcscompiler" -ErrorAction SilentlyContinue | Stop-Process
}
function MsBuild() {
$warnaserrorSwitch = if ($warnaserror) { "/warnaserror" } else { "" }
& $buildDriver $buildArgs $warnaserrorSwitch /m /nologo /clp:Summary /v:$verbosity /nr:$nodereuse $args
}
$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot "..\..")
$EngRoot = Resolve-Path (Join-Path $PSScriptRoot "..")
$ArtifactsDir = Join-Path $RepoRoot "artifacts"
$ToolsetDir = Join-Path $ArtifactsDir "toolset"
$LogDir = Join-Path (Join-Path $ArtifactsDir "log") $configuration
$TempDir = Join-Path (Join-Path $ArtifactsDir "tmp") $configuration
$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot "global.json") | ConvertFrom-Json
if ($env:NUGET_PACKAGES -eq $null) {
# Use local cache on CI to ensure deterministic build,
# use global cache in dev builds to avoid cost of downloading packages.
$env:NUGET_PACKAGES = if ($ci) { Join-Path $RepoRoot ".packages" }
else { Join-Path $env:UserProfile ".nuget\packages" }
}
Create-Directory $ToolsetDir
Create-Directory $LogDir
if ($ci) {
Create-Directory $TempDir
$env:TEMP = $TempDir
$env:TMP = $TempDir
}
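As a sketch of how these helpers are meant to be consumed, a hypothetical entry-point script (an eng/common/build.ps1-style wrapper, assumed here rather than shown above) could dot-source tools.ps1 and drive a build like this:

# Minimal hypothetical caller of tools.ps1 (simplified; real build scripts take more switches).
$configuration = "Release"   # picked up by tools.ps1 because it is defined before dot-sourcing
$ci = $false
$restore = $true

. (Join-Path $PSScriptRoot "tools.ps1")   # defines InitializeTools, MsBuild, $LogDir, etc.

try {
  InitializeTools                         # installs the SDK from global.json if needed and restores the Arcade toolset

  # $ToolsetBuildProj is set by InitializeToolset and points at the restored Arcade SDK build project.
  $binlog = Join-Path $LogDir "Build.binlog"
  MsBuild $ToolsetBuildProj /p:Configuration=$configuration /p:Restore=$restore /bl:$binlog
  ExitWithExitCode $lastExitCode
}
catch {
  Write-Host $_
  ExitWithExitCode 1
}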

258
eng/common/tools.sh Executable file

@ -0,0 +1,258 @@
#!/usr/bin/env bash
# Stop script if unbound variable found (use ${var:-} if intentional)
set -u
ci=${ci:-false}
configuration=${configuration:-'Debug'}
nodereuse=${nodereuse:-true}
prepare_machine=${prepare_machine:-false}
restore=${restore:-true}
verbosity=${verbosity:-'minimal'}
warnaserror=${warnaserror:-true}
useInstalledDotNetCli=${useInstalledDotNetCli:-true}
repo_root="$scriptroot/../.."
eng_root="$scriptroot/.."
artifacts_dir="$repo_root/artifacts"
toolset_dir="$artifacts_dir/toolset"
log_dir="$artifacts_dir/log/$configuration"
temp_dir="$artifacts_dir/tmp/$configuration"
global_json_file="$repo_root/global.json"
build_driver=""
toolset_build_proj=""
function ResolvePath {
local path=$1
# resolve $path until the file is no longer a symlink
while [[ -h $path ]]; do
local dir="$( cd -P "$( dirname "$path" )" && pwd )"
path="$(readlink "$path")"
# if $path was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $path != /* ]] && path="$dir/$path"
done
# return value
_ResolvePath="$path"
}
# ReadVersionFromJson [json key]
function ReadGlobalVersion {
local key=$1
local line=`grep -m 1 "$key" "$global_json_file"`
local pattern="\"$key\" *: *\"(.*)\""
if [[ ! $line =~ $pattern ]]; then
echo "Error: Cannot find \"$key\" in $global_json_file" >&2
ExitWithExitCode 1
fi
# return value
_ReadGlobalVersion=${BASH_REMATCH[1]}
}
function InitializeDotNetCli {
local install=$1
# Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
export DOTNET_MULTILEVEL_LOOKUP=0
# Disable first run since we want to control all package sources
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
# Source Build uses DotNetCoreSdkDir variable
if [[ -n "${DotNetCoreSdkDir:-}" ]]; then
export DOTNET_INSTALL_DIR="$DotNetCoreSdkDir"
fi
# Find the first path on $PATH that contains the dotnet.exe
if [[ "$useInstalledDotNetCli" == true && -z "${DOTNET_INSTALL_DIR:-}" ]]; then
local dotnet_path=`command -v dotnet`
if [[ -n "$dotnet_path" ]]; then
ResolvePath "$dotnet_path"
export DOTNET_INSTALL_DIR=`dirname "$_ResolvePath"`
fi
fi
ReadGlobalVersion "dotnet"
local dotnet_sdk_version=$_ReadGlobalVersion
local dotnet_root=""
# Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
# otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
if [[ -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
dotnet_root="$DOTNET_INSTALL_DIR"
else
dotnet_root="$repo_root/.dotnet"
export DOTNET_INSTALL_DIR="$dotnet_root"
if [[ ! -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
if [[ "$install" == true ]]; then
InstallDotNetSdk "$dotnet_root" "$dotnet_sdk_version"
else
echo "Unable to find dotnet with SDK version '$dotnet_sdk_version'" >&2
ExitWithExitCode 1
fi
fi
fi
# return value
_InitializeDotNetCli="$dotnet_root"
}
function InstallDotNetSdk {
local root=$1
local version=$2
GetDotNetInstallScript "$root"
local install_script=$_GetDotNetInstallScript
bash "$install_script" --version $version --install-dir "$root"
local lastexitcode=$?
if [[ $lastexitcode != 0 ]]; then
echo "Failed to install dotnet SDK (exit code '$lastexitcode')." >&2
ExitWithExitCode $lastexitcode
fi
}
function GetDotNetInstallScript {
local root=$1
local install_script="$root/dotnet-install.sh"
local install_script_url="https://dot.net/v1/dotnet-install.sh"
if [[ ! -a "$install_script" ]]; then
mkdir -p "$root"
echo "Downloading '$install_script_url'"
# Use curl if available, otherwise use wget
if command -v curl > /dev/null; then
curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script"
else
wget -q -O "$install_script" "$install_script_url"
fi
fi
# return value
_GetDotNetInstallScript="$install_script"
}
function InitializeToolset {
ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk"
local toolset_version=$_ReadGlobalVersion
local toolset_location_file="$toolset_dir/$toolset_version.txt"
if [[ -a "$toolset_location_file" ]]; then
local path=`cat "$toolset_location_file"`
if [[ -a "$path" ]]; then
toolset_build_proj="$path"
return
fi
fi
if [[ "$restore" != true ]]; then
echo "Toolset version $toolsetVersion has not been restored." >&2
ExitWithExitCode 2
fi
local toolset_restore_log="$log_dir/ToolsetRestore.binlog"
local proj="$toolset_dir/restore.proj"
echo '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' > "$proj"
MSBuild "$proj" /t:__WriteToolsetLocation /clp:None /bl:"$toolset_restore_log" /p:__ToolsetLocationOutputFile="$toolset_location_file"
local lastexitcode=$?
if [[ $lastexitcode != 0 ]]; then
echo "Failed to restore toolset (exit code '$lastexitcode'). See log: $toolset_restore_log" >&2
ExitWithExitCode $lastexitcode
fi
toolset_build_proj=`cat "$toolset_location_file"`
if [[ ! -a "$toolset_build_proj" ]]; then
echo "Invalid toolset path: $toolset_build_proj" >&2
ExitWithExitCode 3
fi
}
function InitializeCustomToolset {
local script="$eng_root/restore-toolset.sh"
if [[ -a "$script" ]]; then
. "$script"
fi
}
function ConfigureTools {
local script="$eng_root/configure-toolset.sh"
if [[ -a "$script" ]]; then
. "$script"
fi
}
function InitializeTools {
ConfigureTools
InitializeDotNetCli $restore
build_driver="$_InitializeDotNetCli/dotnet"
InitializeToolset
InitializeCustomToolset
}
function ExitWithExitCode {
if [[ "$ci" == true && "$prepare_machine" == true ]]; then
StopProcesses
fi
exit $1
}
function StopProcesses {
echo "Killing running build processes..."
pkill -9 "dotnet"
pkill -9 "vbcscompiler"
return 0
}
function MSBuild {
local warnaserror_switch=""
if [[ $warnaserror == true ]]; then
warnaserror_switch="/warnaserror"
fi
"$build_driver" msbuild /m /nologo /clp:Summary /v:$verbosity /nr:$nodereuse $warnaserror_switch "$@"
return $?
}
# HOME may not be defined in some scenarios, but it is required by NuGet
if [[ -z ${HOME:-} ]]; then
export HOME="$repo_root/artifacts/.home/"
mkdir -p "$HOME"
fi
if [[ -z ${NUGET_PACKAGES:-} ]]; then
if [[ $ci == true ]]; then
export NUGET_PACKAGES="$repo_root/.packages"
else
export NUGET_PACKAGES="$HOME/.nuget/packages"
fi
fi
mkdir -p "$toolset_dir"
mkdir -p "$log_dir"
if [[ $ci == true ]]; then
mkdir -p "$temp_dir"
export TEMP="$temp_dir"
export TMP="$temp_dir"
fi

8
global.json Normal file

@ -0,0 +1,8 @@
{
"tools": {
"dotnet": "2.2.100"
},
"msbuild-sdks": {
"Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.18570.17"
}
}
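For reference, a small sketch of how the scripts above consume this file: tools.ps1 parses it with ConvertFrom-Json (see $GlobalJson near the end of that script), while tools.sh greps for the key via ReadGlobalVersion; both resolve to the values shown here.

# Hypothetical check mirroring what tools.ps1 does with this file:
$GlobalJson = Get-Content -Raw -Path "global.json" | ConvertFrom-Json
$GlobalJson.tools.dotnet                                  # "2.2.100"             - SDK installed by InitializeDotNetCli
$GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'  # "1.0.0-beta.18570.17" - toolset restored by InitializeToolset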


@ -1,6 +1,8 @@
<Project>
<!-- Chain up to the next file (can be copy-pasted to either Directory.Build.props or Directory.Build.targets) -->
<Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory).., '$(MSBuildThisFileName)$(MSBuildThisFileExtension)'))\$(MSBuildThisFileName)$(MSBuildThisFileExtension)" />
<PropertyGroup>
<LangVersion>latest</LangVersion>
<RestoreSources>
$(RestoreSources);
http://api.nuget.org/v3/index.json;


@ -13,18 +13,18 @@ namespace Microsoft.Diagnostics.Tools.Analyze.Commands
public override string Description => "Dumps the stack trace associated with an event, if there is one.";
protected override async Task RunAsyncCore(IConsole console, string[] args, AnalysisSession session, TraceLog trace)
protected override Task RunAsyncCore(IConsole console, string[] args, AnalysisSession session, TraceLog trace)
{
if (args.Length < 1)
{
console.Error.WriteLine("Usage: eventstack <eventIndex>");
return;
return Task.CompletedTask;
}
if (!int.TryParse(args[0], NumberStyles.HexNumber, CultureInfo.InvariantCulture, out var eventIndex))
{
console.Error.WriteLine("Usage: eventstack <eventIndex>");
return;
return Task.CompletedTask;
}
var evt = trace.Events.ElementAt(eventIndex);
@ -37,6 +37,8 @@ namespace Microsoft.Diagnostics.Tools.Analyze.Commands
{
console.Error.WriteLine($"Unable to find any call stacks for event {eventIndex:X4}!");
}
return Task.CompletedTask;
}
private void WriteStack(TraceCallStack stack, IConsole console)


@ -2,10 +2,11 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp2.2</TargetFramework>
<RootNamespace>Microsoft.Diagnostics.Tools.Analyze</RootNamespace>
<IsPackable>true</IsPackable>
<PackAsTool>true</PackAsTool>
</PropertyGroup>
<ItemGroup>
@ -15,7 +16,7 @@
<ItemGroup>
<PackageReference Include="McMaster.Extensions.CommandLineUtils" Version="2.2.5" />
<PackageReference Include="Microsoft.Diagnostics.Runtime" Version="1.0.0-preview1-18569-05" />
<PackageReference Include="Microsoft.Diagnostics.Runtime" Version="1.0.0" />
<PackageReference Include="Microsoft.Diagnostics.Tracing.TraceEvent" Version="2.0.30" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="2.2.0-preview3-35497" />
<PackageReference Include="Newtonsoft.Json" Version="11.0.2" />


@ -4,10 +4,10 @@
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp2.2</TargetFramework>
<RootNamespace>Microsoft.Diagnostics.Tools.Collect</RootNamespace>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<IsPackable>true</IsPackable>
<PackAsTool>true</PackAsTool>
</PropertyGroup>
<ItemGroup>


@ -4,6 +4,9 @@
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp2.2</TargetFramework>
<RootNamespace>Microsoft.Diagnostics.Tools.Dump</RootNamespace>
<IsPackable>true</IsPackable>
<PackAsTool>true</PackAsTool>
</PropertyGroup>
<ItemGroup>


@ -7,10 +7,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="15.9.0" />
<PackageReference Include="Newtonsoft.Json" Version="11.0.2" />
<PackageReference Include="xunit" Version="2.4.1" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.1" />
</ItemGroup>
<ItemGroup>


@ -3,11 +3,12 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp2.1</TargetFramework>
<PackAsTool>true</PackAsTool>
<AssemblyName>dotnet-httprepl</AssemblyName>
<LangVersion>latest</LangVersion>
<Description>Command line tool for making HTTP calls and viewing their results.</Description>
<PackageTags>dotnet;http;httprepl</PackageTags>
<IsPackable>true</IsPackable>
<PackAsTool>true</PackAsTool>
<PackAsToolShimRuntimeIdentifiers>win-x64;win-x86</PackAsToolShimRuntimeIdentifiers>
</PropertyGroup>


@ -8,10 +8,4 @@
<ProjectReference Include="..\Microsoft.Repl\Microsoft.Repl.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="15.9.0" />
<PackageReference Include="xunit" Version="2.4.1" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.1" />
</ItemGroup>
</Project>


@ -3,11 +3,17 @@ Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.26124.0
MinimumVisualStudioVersion = 15.0.26124.0
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.AspNetCore.Proxy", "src\Microsoft.AspNetCore.Proxy.csproj", "{01C51C11-BDCD-4D69-A829-2BA3846D431C}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.Proxy", "src\Microsoft.AspNetCore.Proxy.csproj", "{01C51C11-BDCD-4D69-A829-2BA3846D431C}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.AspNetCore.Proxy.Samples", "sample\Microsoft.AspNetCore.Proxy.Samples.csproj", "{9EA6AF75-6D14-46D8-9002-30E2EAFBEC31}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.Proxy.Samples", "sample\Microsoft.AspNetCore.Proxy.Samples.csproj", "{9EA6AF75-6D14-46D8-9002-30E2EAFBEC31}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.AspNetCore.Proxy.Test", "test\Microsoft.AspNetCore.Proxy.Test.csproj", "{FCD0F93A-9683-42DD-BE8E-2240F4DB403E}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.Proxy.Test", "test\Microsoft.AspNetCore.Proxy.Test.csproj", "{FCD0F93A-9683-42DD-BE8E-2240F4DB403E}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{7B36FA7E-3D3E-4D24-9690-8EBD66C23039}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "samples", "samples", "{B07F3593-F5C6-4101-BF97-1E4E58ABC22A}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "test", "test", "{F2DEA5D2-835F-4FE9-B231-DF0DAFAB04C6}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@ -18,9 +24,6 @@ Global
Release|x64 = Release|x64
Release|x86 = Release|x86
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{01C51C11-BDCD-4D69-A829-2BA3846D431C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{01C51C11-BDCD-4D69-A829-2BA3846D431C}.Debug|Any CPU.Build.0 = Debug|Any CPU
@ -59,4 +62,15 @@ Global
{FCD0F93A-9683-42DD-BE8E-2240F4DB403E}.Release|x86.ActiveCfg = Release|Any CPU
{FCD0F93A-9683-42DD-BE8E-2240F4DB403E}.Release|x86.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{01C51C11-BDCD-4D69-A829-2BA3846D431C} = {7B36FA7E-3D3E-4D24-9690-8EBD66C23039}
{9EA6AF75-6D14-46D8-9002-30E2EAFBEC31} = {B07F3593-F5C6-4101-BF97-1E4E58ABC22A}
{FCD0F93A-9683-42DD-BE8E-2240F4DB403E} = {F2DEA5D2-835F-4FE9-B231-DF0DAFAB04C6}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {ED16A561-5A6A-421E-8C8E-83423ACEF2EB}
EndGlobalSection
EndGlobal


@ -9,7 +9,7 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Server.IISIntegration" Version="2.1.0" />
<PackageReference Include="Microsoft.AspNetCore.Server.Kestrel" Version="2.1.0" />
<PackageReference Include="Microsoft.AspNetCore.Server.IISIntegration" Version="2.1.1" />
<PackageReference Include="Microsoft.AspNetCore.Server.Kestrel" Version="2.1.1" />
</ItemGroup>
</Project>


@ -10,8 +10,8 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.WebSockets" Version="2.1.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="2.1.0" />
<PackageReference Include="Microsoft.AspNetCore.WebSockets" Version="2.1.1" />
<PackageReference Include="Microsoft.Extensions.Options" Version="2.1.1" />
</ItemGroup>
</Project>


@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netcoreapp2.1</TargetFramework>
@ -9,10 +9,9 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Server.Kestrel" Version="2.1.0" />
<PackageReference Include="Microsoft.AspNetCore.TestHost" Version="2.1.0" />
<PackageReference Include="Microsoft.AspNetCore.Server.Kestrel" Version="2.1.1" />
<PackageReference Include="Microsoft.AspNetCore.TestHost" Version="2.1.1" />
<PackageReference Include="Microsoft.AspNetCore.Testing" Version="2.1.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="15.8.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.0" />
<PackageReference Include="xunit" Version="2.4.0" />
</ItemGroup>


@ -1,5 +1,6 @@
<Project>
<Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory)..\, Directory.Build.props))\Directory.Build.props" />
<!-- Chain up to the next file (can be copy-pasted to either Directory.Build.props or Directory.Build.targets) -->
<Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory).., '$(MSBuildThisFileName)$(MSBuildThisFileExtension)'))\$(MSBuildThisFileName)$(MSBuildThisFileExtension)" />
<Import Project="dependencies.props" />


@ -1,5 +1,6 @@
<Project>
<Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory)..\, Directory.Build.targets))\Directory.Build.targets" />
<!-- Chain up to the next file (can be copy-pasted to either Directory.Build.props or Directory.Build.targets) -->
<Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory).., '$(MSBuildThisFileName)$(MSBuildThisFileExtension)'))\$(MSBuildThisFileName)$(MSBuildThisFileExtension)" />
<!-- In Directory.Build.targets because default items are added after Directory.Build.props is imported, causing invalid duplicate entries. -->
<ItemGroup Condition="Exists( '$(MSBuildProjectDirectory)\Properties\Resources.resx' )">


@ -292,7 +292,7 @@ Global
{F42E56B7-60D4-481A-8585-04015B2B501E} = {9575CB90-BC4B-43BB-8AEA-82C53FDA4187}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
EnterpriseLibraryConfigurationToolBinariesPathV6 = packages\EnterpriseLibrary.TransientFaultHandling.6.0.1304.0\lib\portable-net45+win+wp8;packages\EnterpriseLibrary.TransientFaultHandling.Data.6.0.1304.1\lib\NET45
SolutionGuid = {12CE6238-F847-4984-8622-1ED46072150A}
EnterpriseLibraryConfigurationToolBinariesPathV6 = packages\EnterpriseLibrary.TransientFaultHandling.6.0.1304.0\lib\portable-net45+win+wp8;packages\EnterpriseLibrary.TransientFaultHandling.Data.6.0.1304.1\lib\NET45
EndGlobalSection
EndGlobal


@ -12,9 +12,9 @@
<MicrosoftAspNetCorePackageVersion>2.1.0</MicrosoftAspNetCorePackageVersion>
<MicrosoftAspNetCoreStaticFilesPackageVersion>2.1.0</MicrosoftAspNetCoreStaticFilesPackageVersion>
<MicrosoftAspNetCoreTestingPackageVersion>2.1.0</MicrosoftAspNetCoreTestingPackageVersion>
<MicrosoftExtensionsConfigurationPackageVersion>2.1.0</MicrosoftExtensionsConfigurationPackageVersion>
<MicrosoftExtensionsLoggingTestingPackageVersion>2.1.0</MicrosoftExtensionsLoggingTestingPackageVersion>
<MicrosoftExtensionsPrimitivesPackageVersion>2.1.0</MicrosoftExtensionsPrimitivesPackageVersion>
<MicrosoftExtensionsConfigurationPackageVersion>2.1.1</MicrosoftExtensionsConfigurationPackageVersion>
<MicrosoftExtensionsLoggingTestingPackageVersion>2.1.1</MicrosoftExtensionsLoggingTestingPackageVersion>
<MicrosoftExtensionsPrimitivesPackageVersion>2.1.1</MicrosoftExtensionsPrimitivesPackageVersion>
<MicrosoftNETTestSdkPackageVersion>15.6.1</MicrosoftNETTestSdkPackageVersion>
<MoqPackageVersion>4.9.0</MoqPackageVersion>
<NewtonsoftJsonPackageVersion>11.0.2</NewtonsoftJsonPackageVersion>


@ -1,9 +1,8 @@
<Project>
<Import Project="..\Directory.Build.props" />
<!-- Chain up to the next file (can be copy-pasted to either Directory.Build.props or Directory.Build.targets) -->
<Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory).., '$(MSBuildThisFileName)$(MSBuildThisFileExtension)'))\$(MSBuildThisFileName)$(MSBuildThisFileExtension)" />
<PropertyGroup>
<IsPackable>false</IsPackable>
<DeveloperBuildTestWebsiteTfms Condition=" '$(DeveloperBuildTestWebsiteTfms)' == '' ">netcoreapp2.1</DeveloperBuildTestWebsiteTfms>
<StandardTestWebsiteTfms>$(DeveloperBuildTestWebsiteTfms)</StandardTestWebsiteTfms>
<StandardTestWebsiteTfms Condition=" '$(DeveloperBuild)' != 'true' ">netcoreapp2.1</StandardTestWebsiteTfms>


@ -1,10 +1,11 @@
<Project>
<Import Project="..\Directory.Build.props" />
<!-- Chain up to the next file (can be copy-pasted to either Directory.Build.props or Directory.Build.targets) -->
<Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory).., '$(MSBuildThisFileName)$(MSBuildThisFileExtension)'))\$(MSBuildThisFileName)$(MSBuildThisFileExtension)" />
<PropertyGroup>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<RootNamespace>Microsoft.AspNetCore.WebHooks</RootNamespace>
<TargetFramework>netstandard2.0</TargetFramework>
<IsPackable>true</IsPackable>
</PropertyGroup>
</Project>


@ -3,9 +3,8 @@
<Description>ASP.NET Core AzureContainerRegistry WebHooks infrastructure. Contains the AzureContainerRegistryWebHookAttribute class and AddAzureContainerRegistryWebHooks method.</Description>
<PackageTags>aspnetcore;webhook;receiver;azurecontainerregistry</PackageTags>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Mvc.Formatters.Json" Version="$(MicrosoftAspNetCoreMvcFormattersJsonPackageVersion)" />
<ProjectReference Include="..\Microsoft.AspNetCore.WebHooks.Receivers\Microsoft.AspNetCore.WebHooks.Receivers.csproj" />
</ItemGroup>
</Project>
</Project>


@ -160,7 +160,7 @@ namespace Microsoft.AspNetCore.WebHooks.Properties {
}
/// <summary>
/// Looks up a localized string similar to Could not find a valid configuration for the &apos;{0}&apos; WebHook receiver, instance &apos;{1}&apos;. The value must be between {2} and {3} characters long..
/// Looks up a localized string similar to Could not find a valid configuration for the &apos;{0}&apos; WebHook receiver, instance &apos;{1}&apos;. The value must be at least {2} characters long..
/// </summary>
internal static string Security_BadSecret {
get {


@ -1,5 +1,6 @@
<Project>
<Import Project="..\Directory.Build.props" />
<!-- Chain up to the next file (can be copy-pasted to either Directory.Build.props or Directory.Build.targets) -->
<Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory).., '$(MSBuildThisFileName)$(MSBuildThisFileExtension)'))\$(MSBuildThisFileName)$(MSBuildThisFileExtension)" />
<PropertyGroup>
<DeveloperBuildTestTfms Condition=" '$(DeveloperBuildTestTfms)' == '' ">netcoreapp2.1</DeveloperBuildTestTfms>
@ -9,14 +10,12 @@
</PropertyGroup>
<PropertyGroup>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<RootNamespace>Microsoft.AspNetCore.WebHooks</RootNamespace>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Testing" Version="$(MicrosoftExtensionsLoggingTestingPackageVersion)" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="$(MicrosoftNETTestSdkPackageVersion)" />
<PackageReference Include="Moq" Version="$(MoqPackageVersion)" />
<PackageReference Include="xunit" Version="$(XunitPackageVersion)" />
<PackageReference Include="xunit.analyzers" Version="$(XunitAnalyzersPackageVersion)" />

Some files were not shown because too many files changed in this diff.