Merge pull request #226 from dotnet/darc-main-4d08d89e-e7bb-4c7b-a796-a914af7f0743

[main] Update dependencies from dotnet/arcade
Marc Paine 2022-10-03 13:05:46 -07:00 committed by GitHub
Parents 3ac54972cd 30d6580cb9
Commit 37de204be6
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
44 changed files with 1063 additions and 396 deletions

View File

@ -3,9 +3,9 @@
<ProductDependencies>
</ProductDependencies>
<ToolsetDependencies>
<Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="7.0.0-beta.22124.1">
<Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="8.0.0-beta.22480.2">
<Uri>https://github.com/dotnet/arcade</Uri>
<Sha>eac1a3f1eb7404c0438664381b58d7238600aafc</Sha>
<Sha>60e9ab3c31d68167f8dac5b8e2c536deb12ef737</Sha>
</Dependency>
</ToolsetDependencies>
</Dependencies>

View File

@ -146,22 +146,22 @@ $userName = "dn-bot"
# Insert credential nodes for Maestro's private feeds
InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -Password $Password
# 3.1 uses a different feed url format so it's handled differently here
$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']")
if ($dotnet31Source -ne $null) {
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
}
$dotnet5Source = $sources.SelectSingleNode("add[@key='dotnet5']")
if ($dotnet5Source -ne $null) {
AddPackageSource -Sources $sources -SourceName "dotnet5-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
AddPackageSource -Sources $sources -SourceName "dotnet5-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
}
$dotnetVersions = @('5','6','7')
$dotnet6Source = $sources.SelectSingleNode("add[@key='dotnet6']")
if ($dotnet6Source -ne $null) {
AddPackageSource -Sources $sources -SourceName "dotnet6-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
AddPackageSource -Sources $sources -SourceName "dotnet6-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
foreach ($dotnetVersion in $dotnetVersions) {
$feedPrefix = "dotnet" + $dotnetVersion;
$dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']")
if ($dotnetSource -ne $null) {
AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
}
}
$doc.Save($filename)

View File

@ -105,53 +105,33 @@ if [ "$?" == "0" ]; then
PackageSources+=('dotnet3.1-internal-transport')
fi
# Ensure dotnet5-internal and dotnet5-internal-transport are in the packageSources if the public dotnet5 feeds are present
grep -i "<add key=\"dotnet5\"" $ConfigFile
if [ "$?" == "0" ]; then
grep -i "<add key=\"dotnet5-internal\"" $ConfigFile
if [ "$?" != "0" ]; then
echo "Adding dotnet5-internal to the packageSources."
PackageSourcesNodeFooter="</packageSources>"
PackageSourceTemplate="${TB}<add key=\"dotnet5-internal\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal/nuget/v2\" />"
DotNetVersions=('5' '6' '7')
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
for DotNetVersion in ${DotNetVersions[@]} ; do
FeedPrefix="dotnet${DotNetVersion}";
grep -i "<add key=\"$FeedPrefix\"" $ConfigFile
if [ "$?" == "0" ]; then
grep -i "<add key=\"$FeedPrefix-internal\"" $ConfigFile
if [ "$?" != "0" ]; then
echo "Adding $FeedPrefix-internal to the packageSources."
PackageSourcesNodeFooter="</packageSources>"
PackageSourceTemplate="${TB}<add key=\"$FeedPrefix-internal\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal/nuget/v2\" />"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
fi
PackageSources+=("$FeedPrefix-internal")
grep -i "<add key=\"$FeedPrefix-internal-transport\">" $ConfigFile
if [ "$?" != "0" ]; then
echo "Adding $FeedPrefix-internal-transport to the packageSources."
PackageSourcesNodeFooter="</packageSources>"
PackageSourceTemplate="${TB}<add key=\"$FeedPrefix-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal-transport/nuget/v2\" />"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
fi
PackageSources+=("$FeedPrefix-internal-transport")
fi
PackageSources+=('dotnet5-internal')
grep -i "<add key=\"dotnet5-internal-transport\">" $ConfigFile
if [ "$?" != "0" ]; then
echo "Adding dotnet5-internal-transport to the packageSources."
PackageSourcesNodeFooter="</packageSources>"
PackageSourceTemplate="${TB}<add key=\"dotnet5-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v2\" />"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
fi
PackageSources+=('dotnet5-internal-transport')
fi
# Ensure dotnet6-internal and dotnet6-internal-transport are in the packageSources if the public dotnet6 feeds are present
grep -i "<add key=\"dotnet6\"" $ConfigFile
if [ "$?" == "0" ]; then
grep -i "<add key=\"dotnet6-internal\"" $ConfigFile
if [ "$?" != "0" ]; then
echo "Adding dotnet6-internal to the packageSources."
PackageSourcesNodeFooter="</packageSources>"
PackageSourceTemplate="${TB}<add key=\"dotnet6-internal\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal/nuget/v2\" />"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
fi
PackageSources+=('dotnet6-internal')
grep -i "<add key=\"dotnet6-internal-transport\">" $ConfigFile
if [ "$?" != "0" ]; then
echo "Adding dotnet6-internal-transport to the packageSources."
PackageSourcesNodeFooter="</packageSources>"
PackageSourceTemplate="${TB}<add key=\"dotnet6-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet6-internal-transport/nuget/v2\" />"
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
fi
PackageSources+=('dotnet6-internal-transport')
fi
done
# I want things split line by line
PrevIFS=$IFS
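For reference, the version loop above reduces to one sed insertion per internal feed. The standalone sketch below illustrates that technique only; the config path, the chosen feed prefix, and the reliance on GNU sed's \n handling in the replacement are assumptions for illustration, not taken from this change.
ConfigFile=NuGet.config   # hypothetical config path
FeedPrefix=dotnet7        # one of the entries in $DotNetVersions
Footer="</packageSources>"
Entry="    <add key=\"$FeedPrefix-internal\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal/nuget/v2\" />"
# Insert the internal feed entry just above the closing tag, keeping a .bak copy of the original (GNU sed).
sed -i.bak "s|$Footer|$Entry\n$Footer|" "$ConfigFile"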

View File

@ -26,6 +26,7 @@ Param(
[string] $runtimeSourceFeed = '',
[string] $runtimeSourceFeedKey = '',
[switch] $excludePrereleaseVS,
[switch] $nativeToolsOnMachine,
[switch] $help,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
)
@ -67,6 +68,7 @@ function Print-Usage() {
Write-Host " -warnAsError <value> Sets warnaserror msbuild parameter ('true' or 'false')"
Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio"
Write-Host " -nativeToolsOnMachine Sets the native tools on machine environment variable (indicating that the script should use native tools on machine)"
Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild."
@ -146,6 +148,9 @@ try {
$nodeReuse = $false
}
if ($nativeToolsOnMachine) {
$env:NativeToolsOnMachine = $true
}
if ($restore) {
InitializeNativeTools
}

View File

@ -19,6 +19,9 @@ usage()
echo "Actions:"
echo " --restore Restore dependencies (short: -r)"
echo " --build Build solution (short: -b)"
echo " --sourceBuild Source-build the solution (short: -sb)"
echo " Will additionally trigger the following actions: --restore, --build, --pack"
echo " If --configuration is not set explicitly, will also set it to 'Release'"
echo " --rebuild Rebuild solution"
echo " --test Run all unit tests in the solution (short: -t)"
echo " --integrationTest Run all integration tests in the solution"
@ -55,6 +58,7 @@ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
restore=false
build=false
source_build=false
rebuild=false
test=false
integration_test=false
@ -73,7 +77,7 @@ exclude_ci_binary_log=false
pipelines_log=false
projects=''
configuration='Debug'
configuration=''
prepare_machine=false
verbosity='minimal'
runtime_source_feed=''
@ -119,6 +123,12 @@ while [[ $# > 0 ]]; do
-pack)
pack=true
;;
-sourcebuild|-sb)
build=true
source_build=true
restore=true
pack=true
;;
-test|-t)
test=true
;;
@ -168,6 +178,10 @@ while [[ $# > 0 ]]; do
shift
done
if [[ -z "$configuration" ]]; then
if [[ "$source_build" = true ]]; then configuration="Release"; else configuration="Debug"; fi
fi
if [[ "$ci" == true ]]; then
pipelines_log=true
node_reuse=false
@ -205,6 +219,7 @@ function Build {
/p:RepoRoot="$repo_root" \
/p:Restore=$restore \
/p:Build=$build \
/p:ArcadeBuildFromSource=$source_build \
/p:Rebuild=$rebuild \
/p:Test=$test \
/p:Pack=$pack \
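As a usage note for the new --sourceBuild/-sb switch documented above, an invocation could look like the following sketch; the eng/common/build.sh path is an assumption, only the flag behaviour comes from this change.
# --sourceBuild implies --restore, --build and --pack, and defaults the configuration to Release
./eng/common/build.sh --sourceBuild
# An explicitly passed configuration still takes precedence over the Release default
./eng/common/build.sh --sourceBuild --configuration Debug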

View File

@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse

View File

@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse

View File

@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse

View File

@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse

View File

@ -4,22 +4,27 @@ set -e
usage()
{
echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [--skipunmount] --rootfsdir <directory>]"
echo "BuildArch can be: arm(default), armel, arm64, x86"
echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir <directory>]"
echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86"
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine, alpine3.13 or alpine3.14. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
echo " for FreeBSD can be: freebsd12, freebsd13"
echo " for illumos can be: illumos."
echo " for illumos can be: illumos"
echo " for Haiku can be: haiku."
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
echo "llvmx[.y] - optional, LLVM version for LLVM related packages."
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
echo "--jobs N - optional, restrict to N jobs."
exit 1
}
__CodeName=xenial
__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__InitialDir=$PWD
__BuildArch=arm
__AlpineArch=armv7
__FreeBSDArch=arm
__FreeBSDMachineArch=armv7
__IllumosArch=arm7
__QEMUArch=arm
__UbuntuArch=armhf
__UbuntuRepo="http://ports.ubuntu.com/"
@ -39,7 +44,7 @@ __AlpinePackages+=" libedit"
# symlinks fixer
__UbuntuPackages+=" symlinks"
# CoreCLR and CoreFX dependencies
# runtime dependencies
__UbuntuPackages+=" libicu-dev"
__UbuntuPackages+=" liblttng-ust-dev"
__UbuntuPackages+=" libunwind8-dev"
@ -48,8 +53,9 @@ __AlpinePackages+=" gettext-dev"
__AlpinePackages+=" icu-dev"
__AlpinePackages+=" libunwind-dev"
__AlpinePackages+=" lttng-ust-dev"
__AlpinePackages+=" compiler-rt-static"
# CoreFX dependencies
# runtime libraries' dependencies
__UbuntuPackages+=" libcurl4-openssl-dev"
__UbuntuPackages+=" libkrb5-dev"
__UbuntuPackages+=" libssl-dev"
@ -70,26 +76,38 @@ __FreeBSDPackages+=" openssl"
__FreeBSDPackages+=" krb5"
__FreeBSDPackages+=" terminfo-db"
__IllumosPackages="icu-64.2nb2"
__IllumosPackages+=" mit-krb5-1.16.2nb4"
__IllumosPackages+=" openssl-1.1.1e"
__IllumosPackages+=" zlib-1.2.11"
__IllumosPackages="icu"
__IllumosPackages+=" mit-krb5"
__IllumosPackages+=" openssl"
__IllumosPackages+=" zlib"
__HaikuPackages="gmp"
__HaikuPackages+=" gmp_devel"
__HaikuPackages+=" krb5"
__HaikuPackages+=" krb5_devel"
__HaikuPackages+=" libiconv"
__HaikuPackages+=" libiconv_devel"
__HaikuPackages+=" llvm12_libunwind"
__HaikuPackages+=" llvm12_libunwind_devel"
__HaikuPackages+=" mpfr"
__HaikuPackages+=" mpfr_devel"
# ML.NET dependencies
__UbuntuPackages+=" libomp5"
__UbuntuPackages+=" libomp-dev"
__Keyring=
__UseMirror=0
__UnprocessedBuildArgs=
while :; do
if [ $# -le 0 ]; then
if [[ "$#" -le 0 ]]; then
break
fi
lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
case $lowerI in
-?|-h|--help)
-\?|-h|--help)
usage
exit 1
;;
@ -99,6 +117,20 @@ while :; do
__AlpineArch=armv7
__QEMUArch=arm
;;
arm64)
__BuildArch=arm64
__UbuntuArch=arm64
__AlpineArch=aarch64
__QEMUArch=aarch64
__FreeBSDArch=arm64
__FreeBSDMachineArch=aarch64
;;
armel)
__BuildArch=armel
__UbuntuArch=armel
__UbuntuRepo="http://ftp.debian.org/debian/"
__CodeName=jessie
;;
armv6)
__BuildArch=armv6
__UbuntuArch=armhf
@ -106,19 +138,10 @@ while :; do
__UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/"
__CodeName=buster
__LLDB_Package="liblldb-6.0-dev"
__Keyring="/usr/share/keyrings/raspbian-archive-keyring.gpg"
;;
arm64)
__BuildArch=arm64
__UbuntuArch=arm64
__AlpineArch=aarch64
__QEMUArch=aarch64
;;
armel)
__BuildArch=armel
__UbuntuArch=armel
__UbuntuRepo="http://ftp.debian.org/debian/"
__CodeName=jessie
if [[ -e "/usr/share/keyrings/raspbian-archive-keyring.gpg" ]]; then
__Keyring="--keyring /usr/share/keyrings/raspbian-archive-keyring.gpg"
fi
;;
ppc64le)
__BuildArch=ppc64le
@ -129,6 +152,18 @@ while :; do
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp5//')
unset __LLDB_Package
;;
riscv64)
__BuildArch=riscv64
__UbuntuArch=riscv64
__UbuntuRepo="http://deb.debian.org/debian-ports"
__CodeName=sid
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libunwind8-dev//')
unset __LLDB_Package
if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then
__Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring"
fi
;;
s390x)
__BuildArch=s390x
__UbuntuArch=s390x
@ -138,47 +173,69 @@ while :; do
__UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp5//')
unset __LLDB_Package
;;
x64)
__BuildArch=x64
__UbuntuArch=amd64
__FreeBSDArch=amd64
__FreeBSDMachineArch=amd64
__illumosArch=x86_64
__UbuntuRepo=
;;
x86)
__BuildArch=x86
__UbuntuArch=i386
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
;;
lldb3.6)
__LLDB_Package="lldb-3.6-dev"
;;
lldb3.8)
__LLDB_Package="lldb-3.8-dev"
;;
lldb3.9)
__LLDB_Package="liblldb-3.9-dev"
;;
lldb4.0)
__LLDB_Package="liblldb-4.0-dev"
;;
lldb5.0)
__LLDB_Package="liblldb-5.0-dev"
;;
lldb6.0)
__LLDB_Package="liblldb-6.0-dev"
lldb*)
version="${lowerI/lldb/}"
parts=(${version//./ })
# for versions > 6.0, lldb has dropped the minor version
if [[ "${parts[0]}" -gt 6 ]]; then
version="${parts[0]}"
fi
__LLDB_Package="liblldb-${version}-dev"
;;
no-lldb)
unset __LLDB_Package
;;
llvm*)
version="${lowerI/llvm/}"
parts=(${version//./ })
__LLVM_MajorVersion="${parts[0]}"
__LLVM_MinorVersion="${parts[1]}"
# for versions > 6.0, llvm has dropped the minor version
if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then
__LLVM_MinorVersion=0;
fi
;;
xenial) # Ubuntu 16.04
if [ "$__CodeName" != "jessie" ]; then
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=xenial
fi
;;
zesty) # Ubuntu 17.04
if [ "$__CodeName" != "jessie" ]; then
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=zesty
fi
;;
bionic) # Ubuntu 18.04
if [ "$__CodeName" != "jessie" ]; then
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=bionic
fi
;;
focal) # Ubuntu 20.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=focal
fi
;;
jammy) # Ubuntu 22.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=jammy
fi
;;
jessie) # Debian 8
__CodeName=jessie
__UbuntuRepo="http://ftp.debian.org/debian/"
@ -194,11 +251,6 @@ while :; do
__LLDB_Package="liblldb-6.0-dev"
;;
tizen)
if [ "$__BuildArch" != "arm" ] && [ "$__BuildArch" != "armel" ] && [ "$__BuildArch" != "arm64" ]; then
echo "Tizen is available only for arm, armel and arm64."
usage;
exit 1;
fi
__CodeName=
__UbuntuRepo=
__Tizen=tizen
@ -217,18 +269,20 @@ while :; do
;;
freebsd12)
__CodeName=freebsd
__BuildArch=x64
__SkipUnmount=1
;;
freebsd13)
__CodeName=freebsd
__FreeBSDBase="13.0-RELEASE"
__FreeBSDABI="13"
__BuildArch=x64
__SkipUnmount=1
;;
illumos)
__CodeName=illumos
__SkipUnmount=1
;;
haiku)
__CodeName=haiku
__BuildArch=x64
__SkipUnmount=1
;;
@ -237,11 +291,15 @@ while :; do
;;
--rootfsdir|-rootfsdir)
shift
__RootfsDir=$1
__RootfsDir="$1"
;;
--use-mirror)
__UseMirror=1
;;
--use-jobs)
shift
MAXJOBS=$1
;;
*)
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
;;
@ -250,77 +308,76 @@ while :; do
shift
done
if [ -e "$__Keyring" ]; then
__Keyring="--keyring=$__Keyring"
else
__Keyring=""
fi
if [ "$__BuildArch" == "armel" ]; then
if [[ "$__BuildArch" == "armel" ]]; then
__LLDB_Package="lldb-3.5-dev"
fi
__UbuntuPackages+=" ${__LLDB_Package:-}"
if [ -z "$__RootfsDir" ] && [ ! -z "$ROOTFS_DIR" ]; then
__RootfsDir=$ROOTFS_DIR
if [[ -n "$__LLVM_MajorVersion" ]]; then
__UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev"
fi
if [ -z "$__RootfsDir" ]; then
if [[ -z "$__RootfsDir" && -n "$ROOTFS_DIR" ]]; then
__RootfsDir="$ROOTFS_DIR"
fi
if [[ -z "$__RootfsDir" ]]; then
__RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch"
fi
if [ -d "$__RootfsDir" ]; then
if [ $__SkipUnmount == 0 ]; then
umount $__RootfsDir/* || true
if [[ -d "$__RootfsDir" ]]; then
if [[ "$__SkipUnmount" == "0" ]]; then
umount "$__RootfsDir"/* || true
fi
rm -rf $__RootfsDir
rm -rf "$__RootfsDir"
fi
mkdir -p $__RootfsDir
mkdir -p "$__RootfsDir"
__RootfsDir="$( cd "$__RootfsDir" && pwd )"
if [[ "$__CodeName" == "alpine" ]]; then
__ApkToolsVersion=2.9.1
__ApkToolsDir=$(mktemp -d)
wget https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -P $__ApkToolsDir
tar -xf $__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -C $__ApkToolsDir
mkdir -p $__RootfsDir/usr/bin
cp -v /usr/bin/qemu-$__QEMUArch-static $__RootfsDir/usr/bin
__ApkToolsDir="$(mktemp -d)"
wget "https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz" -P "$__ApkToolsDir"
tar -xf "$__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz" -C "$__ApkToolsDir"
mkdir -p "$__RootfsDir"/usr/bin
cp -v "/usr/bin/qemu-$__QEMUArch-static" "$__RootfsDir/usr/bin"
$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main \
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community \
-U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
"$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk" \
-X "http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community" \
-U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" --initdb \
add $__AlpinePackages
rm -r $__ApkToolsDir
rm -r "$__ApkToolsDir"
elif [[ "$__CodeName" == "freebsd" ]]; then
mkdir -p $__RootfsDir/usr/local/etc
JOBS="$(getconf _NPROCESSORS_ONLN)"
wget -O - https://download.freebsd.org/ftp/releases/amd64/${__FreeBSDBase}/base.txz | tar -C $__RootfsDir -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
echo "ABI = \"FreeBSD:${__FreeBSDABI}:amd64\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > ${__RootfsDir}/usr/local/etc/pkg.conf
echo "FreeBSD: { url: "pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > ${__RootfsDir}/etc/pkg/FreeBSD.conf
mkdir -p $__RootfsDir/tmp
mkdir -p "$__RootfsDir"/usr/local/etc
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
mkdir -p "$__RootfsDir"/tmp
# get and build package manager
wget -O - https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz | tar -C $__RootfsDir/tmp -zxf -
cd $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# needed for install to succeed
mkdir -p $__RootfsDir/host/etc
./autogen.sh && ./configure --prefix=$__RootfsDir/host && make -j "$JOBS" && make install
rm -rf $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
mkdir -p "$__RootfsDir"/host/etc
./autogen.sh && ./configure --prefix="$__RootfsDir"/host && make -j "$JOBS" && make install
rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# install packages we need.
INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf update
INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
elif [[ "$__CodeName" == "illumos" ]]; then
mkdir "$__RootfsDir/tmp"
pushd "$__RootfsDir/tmp"
JOBS="$(getconf _NPROCESSORS_ONLN)"
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
echo "Downloading sysroot."
wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
echo "Building binutils. Please wait.."
wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
mkdir build-binutils && cd build-binutils
../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="x86_64-sun-solaris2.10" --program-prefix="x86_64-illumos-" --with-sysroot="$__RootfsDir"
../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
make -j "$JOBS" && make install && cd ..
echo "Building gcc. Please wait.."
wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
@ -330,7 +387,7 @@ elif [[ "$__CodeName" == "illumos" ]]; then
CFLAGS_FOR_TARGET="-fPIC"
export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET
mkdir build-gcc && cd build-gcc
../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="x86_64-sun-solaris2.10" --program-prefix="x86_64-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
--with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \
--disable-libquadmath-support --disable-shared --enable-tls
make -j "$JOBS" && make install && cd ..
@ -338,14 +395,18 @@ elif [[ "$__CodeName" == "illumos" ]]; then
if [[ "$__UseMirror" == 1 ]]; then
BaseUrl=http://pkgsrc.smartos.skylime.net
fi
BaseUrl="$BaseUrl"/packages/SmartOS/2020Q1/x86_64/All
BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All"
echo "Downloading manifest"
wget "$BaseUrl"
echo "Downloading dependencies."
read -ra array <<<"$__IllumosPackages"
for package in "${array[@]}"; do
echo "Installing $package..."
echo "Installing '$package'"
package="$(grep ">$package-[0-9]" All | sed -En 's/.*href="(.*)\.tgz".*/\1/p')"
echo "Resolved name '$package'"
wget "$BaseUrl"/"$package".tgz
ar -x "$package".tgz
tar --skip-old-files -xzf "$package".tmp.tgz -C "$__RootfsDir" 2>/dev/null
tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null
done
echo "Cleaning up temporary files."
popd
@ -356,26 +417,90 @@ elif [[ "$__CodeName" == "illumos" ]]; then
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
elif [[ -n $__CodeName ]]; then
qemu-debootstrap $__Keyring --arch $__UbuntuArch $__CodeName $__RootfsDir $__UbuntuRepo
cp $__CrossDir/$__BuildArch/sources.list.$__CodeName $__RootfsDir/etc/apt/sources.list
chroot $__RootfsDir apt-get update
chroot $__RootfsDir apt-get -f -y install
chroot $__RootfsDir apt-get -y install $__UbuntuPackages
chroot $__RootfsDir symlinks -cr /usr
chroot $__RootfsDir apt-get clean
elif [[ "$__CodeName" == "haiku" ]]; then
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
if [ $__SkipUnmount == 0 ]; then
umount $__RootfsDir/* || true
echo "Building Haiku sysroot for x86_64"
mkdir -p "$__RootfsDir/tmp"
cd "$__RootfsDir/tmp"
git clone -b hrev56235 https://review.haiku-os.org/haiku
git clone -b btrev43195 https://review.haiku-os.org/buildtools
cd "$__RootfsDir/tmp/buildtools" && git checkout 7487388f5110021d400b9f3b88e1a7f310dc066d
# Fetch some unmerged patches
cd "$__RootfsDir/tmp/haiku"
## Add development build profile (slimmer than nightly)
git fetch origin refs/changes/64/4164/1 && git -c commit.gpgsign=false cherry-pick FETCH_HEAD
# Build jam
cd "$__RootfsDir/tmp/buildtools/jam"
make
# Configure cross tools
echo "Building cross-compiler"
mkdir -p "$__RootfsDir/generated"
cd "$__RootfsDir/generated"
"$__RootfsDir/tmp/haiku/configure" -j"$JOBS" --sysroot "$__RootfsDir" --cross-tools-source "$__RootfsDir/tmp/buildtools" --build-cross-tools x86_64
# Build Haiku packages
echo "Building Haiku"
echo 'HAIKU_BUILD_PROFILE = "development-raw" ;' > UserProfileConfig
"$__RootfsDir/tmp/buildtools/jam/jam0" -j"$JOBS" -q '<build>package' '<repository>Haiku'
BaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg"
# Download additional packages
echo "Downloading additional required packages"
read -ra array <<<"$__HaikuPackages"
for package in "${array[@]}"; do
echo "Downloading $package..."
# API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60
# The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598
hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_x86_64","versionType":"LATEST","naturalLanguageCode":"en"}' \
--header='Content-Type:application/json' "$BaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
wget -P "$__RootfsDir/generated/download" "$hpkgDownloadUrl"
done
# Setup the sysroot
echo "Setting up sysroot and extracting needed packages"
mkdir -p "$__RootfsDir/boot/system"
for file in "$__RootfsDir/generated/objects/haiku/x86_64/packaging/packages/"*.hpkg; do
"$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
done
for file in "$__RootfsDir/generated/download/"*.hpkg; do
"$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
done
# Cleaning up temporary files
echo "Cleaning up temporary files"
rm -rf "$__RootfsDir/tmp"
for name in "$__RootfsDir/generated/"*; do
if [[ "$name" =~ "cross-tools-" ]]; then
: # Keep the cross-compiler
else
rm -rf "$name"
fi
done
elif [[ -n "$__CodeName" ]]; then
qemu-debootstrap $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list"
chroot "$__RootfsDir" apt-get update
chroot "$__RootfsDir" apt-get -f -y install
chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages
chroot "$__RootfsDir" symlinks -cr /usr
chroot "$__RootfsDir" apt-get clean
if [[ "$__SkipUnmount" == "0" ]]; then
umount "$__RootfsDir"/* || true
fi
if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then
pushd $__RootfsDir
patch -p1 < $__CrossDir/$__BuildArch/armel.jessie.patch
pushd "$__RootfsDir"
patch -p1 < "$__CrossDir/$__BuildArch/armel.jessie.patch"
popd
fi
elif [[ "$__Tizen" == "tizen" ]]; then
ROOTFS_DIR=$__RootfsDir $__CrossDir/$__BuildArch/tizen-build-rootfs.sh
ROOTFS_DIR="$__RootfsDir" "$__CrossDir/$__BuildArch/tizen-build-rootfs.sh"
else
echo "Unsupported target platform."
usage;
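To tie the new arguments together, a typical invocation of the rootfs script might look like the sketch below; the script path and target directories are illustrative assumptions, and the run generally needs root plus debootstrap/qemu support on the host.
# Build an arm64 Ubuntu 22.04 (jammy) rootfs with LLVM 14 packages under an explicit directory (run as root)
./eng/common/cross/build-rootfs.sh arm64 jammy llvm14 --rootfsdir /tmp/rootfs-arm64
# Equivalently, the target directory can come from the ROOTFS_DIR environment variable
ROOTFS_DIR=/tmp/rootfs-arm64 ./eng/common/cross/build-rootfs.sh arm64 jammy llvm14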

View File

@ -0,0 +1 @@
deb http://deb.debian.org/debian-ports sid main

View File

@ -7,6 +7,8 @@ if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version)
elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc)
set(CMAKE_SYSTEM_NAME SunOS)
set(ILLUMOS 1)
elseif(EXISTS ${CROSS_ROOTFS}/boot/system/develop/headers/config/HaikuConfig.h)
set(CMAKE_SYSTEM_NAME Haiku)
else()
set(CMAKE_SYSTEM_NAME Linux)
set(LINUX 1)
@ -19,13 +21,7 @@ elseif(EXISTS ${CROSS_ROOTFS}/android_platform)
set(ANDROID 1)
endif()
if(TARGET_ARCH_NAME STREQUAL "armel")
set(CMAKE_SYSTEM_PROCESSOR armv7l)
set(TOOLCHAIN "arm-linux-gnueabi")
if(TIZEN)
set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "arm")
if(TARGET_ARCH_NAME STREQUAL "arm")
set(CMAKE_SYSTEM_PROCESSOR armv7l)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv7-alpine-linux-musleabihf)
set(TOOLCHAIN "armv7-alpine-linux-musleabihf")
@ -37,6 +33,24 @@ elseif(TARGET_ARCH_NAME STREQUAL "arm")
if(TIZEN)
set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf/9.2.0")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "arm64")
set(CMAKE_SYSTEM_PROCESSOR aarch64)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl)
set(TOOLCHAIN "aarch64-alpine-linux-musl")
elseif(LINUX)
set(TOOLCHAIN "aarch64-linux-gnu")
if(TIZEN)
set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0")
endif()
elseif(FREEBSD)
set(triple "aarch64-unknown-freebsd12")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "armel")
set(CMAKE_SYSTEM_PROCESSOR armv7l)
set(TOOLCHAIN "arm-linux-gnueabi")
if(TIZEN)
set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "armv6")
set(CMAKE_SYSTEM_PROCESSOR armv6l)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
@ -44,33 +58,37 @@ elseif(TARGET_ARCH_NAME STREQUAL "armv6")
else()
set(TOOLCHAIN "arm-linux-gnueabihf")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "arm64")
set(CMAKE_SYSTEM_PROCESSOR aarch64)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl)
set(TOOLCHAIN "aarch64-alpine-linux-musl")
else()
set(TOOLCHAIN "aarch64-linux-gnu")
endif()
if(TIZEN)
set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "ppc64le")
set(CMAKE_SYSTEM_PROCESSOR ppc64le)
set(TOOLCHAIN "powerpc64le-linux-gnu")
elseif(TARGET_ARCH_NAME STREQUAL "riscv64")
set(CMAKE_SYSTEM_PROCESSOR riscv64)
set(TOOLCHAIN "riscv64-linux-gnu")
elseif(TARGET_ARCH_NAME STREQUAL "s390x")
set(CMAKE_SYSTEM_PROCESSOR s390x)
set(TOOLCHAIN "s390x-linux-gnu")
elseif(TARGET_ARCH_NAME STREQUAL "x64")
set(CMAKE_SYSTEM_PROCESSOR x86_64)
if(LINUX)
set(TOOLCHAIN "x86_64-linux-gnu")
if(TIZEN)
set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu/9.2.0")
endif()
elseif(FREEBSD)
set(triple "x86_64-unknown-freebsd12")
elseif(ILLUMOS)
set(TOOLCHAIN "x86_64-illumos")
elseif(HAIKU)
set(TOOLCHAIN "x64_64-unknown-haiku")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "x86")
set(CMAKE_SYSTEM_PROCESSOR i686)
set(TOOLCHAIN "i686-linux-gnu")
elseif (FREEBSD)
set(CMAKE_SYSTEM_PROCESSOR "x86_64")
set(triple "x86_64-unknown-freebsd12")
elseif (ILLUMOS)
set(CMAKE_SYSTEM_PROCESSOR "x86_64")
set(TOOLCHAIN "x86_64-illumos")
if(TIZEN)
set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu/9.2.0")
endif()
else()
message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only armel, arm, armv6, arm64, ppc64le, s390x and x86 are supported!")
message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, ppc64le, riscv64, s390x, x64 and x86 are supported!")
endif()
if(DEFINED ENV{TOOLCHAIN})
@ -91,6 +109,10 @@ if(TIZEN)
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/aarch64-tizen-linux-gnu)
endif()
if(TARGET_ARCH_NAME STREQUAL "x86")
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/i586-tizen-linux-gnu)
endif()
endif()
if(ANDROID)
@ -152,6 +174,41 @@ elseif(ILLUMOS)
set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
elseif(HAIKU)
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
set(TOOLSET_PREFIX ${TOOLCHAIN}-)
function(locate_toolchain_exec exec var)
string(TOUPPER ${exec} EXEC_UPPERCASE)
if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
return()
endif()
set(SEARCH_PATH "${CROSS_ROOTFS}/generated/cross-tools-x86_64/bin")
find_program(EXEC_LOCATION_${exec}
PATHS ${SEARCH_PATH}
NAMES
"${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
"${TOOLSET_PREFIX}${exec}")
if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
endif()
set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
endfunction()
set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
locate_toolchain_exec(gcc CMAKE_C_COMPILER)
locate_toolchain_exec(g++ CMAKE_CXX_COMPILER)
set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
# let CMake set up the correct search paths
include(Platform/Haiku)
else()
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
@ -197,6 +254,13 @@ elseif(TARGET_ARCH_NAME STREQUAL "arm64")
endif()
elseif(TARGET_ARCH_NAME STREQUAL "x86")
add_toolchain_linker_flag(-m32)
if(TIZEN)
add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
endif()
elseif(ILLUMOS)
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64")
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/amd64/lib")
@ -204,7 +268,7 @@ endif()
# Specify compile options
if((TARGET_ARCH_NAME MATCHES "^(arm|armv6|armel|arm64|ppc64le|s390x)$" AND NOT ANDROID) OR ILLUMOS)
if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|ppc64le|riscv64|s390x)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU)
set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
@ -232,7 +296,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "x86")
endif()
if(TIZEN)
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$")
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64|x86)$")
add_compile_options(-Wno-deprecated-declarations) # compile-time option
add_compile_options(-D__extern_always_inline=inline) # compile-time option
endif()

View File

@ -0,0 +1,11 @@
deb http://archive.ubuntu.com/ubuntu/ focal main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ focal main restricted universe
deb http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe
deb http://archive.ubuntu.com/ubuntu/ focal-backports main restricted
deb-src http://archive.ubuntu.com/ubuntu/ focal-backports main restricted
deb http://archive.ubuntu.com/ubuntu/ focal-security main restricted universe multiverse
deb-src http://archive.ubuntu.com/ubuntu/ focal-security main restricted universe multiverse

View File

@ -0,0 +1,11 @@
deb http://archive.ubuntu.com/ubuntu/ jammy main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ jammy main restricted universe
deb http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe
deb-src http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe
deb http://archive.ubuntu.com/ubuntu/ jammy-backports main restricted
deb-src http://archive.ubuntu.com/ubuntu/ jammy-backports main restricted
deb http://archive.ubuntu.com/ubuntu/ jammy-security main restricted universe multiverse
deb-src http://archive.ubuntu.com/ubuntu/ jammy-security main restricted universe multiverse

View File

@ -0,0 +1,35 @@
#!/usr/bin/env bash
set -e
__X86_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__TIZEN_CROSSDIR="$__X86_CrossDir/tizen"
if [[ -z "$ROOTFS_DIR" ]]; then
echo "ROOTFS_DIR is not defined."
exit 1;
fi
TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
mkdir -p $TIZEN_TMP_DIR
# Download files
echo ">>Start downloading files"
VERBOSE=1 $__X86_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
echo "<<Finish downloading files"
echo ">>Start constructing Tizen rootfs"
TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
cd $ROOTFS_DIR
for f in $TIZEN_RPM_FILES; do
rpm2cpio $f | cpio -idm --quiet
done
echo "<<Finish constructing Tizen rootfs"
# Cleanup tmp
rm -rf $TIZEN_TMP_DIR
# Configure Tizen rootfs
echo ">>Start configuring Tizen rootfs"
ln -sfn asm-x86 ./usr/include/asm
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
echo "<<Finish configuring Tizen rootfs"

View File

@ -0,0 +1,170 @@
#!/usr/bin/env bash
set -e
if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
VERBOSE=0
fi
Log()
{
if [ $VERBOSE -ge $1 ]; then
echo ${@:2}
fi
}
Inform()
{
Log 1 -e "\x1B[0;34m$@\x1B[m"
}
Debug()
{
Log 2 -e "\x1B[0;32m$@\x1B[m"
}
Error()
{
>&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
}
Fetch()
{
URL=$1
FILE=$2
PROGRESS=$3
if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
CURL_OPT="--progress-bar"
else
CURL_OPT="--silent"
fi
curl $CURL_OPT $URL > $FILE
}
hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
TMPDIR=$1
if [ ! -d $TMPDIR ]; then
TMPDIR=./tizen_tmp
Debug "Create temporary directory : $TMPDIR"
mkdir -p $TMPDIR
fi
TIZEN_URL=http://download.tizen.org/snapshots/tizen
BUILD_XML=build.xml
REPOMD_XML=repomd.xml
PRIMARY_XML=primary.xml
TARGET_URL="http://__not_initialized"
Xpath_get()
{
XPATH_RESULT=''
XPATH=$1
XML_FILE=$2
RESULT=$(xmllint --xpath $XPATH $XML_FILE)
if [[ -z ${RESULT// } ]]; then
Error "Can not find target from $XML_FILE"
Debug "Xpath = $XPATH"
exit 1
fi
XPATH_RESULT=$RESULT
}
fetch_tizen_pkgs_init()
{
TARGET=$1
PROFILE=$2
Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
mkdir -p $TMP_PKG_DIR
PKG_URL=$TIZEN_URL/$PROFILE/latest
BUILD_XML_URL=$PKG_URL/$BUILD_XML
TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
Fetch $BUILD_XML_URL $TMP_BUILD
Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
Xpath_get $TARGET_XPATH $TMP_BUILD
TARGET_PATH=$XPATH_RESULT
TARGET_URL=$PKG_URL/$TARGET_PATH
REPOMD_URL=$TARGET_URL/repodata/repomd.xml
PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
Fetch $REPOMD_URL $TMP_REPOMD
Debug "fetch $REPOMD_URL to $TMP_REPOMD"
Xpath_get $PRIMARY_XPATH $TMP_REPOMD
PRIMARY_XML_PATH=$XPATH_RESULT
PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
Fetch $PRIMARY_URL $TMP_PRIMARYGZ
Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
gunzip $TMP_PRIMARYGZ
Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
}
fetch_tizen_pkgs()
{
ARCH=$1
PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
for pkg in ${@:2}
do
Inform "Fetching... $pkg"
XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
XPATH=${XPATH/_ARCH_/$ARCH}
Xpath_get $XPATH $TMP_PRIMARY
PKG_PATH=$XPATH_RESULT
XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
XPATH=${XPATH/_ARCH_/$ARCH}
Xpath_get $XPATH $TMP_PRIMARY
CHECKSUM=$XPATH_RESULT
PKG_URL=$TARGET_URL/$PKG_PATH
PKG_FILE=$(basename $PKG_PATH)
PKG_PATH=$TMPDIR/$PKG_FILE
Debug "Download $PKG_URL to $PKG_PATH"
Fetch $PKG_URL $PKG_PATH true
echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
if [ $? -ne 0 ]; then
Error "Fail to fetch $PKG_URL to $PKG_PATH"
Debug "Checksum = $CHECKSUM"
exit 1
fi
done
}
Inform "Initialize i686 base"
fetch_tizen_pkgs_init standard base
Inform "fetch common packages"
fetch_tizen_pkgs i686 gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
Inform "fetch coreclr packages"
fetch_tizen_pkgs i686 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
Inform "fetch corefx packages"
fetch_tizen_pkgs i686 libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
Inform "Initialize standard unified"
fetch_tizen_pkgs_init standard unified
Inform "fetch corefx packages"
fetch_tizen_pkgs i686 gssdp gssdp-devel tizen-release

View File

@ -0,0 +1,9 @@
diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
+++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
@@ -2,4 +2,4 @@
Use the shared library, but some functions are only in
the static library, so try that secondarily. */
OUTPUT_FORMAT(elf32-i386)
-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.2 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.2 ) )

4
eng/common/dotnet-install.sh vendored
View File

@ -52,7 +52,7 @@ done
# Use uname to determine what the CPU is, see https://en.wikipedia.org/wiki/Uname#Examples
cpuname=$(uname -m)
case $cpuname in
aarch64)
arm64|aarch64)
buildarch=arm64
;;
loongarch64)
@ -64,7 +64,7 @@ case $cpuname in
armv*l)
buildarch=arm
;;
i686)
i[3-6]86)
buildarch=x86
;;
*)
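For clarity on the widened patterns above, the self-contained check below shows how the new case arms classify common uname -m outputs; the values fed in are illustrative only.
for cpuname in arm64 aarch64 i386 i586 i686 armv7l; do
  case $cpuname in
    arm64|aarch64) buildarch=arm64 ;;   # macOS reports arm64, Linux reports aarch64
    i[3-6]86)      buildarch=x86 ;;     # now matches i386 through i686, not just i686
    armv*l)        buildarch=arm ;;
    *)             buildarch=unknown ;;
  esac
  echo "$cpuname -> $buildarch"
done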

View File

@ -10,9 +10,7 @@ Param(
Set-StrictMode -Version 2.0
$ErrorActionPreference = "Stop"
. $PSScriptRoot\tools.ps1
Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1')
. $PSScriptRoot\pipeline-logging-functions.ps1
$exclusionsFilePath = "$SourcesDirectory\eng\Localize\LocExclusions.json"
$exclusions = @{ Exclusions = @() }
@ -28,13 +26,15 @@ $jsonFiles = @()
$jsonTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\.template\.config\\localize\\.+\.en\.json" } # .NET templating pattern
$jsonTemplateFiles | ForEach-Object {
$null = $_.Name -Match "(.+)\.[\w-]+\.json" # matches '[filename].[langcode].json
$destinationFile = "$($_.Directory.FullName)\$($Matches.1).json"
$jsonFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
}
$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern
$wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them
$xlfFiles = @()
$allXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.xlf"
@ -46,7 +46,7 @@ if ($allXlfFiles) {
}
$langXlfFiles | ForEach-Object {
$null = $_.Name -Match "(.+)\.[\w-]+\.xlf" # matches '[filename].[langcode].xlf
$destinationFile = "$($_.Directory.FullName)\$($Matches.1).xlf"
$xlfFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
}
@ -59,10 +59,10 @@ $locJson = @{
LanguageSet = $LanguageSet
LocItems = @(
$locFiles | ForEach-Object {
$outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")"
$outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")"
$continue = $true
foreach ($exclusion in $exclusions.Exclusions) {
if ($outputPath.Contains($exclusion))
if ($_.FullName.Contains($exclusion))
{
$continue = $false
}
@ -79,8 +79,7 @@ $locJson = @{
CopyOption = "LangIDOnPath"
OutputPath = "$($_.Directory.Parent.FullName | Resolve-Path -Relative)\"
}
}
else {
} else {
return @{
SourceFile = $sourceFile
CopyOption = "LangIDOnName"
@ -90,6 +89,32 @@ $locJson = @{
}
}
)
},
@{
LanguageSet = $LanguageSet
CloneLanguageSet = "WiX_CloneLanguages"
LssFiles = @( "wxl_loc.lss" )
LocItems = @(
$wxlFiles | ForEach-Object {
$outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\"
$continue = $true
foreach ($exclusion in $exclusions.Exclusions) {
if ($_.FullName.Contains($exclusion))
{
$continue = $false
}
}
$sourceFile = ($_.FullName | Resolve-Path -Relative)
if ($continue)
{
return @{
SourceFile = $sourceFile
CopyOption = "LangIDOnPath"
OutputPath = $outputPath
}
}
}
)
}
)
}
@ -108,10 +133,10 @@ else {
if ((Get-FileHash "$SourcesDirectory\eng\Localize\LocProject-generated.json").Hash -ne (Get-FileHash "$SourcesDirectory\eng\Localize\LocProject.json").Hash) {
Write-PipelineTelemetryError -Category "OneLocBuild" -Message "Existing LocProject.json differs from generated LocProject.json. Download LocProject-generated.json and compare them."
exit 1
}
else {
Write-Host "Generated LocProject.json and current LocProject.json are identical."
}
}
}

View File

@ -2,6 +2,8 @@ Param(
[Parameter(Mandatory=$true)][string] $ManifestDirPath # Manifest directory where sbom will be placed
)
. $PSScriptRoot\pipeline-logging-functions.ps1
Write-Host "Creating dir $ManifestDirPath"
# create directory for sbom manifest to be placed
if (!(Test-Path -path $ManifestDirPath))

View File

@ -2,6 +2,18 @@
source="${BASH_SOURCE[0]}"
# resolve $SOURCE until the file is no longer a symlink
while [[ -h $source ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
. $scriptroot/pipeline-logging-functions.sh
manifest_dir=$1
if [ ! -d "$manifest_dir" ] ; then

View File

@ -31,6 +31,10 @@ Wait time between retry attempts in seconds
.PARAMETER GlobalJsonFile
File path to global.json file
.PARAMETER PathPromotion
Optional switch to either promote the native tools specified in global.json to the path (in Azure Pipelines)
or break the build if a native tool is not found on the path (on a local dev machine)
.NOTES
#>
[CmdletBinding(PositionalBinding=$false)]
@ -41,7 +45,8 @@ Param (
[switch] $Force = $False,
[int] $DownloadRetries = 5,
[int] $RetryWaitTimeInSeconds = 30,
[string] $GlobalJsonFile
[string] $GlobalJsonFile,
[switch] $PathPromotion
)
if (!$GlobalJsonFile) {
@ -77,53 +82,102 @@ try {
ConvertFrom-Json |
Select-Object -Expand 'native-tools' -ErrorAction SilentlyContinue
if ($NativeTools) {
$NativeTools.PSObject.Properties | ForEach-Object {
$ToolName = $_.Name
$ToolVersion = $_.Value
$LocalInstallerArguments = @{ ToolName = "$ToolName" }
$LocalInstallerArguments += @{ InstallPath = "$InstallBin" }
$LocalInstallerArguments += @{ BaseUri = "$BaseUri" }
$LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" }
$LocalInstallerArguments += @{ Version = "$ToolVersion" }
if ($PathPromotion -eq $True) {
if ($env:SYSTEM_TEAMPROJECT) { # check to see if we're in an Azure pipelines build
$NativeTools.PSObject.Properties | ForEach-Object {
$ToolName = $_.Name
$ToolVersion = $_.Value
$InstalledTools = @{}
if ($Verbose) {
$LocalInstallerArguments += @{ Verbose = $True }
if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) {
if ($ToolVersion -eq "latest") {
$ToolVersion = ""
}
$ArcadeToolsDirectory = "C:\arcade-tools"
if (-not (Test-Path $ArcadeToolsDirectory)) {
Write-Error "Arcade tools directory '$ArcadeToolsDirectory' was not found; artifacts were not properly installed."
exit 1
}
$ToolDirectories = (Get-ChildItem -Path "$ArcadeToolsDirectory" -Filter "$ToolName-$ToolVersion*" | Sort-Object -Descending)
if ($ToolDirectories -eq $null) {
Write-Error "Unable to find directory for $ToolName $ToolVersion; please make sure the tool is installed on this image."
exit 1
}
$ToolDirectory = $ToolDirectories[0]
$BinPathFile = "$($ToolDirectory.FullName)\binpath.txt"
if (-not (Test-Path -Path "$BinPathFile")) {
Write-Error "Unable to find binpath.txt in '$($ToolDirectory.FullName)' ($ToolName $ToolVersion); artifact is either installed incorrectly or is not a bootstrappable tool."
exit 1
}
$BinPath = Get-Content "$BinPathFile"
$ToolPath = Convert-Path -Path $BinPath
Write-Host "Adding $ToolName to the path ($ToolPath)..."
Write-Host "##vso[task.prependpath]$ToolPath"
$env:PATH = "$ToolPath;$env:PATH"
$InstalledTools += @{ $ToolName = $ToolDirectory.FullName }
}
}
return $InstalledTools
} else {
$NativeTools.PSObject.Properties | ForEach-Object {
$ToolName = $_.Name
$ToolVersion = $_.Value
if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) {
Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message "$ToolName not found on path. Please install $ToolName $ToolVersion before proceeding."
}
}
exit 0
}
if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') {
if($Force) {
$LocalInstallerArguments += @{ Force = $True }
} else {
$NativeTools.PSObject.Properties | ForEach-Object {
$ToolName = $_.Name
$ToolVersion = $_.Value
$LocalInstallerArguments = @{ ToolName = "$ToolName" }
$LocalInstallerArguments += @{ InstallPath = "$InstallBin" }
$LocalInstallerArguments += @{ BaseUri = "$BaseUri" }
$LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" }
$LocalInstallerArguments += @{ Version = "$ToolVersion" }
if ($Verbose) {
$LocalInstallerArguments += @{ Verbose = $True }
}
if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') {
if($Force) {
$LocalInstallerArguments += @{ Force = $True }
}
}
if ($Clean) {
$LocalInstallerArguments += @{ Clean = $True }
}
Write-Verbose "Installing $ToolName version $ToolVersion"
Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'"
& $InstallerPath @LocalInstallerArguments
if ($LASTEXITCODE -Ne "0") {
$errMsg = "$ToolName installation failed"
if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) {
$showNativeToolsWarning = $true
if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) {
$showNativeToolsWarning = $false
}
if ($showNativeToolsWarning) {
Write-Warning $errMsg
}
$toolInstallationFailure = $true
} else {
# We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
Write-Host $errMsg
exit 1
}
}
}
if ($Clean) {
$LocalInstallerArguments += @{ Clean = $True }
if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) {
# We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
Write-Host 'Native tools bootstrap failed'
exit 1
}
Write-Verbose "Installing $ToolName version $ToolVersion"
Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'"
& $InstallerPath @LocalInstallerArguments
if ($LASTEXITCODE -Ne "0") {
$errMsg = "$ToolName installation failed"
if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) {
$showNativeToolsWarning = $true
if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) {
$showNativeToolsWarning = $false
}
if ($showNativeToolsWarning) {
Write-Warning $errMsg
}
$toolInstallationFailure = $true
} else {
# We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
Write-Host $errMsg
exit 1
}
}
}
if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) {
# We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
Write-Host 'Native tools bootstrap failed'
exit 1
}
}
else {
@ -139,7 +193,7 @@ try {
Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)"
return $InstallBin
}
else {
elseif (-not ($PathPromotion)) {
Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message 'Native tools install directory does not exist, installation failed'
exit 1
}

View file

@ -8,6 +8,9 @@
<ItemGroup>
    <!-- Clear references; the SDK may add some depending on UsingToolXxx settings, but we only want to restore the following -->
<PackageReference Remove="@(PackageReference)"/>
<PackageReference Include="Microsoft.ManifestTool.CrossPlatform" Version="$(MicrosoftManifestToolCrossPlatformVersion)" />
<PackageReference Include="Microsoft.VisualStudioEng.MicroBuild.Core" Version="$(MicrosoftVisualStudioEngMicroBuildCoreVersion)" />
<PackageReference Include="Microsoft.VisualStudioEng.MicroBuild.Plugins.SwixBuild" Version="$(MicrosoftVisualStudioEngMicroBuildPluginsSwixBuildVersion)" />
<PackageReference Include="Microsoft.DotNet.IBCMerge" Version="$(MicrosoftDotNetIBCMergeVersion)" Condition="'$(UsingToolIbcOptimization)' == 'true'" />
<PackageReference Include="Drop.App" Version="$(DropAppVersion)" ExcludeAssets="all" Condition="'$(UsingToolVisualStudioIbcTraining)' == 'true'"/>
</ItemGroup>

View file

@ -71,7 +71,7 @@ if [[ -z "$CLR_CC" ]]; then
# Set default versions
if [[ -z "$majorVersion" ]]; then
    # note: for gcc (all versions) and clang versions higher than 6, the minor version is omitted from the file name when it is zero.
if [[ "$compiler" == "clang" ]]; then versions=( 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 )
if [[ "$compiler" == "clang" ]]; then versions=( 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 )
elif [[ "$compiler" == "gcc" ]]; then versions=( 12 11 10 9 8 7 6 5 4.9 ); fi
for version in "${versions[@]}"; do

View file

@ -8,8 +8,6 @@ Param(
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
. $PSScriptRoot\tools.ps1
function Get-AzDOHeaders(
[string] $token)
@ -38,10 +36,10 @@ function Update-BuildRetention(
Write-Host "Updated retention settings for build ${buildId}."
}
catch {
Write-PipelineTelemetryError -Category "Build" -Message "Failed to update retention settings for build: $_.Exception.Response.StatusDescription"
ExitWithExitCode 1
Write-Error "Failed to update retention settings for build: $_.Exception.Response.StatusDescription"
exit 1
}
}
Update-BuildRetention -azdoOrgUri $azdoOrgUri -azdoProject $azdoProject -buildId $buildId -token $token
ExitWithExitCode 0
exit 0
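
Get-AzDOHeaders is referenced above but its body falls outside this hunk. As a rough sketch only (an assumption about the usual pattern, not code lifted from the script), building Azure DevOps request headers from a PAT typically looks like this:

function Get-AzDOHeaders([string] $token) {
    # Assumption: Azure DevOps REST APIs accept a PAT as the password of a Basic auth header with an empty user name.
    $base64AuthInfo = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":${token}"))
    return @{ 'Authorization' = "Basic $base64AuthInfo" }
}

# Update-BuildRetention presumably passes these headers to its Invoke-RestMethod call against the builds endpoint.
$headers = Get-AzDOHeaders -token $token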

View file

@ -64,7 +64,7 @@ try {
$GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
}
if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
$GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "16.10.0-preview2" -MemberType NoteProperty
$GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.2.1" -MemberType NoteProperty
}
if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
$xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true

View file

@ -7,6 +7,11 @@
<clear />
<add key="guardian" value="https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json" />
</packageSources>
<packageSourceMapping>
<packageSource key="guardian">
<package pattern="microsoft.guardian.cli" />
</packageSource>
</packageSourceMapping>
<disabledPackageSources>
<clear />
</disabledPackageSources>

eng/common/sdl/sdl.ps1 Normal file (38 added lines)
View file

@ -0,0 +1,38 @@
function Install-Gdn {
param(
[Parameter(Mandatory=$true)]
[string]$Path,
# If omitted, install the latest version of Guardian, otherwise install that specific version.
[string]$Version
)
$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
$disableConfigureToolsetImport = $true
$global:LASTEXITCODE = 0
# `tools.ps1` checks $ci to perform some actions. Since the SDL
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
. $PSScriptRoot\..\tools.ps1
$argumentList = @("install", "Microsoft.Guardian.Cli", "-Source https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json", "-OutputDirectory $Path", "-NonInteractive", "-NoCache")
if ($Version) {
$argumentList += "-Version $Version"
}
Start-Process nuget -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
$gdnCliPath = Get-ChildItem -Filter guardian.cmd -Recurse -Path $Path
if (!$gdnCliPath)
{
Write-PipelineTelemetryError -Category 'Sdl' -Message 'Failure installing Guardian'
}
return $gdnCliPath.FullName
}
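
The execute-sdl steps template later in this diff consumes this helper by dot-sourcing sdl.ps1. A minimal usage sketch, with $repoRoot as a placeholder for the repository checkout path rather than anything defined in the script:

# $repoRoot stands in for the repository checkout directory; both paths are placeholders.
Set-Location -Path "$repoRoot\eng\common\sdl"
. .\sdl.ps1
# Omit -Version to restore the latest Microsoft.Guardian.Cli; pass it to pin a specific version.
$guardianCliLocation = Install-Gdn -Path "$repoRoot\.artifacts"
Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"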

View file

@ -34,7 +34,7 @@ jobs:
- job: Run_SDL
dependsOn: ${{ parameters.dependsOn }}
displayName: Run SDL tool
condition: eq( ${{ parameters.enable }}, 'true')
condition: and(succeededOrFailed(), eq( ${{ parameters.enable }}, 'true'))
variables:
- group: DotNet-VSTS-Bot
- name: AzDOProjectName
@ -54,12 +54,14 @@ jobs:
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: NetCore1ESPool-Internal
demands: ImageOverride -equals Build.Server.Amd64.VS2019
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- checkout: self
clean: true
- template: /eng/common/templates/post-build/setup-maestro-vars.yml
# If the template caller didn't provide an AzDO parameter, set them all up as Maestro vars.
- ${{ if not(and(parameters.AzDOProjectName, parameters.AzDOPipelineId, parameters.AzDOBuildId)) }}:
- template: /eng/common/templates/post-build/setup-maestro-vars.yml
- ${{ if ne(parameters.downloadArtifacts, 'false')}}:
- ${{ if ne(parameters.artifactNames, '') }}:

View file

@ -25,6 +25,7 @@ parameters:
enablePublishTestResults: false
enablePublishUsingPipelines: false
disableComponentGovernance: false
componentGovernanceIgnoreDirectories: ''
mergeTestResults: false
testRunTitle: ''
testResultsFormat: ''
@ -140,11 +141,14 @@ jobs:
languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }}
richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
continueOnError: true
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), ne(parameters.disableComponentGovernance, 'true')) }}:
- task: ComponentGovernanceComponentDetection@0
continueOnError: true
inputs:
ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
@ -183,24 +187,6 @@ jobs:
displayName: Publish logs
continueOnError: true
condition: always()
- ${{ if or(eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- ${{ if and(ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: CopyFiles@2
displayName: Gather Asset Manifests
inputs:
SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
TargetFolder: '$(Build.ArtifactStagingDirectory)/AssetManifests'
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
- task: PublishBuildArtifacts@1
displayName: Push Asset Manifests
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/AssetManifests'
PublishLocation: Container
ArtifactName: AssetManifests
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
- ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- task: PublishBuildArtifacts@1
@ -234,28 +220,11 @@ jobs:
mergeTestResults: ${{ parameters.mergeTestResults }}
continueOnError: true
condition: always()
- ${{ if and(eq(parameters.enablePublishBuildAssets, true), ne(parameters.enablePublishUsingPipelines, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: CopyFiles@2
displayName: Gather Asset Manifests
inputs:
SourceFolder: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)/AssetManifest'
TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
- task: PublishBuildArtifacts@1
displayName: Push Asset Manifests
inputs:
PathtoPublish: '$(Build.StagingDirectory)/AssetManifests'
PublishLocation: Container
ArtifactName: AssetManifests
continueOnError: ${{ parameters.continueOnError }}
condition: and(succeeded(), eq(variables['_DotNetPublishToBlobFeed'], 'true'))
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- template: /eng/common/templates/steps/generate-sbom.yml
parameters:
PackageVersion: ${{ parameters.packageVersion}}
BuildDropPath: ${{ parameters.buildDropPath }}
IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}

View file

@ -41,7 +41,7 @@ jobs:
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: NetCore1ESPool-Internal
demands: ImageOverride -equals Build.Server.Amd64.VS2019
demands: ImageOverride -equals windows.vs2019.amd64
variables:
- group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
@ -72,8 +72,8 @@ jobs:
lclSource: ${{ parameters.LclSource }}
lclPackageId: ${{ parameters.LclPackageId }}
isCreatePrSelected: ${{ parameters.CreatePr }}
isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
${{ if eq(parameters.CreatePr, true) }}:
isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
${{ if eq(parameters.RepoType, 'gitHub') }}:
isShouldReusePrSelected: ${{ parameters.ReusePr }}

View file

@ -23,23 +23,33 @@ parameters:
# Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
publishUsingPipelines: false
  # Optional: whether to publish the build's assets immediately once the publish to BAR job completes, rather than in a separate stage
publishAssetsImmediately: false
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
jobs:
- job: Asset_Registry_Publish
dependsOn: ${{ parameters.dependsOn }}
displayName: Publish to Build Asset Registry
${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
displayName: Publish Assets
${{ else }}:
displayName: Publish to Build Asset Registry
pool: ${{ parameters.pool }}
variables:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- name: _BuildConfig
value: ${{ parameters.configuration }}
- group: Publish-Build-Assets
- group: AzureDevOps-Artifact-Feeds-Pats
- name: runCodesignValidationInjection
value: false
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- template: /eng/common/templates/post-build/common-variables.yml
steps:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
@ -52,14 +62,13 @@ jobs:
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: NuGetAuthenticate@0
- task: NuGetAuthenticate@0
- task: PowerShell@2
displayName: Enable cross-org NuGet feed authentication
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/enable-cross-org-publishing.ps1
arguments: -token $(dn-bot-all-orgs-artifact-feeds-rw)
- task: PowerShell@2
displayName: Enable cross-org NuGet feed authentication
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/enable-cross-org-publishing.ps1
arguments: -token $(dn-bot-all-orgs-artifact-feeds-rw)
- task: PowerShell@2
displayName: Publish Build Assets
@ -70,7 +79,6 @@ jobs:
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
/p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
/p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
/p:Configuration=$(_BuildConfig)
/p:OfficialBuildId=$(Build.BuildNumber)
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
@ -114,7 +122,25 @@ jobs:
PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
PublishLocation: Container
ArtifactName: ReleaseConfigs
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- template: /eng/common/templates/post-build/setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- task: PowerShell@2
displayName: Publish Using Darc
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: -BuildId $(BARBuildId)
-PublishingInfraVersion 3
-AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
-MaestroToken '$(MaestroApiAccessToken)'
-WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- template: /eng/common/templates/steps/publish-logs.yml
parameters:

View file

@ -46,7 +46,7 @@ jobs:
# source-build builds run in Docker, including the default managed platform.
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: NetCore1ESPool-Public
name: NetCore-Public
demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: NetCore1ESPool-Internal

View file

@ -1,12 +1,13 @@
parameters:
runAsPublic: false
sourceIndexPackageVersion: 1.0.1-20210614.1
sourceIndexPackageVersion: 1.0.1-20220804.1
sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
preSteps: []
binlogPath: artifacts/log/Debug/Build.binlog
condition: ''
dependsOn: ''
pool: ''
jobs:
- job: SourceIndexStage1
@ -22,13 +23,17 @@ jobs:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- group: source-dot-net stage1 variables
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: NetCore1ESPool-Public
demands: ImageOverride -equals Build.Server.Amd64.VS2019.Open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: NetCore1ESPool-Internal
demands: ImageOverride -equals Build.Server.Amd64.VS2019
${{ if ne(parameters.pool, '') }}:
pool: ${{ parameters.pool }}
${{ if eq(parameters.pool, '') }}:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: NetCore-Public
demands: ImageOverride -equals windows.vs2019.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: NetCore1ESPool-Internal
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }}

View file

@ -27,6 +27,13 @@ parameters:
# Optional: Override automatically derived dependsOn value for "publish build assets" job
publishBuildAssetsDependsOn: ''
# Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
publishAssetsImmediately: false
# Optional: if publishAssetsImmediately is used and additional parameters are needed, these pass them along (they are normally sent to post-build.yml)
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
# Optional: should run as a public build even in the internal project
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
@ -68,7 +75,6 @@ jobs:
${{ parameter.key }}: ${{ parameter.value }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- template: ../job/publish-build-assets.yml
parameters:
@ -90,8 +96,11 @@ jobs:
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
name: NetCore1ESPool-Internal
demands: ImageOverride -equals Build.Server.Amd64.VS2019
demands: ImageOverride -equals windows.vs2019.amd64
runAsPublic: ${{ parameters.runAsPublic }}
publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}

View file

@ -14,7 +14,7 @@ parameters:
# This is the default platform provided by Arcade, intended for use by a managed-only repo.
defaultManagedPlatform:
name: 'Managed'
container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-3e800f1-20190501005343'
container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-latest'
# Defines the platforms on which to run build jobs. One job is created for each platform, and the
# object in this array is sent to the job template as 'platform'. If no platforms are specified,

View file

@ -49,6 +49,7 @@ parameters:
type: object
default:
enable: false
publishGdn: false
continueOnError: false
params: ''
artifactNames: ''
@ -82,6 +83,11 @@ parameters:
default:
- Validate
# Optional: Call asset publishing rather than running in a separate stage
- name: publishAssetsImmediately
type: boolean
default: false
stages:
- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- stage: Validate
@ -92,16 +98,16 @@ stages:
jobs:
- job:
displayName: NuGet Validation
condition: eq( ${{ parameters.enableNugetValidation }}, 'true')
condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: VSEngSS-MicroBuild2022-1ES
demands: Cmd
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
${{ else }}:
name: NetCore1ESPool-Internal
demands: ImageOverride -equals Build.Server.Amd64.VS2019
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- template: setup-maestro-vars.yml
@ -136,9 +142,9 @@ stages:
name: VSEngSS-MicroBuild2022-1ES
demands: Cmd
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
${{ else }}:
name: NetCore1ESPool-Internal
demands: ImageOverride -equals Build.Server.Amd64.VS2019
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- template: setup-maestro-vars.yml
parameters:
@ -196,9 +202,9 @@ stages:
name: VSEngSS-MicroBuild2022-1ES
demands: Cmd
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
${{ else }}:
name: NetCore1ESPool-Internal
demands: ImageOverride -equals Build.Server.Amd64.VS2019
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- template: setup-maestro-vars.yml
parameters:
@ -230,48 +236,50 @@ stages:
- template: /eng/common/templates/job/execute-sdl.yml
parameters:
enable: ${{ parameters.SDLValidationParameters.enable }}
publishGuardianDirectoryToPipeline: ${{ parameters.SDLValidationParameters.publishGdn }}
additionalParameters: ${{ parameters.SDLValidationParameters.params }}
continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }}
- stage: publish_using_darc
${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
dependsOn: ${{ parameters.publishDependsOn }}
${{ if and(ne(parameters.enableNugetValidation, 'true'), ne(parameters.enableSigningValidation, 'true'), ne(parameters.enableSourceLinkValidation, 'true'), ne(parameters.SDLValidationParameters.enable, 'true')) }}:
dependsOn: ${{ parameters.validateDependsOn }}
displayName: Publish using Darc
variables:
- template: common-variables.yml
jobs:
- job:
displayName: Publish Using Darc
timeoutInMinutes: 120
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- stage: publish_using_darc
${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
dependsOn: ${{ parameters.publishDependsOn }}
${{ else }}:
dependsOn: ${{ parameters.validateDependsOn }}
displayName: Publish using Darc
variables:
- template: common-variables.yml
jobs:
- job:
displayName: Publish Using Darc
timeoutInMinutes: 120
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
name: VSEngSS-MicroBuild2022-1ES
demands: Cmd
# If it's not devdiv, it's dnceng
${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
${{ else }}:
name: NetCore1ESPool-Internal
demands: ImageOverride -equals Build.Server.Amd64.VS2019
steps:
- template: setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
demands: ImageOverride -equals windows.vs2019.amd64
steps:
- template: setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- task: NuGetAuthenticate@0
- task: NuGetAuthenticate@0
- task: PowerShell@2
displayName: Publish Using Darc
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: -BuildId $(BARBuildId)
-PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
-AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
-MaestroToken '$(MaestroApiAccessToken)'
-WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
- task: PowerShell@2
displayName: Publish Using Darc
inputs:
filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
arguments: -BuildId $(BARBuildId)
-PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
-AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
-MaestroToken '$(MaestroApiAccessToken)'
-WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'

View file

@ -8,29 +8,28 @@ parameters:
condition: ''
steps:
- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
- powershell: |
$content = Get-Content $(GuardianPackagesConfigFile)
Write-Host "packages.config content was:`n$content"
$content = $content.Replace('$(DefaultGuardianVersion)', '$(GuardianVersion)')
$content | Set-Content $(GuardianPackagesConfigFile)
Write-Host "packages.config content updated to:`n$content"
displayName: Use overridden Guardian version ${{ parameters.overrideGuardianVersion }}
- task: NuGetAuthenticate@1
inputs:
nuGetServiceConnections: GuardianConnect
- task: NuGetToolInstaller@1
displayName: 'Install NuGet.exe'
- task: NuGetCommand@2
displayName: 'Install Guardian'
inputs:
restoreSolution: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
feedsToUse: config
nugetConfigPath: $(Build.SourcesDirectory)\eng\common\sdl\NuGet.config
externalFeedCredentials: GuardianConnect
restoreDirectory: $(Build.SourcesDirectory)\.packages
- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
- pwsh: |
Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
. .\sdl.ps1
$guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }}
Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
displayName: Install Guardian (Overridden)
- ${{ if eq(parameters.overrideGuardianVersion, '') }}:
- pwsh: |
Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
. .\sdl.ps1
$guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts
Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
displayName: Install Guardian
- ${{ if ne(parameters.overrideParameters, '') }}:
- powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
@ -40,7 +39,7 @@ steps:
- ${{ if eq(parameters.overrideParameters, '') }}:
- powershell: ${{ parameters.executeAllSdlToolsScript }}
-GuardianPackageName Microsoft.Guardian.Cli.$(GuardianVersion)
-GuardianCliLocation $(GuardianCliLocation)
-NugetPackageDirectory $(Build.SourcesDirectory)\.packages
-AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
${{ parameters.additionalParameters }}
@ -62,7 +61,28 @@ steps:
condition: succeededOrFailed()
- publish: $(Agent.BuildDirectory)/.gdn
artifact: GuardianConfiguration
displayName: Publish GuardianConfiguration
condition: succeededOrFailed()
# Publish the SARIF files in a container named CodeAnalysisLogs to enable integration
# with the "SARIF SAST Scans Tab" Azure DevOps extension
- task: CopyFiles@2
displayName: Copy SARIF files
inputs:
flattenFolders: true
sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/
contents: '**/*.sarif'
targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs
condition: succeededOrFailed()
# Use PublishBuildArtifacts because the SARIF extension only checks this case
# see microsoft/sarif-azuredevops-extension#4
- task: PublishBuildArtifacts@1
displayName: Publish SARIF files to CodeAnalysisLogs container
inputs:
pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs
artifactName: CodeAnalysisLogs
condition: succeededOrFailed()

View file

@ -2,12 +2,14 @@
# PackageName - The name of the package this SBOM represents.
# PackageVersion - The version of the package this SBOM represents.
# ManifestDirPath - The path of the directory where the generated manifest files will be placed
# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
parameters:
PackageVersion: 7.0.0
BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
PackageName: '.NET'
ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
IgnoreDirectories: ''
sbomContinueOnError: true
steps:
@ -34,6 +36,8 @@ steps:
BuildDropPath: ${{ parameters.buildDropPath }}
PackageVersion: ${{ parameters.packageVersion }}
ManifestDirPath: ${{ parameters.manifestDirPath }}
${{ if ne(parameters.IgnoreDirectories, '') }}:
AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
- task: PublishPipelineArtifact@1
displayName: Publish SBOM manifest

View file

@ -3,7 +3,7 @@ parameters:
HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
HelixTargetQueues: '' # required -- semicolon delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
HelixConfiguration: '' # optional -- additional property attached to a job
HelixPreCommands: '' # optional -- commands to run before Helix work item execution
@ -12,7 +12,7 @@ parameters:
WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
XUnitProjects: '' # optional -- semicolon delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
@ -22,14 +22,14 @@ parameters:
DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting int)
HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
Creator: '' # optional -- if the build is external, use this to specify who is sending the job
DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
steps:
- powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
- powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
env:
BuildConfig: $(_BuildConfig)
@ -59,7 +59,7 @@ steps:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
- script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
- script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
env:
BuildConfig: $(_BuildConfig)

View file

@ -68,6 +68,11 @@ steps:
publishArgs='--publish'
fi
assetManifestFileName=SourceBuild_RidSpecific.xml
if [ '${{ parameters.platform.name }}' != '' ]; then
assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
fi
${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
--configuration $buildConfig \
--restore --build --pack $publishArgs -bl \
@ -76,7 +81,8 @@ steps:
$internalRestoreArgs \
$targetRidArgs \
/p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
/p:ArcadeBuildFromSource=true
/p:ArcadeBuildFromSource=true \
/p:AssetManifestFileName=$assetManifestFileName
displayName: Build
# Upload build logs for diagnosis.

View file

@ -365,10 +365,17 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
# If the version of msbuild is going to be xcopied,
# use this version. Version matches a package here:
# https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=16.10.0-preview2&view=overview
$defaultXCopyMSBuildVersion = '16.10.0-preview2'
# https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=17.2.1&view=overview
$defaultXCopyMSBuildVersion = '17.2.1'
if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
if (!$vsRequirements) {
if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
$vsRequirements = $GlobalJson.tools.vs
}
else {
$vsRequirements = New-Object PSObject -Property @{ version = $vsMinVersionReqdStr }
}
}
$vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { $vsMinVersionReqdStr }
$vsMinVersion = [Version]::new($vsMinVersionStr)
@ -573,7 +580,7 @@ function InitializeBuildTool() {
ExitWithExitCode 1
}
$dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet')
$buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'netcoreapp3.1' }
$buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net7.0' }
} elseif ($msbuildEngine -eq "vs") {
try {
$msbuildPath = InitializeVisualStudioMSBuild -install:$restore
@ -635,6 +642,10 @@ function InitializeNativeTools() {
InstallDirectory = "$ToolsDir"
}
}
if ($env:NativeToolsOnMachine) {
Write-Host "Variable NativeToolsOnMachine detected, enabling native tool path promotion..."
$nativeArgs += @{ PathPromotion = $true }
}
& "$PSScriptRoot/init-tools-native.ps1" @nativeArgs
}
}
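
A hedged example of opting into the new path-promotion behavior from a local shell; eng/common/build.ps1 is the usual Arcade entry point, and which native tools get probed depends on the repo's global.json:

# Setting the variable makes InitializeNativeTools pass -PathPromotion to init-tools-native.ps1,
# so tools already present on the machine are promoted onto PATH instead of being bootstrapped.
$env:NativeToolsOnMachine = 'true'
.\eng\common\build.ps1 -restore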

View file

@ -312,7 +312,7 @@ function InitializeBuildTool {
# return values
_InitializeBuildTool="$_InitializeDotNetCli/dotnet"
_InitializeBuildToolCommand="msbuild"
_InitializeBuildToolFramework="netcoreapp3.1"
_InitializeBuildToolFramework="net7.0"
}
# Set RestoreNoCache as a workaround for https://github.com/NuGet/Home/issues/3116

View file

@ -1,8 +1,8 @@
{
"tools": {
"dotnet": "6.0.100"
"dotnet": "7.0.100-rc.1.22431.12"
},
"msbuild-sdks": {
"Microsoft.DotNet.Arcade.Sdk": "7.0.0-beta.22124.1"
"Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.22480.2"
}
}