Merge arcade to master (#5525)
* Initial commit for Arcade migration
* Added omitted files
* Changed strong name signing to use the same key for shipping and test assemblies
* arcade linux build (#5423)
* arcade linux build
* put file execution permission change into source control
* The `-test` command for Windows. NuGet packages (#5464)
* working on testing
* testing updates
* tests almost working
* build changes
* all tests should be working
* changes from PR comments
* fixes for .net 3.1
* Fixed extension check. Removed <PackageId> where not needed
* Removed pkg folder and updated paths.
* Added test key. (#5475)
* Added test key.
* Update PublicKey.cs
Removed extra newline.
* Update ComponentCatalog.cs
Fixed 3 spaces to 4.
* Windows CI working (#5477)
* ci testing changes
* comments from pr
* Added Linux & Mac changes for Arcade (#5479)
* Initial Windows, Linux, Macos builds test
* Add Linux/MacOS specific CI requirements
* Run Arcade CI tests on MacOS/Linux
* Fix final package building
* Add benchmark download to benchmarks .csproj file
* Print detailed status of each unit test
* Install CentOS & Ubuntu build dependencies
* Use container names to differentiate between Ubuntu & CentOS
* Remove sudo usage in CentOS
* Fix Linux build dependencies
* Add -y param to apt install
* Remove installation of Linux dependencies
* Minor additions
* Rename Benchmarks to PerformanceTests for Arcade
* Changes
* Added benchmark doc changes
* Pre-merge changes
* Fixing failing Arcade Windows Builds (#5482)
* Try Windows build single quote fix
* Remove %20
* Added variable space value
* Using variables for spacing
* Added space values as job parameters
* Try conditional variables again
* fix official builds
* Revert "fix official builds"
This reverts commit 7dbbdc7b94.
* fixing tensorflow rebase issue
* Fixes for many of the CI builds. (#5496)
* yml log changes
* Fix NetFX builds by ensuring assembly version is set correctly and not to Arcade default of 42.42.42.42 (#5503)
* Fixed official builds for Arcade SDK (#5512)
* Added fixes for official builds
* Make .sh files executable
* fix mkl nuget issue
Co-authored-by: Frank Dong <frdong@microsoft.com>
* fix code generator test failures (#5520)
* Added fixes for official builds
* Make .sh files executable
* fix mkl nuget issue
* fix code generator test failures
* only add necessary dependency
Co-authored-by: Mustafa Bal <5262061+mstfbl@users.noreply.github.com>
* Fixed memory leaks from OnnxTransformer (#5518)
* Fixed memory leak from OnnxTransformer and related x86 build fixes
* Reverting x86 build related fixes to focus only on the memory leaks
* Updated docs
* Reverted OnnxRuntimeOutputCatcher to private class
* Addressed code review comments
* Refactored OnnxTransform back to using MapperBase based on code review comments
* Handle integration tests and nightly build testing (#5509)
* Make -integrationTests work
* Update .yml file
* Added the TargetArchitecture properties
* Try out -integrationTest
* Missed -integrationTest flag
* Renamed FunctionalTestBaseClass to IntegrationTestBaseClass
* Missed rename
* Modified tests to make them more stable
* Fixed leak in object pool (#5521)
Co-authored-by: frank-dong-ms <55860649+frank-dong-ms@users.noreply.github.com>
Co-authored-by: Michael Sharp <51342856+michaelgsharp@users.noreply.github.com>
Co-authored-by: Mustafa Bal <5262061+mstfbl@users.noreply.github.com>
Co-authored-by: Frank Dong <frdong@microsoft.com>
Co-authored-by: Michael Sharp <misharp@microsoft.com>
Co-authored-by: Antonio Velázquez <38739674+antoniovs1029@users.noreply.github.com>
Parent: d257b880e5
Commit: c2f8ed5c9c
@@ -5,6 +5,8 @@
# Tool Runtime Dir
/[Tt]ools/
/.dotnet/
/.packages/

# User-specific files
*.suo

@@ -6,7 +6,7 @@ resources:
containers:
- container: CentosContainer
image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet-8bba86b-20190314145033

- container: UbuntuContainer
image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-16.04-mlnet-20200515184230-2c829e8

@@ -29,7 +29,7 @@ jobs:
_targetFramework: netcoreapp3.1
innerLoop: true
pool:
name: Hosted Ubuntu 1604
name: Hosted Ubuntu 1604

- template: /build/ci/job-template.yml
parameters:

@@ -38,7 +38,7 @@ jobs:
container: UbuntuContainer
innerLoop: true
pool:
name: Hosted Ubuntu 1604
name: Hosted Ubuntu 1604

- template: /build/ci/job-template.yml
parameters:

@ -1,13 +1,24 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project>
|
||||
<!-- Directory.Build.props contains the common build settings for all projects in the repo. -->
|
||||
|
||||
<Import Project="build/BranchInfo.props" />
|
||||
<Import Project="build/Dependencies.props" />
|
||||
<Import Project="Sdk.props" Sdk="Microsoft.DotNet.Arcade.Sdk" />
|
||||
|
||||
<Import Project="eng/BranchInfo.props" />
|
||||
<Import Project="eng/ExternalBenchmarkDataFiles.props" />
|
||||
<Import Project="eng/TensorflowMetaFiles.props" />
|
||||
|
||||
<PropertyGroup>
|
||||
<CreateSymbolicLinksForPublishFilesIfPossible>true</CreateSymbolicLinksForPublishFilesIfPossible>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<Copyright>$(CopyrightNetFoundation)</Copyright>
|
||||
<TreatWarningsAsErrors>True</TreatWarningsAsErrors>
|
||||
<DebugType>portable</DebugType>
|
||||
<DebugSymbols>true</DebugSymbols>
|
||||
<LangVersion>latest</LangVersion>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<Configuration Condition="'$(Configuration)'==''">Debug</Configuration>
|
||||
<Configurations>Debug;Release;Debug-netcoreapp3_1;Release-netcoreapp3_1;Debug-netfx;Release-netfx</Configurations>
|
||||
|
@ -15,103 +26,12 @@
|
|||
<TargetArchitecture Condition="'$(TargetArchitecture)' == ''">x64</TargetArchitecture>
|
||||
<NativeTargetArchitecture Condition="'$(NativeTargetArchitecture)' == ''">$(TargetArchitecture)</NativeTargetArchitecture>
|
||||
<PlatformConfig>$(Platform).$(Configuration)</PlatformConfig>
|
||||
<StrongNameKeyId>Open</StrongNameKeyId>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<RestoreSources>
|
||||
https://api.nuget.org/v3/index.json;
|
||||
https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json;
|
||||
https://dotnet.myget.org/F/dotnet-core/api/v3/index.json;
|
||||
https://dotnet.myget.org/F/roslyn-analyzers/api/v3/index.json;
|
||||
https://pkgs.dev.azure.com/dnceng/public/_packaging/MachineLearning/nuget/v3/index.json;
|
||||
https://pkgs.dev.azure.com/dnceng/public/_packaging/machinelearning-testdata/nuget/v3/index.json;
|
||||
</RestoreSources>
|
||||
</PropertyGroup>
|
||||
|
||||
<!-- Common repo directories -->
|
||||
<PropertyGroup>
|
||||
<RepoRoot>$(MSBuildThisFileDirectory)</RepoRoot>
|
||||
<SourceDir>$(RepoRoot)src/</SourceDir>
|
||||
<PackageAssetsPath>$(ArtifactsDir)pkgassets/</PackageAssetsPath>
|
||||
<PkgDir>$(RepoRoot)pkg/</PkgDir>
|
||||
|
||||
<!-- Output directories -->
|
||||
<BinDir Condition="'$(BinDir)'==''">$([MSBuild]::NormalizeDirectory('$(RepoRoot)', 'bin'))</BinDir>
|
||||
<BaseOutputPath Condition="'$(BaseOutputPath)'==''">$(BinDir)</BaseOutputPath>
|
||||
<ObjDir Condition="'$(ObjDir)'==''">$([MSBuild]::NormalizeDirectory('$(BinDir)', 'obj'))</ObjDir>
|
||||
<RootIntermediateOutputPath Condition="'$(RootIntermediateOutputPath)'==''">$(ObjDir)</RootIntermediateOutputPath>
|
||||
|
||||
<IntermediateOutputRootPath Condition="'$(IntermediateOutputRootPath)' == ''">$(RootIntermediateOutputPath)$(PlatformConfig)\</IntermediateOutputRootPath>
|
||||
<IntermediateOutputPath Condition="'$(IntermediateOutputPath)' == ''">$(IntermediateOutputRootPath)$(MSBuildProjectName)\</IntermediateOutputPath>
|
||||
<BaseIntermediateOutputPath Condition="'$(BaseIntermediateOutputPath)' == ''">$(IntermediateOutputPath)</BaseIntermediateOutputPath>
|
||||
|
||||
<OutputPath Condition="'$(OutputPath)'==''">$(BaseOutputPath)$(PlatformConfig)\$(MSBuildProjectName)\</OutputPath>
|
||||
|
||||
<PackageAssetsPath>$(ObjDir)/packages/</PackageAssetsPath>
|
||||
|
||||
<PackageOutputPath Condition="'$(PackageOutputPath)'=='' and '$(NonShippingPackage)' == 'true'">$(BinDir)packages_noship/</PackageOutputPath>
|
||||
<PackageOutputPath Condition="'$(PackageOutputPath)'==''">$(BinDir)packages/</PackageOutputPath>
|
||||
|
||||
<NativeOutputPath>$(BaseOutputPath)$(NativeTargetArchitecture).$(Configuration)\Native\</NativeOutputPath>
|
||||
|
||||
<!-- Input Directories -->
|
||||
<PackagesDir>$(DotNetRestorePackagesPath)</PackagesDir>
|
||||
<PackagesDir Condition="'$(PackagesDir)'==''">$(RepoRoot)packages/</PackagesDir>
|
||||
<RestorePackagesPath>$(PackagesDir)</RestorePackagesPath>
|
||||
<ToolsDir Condition="'$(ToolsDir)'==''">$(RepoRoot)Tools/</ToolsDir>
|
||||
</PropertyGroup>
|
||||
|
||||
<Import Project="$(ToolsDir)BuildVersion.targets"
|
||||
Condition="Exists('$(ToolsDir)BuildVersion.targets')" />
|
||||
|
||||
<!-- Version properties -->
|
||||
<PropertyGroup>
|
||||
<VersionPrefix>$(MajorVersion).$(MinorVersion).$(PatchVersion)</VersionPrefix>
|
||||
<BuildNumberMajor Condition="'$(BuildNumberMajor)' == ''">00001</BuildNumberMajor>
|
||||
<BuildNumberMinor Condition="'$(BuildNumberMinor)' == ''">0</BuildNumberMinor>
|
||||
<AssemblyFileVersion Condition="'$(AssemblyFileVersion)'==''">$(MajorVersion).$(MinorVersion).$(BuildNumberMajor).$(BuildNumberMinor)</AssemblyFileVersion>
|
||||
|
||||
<StabilizePackageVersion Condition="'$(StabilizePackageVersion)' == ''">false</StabilizePackageVersion>
|
||||
<IncludeBuildNumberInPackageVersion Condition="'$(IncludeBuildNumberInPackageVersion)' == '' and '$(StabilizePackageVersion)' != 'true'">true</IncludeBuildNumberInPackageVersion>
|
||||
|
||||
<VersionSuffix Condition="'$(IncludeBuildNumberInPackageVersion)' == 'true'">$(BuildNumberMajor)-$(BuildNumberMinor)</VersionSuffix>
|
||||
<NoPackageAnalysis>true</NoPackageAnalysis>
|
||||
</PropertyGroup>
|
||||
|
||||
<!-- SourceLink properties used by dotnet/buildtools - need to be set before importing $(ToolsDir)versioning.props -->
|
||||
<PropertyGroup>
|
||||
<UseSourceLink>true</UseSourceLink>
|
||||
<GitHubRepositoryName>machinelearning</GitHubRepositoryName>
|
||||
</PropertyGroup>
|
||||
|
||||
<!--
|
||||
Source code control properties used by the .NET Core SDK to inject SCC info into the NuGet package.
|
||||
In future versions, these will be used for SourceLink and to generate AssemblyInfo.
|
||||
-->
|
||||
<PropertyGroup>
|
||||
<PrivateRepositoryUrl>https://github.com/dotnet/$(GitHubRepositoryName)</PrivateRepositoryUrl>
|
||||
<PublishRepositoryUrl>true</PublishRepositoryUrl>
|
||||
<SourceRevisionId>$(LatestCommit)</SourceRevisionId>
|
||||
</PropertyGroup>
|
||||
|
||||
<Import Project="$(ToolsDir)versioning.props"
|
||||
Condition="Exists('$(ToolsDir)versioning.props') and '$(DisableImportVersioningProps)' != 'true'" />
|
||||
|
||||
<!-- Language configuration -->
|
||||
<PropertyGroup>
|
||||
<LangVersion Condition="'$(MSBuildProjectExtension)'=='.csproj'">8.0</LangVersion>
|
||||
<LangVersion Condition="'$(MSBuildProjectExtension)'=='.fsproj'">4.7</LangVersion>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<RunningOnUnix Condition="'$(OS)'!='Windows_NT'">true</RunningOnUnix>
|
||||
</PropertyGroup>
|
||||
|
||||
<!-- Signing properties -->
|
||||
<PropertyGroup>
|
||||
<AssemblyOriginatorKeyFile Condition="'$(AssemblyOriginatorKeyFile)' == ''">$(ToolsDir)Open.snk</AssemblyOriginatorKeyFile>
|
||||
<SignAssembly>true</SignAssembly>
|
||||
<PublicSign Condition="'$(OS)' != 'Windows_NT'">true</PublicSign>
|
||||
</PropertyGroup>
|
||||
|
||||
<!-- Need to explicitly set these properties for the -netcoreapp3_1 or -netfx configurations becuase they are typically based off 'Debug' or 'Release' configs -->
|
||||
|
@ -125,4 +45,34 @@
|
|||
<Optimize>true</Optimize>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<!-- Optional: Publish the repository URL in the built .nupkg (in the NuSpec <Repository> element) -->
|
||||
<PublishRepositoryUrl>true</PublishRepositoryUrl>
|
||||
|
||||
<!-- Optional: Embed source files that are not tracked by the source control manager in the PDB -->
|
||||
<EmbedUntrackedSources>true</EmbedUntrackedSources>
|
||||
|
||||
<!-- Optional: Build symbol package (.snupkg) to distribute the PDB containing Source Link -->
|
||||
<IncludeSymbols>true</IncludeSymbols>
|
||||
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(DotNetBuildFromSource)' == 'true'">
|
||||
<!--
|
||||
When building using source-build the process is:
|
||||
- Newtonsoft.Json versions 9.0.1 and 12.0.2 are built by source-build
|
||||
- Version 12.0.2 is written to Version.props
|
||||
- Arcade needs to use 9.0.1 so we need to override Version.props value here
|
||||
-->
|
||||
<NewtonsoftJsonVersion>9.0.1</NewtonsoftJsonVersion>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(DotNetBuildOffline)' == 'true'">
|
||||
<!--
|
||||
Arcade has a special version prop for CodeAnalysis.CSharp in GenFacades
|
||||
to try to match the version loaded by msbuild. In the offline build, this
|
||||
is simply the source-built version.
|
||||
-->
|
||||
<MsbuildTaskMicrosoftCodeAnalysisCSharpVersion>$(MicrosoftCodeAnalysisCSharpVersion)</MsbuildTaskMicrosoftCodeAnalysisCSharpVersion>
|
||||
</PropertyGroup>
|
||||
</Project>
|
||||
|
|
|
@ -1,51 +1,39 @@
|
|||
<Project InitialTargets="CheckForBuildTools">
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.VisualStudio.Threading.Analyzers" Version="16.5.132" PrivateAssets="all" />
|
||||
</ItemGroup>
|
||||
|
||||
<Target Name="CheckForBuildTools">
|
||||
<Error Condition="!Exists('$(ToolsDir)')"
|
||||
Text="The tools directory [$(ToolsDir)] does not exist. Please run build in the root of the repo to ensure the tools are installed before attempting to build an individual project." />
|
||||
</Target>
|
||||
|
||||
<Target Name="CopyNativeAssemblies"
|
||||
BeforeTargets="PrepareForRun">
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project>
|
||||
<Import Project="Sdk.targets" Sdk="Microsoft.DotNet.Arcade.Sdk" />
|
||||
|
||||
<Target Name="CopyNativeAssembiles" AfterTargets="CopyFilesToOutputDirectory">
|
||||
<PropertyGroup>
|
||||
<LibPrefix Condition="'$(OS)' != 'Windows_NT'">lib</LibPrefix>
|
||||
<LibExtension Condition="'$(OS)' == 'Windows_NT'">.dll</LibExtension>
|
||||
<LibExtension Condition="'$(OS)' != 'Windows_NT'">.so</LibExtension>
|
||||
<LibExtension Condition="$([MSBuild]::IsOSPlatform('osx'))">.dylib</LibExtension>
|
||||
</PropertyGroup>
|
||||
<LibPrefix Condition="'$(OS)' != 'Windows_NT'">lib</LibPrefix>
|
||||
<LibExtension Condition="'$(OS)' == 'Windows_NT'">.dll</LibExtension>
|
||||
<LibExtension Condition="'$(OS)' != 'Windows_NT'">.so</LibExtension>
|
||||
<LibExtension Condition="$([MSBuild]::IsOSPlatform('osx'))">.dylib</LibExtension>
|
||||
|
||||
<ItemGroup>
|
||||
<NativeAssemblyReference>
|
||||
<FullAssemblyPath>$(NativeOutputPath)$(LibPrefix)%(NativeAssemblyReference.Identity)$(LibExtension)</FullAssemblyPath>
|
||||
</NativeAssemblyReference>
|
||||
</ItemGroup>
|
||||
<TargetArchitecture Condition="'$(Platform)' == ''">x64</TargetArchitecture>
|
||||
<NativeTargetArchitecture Condition="'$(NativeTargetArchitecture)' == ''">$(TargetArchitecture)</NativeTargetArchitecture>
|
||||
<BinDir Condition="'$(BinDir)'==''">$([MSBuild]::NormalizeDirectory('$(RepoRoot)', 'artifacts', 'bin'))</BinDir>
|
||||
<NativeOutputPath>$(BinDir)Native\$(NativeTargetArchitecture).$(Configuration)\</NativeOutputPath>
|
||||
|
||||
<Copy SourceFiles = "@(NativeAssemblyReference->'%(FullAssemblyPath)')"
|
||||
DestinationFolder="$(OutputPath)"
|
||||
OverwriteReadOnlyFiles="$(OverwriteReadOnlyFiles)"
|
||||
Retries="$(CopyRetryCount)"
|
||||
RetryDelayMilliseconds="$(CopyRetryDelayMilliseconds)"
|
||||
UseHardlinksIfPossible="$(CreateHardLinksForPublishFilesIfPossible)"
|
||||
UseSymboliclinksIfPossible="$(CreateSymbolicLinksForPublishFilesIfPossible)">
|
||||
<Output TaskParameter="DestinationFiles" ItemName="FileWrites"/>
|
||||
</Copy>
|
||||
<Platform Condition="'$(Platform)'==''">AnyCPU</Platform>
|
||||
<PlatformConfig>$(Platform).$(Configuration)</PlatformConfig>
|
||||
<OutputPath Condition="'$(OutputPath)'==''">$(BinDir)$(MSBuildProjectName)\Debug</OutputPath>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<NativeAssemblyReference>
|
||||
<FullAssemblyPath>$(NativeOutputPath)$(LibPrefix)%(NativeAssemblyReference.Identity)$(LibExtension)</FullAssemblyPath>
|
||||
</NativeAssemblyReference>
|
||||
</ItemGroup>
|
||||
|
||||
<Copy SourceFiles = "@(NativeAssemblyReference->'%(FullAssemblyPath)')"
|
||||
DestinationFolder="$(OutDir)"
|
||||
OverwriteReadOnlyFiles="$(OverwriteReadOnlyFiles)"
|
||||
Retries="$(CopyRetryCount)"
|
||||
RetryDelayMilliseconds="$(CopyRetryDelayMilliseconds)"
|
||||
UseHardlinksIfPossible="$(CreateHardLinksForPublishFilesIfPossible)"
|
||||
UseSymboliclinksIfPossible="$(CreateSymbolicLinksForPublishFilesIfPossible)">
|
||||
<Output TaskParameter="DestinationFiles" ItemName="FileWrites"/>
|
||||
</Copy>
|
||||
</Target>
|
||||
|
||||
<Import Project="$(ToolsDir)/versioning.targets" Condition="Exists('$(ToolsDir)/versioning.targets')" />
|
||||
|
||||
<!-- Workaround: AssemblyInfo.cs is not embedded -->
|
||||
<!-- https://github.com/dotnet/sourcelink/issues/572 -->
|
||||
<PropertyGroup>
|
||||
<TargetFrameworkMonikerAssemblyAttributesPath>$([System.IO.Path]::Combine('$(IntermediateOutputPath)','$(TargetFrameworkMoniker).AssemblyAttributes$(DefaultLanguageSourceExtension)'))</TargetFrameworkMonikerAssemblyAttributesPath>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<EmbeddedFiles Include="$(GeneratedAssemblyInfoFile)"/>
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
</Project>
|
||||
|
|
Microsoft.ML.sln
@ -33,7 +33,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.TestFramework"
|
|||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Predictor.Tests", "test\Microsoft.ML.Predictor.Tests\Microsoft.ML.Predictor.Tests.csproj", "{6B047E09-39C9-4583-96F3-685D84CA4117}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Functional.Tests", "test\Microsoft.ML.Functional.Tests\Microsoft.ML.Functional.Tests.csproj", "{CFED9F0C-FF81-4C96-8D5E-0436264CA7B5}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.IntegrationTests", "test\Microsoft.ML.IntegrationTests\Microsoft.ML.IntegrationTests.csproj", "{CFED9F0C-FF81-4C96-8D5E-0436264CA7B5}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.ResultProcessor", "src\Microsoft.ML.ResultProcessor\Microsoft.ML.ResultProcessor.csproj", "{3769FCC3-9AFF-4C37-97E9-6854324681DF}"
|
||||
EndProject
|
||||
|
@ -48,18 +48,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "pkg", "pkg", "{D3D38B03-B55
|
|||
pkg\Directory.Build.props = pkg\Directory.Build.props
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML", "Microsoft.ML", "{DEC8F776-49F7-4D87-836C-FE4DC057D08C}"
|
||||
ProjectSection(SolutionItems) = preProject
|
||||
pkg\Microsoft.ML\Microsoft.ML.nupkgproj = pkg\Microsoft.ML\Microsoft.ML.nupkgproj
|
||||
pkg\Microsoft.ML\Microsoft.ML.symbols.nupkgproj = pkg\Microsoft.ML\Microsoft.ML.symbols.nupkgproj
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.Parquet", "Microsoft.ML.Parquet", "{6C95FC87-F5F2-4EEF-BB97-567F2F5DD141}"
|
||||
ProjectSection(SolutionItems) = preProject
|
||||
pkg\Microsoft.ML.Parquet\Microsoft.ML.Parquet.nupkgproj = pkg\Microsoft.ML.Parquet\Microsoft.ML.Parquet.nupkgproj
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Benchmarks", "test\Microsoft.ML.Benchmarks\Microsoft.ML.Benchmarks.csproj", "{7A9DB75F-2CA5-4184-9EF5-1F17EB39483F}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.PerformanceTests", "test\Microsoft.ML.PerformanceTests\Microsoft.ML.PerformanceTests.csproj", "{7A9DB75F-2CA5-4184-9EF5-1F17EB39483F}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Maml", "src\Microsoft.ML.Maml\Microsoft.ML.Maml.csproj", "{64F40A0D-D4C2-4AA7-8470-E9CC437827E4}"
|
||||
EndProject
|
||||
|
@ -217,8 +206,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Microsoft.ML.EntryPoints",
|
|||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "common", "common", "{A84717CB-F11A-41C5-A74D-C0F1D47B7431}"
|
||||
ProjectSection(SolutionItems) = preProject
|
||||
pkg\common\CommonPackage.props = pkg\common\CommonPackage.props
|
||||
pkg\common\DnnImageFeaturizer.props = pkg\common\DnnImageFeaturizer.props
|
||||
eng\pkg\CommonPackage.props = eng\pkg\CommonPackage.props
|
||||
eng\pkg\DnnImageFeaturizer.props = eng\pkg\DnnImageFeaturizer.props
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.DataView", "src\Microsoft.ML.DataView\Microsoft.ML.DataView.csproj", "{85D0CAFD-2FE8-496A-88C7-585D35B94243}"
|
||||
|
@ -293,9 +282,9 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Vision", "src\
|
|||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.TestFrameworkCommon", "test\Microsoft.ML.TestFrameworkCommon\Microsoft.ML.TestFrameworkCommon.csproj", "{A22FAD27-77E8-4460-8B92-EC7090B7173A}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.NightlyBuild.Tests", "test\Microsoft.ML.NightlyBuild.Tests\Microsoft.ML.NightlyBuild.Tests.csproj", "{A1CAC86F-F4BB-4B6D-9D18-E9AE15B3C66E}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.NightlyBuild.Tests", "test\Microsoft.ML.NightlyBuild.Tests\Microsoft.ML.NightlyBuild.Tests.csproj", "{A1CAC86F-F4BB-4B6D-9D18-E9AE15B3C66E}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.NugetPackageVersionUpdater", "test\Microsoft.ML.NugetPackageVersionUpdater\Microsoft.ML.NugetPackageVersionUpdater.csproj", "{C8DB58DC-6434-4431-A81F-263D86E2A5F3}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.NugetPackageVersionUpdater", "test\Microsoft.ML.NugetPackageVersionUpdater\Microsoft.ML.NugetPackageVersionUpdater.csproj", "{C8DB58DC-6434-4431-A81F-263D86E2A5F3}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "build", "build", "{C91F81E3-B900-4968-A6DF-F53B515E97E1}"
|
||||
EndProject
|
||||
|
@ -304,6 +293,10 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "netstandard2.0", "netstanda
|
|||
pkg\Microsoft.ML.CpuMath\build\netstandard2.0\Microsoft.ML.CpuMath.props = pkg\Microsoft.ML.CpuMath\build\netstandard2.0\Microsoft.ML.CpuMath.props
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML", "src\Microsoft.ML\Microsoft.ML.csproj", "{6CF88209-69DB-4B36-9604-3ECD9F163E96}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.ML.Mkl.Redist", "src\Microsoft.ML.Mkl.Redist\Microsoft.ML.Mkl.Redist.csproj", "{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
|
@ -1639,6 +1632,30 @@ Global
|
|||
{3C8F910B-7F23-4D25-B521-6D5AC9570ADD}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
|
||||
{3C8F910B-7F23-4D25-B521-6D5AC9570ADD}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
|
||||
{3C8F910B-7F23-4D25-B521-6D5AC9570ADD}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|Any CPU.ActiveCfg = Debug-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|Any CPU.Build.0 = Debug-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|x64.ActiveCfg = Debug-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|x64.Build.0 = Debug-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|Any CPU.ActiveCfg = Debug-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|Any CPU.Build.0 = Debug-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|x64.ActiveCfg = Debug-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|x64.Build.0 = Debug-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|x64.Build.0 = Release|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|Any CPU.ActiveCfg = Release-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|Any CPU.Build.0 = Release-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|x64.ActiveCfg = Release-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|x64.Build.0 = Release-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
|
||||
{56CB0850-7341-4D71-9AE4-9EFC472D93DD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{56CB0850-7341-4D71-9AE4-9EFC472D93DD}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{56CB0850-7341-4D71-9AE4-9EFC472D93DD}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
|
@ -1771,30 +1788,54 @@ Global
|
|||
{C8DB58DC-6434-4431-A81F-263D86E2A5F3}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
|
||||
{C8DB58DC-6434-4431-A81F-263D86E2A5F3}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
|
||||
{C8DB58DC-6434-4431-A81F-263D86E2A5F3}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|Any CPU.ActiveCfg = Debug-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|Any CPU.Build.0 = Debug-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|x64.ActiveCfg = Debug-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netcoreapp3_1|x64.Build.0 = Debug-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|Any CPU.ActiveCfg = Debug-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|Any CPU.Build.0 = Debug-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|x64.ActiveCfg = Debug-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Debug-netfx|x64.Build.0 = Debug-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release|x64.Build.0 = Release|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|Any CPU.ActiveCfg = Release-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|Any CPU.Build.0 = Release-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|x64.ActiveCfg = Release-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netcoreapp3_1|x64.Build.0 = Release-netcoreapp3_1|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netcoreapp3_1|Any CPU.ActiveCfg = Debug-netcoreapp3_1|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netcoreapp3_1|Any CPU.Build.0 = Debug-netcoreapp3_1|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netcoreapp3_1|x64.ActiveCfg = Debug-netcoreapp3_1|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netcoreapp3_1|x64.Build.0 = Debug-netcoreapp3_1|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netfx|Any CPU.ActiveCfg = Debug-netfx|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netfx|Any CPU.Build.0 = Debug-netfx|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netfx|x64.ActiveCfg = Debug-netfx|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Debug-netfx|x64.Build.0 = Debug-netfx|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release|x64.Build.0 = Release|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netcoreapp3_1|Any CPU.ActiveCfg = Release-netcoreapp3_1|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netcoreapp3_1|Any CPU.Build.0 = Release-netcoreapp3_1|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netcoreapp3_1|x64.ActiveCfg = Release-netcoreapp3_1|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netcoreapp3_1|x64.Build.0 = Release-netcoreapp3_1|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netcoreapp3_1|Any CPU.ActiveCfg = Debug-netcoreapp3_1|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netcoreapp3_1|Any CPU.Build.0 = Debug-netcoreapp3_1|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netcoreapp3_1|x64.ActiveCfg = Debug-netcoreapp3_1|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netcoreapp3_1|x64.Build.0 = Debug-netcoreapp3_1|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netfx|Any CPU.ActiveCfg = Debug-netfx|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netfx|Any CPU.Build.0 = Debug-netfx|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netfx|x64.ActiveCfg = Debug-netfx|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Debug-netfx|x64.Build.0 = Debug-netfx|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release|x64.Build.0 = Release|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netcoreapp3_1|Any CPU.ActiveCfg = Release-netcoreapp3_1|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netcoreapp3_1|Any CPU.Build.0 = Release-netcoreapp3_1|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netcoreapp3_1|x64.ActiveCfg = Release-netcoreapp3_1|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netcoreapp3_1|x64.Build.0 = Release-netcoreapp3_1|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netfx|Any CPU.ActiveCfg = Release-netfx|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netfx|Any CPU.Build.0 = Release-netfx|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netfx|x64.ActiveCfg = Release-netfx|Any CPU
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3}.Release-netfx|x64.Build.0 = Release-netfx|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
|
@ -1817,12 +1858,9 @@ Global
|
|||
{B7B593C5-FB8C-4ADA-A638-5B53B47D087E} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
|
||||
{16BB1454-2108-40E5-B3A6-594654005303} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
|
||||
{55C8122D-79EA-48AB-85D0-EB551FC1C427} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
|
||||
{DEC8F776-49F7-4D87-836C-FE4DC057D08C} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
|
||||
{6C95FC87-F5F2-4EEF-BB97-567F2F5DD141} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
|
||||
{7A9DB75F-2CA5-4184-9EF5-1F17EB39483F} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
|
||||
{64F40A0D-D4C2-4AA7-8470-E9CC437827E4} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
|
||||
{362A98CF-FBF7-4EBB-A11B-990BBF845B15} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
|
||||
{487213C9-E8A9-4F94-85D7-28A05DBBFE3A} = {DEC8F776-49F7-4D87-836C-FE4DC057D08C}
|
||||
{9252A8EB-ABFB-440C-AB4D-1D562753CE0F} = {487213C9-E8A9-4F94-85D7-28A05DBBFE3A}
|
||||
{3DEB504D-7A07-48CE-91A2-8047461CB3D4} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
|
||||
{001F3B4E-FBE4-4001-AFD2-A6A989CD1C25} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
|
||||
|
@ -1880,6 +1918,8 @@ Global
|
|||
{F5D11F71-2D61-4AE9-99D7-0F0B54649B15} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
|
||||
{A6924919-9E37-4023-8B7F-E85C8E3CC9B3} = {DA452A53-2E94-4433-B08C-041EDEC729E6}
|
||||
{3C8F910B-7F23-4D25-B521-6D5AC9570ADD} = {DA452A53-2E94-4433-B08C-041EDEC729E6}
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
|
||||
{1BA5C784-52E8-4A87-8525-26B2452F2882} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
|
||||
{56CB0850-7341-4D71-9AE4-9EFC472D93DD} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
|
||||
{46CC5637-3DDF-4100-93FC-44BB87B2DB81} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
|
||||
{3817A875-278C-4140-BF66-3C4A8CA55F0D} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
|
||||
|
@ -1889,8 +1929,8 @@ Global
|
|||
{C8DB58DC-6434-4431-A81F-263D86E2A5F3} = {AED9C836-31E3-4F3F-8ABC-929555D3F3C4}
|
||||
{C91F81E3-B900-4968-A6DF-F53B515E97E1} = {BF66A305-DF10-47E4-8D81-42049B149D2B}
|
||||
{027DBA48-85B6-46F1-9487-0B49B5057FC0} = {C91F81E3-B900-4968-A6DF-F53B515E97E1}
|
||||
{E2DD0721-5B0F-4606-8182-4C7EFB834518} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
|
||||
{1BA5C784-52E8-4A87-8525-26B2452F2882} = {D3D38B03-B557-484D-8348-8BADEE4DF592}
|
||||
{6CF88209-69DB-4B36-9604-3ECD9F163E96} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
|
||||
{4584326B-C5B3-4CAE-B98A-34C5F5AA16F3} = {09EADF06-BE25-4228-AB53-95AE3E15B530}
|
||||
EndGlobalSection
|
||||
GlobalSection(ExtensibilityGlobals) = postSolution
|
||||
SolutionGuid = {41165AF1-35BB-4832-A189-73060F82B01D}
|
||||
|
|
|
@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
  <solution>
    <add key="disableSourceControlIntegration" value="true" />
  </solution>
  <packageSources>
    <clear />
    <add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
    <add key="dotnet-tools" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json" />
    <add key="dotnet-eng" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json" />
    <add key="vs-buildservices" value="https://pkgs.dev.azure.com/azure-public/vside/_packaging/vs-buildservices/nuget/v3/index.json" />
    <add key="dotnet-core" value="https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json" />
    <add key="myget-core" value="https://dotnet.myget.org/F/dotnet-core/api/v3/index.json" />
    <add key="myget-roslyn" value="https://dotnet.myget.org/F/roslyn-analyzers/api/v3/index.json" />
    <add key="mlnet-daily" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/MachineLearning/nuget/v3/index.json" />
    <add key="mlnet-testdata" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/machinelearning-testdata/nuget/v3/index.json" />
  </packageSources>
  <disabledPackageSources>
    <clear />
  </disabledPackageSources>
</configuration>

@@ -1,2 +1,3 @@
@call "%~dp0run.cmd" build %*
@exit /b %ERRORLEVEL%
@echo off
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0eng\common\Build.ps1""" -restore -build -warnAsError 0 %*"
exit /b %ErrorLevel%

build.sh
@@ -10,4 +10,4 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"

"$DIR/run.sh" build "$@"
"$DIR/eng/common/build.sh" --restore --build --warnAsError false "$@"

@ -1,3 +1,4 @@
|
|||
#TODO: Need to update build documentation.
|
||||
parameters:
|
||||
name: ''
|
||||
architecture: x64
|
||||
|
@ -21,13 +22,18 @@ jobs:
|
|||
timeoutInMinutes: 120
|
||||
cancelTimeoutInMinutes: 10
|
||||
variables:
|
||||
dotnetPath: $(Build.SourcesDirectory)/Tools/dotnetcli/dotnet
|
||||
dotnetPath: $(Build.SourcesDirectory)/.dotnet/dotnet
|
||||
nugetFeed: https://pkgs.dev.azure.com/dnceng/public/_packaging/MachineLearning/nuget/v3/index.json
|
||||
nightlyBuildProjPath: $(Build.SourcesDirectory)/test/Microsoft.ML.NightlyBuild.Tests/Microsoft.ML.NightlyBuild.Tests.csproj
|
||||
nightlyBuildRunPath: $(Build.SourcesDirectory)/bin/AnyCPU.$(_configuration)/Microsoft.ML.NightlyBuild.Tests/$(_targetFramework)
|
||||
nightlyBuildRunPath: $(Build.SourcesDirectory)/artifacts/bin/Microsoft.ML.NightlyBuild.Tests/$(_configuration)/$(_targetFramework)
|
||||
runNightlyBuildProj: $(Build.SourcesDirectory)/test/run-night-build-tests.proj
|
||||
packageUpdaterProjPath: $(Build.SourcesDirectory)/test/Microsoft.ML.NugetPackageVersionUpdater/Microsoft.ML.NugetPackageVersionUpdater.csproj
|
||||
versionFilePath: $(Build.SourcesDirectory)/test/Microsoft.ML.NugetPackageVersionUpdater/latest_versions.txt
|
||||
PROCDUMP_PATH: '$(Build.SourcesDirectory)/Tools/ProcDump/'
|
||||
${{ if eq(parameters.buildScript, 'build.cmd') }}:
|
||||
spaceValue: ' '
|
||||
${{ if eq(parameters.buildScript, './build.sh') }}:
|
||||
spaceValue: '%20'
|
||||
strategy:
|
||||
matrix:
|
||||
${{ if eq(parameters.customMatrixes, '') }}:
|
||||
|
@ -43,7 +49,7 @@ jobs:
|
|||
_targetFramework: netcoreapp2.1
|
||||
${{ if ne(parameters.customMatrixes, '') }}:
|
||||
${{ insert }}: ${{ parameters.customMatrixes }}
|
||||
|
||||
|
||||
pool: ${{ parameters.pool }}
|
||||
${{ if ne(parameters.container, '') }}:
|
||||
container: ${{ parameters.container }}
|
||||
|
@ -57,13 +63,14 @@ jobs:
|
|||
brew untap local/openssl |
|
||||
brew untap local/python2
|
||||
displayName: MacOS Homebrew bug Workaround
|
||||
# Extra MacOS step required to install OS-specific dependencies
|
||||
- ${{ if eq(parameters.pool.name, 'Hosted macOS') }}:
|
||||
- script: brew update && brew unlink python@3.8 && brew install mono-libgdiplus && brew install $(Build.SourcesDirectory)/build/libomp.rb && brew link libomp --force
|
||||
displayName: Install build dependencies
|
||||
displayName: Install MacOS build dependencies
|
||||
- ${{ if and( eq(parameters.nightlyBuild, 'true'), eq(parameters.pool.name, 'Hosted Ubuntu 1604')) }}:
|
||||
- bash: echo "##vso[task.setvariable variable=LD_LIBRARY_PATH]$(nightlyBuildRunPath):$LD_LIBRARY_PATH"
|
||||
displayName: Set LD_LIBRARY_PATH for Ubuntu and CentOS to locate Native shared library in current running path
|
||||
- script: ${{ parameters.buildScript }} -$(_configuration) -buildArch=${{ parameters.architecture }}
|
||||
- script: ${{ parameters.buildScript }} -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} /p:RestorePackagesPath=$(Build.SourcesDirectory)\packages /p:NUGET_PACKAGES=$(Build.SourcesDirectory)\packages
|
||||
displayName: Build
|
||||
- ${{ if eq(parameters.pool.name, 'Hosted macOS') }}:
|
||||
- task: Bash@3
|
||||
|
@ -78,13 +85,13 @@ jobs:
|
|||
script: cd packages;find . -type d -path "*/runtimes/osx-*" -exec rm -rv {} +;find . -type d -path "*/runtimes/win-*" -exec rm -rv {} +;cd ..
|
||||
displayName: Clean up non-Linux runtime folders of NuGet Packages to save disk space
|
||||
- ${{ if eq(parameters.buildScript, 'build.cmd') }}:
|
||||
- script: dir /s "$(Build.SourcesDirectory)"
|
||||
displayName: show bin folder disk usage
|
||||
- task: PowerShell@2
|
||||
inputs:
|
||||
targetType: inline
|
||||
script: Get-ChildItem -Path '.\packages\*\runtimes\*' -Recurse | Select -ExpandProperty FullName | Where {$_ -notlike '*\win-*'} | sort length -Descending | Remove-Item -Recurse -Confirm:$false -Force
|
||||
script: Get-ChildItem -Path '$(Build.SourcesDirectory)\packages\*\runtimes\*' -Recurse | Select -ExpandProperty FullName | Where {$_ -notlike '*\win-*'} | sort length -Descending | Remove-Item -Recurse -Confirm:$false -Force
|
||||
displayName: Clean up non-Windows runtime folders of NuGet Packages to save disk space
|
||||
- script: dir /s "bin"
|
||||
displayName: show bin folder disk usage
|
||||
- ${{ if eq(parameters.nightlyBuild, 'true') }}:
|
||||
- script: $(dotnetPath) restore $(nightlyBuildProjPath)
|
||||
displayName: Restore nightly build project
|
||||
|
@ -94,30 +101,25 @@ jobs:
|
|||
displayName: Update package versions for nightly build
|
||||
- ${{ if eq(parameters.buildScript, 'build.cmd') }}:
|
||||
- powershell: |
|
||||
Get-ChildItem -Path '.\bin\AnyCPU.*' -Recurse |
|
||||
Get-ChildItem -Path '.\artifacts\bin\*' -Recurse |
|
||||
Select -ExpandProperty FullName |
|
||||
Where {$_ -notlike '*\Microsoft.ML.NightlyBuild.Tests*'} |
|
||||
sort length -Descending |
|
||||
Remove-Item -force
|
||||
Where {$_ -NotMatch '.*\\Microsoft\.ML\.NightlyBuild\.Tests.*|.*\\Native.*'} |
|
||||
sort length -Descending |
|
||||
Remove-Item -force
|
||||
Write-Output "Done cleaning up usless project..."
|
||||
displayName: Clean up useless project
|
||||
- script: $(dotnetPath) msbuild -restore $(nightlyBuildProjPath) /p:ReferenceTypeForTestFramework="Nuget" /p:Configuration=$(_configuration) /p:TargetArchitecture=${{ parameters.architecture }}
|
||||
displayName: Build Nightly-Build Project with latest package versions
|
||||
- script: ${{ parameters.buildScript }} -$(_configuration) -runnightlybuildtests
|
||||
- script: $(dotnetPath) msbuild $(runNightlyBuildProj) /t:RunNightlyBuildTests /p:Configuration=$(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }}
|
||||
displayName: Run Nightly Build Tests
|
||||
- ${{ if eq(parameters.nightlyBuild, 'false') }}:
|
||||
- script: ${{ parameters.buildScript }} -- /t:DownloadExternalTestFiles /p:IncludeBenchmarkData=$(_includeBenchmarkData)
|
||||
displayName: Download Benchmark Data
|
||||
timeoutInMinutes: 10
|
||||
- script: ${{ parameters.buildScript }} -- /t:DownloadTensorflowMetaFiles /p:IncludeTensorflowMetaFile=true
|
||||
displayName: Download Tensorflow Meta File
|
||||
timeoutInMinutes: 20
|
||||
- ${{ if eq(parameters.innerLoop, 'false') }}:
|
||||
- ${{ if and(eq(parameters.runSpecific, 'false'), eq(parameters.useVSTestTask, 'false')) }}:
|
||||
- script: ${{ parameters.buildScript }} -$(_configuration) -runtests -coverage=${{ parameters.codeCoverage }}
|
||||
# TODO: Code coverage needs to be fixed.
|
||||
- script: ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest -ci #-coverage=${{ parameters.codeCoverage }}
|
||||
displayName: Run All Tests.
|
||||
- ${{ if and(eq(parameters.runSpecific, 'true'), eq(parameters.useVSTestTask, 'false')) }}:
|
||||
- script: ${{ parameters.buildScript }} -$(_configuration) -runSpecificTests -coverage=${{ parameters.codeCoverage }}
|
||||
- script: ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest -ci /p:TestRunnerAdditionalArguments='-trait$(spaceValue)Category=RunSpecificTest' /p:RestorePackagesPath=$(Build.SourcesDirectory)\packages /p:NUGET_PACKAGES=$(Build.SourcesDirectory)\packages #-coverage=${{ parameters.codeCoverage }}
|
||||
displayName: Run Specific Tests.
|
||||
- ${{ if and(eq(parameters.buildScript, 'build.cmd'), eq(parameters.useVSTestTask, 'true')) }}:
|
||||
- task: VSTest@2
|
||||
|
@ -127,10 +129,10 @@ jobs:
|
|||
testAssemblyVer2: |
|
||||
**\*test.dll
|
||||
**\*tests.dll
|
||||
!**\obj\**
|
||||
!**\obj\**
|
||||
runSettingsFile: $(Build.SourcesDirectory)/tools-local/vstest.runsettings
|
||||
searchFolder: '$(System.DefaultWorkingDirectory)'
|
||||
vstestLocationMethod: 'version'
|
||||
vstestLocationMethod: 'version'
|
||||
vsTestVersion: 'latest'
|
||||
runInParallel: False
|
||||
runTestsInIsolation: True
|
||||
|
@ -141,9 +143,9 @@ jobs:
|
|||
collectDumpOn: onAbortOnly
|
||||
publishRunAttachments: true
|
||||
- ${{ if eq(parameters.innerLoop, 'true') }}:
|
||||
- script: ${{ parameters.buildScript }} -$(_configuration) -runCITests -coverage=${{ parameters.codeCoverage }}
|
||||
- script: ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest -ci /p:TestRunnerAdditionalArguments='-notrait$(spaceValue)Category=SkipInCI' /p:RestorePackagesPath=$(Build.SourcesDirectory)\packages /p:NUGET_PACKAGES=$(Build.SourcesDirectory)\packages #-coverage=${{ parameters.codeCoverage }}
|
||||
displayName: Run CI Tests.
|
||||
- script: $(Build.SourcesDirectory)/Tools/dotnetcli/dotnet msbuild -restore build/Codecoverage.proj
|
||||
- script: $(dotnetPath) msbuild -restore build/Codecoverage.proj
|
||||
displayName: Upload coverage to codecov.io
|
||||
condition: and(succeeded(), eq(${{ parameters.codeCoverage }}, True))
|
||||
- task: PublishTestResults@2
|
||||
|
@ -151,7 +153,7 @@ jobs:
|
|||
condition: succeededOrFailed()
|
||||
inputs:
|
||||
testRunner: 'vSTest'
|
||||
searchFolder: '$(System.DefaultWorkingDirectory)/bin'
|
||||
searchFolder: '$(System.DefaultWorkingDirectory)/artifacts/TestResults'
|
||||
testResultsFiles: '**/*.trx'
|
||||
testRunTitle: Machinelearning_Tests_${{ parameters.name }}_$(_configuration)_$(Build.BuildNumber)
|
||||
configuration: $(_configuration)
|
||||
|
@ -161,16 +163,15 @@ jobs:
|
|||
condition: not(succeeded())
|
||||
inputs:
|
||||
sourceFolder: $(Build.SourcesDirectory)
|
||||
contents: '?(msbuild.*|binclash.log|init-tools.log)'
|
||||
contents: 'artifacts/log/**'
|
||||
targetFolder: $(Build.ArtifactStagingDirectory)
|
||||
- task: CopyFiles@2
|
||||
displayName: Stage test output
|
||||
condition: not(succeeded())
|
||||
inputs:
|
||||
sourceFolder: $(Build.SourcesDirectory)/bin
|
||||
sourceFolder: $(Build.SourcesDirectory)
|
||||
contents: |
|
||||
**/TestOutput/**/*
|
||||
**/*.trx
|
||||
artifacts/TestResults/**
|
||||
targetFolder: $(Build.ArtifactStagingDirectory)
|
||||
- task: CopyFiles@2
|
||||
displayName: Stage process dump and pdb if any
|
||||
|
@ -180,7 +181,7 @@ jobs:
|
|||
contents: |
|
||||
*.dmp
|
||||
CrashDumps/*.dmp
|
||||
bin/**/*.pdb
|
||||
artifacts/bin/**/*.pdb
|
||||
targetFolder: $(Build.ArtifactStagingDirectory)
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: Publish build and test logs
|
||||
|
@ -190,5 +191,5 @@ jobs:
|
|||
artifactName: ${{ parameters.name }} $(_config_short)
|
||||
artifactType: container
|
||||
- ${{ if eq(parameters.nightlyBuild, 'false') }}:
|
||||
- script: ${{ parameters.buildScript }} -buildPackages
|
||||
- script: ${{ parameters.buildScript }} /p:Build=false -pack -ci /p:RestorePackagesPath=$(Build.SourcesDirectory)\packages /p:NUGET_PACKAGES=$(Build.SourcesDirectory)\packages
|
||||
displayName: Build Packages
|
||||
|
|
|
@ -1,7 +1,12 @@
|
|||
<Project>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.DotNet.BuildTools" Version="$(BuildToolsPackageVersion)" />
|
||||
</ItemGroup>
|
||||
|
||||
<Import Project="$(MSBuildThisFileDirectory)..\Directory.Build.props" />
|
||||
|
||||
<UsingTask TaskName="ExecWithRetriesForNuGetPush" AssemblyFile="$(ToolsDir)Microsoft.DotNet.Build.Tasks.dll" />
|
||||
<UsingTask TaskName="ExecWithRetriesForNuGetPush" AssemblyFile="$(NuGetPackageRoot)\microsoft.dotnet.buildtools\$(BuildToolsPackageVersion)\lib\Microsoft.DotNet.Build.Tasks.dll" />
|
||||
|
||||
<PropertyGroup>
|
||||
<PublishSymbolsPackage>Microsoft.SymbolUploader.Build.Task</PublishSymbolsPackage>
|
||||
|
@ -9,15 +14,15 @@
|
|||
<NuGetPushTimeoutSeconds Condition="'$(NuGetPushTimeoutSeconds)' == ''">600</NuGetPushTimeoutSeconds>
|
||||
</PropertyGroup>
|
||||
|
||||
<Import Project="$(PackagesDir)\$(PublishSymbolsPackage.ToLower())\$(PublishSymbolsPackageVersion)\build\PublishSymbols.targets" />
|
||||
<Import Project="$(NuGetPackageRoot)\$(PublishSymbolsPackage.ToLower())\$(MicrosoftSymbolUploaderBuildTaskVersion)\build\PublishSymbols.targets" />
|
||||
|
||||
<Target Name="PublishPackages">
|
||||
<Error Condition="'$(NuGetFeedUrl)' == ''" Text="Missing required property NuGetFeedUrl" />
|
||||
<Error Condition="'$(NuGetApiKey)' == ''" Text="Missing required property NuGetApiKey" />
|
||||
|
||||
<ItemGroup>
|
||||
<NuGetPackages Include="$(PackageOutputPath)**\*.nupkg"
|
||||
Exclude="$(PackageOutputPath)**\*.symbols.*nupkg" />
|
||||
<NuGetPackages Include="$(ArtifactsDir)packages\**\*.nupkg"
|
||||
Exclude="$(ArtifactsDir)packages\**\*.snupkg" />
|
||||
|
||||
<!--
|
||||
IgnorableErrorMessages applies to the "ExectWithRetriesForNuGetPush" task.
|
||||
|
@ -34,7 +39,7 @@
|
|||
<Message Text="Pushing ML.NET packages to $(NuGetFeedUrl)" />
|
||||
|
||||
<PropertyGroup>
|
||||
<DotnetToolCommand>$(ToolsDir)dotnetcli/dotnet</DotnetToolCommand>
|
||||
<DotnetToolCommand>$(MSBuildThisFileDirectory)..\.dotnet\dotnet</DotnetToolCommand>
|
||||
<NuGetPushCommand>$(DotnetToolCommand) nuget push --source $(NuGetFeedUrl) --api-key $(NuGetApiKey) --timeout $(NuGetPushTimeoutSeconds)</NuGetPushCommand>
|
||||
</PropertyGroup>
|
||||
|
||||
|
@ -46,7 +51,7 @@
|
|||
Condition="'$(EnablePublishSymbols)'=='true'"
|
||||
DependsOnTargets="SetupPublishSymbols">
|
||||
<Message Text="Attempting to Publish Symbols" />
|
||||
<Error Condition="!Exists('$(PackageOutputPath)')" Text="'PackageOutputPath' folder '$(PackageOutputPath)' does not exist."/>
|
||||
<Error Condition="!Exists('$(ArtifactsDir)packages')" Text="'PackageOutputPath' folder '$(PackageOutputPath)' does not exist."/>
|
||||
<Error Condition="'$(SymbolServerPath)'==''" Text="Missing property SymbolServerPath" />
|
||||
<Error Condition="'$(SymbolServerPAT)'==''" Text="Missing property SymbolServerPAT" />
|
||||
<CallTarget Targets="PublishSymbols" />
|
||||
|
@ -59,11 +64,11 @@
|
|||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<SymbolPackagesToPublish Include="$(PackageOutputPath)**\*.symbols.*nupkg" />
|
||||
<SymbolPackagesToPublish Include="$(ArtifactsDir)packages\**\*.snupkg" />
|
||||
</ItemGroup>
|
||||
|
||||
<Message Importance="High" Text="Publishing @(SymbolPackagesToPublish) to $(SymbolServerPath)"/>
|
||||
<Error Condition="'@(SymbolPackagesToPublish)'==''" Text="There are no symbol nuget packages to publish" />
|
||||
</Target>
|
||||
|
||||
</Project>
|
||||
</Project>
|
|
@ -1,50 +0,0 @@
|
|||
<Project InitialTargets="SetSigningProperties" DefaultTargets="SignBinaries">
|
||||
<Import Project="$(MSBuildThisFileDirectory)..\Directory.Build.props" />
|
||||
|
||||
<!-- This will be overridden if we're building with MicroBuild. -->
|
||||
<Target Name="SignFiles">
|
||||
<Message Text="Attempting to sign %(FilesToSign.Identity) with authenticode='%(FilesToSign.Authenticode)'" />
|
||||
</Target>
|
||||
|
||||
<Import Project="$(ToolsDir)MicroBuild.Core.props" />
|
||||
<Import Project="$(ToolsDir)MicroBuild.Core.targets" />
|
||||
|
||||
<Target Name="SetSigningProperties">
|
||||
<PropertyGroup>
|
||||
<!-- The OutDir and IntermediateOutputPath properties are required by MicroBuild. MicroBuild only
|
||||
signs files that are under these paths. -->
|
||||
<OutDir Condition="'$(OutDir)' == '' AND '$(SignNugetPackages)' != 'true'">$(PackageAssetsPath)</OutDir>
|
||||
<OutDir Condition="'$(OutDir)' == '' AND '$(SignNugetPackages)' == 'true'">$(PackageOutputPath)</OutDir>
|
||||
<IntermediateOutputPath Condition="'$(IntermediateOutputPath)' == ''">$(IntermediateOutputRootPath)</IntermediateOutputPath>
|
||||
|
||||
</PropertyGroup>
|
||||
|
||||
<Error Condition="!Exists('$(OutDir)')" Text="'OutDir' folder '$(OutDir)' does not exist."/>
|
||||
</Target>
|
||||
|
||||
<Target Name="SignBinaries" Condition="'$(SignType)' == 'real'" DependsOnTargets="GetFilesToSign">
|
||||
<CallTarget Targets="SignFiles" />
|
||||
</Target>
|
||||
|
||||
<Target Name="GetFilesToSign">
|
||||
|
||||
<!-- If we are not signing nuget packages we default to sign binaries -->
|
||||
<ItemGroup Condition="'$(SignNugetPackages)' != 'true'">
|
||||
|
||||
<FilesToSign Include="$(OutDir)**/*.dll">
|
||||
<Authenticode>Microsoft400</Authenticode>
|
||||
</FilesToSign>
|
||||
</ItemGroup>
|
||||
|
||||
<!-- Get nuget packages for signing -->
|
||||
<ItemGroup Condition="'$(SignNugetPackages)' == 'true'">
|
||||
<FilesToSign Include="$(OutDir)*.nupkg" Exclude="$(OutDir)*.symbols.nupkg">
|
||||
<Authenticode>NuGet</Authenticode>
|
||||
</FilesToSign>
|
||||
|
||||
</ItemGroup>
|
||||
|
||||
<Error Condition="'@(FilesToSign)' == ''" Text="There are no files to sign. FilesToSign group is empty."/>
|
||||
</Target>
|
||||
|
||||
</Project>
|
|
@ -1,5 +1,5 @@
|
|||
################################################################################
|
||||
# ML.NET's official, signed build
|
||||
# ML.NET's official, signed build
|
||||
################################################################################
|
||||
|
||||
resources:
|
||||
|
@@ -23,14 +18,18 @@ phases:
- agent.os -equals linux
container: CentosContainer
steps:
- script: ./restore.sh
displayName: restore all projects
- script: ./build.sh -configuration $(BuildConfig) /p:SkipRIDAgnosticAssets=true -projects $(Build.SourcesDirectory)/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj
displayName: build redist
# Only build native assets to avoid conflicts.
- script: ./build.sh -buildNative -$(BuildConfig) -skipRIDAgnosticAssets
- script: ./build.sh -configuration $(BuildConfig) -projects $(Build.SourcesDirectory)/src/Native/Native.proj /p:TargetArchitecture=x64
displayName: Build

- task: PublishBuildArtifacts@1
displayName: Publish Linux package assets
inputs:
pathToPublish: $(Build.SourcesDirectory)/bin/obj/packages
pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
artifactName: PackageAssets
artifactType: container

@@ -46,23 +50,26 @@ phases:
queue:
name: Hosted macOS
steps:
# Work around MacOS Homebrew image/environment bug: https://github.com/actions/virtual-environments/issues/1811
- script: |
brew uninstall openssl@1.0.2t |
brew uninstall python@2.7.17 |
brew untap local/openssl |
brew untap local/python2
displayName: MacOS Homebrew bug Workaround
- script: brew update && brew install $(Build.SourcesDirectory)/build/libomp.rb && brew link libomp --force
- script: brew update && brew unlink python@3.8 && brew install mono-libgdiplus && brew install $(Build.SourcesDirectory)/build/libomp.rb && brew link libomp --force
displayName: Install build dependencies
- script: ./restore.sh
displayName: restore all projects
- script: ./build.sh -configuration $(BuildConfig) /p:SkipRIDAgnosticAssets=true -projects $(Build.SourcesDirectory)/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj
displayName: build redist
# Only build native assets to avoid conflicts.
- script: ./build.sh -buildNative -$(BuildConfig) -skipRIDAgnosticAssets
- script: ./build.sh -projects $(Build.SourcesDirectory)/src/Native/Native.proj -configuration $(BuildConfig) /p:TargetArchitecture=x64
displayName: Build

- task: PublishBuildArtifacts@1
displayName: Publish macOS package assets
inputs:
pathToPublish: $(Build.SourcesDirectory)/bin/obj/packages
pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
artifactName: PackageAssets
artifactType: container

@@ -80,7 +87,7 @@ phases:
_TeamName: DotNetCore
queue:
name: DotNetCore-Build
demands:
demands:
- agent.os -equals Windows_NT
steps:

@@ -95,27 +102,26 @@ phases:
continueOnError: false
condition: and(succeeded(), in(variables._SignType, 'real', 'test'))

- script: ./restore.cmd
displayName: restore all projects
- script: ./build.cmd -configuration $(BuildConfig) /p:SkipRIDAgnosticAssets=true -projects $(Build.SourcesDirectory)/src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj
displayName: build redist
# Only build native assets to avoid conflicts.
- script: ./build.cmd -buildNative -$(BuildConfig) -buildArch=x86 -skipRIDAgnosticAssets
- script: ./build.cmd -projects $(Build.SourcesDirectory)/src/Native/Native.proj -configuration $(BuildConfig) /p:TargetArchitecture=x86
displayName: Build

- task: MSBuild@1
displayName: Sign Windows_x86 Binaries
inputs:
solution: build/sign.proj
msbuildArguments: /p:SignType=$(_SignType)
msbuildVersion: 15.0
continueOnError: false

- script: ./sign.cmd -configuration $(BuildConfig) /p:TargetArchitecture=x86 /p:SignBinaries=true
displayName: sign binaries

- task: PublishBuildArtifacts@1
displayName: Publish Windows_x86 package assets
inputs:
pathToPublish: $(Build.SourcesDirectory)/bin/obj/packages
pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
artifactName: PackageAssets
artifactType: container

# Terminate all dotnet build processes.
- script: $(Build.SourcesDirectory)/Tools/dotnetcli/dotnet.exe build-server shutdown
- script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
displayName: Dotnet Server Shutdown

################################################################################
@@ -132,7 +138,7 @@ phases:
_TeamName: DotNetCore
queue:
name: DotNetCore-Build
demands:
demands:
- agent.os -equals Windows_NT
steps:

@@ -147,8 +153,8 @@ phases:
continueOnError: false
condition: and(succeeded(), in(variables._SignType, 'real', 'test'))

# Build both native and managed assets.
- script: ./build.cmd -$(BuildConfig)
# Build both native and managed assets.
- script: ./build.cmd -configuration $(BuildConfig) /p:TargetArchitecture=x64
displayName: Build

- task: ComponentGovernanceComponentDetection@0
@@ -157,24 +163,19 @@ phases:
verbosity: 'Verbose'
alertWarningLevel: 'High'


- task: MSBuild@1
displayName: Sign Windows_x64 Binaries
inputs:
solution: build/sign.proj
msbuildArguments: /p:SignType=$(_SignType)
msbuildVersion: 15.0
continueOnError: false

- script: ./sign.cmd -configuration $(BuildConfig) /p:TargetArchitecture=x64 /p:SignBinaries=true
displayName: sign binaries

- task: PublishBuildArtifacts@1
displayName: Publish Windows_x64 package assets
inputs:
pathToPublish: $(Build.SourcesDirectory)/bin/obj/packages
pathToPublish: $(Build.SourcesDirectory)/artifacts/pkgassets
artifactName: PackageAssets
artifactType: container

# Terminate all dotnet build processes.
- script: $(Build.SourcesDirectory)/Tools/dotnetcli/dotnet.exe build-server shutdown
- script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
displayName: Dotnet Server Shutdown

################################################################################
@@ -199,7 +200,7 @@ phases:
_MsdlSymbolServerPath: https://microsoftpublicsymbols.artifacts.visualstudio.com/DefaultCollection
queue:
name: DotNetCore-Build
demands:
demands:
- agent.os -equals Windows_NT
steps:

@@ -220,31 +221,27 @@ phases:
displayName: Download package assets
inputs:
artifactName: PackageAssets
downloadPath: $(Build.SourcesDirectory)/bin/obj/packages
downloadPath: $(Build.SourcesDirectory)/artifacts/pkgassets

# Workaround https://github.com/Microsoft/vsts-tasks/issues/6739
- task: CopyFiles@2
displayName: Copy package assets to correct folder
inputs:
sourceFolder: $(Build.SourcesDirectory)/bin/obj/packages/PackageAssets
targetFolder: $(Build.SourcesDirectory)/bin/obj/packages

- script: ./build.cmd -buildPackages
displayName: Create Packages
sourceFolder: $(Build.SourcesDirectory)/artifacts/pkgassets/PackageAssets
targetFolder: $(Build.SourcesDirectory)/artifacts/pkgassets

- task: MSBuild@1
displayName: Sign Packages
inputs:
solution: build/sign.proj
msbuildArguments: /p:SignType=$(_SignType) /p:SignNugetPackages=true
msbuildVersion: 15.0
- script: ./build.cmd -pack -configuration $(BuildConfig)
displayName: Build Packages

- script: ./sign.cmd /p:SignNugetPackages=true
displayName: sign packages
continueOnError: false

- task: NuGetAuthenticate@0
inputs:
nuGetServiceConnections: machinelearning-dnceng-public-feed # To allow publishing to a feed of another organization

- script: Tools\dotnetcli\dotnet msbuild build\publish.proj /t:PublishPackages /p:NuGetFeedUrl=$(_AzureDevopsFeedUrl) /p:NuGetApiKey=AzureArtifacts
- script: $(Build.SourcesDirectory)\.dotnet\dotnet.exe msbuild build\publish.proj /t:PublishPackages /p:NuGetFeedUrl=$(_AzureDevopsFeedUrl) /p:NuGetApiKey=AzureArtifacts
displayName: Publish Packages to AzureDevOps Feed

- task: MSBuild@1
@@ -264,5 +261,5 @@ phases:
continueOnError: true

# Terminate all dotnet build processes.
- script: $(Build.SourcesDirectory)/Tools/dotnetcli/dotnet.exe build-server shutdown
- script: $(Build.SourcesDirectory)/.dotnet/dotnet.exe build-server shutdown
displayName: Dotnet Server Shutdown

@@ -7,6 +7,14 @@

<ItemGroup>
<ProjectReference Include="..\..\..\src\Microsoft.ML.AutoML\Microsoft.ML.AutoML.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.ML.Core\Microsoft.ML.Core.csproj" >
<PrivateAssets>all</PrivateAssets>
</ProjectReference>

<ProjectReference Include="..\..\..\src\Microsoft.ML.Transforms\Microsoft.ML.Transforms.csproj" >
<PrivateAssets>all</PrivateAssets>
</ProjectReference>

<NativeAssemblyReference Include="MatrixFactorizationNative" />
<NativeAssemblyReference Include="FastTreeNative" />
</ItemGroup>

@@ -20,9 +20,13 @@

<ItemGroup>
<ProjectReference Include="..\..\..\src\Microsoft.ML.Vision\Microsoft.ML.Vision.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.ML.ImageAnalytics\Microsoft.ML.ImageAnalytics.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.ML.Data\Microsoft.ML.Data.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.ML.LightGbm\Microsoft.ML.LightGbm.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.ML.TensorFlow\Microsoft.ML.TensorFlow.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.ML.SamplesUtils\Microsoft.ML.SamplesUtils.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.ML.StandardTrainers\Microsoft.ML.StandardTrainers.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.ML.Transforms\Microsoft.ML.Transforms.csproj" />

<NativeAssemblyReference Include="CpuMathNative" />
<NativeAssemblyReference Include="FastTreeNative" />
@@ -48,14 +52,14 @@
</ItemGroup>

<ItemGroup>
<Content Include="$(ObjDir)DnnImageModels\ResNet18Onnx\ResNet18.onnx">
<Content Include="$(ArtifactsObjDir)DnnImageModels\ResNet18Onnx\ResNet18.onnx">
<Link>DnnImageModels\ResNet18Onnx\ResNet18.onnx</Link>
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
</ItemGroup>

<ItemGroup>
<Content Include="$(ObjDir)DnnImageModels\ResNetPrepOnnx\ResNetPreprocess.onnx">
<Content Include="$(ArtifactsObjDir)DnnImageModels\ResNetPrepOnnx\ResNetPreprocess.onnx">
<Link>DnnImageModels\ResNetPrepOnnx\ResNetPreprocess.onnx</Link>
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>

@@ -82,7 +82,8 @@ namespace Samples.Dynamic.ModelOperations
//Create the pipeline using onnx file.
var onnxModelPath = "your_path_to_sample_onnx_conversion_1.onnx";
var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(trainTestOriginalData.TrainSet);
//Make sure to either use the 'using' clause or explicitly dispose the returned onnxTransformer to prevent memory leaks
using var onnxTransformer = onnxEstimator.Fit(trainTestOriginalData.TrainSet);

//Inference the testset
var output = transformer.Transform(trainTestOriginalData.TestSet);
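
Aside on the hunk above: it changes the sample to dispose the fitted OnnxTransformer, which owns native OnnxRuntime resources. The minimal C# sketch below illustrates the same disposal pattern outside the diff; the model path ("model.onnx"), the InputData type, and the "Features" column are illustrative placeholders that would have to match the actual ONNX model, and they are not part of this PR.

using System;
using Microsoft.ML;
using Microsoft.ML.Data;

// Illustrative input schema; the vector size and column name must match the ONNX model's input.
public class InputData
{
    [VectorType(3)]
    public float[] Features { get; set; }
}

public static class Program
{
    public static void Main()
    {
        var mlContext = new MLContext();
        var data = mlContext.Data.LoadFromEnumerable(new[]
        {
            new InputData { Features = new float[] { 1f, 2f, 3f } }
        });

        // "model.onnx" is a placeholder path to some ONNX model on disk.
        var onnxEstimator = mlContext.Transforms.ApplyOnnxModel("model.onnx");

        // Dispose the fitted transformer; 'using' guarantees Dispose() runs
        // even if Transform throws, so the native inference session is released.
        using var onnxTransformer = onnxEstimator.Fit(data);
        var output = onnxTransformer.Transform(data);
        Console.WriteLine(output.Schema.Count);
    }
}
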
@@ -30,6 +30,9 @@
<ProjectReference Include="..\..\..\src\Microsoft.ML.DnnImageFeaturizer.ResNet18\Microsoft.ML.DnnImageFeaturizer.ResNet18.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.ML.Transforms\Microsoft.ML.Transforms.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.ML.OnnxConverter\Microsoft.ML.OnnxConverter.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.ML.ImageAnalytics\Microsoft.ML.ImageAnalytics.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.ML.Core\Microsoft.ML.Core.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.ML.Data\Microsoft.ML.Data.csproj" />

<NativeAssemblyReference Include="CpuMathNative" />
<NativeAssemblyReference Include="FastTreeNative" />
@@ -977,14 +980,14 @@
</ItemGroup>

<ItemGroup>
<Content Include="$(ObjDir)DnnImageModels\ResNet18Onnx\ResNet18.onnx">
<Content Include="$(ArtifactsObjDir)DnnImageModels\ResNet18Onnx\ResNet18.onnx">
<Link>DnnImageModels\ResNet18Onnx\ResNet18.onnx</Link>
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
</ItemGroup>

<ItemGroup>
<Content Include="$(ObjDir)DnnImageModels\ResNetPrepOnnx\ResNetPreprocess.onnx">
<Content Include="$(ArtifactsObjDir)DnnImageModels\ResNetPrepOnnx\ResNetPreprocess.onnx">
<Link>DnnImageModels\ResNetPrepOnnx\ResNetPreprocess.onnx</Link>
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>

@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="utf-8"?>
<Project>

<ItemDefinitionGroup>
<ProjectToBuild>
<RestoreInParallel>true</RestoreInParallel>
<BuildInParallel>false</BuildInParallel>
</ProjectToBuild>
</ItemDefinitionGroup>

<ItemGroup>
<ProjectToBuild Include="$(RepoRoot)src/Redist/Microsoft.ML.DnnImageFeaturizer.ModelRedist/Microsoft.ML.DnnImageFeaturizer.ModelRedist.proj" />
<ProjectToBuild Include="$(RepoRoot)src/Native/Native.proj" />
<ProjectToBuild Include="$(RepoRoot)Microsoft.ML.sln" />
</ItemGroup>

</Project>

@@ -0,0 +1,14 @@
<Project>
<ItemGroup Condition="'$(SignBinaries)' == 'true'">
<ItemsToSign Remove="@(ItemsToSign)" />
<ItemsToSign Include="$(ArtifactsDir)pkgassets\**\*.dll" />
<FileExtensionSignInfo Include="*.dll" CertificateName="Microsoft400" />
</ItemGroup>
<ItemGroup Condition="'$(SignNugetPackages)' == 'true'">
<ItemsToSign Remove="@(ItemsToSign)" />
<ItemsToSign Include="$(ArtifactsDir)packages\**\*.nupkg" />
<ItemsToSign Include="$(ArtifactsDir)packages\**\*.snupkg" />
<FileExtensionSignInfo Include="*.nupkg" CertificateName="NuGet" />
<FileExtensionSignInfo Include="*.snupkg" CertificateName="NuGet" />
</ItemGroup>
</Project>

@@ -0,0 +1,9 @@
<Project>

<ItemGroup>
<!-- SourceLink -->
<PackageReference Include="Microsoft.SourceLink.GitHub" Version="$(MicrosoftSourceLinkVersion)" PrivateAssets="all" IsImplicitlyDefined="true" />
<PackageReference Include="Microsoft.SourceLink.AzureRepos.Git" Version="$(MicrosoftSourceLinkVersion)" PrivateAssets="all" IsImplicitlyDefined="true" />
</ItemGroup>

</Project>

@ -0,0 +1,67 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Dependencies>
|
||||
<ProductDependencies>
|
||||
<Dependency Name="Microsoft.SymbolUploader.Build.Task" Version="1.1.145102">
|
||||
<Uri>https://dev.azure.com/dnceng/internal/_git/dotnet-symuploader</Uri>
|
||||
<Sha>9bdfdb0af37d2e93bdecf238a8a51c0a965444d6</Sha>
|
||||
</Dependency>
|
||||
</ProductDependencies>
|
||||
<ToolsetDependencies>
|
||||
<Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="5.0.0-beta.20461.7">
|
||||
<Uri>https://github.com/dotnet/arcade</Uri>
|
||||
<Sha>a81e6e87cf21837abfde2da6eb9b057bea9f49fc</Sha>
|
||||
</Dependency>
|
||||
<Dependency Name="Microsoft.DotNet.Build.Tasks.Feed" Version="5.0.0-beta.20461.7">
|
||||
<Uri>https://github.com/dotnet/arcade</Uri>
|
||||
<Sha>a81e6e87cf21837abfde2da6eb9b057bea9f49fc</Sha>
|
||||
</Dependency>
|
||||
<Dependency Name="Microsoft.DotNet.SignTool" Version="5.0.0-beta.20461.7">
|
||||
<Uri>https://github.com/dotnet/arcade</Uri>
|
||||
<Sha>a81e6e87cf21837abfde2da6eb9b057bea9f49fc</Sha>
|
||||
</Dependency>
|
||||
<Dependency Name="Microsoft.DotNet.Helix.Sdk" Version="5.0.0-beta.20461.7">
|
||||
<Uri>https://github.com/dotnet/arcade</Uri>
|
||||
<Sha>a81e6e87cf21837abfde2da6eb9b057bea9f49fc</Sha>
|
||||
</Dependency>
|
||||
<Dependency Name="Microsoft.DotNet.SwaggerGenerator.MSBuild" Version="5.0.0-beta.20461.7">
|
||||
<Uri>https://github.com/dotnet/arcade</Uri>
|
||||
<Sha>a81e6e87cf21837abfde2da6eb9b057bea9f49fc</Sha>
|
||||
</Dependency>
|
||||
<Dependency Name="Microsoft.DotNet.Maestro.Client" Version="1.1.0-beta.20258.6">
|
||||
<Uri>https://github.com/dotnet/arcade-services</Uri>
|
||||
<Sha>869869342f1ec338de96adcea6e003b61f195256</Sha>
|
||||
</Dependency>
|
||||
<Dependency Name="Microsoft.DotNet.Maestro.Tasks" Version="1.1.0-beta.20461.2">
|
||||
<Uri>https://github.com/dotnet/arcade-services</Uri>
|
||||
<Sha>592654f4a6855d7738a7c7c780355ac54457fdae</Sha>
|
||||
</Dependency>
|
||||
<Dependency Name="Microsoft.DotNet.XHarness.CLI" Version="1.0.0-prerelease.20457.1">
|
||||
<Uri>https://github.com/dotnet/xharness</Uri>
|
||||
<Sha>3b64ab7ab565cfd19fe7102e3d76271f16f0fc6d</Sha>
|
||||
</Dependency>
|
||||
<Dependency Name="Microsoft.Net.Compilers.Toolset" Version="3.8.0-3.20460.2">
|
||||
<Uri>https://github.com/dotnet/roslyn</Uri>
|
||||
<Sha>d57cda76c2b76cff75487a085d289cfadd99150b</Sha>
|
||||
</Dependency>
|
||||
<Dependency Name="Microsoft.SourceLink.GitHub" Version="1.1.0-beta-20464-02">
|
||||
<Uri>https://github.com/dotnet/sourcelink</Uri>
|
||||
<Sha>8a3edd1902dbfe3adba65f22e3bb7aa2cc73e97f</Sha>
|
||||
</Dependency>
|
||||
<Dependency Name="Microsoft.SourceLink.AzureRepos.Git" Version="1.1.0-beta-20464-02">
|
||||
<Uri>https://github.com/dotnet/sourcelink</Uri>
|
||||
<Sha>8a3edd1902dbfe3adba65f22e3bb7aa2cc73e97f</Sha>
|
||||
</Dependency>
|
||||
<Dependency Name="Microsoft.DiaSymReader.Pdb2Pdb" Version="1.1.0-beta2-19575-01">
|
||||
<Uri>https://github.com/dotnet/symreader-converter</Uri>
|
||||
<Sha>c5ba7c88f92e2dde156c324a8c8edc04d9fa4fe0</Sha>
|
||||
</Dependency>
|
||||
<Dependency Name="Microsoft.DiaSymReader.Converter" Version="1.1.0-beta2-19575-01">
|
||||
<Uri>https://github.com/dotnet/symreader-converter</Uri>
|
||||
<Sha>c5ba7c88f92e2dde156c324a8c8edc04d9fa4fe0</Sha>
|
||||
</Dependency>
|
||||
<Dependency Name="XliffTasks" Version="1.0.0-beta.20420.1">
|
||||
<Uri>https://github.com/dotnet/xliff-tasks</Uri>
|
||||
<Sha>975065e08307a459dc2649b1c852f5c4cafd2f91</Sha>
|
||||
</Dependency>
|
||||
</ToolsetDependencies>
|
||||
</Dependencies>
|
|
@ -0,0 +1,138 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project>
|
||||
<PropertyGroup>
|
||||
<!-- This repo version -->
|
||||
<!--
|
||||
Arcade automatically changes the version of assemblies to 42.42.42.42 unless DotNetUseShippingVersions
|
||||
is set to true.
|
||||
Details in Arcade documentation:
|
||||
https://github.com/dotnet/arcade/blob/c788ffa83b088cafe9dbffc1cbc8155ba88b2553/Documentation/CorePackages/Versioning.md#output
|
||||
-->
|
||||
<DotNetUseShippingVersions>true</DotNetUseShippingVersions>
|
||||
<VersionPrefix>1.5.3</VersionPrefix>
|
||||
<PreReleaseVersionLabel>dev</PreReleaseVersionLabel>
|
||||
<AssemblyVersion>1.0.0.0</AssemblyVersion>
|
||||
|
||||
<!--ML.NET Core dependencies-->
|
||||
<NewtonsoftJsonPackageVersion>10.0.3</NewtonsoftJsonPackageVersion>
|
||||
<SystemCodeDomPackageVersion>4.4.0</SystemCodeDomPackageVersion>
|
||||
<SystemReflectionEmitLightweightPackageVersion>4.3.0</SystemReflectionEmitLightweightPackageVersion>
|
||||
<SystemThreadingChannelsPackageVersion>4.7.1</SystemThreadingChannelsPackageVersion>
|
||||
|
||||
<!-- Other/External dependencies -->
|
||||
<GoogleProtobufPackageVersion>3.10.1</GoogleProtobufPackageVersion>
|
||||
<LightGBMPackageVersion>2.2.3</LightGBMPackageVersion>
|
||||
<MicrosoftExtensionsPackageVersion>2.1.0</MicrosoftExtensionsPackageVersion>
|
||||
<MicrosoftMLOnnxRuntimePackageVersion>1.5.2</MicrosoftMLOnnxRuntimePackageVersion>
|
||||
<MlNetMklDepsPackageVersion>0.0.0.9</MlNetMklDepsPackageVersion>
|
||||
<ParquetDotNetPackageVersion>2.1.3</ParquetDotNetPackageVersion>
|
||||
<SystemDrawingCommonPackageVersion>4.5.0</SystemDrawingCommonPackageVersion>
|
||||
<SystemIOFileSystemAccessControl>4.5.0</SystemIOFileSystemAccessControl>
|
||||
<SystemSecurityPrincipalWindows>4.5.0</SystemSecurityPrincipalWindows>
|
||||
<TensorFlowVersion>2.3.1</TensorFlowVersion>
|
||||
<TensorFlowMajorVersion>2</TensorFlowMajorVersion>
|
||||
<TensorflowDotNETVersion>0.20.1</TensorflowDotNETVersion>
|
||||
|
||||
<MicrosoftCodeAnalysisCSharpInternalAnalyzerVersion>3.3.1</MicrosoftCodeAnalysisCSharpInternalAnalyzerVersion>
|
||||
<MicrosoftCSharpVersion>4.5.0</MicrosoftCSharpVersion>
|
||||
<SystemCompositionVersion>1.2.0</SystemCompositionVersion>
|
||||
|
||||
<!-- Build/infrastructure Dependencies -->
|
||||
<PublishSymbolsPackageVersion>1.0.0-beta-62824-02</PublishSymbolsPackageVersion>
|
||||
<CodecovVersion>1.9.0</CodecovVersion>
|
||||
<CoverletCollectorVersion>1.2.1</CoverletCollectorVersion>
|
||||
<ReportGeneratorVersion>4.3.6</ReportGeneratorVersion>
|
||||
<MicrosoftDotNetApiCompatPackageVersion>1.0.0-beta.19225.5</MicrosoftDotNetApiCompatPackageVersion>
|
||||
<MicrosoftSourceLinkVersion>1.1.0-beta-20206-02</MicrosoftSourceLinkVersion>
|
||||
<BuildToolsPackageVersion>3.0.0-preview4-04926-01</BuildToolsPackageVersion>
|
||||
|
||||
<!-- Test-only Dependencies -->
|
||||
<BenchmarkDotNetVersion>0.12.0</BenchmarkDotNetVersion>
|
||||
<MicrosoftCodeAnalysisTestingVersion>1.0.1-beta1.20080.1</MicrosoftCodeAnalysisTestingVersion>
|
||||
<MicrosoftExtensionsTestPackageVersion>3.0.1</MicrosoftExtensionsTestPackageVersion>
|
||||
<MicrosoftMLTestDatabasesPackageVersion>0.0.6-test</MicrosoftMLTestDatabasesPackageVersion>
|
||||
<MicrosoftMLTestModelsPackageVersion>0.0.6-test</MicrosoftMLTestModelsPackageVersion>
|
||||
<MicrosoftMLTensorFlowTestModelsVersion>0.0.13-test</MicrosoftMLTensorFlowTestModelsVersion>
|
||||
<MicrosoftMLOnnxTestModelsVersion>0.0.6-test</MicrosoftMLOnnxTestModelsVersion>
|
||||
<SystemDataSqlClientVersion>4.6.1</SystemDataSqlClientVersion>
|
||||
<XunitCombinatorialVersion>1.2.7</XunitCombinatorialVersion>
|
||||
<SystemDataSQLiteCoreVersion>1.0.112.2</SystemDataSQLiteCoreVersion>
|
||||
|
||||
<!-- Opt-out repo features -->
|
||||
<UsingToolXliff>false</UsingToolXliff>
|
||||
<UsingToolNetFrameworkReferenceAssemblies Condition="'$(DotNetBuildFromSource)' != 'true'">true</UsingToolNetFrameworkReferenceAssemblies>
|
||||
<!-- Libs -->
|
||||
<CommandLineParserVersion>2.2.1</CommandLineParserVersion>
|
||||
<CredentialManagementVersion>1.0.2</CredentialManagementVersion>
|
||||
<MicrosoftCciVersion>4.0.0-rc3-24214-00</MicrosoftCciVersion>
|
||||
<HandlebarsNetVersion>1.10.1</HandlebarsNetVersion>
|
||||
<LibGit2SharpVersion>0.25.2</LibGit2SharpVersion>
|
||||
<log4netVersion>2.0.8</log4netVersion>
|
||||
<SystemNetHttpVersion>4.3.4</SystemNetHttpVersion>
|
||||
<AzureStorageBlobsVersion>12.3.0</AzureStorageBlobsVersion>
|
||||
<MicrosoftAzureKeyVaultVersion>3.0.0</MicrosoftAzureKeyVaultVersion>
|
||||
<MicrosoftAzureServicesAppAuthenticationVersion>1.3.1</MicrosoftAzureServicesAppAuthenticationVersion>
|
||||
<MicrosoftDataAnalysisVersion>0.1.0</MicrosoftDataAnalysisVersion>
|
||||
<MicrosoftBuildVersion>15.7.179</MicrosoftBuildVersion>
|
||||
<MicrosoftBuildFrameworkVersion>15.7.179</MicrosoftBuildFrameworkVersion>
|
||||
<MicrosoftBuildTasksCoreVersion>15.7.179</MicrosoftBuildTasksCoreVersion>
|
||||
<MicrosoftBuildUtilitiesCoreVersion>15.7.179</MicrosoftBuildUtilitiesCoreVersion>
|
||||
<MicrosoftCodeAnalysisAnalyzersVersion>2.6.3</MicrosoftCodeAnalysisAnalyzersVersion>
|
||||
<MicrosoftCodeAnalysisCSharpVersion>2.9.0</MicrosoftCodeAnalysisCSharpVersion>
|
||||
<MsbuildTaskMicrosoftCodeAnalysisCSharpVersion>3.4.0</MsbuildTaskMicrosoftCodeAnalysisCSharpVersion>
|
||||
<MicrosoftIdentityModelClientsActiveDirectoryVersion>3.19.8</MicrosoftIdentityModelClientsActiveDirectoryVersion>
|
||||
<MicrosoftRestClientRuntimeVersion>2.3.13</MicrosoftRestClientRuntimeVersion>
|
||||
<MicrosoftExtensionsDependencyModelVersion>2.1.0</MicrosoftExtensionsDependencyModelVersion>
|
||||
<MicrosoftExtensionsFileSystemGlobbingVersion>2.0.0</MicrosoftExtensionsFileSystemGlobbingVersion>
|
||||
<MicrosoftNETCorePlatformsVersion>2.1.0</MicrosoftNETCorePlatformsVersion>
|
||||
<MicrosoftNETCore3PlatformsVersion>3.1.0</MicrosoftNETCore3PlatformsVersion>
|
||||
<MicrosoftNetCompilersToolsetVersion>3.8.0-3.20460.2</MicrosoftNetCompilersToolsetVersion>
|
||||
<MoqVersion>4.8.3</MoqVersion>
|
||||
<MonoOptionsVersion>5.3.0.1</MonoOptionsVersion>
|
||||
<McMasterExtensionsCommandLineUtils>2.3.0</McMasterExtensionsCommandLineUtils>
|
||||
<NewtonsoftJsonVersion>9.0.1</NewtonsoftJsonVersion>
|
||||
<SystemTextJsonVersion>4.7.0</SystemTextJsonVersion>
|
||||
<NuGetVersioningVersion>4.4.0</NuGetVersioningVersion>
|
||||
<NuGetVersion>5.6.0-preview.2.6489</NuGetVersion>
|
||||
<OctokitVersion>0.32.0</OctokitVersion>
|
||||
<DotNetSleetLibVersion>2.2.143</DotNetSleetLibVersion>
|
||||
<SwashbuckleAspNetCoreSwaggerVersion>3.0.0</SwashbuckleAspNetCoreSwaggerVersion>
|
||||
<SystemBuffersVersion>4.5.0</SystemBuffersVersion>
|
||||
<SystemCollectionsImmutableVersion>1.5.0</SystemCollectionsImmutableVersion>
|
||||
<SystemDiagnosticsTraceSourceVersion>4.0.0</SystemDiagnosticsTraceSourceVersion>
|
||||
<SystemIOCompressionVersion>4.3.0</SystemIOCompressionVersion>
|
||||
<SystemIOPackagingVersion>4.5.0</SystemIOPackagingVersion>
|
||||
<SystemIOFileSystemPrimitivesVersion>4.3.0</SystemIOFileSystemPrimitivesVersion>
|
||||
<SystemMemoryVersion>4.5.3</SystemMemoryVersion>
|
||||
<SystemNumericsVectorsVersion>4.5.0</SystemNumericsVectorsVersion>
|
||||
<SystemReflectionMetadataVersion>1.6.0</SystemReflectionMetadataVersion>
|
||||
<SystemRuntimeCompilerServicesUnsafeVersion>4.7.0</SystemRuntimeCompilerServicesUnsafeVersion>
|
||||
<SystemRuntimeInteropServicesRuntimeInformation>4.3.0</SystemRuntimeInteropServicesRuntimeInformation>
|
||||
<SystemTextEncodingsWebVersion>4.5.0</SystemTextEncodingsWebVersion>
|
||||
<SystemThreadingTasksExtensionVersion>4.5.2</SystemThreadingTasksExtensionVersion>
|
||||
<SystemValueTupleVersion>4.4.0</SystemValueTupleVersion>
|
||||
<WindowsAzureStorageVersion>8.5.0</WindowsAzureStorageVersion>
|
||||
<XUnitVersion>2.4.0</XUnitVersion>
|
||||
<XUnitAbstractionsVersion>2.0.3</XUnitAbstractionsVersion>
|
||||
<XUnitVSRunnerVersion>2.4.0</XUnitVSRunnerVersion>
|
||||
<MicrosoftDotNetBuildTasksFeedVersion>5.0.0-beta.20461.7</MicrosoftDotNetBuildTasksFeedVersion>
|
||||
<MicrosoftDotNetSignToolVersion>5.0.0-beta.20461.7</MicrosoftDotNetSignToolVersion>
|
||||
<MicrosoftAzureDocumentDBVersion>1.22.0</MicrosoftAzureDocumentDBVersion>
|
||||
<MicrosoftAzureCosmosDBTableVersion>1.1.2</MicrosoftAzureCosmosDBTableVersion>
|
||||
<MicrosoftAspNetCoreAllVersion>2.0.0</MicrosoftAspNetCoreAllVersion>
|
||||
<MicrosoftDotNetGitHubIssueLabelerAssetsVersion>1.6.0</MicrosoftDotNetGitHubIssueLabelerAssetsVersion>
|
||||
<MicrosoftMLVersion>1.0.0</MicrosoftMLVersion>
|
||||
<MicrosoftVisualStudioWebCodeGenerationDesignVersion>2.0.4</MicrosoftVisualStudioWebCodeGenerationDesignVersion>
|
||||
<MicrosoftDiaSymReaderConverterVersion>1.1.0-beta2-19575-01</MicrosoftDiaSymReaderConverterVersion>
|
||||
<MicrosoftDiaSymReaderPdb2PdbVersion>1.1.0-beta2-19575-01</MicrosoftDiaSymReaderPdb2PdbVersion>
|
||||
<MicrosoftDiaSymReaderNativeVersion>1.7.0</MicrosoftDiaSymReaderNativeVersion>
|
||||
<MicrosoftDotNetMaestroClientVersion>1.1.0-beta.20258.6</MicrosoftDotNetMaestroClientVersion>
|
||||
<MicrosoftSourceLinkGitHubVersion>1.1.0-beta-20464-02</MicrosoftSourceLinkGitHubVersion>
|
||||
<MicrosoftSourceLinkAzureReposGitVersion>1.1.0-beta-20464-02</MicrosoftSourceLinkAzureReposGitVersion>
|
||||
<MicrosoftDotNetSwaggerGeneratorMSBuildVersion>5.0.0-beta.20461.7</MicrosoftDotNetSwaggerGeneratorMSBuildVersion>
|
||||
<XliffTasksVersion>1.0.0-beta.20420.1</XliffTasksVersion>
|
||||
<MicrosoftDotNetMaestroTasksVersion>1.1.0-beta.20461.2</MicrosoftDotNetMaestroTasksVersion>
|
||||
<MicrosoftDotNetXHarnessCLIVersion>1.0.0-prerelease.20457.1</MicrosoftDotNetXHarnessCLIVersion>
|
||||
<MicrosoftSymbolUploaderBuildTaskVersion>1.1.145102</MicrosoftSymbolUploaderBuildTaskVersion>
|
||||
</PropertyGroup>
|
||||
</Project>
|
|
@@ -0,0 +1,2 @@
@echo off
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"

@@ -0,0 +1,11 @@
@{
IncludeRules=@('PSAvoidUsingCmdletAliases',
'PSAvoidUsingWMICmdlet',
'PSAvoidUsingPositionalParameters',
'PSAvoidUsingInvokeExpression',
'PSUseDeclaredVarsMoreThanAssignments',
'PSUseCmdletCorrectly',
'PSStandardDSCFunctionsInResource',
'PSUseIdenticalMandatoryParametersForDSC',
'PSUseIdenticalParametersForDSC')
}

@ -0,0 +1,28 @@
|
|||
# Don't touch this folder
|
||||
|
||||
uuuuuuuuuuuuuuuuuuuu
|
||||
u" uuuuuuuuuuuuuuuuuu "u
|
||||
u" u$$$$$$$$$$$$$$$$$$$$u "u
|
||||
u" u$$$$$$$$$$$$$$$$$$$$$$$$u "u
|
||||
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
|
||||
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
|
||||
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
|
||||
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
|
||||
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
|
||||
$ $$$" ... "$... ...$" ... "$$$ ... "$$$ $
|
||||
$ $$$u `"$$$$$$$ $$$ $$$$$ $$ $$$ $$$ $
|
||||
$ $$$$$$uu "$$$$ $$$ $$$$$ $$ """ u$$$ $
|
||||
$ $$$""$$$ $$$$ $$$u "$$$" u$$ $$$$$$$$ $
|
||||
$ $$$$....,$$$$$..$$$$$....,$$$$..$$$$$$$$ $
|
||||
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
|
||||
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
|
||||
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
|
||||
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
|
||||
"u "$$$$$$$$$$$$$$$$$$$$$$$$" u"
|
||||
"u "$$$$$$$$$$$$$$$$$$$$" u"
|
||||
"u """""""""""""""""" u"
|
||||
""""""""""""""""""""
|
||||
|
||||
!!! Changes made in this directory are subject to being overwritten by automation !!!
|
||||
|
||||
The files in this directory are shared by all Arcade repos and managed by automation. If you need to make changes to these files, open an issue or submit a pull request to https://github.com/dotnet/arcade first.
|
|
@ -0,0 +1,160 @@
|
|||
# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds.
|
||||
# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080
|
||||
#
|
||||
# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry
|
||||
# under <packageSourceCredentials> for each Maestro managed private feed. Two additional credential
|
||||
# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport.
|
||||
#
|
||||
# This script needs to be called in every job that will restore packages and which the base repo has
|
||||
# private AzDO feeds in the NuGet.config.
|
||||
#
|
||||
# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)`
|
||||
# from the AzureDevOps-Artifact-Feeds-Pats variable group.
|
||||
#
|
||||
# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing
|
||||
#
|
||||
# - task: PowerShell@2
|
||||
# displayName: Setup Private Feeds Credentials
|
||||
# condition: eq(variables['Agent.OS'], 'Windows_NT')
|
||||
# inputs:
|
||||
# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
|
||||
# arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token
|
||||
# env:
|
||||
# Token: $(dn-bot-dnceng-artifact-feeds-rw)
|
||||
|
||||
[CmdletBinding()]
|
||||
param (
|
||||
[Parameter(Mandatory = $true)][string]$ConfigFile,
|
||||
[Parameter(Mandatory = $true)][string]$Password
|
||||
)
|
||||
|
||||
$ErrorActionPreference = "Stop"
|
||||
Set-StrictMode -Version 2.0
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
|
||||
|
||||
. $PSScriptRoot\tools.ps1
|
||||
|
||||
# Add source entry to PackageSources
|
||||
function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $Password) {
|
||||
$packageSource = $sources.SelectSingleNode("add[@key='$SourceName']")
|
||||
|
||||
if ($packageSource -eq $null)
|
||||
{
|
||||
$packageSource = $doc.CreateElement("add")
|
||||
$packageSource.SetAttribute("key", $SourceName)
|
||||
$packageSource.SetAttribute("value", $SourceEndPoint)
|
||||
$sources.AppendChild($packageSource) | Out-Null
|
||||
}
|
||||
else {
|
||||
Write-Host "Package source $SourceName already present."
|
||||
}
|
||||
|
||||
AddCredential -Creds $creds -Source $SourceName -Username $Username -Password $Password
|
||||
}
|
||||
|
||||
# Add a credential node for the specified source
|
||||
function AddCredential($creds, $source, $username, $password) {
|
||||
# Looks for credential configuration for the given SourceName. Create it if none is found.
|
||||
$sourceElement = $creds.SelectSingleNode($Source)
|
||||
if ($sourceElement -eq $null)
|
||||
{
|
||||
$sourceElement = $doc.CreateElement($Source)
|
||||
$creds.AppendChild($sourceElement) | Out-Null
|
||||
}
|
||||
|
||||
# Add the <Username> node to the credential if none is found.
|
||||
$usernameElement = $sourceElement.SelectSingleNode("add[@key='Username']")
|
||||
if ($usernameElement -eq $null)
|
||||
{
|
||||
$usernameElement = $doc.CreateElement("add")
|
||||
$usernameElement.SetAttribute("key", "Username")
|
||||
$sourceElement.AppendChild($usernameElement) | Out-Null
|
||||
}
|
||||
$usernameElement.SetAttribute("value", $Username)
|
||||
|
||||
# Add the <ClearTextPassword> to the credential if none is found.
|
||||
# Add it as a clear text because there is no support for encrypted ones in non-windows .Net SDKs.
|
||||
# -> https://github.com/NuGet/Home/issues/5526
|
||||
$passwordElement = $sourceElement.SelectSingleNode("add[@key='ClearTextPassword']")
|
||||
if ($passwordElement -eq $null)
|
||||
{
|
||||
$passwordElement = $doc.CreateElement("add")
|
||||
$passwordElement.SetAttribute("key", "ClearTextPassword")
|
||||
$sourceElement.AppendChild($passwordElement) | Out-Null
|
||||
}
|
||||
$passwordElement.SetAttribute("value", $Password)
|
||||
}
|
||||
|
||||
function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $Password) {
|
||||
$maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]")
|
||||
|
||||
Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds."
|
||||
|
||||
ForEach ($PackageSource in $maestroPrivateSources) {
|
||||
Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key
|
||||
AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -Password $Password
|
||||
}
|
||||
}
|
||||
|
||||
function EnablePrivatePackageSources($DisabledPackageSources) {
|
||||
$maestroPrivateSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]")
|
||||
ForEach ($DisabledPackageSource in $maestroPrivateSources) {
|
||||
Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled"
|
||||
$DisabledPackageSource.SetAttribute("value", "false")
|
||||
}
|
||||
}
|
||||
|
||||
if (!(Test-Path $ConfigFile -PathType Leaf)) {
|
||||
Write-PipelineTelemetryError -Category 'Build' -Message "Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile"
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
if (!$Password) {
|
||||
Write-PipelineTelemetryError -Category 'Build' -Message 'Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Please supply a valid PAT'
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
# Load NuGet.config
|
||||
$doc = New-Object System.Xml.XmlDocument
|
||||
$filename = (Get-Item $ConfigFile).FullName
|
||||
$doc.Load($filename)
|
||||
|
||||
# Get reference to <PackageSources> or create one if none exist already
|
||||
$sources = $doc.DocumentElement.SelectSingleNode("packageSources")
|
||||
if ($sources -eq $null) {
|
||||
$sources = $doc.CreateElement("packageSources")
|
||||
$doc.DocumentElement.AppendChild($sources) | Out-Null
|
||||
}
|
||||
|
||||
# Looks for a <PackageSourceCredentials> node. Create it if none is found.
|
||||
$creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials")
|
||||
if ($creds -eq $null) {
|
||||
$creds = $doc.CreateElement("packageSourceCredentials")
|
||||
$doc.DocumentElement.AppendChild($creds) | Out-Null
|
||||
}
|
||||
|
||||
# Check for disabledPackageSources; we'll enable any darc-int ones we find there
|
||||
$disabledSources = $doc.DocumentElement.SelectSingleNode("disabledPackageSources")
|
||||
if ($disabledSources -ne $null) {
|
||||
Write-Host "Checking for any darc-int disabled package sources in the disabledPackageSources node"
|
||||
EnablePrivatePackageSources -DisabledPackageSources $disabledSources
|
||||
}
|
||||
|
||||
$userName = "dn-bot"
|
||||
|
||||
# Insert credential nodes for Maestro's private feeds
|
||||
InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -Password $Password
|
||||
|
||||
$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']")
|
||||
if ($dotnet31Source -ne $null) {
|
||||
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
|
||||
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
|
||||
}
|
||||
|
||||
$dotnet5Source = $sources.SelectSingleNode("add[@key='dotnet5']")
|
||||
if ($dotnet5Source -ne $null) {
|
||||
AddPackageSource -Sources $sources -SourceName "dotnet5-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
|
||||
AddPackageSource -Sources $sources -SourceName "dotnet5-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
|
||||
}
|
||||
|
||||
$doc.Save($filename)
|
|
@ -0,0 +1,167 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds.
|
||||
# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080
|
||||
#
|
||||
# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry
|
||||
# under <packageSourceCredentials> for each Maestro's managed private feed. Two additional credential
|
||||
# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport.
|
||||
#
|
||||
# This script needs to be called in every job that will restore packages and which the base repo has
|
||||
# private AzDO feeds in the NuGet.config.
|
||||
#
|
||||
# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)`
|
||||
# from the AzureDevOps-Artifact-Feeds-Pats variable group.
|
||||
#
|
||||
# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing.
|
||||
#
|
||||
# - task: Bash@3
|
||||
# displayName: Setup Private Feeds Credentials
|
||||
# inputs:
|
||||
# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
|
||||
# arguments: $(Build.SourcesDirectory)/NuGet.config $Token
|
||||
# condition: ne(variables['Agent.OS'], 'Windows_NT')
|
||||
# env:
|
||||
# Token: $(dn-bot-dnceng-artifact-feeds-rw)
|
||||
|
||||
ConfigFile=$1
|
||||
CredToken=$2
|
||||
NL='\n'
|
||||
TB=' '
|
||||
|
||||
source="${BASH_SOURCE[0]}"
|
||||
|
||||
# resolve $source until the file is no longer a symlink
|
||||
while [[ -h "$source" ]]; do
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
source="$(readlink "$source")"
|
||||
# if $source was a relative symlink, we need to resolve it relative to the path where the
|
||||
# symlink file was located
|
||||
[[ $source != /* ]] && source="$scriptroot/$source"
|
||||
done
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
|
||||
. "$scriptroot/tools.sh"
|
||||
|
||||
if [ ! -f "$ConfigFile" ]; then
|
||||
Write-PipelineTelemetryError -Category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile"
|
||||
ExitWithExitCode 1
|
||||
fi
|
||||
|
||||
if [ -z "$CredToken" ]; then
|
||||
Write-PipelineTelemetryError -category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Please supply a valid PAT"
|
||||
ExitWithExitCode 1
|
||||
fi
|
||||
|
||||
if [[ `uname -s` == "Darwin" ]]; then
|
||||
NL=$'\\\n'
|
||||
TB=''
|
||||
fi
|
||||
|
||||
# Ensure there is a <packageSources>...</packageSources> section.
|
||||
grep -i "<packageSources>" $ConfigFile
|
||||
if [ "$?" != "0" ]; then
|
||||
echo "Adding <packageSources>...</packageSources> section."
|
||||
ConfigNodeHeader="<configuration>"
|
||||
PackageSourcesTemplate="${TB}<packageSources>${NL}${TB}</packageSources>"
|
||||
|
||||
sed -i.bak "s|$ConfigNodeHeader|$ConfigNodeHeader${NL}$PackageSourcesTemplate|" $ConfigFile
|
||||
fi
|
||||
|
||||
# Ensure there is a <packageSourceCredentials>...</packageSourceCredentials> section.
|
||||
grep -i "<packageSourceCredentials>" $ConfigFile
|
||||
if [ "$?" != "0" ]; then
|
||||
echo "Adding <packageSourceCredentials>...</packageSourceCredentials> section."
|
||||
|
||||
PackageSourcesNodeFooter="</packageSources>"
|
||||
PackageSourceCredentialsTemplate="${TB}<packageSourceCredentials>${NL}${TB}</packageSourceCredentials>"
|
||||
|
||||
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile
|
||||
fi
|
||||
|
||||
PackageSources=()
|
||||
|
||||
# Ensure dotnet3.1-internal and dotnet3.1-internal-transport are in the packageSources if the public dotnet3.1 feeds are present
|
||||
grep -i "<add key=\"dotnet3.1\"" $ConfigFile
|
||||
if [ "$?" == "0" ]; then
|
||||
grep -i "<add key=\"dotnet3.1-internal\"" $ConfigFile
|
||||
if [ "$?" != "0" ]; then
|
||||
echo "Adding dotnet3.1-internal to the packageSources."
|
||||
PackageSourcesNodeFooter="</packageSources>"
|
||||
PackageSourceTemplate="${TB}<add key=\"dotnet3.1-internal\" value=\"https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2\" />"
|
||||
|
||||
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
|
||||
fi
|
||||
PackageSources+=('dotnet3.1-internal')
|
||||
|
||||
grep -i "<add key=\"dotnet3.1-internal-transport\">" $ConfigFile
|
||||
if [ "$?" != "0" ]; then
|
||||
echo "Adding dotnet3.1-internal-transport to the packageSources."
|
||||
PackageSourcesNodeFooter="</packageSources>"
|
||||
PackageSourceTemplate="${TB}<add key=\"dotnet3.1-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2\" />"
|
||||
|
||||
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
|
||||
fi
|
||||
PackageSources+=('dotnet3.1-internal-transport')
|
||||
fi
|
||||
|
||||
# Ensure dotnet5-internal and dotnet5-internal-transport are in the packageSources if the public dotnet5 feeds are present
|
||||
grep -i "<add key=\"dotnet5\"" $ConfigFile
|
||||
if [ "$?" == "0" ]; then
|
||||
grep -i "<add key=\"dotnet5-internal\"" $ConfigFile
|
||||
if [ "$?" != "0" ]; then
|
||||
echo "Adding dotnet5-internal to the packageSources."
|
||||
PackageSourcesNodeFooter="</packageSources>"
|
||||
PackageSourceTemplate="${TB}<add key=\"dotnet5-internal\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal/nuget/v2\" />"
|
||||
|
||||
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
|
||||
fi
|
||||
PackageSources+=('dotnet5-internal')
|
||||
|
||||
grep -i "<add key=\"dotnet5-internal-transport\">" $ConfigFile
|
||||
if [ "$?" != "0" ]; then
|
||||
echo "Adding dotnet5-internal-transport to the packageSources."
|
||||
PackageSourcesNodeFooter="</packageSources>"
|
||||
PackageSourceTemplate="${TB}<add key=\"dotnet5-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet5-internal-transport/nuget/v2\" />"
|
||||
|
||||
sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
|
||||
fi
|
||||
PackageSources+=('dotnet5-internal-transport')
|
||||
fi
|
||||
|
||||
# I want things split line by line
|
||||
PrevIFS=$IFS
|
||||
IFS=$'\n'
|
||||
PackageSources+="$IFS"
|
||||
PackageSources+=$(grep -oh '"darc-int-[^"]*"' $ConfigFile | tr -d '"')
|
||||
IFS=$PrevIFS
|
||||
|
||||
for FeedName in ${PackageSources[@]} ; do
|
||||
# Check if there is no existing credential for this FeedName
|
||||
grep -i "<$FeedName>" $ConfigFile
|
||||
if [ "$?" != "0" ]; then
|
||||
echo "Adding credentials for $FeedName."
|
||||
|
||||
PackageSourceCredentialsNodeFooter="</packageSourceCredentials>"
|
||||
NewCredential="${TB}${TB}<$FeedName>${NL}<add key=\"Username\" value=\"dn-bot\" />${NL}<add key=\"ClearTextPassword\" value=\"$CredToken\" />${NL}</$FeedName>"
|
||||
|
||||
sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile
|
||||
fi
|
||||
done
|
||||
|
||||
# Re-enable any entries in disabledPackageSources where the feed name contains darc-int
|
||||
grep -i "<disabledPackageSources>" $ConfigFile
|
||||
if [ "$?" == "0" ]; then
|
||||
DisabledDarcIntSources=()
|
||||
echo "Re-enabling any disabled \"darc-int\" package sources in $ConfigFile"
|
||||
DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' $ConfigFile | tr -d '"')
|
||||
for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do
|
||||
if [[ $DisabledSourceName == darc-int* ]]
|
||||
then
|
||||
OldDisableValue="add key=\"$DisabledSourceName\" value=\"true\""
|
||||
NewDisableValue="add key=\"$DisabledSourceName\" value=\"false\""
|
||||
sed -i.bak "s|$OldDisableValue|$NewDisableValue|" $ConfigFile
|
||||
echo "Neutralized disablePackageSources entry for '$DisabledSourceName'"
|
||||
fi
|
||||
done
|
||||
fi
|
|
@ -0,0 +1,161 @@
|
|||
[CmdletBinding(PositionalBinding=$false)]
|
||||
Param(
|
||||
[string][Alias('c')]$configuration = "Debug",
|
||||
[string]$platform = $null,
|
||||
[string] $projects,
|
||||
[string][Alias('v')]$verbosity = "minimal",
|
||||
[string] $msbuildEngine = $null,
|
||||
[bool] $warnAsError = $true,
|
||||
[bool] $nodeReuse = $true,
|
||||
[bool] $useDefaultDotnetInstall = $false,
|
||||
[switch][Alias('r')]$restore,
|
||||
[switch] $deployDeps,
|
||||
[switch][Alias('b')]$build,
|
||||
[switch] $rebuild,
|
||||
[switch] $deploy,
|
||||
[switch][Alias('t')]$test,
|
||||
[switch] $integrationTest,
|
||||
[switch] $performanceTest,
|
||||
[switch] $sign,
|
||||
[switch] $pack,
|
||||
[switch] $publish,
|
||||
[switch] $clean,
|
||||
[switch][Alias('bl')]$binaryLog,
|
||||
[switch][Alias('nobl')]$excludeCIBinarylog,
|
||||
[switch] $ci,
|
||||
[switch] $prepareMachine,
|
||||
[string] $runtimeSourceFeed = '',
|
||||
[string] $runtimeSourceFeedKey = '',
|
||||
[switch] $help,
|
||||
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
|
||||
)
|
||||
|
||||
# Unset 'Platform' environment variable to avoid unwanted collision in InstallDotNetCore.targets file
|
||||
# some computer has this env var defined (e.g. Some HP)
|
||||
if($env:Platform) {
|
||||
$env:Platform=""
|
||||
}
|
||||
function Print-Usage() {
|
||||
Write-Host "Common settings:"
|
||||
Write-Host " -configuration <value> Build configuration: 'Debug' or 'Release' (short: -c)"
|
||||
Write-Host " -platform <value> Platform configuration: 'x86', 'x64' or any valid Platform value to pass to msbuild"
|
||||
Write-Host " -verbosity <value> Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
|
||||
Write-Host " -binaryLog Output binary log (short: -bl)"
|
||||
Write-Host " -help Print help and exit"
|
||||
Write-Host ""
|
||||
|
||||
Write-Host "Actions:"
|
||||
Write-Host " -restore Restore dependencies (short: -r)"
|
||||
Write-Host " -build Build solution (short: -b)"
|
||||
Write-Host " -rebuild Rebuild solution"
|
||||
Write-Host " -deploy Deploy built VSIXes"
|
||||
Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)"
|
||||
Write-Host " -test Run all unit tests in the solution (short: -t)"
|
||||
Write-Host " -integrationTest Run all integration tests in the solution"
|
||||
Write-Host " -performanceTest Run all performance tests in the solution"
|
||||
Write-Host " -pack Package build outputs into NuGet packages and Willow components"
|
||||
Write-Host " -sign Sign build outputs"
|
||||
Write-Host " -publish Publish artifacts (e.g. symbols)"
|
||||
Write-Host " -clean Clean the solution"
|
||||
Write-Host ""
|
||||
|
||||
Write-Host "Advanced settings:"
|
||||
Write-Host " -projects <value> Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)"
|
||||
Write-Host " -ci Set when running on CI server"
|
||||
Write-Host " -excludeCIBinarylog Don't output binary log (short: -nobl)"
|
||||
Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
|
||||
Write-Host " -warnAsError <value> Sets warnaserror msbuild parameter ('true' or 'false')"
|
||||
Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
|
||||
Write-Host " -useDefaultDotnetInstall <value> Use dotnet-install.* scripts from public location as opposed to from eng common folder"
|
||||
Write-Host ""
|
||||
|
||||
Write-Host "Command line arguments not listed above are passed thru to msbuild."
|
||||
Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)."
|
||||
}
|
||||
|
||||
. $PSScriptRoot\tools.ps1
|
||||
|
||||
function InitializeCustomToolset {
|
||||
if (-not $restore) {
|
||||
return
|
||||
}
|
||||
|
||||
$script = Join-Path $EngRoot 'restore-toolset.ps1'
|
||||
|
||||
if (Test-Path $script) {
|
||||
. $script
|
||||
}
|
||||
}
|
||||
|
||||
function Build {
|
||||
$toolsetBuildProj = InitializeToolset
|
||||
InitializeCustomToolset
|
||||
|
||||
$bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' }
|
||||
$platformArg = if ($platform) { "/p:Platform=$platform" } else { '' }
|
||||
|
||||
if ($projects) {
|
||||
# Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons.
|
||||
# Explicitly set the type as string[] because otherwise PowerShell would make this char[] if $properties is empty.
|
||||
[string[]] $msbuildArgs = $properties
|
||||
|
||||
# Resolve relative project paths into full paths
|
||||
$projects = ($projects.Split(';').ForEach({Resolve-Path $_}) -join ';')
|
||||
|
||||
$msbuildArgs += "/p:Projects=$projects"
|
||||
$properties = $msbuildArgs
|
||||
}
|
||||
|
||||
MSBuild $toolsetBuildProj `
|
||||
$bl `
|
||||
$platformArg `
|
||||
/p:Configuration=$configuration `
|
||||
/p:RepoRoot=$RepoRoot `
|
||||
/p:Restore=$restore `
|
||||
/p:DeployDeps=$deployDeps `
|
||||
/p:Build=$build `
|
||||
/p:Rebuild=$rebuild `
|
||||
/p:Deploy=$deploy `
|
||||
/p:Test=$test `
|
||||
/p:Pack=$pack `
|
||||
/p:IntegrationTest=$integrationTest `
|
||||
/p:PerformanceTest=$performanceTest `
|
||||
/p:Sign=$sign `
|
||||
/p:Publish=$publish `
|
||||
@properties
|
||||
}
|
||||
|
||||
try {
|
||||
if ($clean) {
|
||||
if (Test-Path $ArtifactsDir) {
|
||||
Remove-Item -Recurse -Force $ArtifactsDir
|
||||
Write-Host 'Artifacts directory deleted.'
|
||||
}
|
||||
exit 0
|
||||
}
|
||||
|
||||
if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
|
||||
Print-Usage
|
||||
exit 0
|
||||
}
|
||||
|
||||
if ($ci) {
|
||||
if (-not $excludeCIBinarylog) {
|
||||
$binaryLog = $true
|
||||
}
|
||||
$nodeReuse = $false
|
||||
}
|
||||
|
||||
if ($restore) {
|
||||
InitializeNativeTools
|
||||
}
|
||||
|
||||
Build
|
||||
}
|
||||
catch {
|
||||
Write-Host $_.ScriptStackTrace
|
||||
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
ExitWithExitCode 0
|
|
@ -0,0 +1,239 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Stop script if unbound variable found (use ${var:-} if intentional)
|
||||
set -u
|
||||
|
||||
# Stop script if command returns non-zero exit code.
|
||||
# Prevents hidden errors caused by missing error code propagation.
|
||||
set -e

usage()
{
  echo "Common settings:"
  echo "  --configuration <value>    Build configuration: 'Debug' or 'Release' (short: -c)"
  echo "  --verbosity <value>        Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
  echo "  --binaryLog                Create MSBuild binary log (short: -bl)"
  echo "  --help                     Print help and exit (short: -h)"
  echo ""

  echo "Actions:"
  echo "  --restore                  Restore dependencies (short: -r)"
  echo "  --build                    Build solution (short: -b)"
  echo "  --rebuild                  Rebuild solution"
  echo "  --test                     Run all unit tests in the solution (short: -t)"
  echo "  --integrationTest          Run all integration tests in the solution"
  echo "  --performanceTest          Run all performance tests in the solution"
  echo "  --pack                     Package build outputs into NuGet packages and Willow components"
  echo "  --sign                     Sign build outputs"
  echo "  --publish                  Publish artifacts (e.g. symbols)"
  echo "  --clean                    Clean the solution"
  echo ""

  echo "Advanced settings:"
  echo "  --projects <value>         Project or solution file(s) to build"
  echo "  --ci                       Set when running on CI server"
  echo "  --excludeCIBinarylog       Don't output binary log (short: -nobl)"
  echo "  --prepareMachine           Prepare machine for CI run, clean up processes after build"
  echo "  --nodeReuse <value>        Sets nodereuse msbuild parameter ('true' or 'false')"
  echo "  --warnAsError <value>      Sets warnaserror msbuild parameter ('true' or 'false')"
  echo "  --useDefaultDotnetInstall <value>  Use dotnet-install.* scripts from public location as opposed to from eng common folder"

  echo ""
  echo "Command line arguments not listed above are passed thru to msbuild."
  echo "Arguments can also be passed in with a single hyphen."
}

source="${BASH_SOURCE[0]}"

# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"
  # if $source was a relative symlink, we need to resolve it relative to the path where the
  # symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

restore=false
build=false
rebuild=false
test=false
integration_test=false
performance_test=false
pack=false
publish=false
sign=false
public=false
ci=false
clean=false

warn_as_error=true
node_reuse=true
binary_log=false
exclude_ci_binary_log=false
pipelines_log=false

projects=''
configuration='Debug'
prepare_machine=false
verbosity='minimal'
runtime_source_feed=''
runtime_source_feed_key=''
use_default_dotnet_install=false

properties=''
while [[ $# > 0 ]]; do
  opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
  case "$opt" in
    -help|-h)
      usage
      exit 0
      ;;
    -clean)
      clean=true
      ;;
    -configuration|-c)
      configuration=$2
      shift
      ;;
    -verbosity|-v)
      verbosity=$2
      shift
      ;;
    -binarylog|-bl)
      binary_log=true
      ;;
    -excludecibinarylog|-nobl)
      exclude_ci_binary_log=true
      ;;
    -pipelineslog|-pl)
      pipelines_log=true
      ;;
    -restore|-r)
      restore=true
      ;;
    -build|-b)
      build=true
      ;;
    -rebuild)
      rebuild=true
      ;;
    -pack)
      pack=true
      ;;
    -test|-t)
      test=true
      ;;
    -integrationtest)
      integration_test=true
      ;;
    -performancetest)
      performance_test=true
      ;;
    -sign)
      sign=true
      ;;
    -publish)
      publish=true
      ;;
    -preparemachine)
      prepare_machine=true
      ;;
    -projects)
      projects=$2
      shift
      ;;
    -ci)
      ci=true
      ;;
    -warnaserror)
      warn_as_error=$2
      shift
      ;;
    -nodereuse)
      node_reuse=$2
      shift
      ;;
    -runtimesourcefeed)
      runtime_source_feed=$2
      shift
      ;;
    -runtimesourcefeedkey)
      runtime_source_feed_key=$2
      shift
      ;;
    -usedefaultdotnetinstall)
      use_default_dotnet_install=$2
      shift
      ;;
    *)
      properties="$properties $1"
      ;;
  esac

  shift
done

if [[ "$ci" == true ]]; then
  pipelines_log=true
  node_reuse=false
  if [[ "$exclude_ci_binary_log" == false ]]; then
    binary_log=true
  fi
fi

. "$scriptroot/tools.sh"

function InitializeCustomToolset {
  local script="$eng_root/restore-toolset.sh"

  if [[ -a "$script" ]]; then
    . "$script"
  fi
}

function Build {
  InitializeToolset
  InitializeCustomToolset

  if [[ ! -z "$projects" ]]; then
    properties="$properties /p:Projects=$projects"
  fi

  local bl=""
  if [[ "$binary_log" == true ]]; then
    bl="/bl:\"$log_dir/Build.binlog\""
  fi

  MSBuild $_InitializeToolset \
    $bl \
    /p:Configuration=$configuration \
    /p:RepoRoot="$repo_root" \
    /p:Restore=$restore \
    /p:Build=$build \
    /p:Rebuild=$rebuild \
    /p:Test=$test \
    /p:Pack=$pack \
    /p:IntegrationTest=$integration_test \
    /p:PerformanceTest=$performance_test \
    /p:Sign=$sign \
    /p:Publish=$publish \
    $properties

  ExitWithExitCode 0
}

if [[ "$clean" == true ]]; then
  if [ -d "$artifacts_dir" ]; then
    rm -rf $artifacts_dir
    echo "Artifacts directory deleted."
  fi
  exit 0
fi

if [[ "$restore" == true ]]; then
  InitializeNativeTools
fi

Build
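For reference, a couple of representative invocations of the script above, sketched from its own usage() text (illustrative only; the MSBuild property name in the second line is a made-up placeholder, not something defined by this change):

  ./build.sh --restore --build --test --configuration Release
  ./build.sh -restore -build -pack -bl -c Release /p:ExamplePassThroughProperty=true   # single-hyphen form; unrecognized arguments flow through to msbuild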
@@ -0,0 +1,16 @@
#!/usr/bin/env bash

source="${BASH_SOURCE[0]}"

# resolve $SOURCE until the file is no longer a symlink
while [[ -h $source ]]; do
  scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
  source="$(readlink "$source")"

  # if $source was a relative symlink, we need to resolve it relative to the path where
  # the symlink file was located
  [[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted

deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
@@ -0,0 +1,3 @@
# Debian (sid)   # UNSTABLE
deb http://ftp.debian.org/debian/ sid main contrib non-free
deb-src http://ftp.debian.org/debian/ sid main contrib non-free
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted

deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted

deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted

deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
@@ -0,0 +1,71 @@
From e72c9d7ead60e3317bd6d1fade995c07021c947b Mon Sep 17 00:00:00 2001
From: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
Date: Thu, 7 May 2015 13:25:04 -0400
Subject: [PATCH] Fix: building probe providers with C++ compiler

Robert Daniels wrote:
> > I'm attempting to use lttng userspace tracing with a C++ application
> > on an ARM platform. I'm using GCC 4.8.4 on Linux 3.14 with the 2.6
> > release of lttng. I've compiled lttng-modules, lttng-ust, and
> > lttng-tools and have been able to get a simple test working with C
> > code. When I attempt to run the hello.cxx test on my target it will
> > segfault.
>
>
> I spent a little time digging into this issue and finally discovered the
> cause of my segfault with ARM C++ tracepoints.
>
> There is a struct called 'lttng_event' in ust-events.h which contains an
> empty union 'u'. This was the cause of my issue. Under C, this empty union
> compiles to a zero byte member while under C++ it compiles to a one byte
> member, and in my case was four-byte aligned which caused my C++ code to
> have the 'cds_list_head node' offset incorrectly by four bytes. This lead
> to an incorrect linked list structure which caused my issue.
>
> Since this union is empty, I simply removed it from the struct and everything
> worked correctly.
>
> I don't know the history or purpose behind this empty union so I'd like to
> know if this is a safe fix. If it is I can submit a patch with the union
> removed.

That's a very nice catch!

We do not support building tracepoint probe provider with
g++ yet, as stated in lttng-ust(3):

"- Note for C++ support: although an application instrumented with
   tracepoints can be compiled with g++, tracepoint probes should be
   compiled with gcc (only tested with gcc so far)."

However, if it works fine with this fix, then I'm tempted to take it,
especially because removing the empty union does not appear to affect
the layout of struct lttng_event as seen from liblttng-ust, which must
be compiled with a C compiler, and from probe providers compiled with
a C compiler. So all we are changing is the layout of a probe provider
compiled with a C++ compiler, which is anyway buggy at the moment,
because it is not compatible with the layout expected by liblttng-ust
compiled with a C compiler.

Reported-by: Robert Daniels <robert.daniels@vantagecontrols.com>
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
---
 include/lttng/ust-events.h | 2 --
 1 file changed, 2 deletions(-)

diff --git a/usr/include/lttng/ust-events.h b/usr/include/lttng/ust-events.h
index 328a875..3d7a274 100644
--- a/usr/include/lttng/ust-events.h
+++ b/usr/include/lttng/ust-events.h
@@ -407,8 +407,6 @@ struct lttng_event {
 	void *_deprecated1;
 	struct lttng_ctx *ctx;
 	enum lttng_ust_instrumentation instrumentation;
-	union {
-	} u;
 	struct cds_list_head node;	/* Event list in session */
 	struct cds_list_head _deprecated2;
 	void *_deprecated3;
--
2.7.4
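The report above turns on the fact that an empty union occupies zero bytes in (GNU) C but at least one byte in C++, shifting every later member. A throwaway shell sketch to observe that difference locally, assuming gcc and g++ are on PATH (the file path and struct name are arbitrary, and the printed numbers depend on the ABI):

  cat > /tmp/empty_union_layout.c <<'EOF'
  #include <stdio.h>
  struct probe {
      int before;
      union {} u;   /* empty union: zero bytes as a GNU C extension, at least one byte in C++ */
      int after;
  };
  int main(void) {
      printf("sizeof = %zu, offsetof(after) = %zu\n",
             sizeof(struct probe), __builtin_offsetof(struct probe, after));
      return 0;
  }
  EOF
  gcc -x c   -o /tmp/probe_c   /tmp/empty_union_layout.c && /tmp/probe_c     # typically: sizeof = 8, offsetof(after) = 4
  g++ -x c++ -o /tmp/probe_cxx /tmp/empty_union_layout.c && /tmp/probe_cxx   # typically: sizeof = 12, offsetof(after) = 8 -- the shifted offset behind the segfault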
@@ -0,0 +1,97 @@
diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
|
||||
--- a/usr/include/urcu/uatomic/generic.h 2014-03-28 06:04:42.000000000 +0900
|
||||
+++ b/usr/include/urcu/uatomic/generic.h 2017-02-13 10:35:21.189927116 +0900
|
||||
@@ -65,17 +65,17 @@
|
||||
switch (len) {
|
||||
#ifdef UATOMIC_HAS_ATOMIC_BYTE
|
||||
case 1:
|
||||
- return __sync_val_compare_and_swap_1(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_1((uint8_t *) addr, old, _new);
|
||||
#endif
|
||||
#ifdef UATOMIC_HAS_ATOMIC_SHORT
|
||||
case 2:
|
||||
- return __sync_val_compare_and_swap_2(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_2((uint16_t *) addr, old, _new);
|
||||
#endif
|
||||
case 4:
|
||||
- return __sync_val_compare_and_swap_4(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_4((uint32_t *) addr, old, _new);
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
- return __sync_val_compare_and_swap_8(addr, old, _new);
|
||||
+ return __sync_val_compare_and_swap_8((uint64_t *) addr, old, _new);
|
||||
#endif
|
||||
}
|
||||
_uatomic_link_error();
|
||||
@@ -100,20 +100,20 @@
|
||||
switch (len) {
|
||||
#ifdef UATOMIC_HAS_ATOMIC_BYTE
|
||||
case 1:
|
||||
- __sync_and_and_fetch_1(addr, val);
|
||||
+ __sync_and_and_fetch_1((uint8_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
#ifdef UATOMIC_HAS_ATOMIC_SHORT
|
||||
case 2:
|
||||
- __sync_and_and_fetch_2(addr, val);
|
||||
+ __sync_and_and_fetch_2((uint16_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
case 4:
|
||||
- __sync_and_and_fetch_4(addr, val);
|
||||
+ __sync_and_and_fetch_4((uint32_t *) addr, val);
|
||||
return;
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
- __sync_and_and_fetch_8(addr, val);
|
||||
+ __sync_and_and_fetch_8((uint64_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
}
|
||||
@@ -139,20 +139,20 @@
|
||||
switch (len) {
|
||||
#ifdef UATOMIC_HAS_ATOMIC_BYTE
|
||||
case 1:
|
||||
- __sync_or_and_fetch_1(addr, val);
|
||||
+ __sync_or_and_fetch_1((uint8_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
#ifdef UATOMIC_HAS_ATOMIC_SHORT
|
||||
case 2:
|
||||
- __sync_or_and_fetch_2(addr, val);
|
||||
+ __sync_or_and_fetch_2((uint16_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
case 4:
|
||||
- __sync_or_and_fetch_4(addr, val);
|
||||
+ __sync_or_and_fetch_4((uint32_t *) addr, val);
|
||||
return;
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
- __sync_or_and_fetch_8(addr, val);
|
||||
+ __sync_or_and_fetch_8((uint64_t *) addr, val);
|
||||
return;
|
||||
#endif
|
||||
}
|
||||
@@ -180,17 +180,17 @@
|
||||
switch (len) {
|
||||
#ifdef UATOMIC_HAS_ATOMIC_BYTE
|
||||
case 1:
|
||||
- return __sync_add_and_fetch_1(addr, val);
|
||||
+ return __sync_add_and_fetch_1((uint8_t *) addr, val);
|
||||
#endif
|
||||
#ifdef UATOMIC_HAS_ATOMIC_SHORT
|
||||
case 2:
|
||||
- return __sync_add_and_fetch_2(addr, val);
|
||||
+ return __sync_add_and_fetch_2((uint16_t *) addr, val);
|
||||
#endif
|
||||
case 4:
|
||||
- return __sync_add_and_fetch_4(addr, val);
|
||||
+ return __sync_add_and_fetch_4((uint32_t *) addr, val);
|
||||
#if (CAA_BITS_PER_LONG == 64)
|
||||
case 8:
|
||||
- return __sync_add_and_fetch_8(addr, val);
|
||||
+ return __sync_add_and_fetch_8((uint64_t *) addr, val);
|
||||
#endif
|
||||
}
|
||||
_uatomic_link_error();
|
|
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted

deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
@@ -0,0 +1,11 @@
deb http://deb.debian.org/debian buster main
deb-src http://deb.debian.org/debian buster main

deb http://deb.debian.org/debian-security/ buster/updates main
deb-src http://deb.debian.org/debian-security/ buster/updates main

deb http://deb.debian.org/debian buster-updates main
deb-src http://deb.debian.org/debian buster-updates main

deb http://deb.debian.org/debian buster-backports main contrib non-free
deb-src http://deb.debian.org/debian buster-backports main contrib non-free
@@ -0,0 +1,12 @@
deb http://deb.debian.org/debian stretch main
deb-src http://deb.debian.org/debian stretch main

deb http://deb.debian.org/debian-security/ stretch/updates main
deb-src http://deb.debian.org/debian-security/ stretch/updates main

deb http://deb.debian.org/debian stretch-updates main
deb-src http://deb.debian.org/debian stretch-updates main

deb http://deb.debian.org/debian stretch-backports main contrib non-free
deb-src http://deb.debian.org/debian stretch-backports main contrib non-free
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-updates main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-backports main restricted

deb http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ trusty-security main restricted universe multiverse
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted

deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
@@ -0,0 +1,11 @@
deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe

deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted

deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
@@ -0,0 +1,35 @@
#!/usr/bin/env bash
set -e

__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__TIZEN_CROSSDIR="$__CrossDir/tizen"

if [[ -z "$ROOTFS_DIR" ]]; then
  echo "ROOTFS_DIR is not defined."
  exit 1;
fi

TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
mkdir -p $TIZEN_TMP_DIR

# Download files
echo ">>Start downloading files"
VERBOSE=1 $__CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
echo "<<Finish downloading files"

echo ">>Start constructing Tizen rootfs"
TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
cd $ROOTFS_DIR
for f in $TIZEN_RPM_FILES; do
  rpm2cpio $f | cpio -idm --quiet
done
echo "<<Finish constructing Tizen rootfs"

# Cleanup tmp
rm -rf $TIZEN_TMP_DIR

# Configure Tizen rootfs
echo ">>Start configuring Tizen rootfs"
ln -sfn asm-arm64 ./usr/include/asm
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
echo "<<Finish configuring Tizen rootfs"
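The script above expects ROOTFS_DIR to point at the target sysroot, which is how the cross rootfs script later in this change drives it. A manual run looks roughly like this (the output path is an arbitrary example):

  ROOTFS_DIR=$HOME/tizen-arm64-rootfs ./tizen-build-rootfs.sh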
@@ -0,0 +1,170 @@
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
|
||||
VERBOSE=0
|
||||
fi
|
||||
|
||||
Log()
|
||||
{
|
||||
if [ $VERBOSE -ge $1 ]; then
|
||||
echo ${@:2}
|
||||
fi
|
||||
}
|
||||
|
||||
Inform()
|
||||
{
|
||||
Log 1 -e "\x1B[0;34m$@\x1B[m"
|
||||
}
|
||||
|
||||
Debug()
|
||||
{
|
||||
Log 2 -e "\x1B[0;32m$@\x1B[m"
|
||||
}
|
||||
|
||||
Error()
|
||||
{
|
||||
>&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
|
||||
}
|
||||
|
||||
Fetch()
|
||||
{
|
||||
URL=$1
|
||||
FILE=$2
|
||||
PROGRESS=$3
|
||||
if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
|
||||
CURL_OPT="--progress-bar"
|
||||
else
|
||||
CURL_OPT="--silent"
|
||||
fi
|
||||
curl $CURL_OPT $URL > $FILE
|
||||
}
|
||||
|
||||
hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
|
||||
hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
|
||||
hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
|
||||
|
||||
TMPDIR=$1
|
||||
if [ ! -d $TMPDIR ]; then
|
||||
TMPDIR=./tizen_tmp
|
||||
Debug "Create temporary directory : $TMPDIR"
|
||||
mkdir -p $TMPDIR
|
||||
fi
|
||||
|
||||
TIZEN_URL=http://download.tizen.org/snapshots/tizen/
|
||||
BUILD_XML=build.xml
|
||||
REPOMD_XML=repomd.xml
|
||||
PRIMARY_XML=primary.xml
|
||||
TARGET_URL="http://__not_initialized"
|
||||
|
||||
Xpath_get()
|
||||
{
|
||||
XPATH_RESULT=''
|
||||
XPATH=$1
|
||||
XML_FILE=$2
|
||||
RESULT=$(xmllint --xpath $XPATH $XML_FILE)
|
||||
if [[ -z ${RESULT// } ]]; then
|
||||
Error "Can not find target from $XML_FILE"
|
||||
Debug "Xpath = $XPATH"
|
||||
exit 1
|
||||
fi
|
||||
XPATH_RESULT=$RESULT
|
||||
}
|
||||
|
||||
fetch_tizen_pkgs_init()
|
||||
{
|
||||
TARGET=$1
|
||||
PROFILE=$2
|
||||
Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
|
||||
|
||||
TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
|
||||
if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
|
||||
mkdir -p $TMP_PKG_DIR
|
||||
|
||||
PKG_URL=$TIZEN_URL/$PROFILE/latest
|
||||
|
||||
BUILD_XML_URL=$PKG_URL/$BUILD_XML
|
||||
TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
|
||||
TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
|
||||
TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
|
||||
TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
|
||||
|
||||
Fetch $BUILD_XML_URL $TMP_BUILD
|
||||
|
||||
Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
|
||||
|
||||
TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
|
||||
Xpath_get $TARGET_XPATH $TMP_BUILD
|
||||
TARGET_PATH=$XPATH_RESULT
|
||||
TARGET_URL=$PKG_URL/$TARGET_PATH
|
||||
|
||||
REPOMD_URL=$TARGET_URL/repodata/repomd.xml
|
||||
PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
|
||||
|
||||
Fetch $REPOMD_URL $TMP_REPOMD
|
||||
|
||||
Debug "fetch $REPOMD_URL to $TMP_REPOMD"
|
||||
|
||||
Xpath_get $PRIMARY_XPATH $TMP_REPOMD
|
||||
PRIMARY_XML_PATH=$XPATH_RESULT
|
||||
PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
|
||||
|
||||
Fetch $PRIMARY_URL $TMP_PRIMARYGZ
|
||||
|
||||
Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
|
||||
|
||||
gunzip $TMP_PRIMARYGZ
|
||||
|
||||
Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
|
||||
}
|
||||
|
||||
fetch_tizen_pkgs()
|
||||
{
|
||||
ARCH=$1
|
||||
PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
|
||||
|
||||
PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
|
||||
|
||||
for pkg in ${@:2}
|
||||
do
|
||||
Inform "Fetching... $pkg"
|
||||
XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
|
||||
XPATH=${XPATH/_ARCH_/$ARCH}
|
||||
Xpath_get $XPATH $TMP_PRIMARY
|
||||
PKG_PATH=$XPATH_RESULT
|
||||
|
||||
XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
|
||||
XPATH=${XPATH/_ARCH_/$ARCH}
|
||||
Xpath_get $XPATH $TMP_PRIMARY
|
||||
CHECKSUM=$XPATH_RESULT
|
||||
|
||||
PKG_URL=$TARGET_URL/$PKG_PATH
|
||||
PKG_FILE=$(basename $PKG_PATH)
|
||||
PKG_PATH=$TMPDIR/$PKG_FILE
|
||||
|
||||
Debug "Download $PKG_URL to $PKG_PATH"
|
||||
Fetch $PKG_URL $PKG_PATH true
|
||||
|
||||
echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
|
||||
if [ $? -ne 0 ]; then
|
||||
Error "Fail to fetch $PKG_URL to $PKG_PATH"
|
||||
Debug "Checksum = $CHECKSUM"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
Inform "Initialize arm base"
|
||||
fetch_tizen_pkgs_init standard base
|
||||
Inform "fetch common packages"
|
||||
fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel
|
||||
Inform "fetch coreclr packages"
|
||||
fetch_tizen_pkgs aarch64 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
|
||||
Inform "fetch corefx packages"
|
||||
fetch_tizen_pkgs aarch64 libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
|
||||
|
||||
Inform "Initialize standard unified"
|
||||
fetch_tizen_pkgs_init standard unified
|
||||
Inform "fetch corefx packages"
|
||||
fetch_tizen_pkgs aarch64 gssdp gssdp-devel tizen-release
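A note on the fetch helper above: package locations and checksums are resolved from the repo metadata with xmllint XPath queries. Run by hand against a downloaded repomd.xml, the same lookup it performs looks like this (file name assumed to be a local copy of the repo's repomd.xml):

  xmllint --xpath 'string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)' repomd.xml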
|
||||
|
|
@@ -0,0 +1,9 @@
diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
--- a/usr/lib64/libc.so	2016-12-30 23:00:08.284951863 +0900
+++ b/usr/lib64/libc.so	2016-12-30 23:00:32.140951815 +0900
@@ -2,4 +2,4 @@
    Use the shared library, but some functions are only in
    the static library, so try that secondarily.  */
 OUTPUT_FORMAT(elf64-littleaarch64)
-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a  AS_NEEDED ( /lib/ld-linux-aarch64.so.1 ) )
+GROUP ( libc.so.6 libc_nonshared.a  AS_NEEDED ( ld-linux-aarch64.so.1 ) )
@@ -0,0 +1,3 @@
# Debian (jessie)   # Stable
deb http://ftp.debian.org/debian/ jessie main contrib non-free
deb-src http://ftp.debian.org/debian/ jessie main contrib non-free
@@ -0,0 +1,35 @@
#!/usr/bin/env bash
set -e

__ARM_SOFTFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__TIZEN_CROSSDIR="$__ARM_SOFTFP_CrossDir/tizen"

if [[ -z "$ROOTFS_DIR" ]]; then
  echo "ROOTFS_DIR is not defined."
  exit 1;
fi

TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
mkdir -p $TIZEN_TMP_DIR

# Download files
echo ">>Start downloading files"
VERBOSE=1 $__ARM_SOFTFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
echo "<<Finish downloading files"

echo ">>Start constructing Tizen rootfs"
TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
cd $ROOTFS_DIR
for f in $TIZEN_RPM_FILES; do
  rpm2cpio $f | cpio -idm --quiet
done
echo "<<Finish constructing Tizen rootfs"

# Cleanup tmp
rm -rf $TIZEN_TMP_DIR

# Configure Tizen rootfs
echo ">>Start configuring Tizen rootfs"
ln -sfn asm-arm ./usr/include/asm
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
echo "<<Finish configuring Tizen rootfs"
@@ -0,0 +1,170 @@
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
|
||||
VERBOSE=0
|
||||
fi
|
||||
|
||||
Log()
|
||||
{
|
||||
if [ $VERBOSE -ge $1 ]; then
|
||||
echo ${@:2}
|
||||
fi
|
||||
}
|
||||
|
||||
Inform()
|
||||
{
|
||||
Log 1 -e "\x1B[0;34m$@\x1B[m"
|
||||
}
|
||||
|
||||
Debug()
|
||||
{
|
||||
Log 2 -e "\x1B[0;32m$@\x1B[m"
|
||||
}
|
||||
|
||||
Error()
|
||||
{
|
||||
>&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
|
||||
}
|
||||
|
||||
Fetch()
|
||||
{
|
||||
URL=$1
|
||||
FILE=$2
|
||||
PROGRESS=$3
|
||||
if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
|
||||
CURL_OPT="--progress-bar"
|
||||
else
|
||||
CURL_OPT="--silent"
|
||||
fi
|
||||
curl $CURL_OPT $URL > $FILE
|
||||
}
|
||||
|
||||
hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
|
||||
hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
|
||||
hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
|
||||
|
||||
TMPDIR=$1
|
||||
if [ ! -d $TMPDIR ]; then
|
||||
TMPDIR=./tizen_tmp
|
||||
Debug "Create temporary directory : $TMPDIR"
|
||||
mkdir -p $TMPDIR
|
||||
fi
|
||||
|
||||
TIZEN_URL=http://download.tizen.org/snapshots/tizen
|
||||
BUILD_XML=build.xml
|
||||
REPOMD_XML=repomd.xml
|
||||
PRIMARY_XML=primary.xml
|
||||
TARGET_URL="http://__not_initialized"
|
||||
|
||||
Xpath_get()
|
||||
{
|
||||
XPATH_RESULT=''
|
||||
XPATH=$1
|
||||
XML_FILE=$2
|
||||
RESULT=$(xmllint --xpath $XPATH $XML_FILE)
|
||||
if [[ -z ${RESULT// } ]]; then
|
||||
Error "Can not find target from $XML_FILE"
|
||||
Debug "Xpath = $XPATH"
|
||||
exit 1
|
||||
fi
|
||||
XPATH_RESULT=$RESULT
|
||||
}
|
||||
|
||||
fetch_tizen_pkgs_init()
|
||||
{
|
||||
TARGET=$1
|
||||
PROFILE=$2
|
||||
Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
|
||||
|
||||
TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
|
||||
if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
|
||||
mkdir -p $TMP_PKG_DIR
|
||||
|
||||
PKG_URL=$TIZEN_URL/$PROFILE/latest
|
||||
|
||||
BUILD_XML_URL=$PKG_URL/$BUILD_XML
|
||||
TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
|
||||
TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
|
||||
TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
|
||||
TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
|
||||
|
||||
Fetch $BUILD_XML_URL $TMP_BUILD
|
||||
|
||||
Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
|
||||
|
||||
TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
|
||||
Xpath_get $TARGET_XPATH $TMP_BUILD
|
||||
TARGET_PATH=$XPATH_RESULT
|
||||
TARGET_URL=$PKG_URL/$TARGET_PATH
|
||||
|
||||
REPOMD_URL=$TARGET_URL/repodata/repomd.xml
|
||||
PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
|
||||
|
||||
Fetch $REPOMD_URL $TMP_REPOMD
|
||||
|
||||
Debug "fetch $REPOMD_URL to $TMP_REPOMD"
|
||||
|
||||
Xpath_get $PRIMARY_XPATH $TMP_REPOMD
|
||||
PRIMARY_XML_PATH=$XPATH_RESULT
|
||||
PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
|
||||
|
||||
Fetch $PRIMARY_URL $TMP_PRIMARYGZ
|
||||
|
||||
Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
|
||||
|
||||
gunzip $TMP_PRIMARYGZ
|
||||
|
||||
Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
|
||||
}
|
||||
|
||||
fetch_tizen_pkgs()
|
||||
{
|
||||
ARCH=$1
|
||||
PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
|
||||
|
||||
PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
|
||||
|
||||
for pkg in ${@:2}
|
||||
do
|
||||
Inform "Fetching... $pkg"
|
||||
XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
|
||||
XPATH=${XPATH/_ARCH_/$ARCH}
|
||||
Xpath_get $XPATH $TMP_PRIMARY
|
||||
PKG_PATH=$XPATH_RESULT
|
||||
|
||||
XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
|
||||
XPATH=${XPATH/_ARCH_/$ARCH}
|
||||
Xpath_get $XPATH $TMP_PRIMARY
|
||||
CHECKSUM=$XPATH_RESULT
|
||||
|
||||
PKG_URL=$TARGET_URL/$PKG_PATH
|
||||
PKG_FILE=$(basename $PKG_PATH)
|
||||
PKG_PATH=$TMPDIR/$PKG_FILE
|
||||
|
||||
Debug "Download $PKG_URL to $PKG_PATH"
|
||||
Fetch $PKG_URL $PKG_PATH true
|
||||
|
||||
echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
|
||||
if [ $? -ne 0 ]; then
|
||||
Error "Fail to fetch $PKG_URL to $PKG_PATH"
|
||||
Debug "Checksum = $CHECKSUM"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
Inform "Initialize arm base"
|
||||
fetch_tizen_pkgs_init standard base
|
||||
Inform "fetch common packages"
|
||||
fetch_tizen_pkgs armv7l gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel
|
||||
Inform "fetch coreclr packages"
|
||||
fetch_tizen_pkgs armv7l lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
|
||||
Inform "fetch corefx packages"
|
||||
fetch_tizen_pkgs armv7l libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
|
||||
|
||||
Inform "Initialize standard unified"
|
||||
fetch_tizen_pkgs_init standard unified
|
||||
Inform "fetch corefx packages"
|
||||
fetch_tizen_pkgs armv7l gssdp gssdp-devel tizen-release
|
||||
|
|
@@ -0,0 +1,50 @@
lang en_US.UTF-8
keyboard us
timezone --utc Asia/Seoul

part / --fstype="ext4" --size=3500 --ondisk=mmcblk0 --label rootfs --fsoptions=defaults,noatime

rootpw tizen
desktop --autologinuser=root
user --name root --groups audio,video --password 'tizen'

repo --name=standard --baseurl=http://download.tizen.org/releases/milestone/tizen/unified/latest/repos/standard/packages/ --ssl_verify=no
repo --name=base --baseurl=http://download.tizen.org/releases/milestone/tizen/base/latest/repos/standard/packages/ --ssl_verify=no

%packages
tar
gzip

sed
grep
gawk
perl

binutils
findutils
util-linux
lttng-ust
userspace-rcu
procps-ng
tzdata
ca-certificates


### Core FX
libicu
libunwind
iputils
zlib
krb5
libcurl
libopenssl

%end

%post

### Update /tmp privilege
chmod 777 /tmp
####################################

%end
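This kickstart is the sort of definition the Tizen image creator consumes; an invocation along the following lines is assumed rather than shown anywhere in this change (tool and file name are placeholders):

  sudo mic create auto tizen-dotnet.ks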
@@ -0,0 +1,9 @@
diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
--- a/usr/lib/libc.so	2016-12-30 23:00:08.284951863 +0900
+++ b/usr/lib/libc.so	2016-12-30 23:00:32.140951815 +0900
@@ -2,4 +2,4 @@
    Use the shared library, but some functions are only in
    the static library, so try that secondarily.  */
 OUTPUT_FORMAT(elf32-littlearm)
-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a  AS_NEEDED ( /lib/ld-linux.so.3 ) )
+GROUP ( libc.so.6 libc_nonshared.a  AS_NEEDED ( ld-linux.so.3 ) )
@@ -0,0 +1,131 @@
#!/usr/bin/env bash
|
||||
set -e
|
||||
__NDK_Version=r21
|
||||
|
||||
usage()
|
||||
{
|
||||
echo "Creates a toolchain and sysroot used for cross-compiling for Android."
|
||||
echo.
|
||||
echo "Usage: $0 [BuildArch] [ApiLevel]"
|
||||
echo.
|
||||
echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
|
||||
echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
|
||||
echo.
|
||||
echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
|
||||
echo "by setting the TOOLCHAIN_DIR environment variable"
|
||||
echo.
|
||||
echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation,"
|
||||
echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK."
|
||||
echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.28-arm64. This file is to replace '/etc/os-release', which is not available for Android."
|
||||
exit 1
|
||||
}
|
||||
|
||||
__ApiLevel=28 # The minimum platform for arm64 is API level 21 but the minimum version that support glob(3) is 28. See $ANDROID_NDK/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include/glob.h
|
||||
__BuildArch=arm64
|
||||
__AndroidArch=aarch64
|
||||
__AndroidToolchain=aarch64-linux-android
|
||||
|
||||
for i in "$@"
|
||||
do
|
||||
lowerI="$(echo $i | awk '{print tolower($0)}')"
|
||||
case $lowerI in
|
||||
-?|-h|--help)
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
arm64)
|
||||
__BuildArch=arm64
|
||||
__AndroidArch=aarch64
|
||||
__AndroidToolchain=aarch64-linux-android
|
||||
;;
|
||||
arm)
|
||||
__BuildArch=arm
|
||||
__AndroidArch=arm
|
||||
__AndroidToolchain=arm-linux-androideabi
|
||||
;;
|
||||
*[0-9])
|
||||
__ApiLevel=$i
|
||||
;;
|
||||
*)
|
||||
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Obtain the location of the bash script to figure out where the root of the repo is.
|
||||
__ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
__CrossDir="$__ScriptBaseDir/../../../.tools/android-rootfs"
|
||||
|
||||
if [[ ! -f "$__CrossDir" ]]; then
|
||||
mkdir -p "$__CrossDir"
|
||||
fi
|
||||
|
||||
# Resolve absolute path to avoid `../` in build logs
|
||||
__CrossDir="$( cd "$__CrossDir" && pwd )"
|
||||
|
||||
__NDK_Dir="$__CrossDir/android-ndk-$__NDK_Version"
|
||||
__lldb_Dir="$__CrossDir/lldb"
|
||||
__ToolchainDir="$__CrossDir/android-ndk-$__NDK_Version"
|
||||
|
||||
if [[ -n "$TOOLCHAIN_DIR" ]]; then
|
||||
__ToolchainDir=$TOOLCHAIN_DIR
|
||||
fi
|
||||
|
||||
if [[ -n "$NDK_DIR" ]]; then
|
||||
__NDK_Dir=$NDK_DIR
|
||||
fi
|
||||
|
||||
echo "Target API level: $__ApiLevel"
|
||||
echo "Target architecture: $__BuildArch"
|
||||
echo "NDK location: $__NDK_Dir"
|
||||
echo "Target Toolchain location: $__ToolchainDir"
|
||||
|
||||
# Download the NDK if required
|
||||
if [ ! -d $__NDK_Dir ]; then
|
||||
echo Downloading the NDK into $__NDK_Dir
|
||||
mkdir -p $__NDK_Dir
|
||||
wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip
|
||||
unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__CrossDir
|
||||
fi
|
||||
|
||||
if [ ! -d $__lldb_Dir ]; then
|
||||
mkdir -p $__lldb_Dir
|
||||
echo Downloading LLDB into $__lldb_Dir
|
||||
wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip
|
||||
unzip -q $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip -d $__lldb_Dir
|
||||
fi
|
||||
|
||||
echo "Download dependencies..."
|
||||
__TmpDir=$__CrossDir/tmp/$__BuildArch/
|
||||
mkdir -p "$__TmpDir"
|
||||
|
||||
# combined dependencies for coreclr, installer and libraries
|
||||
__AndroidPackages="libicu"
|
||||
__AndroidPackages+=" libandroid-glob"
|
||||
__AndroidPackages+=" liblzma"
|
||||
__AndroidPackages+=" krb5"
|
||||
__AndroidPackages+=" openssl"
|
||||
|
||||
for path in $(wget -qO- http://termux.net/dists/stable/main/binary-$__AndroidArch/Packages |\
|
||||
grep -A15 "Package: \(${__AndroidPackages// /\\|}\)" | grep -v "static\|tool" | grep Filename); do
|
||||
|
||||
if [[ "$path" != "Filename:" ]]; then
|
||||
echo "Working on: $path"
|
||||
wget -qO- http://termux.net/$path | dpkg -x - "$__TmpDir"
|
||||
fi
|
||||
done
|
||||
|
||||
cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/sysroot/usr/"
|
||||
|
||||
# Generate platform file for build.sh script to assign to __DistroRid
|
||||
echo "Generating platform file..."
|
||||
echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/sysroot/android_platform
|
||||
|
||||
echo "Now to build coreclr, libraries and installers; run:"
|
||||
echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
|
||||
--subsetCategory coreclr
|
||||
echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
|
||||
--subsetCategory libraries
|
||||
echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
|
||||
--subsetCategory installer
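
Putting the usage text above into practice, a typical run targets arm64 at the default API level and can reuse an existing NDK via NDK_DIR. The script's own file name is not visible in this excerpt, so it appears below as a placeholder:

  NDK_DIR=$HOME/android-ndk-r21 ./<android-rootfs-script>.sh arm64 28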
|
|
@@ -0,0 +1,349 @@
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
usage()
|
||||
{
|
||||
echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [--skipunmount] --rootfsdir <directory>]"
|
||||
echo "BuildArch can be: arm(default), armel, arm64, x86"
|
||||
echo "CodeName - optional, Code name for Linux, can be: trusty, xenial(default), zesty, bionic, alpine. If BuildArch is armel, LinuxCodeName is jessie(default) or tizen."
|
||||
echo " for FreeBSD can be: freebsd11 or freebsd12."
|
||||
echo " for illumos can be: illumos."
|
||||
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FReeBSD"
|
||||
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
|
||||
echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
|
||||
exit 1
|
||||
}
|
||||
|
||||
__CodeName=xenial
|
||||
__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
|
||||
__InitialDir=$PWD
|
||||
__BuildArch=arm
|
||||
__AlpineArch=armv7
|
||||
__QEMUArch=arm
|
||||
__UbuntuArch=armhf
|
||||
__UbuntuRepo="http://ports.ubuntu.com/"
|
||||
__LLDB_Package="liblldb-3.9-dev"
|
||||
__SkipUnmount=0
|
||||
|
||||
# base development support
|
||||
__UbuntuPackages="build-essential"
|
||||
|
||||
__AlpinePackages="alpine-base"
|
||||
__AlpinePackages+=" build-base"
|
||||
__AlpinePackages+=" linux-headers"
|
||||
__AlpinePackagesEdgeCommunity=" lldb-dev"
|
||||
__AlpinePackagesEdgeMain=" llvm10-libs"
|
||||
__AlpinePackagesEdgeMain+=" python3"
|
||||
__AlpinePackagesEdgeMain+=" libedit"
|
||||
|
||||
# symlinks fixer
|
||||
__UbuntuPackages+=" symlinks"
|
||||
|
||||
# CoreCLR and CoreFX dependencies
|
||||
__UbuntuPackages+=" libicu-dev"
|
||||
__UbuntuPackages+=" liblttng-ust-dev"
|
||||
__UbuntuPackages+=" libunwind8-dev"
|
||||
|
||||
__AlpinePackages+=" gettext-dev"
|
||||
__AlpinePackages+=" icu-dev"
|
||||
__AlpinePackages+=" libunwind-dev"
|
||||
__AlpinePackages+=" lttng-ust-dev"
|
||||
|
||||
# CoreFX dependencies
|
||||
__UbuntuPackages+=" libcurl4-openssl-dev"
|
||||
__UbuntuPackages+=" libkrb5-dev"
|
||||
__UbuntuPackages+=" libssl-dev"
|
||||
__UbuntuPackages+=" zlib1g-dev"
|
||||
|
||||
__AlpinePackages+=" curl-dev"
|
||||
__AlpinePackages+=" krb5-dev"
|
||||
__AlpinePackages+=" openssl-dev"
|
||||
__AlpinePackages+=" zlib-dev"
|
||||
|
||||
__FreeBSDBase="12.1-RELEASE"
|
||||
__FreeBSDPkg="1.12.0"
|
||||
__FreeBSDPackages="libunwind"
|
||||
__FreeBSDPackages+=" icu"
|
||||
__FreeBSDPackages+=" libinotify"
|
||||
__FreeBSDPackages+=" lttng-ust"
|
||||
__FreeBSDPackages+=" krb5"
|
||||
|
||||
__IllumosPackages="icu-64.2nb2"
|
||||
__IllumosPackages+=" mit-krb5-1.16.2nb4"
|
||||
__IllumosPackages+=" openssl-1.1.1e"
|
||||
__IllumosPackages+=" zlib-1.2.11"
|
||||
|
||||
__UseMirror=0
|
||||
|
||||
__UnprocessedBuildArgs=
|
||||
while :; do
|
||||
if [ $# -le 0 ]; then
|
||||
break
|
||||
fi
|
||||
|
||||
lowerI="$(echo $1 | awk '{print tolower($0)}')"
|
||||
case $lowerI in
|
||||
-?|-h|--help)
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
arm)
|
||||
__BuildArch=arm
|
||||
__UbuntuArch=armhf
|
||||
__AlpineArch=armv7
|
||||
__QEMUArch=arm
|
||||
;;
|
||||
arm64)
|
||||
__BuildArch=arm64
|
||||
__UbuntuArch=arm64
|
||||
__AlpineArch=aarch64
|
||||
__QEMUArch=aarch64
|
||||
;;
|
||||
armel)
|
||||
__BuildArch=armel
|
||||
__UbuntuArch=armel
|
||||
__UbuntuRepo="http://ftp.debian.org/debian/"
|
||||
__CodeName=jessie
|
||||
;;
|
||||
x86)
|
||||
__BuildArch=x86
|
||||
__UbuntuArch=i386
|
||||
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
|
||||
;;
|
||||
lldb3.6)
|
||||
__LLDB_Package="lldb-3.6-dev"
|
||||
;;
|
||||
lldb3.8)
|
||||
__LLDB_Package="lldb-3.8-dev"
|
||||
;;
|
||||
lldb3.9)
|
||||
__LLDB_Package="liblldb-3.9-dev"
|
||||
;;
|
||||
lldb4.0)
|
||||
__LLDB_Package="liblldb-4.0-dev"
|
||||
;;
|
||||
lldb5.0)
|
||||
__LLDB_Package="liblldb-5.0-dev"
|
||||
;;
|
||||
lldb6.0)
|
||||
__LLDB_Package="liblldb-6.0-dev"
|
||||
;;
|
||||
no-lldb)
|
||||
unset __LLDB_Package
|
||||
;;
|
||||
trusty) # Ubuntu 14.04
|
||||
if [ "$__CodeName" != "jessie" ]; then
|
||||
__CodeName=trusty
|
||||
fi
|
||||
;;
|
||||
xenial) # Ubuntu 16.04
|
||||
if [ "$__CodeName" != "jessie" ]; then
|
||||
__CodeName=xenial
|
||||
fi
|
||||
;;
|
||||
zesty) # Ubuntu 17.04
|
||||
if [ "$__CodeName" != "jessie" ]; then
|
||||
__CodeName=zesty
|
||||
fi
|
||||
;;
|
||||
bionic) # Ubuntu 18.04
|
||||
if [ "$__CodeName" != "jessie" ]; then
|
||||
__CodeName=bionic
|
||||
fi
|
||||
;;
|
||||
jessie) # Debian 8
|
||||
__CodeName=jessie
|
||||
__UbuntuRepo="http://ftp.debian.org/debian/"
|
||||
;;
|
||||
stretch) # Debian 9
|
||||
__CodeName=stretch
|
||||
__UbuntuRepo="http://ftp.debian.org/debian/"
|
||||
__LLDB_Package="liblldb-6.0-dev"
|
||||
;;
|
||||
buster) # Debian 10
|
||||
__CodeName=buster
|
||||
__UbuntuRepo="http://ftp.debian.org/debian/"
|
||||
__LLDB_Package="liblldb-6.0-dev"
|
||||
;;
|
||||
tizen)
|
||||
if [ "$__BuildArch" != "armel" ] && [ "$__BuildArch" != "arm64" ]; then
|
||||
echo "Tizen is available only for armel and arm64."
|
||||
usage;
|
||||
exit 1;
|
||||
fi
|
||||
__CodeName=
|
||||
__UbuntuRepo=
|
||||
__Tizen=tizen
|
||||
;;
|
||||
alpine)
|
||||
__CodeName=alpine
|
||||
__UbuntuRepo=
|
||||
;;
|
||||
freebsd11)
|
||||
__FreeBSDBase="11.3-RELEASE"
|
||||
;&
|
||||
freebsd12)
|
||||
__CodeName=freebsd
|
||||
__BuildArch=x64
|
||||
__SkipUnmount=1
|
||||
;;
|
||||
illumos)
|
||||
__CodeName=illumos
|
||||
__BuildArch=x64
|
||||
__SkipUnmount=1
|
||||
;;
|
||||
--skipunmount)
|
||||
__SkipUnmount=1
|
||||
;;
|
||||
--rootfsdir|-rootfsdir)
|
||||
shift
|
||||
__RootfsDir=$1
|
||||
;;
|
||||
--use-mirror)
|
||||
__UseMirror=1
|
||||
;;
|
||||
*)
|
||||
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
|
||||
;;
|
||||
esac
|
||||
|
||||
shift
|
||||
done
|
||||
|
||||
if [ "$__BuildArch" == "armel" ]; then
|
||||
__LLDB_Package="lldb-3.5-dev"
|
||||
fi
|
||||
__UbuntuPackages+=" ${__LLDB_Package:-}"
|
||||
|
||||
if [ -z "$__RootfsDir" ] && [ ! -z "$ROOTFS_DIR" ]; then
|
||||
__RootfsDir=$ROOTFS_DIR
|
||||
fi
|
||||
|
||||
if [ -z "$__RootfsDir" ]; then
|
||||
__RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch"
|
||||
fi
|
||||
|
||||
if [ -d "$__RootfsDir" ]; then
|
||||
if [ $__SkipUnmount == 0 ]; then
|
||||
umount $__RootfsDir/* || true
|
||||
fi
|
||||
rm -rf $__RootfsDir
|
||||
fi
|
||||
|
||||
mkdir -p $__RootfsDir
|
||||
__RootfsDir="$( cd "$__RootfsDir" && pwd )"
|
||||
|
||||
if [[ "$__CodeName" == "alpine" ]]; then
|
||||
__ApkToolsVersion=2.9.1
|
||||
__AlpineVersion=3.9
|
||||
__ApkToolsDir=$(mktemp -d)
|
||||
wget https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -P $__ApkToolsDir
|
||||
tar -xf $__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz -C $__ApkToolsDir
|
||||
mkdir -p $__RootfsDir/usr/bin
|
||||
cp -v /usr/bin/qemu-$__QEMUArch-static $__RootfsDir/usr/bin
|
||||
|
||||
$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
|
||||
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main \
|
||||
-X http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community \
|
||||
-U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
|
||||
add $__AlpinePackages
|
||||
|
||||
$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
|
||||
-X http://dl-cdn.alpinelinux.org/alpine/edge/main \
|
||||
-U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
|
||||
add $__AlpinePackagesEdgeMain
|
||||
|
||||
$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk \
|
||||
-X http://dl-cdn.alpinelinux.org/alpine/edge/community \
|
||||
-U --allow-untrusted --root $__RootfsDir --arch $__AlpineArch --initdb \
|
||||
add $__AlpinePackagesEdgeCommunity
|
||||
|
||||
rm -r $__ApkToolsDir
|
||||
elif [[ "$__CodeName" == "freebsd" ]]; then
|
||||
mkdir -p $__RootfsDir/usr/local/etc
|
||||
wget -O - https://download.freebsd.org/ftp/releases/amd64/${__FreeBSDBase}/base.txz | tar -C $__RootfsDir -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
|
||||
# For now, ask for 11 ABI even on 12. This can be revisited later.
|
||||
echo "ABI = \"FreeBSD:11:amd64\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > ${__RootfsDir}/usr/local/etc/pkg.conf
|
||||
echo "FreeBSD: { url: "pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > ${__RootfsDir}/etc/pkg/FreeBSD.conf
|
||||
mkdir -p $__RootfsDir/tmp
|
||||
# get and build package manager
|
||||
wget -O - https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz | tar -C $__RootfsDir/tmp -zxf -
|
||||
cd $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
|
||||
# needed for install to succeed
|
||||
mkdir -p $__RootfsDir/host/etc
|
||||
./autogen.sh && ./configure --prefix=$__RootfsDir/host && make && make install
|
||||
rm -rf $__RootfsDir/tmp/pkg-${__FreeBSDPkg}
|
||||
# install packages we need.
|
||||
INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf update
|
||||
INSTALL_AS_USER=$(whoami) $__RootfsDir/host/sbin/pkg -r $__RootfsDir -C $__RootfsDir/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
|
||||
elif [[ "$__CodeName" == "illumos" ]]; then
|
||||
mkdir "$__RootfsDir/tmp"
|
||||
pushd "$__RootfsDir/tmp"
|
||||
JOBS="$(getconf _NPROCESSORS_ONLN)"
|
||||
echo "Downloading sysroot."
|
||||
wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
|
||||
echo "Building binutils. Please wait.."
|
||||
wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
|
||||
mkdir build-binutils && cd build-binutils
|
||||
../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="x86_64-sun-solaris2.10" --program-prefix="x86_64-illumos-" --with-sysroot="$__RootfsDir"
|
||||
make -j "$JOBS" && make install && cd ..
|
||||
echo "Building gcc. Please wait.."
|
||||
wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
|
||||
CFLAGS="-fPIC"
|
||||
CXXFLAGS="-fPIC"
|
||||
CXXFLAGS_FOR_TARGET="-fPIC"
|
||||
CFLAGS_FOR_TARGET="-fPIC"
|
||||
export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET
|
||||
mkdir build-gcc && cd build-gcc
|
||||
../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="x86_64-sun-solaris2.10" --program-prefix="x86_64-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
|
||||
--with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \
|
||||
--disable-libquadmath-support --disable-shared --enable-tls
|
||||
make -j "$JOBS" && make install && cd ..
|
||||
BaseUrl=https://pkgsrc.joyent.com
|
||||
if [[ "$__UseMirror" == 1 ]]; then
|
||||
BaseUrl=http://pkgsrc.smartos.skylime.net
|
||||
fi
|
||||
BaseUrl="$BaseUrl"/packages/SmartOS/2020Q1/x86_64/All
|
||||
echo "Downloading dependencies."
|
||||
read -ra array <<<"$__IllumosPackages"
|
||||
for package in "${array[@]}"; do
|
||||
echo "Installing $package..."
|
||||
wget "$BaseUrl"/"$package".tgz
|
||||
ar -x "$package".tgz
|
||||
tar --skip-old-files -xzf "$package".tmp.tgz -C "$__RootfsDir" 2>/dev/null
|
||||
done
|
||||
echo "Cleaning up temporary files."
|
||||
popd
|
||||
rm -rf "$__RootfsDir"/{tmp,+*}
|
||||
mkdir -p "$__RootfsDir"/usr/include/net
|
||||
mkdir -p "$__RootfsDir"/usr/include/netpacket
|
||||
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
|
||||
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
|
||||
wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
|
||||
wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
|
||||
elif [[ -n $__CodeName ]]; then
|
||||
qemu-debootstrap --arch $__UbuntuArch $__CodeName $__RootfsDir $__UbuntuRepo
|
||||
cp $__CrossDir/$__BuildArch/sources.list.$__CodeName $__RootfsDir/etc/apt/sources.list
|
||||
chroot $__RootfsDir apt-get update
|
||||
chroot $__RootfsDir apt-get -f -y install
|
||||
chroot $__RootfsDir apt-get -y install $__UbuntuPackages
|
||||
chroot $__RootfsDir symlinks -cr /usr
|
||||
|
||||
if [ $__SkipUnmount == 0 ]; then
|
||||
umount $__RootfsDir/* || true
|
||||
fi
|
||||
|
||||
if [[ "$__BuildArch" == "arm" && "$__CodeName" == "trusty" ]]; then
|
||||
pushd $__RootfsDir
|
||||
patch -p1 < $__CrossDir/$__BuildArch/trusty.patch
|
||||
patch -p1 < $__CrossDir/$__BuildArch/trusty-lttng-2.4.patch
|
||||
popd
|
||||
fi
|
||||
elif [[ "$__Tizen" == "tizen" ]]; then
|
||||
ROOTFS_DIR=$__RootfsDir $__CrossDir/$__BuildArch/tizen-build-rootfs.sh
|
||||
else
|
||||
echo "Unsupported target platform."
|
||||
usage;
|
||||
exit 1
|
||||
fi
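For completeness, the rootfs script above is usually given an architecture, a distro code name, and a target directory, mirroring its usage() text. The file name build-rootfs.sh is assumed from context (it is not visible in this excerpt), and the paths are examples only:

  ./build-rootfs.sh arm64 bionic --rootfsdir $HOME/rootfs/arm64
  ROOTFS_DIR=$HOME/rootfs/armel ./build-rootfs.sh armel tizen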
|
|
@@ -0,0 +1,235 @@
set(CROSS_ROOTFS $ENV{ROOTFS_DIR})
|
||||
|
||||
set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH})
|
||||
if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version)
|
||||
set(CMAKE_SYSTEM_NAME FreeBSD)
|
||||
elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc)
|
||||
set(CMAKE_SYSTEM_NAME SunOS)
|
||||
set(ILLUMOS 1)
|
||||
else()
|
||||
set(CMAKE_SYSTEM_NAME Linux)
|
||||
endif()
|
||||
set(CMAKE_SYSTEM_VERSION 1)
|
||||
|
||||
if(TARGET_ARCH_NAME STREQUAL "armel")
|
||||
set(CMAKE_SYSTEM_PROCESSOR armv7l)
|
||||
set(TOOLCHAIN "arm-linux-gnueabi")
|
||||
if("$ENV{__DistroRid}" MATCHES "tizen.*")
|
||||
set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "arm")
|
||||
set(CMAKE_SYSTEM_PROCESSOR armv7l)
|
||||
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv7-alpine-linux-musleabihf)
|
||||
set(TOOLCHAIN "armv7-alpine-linux-musleabihf")
|
||||
elseif(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
|
||||
set(TOOLCHAIN "armv6-alpine-linux-musleabihf")
|
||||
else()
|
||||
set(TOOLCHAIN "arm-linux-gnueabihf")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "arm64")
|
||||
set(CMAKE_SYSTEM_PROCESSOR aarch64)
|
||||
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl)
|
||||
set(TOOLCHAIN "aarch64-alpine-linux-musl")
|
||||
else()
|
||||
set(TOOLCHAIN "aarch64-linux-gnu")
|
||||
endif()
|
||||
if("$ENV{__DistroRid}" MATCHES "tizen.*")
|
||||
set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "x86")
|
||||
set(CMAKE_SYSTEM_PROCESSOR i686)
|
||||
set(TOOLCHAIN "i686-linux-gnu")
|
||||
elseif (CMAKE_SYSTEM_NAME STREQUAL "FreeBSD")
|
||||
set(CMAKE_SYSTEM_PROCESSOR "x86_64")
|
||||
set(triple "x86_64-unknown-freebsd11")
|
||||
elseif (ILLUMOS)
|
||||
set(CMAKE_SYSTEM_PROCESSOR "x86_64")
|
||||
set(TOOLCHAIN "x86_64-illumos")
|
||||
else()
|
||||
message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only armel, arm, arm64 and x86 are supported!")
|
||||
endif()
|
||||
|
||||
if(DEFINED ENV{TOOLCHAIN})
|
||||
set(TOOLCHAIN $ENV{TOOLCHAIN})
|
||||
endif()
|
||||
|
||||
# Specify include paths
|
||||
if(DEFINED TIZEN_TOOLCHAIN)
|
||||
if(TARGET_ARCH_NAME STREQUAL "armel")
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi)
|
||||
endif()
|
||||
if(TARGET_ARCH_NAME STREQUAL "arm64")
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/aarch64-tizen-linux-gnu)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if("$ENV{__DistroRid}" MATCHES "android.*")
|
||||
if(TARGET_ARCH_NAME STREQUAL "arm")
|
||||
set(ANDROID_ABI armeabi-v7a)
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "arm64")
|
||||
set(ANDROID_ABI arm64-v8a)
|
||||
endif()
|
||||
|
||||
# extract platform number required by the NDK's toolchain
|
||||
string(REGEX REPLACE ".*\\.([0-9]+)-.*" "\\1" ANDROID_PLATFORM "$ENV{__DistroRid}")
|
||||
|
||||
set(ANDROID_TOOLCHAIN clang)
|
||||
set(FEATURE_EVENT_TRACE 0) # disable event trace as there is no lttng-ust package in termux repository
|
||||
set(CMAKE_SYSTEM_LIBRARY_PATH "${CROSS_ROOTFS}/usr/lib")
|
||||
set(CMAKE_SYSTEM_INCLUDE_PATH "${CROSS_ROOTFS}/usr/include")
|
||||
|
||||
# include official NDK toolchain script
|
||||
include(${CROSS_ROOTFS}/../build/cmake/android.toolchain.cmake)
|
||||
elseif(CMAKE_SYSTEM_NAME STREQUAL "FreeBSD")
|
||||
# we cross-compile by instructing clang
|
||||
set(CMAKE_C_COMPILER_TARGET ${triple})
|
||||
set(CMAKE_CXX_COMPILER_TARGET ${triple})
|
||||
set(CMAKE_ASM_COMPILER_TARGET ${triple})
|
||||
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
|
||||
elseif(ILLUMOS)
|
||||
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
|
||||
|
||||
include_directories(SYSTEM ${CROSS_ROOTFS}/include)
|
||||
|
||||
set(TOOLSET_PREFIX ${TOOLCHAIN}-)
|
||||
function(locate_toolchain_exec exec var)
|
||||
string(TOUPPER ${exec} EXEC_UPPERCASE)
|
||||
if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
|
||||
set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
|
||||
return()
|
||||
endif()
|
||||
|
||||
find_program(EXEC_LOCATION_${exec}
|
||||
NAMES
|
||||
"${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
|
||||
"${TOOLSET_PREFIX}${exec}")
|
||||
|
||||
if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
|
||||
message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
|
||||
endif()
|
||||
set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
|
||||
|
||||
locate_toolchain_exec(gcc CMAKE_C_COMPILER)
|
||||
locate_toolchain_exec(g++ CMAKE_CXX_COMPILER)
|
||||
|
||||
set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
|
||||
set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
|
||||
else()
|
||||
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
|
||||
|
||||
set(CMAKE_C_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
|
||||
set(CMAKE_CXX_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
|
||||
set(CMAKE_ASM_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
|
||||
endif()
|
||||
|
||||
# Specify link flags
|
||||
|
||||
function(add_toolchain_linker_flag Flag)
|
||||
set(Config "${ARGV1}")
|
||||
set(CONFIG_SUFFIX "")
|
||||
if (NOT Config STREQUAL "")
|
||||
set(CONFIG_SUFFIX "_${Config}")
|
||||
endif()
|
||||
set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}} ${Flag}" PARENT_SCOPE)
|
||||
set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}} ${Flag}" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
|
||||
if(TARGET_ARCH_NAME STREQUAL "armel")
|
||||
if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only
|
||||
add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "arm64")
|
||||
if(DEFINED TIZEN_TOOLCHAIN) # For Tizen only
|
||||
add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
|
||||
|
||||
add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib64")
|
||||
add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64")
|
||||
add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "x86")
|
||||
add_toolchain_linker_flag(-m32)
|
||||
elseif(ILLUMOS)
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64")
|
||||
add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/amd64/lib")
|
||||
endif()
|
||||
|
||||
# Specify compile options
|
||||
|
||||
if((TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64)$" AND NOT "$ENV{__DistroRid}" MATCHES "android.*") OR ILLUMOS)
|
||||
set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
|
||||
set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
|
||||
set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
|
||||
endif()
|
||||
|
||||
if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
|
||||
add_compile_options(-mthumb)
|
||||
if (NOT DEFINED CLR_ARM_FPU_TYPE)
|
||||
set (CLR_ARM_FPU_TYPE vfpv3)
|
||||
endif (NOT DEFINED CLR_ARM_FPU_TYPE)
|
||||
|
||||
add_compile_options (-mfpu=${CLR_ARM_FPU_TYPE})
|
||||
if (NOT DEFINED CLR_ARM_FPU_CAPABILITY)
|
||||
set (CLR_ARM_FPU_CAPABILITY 0x7)
|
||||
endif (NOT DEFINED CLR_ARM_FPU_CAPABILITY)
|
||||
|
||||
add_definitions (-DCLR_ARM_FPU_CAPABILITY=${CLR_ARM_FPU_CAPABILITY})
|
||||
|
||||
if(TARGET_ARCH_NAME STREQUAL "armel")
|
||||
add_compile_options(-mfloat-abi=softfp)
|
||||
endif()
|
||||
elseif(TARGET_ARCH_NAME STREQUAL "x86")
|
||||
add_compile_options(-m32)
|
||||
add_compile_options(-Wno-error=unused-command-line-argument)
|
||||
endif()
|
||||
|
||||
if(DEFINED TIZEN_TOOLCHAIN)
|
||||
if(TARGET_ARCH_NAME MATCHES "^(armel|arm64)$")
|
||||
add_compile_options(-Wno-deprecated-declarations) # compile-time option
|
||||
add_compile_options(-D__extern_always_inline=inline) # compile-time option
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Set LLDB include and library paths for builds that need lldb.
|
||||
if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$")
|
||||
if(TARGET_ARCH_NAME STREQUAL "x86")
|
||||
set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}")
|
||||
else() # arm/armel case
|
||||
set(LLVM_CROSS_DIR "$ENV{LLVM_ARM_HOME}")
|
||||
endif()
|
||||
if(LLVM_CROSS_DIR)
|
||||
set(WITH_LLDB_LIBS "${LLVM_CROSS_DIR}/lib/" CACHE STRING "")
|
||||
set(WITH_LLDB_INCLUDES "${LLVM_CROSS_DIR}/include" CACHE STRING "")
|
||||
set(LLDB_H "${WITH_LLDB_INCLUDES}" CACHE STRING "")
|
||||
set(LLDB "${LLVM_CROSS_DIR}/lib/liblldb.so" CACHE STRING "")
|
||||
else()
|
||||
if(TARGET_ARCH_NAME STREQUAL "x86")
|
||||
set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/i386-linux-gnu" CACHE STRING "")
|
||||
set(CHECK_LLVM_DIR "${CROSS_ROOTFS}/usr/lib/llvm-3.8/include")
|
||||
if(EXISTS "${CHECK_LLVM_DIR}" AND IS_DIRECTORY "${CHECK_LLVM_DIR}")
|
||||
set(WITH_LLDB_INCLUDES "${CHECK_LLVM_DIR}")
|
||||
else()
|
||||
set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include")
|
||||
endif()
|
||||
else() # arm/armel case
|
||||
set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}" CACHE STRING "")
|
||||
set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include" CACHE STRING "")
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
|
||||
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
|
|
@ -0,0 +1,47 @@
|
|||
param (
|
||||
$darcVersion = $null,
|
||||
$versionEndpoint = 'https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16',
|
||||
$verbosity = 'minimal',
|
||||
$toolpath = $null
|
||||
)
|
||||
|
||||
. $PSScriptRoot\tools.ps1
|
||||
|
||||
function InstallDarcCli ($darcVersion, $toolpath) {
|
||||
$darcCliPackageName = 'microsoft.dotnet.darc'
|
||||
|
||||
$dotnetRoot = InitializeDotNetCli -install:$true
|
||||
$dotnet = "$dotnetRoot\dotnet.exe"
|
||||
$toolList = & "$dotnet" tool list -g
|
||||
|
||||
if ($toolList -like "*$darcCliPackageName*") {
|
||||
& "$dotnet" tool uninstall $darcCliPackageName -g
|
||||
}
|
||||
|
||||
# If the user didn't explicitly specify the darc version,
|
||||
# query the Maestro API for the correct version of darc to install.
|
||||
if (-not $darcVersion) {
|
||||
$darcVersion = $(Invoke-WebRequest -Uri $versionEndpoint -UseBasicParsing).Content
|
||||
}
|
||||
|
||||
$arcadeServicesSource = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
|
||||
|
||||
Write-Host "Installing Darc CLI version $darcVersion..."
|
||||
Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
|
||||
if (-not $toolpath) {
|
||||
Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity -g"
|
||||
& "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g
|
||||
}else {
|
||||
Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity --tool-path '$toolpath'"
|
||||
& "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath"
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
InstallDarcCli $darcVersion $toolpath
|
||||
}
|
||||
catch {
|
||||
Write-Host $_.ScriptStackTrace
|
||||
Write-PipelineTelemetryError -Category 'Darc' -Message $_
|
||||
ExitWithExitCode 1
|
||||
}
|
|
@ -0,0 +1,82 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source="${BASH_SOURCE[0]}"
|
||||
darcVersion=''
|
||||
versionEndpoint='https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16'
|
||||
verbosity='minimal'
|
||||
|
||||
while [[ $# > 0 ]]; do
|
||||
opt="$(echo "$1" | awk '{print tolower($0)}')"
|
||||
case "$opt" in
|
||||
--darcversion)
|
||||
darcVersion=$2
|
||||
shift
|
||||
;;
|
||||
--versionendpoint)
|
||||
versionEndpoint=$2
|
||||
shift
|
||||
;;
|
||||
--verbosity)
|
||||
verbosity=$2
|
||||
shift
|
||||
;;
|
||||
--toolpath)
|
||||
toolpath=$2
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
echo "Invalid argument: $1"
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
shift
|
||||
done
|
||||
|
||||
# resolve $source until the file is no longer a symlink
|
||||
while [[ -h "$source" ]]; do
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
source="$(readlink "$source")"
|
||||
# if $source was a relative symlink, we need to resolve it relative to the path where the
|
||||
# symlink file was located
|
||||
[[ $source != /* ]] && source="$scriptroot/$source"
|
||||
done
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
|
||||
. "$scriptroot/tools.sh"
|
||||
|
||||
if [ -z "$darcVersion" ]; then
|
||||
darcVersion=$(curl -X GET "$versionEndpoint" -H "accept: text/plain")
|
||||
fi
|
||||
|
||||
function InstallDarcCli {
|
||||
local darc_cli_package_name="microsoft.dotnet.darc"
|
||||
|
||||
InitializeDotNetCli
|
||||
local dotnet_root=$_InitializeDotNetCli
|
||||
|
||||
if [ -z "$toolpath" ]; then
|
||||
local tool_list=$($dotnet_root/dotnet tool list -g)
|
||||
if [[ $tool_list = *$darc_cli_package_name* ]]; then
|
||||
echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name -g)
|
||||
fi
|
||||
else
|
||||
local tool_list=$($dotnet_root/dotnet tool list --tool-path "$toolpath")
|
||||
if [[ $tool_list = *$darc_cli_package_name* ]]; then
|
||||
echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name --tool-path "$toolpath")
|
||||
fi
|
||||
fi
|
||||
|
||||
local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json"
|
||||
|
||||
echo "Installing Darc CLI version $darcVersion..."
|
||||
echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
|
||||
if [ -z "$toolpath" ]; then
|
||||
echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g)
|
||||
else
|
||||
echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath")
|
||||
fi
|
||||
}
|
||||
|
||||
InstallDarcCli
|
|
@ -0,0 +1,774 @@
|
|||
#
|
||||
# Copyright (c) .NET Foundation and contributors. All rights reserved.
|
||||
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
|
||||
#
|
||||
|
||||
# Copied from https://dot.net/v1/dotnet-install.ps1 on 8/26/2020
|
||||
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Installs dotnet cli
|
||||
.DESCRIPTION
|
||||
Installs dotnet cli. If dotnet installation already exists in the given directory
|
||||
it will update it only if the requested version differs from the one already installed.
|
||||
.PARAMETER Channel
|
||||
Default: LTS
|
||||
Download from the Channel specified. Possible values:
|
||||
- Current - most current release
|
||||
- LTS - most current supported release
|
||||
- 2-part version in a format A.B - represents a specific release
|
||||
examples: 2.0, 1.0
|
||||
- Branch name
|
||||
examples: release/2.0.0, Master
|
||||
Note: The version parameter overrides the channel parameter.
|
||||
.PARAMETER Version
|
||||
Default: latest
|
||||
Represents a build version on specific channel. Possible values:
|
||||
- latest - most latest build on specific channel
|
||||
- coherent - most latest coherent build on specific channel
|
||||
coherent applies only to SDK downloads
|
||||
- 3-part version in a format A.B.C - represents specific version of build
|
||||
examples: 2.0.0-preview2-006120, 1.1.0
|
||||
.PARAMETER InstallDir
|
||||
Default: %LocalAppData%\Microsoft\dotnet
|
||||
Path to where to install dotnet. Note that binaries will be placed directly in a given directory.
|
||||
.PARAMETER Architecture
|
||||
Default: <auto> - this value represents currently running OS architecture
|
||||
Architecture of dotnet binaries to be installed.
|
||||
Possible values are: <auto>, amd64, x64, x86, arm64, arm
|
||||
.PARAMETER SharedRuntime
|
||||
This parameter is obsolete and may be removed in a future version of this script.
|
||||
The recommended alternative is '-Runtime dotnet'.
|
||||
Installs just the shared runtime bits, not the entire SDK.
|
||||
.PARAMETER Runtime
|
||||
Installs just a shared runtime, not the entire SDK.
|
||||
Possible values:
|
||||
- dotnet - the Microsoft.NETCore.App shared runtime
|
||||
- aspnetcore - the Microsoft.AspNetCore.App shared runtime
|
||||
- windowsdesktop - the Microsoft.WindowsDesktop.App shared runtime
|
||||
.PARAMETER DryRun
|
||||
If set it will not perform installation but instead display what command line to use to consistently install
|
||||
currently requested version of dotnet cli. In example if you specify version 'latest' it will display a link
|
||||
with specific version so that this command can be used deterministicly in a build script.
|
||||
It also displays binaries location if you prefer to install or download it yourself.
|
||||
.PARAMETER NoPath
|
||||
By default this script will set environment variable PATH for the current process to the binaries folder inside installation folder.
|
||||
If set it will display binaries location but not set any environment variable.
|
||||
.PARAMETER Verbose
|
||||
Displays diagnostics information.
|
||||
.PARAMETER AzureFeed
|
||||
Default: https://dotnetcli.azureedge.net/dotnet
|
||||
This parameter typically is not changed by the user.
|
||||
It allows changing the URL for the Azure feed used by this installer.
|
||||
.PARAMETER UncachedFeed
|
||||
This parameter typically is not changed by the user.
|
||||
It allows changing the URL for the Uncached feed used by this installer.
|
||||
.PARAMETER FeedCredential
|
||||
Used as a query string to append to the Azure feed.
|
||||
It allows changing the URL to use non-public blob storage accounts.
|
||||
.PARAMETER ProxyAddress
|
||||
If set, the installer will use the proxy when making web requests
|
||||
.PARAMETER ProxyUseDefaultCredentials
|
||||
Default: false
|
||||
Use default credentials, when using proxy address.
|
||||
.PARAMETER ProxyBypassList
|
||||
If set with ProxyAddress, will provide the list of comma separated urls that will bypass the proxy
|
||||
.PARAMETER SkipNonVersionedFiles
|
||||
Default: false
|
||||
Skips installing non-versioned files if they already exist, such as dotnet.exe.
|
||||
.PARAMETER NoCdn
|
||||
Disable downloading from the Azure CDN, and use the uncached feed directly.
|
||||
.PARAMETER JSonFile
|
||||
Determines the SDK version from a user specified global.json file
|
||||
Note: global.json must have a value for 'SDK:Version'
|
||||
#>
|
||||
[cmdletbinding()]
|
||||
param(
|
||||
[string]$Channel="LTS",
|
||||
[string]$Version="Latest",
|
||||
[string]$JSonFile,
|
||||
[string]$InstallDir="<auto>",
|
||||
[string]$Architecture="<auto>",
|
||||
[ValidateSet("dotnet", "aspnetcore", "windowsdesktop", IgnoreCase = $false)]
|
||||
[string]$Runtime,
|
||||
[Obsolete("This parameter may be removed in a future version of this script. The recommended alternative is '-Runtime dotnet'.")]
|
||||
[switch]$SharedRuntime,
|
||||
[switch]$DryRun,
|
||||
[switch]$NoPath,
|
||||
[string]$AzureFeed="https://dotnetcli.azureedge.net/dotnet",
|
||||
[string]$UncachedFeed="https://dotnetcli.blob.core.windows.net/dotnet",
|
||||
[string]$FeedCredential,
|
||||
[string]$ProxyAddress,
|
||||
[switch]$ProxyUseDefaultCredentials,
|
||||
[string[]]$ProxyBypassList=@(),
|
||||
[switch]$SkipNonVersionedFiles,
|
||||
[switch]$NoCdn
|
||||
)
|
||||
|
||||
Set-StrictMode -Version Latest
|
||||
$ErrorActionPreference="Stop"
|
||||
$ProgressPreference="SilentlyContinue"
|
||||
|
||||
if ($NoCdn) {
|
||||
$AzureFeed = $UncachedFeed
|
||||
}
|
||||
|
||||
$BinFolderRelativePath=""
|
||||
|
||||
if ($SharedRuntime -and (-not $Runtime)) {
|
||||
$Runtime = "dotnet"
|
||||
}
|
||||
|
||||
# example path with regex: shared/1.0.0-beta-12345/somepath
|
||||
$VersionRegEx="/\d+\.\d+[^/]+/"
|
||||
$OverrideNonVersionedFiles = !$SkipNonVersionedFiles
|
||||
|
||||
function Say($str) {
|
||||
try
|
||||
{
|
||||
Write-Host "dotnet-install: $str"
|
||||
}
|
||||
catch
|
||||
{
|
||||
# Some platforms cannot utilize Write-Host (Azure Functions, for instance). Fall back to Write-Output
|
||||
Write-Output "dotnet-install: $str"
|
||||
}
|
||||
}
|
||||
|
||||
function Say-Verbose($str) {
|
||||
try
|
||||
{
|
||||
Write-Verbose "dotnet-install: $str"
|
||||
}
|
||||
catch
|
||||
{
|
||||
# Some platforms cannot utilize Write-Verbose (Azure Functions, for instance). Fall back to Write-Output
|
||||
Write-Output "dotnet-install: $str"
|
||||
}
|
||||
}
|
||||
|
||||
function Say-Invocation($Invocation) {
|
||||
$command = $Invocation.MyCommand;
|
||||
$args = (($Invocation.BoundParameters.Keys | foreach { "-$_ `"$($Invocation.BoundParameters[$_])`"" }) -join " ")
|
||||
Say-Verbose "$command $args"
|
||||
}
|
||||
|
||||
function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [int]$SecondsBetweenAttempts = 1) {
|
||||
$Attempts = 0
|
||||
|
||||
while ($true) {
|
||||
try {
|
||||
return $ScriptBlock.Invoke()
|
||||
}
|
||||
catch {
|
||||
$Attempts++
|
||||
if ($Attempts -lt $MaxAttempts) {
|
||||
Start-Sleep $SecondsBetweenAttempts
|
||||
}
|
||||
else {
|
||||
throw
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function Get-Machine-Architecture() {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
# On PS x86, PROCESSOR_ARCHITECTURE reports x86 even on x64 systems.
|
||||
# To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432.
|
||||
# PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE.
|
||||
# Possible values: amd64, x64, x86, arm64, arm
|
||||
|
||||
if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null )
|
||||
{
|
||||
return $ENV:PROCESSOR_ARCHITEW6432
|
||||
}
|
||||
|
||||
return $ENV:PROCESSOR_ARCHITECTURE
|
||||
}
|
||||
|
||||
function Get-CLIArchitecture-From-Architecture([string]$Architecture) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
switch ($Architecture.ToLower()) {
|
||||
{ $_ -eq "<auto>" } { return Get-CLIArchitecture-From-Architecture $(Get-Machine-Architecture) }
|
||||
{ ($_ -eq "amd64") -or ($_ -eq "x64") } { return "x64" }
|
||||
{ $_ -eq "x86" } { return "x86" }
|
||||
{ $_ -eq "arm" } { return "arm" }
|
||||
{ $_ -eq "arm64" } { return "arm64" }
|
||||
default { throw "Architecture not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues" }
|
||||
}
|
||||
}
|
||||
|
||||
# The version text returned from the feeds is a 1-line or 2-line string:
|
||||
# For the SDK and the dotnet runtime (2 lines):
|
||||
# Line 1: # commit_hash
|
||||
# Line 2: # 4-part version
|
||||
# For the aspnetcore runtime (1 line):
|
||||
# Line 1: # 4-part version
|
||||
function Get-Version-Info-From-Version-Text([string]$VersionText) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$Data = -split $VersionText
|
||||
|
||||
$VersionInfo = @{
|
||||
CommitHash = $(if ($Data.Count -gt 1) { $Data[0] })
|
||||
Version = $Data[-1] # last line is always the version number.
|
||||
}
|
||||
return $VersionInfo
|
||||
}
|
||||
|
||||
function Load-Assembly([string] $Assembly) {
|
||||
try {
|
||||
Add-Type -Assembly $Assembly | Out-Null
|
||||
}
|
||||
catch {
|
||||
# On Nano Server, Powershell Core Edition is used. Add-Type is unable to resolve base class assemblies because they are not GAC'd.
|
||||
# Loading the base class assemblies is not unnecessary as the types will automatically get resolved.
|
||||
}
|
||||
}
|
||||
|
||||
function GetHTTPResponse([Uri] $Uri)
|
||||
{
|
||||
Invoke-With-Retry(
|
||||
{
|
||||
|
||||
$HttpClient = $null
|
||||
|
||||
try {
|
||||
# HttpClient is used vs Invoke-WebRequest in order to support Nano Server which doesn't support the Invoke-WebRequest cmdlet.
|
||||
Load-Assembly -Assembly System.Net.Http
|
||||
|
||||
if(-not $ProxyAddress) {
|
||||
try {
|
||||
# Despite no proxy being explicitly specified, we may still be behind a default proxy
|
||||
$DefaultProxy = [System.Net.WebRequest]::DefaultWebProxy;
|
||||
if($DefaultProxy -and (-not $DefaultProxy.IsBypassed($Uri))) {
|
||||
$ProxyAddress = $DefaultProxy.GetProxy($Uri).OriginalString
|
||||
$ProxyUseDefaultCredentials = $true
|
||||
}
|
||||
} catch {
|
||||
# Eat the exception and move forward as the above code is an attempt
|
||||
# at resolving the DefaultProxy that may not have been a problem.
|
||||
$ProxyAddress = $null
|
||||
Say-Verbose("Exception ignored: $_.Exception.Message - moving forward...")
|
||||
}
|
||||
}
|
||||
|
||||
if($ProxyAddress) {
|
||||
$HttpClientHandler = New-Object System.Net.Http.HttpClientHandler
|
||||
$HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{
|
||||
Address=$ProxyAddress;
|
||||
UseDefaultCredentials=$ProxyUseDefaultCredentials;
|
||||
BypassList = $ProxyBypassList;
|
||||
}
|
||||
$HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler
|
||||
}
|
||||
else {
|
||||
|
||||
$HttpClient = New-Object System.Net.Http.HttpClient
|
||||
}
|
||||
# Default timeout for HttpClient is 100s. For a 50 MB download this assumes 500 KB/s average, any less will time out
|
||||
# 20 minutes allows it to work over much slower connections.
|
||||
$HttpClient.Timeout = New-TimeSpan -Minutes 20
|
||||
$Response = $HttpClient.GetAsync("${Uri}${FeedCredential}").Result
|
||||
if (($Response -eq $null) -or (-not ($Response.IsSuccessStatusCode))) {
|
||||
# The feed credential is potentially sensitive info. Do not log FeedCredential to console output.
|
||||
$ErrorMsg = "Failed to download $Uri."
|
||||
if ($Response -ne $null) {
|
||||
$ErrorMsg += " $Response"
|
||||
}
|
||||
|
||||
throw $ErrorMsg
|
||||
}
|
||||
|
||||
return $Response
|
||||
}
|
||||
finally {
|
||||
if ($HttpClient -ne $null) {
|
||||
$HttpClient.Dispose()
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Coherent) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$VersionFileUrl = $null
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version"
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$VersionFileUrl = "$UncachedFeed/aspnetcore/Runtime/$Channel/latest.version"
|
||||
}
|
||||
# Currently, the WindowsDesktop runtime is manufactured with the .Net core runtime
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
$VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version"
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
if ($Coherent) {
|
||||
$VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.coherent.version"
|
||||
}
|
||||
else {
|
||||
$VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version"
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
}
|
||||
try {
|
||||
$Response = GetHTTPResponse -Uri $VersionFileUrl
|
||||
}
|
||||
catch {
|
||||
throw "Could not resolve version information."
|
||||
}
|
||||
$StringContent = $Response.Content.ReadAsStringAsync().Result
|
||||
|
||||
switch ($Response.Content.Headers.ContentType) {
|
||||
{ ($_ -eq "application/octet-stream") } { $VersionText = $StringContent }
|
||||
{ ($_ -eq "text/plain") } { $VersionText = $StringContent }
|
||||
{ ($_ -eq "text/plain; charset=UTF-8") } { $VersionText = $StringContent }
|
||||
default { throw "``$Response.Content.Headers.ContentType`` is an unknown .version file content type." }
|
||||
}
|
||||
|
||||
$VersionInfo = Get-Version-Info-From-Version-Text $VersionText
|
||||
|
||||
return $VersionInfo
|
||||
}
|
||||
|
||||
function Parse-Jsonfile-For-Version([string]$JSonFile) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
If (-Not (Test-Path $JSonFile)) {
|
||||
throw "Unable to find '$JSonFile'"
|
||||
}
|
||||
try {
|
||||
$JSonContent = Get-Content($JSonFile) -Raw | ConvertFrom-Json | Select-Object -expand "sdk" -ErrorAction SilentlyContinue
|
||||
}
|
||||
catch {
|
||||
throw "Json file unreadable: '$JSonFile'"
|
||||
}
|
||||
if ($JSonContent) {
|
||||
try {
|
||||
$JSonContent.PSObject.Properties | ForEach-Object {
|
||||
$PropertyName = $_.Name
|
||||
if ($PropertyName -eq "version") {
|
||||
$Version = $_.Value
|
||||
Say-Verbose "Version = $Version"
|
||||
}
|
||||
}
|
||||
}
|
||||
catch {
|
||||
throw "Unable to parse the SDK node in '$JSonFile'"
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw "Unable to find the SDK node in '$JSonFile'"
|
||||
}
|
||||
If ($Version -eq $null) {
|
||||
throw "Unable to find the SDK:version node in '$JSonFile'"
|
||||
}
|
||||
return $Version
|
||||
}
|
||||
|
||||
function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel, [string]$Version, [string]$JSonFile) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if (-not $JSonFile) {
|
||||
switch ($Version.ToLower()) {
|
||||
{ $_ -eq "latest" } {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $False
|
||||
return $LatestVersionInfo.Version
|
||||
}
|
||||
{ $_ -eq "coherent" } {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $True
|
||||
return $LatestVersionInfo.Version
|
||||
}
|
||||
default { return $Version }
|
||||
}
|
||||
}
|
||||
else {
|
||||
return Parse-Jsonfile-For-Version $JSonFile
|
||||
}
|
||||
}
|
||||
|
||||
function Get-Download-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
# If anything fails in this lookup it will default to $SpecificVersion
|
||||
$SpecificProductVersion = Get-Product-Version -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion
|
||||
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$PayloadURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/aspnetcore-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/windowsdesktop-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-sdk-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
}
|
||||
|
||||
Say-Verbose "Constructed primary named payload URL: $PayloadURL"
|
||||
|
||||
return $PayloadURL, $SpecificProductVersion
|
||||
}
|
||||
|
||||
function Get-LegacyDownload-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if (-not $Runtime) {
|
||||
$PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-dev-win-$CLIArchitecture.$SpecificVersion.zip"
|
||||
}
|
||||
elseif ($Runtime -eq "dotnet") {
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-win-$CLIArchitecture.$SpecificVersion.zip"
|
||||
}
|
||||
else {
|
||||
return $null
|
||||
}
|
||||
|
||||
Say-Verbose "Constructed legacy named payload URL: $PayloadURL"
|
||||
|
||||
return $PayloadURL
|
||||
}
|
||||
|
||||
function Get-Product-Version([string]$AzureFeed, [string]$SpecificVersion) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Sdk/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
else {
|
||||
throw "Invalid value specified for `$Runtime"
|
||||
}
|
||||
|
||||
Say-Verbose "Checking for existence of $ProductVersionTxtURL"
|
||||
|
||||
try {
|
||||
$productVersionResponse = GetHTTPResponse($productVersionTxtUrl)
|
||||
|
||||
if ($productVersionResponse.StatusCode -eq 200) {
|
||||
$productVersion = $productVersionResponse.Content.ReadAsStringAsync().Result.Trim()
|
||||
if ($productVersion -ne $SpecificVersion)
|
||||
{
|
||||
Say "Using alternate version $productVersion found in $ProductVersionTxtURL"
|
||||
}
|
||||
|
||||
return $productVersion
|
||||
}
|
||||
else {
|
||||
Say-Verbose "Got StatusCode $($productVersionResponse.StatusCode) trying to get productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion"
|
||||
$productVersion = $SpecificVersion
|
||||
}
|
||||
} catch {
|
||||
Say-Verbose "Could not read productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion"
|
||||
$productVersion = $SpecificVersion
|
||||
}
|
||||
|
||||
return $productVersion
|
||||
}
|
||||
|
||||
function Get-User-Share-Path() {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$InstallRoot = $env:DOTNET_INSTALL_DIR
|
||||
if (!$InstallRoot) {
|
||||
$InstallRoot = "$env:LocalAppData\Microsoft\dotnet"
|
||||
}
|
||||
return $InstallRoot
|
||||
}
|
||||
|
||||
function Resolve-Installation-Path([string]$InstallDir) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if ($InstallDir -eq "<auto>") {
|
||||
return Get-User-Share-Path
|
||||
}
|
||||
return $InstallDir
|
||||
}
|
||||
|
||||
function Is-Dotnet-Package-Installed([string]$InstallRoot, [string]$RelativePathToPackage, [string]$SpecificVersion) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$DotnetPackagePath = Join-Path -Path $InstallRoot -ChildPath $RelativePathToPackage | Join-Path -ChildPath $SpecificVersion
|
||||
Say-Verbose "Is-Dotnet-Package-Installed: DotnetPackagePath=$DotnetPackagePath"
|
||||
return Test-Path $DotnetPackagePath -PathType Container
|
||||
}
|
||||
|
||||
function Get-Absolute-Path([string]$RelativeOrAbsolutePath) {
|
||||
# Too much spam
|
||||
# Say-Invocation $MyInvocation
|
||||
|
||||
return $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath($RelativeOrAbsolutePath)
|
||||
}
|
||||
|
||||
function Get-Path-Prefix-With-Version($path) {
|
||||
$match = [regex]::match($path, $VersionRegEx)
|
||||
if ($match.Success) {
|
||||
return $entry.FullName.Substring(0, $match.Index + $match.Length)
|
||||
}
|
||||
|
||||
return $null
|
||||
}
|
||||
|
||||
function Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package([System.IO.Compression.ZipArchive]$Zip, [string]$OutPath) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$ret = @()
|
||||
foreach ($entry in $Zip.Entries) {
|
||||
$dir = Get-Path-Prefix-With-Version $entry.FullName
|
||||
if ($dir -ne $null) {
|
||||
$path = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $dir)
|
||||
if (-Not (Test-Path $path -PathType Container)) {
|
||||
$ret += $dir
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
$ret = $ret | Sort-Object | Get-Unique
|
||||
|
||||
$values = ($ret | foreach { "$_" }) -join ";"
|
||||
Say-Verbose "Directories to unpack: $values"
|
||||
|
||||
return $ret
|
||||
}
|
||||
|
||||
# Example zip content and extraction algorithm:
|
||||
# Rule: files if extracted are always being extracted to the same relative path locally
|
||||
# .\
|
||||
# a.exe # file does not exist locally, extract
|
||||
# b.dll # file exists locally, override only if $OverrideFiles set
|
||||
# aaa\ # same rules as for files
|
||||
# ...
|
||||
# abc\1.0.0\ # directory contains version and exists locally
|
||||
# ... # do not extract content under versioned part
|
||||
# abc\asd\ # same rules as for files
|
||||
# ...
|
||||
# def\ghi\1.0.1\ # directory contains version and does not exist locally
|
||||
# ... # extract content
|
||||
function Extract-Dotnet-Package([string]$ZipPath, [string]$OutPath) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
Load-Assembly -Assembly System.IO.Compression.FileSystem
|
||||
Set-Variable -Name Zip
|
||||
try {
|
||||
$Zip = [System.IO.Compression.ZipFile]::OpenRead($ZipPath)
|
||||
|
||||
$DirectoriesToUnpack = Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package -Zip $Zip -OutPath $OutPath
|
||||
|
||||
foreach ($entry in $Zip.Entries) {
|
||||
$PathWithVersion = Get-Path-Prefix-With-Version $entry.FullName
|
||||
if (($PathWithVersion -eq $null) -Or ($DirectoriesToUnpack -contains $PathWithVersion)) {
|
||||
$DestinationPath = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $entry.FullName)
|
||||
$DestinationDir = Split-Path -Parent $DestinationPath
|
||||
$OverrideFiles=$OverrideNonVersionedFiles -Or (-Not (Test-Path $DestinationPath))
|
||||
if ((-Not $DestinationPath.EndsWith("\")) -And $OverrideFiles) {
|
||||
New-Item -ItemType Directory -Force -Path $DestinationDir | Out-Null
|
||||
[System.IO.Compression.ZipFileExtensions]::ExtractToFile($entry, $DestinationPath, $OverrideNonVersionedFiles)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
finally {
|
||||
if ($Zip -ne $null) {
|
||||
$Zip.Dispose()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function DownloadFile($Source, [string]$OutPath) {
|
||||
if ($Source -notlike "http*") {
|
||||
# Using System.IO.Path.GetFullPath to get the current directory
|
||||
# does not work in this context - $pwd gives the current directory
|
||||
if (![System.IO.Path]::IsPathRooted($Source)) {
|
||||
$Source = $(Join-Path -Path $pwd -ChildPath $Source)
|
||||
}
|
||||
$Source = Get-Absolute-Path $Source
|
||||
Say "Copying file from $Source to $OutPath"
|
||||
Copy-Item $Source $OutPath
|
||||
return
|
||||
}
|
||||
|
||||
$Stream = $null
|
||||
|
||||
try {
|
||||
$Response = GetHTTPResponse -Uri $Source
|
||||
$Stream = $Response.Content.ReadAsStreamAsync().Result
|
||||
$File = [System.IO.File]::Create($OutPath)
|
||||
$Stream.CopyTo($File)
|
||||
$File.Close()
|
||||
}
|
||||
finally {
|
||||
if ($Stream -ne $null) {
|
||||
$Stream.Dispose()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolderRelativePath) {
|
||||
$BinPath = Get-Absolute-Path $(Join-Path -Path $InstallRoot -ChildPath $BinFolderRelativePath)
|
||||
if (-Not $NoPath) {
|
||||
$SuffixedBinPath = "$BinPath;"
|
||||
if (-Not $env:path.Contains($SuffixedBinPath)) {
|
||||
Say "Adding to current process PATH: `"$BinPath`". Note: This change will not be visible if PowerShell was run as a child process."
|
||||
$env:path = $SuffixedBinPath + $env:path
|
||||
} else {
|
||||
Say-Verbose "Current process PATH already contains `"$BinPath`""
|
||||
}
|
||||
}
|
||||
else {
|
||||
Say "Binaries of dotnet can be found in $BinPath"
|
||||
}
|
||||
}
|
||||
|
||||
$CLIArchitecture = Get-CLIArchitecture-From-Architecture $Architecture
|
||||
$SpecificVersion = Get-Specific-Version-From-Version -AzureFeed $AzureFeed -Channel $Channel -Version $Version -JSonFile $JSonFile
|
||||
$DownloadLink, $EffectiveVersion = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
$LegacyDownloadLink = Get-LegacyDownload-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
|
||||
$InstallRoot = Resolve-Installation-Path $InstallDir
|
||||
Say-Verbose "InstallRoot: $InstallRoot"
|
||||
$ScriptName = $MyInvocation.MyCommand.Name
|
||||
|
||||
if ($DryRun) {
|
||||
Say "Payload URLs:"
|
||||
Say "Primary named payload URL: $DownloadLink"
|
||||
if ($LegacyDownloadLink) {
|
||||
Say "Legacy named payload URL: $LegacyDownloadLink"
|
||||
}
|
||||
$RepeatableCommand = ".\$ScriptName -Version `"$SpecificVersion`" -InstallDir `"$InstallRoot`" -Architecture `"$CLIArchitecture`""
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$RepeatableCommand+=" -Runtime `"dotnet`""
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$RepeatableCommand+=" -Runtime `"aspnetcore`""
|
||||
}
|
||||
foreach ($key in $MyInvocation.BoundParameters.Keys) {
|
||||
if (-not (@("Architecture","Channel","DryRun","InstallDir","Runtime","SharedRuntime","Version") -contains $key)) {
|
||||
$RepeatableCommand+=" -$key `"$($MyInvocation.BoundParameters[$key])`""
|
||||
}
|
||||
}
|
||||
Say "Repeatable invocation: $RepeatableCommand"
|
||||
exit 0
|
||||
}
|
||||
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$assetName = ".NET Core Runtime"
|
||||
$dotnetPackageRelativePath = "shared\Microsoft.NETCore.App"
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$assetName = "ASP.NET Core Runtime"
|
||||
$dotnetPackageRelativePath = "shared\Microsoft.AspNetCore.App"
|
||||
}
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
$assetName = ".NET Core Windows Desktop Runtime"
|
||||
$dotnetPackageRelativePath = "shared\Microsoft.WindowsDesktop.App"
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$assetName = ".NET Core SDK"
|
||||
$dotnetPackageRelativePath = "sdk"
|
||||
}
|
||||
else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
}
|
||||
|
||||
if ($SpecificVersion -ne $EffectiveVersion)
|
||||
{
|
||||
Say "Performing installation checks for effective version: $EffectiveVersion"
|
||||
$SpecificVersion = $EffectiveVersion
|
||||
}
|
||||
|
||||
# Check if the SDK version is already installed.
|
||||
$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
|
||||
if ($isAssetInstalled) {
|
||||
Say "$assetName version $SpecificVersion is already installed."
|
||||
Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
|
||||
exit 0
|
||||
}
|
||||
|
||||
New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null
|
||||
|
||||
$installDrive = $((Get-Item $InstallRoot).PSDrive.Name);
|
||||
$diskInfo = Get-PSDrive -Name $installDrive
|
||||
if ($diskInfo.Free / 1MB -le 100) {
|
||||
Say "There is not enough disk space on drive ${installDrive}:"
|
||||
exit 0
|
||||
}
|
||||
|
||||
$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
|
||||
Say-Verbose "Zip path: $ZipPath"
|
||||
|
||||
$DownloadFailed = $false
|
||||
Say "Downloading link: $DownloadLink"
|
||||
try {
|
||||
DownloadFile -Source $DownloadLink -OutPath $ZipPath
|
||||
}
|
||||
catch {
|
||||
Say "Cannot download: $DownloadLink"
|
||||
if ($LegacyDownloadLink) {
|
||||
$DownloadLink = $LegacyDownloadLink
|
||||
$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
|
||||
Say-Verbose "Legacy zip path: $ZipPath"
|
||||
Say "Downloading legacy link: $DownloadLink"
|
||||
try {
|
||||
DownloadFile -Source $DownloadLink -OutPath $ZipPath
|
||||
}
|
||||
catch {
|
||||
Say "Cannot download: $DownloadLink"
|
||||
$DownloadFailed = $true
|
||||
}
|
||||
}
|
||||
else {
|
||||
$DownloadFailed = $true
|
||||
}
|
||||
}
|
||||
|
||||
if ($DownloadFailed) {
|
||||
throw "Could not find/download: `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
|
||||
}
|
||||
|
||||
Say "Extracting zip from $DownloadLink"
|
||||
Extract-Dotnet-Package -ZipPath $ZipPath -OutPath $InstallRoot
|
||||
|
||||
# Check if the SDK version is installed; if not, fail the installation.
|
||||
$isAssetInstalled = $false
|
||||
|
||||
# if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
|
||||
if ($SpecificVersion -Match "rtm" -or $SpecificVersion -Match "servicing") {
|
||||
$ReleaseVersion = $SpecificVersion.Split("-")[0]
|
||||
Say-Verbose "Checking installation: version = $ReleaseVersion"
|
||||
$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $ReleaseVersion
|
||||
}
|
||||
|
||||
# Check if the SDK version is installed.
|
||||
if (!$isAssetInstalled) {
|
||||
Say-Verbose "Checking installation: version = $SpecificVersion"
|
||||
$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
|
||||
}
|
||||
|
||||
if (!$isAssetInstalled) {
|
||||
throw "`"$assetName`" with version = $SpecificVersion failed to install with an unknown error."
|
||||
}
|
||||
|
||||
Remove-Item $ZipPath
|
||||
|
||||
Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
|
||||
|
||||
Say "Installation finished"
|
||||
exit 0
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -0,0 +1,2 @@
|
|||
@echo off
|
||||
powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet-install.ps1""" %*"
|
|
@ -0,0 +1,28 @@
|
|||
[CmdletBinding(PositionalBinding=$false)]
|
||||
Param(
|
||||
[string] $verbosity = 'minimal',
|
||||
[string] $architecture = '',
|
||||
[string] $version = 'Latest',
|
||||
[string] $runtime = 'dotnet',
|
||||
[string] $RuntimeSourceFeed = '',
|
||||
[string] $RuntimeSourceFeedKey = ''
|
||||
)
|
||||
|
||||
. $PSScriptRoot\tools.ps1
|
||||
|
||||
$dotnetRoot = Join-Path $RepoRoot '.dotnet'
|
||||
|
||||
$installdir = $dotnetRoot
|
||||
try {
|
||||
if ($architecture -and $architecture.Trim() -eq 'x86') {
|
||||
$installdir = Join-Path $installdir 'x86'
|
||||
}
|
||||
InstallDotNet $installdir $version $architecture $runtime $true -RuntimeSourceFeed $RuntimeSourceFeed -RuntimeSourceFeedKey $RuntimeSourceFeedKey
|
||||
}
|
||||
catch {
|
||||
Write-Host $_.ScriptStackTrace
|
||||
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
ExitWithExitCode 0
|
|
@ -0,0 +1,89 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source="${BASH_SOURCE[0]}"
|
||||
# resolve $source until the file is no longer a symlink
|
||||
while [[ -h "$source" ]]; do
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
source="$(readlink "$source")"
|
||||
# if $source was a relative symlink, we need to resolve it relative to the path where the
|
||||
# symlink file was located
|
||||
[[ $source != /* ]] && source="$scriptroot/$source"
|
||||
done
|
||||
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
|
||||
|
||||
. "$scriptroot/tools.sh"
|
||||
|
||||
version='Latest'
|
||||
architecture=''
|
||||
runtime='dotnet'
|
||||
runtimeSourceFeed=''
|
||||
runtimeSourceFeedKey=''
|
||||
while [[ $# > 0 ]]; do
|
||||
opt="$(echo "$1" | awk '{print tolower($0)}')"
|
||||
case "$opt" in
|
||||
-version|-v)
|
||||
shift
|
||||
version="$1"
|
||||
;;
|
||||
-architecture|-a)
|
||||
shift
|
||||
architecture="$1"
|
||||
;;
|
||||
-runtime|-r)
|
||||
shift
|
||||
runtime="$1"
|
||||
;;
|
||||
-runtimesourcefeed)
|
||||
shift
|
||||
runtimeSourceFeed="$1"
|
||||
;;
|
||||
-runtimesourcefeedkey)
|
||||
shift
|
||||
runtimeSourceFeedKey="$1"
|
||||
;;
|
||||
*)
|
||||
Write-PipelineTelemetryError -Category 'Build' -Message "Invalid argument: $1"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
# Use uname to determine what the CPU is.
|
||||
cpuname=$(uname -p)
|
||||
# Some Linux platforms report unknown for platform, but the arch for machine.
|
||||
if [[ "$cpuname" == "unknown" ]]; then
|
||||
cpuname=$(uname -m)
|
||||
fi
|
||||
|
||||
case $cpuname in
|
||||
aarch64)
|
||||
buildarch=arm64
|
||||
;;
|
||||
amd64|x86_64)
|
||||
buildarch=x64
|
||||
;;
|
||||
armv*l)
|
||||
buildarch=arm
|
||||
;;
|
||||
i686)
|
||||
buildarch=x86
|
||||
;;
|
||||
*)
|
||||
echo "Unknown CPU $cpuname detected, treating it as x64"
|
||||
buildarch=x64
|
||||
;;
|
||||
esac
|
||||
|
||||
dotnetRoot="$repo_root/.dotnet"
|
||||
if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then
|
||||
dotnetRoot="$dotnetRoot/$architecture"
|
||||
fi
|
||||
|
||||
InstallDotNet $dotnetRoot $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || {
|
||||
local exit_code=$?
|
||||
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2
|
||||
ExitWithExitCode $exit_code
|
||||
}
|
||||
|
||||
ExitWithExitCode 0
|
|
@ -0,0 +1,13 @@
|
|||
param(
|
||||
[string] $token
|
||||
)
|
||||
|
||||
|
||||
. $PSScriptRoot\pipeline-logging-functions.ps1
|
||||
|
||||
# Write-PipelineSetVariable will no-op if a variable named $ci is not defined
|
||||
# Since this script is only ever called in AzDO builds, just universally set it
|
||||
$ci = $true
|
||||
|
||||
Write-PipelineSetVariable -Name 'VSS_NUGET_ACCESSTOKEN' -Value $token -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'VSS_NUGET_URI_PREFIXES' -Value 'https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/' -IsMultiJobVariable $false
|
|
@ -0,0 +1,86 @@
|
|||
Param(
|
||||
[Parameter(Mandatory=$true)][string] $barToken, # Token generated at https://maestro-prod.westus2.cloudapp.azure.com/Account/Tokens
|
||||
[Parameter(Mandatory=$true)][string] $gitHubPat, # GitHub personal access token from https://github.com/settings/tokens (no auth scopes needed)
|
||||
[Parameter(Mandatory=$true)][string] $azdoPat, # Azure Dev Ops tokens from https://dev.azure.com/dnceng/_details/security/tokens (code read scope needed)
|
||||
[Parameter(Mandatory=$true)][string] $outputFolder, # Where the graphviz.txt file will be created
|
||||
[string] $darcVersion, # darc's version
|
||||
[string] $graphvizVersion = '2.38', # GraphViz version
|
||||
[switch] $includeToolset # Whether the graph should include toolset dependencies or not. i.e. arcade, optimization. For more about
|
||||
# toolset dependencies see https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#toolset-vs-product-dependencies
|
||||
)
|
||||
|
||||
function CheckExitCode ([string]$stage)
|
||||
{
|
||||
$exitCode = $LASTEXITCODE
|
||||
if ($exitCode -ne 0) {
|
||||
Write-PipelineTelemetryError -Category 'Arcade' -Message "Something failed in stage: '$stage'. Check for errors above. Exiting now..."
|
||||
ExitWithExitCode $exitCode
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
$ErrorActionPreference = 'Stop'
|
||||
. $PSScriptRoot\tools.ps1
|
||||
|
||||
Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1')
|
||||
|
||||
Push-Location $PSScriptRoot
|
||||
|
||||
Write-Host 'Installing darc...'
|
||||
. .\darc-init.ps1 -darcVersion $darcVersion
|
||||
CheckExitCode 'Running darc-init'
|
||||
|
||||
$engCommonBaseDir = Join-Path $PSScriptRoot 'native\'
|
||||
$graphvizInstallDir = CommonLibrary\Get-NativeInstallDirectory
|
||||
$nativeToolBaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external'
|
||||
$installBin = Join-Path $graphvizInstallDir 'bin'
|
||||
|
||||
Write-Host 'Installing dot...'
|
||||
.\native\install-tool.ps1 -ToolName graphviz -InstallPath $installBin -BaseUri $nativeToolBaseUri -CommonLibraryDirectory $engCommonBaseDir -Version $graphvizVersion -Verbose
|
||||
|
||||
$darcExe = "$env:USERPROFILE\.dotnet\tools"
|
||||
$darcExe = Resolve-Path "$darcExe\darc.exe"
|
||||
|
||||
Create-Directory $outputFolder
|
||||
|
||||
# Generate 3 graph descriptions:
|
||||
# 1. Flat with coherency information
|
||||
# 2. Graphviz (dot) file
|
||||
# 3. Standard dependency graph
|
||||
$graphVizFilePath = "$outputFolder\graphviz.txt"
|
||||
$graphVizImageFilePath = "$outputFolder\graph.png"
|
||||
$normalGraphFilePath = "$outputFolder\graph-full.txt"
|
||||
$flatGraphFilePath = "$outputFolder\graph-flat.txt"
|
||||
$baseOptions = @( '--github-pat', "$gitHubPat", '--azdev-pat', "$azdoPat", '--password', "$barToken" )
|
||||
|
||||
if ($includeToolset) {
|
||||
Write-Host 'Toolsets will be included in the graph...'
|
||||
$baseOptions += @( '--include-toolset' )
|
||||
}
|
||||
|
||||
Write-Host 'Generating standard dependency graph...'
|
||||
& "$darcExe" get-dependency-graph @baseOptions --output-file $normalGraphFilePath
|
||||
CheckExitCode 'Generating normal dependency graph'
|
||||
|
||||
Write-Host 'Generating flat dependency graph and graphviz file...'
|
||||
& "$darcExe" get-dependency-graph @baseOptions --flat --coherency --graphviz $graphVizFilePath --output-file $flatGraphFilePath
|
||||
CheckExitCode 'Generating flat and graphviz dependency graph'
|
||||
|
||||
Write-Host "Generating graph image $graphVizFilePath"
|
||||
$dotFilePath = Join-Path $installBin "graphviz\$graphvizVersion\release\bin\dot.exe"
|
||||
& "$dotFilePath" -Tpng -o"$graphVizImageFilePath" "$graphVizFilePath"
|
||||
CheckExitCode 'Generating graphviz image'
|
||||
|
||||
Write-Host "'$graphVizFilePath', '$flatGraphFilePath', '$normalGraphFilePath' and '$graphVizImageFilePath' created!"
|
||||
}
|
||||
catch {
|
||||
if (!$includeToolset) {
|
||||
Write-Host 'This might be a toolset repo which includes only toolset dependencies. ' -NoNewline -ForegroundColor Yellow
|
||||
Write-Host 'Since -includeToolset is not set there is no graph to create. Include -includeToolset and try again...' -ForegroundColor Yellow
|
||||
}
|
||||
Write-Host $_.ScriptStackTrace
|
||||
Write-PipelineTelemetryError -Category 'Arcade' -Message $_
|
||||
ExitWithExitCode 1
|
||||
} finally {
|
||||
Pop-Location
|
||||
}
|
|
@ -0,0 +1,26 @@
|
|||
<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">
|
||||
|
||||
<PropertyGroup>
|
||||
<Language>msbuild</Language>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
|
||||
<PayloadDirectory>%(Identity)</PayloadDirectory>
|
||||
</HelixCorrelationPayload>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<HelixWorkItem Include="WorkItem" Condition="'$(WorkItemDirectory)' != ''">
|
||||
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
|
||||
<Command>$(WorkItemCommand)</Command>
|
||||
<Timeout Condition="'$(WorkItemTimeout)' != ''">$(WorkItemTimeout)</Timeout>
|
||||
</HelixWorkItem>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<XUnitProject Include="$(XUnitProjects.Split(';'))">
|
||||
<Arguments />
|
||||
</XUnitProject>
|
||||
</ItemGroup>
|
||||
</Project>
|
|
@ -0,0 +1,3 @@
|
|||
@echo off
|
||||
powershell -NoProfile -NoLogo -ExecutionPolicy ByPass -command "& """%~dp0init-tools-native.ps1""" %*"
|
||||
exit /b %ErrorLevel%
|
|
@ -0,0 +1,152 @@
|
|||
<#
|
||||
.SYNOPSIS
|
||||
Entry point script for installing native tools
|
||||
|
||||
.DESCRIPTION
|
||||
Reads $RepoRoot\global.json file to determine native assets to install
|
||||
and executes installers for those tools
|
||||
|
||||
.PARAMETER BaseUri
|
||||
Base file directory or Url from which to acquire tool archives
|
||||
|
||||
.PARAMETER InstallDirectory
|
||||
Directory to install native toolset. This is a command-line override for the default
|
||||
Install directory precedence order:
|
||||
- InstallDirectory command-line override
|
||||
- NETCOREENG_INSTALL_DIRECTORY environment variable
|
||||
- (default) %USERPROFILE%/.netcoreeng/native
|
||||
|
||||
.PARAMETER Clean
|
||||
Switch specifying to not install anything, but cleanup native asset folders
|
||||
|
||||
.PARAMETER Force
|
||||
Clean and then install tools
|
||||
|
||||
.PARAMETER DownloadRetries
|
||||
Total number of retry attempts
|
||||
|
||||
.PARAMETER RetryWaitTimeInSeconds
|
||||
Wait time between retry attempts in seconds
|
||||
|
||||
.PARAMETER GlobalJsonFile
|
||||
File path to global.json file
|
||||
|
||||
.NOTES
|
||||
#>
|
||||
[CmdletBinding(PositionalBinding=$false)]
|
||||
Param (
|
||||
[string] $BaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external',
|
||||
[string] $InstallDirectory,
|
||||
[switch] $Clean = $False,
|
||||
[switch] $Force = $False,
|
||||
[int] $DownloadRetries = 5,
|
||||
[int] $RetryWaitTimeInSeconds = 30,
|
||||
[string] $GlobalJsonFile
|
||||
)
|
||||
|
||||
if (!$GlobalJsonFile) {
|
||||
$GlobalJsonFile = Join-Path (Get-Item $PSScriptRoot).Parent.Parent.FullName 'global.json'
|
||||
}
|
||||
|
||||
Set-StrictMode -version 2.0
|
||||
$ErrorActionPreference='Stop'
|
||||
|
||||
. $PSScriptRoot\pipeline-logging-functions.ps1
|
||||
Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1')
|
||||
|
||||
try {
|
||||
# Define verbose switch if undefined
|
||||
$Verbose = $VerbosePreference -Eq 'Continue'
|
||||
|
||||
$EngCommonBaseDir = Join-Path $PSScriptRoot 'native\'
|
||||
$NativeBaseDir = $InstallDirectory
|
||||
if (!$NativeBaseDir) {
|
||||
$NativeBaseDir = CommonLibrary\Get-NativeInstallDirectory
|
||||
}
|
||||
$Env:CommonLibrary_NativeInstallDir = $NativeBaseDir
|
||||
$InstallBin = Join-Path $NativeBaseDir 'bin'
|
||||
$InstallerPath = Join-Path $EngCommonBaseDir 'install-tool.ps1'
|
||||
|
||||
# Process tools list
|
||||
Write-Host "Processing $GlobalJsonFile"
|
||||
If (-Not (Test-Path $GlobalJsonFile)) {
|
||||
Write-Host "Unable to find '$GlobalJsonFile'"
|
||||
exit 0
|
||||
}
|
||||
$NativeTools = Get-Content($GlobalJsonFile) -Raw |
|
||||
ConvertFrom-Json |
|
||||
Select-Object -Expand 'native-tools' -ErrorAction SilentlyContinue
|
||||
if ($NativeTools) {
|
||||
$NativeTools.PSObject.Properties | ForEach-Object {
|
||||
$ToolName = $_.Name
|
||||
$ToolVersion = $_.Value
|
||||
$LocalInstallerArguments = @{ ToolName = "$ToolName" }
|
||||
$LocalInstallerArguments += @{ InstallPath = "$InstallBin" }
|
||||
$LocalInstallerArguments += @{ BaseUri = "$BaseUri" }
|
||||
$LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" }
|
||||
$LocalInstallerArguments += @{ Version = "$ToolVersion" }
|
||||
|
||||
if ($Verbose) {
|
||||
$LocalInstallerArguments += @{ Verbose = $True }
|
||||
}
|
||||
if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') {
|
||||
if($Force) {
|
||||
$LocalInstallerArguments += @{ Force = $True }
|
||||
}
|
||||
}
|
||||
if ($Clean) {
|
||||
$LocalInstallerArguments += @{ Clean = $True }
|
||||
}
|
||||
|
||||
Write-Verbose "Installing $ToolName version $ToolVersion"
|
||||
Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'"
|
||||
& $InstallerPath @LocalInstallerArguments
|
||||
if ($LASTEXITCODE -Ne "0") {
|
||||
$errMsg = "$ToolName installation failed"
|
||||
if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) {
|
||||
$showNativeToolsWarning = $true
|
||||
if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) {
|
||||
$showNativeToolsWarning = $false
|
||||
}
|
||||
if ($showNativeToolsWarning) {
|
||||
Write-Warning $errMsg
|
||||
}
|
||||
$toolInstallationFailure = $true
|
||||
} else {
|
||||
# We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
|
||||
Write-Host $errMsg
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) {
|
||||
# We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
|
||||
Write-Host 'Native tools bootstrap failed'
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
else {
|
||||
Write-Host 'No native tools defined in global.json'
|
||||
exit 0
|
||||
}
|
||||
|
||||
if ($Clean) {
|
||||
exit 0
|
||||
}
|
||||
if (Test-Path $InstallBin) {
|
||||
Write-Host 'Native tools are available from ' (Convert-Path -Path $InstallBin)
|
||||
Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)"
|
||||
return $InstallBin
|
||||
}
|
||||
else {
|
||||
Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message 'Native tools install directory does not exist, installation failed'
|
||||
exit 1
|
||||
}
|
||||
exit 0
|
||||
}
|
||||
catch {
|
||||
Write-Host $_.ScriptStackTrace
|
||||
Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message $_
|
||||
ExitWithExitCode 1
|
||||
}
|
|
@@ -0,0 +1,173 @@
#!/usr/bin/env bash

source="${BASH_SOURCE[0]}"
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

base_uri='https://netcorenativeassets.blob.core.windows.net/resource-packages/external'
install_directory=''
clean=false
force=false
download_retries=5
retry_wait_time_seconds=30
global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json"
declare -A native_assets

. $scriptroot/pipeline-logging-functions.sh
. $scriptroot/native/common-library.sh

while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
case $lowerI in
--baseuri)
base_uri=$2
shift 2
;;
--installdirectory)
install_directory=$2
shift 2
;;
--clean)
clean=true
shift 1
;;
--force)
force=true
shift 1
;;
--donotabortonfailure)
donotabortonfailure=true
shift 1
;;
--donotdisplaywarnings)
donotdisplaywarnings=true
shift 1
;;
--downloadretries)
download_retries=$2
shift 2
;;
--retrywaittimeseconds)
retry_wait_time_seconds=$2
shift 2
;;
--help)
echo "Common settings:"
echo " --installdirectory Directory to install native toolset."
echo " This is a command-line override for the default"
echo " Install directory precedence order:"
echo " - InstallDirectory command-line override"
echo " - NETCOREENG_INSTALL_DIRECTORY environment variable"
echo " - (default) %USERPROFILE%/.netcoreeng/native"
echo ""
echo " --clean Switch specifying not to install anything, but clean up native asset folders"
echo " --donotabortonfailure Switch specifying whether to abort native tools installation on failure"
echo " --donotdisplaywarnings Switch specifying whether to display warnings during native tools installation on failure"
echo " --force Clean and then install tools"
echo " --help Print help and exit"
echo ""
echo "Advanced settings:"
echo " --baseuri <value> Base URI for where to download native tools from"
echo " --downloadretries <value> Number of times a download should be attempted"
echo " --retrywaittimeseconds <value> Wait time between download attempts"
echo ""
exit 0
;;
esac
done

function ReadGlobalJsonNativeTools {
# Get the native-tools section from the global.json.
local native_tools_section=$(cat $global_json_file | awk '/"native-tools"/,/}/')
# Only extract the contents of the object.
local native_tools_list=$(echo $native_tools_section | awk -F"[{}]" '{print $2}')
native_tools_list=${native_tools_list//[\" ]/}
native_tools_list=$( echo "$native_tools_list" | sed 's/\s//g' | sed 's/,/\n/g' )

local old_IFS=$IFS
while read -r line; do
# Lines are of the form: 'tool:version'
IFS=:
while read -r key value; do
native_assets[$key]=$value
done <<< "$line"
done <<< "$native_tools_list"
IFS=$old_IFS

return 0;
}

native_base_dir=$install_directory
if [[ -z $install_directory ]]; then
native_base_dir=$(GetNativeInstallDirectory)
fi

install_bin="${native_base_dir}/bin"
installed_any=false

ReadGlobalJsonNativeTools

if [[ ${#native_assets[@]} -eq 0 ]]; then
echo "No native tools defined in global.json"
exit 0;
else
native_installer_dir="$scriptroot/native"
for tool in "${!native_assets[@]}"
do
tool_version=${native_assets[$tool]}
installer_path="$native_installer_dir/install-$tool.sh"
installer_command="$installer_path"
installer_command+=" --baseuri $base_uri"
installer_command+=" --installpath $install_bin"
installer_command+=" --version $tool_version"
echo $installer_command

if [[ $force = true ]]; then
installer_command+=" --force"
fi

if [[ $clean = true ]]; then
installer_command+=" --clean"
fi

if [[ -a $installer_path ]]; then
$installer_command
if [[ $? != 0 ]]; then
if [[ $donotabortonfailure = true ]]; then
if [[ $donotdisplaywarnings != true ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed"
fi
else
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed"
exit 1
fi
else
installed_any=true
fi
else
if [[ $donotabortonfailure == true ]]; then
if [[ $donotdisplaywarnings != true ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script"
fi
else
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script"
exit 1
fi
fi
done
fi

if [[ $clean = true ]]; then
exit 0
fi

if [[ -d $install_bin ]]; then
echo "Native tools are available from $install_bin"
echo "##vso[task.prependpath]$install_bin"
else
if [[ $installed_any = true ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Native tools install directory does not exist, installation failed"
exit 1
fi
fi

exit 0

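For reference, ReadGlobalJsonNativeTools above expects a "native-tools" object in global.json that maps tool names to versions; the loop then dispatches to the matching install-<tool>.sh. A minimal sketch of that section (the cmake version shown is illustrative, not taken from this change):

    {
      "native-tools": {
        "cmake": "3.16.4"
      }
    }
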
@@ -0,0 +1,134 @@
param(
[Parameter(Mandatory=$true)][string] $Operation,
[string] $AuthToken,
[string] $CommitSha,
[string] $RepoName,
[switch] $IsFeedPrivate
)

$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0
. $PSScriptRoot\tools.ps1

# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables. This should ONLY be called from identified
# internal builds
function SetupCredProvider {
param(
[string] $AuthToken
)

# Install the Cred Provider NuGet plugin
Write-Host 'Setting up Cred Provider NuGet plugin in the agent...'
Write-Host "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..."

$url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1'

Write-Host "Writing the contents of 'installcredprovider.ps1' locally..."
Invoke-WebRequest $url -OutFile installcredprovider.ps1

Write-Host 'Installing plugin...'
.\installcredprovider.ps1 -Force

Write-Host "Deleting local copy of 'installcredprovider.ps1'..."
Remove-Item .\installcredprovider.ps1

if (-Not (Test-Path "$env:USERPROFILE\.nuget\plugins\netcore")) {
Write-PipelineTelemetryError -Category 'Arcade' -Message 'CredProvider plugin was not installed correctly!'
ExitWithExitCode 1
}
else {
Write-Host 'CredProvider plugin was installed correctly!'
}

# Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
# feeds successfully

$nugetConfigPath = "$RepoRoot\NuGet.config"

if (-Not (Test-Path -Path $nugetConfigPath)) {
Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!'
ExitWithExitCode 1
}

$endpoints = New-Object System.Collections.ArrayList
$nugetConfigPackageSources = Select-Xml -Path $nugetConfigPath -XPath "//packageSources/add[contains(@key, 'darc-int-')]/@value" | foreach{$_.Node.Value}

if (($nugetConfigPackageSources | Measure-Object).Count -gt 0 ) {
foreach ($stableRestoreResource in $nugetConfigPackageSources) {
$trimmedResource = ([string]$stableRestoreResource).Trim()
[void]$endpoints.Add(@{endpoint="$trimmedResource"; password="$AuthToken"})
}
}

if (($endpoints | Measure-Object).Count -gt 0) {
# [SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Endpoint code example with no real credentials.")]
# Create the JSON object. It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}'
$endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress

# Create the environment variables the AzDo way
Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $endpointCredentials -Properties @{
'variable' = 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS'
'issecret' = 'false'
}

# We don't want sessions cached since we will be updating the endpoints quite frequently
Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data 'False' -Properties @{
'variable' = 'NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED'
'issecret' = 'false'
}
}
else
{
Write-Host 'No internal endpoints found in NuGet.config'
}
}

# Workaround for https://github.com/microsoft/msbuild/issues/4430
function InstallDotNetSdkAndRestoreArcade {
$dotnetTempDir = "$RepoRoot\dotnet"
$dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
$dotnet = "$dotnetTempDir\dotnet.exe"
$restoreProjPath = "$PSScriptRoot\restore.proj"

Write-Host "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..."
InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion"

'<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' | Out-File "$restoreProjPath"

& $dotnet restore $restoreProjPath

Write-Host 'Arcade SDK restored!'

if (Test-Path -Path $restoreProjPath) {
Remove-Item $restoreProjPath
}

if (Test-Path -Path $dotnetTempDir) {
Remove-Item $dotnetTempDir -Recurse
}
}

try {
Push-Location $PSScriptRoot

if ($Operation -like 'setup') {
SetupCredProvider $AuthToken
}
elseif ($Operation -like 'install-restore') {
InstallDotNetSdkAndRestoreArcade
}
else {
Write-PipelineTelemetryError -Category 'Arcade' -Message "Unknown operation '$Operation'!"
ExitWithExitCode 1
}
}
catch {
Write-Host $_.ScriptStackTrace
Write-PipelineTelemetryError -Category 'Arcade' -Message $_
ExitWithExitCode 1
}
finally {
Pop-Location
}

@@ -0,0 +1,143 @@
#!/usr/bin/env bash

set -e

# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables.
# This should ONLY be called from identified internal builds
function SetupCredProvider {
local authToken=$1

# Install the Cred Provider NuGet plugin
echo "Setting up Cred Provider NuGet plugin in the agent..."
echo "Getting 'installcredprovider.sh' from 'https://github.com/microsoft/artifacts-credprovider'..."

local url="https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.sh"

echo "Writing the contents of 'installcredprovider.sh' locally..."
local installcredproviderPath="installcredprovider.sh"
if command -v curl > /dev/null; then
curl $url > "$installcredproviderPath"
else
wget -q -O "$installcredproviderPath" "$url"
fi

echo "Installing plugin..."
. "$installcredproviderPath"

echo "Deleting local copy of 'installcredprovider.sh'..."
rm installcredprovider.sh

if [ ! -d "$HOME/.nuget/plugins" ]; then
Write-PipelineTelemetryError -category 'Build' 'CredProvider plugin was not installed correctly!'
ExitWithExitCode 1
else
echo "CredProvider plugin was installed correctly!"
fi

# Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
# feeds successfully

local nugetConfigPath="$repo_root/NuGet.config"

if [ ! -f "$nugetConfigPath" ]; then
Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!"
ExitWithExitCode 1
fi

local endpoints='['
local nugetConfigPackageValues=`cat "$nugetConfigPath" | grep "key=\"darc-int-"`
local pattern="value=\"(.*)\""

for value in $nugetConfigPackageValues
do
if [[ $value =~ $pattern ]]; then
local endpoint="${BASH_REMATCH[1]}"
endpoints+="{\"endpoint\": \"$endpoint\", \"password\": \"$authToken\"},"
fi
done

endpoints=${endpoints%?}
endpoints+=']'

if [ ${#endpoints} -gt 2 ]; then
# [SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Endpoint code example with no real credentials.")]
# Create the JSON object. It should look like '{"endpointCredentials": [{"endpoint":"http://example.index.json", "username":"optional", "password":"accesstoken"}]}'
local endpointCredentials="{\"endpointCredentials\": "$endpoints"}"

echo "##vso[task.setvariable variable=VSS_NUGET_EXTERNAL_FEED_ENDPOINTS]$endpointCredentials"
echo "##vso[task.setvariable variable=NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED]False"
else
echo "No internal endpoints found in NuGet.config"
fi
}

# Workaround for https://github.com/microsoft/msbuild/issues/4430
function InstallDotNetSdkAndRestoreArcade {
local dotnetTempDir="$repo_root/dotnet"
local dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
local restoreProjPath="$repo_root/eng/common/restore.proj"

echo "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..."
echo "<Project Sdk=\"Microsoft.DotNet.Arcade.Sdk\"/>" > "$restoreProjPath"

InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion"

local res=`$dotnetTempDir/dotnet restore $restoreProjPath`
echo "Arcade SDK restored!"

# Cleanup
if [ -f "$restoreProjPath" ]; then
rm "$restoreProjPath"
fi

if [ -d "$dotnetTempDir" ]; then
rm -r $dotnetTempDir
fi
}

source="${BASH_SOURCE[0]}"
operation=''
authToken=''
repoName=''

while [[ $# > 0 ]]; do
opt="$(echo "$1" | awk '{print tolower($0)}')"
case "$opt" in
--operation)
operation=$2
shift
;;
--authtoken)
authToken=$2
shift
;;
*)
echo "Invalid argument: $1"
usage
exit 1
;;
esac

shift
done

while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. "$scriptroot/tools.sh"

if [ "$operation" = "setup" ]; then
SetupCredProvider $authToken
elif [ "$operation" = "install-restore" ]; then
InstallDotNetSdkAndRestoreArcade
else
echo "Unknown operation '$operation'!"
fi

@@ -0,0 +1,4 @@
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. -->
<Project>
<Import Project="Sdk.props" Sdk="Microsoft.DotNet.Arcade.Sdk" />
</Project>

@@ -0,0 +1,28 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net472</TargetFramework>
<ImportDirectoryBuildTargets>false</ImportDirectoryBuildTargets>
<AutomaticallyUseReferenceAssemblyPackages>false</AutomaticallyUseReferenceAssemblyPackages>
</PropertyGroup>
<ItemGroup>
<!-- Clear references, the SDK may add some depending on UsingToolXxx settings, but we only want to restore the following -->
<PackageReference Remove="@(PackageReference)"/>
<PackageReference Include="Microsoft.DotNet.IBCMerge" Version="$(MicrosoftDotNetIBCMergeVersion)" Condition="'$(UsingToolIbcOptimization)' == 'true'" />
<PackageReference Include="Drop.App" Version="$(DropAppVersion)" ExcludeAssets="all" Condition="'$(UsingToolVisualStudioIbcTraining)' == 'true'"/>
</ItemGroup>
<PropertyGroup>
<RestoreSources></RestoreSources>
<RestoreSources Condition="'$(UsingToolIbcOptimization)' == 'true'">
https://devdiv.pkgs.visualstudio.com/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json;
</RestoreSources>
<RestoreSources Condition="'$(UsingToolVisualStudioIbcTraining)' == 'true'">
$(RestoreSources);
https://devdiv.pkgs.visualstudio.com/_packaging/VS/nuget/v3/index.json;
</RestoreSources>
</PropertyGroup>

<!-- Repository extensibility point -->
<Import Project="$(RepositoryEngineeringDir)InternalTools.props" Condition="Exists('$(RepositoryEngineeringDir)InternalTools.props')" />
</Project>

@@ -0,0 +1,26 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
[string] $verbosity = 'minimal',
[bool] $warnAsError = $true,
[bool] $nodeReuse = $true,
[switch] $ci,
[switch] $prepareMachine,
[Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs
)

. $PSScriptRoot\tools.ps1

try {
if ($ci) {
$nodeReuse = $false
}

MSBuild @extraArgs
}
catch {
Write-Host $_.ScriptStackTrace
Write-PipelineTelemetryError -Category 'Build' -Message $_
ExitWithExitCode 1
}

ExitWithExitCode 0

@@ -0,0 +1,58 @@
#!/usr/bin/env bash

source="${BASH_SOURCE[0]}"

# resolve $source until the file is no longer a symlink
while [[ -h "$source" ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"
# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

verbosity='minimal'
warn_as_error=true
node_reuse=true
prepare_machine=false
extra_args=''

while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
case $lowerI in
--verbosity)
verbosity=$2
shift 2
;;
--warnaserror)
warn_as_error=$2
shift 2
;;
--nodereuse)
node_reuse=$2
shift 2
;;
--ci)
ci=true
shift 1
;;
--preparemachine)
prepare_machine=true
shift 1
;;
*)
extra_args="$extra_args $1"
shift 1
;;
esac
done

. "$scriptroot/tools.sh"

if [[ "$ci" == true ]]; then
node_reuse=false
fi

MSBuild $extra_args
ExitWithExitCode 0

@@ -0,0 +1,399 @@
<#
.SYNOPSIS
Helper module to install an archive to a directory

.DESCRIPTION
Helper module to download and extract an archive to a specified directory

.PARAMETER Uri
Uri of artifact to download

.PARAMETER InstallDirectory
Directory to extract artifact contents to

.PARAMETER Force
Force download / extraction if file or contents already exist. Default = False

.PARAMETER DownloadRetries
Total number of retry attempts. Default = 5

.PARAMETER RetryWaitTimeInSeconds
Wait time between retry attempts in seconds. Default = 30

.NOTES
Returns False if download or extraction fails, True otherwise
#>
function DownloadAndExtract {
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $Uri,
[Parameter(Mandatory=$True)]
[string] $InstallDirectory,
[switch] $Force = $False,
[int] $DownloadRetries = 5,
[int] $RetryWaitTimeInSeconds = 30
)
# Define verbose switch if undefined
$Verbose = $VerbosePreference -Eq "Continue"

$TempToolPath = CommonLibrary\Get-TempPathFilename -Path $Uri

# Download native tool
$DownloadStatus = CommonLibrary\Get-File -Uri $Uri `
-Path $TempToolPath `
-DownloadRetries $DownloadRetries `
-RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
-Force:$Force `
-Verbose:$Verbose

if ($DownloadStatus -Eq $False) {
Write-Error "Download failed"
return $False
}

# Extract native tool
$UnzipStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath `
-OutputDirectory $InstallDirectory `
-Force:$Force `
-Verbose:$Verbose

if ($UnzipStatus -Eq $False) {
# Retry Download one more time with Force=true
$DownloadRetryStatus = CommonLibrary\Get-File -Uri $Uri `
-Path $TempToolPath `
-DownloadRetries 1 `
-RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
-Force:$True `
-Verbose:$Verbose

if ($DownloadRetryStatus -Eq $False) {
Write-Error "Last attempt of download failed as well"
return $False
}

# Retry unzip again one more time with Force=true
$UnzipRetryStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath `
-OutputDirectory $InstallDirectory `
-Force:$True `
-Verbose:$Verbose
if ($UnzipRetryStatus -Eq $False)
{
Write-Error "Last attempt of unzip failed as well"
# Clean up partial zips and extracts
if (Test-Path $TempToolPath) {
Remove-Item $TempToolPath -Force
}
if (Test-Path $InstallDirectory) {
Remove-Item $InstallDirectory -Force -Recurse
}
return $False
}
}

return $True
}

<#
.SYNOPSIS
Download a file, retry on failure

.DESCRIPTION
Download specified file and retry if attempt fails

.PARAMETER Uri
Uri of file to download. If Uri is a local path, the file will be copied instead of downloaded

.PARAMETER Path
Path to download or copy uri file to

.PARAMETER Force
Overwrite existing file if present. Default = False

.PARAMETER DownloadRetries
Total number of retry attempts. Default = 5

.PARAMETER RetryWaitTimeInSeconds
Wait time between retry attempts in seconds. Default = 30

#>
function Get-File {
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $Uri,
[Parameter(Mandatory=$True)]
[string] $Path,
[int] $DownloadRetries = 5,
[int] $RetryWaitTimeInSeconds = 30,
[switch] $Force = $False
)
$Attempt = 0

if ($Force) {
if (Test-Path $Path) {
Remove-Item $Path -Force
}
}
if (Test-Path $Path) {
Write-Host "File '$Path' already exists, skipping download"
return $True
}

$DownloadDirectory = Split-Path -ErrorAction Ignore -Path "$Path" -Parent
if (-Not (Test-Path $DownloadDirectory)) {
New-Item -path $DownloadDirectory -force -itemType "Directory" | Out-Null
}

$TempPath = "$Path.tmp"
if (Test-Path -IsValid -Path $Uri) {
Write-Verbose "'$Uri' is a file path, copying temporarily to '$TempPath'"
Copy-Item -Path $Uri -Destination $TempPath
Write-Verbose "Moving temporary file to '$Path'"
Move-Item -Path $TempPath -Destination $Path
return $?
}
else {
Write-Verbose "Downloading $Uri"
# Don't display the console progress UI - it's a huge perf hit
$ProgressPreference = 'SilentlyContinue'
while($Attempt -Lt $DownloadRetries)
{
try {
Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $TempPath
Write-Verbose "Downloaded to temporary location '$TempPath'"
Move-Item -Path $TempPath -Destination $Path
Write-Verbose "Moved temporary file to '$Path'"
return $True
}
catch {
$Attempt++
if ($Attempt -Lt $DownloadRetries) {
$AttemptsLeft = $DownloadRetries - $Attempt
Write-Warning "Download failed, $AttemptsLeft attempts remaining, will retry in $RetryWaitTimeInSeconds seconds"
Start-Sleep -Seconds $RetryWaitTimeInSeconds
}
else {
Write-Error $_
Write-Error $_.Exception
}
}
}
}

return $False
}

<#
.SYNOPSIS
Generate a shim for a native tool

.DESCRIPTION
Creates a wrapper script (shim) that passes arguments forward to native tool assembly

.PARAMETER ShimName
The name of the shim

.PARAMETER ShimDirectory
The directory where shims are stored

.PARAMETER ToolFilePath
Path to file that shim forwards to

.PARAMETER Force
Replace shim if already present. Default = False

.NOTES
Returns $True if generating shim succeeds, $False otherwise
#>
function New-ScriptShim {
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $ShimName,
[Parameter(Mandatory=$True)]
[string] $ShimDirectory,
[Parameter(Mandatory=$True)]
[string] $ToolFilePath,
[Parameter(Mandatory=$True)]
[string] $BaseUri,
[switch] $Force
)
try {
Write-Verbose "Generating '$ShimName' shim"

if (-Not (Test-Path $ToolFilePath)){
Write-Error "Specified tool file path '$ToolFilePath' does not exist"
return $False
}

# WinShimmer is a small .NET Framework program that creates .exe shims to bootstrapped programs
# Many of the checks for installed programs expect a .exe extension for Windows tools, rather
# than a .bat or .cmd file.
# Source: https://github.com/dotnet/arcade/tree/master/src/WinShimmer
if (-Not (Test-Path "$ShimDirectory\WinShimmer\winshimmer.exe")) {
$InstallStatus = DownloadAndExtract -Uri "$BaseUri/windows/winshimmer/WinShimmer.zip" `
-InstallDirectory $ShimDirectory\WinShimmer `
-Force:$Force `
-DownloadRetries 2 `
-RetryWaitTimeInSeconds 5 `
-Verbose:$Verbose
}

if ((Test-Path (Join-Path $ShimDirectory "$ShimName.exe"))) {
Write-Host "$ShimName.exe already exists; replacing..."
Remove-Item (Join-Path $ShimDirectory "$ShimName.exe")
}

& "$ShimDirectory\WinShimmer\winshimmer.exe" $ShimName $ToolFilePath $ShimDirectory
return $True
}
catch {
Write-Host $_
Write-Host $_.Exception
return $False
}
}

<#
.SYNOPSIS
Returns the machine architecture of the host machine

.NOTES
Returns 'x64' on 64 bit machines
Returns 'x86' on 32 bit machines
#>
function Get-MachineArchitecture {
$ProcessorArchitecture = $Env:PROCESSOR_ARCHITECTURE
$ProcessorArchitectureW6432 = $Env:PROCESSOR_ARCHITEW6432
if($ProcessorArchitecture -Eq "X86")
{
if(($ProcessorArchitectureW6432 -Eq "") -Or
($ProcessorArchitectureW6432 -Eq "X86")) {
return "x86"
}
$ProcessorArchitecture = $ProcessorArchitectureW6432
}
if (($ProcessorArchitecture -Eq "AMD64") -Or
($ProcessorArchitecture -Eq "IA64") -Or
($ProcessorArchitecture -Eq "ARM64")) {
return "x64"
}
return "x86"
}

<#
.SYNOPSIS
Get the name of a temporary folder under the native install directory
#>
function Get-TempDirectory {
return Join-Path (Get-NativeInstallDirectory) "temp/"
}

function Get-TempPathFilename {
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $Path
)
$TempDir = CommonLibrary\Get-TempDirectory
$TempFilename = Split-Path $Path -leaf
$TempPath = Join-Path $TempDir $TempFilename
return $TempPath
}

<#
.SYNOPSIS
Returns the base directory to use for native tool installation

.NOTES
Returns the value of the NETCOREENG_INSTALL_DIRECTORY if that environment variable
is set, or otherwise returns an install directory under the %USERPROFILE%
#>
function Get-NativeInstallDirectory {
$InstallDir = $Env:NETCOREENG_INSTALL_DIRECTORY
if (!$InstallDir) {
$InstallDir = Join-Path $Env:USERPROFILE ".netcoreeng/native/"
}
return $InstallDir
}

<#
.SYNOPSIS
Unzip an archive

.DESCRIPTION
PowerShell module to unzip an archive to a specified directory

.PARAMETER ZipPath (Required)
Path to archive to unzip

.PARAMETER OutputDirectory (Required)
Output directory for archive contents

.PARAMETER Force
Overwrite output directory contents if they already exist

.NOTES
- Returns True and does not perform an extraction if output directory already exists but Overwrite is not True.
- Returns True if unzip operation is successful
- Returns False if Overwrite is True and it is unable to remove contents of OutputDirectory
- Returns False if unable to extract zip archive
#>
function Expand-Zip {
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $ZipPath,
[Parameter(Mandatory=$True)]
[string] $OutputDirectory,
[switch] $Force
)

Write-Verbose "Extracting '$ZipPath' to '$OutputDirectory'"
try {
if ((Test-Path $OutputDirectory) -And (-Not $Force)) {
Write-Host "Directory '$OutputDirectory' already exists, skipping extract"
return $True
}
if (Test-Path $OutputDirectory) {
Write-Verbose "'Force' is 'True', but '$OutputDirectory' exists, removing directory"
Remove-Item $OutputDirectory -Force -Recurse
if ($? -Eq $False) {
Write-Error "Unable to remove '$OutputDirectory'"
return $False
}
}

$TempOutputDirectory = Join-Path "$(Split-Path -Parent $OutputDirectory)" "$(Split-Path -Leaf $OutputDirectory).tmp"
if (Test-Path $TempOutputDirectory) {
Remove-Item $TempOutputDirectory -Force -Recurse
}
New-Item -Path $TempOutputDirectory -Force -ItemType "Directory" | Out-Null

Add-Type -assembly "system.io.compression.filesystem"
[io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$TempOutputDirectory")
if ($? -Eq $False) {
Write-Error "Unable to extract '$ZipPath'"
return $False
}

Move-Item -Path $TempOutputDirectory -Destination $OutputDirectory
}
catch {
Write-Host $_
Write-Host $_.Exception

return $False
}
return $True
}

export-modulemember -function DownloadAndExtract
export-modulemember -function Expand-Zip
export-modulemember -function Get-File
export-modulemember -function Get-MachineArchitecture
export-modulemember -function Get-NativeInstallDirectory
export-modulemember -function Get-TempDirectory
export-modulemember -function Get-TempPathFilename
export-modulemember -function New-ScriptShim

@@ -0,0 +1,168 @@
#!/usr/bin/env bash

function GetNativeInstallDirectory {
local install_dir

if [[ -z $NETCOREENG_INSTALL_DIRECTORY ]]; then
install_dir=$HOME/.netcoreeng/native/
else
install_dir=$NETCOREENG_INSTALL_DIRECTORY
fi

echo $install_dir
return 0
}

function GetTempDirectory {

echo $(GetNativeInstallDirectory)temp/
return 0
}

function ExpandZip {
local zip_path=$1
local output_directory=$2
local force=${3:-false}

echo "Extracting $zip_path to $output_directory"
if [[ -d $output_directory ]] && [[ $force = false ]]; then
echo "Directory '$output_directory' already exists, skipping extract"
return 0
fi

if [[ -d $output_directory ]]; then
echo "Force flag enabled, but '$output_directory' exists. Removing directory"
rm -rf $output_directory
if [[ $? != 0 ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to remove '$output_directory'"
return 1
fi
fi

echo "Creating directory: '$output_directory'"
mkdir -p $output_directory

echo "Extracting archive"
tar -xf $zip_path -C $output_directory
if [[ $? != 0 ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to extract '$zip_path'"
return 1
fi

return 0
}

function GetCurrentOS {
local unameOut="$(uname -s)"
case $unameOut in
Linux*) echo "Linux";;
Darwin*) echo "MacOS";;
esac
return 0
}

function GetFile {
local uri=$1
local path=$2
local force=${3:-false}
local download_retries=${4:-5}
local retry_wait_time_seconds=${5:-30}

if [[ -f $path ]]; then
if [[ $force = false ]]; then
echo "File '$path' already exists. Skipping download"
return 0
else
rm -rf $path
fi
fi

if [[ -f $uri ]]; then
echo "'$uri' is a file path, copying file to '$path'"
cp $uri $path
return $?
fi

echo "Downloading $uri"
# Use curl if available, otherwise use wget
if command -v curl > /dev/null; then
curl "$uri" -sSL --retry $download_retries --retry-delay $retry_wait_time_seconds --create-dirs -o "$path" --fail
else
wget -q -O "$path" "$uri" --tries="$download_retries"
fi

return $?
}

function GetTempPathFileName {
local path=$1

local temp_dir=$(GetTempDirectory)
local temp_file_name=$(basename $path)
echo $temp_dir$temp_file_name
return 0
}

function DownloadAndExtract {
local uri=$1
local installDir=$2
local force=${3:-false}
local download_retries=${4:-5}
local retry_wait_time_seconds=${5:-30}

local temp_tool_path=$(GetTempPathFileName $uri)

echo "downloading to: $temp_tool_path"

# Download file
GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds
if [[ $? != 0 ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to download '$uri' to '$temp_tool_path'."
return 1
fi

# Extract File
echo "extracting from $temp_tool_path to $installDir"
ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds
if [[ $? != 0 ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to extract '$temp_tool_path' to '$installDir'."
return 1
fi

return 0
}

function NewScriptShim {
local shimpath=$1
local tool_file_path=$2
local force=${3:-false}

echo "Generating '$shimpath' shim"
if [[ -f $shimpath ]]; then
if [[ $force = false ]]; then
echo "File '$shimpath' already exists." >&2
return 1
else
rm -rf $shimpath
fi
fi

if [[ ! -f $tool_file_path ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist"
return 1
fi

local shim_contents=$'#!/usr/bin/env bash\n'
shim_contents+="SHIMARGS="$'$1\n'
shim_contents+="$tool_file_path"$' $SHIMARGS\n'

# Write shim file
echo "$shim_contents" > $shimpath

chmod +x $shimpath

echo "Finished generating shim '$shimpath'"

return $?
}

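As a quick illustration of what NewScriptShim above produces: for a hypothetical tool installed at /path/to/cmake, the generated shim is a tiny wrapper that forwards its first argument to the real binary, roughly:

    #!/usr/bin/env bash
    SHIMARGS=$1
    /path/to/cmake $SHIMARGS

(The /path/to/cmake path is illustrative; the real path is whatever tool_file_path was passed in.)
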
@@ -0,0 +1,121 @@
#!/usr/bin/env bash
#
# This file locates the native compiler with the given name and version and sets the CC/CXX environment variables to point to it.
#

source="${BASH_SOURCE[0]}"

# resolve $SOURCE until the file is no longer a symlink
while [[ -h $source ]]; do
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
source="$(readlink "$source")"

# if $source was a relative symlink, we need to resolve it relative to the path where the
# symlink file was located
[[ $source != /* ]] && source="$scriptroot/$source"
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

if [ $# -lt 1 ]
then
echo "Usage..."
echo "find-native-compiler.sh <compiler> <compiler major version> <compiler minor version>"
echo "Specify the name of compiler (clang or gcc)."
echo "Specify the major version of compiler."
echo "Specify the minor version of compiler."
exit 1
fi

. $scriptroot/../pipeline-logging-functions.sh

compiler="$1"
cxxCompiler="$compiler++"
majorVersion="$2"
minorVersion="$3"

if [ "$compiler" = "gcc" ]; then cxxCompiler="g++"; fi

check_version_exists() {
desired_version=-1

# Set up the environment to be used for building with the desired compiler.
if command -v "$compiler-$1.$2" > /dev/null; then
desired_version="-$1.$2"
elif command -v "$compiler$1$2" > /dev/null; then
desired_version="$1$2"
elif command -v "$compiler-$1$2" > /dev/null; then
desired_version="-$1$2"
fi

echo "$desired_version"
}

if [ -z "$CLR_CC" ]; then

# Set default versions
if [ -z "$majorVersion" ]; then
# note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero.
if [ "$compiler" = "clang" ]; then versions=( 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 )
elif [ "$compiler" = "gcc" ]; then versions=( 9 8 7 6 5 4.9 ); fi

for version in "${versions[@]}"; do
parts=(${version//./ })
desired_version="$(check_version_exists "${parts[0]}" "${parts[1]}")"
if [ "$desired_version" != "-1" ]; then majorVersion="${parts[0]}"; break; fi
done

if [ -z "$majorVersion" ]; then
if command -v "$compiler" > /dev/null; then
if [ "$(uname)" != "Darwin" ]; then
Write-PipelineTelemetryError -category "Build" -type "warning" "Specific version of $compiler not found, falling back to use the one in PATH."
fi
export CC="$(command -v "$compiler")"
export CXX="$(command -v "$cxxCompiler")"
else
Write-PipelineTelemetryError -category "Build" "No usable version of $compiler found."
exit 1
fi
else
if [ "$compiler" = "clang" ] && [ "$majorVersion" -lt 5 ]; then
if [ "$build_arch" = "arm" ] || [ "$build_arch" = "armel" ]; then
if command -v "$compiler" > /dev/null; then
Write-PipelineTelemetryError -category "Build" -type "warning" "Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH."
export CC="$(command -v "$compiler")"
export CXX="$(command -v "$cxxCompiler")"
else
Write-PipelineTelemetryError -category "Build" "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH."
exit 1
fi
fi
fi
fi
else
desired_version="$(check_version_exists "$majorVersion" "$minorVersion")"
if [ "$desired_version" = "-1" ]; then
Write-PipelineTelemetryError -category "Build" "Could not find specific version of $compiler: $majorVersion $minorVersion."
exit 1
fi
fi

if [ -z "$CC" ]; then
export CC="$(command -v "$compiler$desired_version")"
export CXX="$(command -v "$cxxCompiler$desired_version")"
if [ -z "$CXX" ]; then export CXX="$(command -v "$cxxCompiler")"; fi
fi
else
if [ ! -f "$CLR_CC" ]; then
Write-PipelineTelemetryError -category "Build" "CLR_CC is set but path '$CLR_CC' does not exist"
exit 1
fi
export CC="$CLR_CC"
export CXX="$CLR_CXX"
fi

if [ -z "$CC" ]; then
Write-PipelineTelemetryError -category "Build" "Unable to find $compiler."
exit 1
fi

export CCC_CC="$CC"
export CCC_CXX="$CXX"
export SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")"

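A minimal usage sketch of the compiler-locator above; the eng/common/native path and the clang 9.0 version are assumptions for illustration, not taken from this change. Because the script only exports CC/CXX, it is sourced from a build script rather than executed:

    # hypothetical invocation; path and version are illustrative
    . eng/common/native/find-native-compiler.sh clang 9 0
    echo "Using CC=$CC CXX=$CXX"
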
@@ -0,0 +1,117 @@
#!/usr/bin/env bash

source="${BASH_SOURCE[0]}"
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. $scriptroot/common-library.sh

base_uri=
install_path=
version=
clean=false
force=false
download_retries=5
retry_wait_time_seconds=30

while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
case $lowerI in
--baseuri)
base_uri=$2
shift 2
;;
--installpath)
install_path=$2
shift 2
;;
--version)
version=$2
shift 2
;;
--clean)
clean=true
shift 1
;;
--force)
force=true
shift 1
;;
--downloadretries)
download_retries=$2
shift 2
;;
--retrywaittimeseconds)
retry_wait_time_seconds=$2
shift 2
;;
--help)
echo "Common settings:"
echo " --baseuri <value> Base file directory or URL from which to acquire tool archives"
echo " --installpath <value> Base directory to install native tool to"
echo " --clean Don't install the tool, just clean up the current install of the tool"
echo " --force Force install of tools even if they previously exist"
echo " --help Print help and exit"
echo ""
echo "Advanced settings:"
echo " --downloadretries Total number of retry attempts"
echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
echo ""
exit 0
;;
esac
done

tool_name="cmake-test"
tool_os=$(GetCurrentOS)
tool_folder=$(echo $tool_os | awk '{print tolower($0)}')
tool_arch="x86_64"
tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
tool_install_directory="$install_path/$tool_name/$version"
tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
shim_path="$install_path/$tool_name.sh"
uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"

# Clean up tool and installers
if [[ $clean = true ]]; then
echo "Cleaning $tool_install_directory"
if [[ -d $tool_install_directory ]]; then
rm -rf $tool_install_directory
fi

echo "Cleaning $shim_path"
if [[ -f $shim_path ]]; then
rm -rf $shim_path
fi

tool_temp_path=$(GetTempPathFileName $uri)
echo "Cleaning $tool_temp_path"
if [[ -f $tool_temp_path ]]; then
rm -rf $tool_temp_path
fi

exit 0
fi

# Install tool
if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
echo "$tool_name ($version) already exists, skipping install"
exit 0
fi

DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds

if [[ $? != 0 ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
exit 1
fi

# Generate Shim
# Always rewrite shims so that we are referencing the expected version
NewScriptShim $shim_path $tool_file_path true

if [[ $? != 0 ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
exit 1
fi

exit 0

@@ -0,0 +1,117 @@
#!/usr/bin/env bash

source="${BASH_SOURCE[0]}"
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"

. $scriptroot/common-library.sh

base_uri=
install_path=
version=
clean=false
force=false
download_retries=5
retry_wait_time_seconds=30

while (($# > 0)); do
lowerI="$(echo $1 | awk '{print tolower($0)}')"
case $lowerI in
--baseuri)
base_uri=$2
shift 2
;;
--installpath)
install_path=$2
shift 2
;;
--version)
version=$2
shift 2
;;
--clean)
clean=true
shift 1
;;
--force)
force=true
shift 1
;;
--downloadretries)
download_retries=$2
shift 2
;;
--retrywaittimeseconds)
retry_wait_time_seconds=$2
shift 2
;;
--help)
echo "Common settings:"
echo " --baseuri <value> Base file directory or URL from which to acquire tool archives"
echo " --installpath <value> Base directory to install native tool to"
echo " --clean Don't install the tool, just clean up the current install of the tool"
echo " --force Force install of tools even if they previously exist"
echo " --help Print help and exit"
echo ""
echo "Advanced settings:"
echo " --downloadretries Total number of retry attempts"
echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
echo ""
exit 0
;;
esac
done

tool_name="cmake"
tool_os=$(GetCurrentOS)
tool_folder=$(echo $tool_os | awk '{print tolower($0)}')
tool_arch="x86_64"
tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
tool_install_directory="$install_path/$tool_name/$version"
tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
shim_path="$install_path/$tool_name.sh"
uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"

# Clean up tool and installers
if [[ $clean = true ]]; then
echo "Cleaning $tool_install_directory"
if [[ -d $tool_install_directory ]]; then
rm -rf $tool_install_directory
fi

echo "Cleaning $shim_path"
if [[ -f $shim_path ]]; then
rm -rf $shim_path
fi

tool_temp_path=$(GetTempPathFileName $uri)
echo "Cleaning $tool_temp_path"
if [[ -f $tool_temp_path ]]; then
rm -rf $tool_temp_path
fi

exit 0
fi

# Install tool
if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
echo "$tool_name ($version) already exists, skipping install"
exit 0
fi

DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds

if [[ $? != 0 ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
exit 1
fi

# Generate Shim
# Always rewrite shims so that we are referencing the expected version
NewScriptShim $shim_path $tool_file_path true

if [[ $? != 0 ]]; then
Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
exit 1
fi

exit 0

@@ -0,0 +1,132 @@
<#
.SYNOPSIS
Install native tool

.DESCRIPTION
Install a native tool from Azure blob storage

.PARAMETER InstallPath
Base directory to install native tool to

.PARAMETER BaseUri
Base file directory or URL from which to acquire tool archives

.PARAMETER CommonLibraryDirectory
Path to folder containing common library modules

.PARAMETER Force
Force install of tools even if they previously exist

.PARAMETER Clean
Don't install the tool, just clean up the current install of the tool

.PARAMETER DownloadRetries
Total number of retry attempts

.PARAMETER RetryWaitTimeInSeconds
Wait time between retry attempts in seconds

.NOTES
Returns 0 if install succeeds, 1 otherwise
#>
[CmdletBinding(PositionalBinding=$false)]
Param (
[Parameter(Mandatory=$True)]
[string] $ToolName,
[Parameter(Mandatory=$True)]
[string] $InstallPath,
[Parameter(Mandatory=$True)]
[string] $BaseUri,
[Parameter(Mandatory=$True)]
[string] $Version,
[string] $CommonLibraryDirectory = $PSScriptRoot,
[switch] $Force = $False,
[switch] $Clean = $False,
[int] $DownloadRetries = 5,
[int] $RetryWaitTimeInSeconds = 30
)

. $PSScriptRoot\..\pipeline-logging-functions.ps1

# Import common library modules
Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1")

try {
# Define verbose switch if undefined
$Verbose = $VerbosePreference -Eq "Continue"

$Arch = CommonLibrary\Get-MachineArchitecture
$ToolOs = "win64"
if ($Arch -Eq "x86") {
$ToolOs = "win32"
}
$ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch"
$ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\"
$Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip"
$ShimPath = Join-Path $InstallPath "$ToolName.exe"

if ($Clean) {
Write-Host "Cleaning $ToolInstallDirectory"
if (Test-Path $ToolInstallDirectory) {
Remove-Item $ToolInstallDirectory -Force -Recurse
}
Write-Host "Cleaning $ShimPath"
if (Test-Path $ShimPath) {
Remove-Item $ShimPath -Force
}
$ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri
Write-Host "Cleaning $ToolTempPath"
if (Test-Path $ToolTempPath) {
Remove-Item $ToolTempPath -Force
}
exit 0
}

# Install tool
if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) {
Write-Verbose "$ToolName ($Version) already exists, skipping install"
}
else {
$InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri `
-InstallDirectory $ToolInstallDirectory `
-Force:$Force `
-DownloadRetries $DownloadRetries `
-RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
-Verbose:$Verbose

if ($InstallStatus -Eq $False) {
Write-PipelineTelemetryError "Installation failed" -Category "NativeToolsetBootstrapping"
exit 1
}
}

$ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName }
if (@($ToolFilePath).Length -Gt 1) {
Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))"
exit 1
} elseif (@($ToolFilePath).Length -Lt 1) {
Write-Host "$ToolName was not found in $ToolInstallDirectory."
exit 1
}

# Generate shim
# Always rewrite shims so that we are referencing the expected version
$GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName `
-ShimDirectory $InstallPath `
-ToolFilePath "$ToolFilePath" `
-BaseUri $BaseUri `
-Force:$Force `
-Verbose:$Verbose

if ($GenerateShimStatus -Eq $False) {
Write-PipelineTelemetryError "Generate shim failed" -Category "NativeToolsetBootstrapping"
return 1
}

exit 0
}
catch {
Write-Host $_.ScriptStackTrace
Write-PipelineTelemetryError -Category "NativeToolsetBootstrapping" -Message $_
exit 1
}

@@ -0,0 +1,30 @@
<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">
<PropertyGroup Condition="'$(AGENT_OS)' != 'Windows_NT'">
<Python>python3</Python>
<HelixPreCommands>$(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/SOD/SizeOnDisk</HelixPreCommands>
</PropertyGroup>

<ItemGroup>
<HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
<PayloadDirectory>%(Identity)</PayloadDirectory>
</HelixCorrelationPayload>
</ItemGroup>

<PropertyGroup Condition="'$(AGENT_OS)' == 'Windows_NT'">
<ScenarioDirectory>%HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\</ScenarioDirectory>
<BlazorDirectory>$(ScenarioDirectory)blazor\</BlazorDirectory>
</PropertyGroup>
<PropertyGroup Condition="'$(AGENT_OS)' != 'Windows_NT'">
<ScenarioDirectory>$HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/</ScenarioDirectory>
<BlazorDirectory>$(ScenarioDirectory)blazor/</BlazorDirectory>
</PropertyGroup>

<ItemGroup>
<HelixWorkItem Include="SOD - New Blazor Template - Publish">
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
<PreCommands>cd $(BlazorDirectory);$(Python) pre.py publish --msbuild %27/p:_TrimmerDumpDependencies=true%27 --msbuild-static AdditionalMonoLinkerOptions=%27"%24(AdditionalMonoLinkerOptions) --dump-dependencies"%27 --binlog %27./traces/blazor_publish.binlog%27</PreCommands>
<Command>$(Python) test.py sod --scenario-name "%(Identity)"</Command>
<PostCommands>$(Python) post.py</PostCommands>
</HelixWorkItem>
</ItemGroup>
</Project>

@ -0,0 +1,69 @@
|
|||
<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">
|
||||
|
||||
<ItemGroup>
|
||||
<HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
|
||||
<PayloadDirectory>%(Identity)</PayloadDirectory>
|
||||
</HelixCorrelationPayload>
|
||||
</ItemGroup>
|
||||
|
||||
<!--
|
||||
Crossgen and Crossgen2 Scenario WorkItems
|
||||
-->
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' == 'Windows_NT'">
|
||||
<Python>py -3</Python>
|
||||
<HelixPreCommands>$(HelixPreCommands)</HelixPreCommands>
|
||||
<CoreRoot>%HELIX_CORRELATION_PAYLOAD%\Core_Root</CoreRoot>
|
||||
<ScenarioDirectory>%HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\</ScenarioDirectory>
|
||||
<CrossgenDirectory>$(ScenarioDirectory)crossgen\</CrossgenDirectory>
|
||||
<Crossgen2Directory>$(ScenarioDirectory)crossgen2\</Crossgen2Directory>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' != 'Windows_NT'">
|
||||
<Python>python3</Python>
|
||||
<HelixPreCommands>$(HelixPreCommands);chmod +x $HELIX_WORKITEM_PAYLOAD/startup/Startup;chmod +x $HELIX_WORKITEM_PAYLOAD/startup/perfcollect;sudo apt update</HelixPreCommands>
|
||||
<CoreRoot>$HELIX_CORRELATION_PAYLOAD/Core_Root</CoreRoot>
|
||||
<ScenarioDirectory>$HELIX_CORRELATION_PAYLOAD/performance/src/scenarios/</ScenarioDirectory>
|
||||
<CrossgenDirectory>$(ScenarioDirectory)crossgen/</CrossgenDirectory>
|
||||
<Crossgen2Directory>$(ScenarioDirectory)crossgen2/</Crossgen2Directory>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<SingleAssembly Include="System.Private.Xml.dll"/>
|
||||
<SingleAssembly Include="System.Linq.Expressions.dll"/>
|
||||
<SingleAssembly Include="Microsoft.CodeAnalysis.VisualBasic.dll"/>
|
||||
<SingleAssembly Include="Microsoft.CodeAnalysis.CSharp.dll"/>
|
||||
<SingleAssembly Include="System.Private.CoreLib.dll"/>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Composite Include="framework-r2r.dll.rsp"/>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<CrossgenWorkItem Include="@(SingleAssembly)">
|
||||
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
|
||||
<Command>$(Python) $(CrossgenDirectory)test.py crossgen --core-root $(CoreRoot) --test-name %(Identity)</Command>
|
||||
</CrossgenWorkItem>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Crossgen2WorkItem Include="@(SingleAssembly)">
|
||||
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
|
||||
<Command>$(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --single %(Identity)</Command>
|
||||
</Crossgen2WorkItem>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- Enable crossgen tests on Windows x64 and Windows x86 -->
|
||||
<HelixWorkItem Include="@(CrossgenWorkItem -> 'Crossgen %(Identity)')" Condition="'$(AGENT_OS)' == 'Windows_NT'">
|
||||
<Timeout>4:00</Timeout>
|
||||
</HelixWorkItem>
|
||||
<!-- Enable crossgen2 tests on Windows x64 and Linux x64 -->
|
||||
<HelixWorkItem Include="@(Crossgen2WorkItem -> 'Crossgen2 %(Identity)')" Condition="'$(Architecture)' == 'x64'">
|
||||
<Timeout>4:00</Timeout>
|
||||
</HelixWorkItem>
|
||||
<HelixWorkItem Include="Crossgen2 Composite Framework R2R" Condition="'$(Architecture)' == 'x64'">
|
||||
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
|
||||
<Command>$(Python) $(Crossgen2Directory)test.py crossgen2 --core-root $(CoreRoot) --composite $(Crossgen2Directory)framework-r2r.dll.rsp</Command>
|
||||
<Timeout>1:00</Timeout>
|
||||
</HelixWorkItem>
|
||||
</ItemGroup>
|
||||
</Project>
|
|
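For reference, after item and property expansion the Windows crossgen work item for the first assembly above runs a command along these lines (paths come from the Windows PropertyGroup; the crossgen2 items follow the same pattern with --single instead of --test-name):

py -3 %HELIX_CORRELATION_PAYLOAD%\performance\src\scenarios\crossgen\test.py crossgen --core-root %HELIX_CORRELATION_PAYLOAD%\Core_Root --test-name System.Private.Xml.dll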
@@ -0,0 +1,144 @@
|
|||
<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">
|
||||
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' == 'Windows_NT'">
|
||||
<WorkItemCommand>%HELIX_CORRELATION_PAYLOAD%\performance\scripts\benchmarks_ci.py --csproj %HELIX_CORRELATION_PAYLOAD%\performance\$(TargetCsproj)</WorkItemCommand>
|
||||
<CliArguments>--dotnet-versions %DOTNET_VERSION% --cli-source-info args --cli-branch %PERFLAB_BRANCH% --cli-commit-sha %PERFLAB_HASH% --cli-repository https://github.com/%PERFLAB_REPO% --cli-source-timestamp %PERFLAB_BUILDTIMESTAMP%</CliArguments>
|
||||
<Python>py -3</Python>
|
||||
<CoreRun>%HELIX_CORRELATION_PAYLOAD%\Core_Root\CoreRun.exe</CoreRun>
|
||||
<BaselineCoreRun>%HELIX_CORRELATION_PAYLOAD%\Baseline_Core_Root\CoreRun.exe</BaselineCoreRun>
|
||||
|
||||
<HelixPreCommands>$(HelixPreCommands);call %HELIX_CORRELATION_PAYLOAD%\performance\tools\machine-setup.cmd;set PYTHONPATH=%HELIX_WORKITEM_PAYLOAD%\scripts%3B%HELIX_WORKITEM_PAYLOAD%</HelixPreCommands>
|
||||
<ArtifactsDirectory>%HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts</ArtifactsDirectory>
|
||||
<BaselineArtifactsDirectory>%HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts_Baseline</BaselineArtifactsDirectory>
|
||||
<ResultsComparer>%HELIX_CORRELATION_PAYLOAD%\performance\src\tools\ResultsComparer\ResultsComparer.csproj</ResultsComparer>
|
||||
<DotnetExe>%HELIX_CORRELATION_PAYLOAD%\performance\tools\dotnet\$(Architecture)\dotnet.exe</DotnetExe>
|
||||
<Percent>%25%25</Percent>
|
||||
<XMLResults>%HELIX_WORKITEM_ROOT%\testResults.xml</XMLResults>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' != 'Windows_NT' and '$(RunFromPerfRepo)' == 'false'">
|
||||
<BaseDirectory>$HELIX_CORRELATION_PAYLOAD</BaseDirectory>
|
||||
<PerformanceDirectory>$(BaseDirectory)/performance</PerformanceDirectory>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' != 'Windows_NT' and '$(RunFromPerfRepo)' == 'true'">
|
||||
<BaseDirectory>$HELIX_WORKITEM_PAYLOAD</BaseDirectory>
|
||||
<PerformanceDirectory>$(BaseDirectory)</PerformanceDirectory>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(AGENT_OS)' != 'Windows_NT'">
|
||||
<WorkItemCommand>$(PerformanceDirectory)/scripts/benchmarks_ci.py --csproj $(PerformanceDirectory)/$(TargetCsproj)</WorkItemCommand>
|
||||
<CliArguments>--dotnet-versions $DOTNET_VERSION --cli-source-info args --cli-branch $PERFLAB_BRANCH --cli-commit-sha $PERFLAB_HASH --cli-repository https://github.com/$PERFLAB_REPO --cli-source-timestamp $PERFLAB_BUILDTIMESTAMP</CliArguments>
|
||||
<Python>python3</Python>
|
||||
<CoreRun>$(BaseDirectory)/Core_Root/corerun</CoreRun>
|
||||
<BaselineCoreRun>$(BaseDirectory)/Baseline_Core_Root/corerun</BaselineCoreRun>
|
||||
<HelixPreCommands>$(HelixPreCommands);chmod +x $(PerformanceDirectory)/tools/machine-setup.sh;. $(PerformanceDirectory)/tools/machine-setup.sh</HelixPreCommands>
|
||||
<ArtifactsDirectory>$(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts</ArtifactsDirectory>
|
||||
<BaselineArtifactsDirectory>$(BaseDirectory)/artifacts/BenchmarkDotNet.Artifacts_Baseline</BaselineArtifactsDirectory>
|
||||
<ResultsComparer>$(PerformanceDirectory)/src/tools/ResultsComparer/ResultsComparer.csproj</ResultsComparer>
|
||||
<DotnetExe>$(PerformanceDirectory)/tools/dotnet/$(Architecture)/dotnet</DotnetExe>
|
||||
<Percent>%25</Percent>
|
||||
<XMLResults>$HELIX_WORKITEM_ROOT/testResults.xml</XMLResults>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(WasmDotnet)' == 'true'">
|
||||
<CliArguments>$(CliArguments) --wasm</CliArguments>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(MonoDotnet)' == 'true' and '$(AGENT_OS)' == 'Windows_NT'">
|
||||
<CoreRunArgument>--corerun %HELIX_CORRELATION_PAYLOAD%\dotnet-mono\shared\Microsoft.NETCore.App\6.0.0\corerun.exe</CoreRunArgument>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(MonoDotnet)' == 'true' and '$(AGENT_OS)' != 'Windows_NT'">
|
||||
<CoreRunArgument>--corerun $(BaseDirectory)/dotnet-mono/shared/Microsoft.NETCore.App/6.0.0/corerun</CoreRunArgument>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(UseCoreRun)' == 'true'">
|
||||
<CoreRunArgument>--corerun $(CoreRun)</CoreRunArgument>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(UseBaselineCoreRun)' == 'true'">
|
||||
<BaselineCoreRunArgument>--corerun $(BaselineCoreRun)</BaselineCoreRunArgument>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(WorkItemCommand)' != ''">
|
||||
<WorkItemCommand>$(Python) $(WorkItemCommand) --incremental no --architecture $(Architecture) -f $(_Framework) $(PerfLabArguments)</WorkItemCommand>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(_Framework)' != 'net461'">
|
||||
<WorkItemCommand>$(WorkItemCommand) $(CliArguments)</WorkItemCommand>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<WorkItemTimeout>2:30</WorkItemTimeout>
|
||||
<WorkItemTimeout Condition="'$(HelixSourcePrefix)' != 'official'">0:15</WorkItemTimeout>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
|
||||
<PayloadDirectory>%(Identity)</PayloadDirectory>
|
||||
</HelixCorrelationPayload>
|
||||
</ItemGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<PartitionCount>30</PartitionCount>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<Partition Include="$(BuildConfig).Partition0" Index="0" />
|
||||
<Partition Include="$(BuildConfig).Partition1" Index="1" />
|
||||
<Partition Include="$(BuildConfig).Partition2" Index="2" />
|
||||
<Partition Include="$(BuildConfig).Partition3" Index="3" />
|
||||
<Partition Include="$(BuildConfig).Partition4" Index="4" />
|
||||
<Partition Include="$(BuildConfig).Partition5" Index="5" />
|
||||
<Partition Include="$(BuildConfig).Partition6" Index="6" />
|
||||
<Partition Include="$(BuildConfig).Partition7" Index="7" />
|
||||
<Partition Include="$(BuildConfig).Partition8" Index="8" />
|
||||
<Partition Include="$(BuildConfig).Partition9" Index="9" />
|
||||
<Partition Include="$(BuildConfig).Partition10" Index="10" />
|
||||
<Partition Include="$(BuildConfig).Partition11" Index="11" />
|
||||
<Partition Include="$(BuildConfig).Partition12" Index="12" />
|
||||
<Partition Include="$(BuildConfig).Partition13" Index="13" />
|
||||
<Partition Include="$(BuildConfig).Partition14" Index="14" />
|
||||
<Partition Include="$(BuildConfig).Partition15" Index="15" />
|
||||
<Partition Include="$(BuildConfig).Partition16" Index="16" />
|
||||
<Partition Include="$(BuildConfig).Partition17" Index="17" />
|
||||
<Partition Include="$(BuildConfig).Partition18" Index="18" />
|
||||
<Partition Include="$(BuildConfig).Partition19" Index="19" />
|
||||
<Partition Include="$(BuildConfig).Partition20" Index="20" />
|
||||
<Partition Include="$(BuildConfig).Partition21" Index="21" />
|
||||
<Partition Include="$(BuildConfig).Partition22" Index="22" />
|
||||
<Partition Include="$(BuildConfig).Partition23" Index="23" />
|
||||
<Partition Include="$(BuildConfig).Partition24" Index="24" />
|
||||
<Partition Include="$(BuildConfig).Partition25" Index="25" />
|
||||
<Partition Include="$(BuildConfig).Partition26" Index="26" />
|
||||
<Partition Include="$(BuildConfig).Partition27" Index="27" />
|
||||
<Partition Include="$(BuildConfig).Partition28" Index="28" />
|
||||
<Partition Include="$(BuildConfig).Partition29" Index="29" />
|
||||
</ItemGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(Compare)' == 'true'">
|
||||
<FailOnTestFailure>false</FailOnTestFailure>
|
||||
</PropertyGroup>
|
||||
|
||||
<!--
|
||||
Partition the Microbenchmarks project, but nothing else
|
||||
-->
|
||||
<ItemGroup Condition="$(TargetCsproj.Contains('MicroBenchmarks.csproj'))">
|
||||
<HelixWorkItem Include="@(Partition)">
|
||||
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
|
||||
<PreCommands Condition="'$(Compare)' == 'true'">$(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"</PreCommands>
|
||||
<Command>$(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"</Command>
|
||||
<PostCommands Condition="'$(Compare)' == 'true'">$(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults);$(FinalCommand)</PostCommands>
|
||||
<Timeout>$(WorkItemTimeout)</Timeout>
|
||||
</HelixWorkItem>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup Condition="!$(TargetCsproj.Contains('MicroBenchmarks.csproj'))">
|
||||
<HelixWorkItem Include="$(BuildConfig).WorkItem">
|
||||
<PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
|
||||
<PreCommands Condition="'$(Compare)' == 'true'">$(WorkItemCommand) --bdn-artifacts $(BaselineArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(BaselineCoreRunArgument)"</PreCommands>
|
||||
<Command>$(WorkItemCommand) --bdn-artifacts $(ArtifactsDirectory) --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) $(CoreRunArgument)"</Command>
|
||||
<PostCommands Condition="'$(Compare)' == 'true'">$(DotnetExe) run -f $(_Framework) -p $(ResultsComparer) --base $(BaselineArtifactsDirectory) --diff $(ArtifactsDirectory) --threshold 2$(Percent) --xml $(XMLResults)</PostCommands>
|
||||
<Timeout>4:00</Timeout>
|
||||
</HelixWorkItem>
|
||||
</ItemGroup>
|
||||
</Project>
|
|
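Tracing the PropertyGroups above for a Windows micro-benchmark partition with UseCoreRun=true and no comparison run, the work-item command composes to roughly the line below; $(TargetCsproj), $(Architecture), $(_Framework), $(PerfLabArguments), $(CliArguments), $(BDNCategories) and $(ExtraBenchmarkDotNetArguments) are filled in by the setup scripts that follow:

py -3 %HELIX_CORRELATION_PAYLOAD%\performance\scripts\benchmarks_ci.py --csproj %HELIX_CORRELATION_PAYLOAD%\performance\$(TargetCsproj) --incremental no --architecture $(Architecture) -f $(_Framework) $(PerfLabArguments) $(CliArguments) --bdn-artifacts %HELIX_CORRELATION_PAYLOAD%\artifacts\BenchmarkDotNet.Artifacts --bdn-arguments="--anyCategories $(BDNCategories) $(ExtraBenchmarkDotNetArguments) --corerun %HELIX_CORRELATION_PAYLOAD%\Core_Root\CoreRun.exe --partition-count 30 --partition-index 0"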
@@ -0,0 +1,147 @@
|
|||
Param(
|
||||
[string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY,
|
||||
[string] $CoreRootDirectory,
|
||||
[string] $BaselineCoreRootDirectory,
|
||||
[string] $Architecture="x64",
|
||||
[string] $Framework="net5.0",
|
||||
[string] $CompilationMode="Tiered",
|
||||
[string] $Repository=$env:BUILD_REPOSITORY_NAME,
|
||||
[string] $Branch=$env:BUILD_SOURCEBRANCH,
|
||||
[string] $CommitSha=$env:BUILD_SOURCEVERSION,
|
||||
[string] $BuildNumber=$env:BUILD_BUILDNUMBER,
|
||||
[string] $RunCategories="Libraries Runtime",
|
||||
[string] $Csproj="src\benchmarks\micro\MicroBenchmarks.csproj",
|
||||
[string] $Kind="micro",
|
||||
[switch] $LLVM,
|
||||
[switch] $MonoInterpreter,
|
||||
[switch] $MonoAOT,
|
||||
[switch] $Internal,
|
||||
[switch] $Compare,
|
||||
[string] $MonoDotnet="",
|
||||
[string] $Configurations="CompilationMode=$CompilationMode RunKind=$Kind"
|
||||
)
|
||||
|
||||
$RunFromPerformanceRepo = ($Repository -eq "dotnet/performance") -or ($Repository -eq "dotnet-performance")
|
||||
$UseCoreRun = ($CoreRootDirectory -ne [string]::Empty)
|
||||
$UseBaselineCoreRun = ($BaselineCoreRootDirectory -ne [string]::Empty)
|
||||
|
||||
$PayloadDirectory = (Join-Path $SourceDirectory "Payload")
|
||||
$PerformanceDirectory = (Join-Path $PayloadDirectory "performance")
|
||||
$WorkItemDirectory = (Join-Path $SourceDirectory "workitem")
|
||||
$ExtraBenchmarkDotNetArguments = "--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
|
||||
$Creator = $env:BUILD_DEFINITIONNAME
|
||||
$PerfLabArguments = ""
|
||||
$HelixSourcePrefix = "pr"
|
||||
|
||||
$Queue = "Windows.10.Amd64.ClientRS4.DevEx.15.8.Open"
|
||||
|
||||
# TODO: Implement better logic to determine whether Framework is .NET Core or >= .NET 5.
|
||||
if ($Framework.StartsWith("netcoreapp") -or ($Framework -eq "net5.0")) {
|
||||
$Queue = "Windows.10.Amd64.ClientRS5.Open"
|
||||
}
|
||||
|
||||
if ($Compare) {
|
||||
$Queue = "Windows.10.Amd64.19H1.Tiger.Perf.Open"
|
||||
$PerfLabArguments = ""
|
||||
$ExtraBenchmarkDotNetArguments = ""
|
||||
}
|
||||
|
||||
if ($Internal) {
|
||||
$Queue = "Windows.10.Amd64.19H1.Tiger.Perf"
|
||||
$PerfLabArguments = "--upload-to-perflab-container"
|
||||
$ExtraBenchmarkDotNetArguments = ""
|
||||
$Creator = ""
|
||||
$HelixSourcePrefix = "official"
|
||||
}
|
||||
|
||||
if($MonoInterpreter)
|
||||
{
|
||||
$ExtraBenchmarkDotNetArguments = "--category-exclusion-filter NoInterpreter"
|
||||
}
|
||||
|
||||
if($MonoDotnet -ne "")
|
||||
{
|
||||
$Configurations += " LLVM=$LLVM MonoInterpreter=$MonoInterpreter MonoAOT=$MonoAOT"
|
||||
if($ExtraBenchmarkDotNetArguments -eq "")
|
||||
{
|
||||
#FIX ME: We need to block these tests as they don't run on mono for now
|
||||
$ExtraBenchmarkDotNetArguments = "--exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
|
||||
}
|
||||
else
|
||||
{
|
||||
#FIX ME: We need to block these tests as they don't run on mono for now
|
||||
$ExtraBenchmarkDotNetArguments += " --exclusion-filter *Perf_Image* *Perf_NamedPipeStream*"
|
||||
}
|
||||
}
|
||||
|
||||
# FIX ME: This is a workaround until we get this from the actual pipeline
|
||||
$CommonSetupArguments="--channel master --queue $Queue --build-number $BuildNumber --build-configs $Configurations --architecture $Architecture"
|
||||
$SetupArguments = "--repository https://github.com/$Repository --branch $Branch --get-perf-hash --commit-sha $CommitSha $CommonSetupArguments"
|
||||
|
||||
|
||||
#This grabs the LKG version number of dotnet and passes it to our scripts
|
||||
$VersionJSON = Get-Content global.json | ConvertFrom-Json
|
||||
$DotNetVersion = $VersionJSON.tools.dotnet
|
||||
$SetupArguments = "--dotnet-versions $DotNetVersion $SetupArguments"
|
||||
|
||||
|
||||
if ($RunFromPerformanceRepo) {
|
||||
$SetupArguments = "--perf-hash $CommitSha $CommonSetupArguments"
|
||||
|
||||
robocopy $SourceDirectory $PerformanceDirectory /E /XD $PayloadDirectory $SourceDirectory\artifacts $SourceDirectory\.git
|
||||
}
|
||||
else {
|
||||
git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $PerformanceDirectory
|
||||
}
|
||||
|
||||
if($MonoDotnet -ne "")
|
||||
{
|
||||
$UsingMono = "true"
|
||||
$MonoDotnetPath = (Join-Path $PayloadDirectory "dotnet-mono")
|
||||
Move-Item -Path $MonoDotnet -Destination $MonoDotnetPath
|
||||
}
|
||||
|
||||
if ($UseCoreRun) {
|
||||
$NewCoreRoot = (Join-Path $PayloadDirectory "Core_Root")
|
||||
Move-Item -Path $CoreRootDirectory -Destination $NewCoreRoot
|
||||
}
|
||||
if ($UseBaselineCoreRun) {
|
||||
$NewBaselineCoreRoot = (Join-Path $PayloadDirectory "Baseline_Core_Root")
|
||||
Move-Item -Path $BaselineCoreRootDirectory -Destination $NewBaselineCoreRoot
|
||||
}
|
||||
|
||||
$DocsDir = (Join-Path $PerformanceDirectory "docs")
|
||||
robocopy $DocsDir $WorkItemDirectory
|
||||
|
||||
# Set variables that we will need to have in future steps
|
||||
$ci = $true
|
||||
|
||||
. "$PSScriptRoot\..\pipeline-logging-functions.ps1"
|
||||
|
||||
# Directories
|
||||
Write-PipelineSetVariable -Name 'PayloadDirectory' -Value "$PayloadDirectory" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'PerformanceDirectory' -Value "$PerformanceDirectory" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'WorkItemDirectory' -Value "$WorkItemDirectory" -IsMultiJobVariable $false
|
||||
|
||||
# Script Arguments
|
||||
Write-PipelineSetVariable -Name 'Python' -Value "py -3" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'ExtraBenchmarkDotNetArguments' -Value "$ExtraBenchmarkDotNetArguments" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'SetupArguments' -Value "$SetupArguments" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'PerfLabArguments' -Value "$PerfLabArguments" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'BDNCategories' -Value "$RunCategories" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'TargetCsproj' -Value "$Csproj" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'Kind' -Value "$Kind" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'Architecture' -Value "$Architecture" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'UseCoreRun' -Value "$UseCoreRun" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'UseBaselineCoreRun' -Value "$UseBaselineCoreRun" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'RunFromPerfRepo' -Value "$RunFromPerformanceRepo" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'Compare' -Value "$Compare" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'MonoDotnet' -Value "$UsingMono" -IsMultiJobVariable $false
|
||||
|
||||
# Helix Arguments
|
||||
Write-PipelineSetVariable -Name 'Creator' -Value "$Creator" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'Queue' -Value "$Queue" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name 'HelixSourcePrefix' -Value "$HelixSourcePrefix" -IsMultiJobVariable $false
|
||||
Write-PipelineSetVariable -Name '_BuildConfig' -Value "$Architecture.$Kind.$Framework" -IsMultiJobVariable $false
|
||||
|
||||
exit 0
|
|
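A minimal sketch of invoking the setup script above outside of Azure Pipelines (the file name and all paths are assumptions; parameters default to the BUILD_* environment variables when those exist, so everything needed is passed explicitly here). Note that the script clones dotnet/performance and moves the supplied Core_Root, so it is not side-effect free:

.\performance-setup.ps1 `
    -SourceDirectory 'C:\work\perf-run' `
    -CoreRootDirectory 'C:\work\Core_Root' `
    -Architecture 'x64' `
    -Framework 'net5.0' `
    -Kind 'micro' `
    -Repository 'dotnet/machinelearning' `
    -Branch 'master' `
    -CommitSha '0123456789abcdef0123456789abcdef01234567' `
    -BuildNumber '20201001.1'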
@@ -0,0 +1,289 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source_directory=$BUILD_SOURCESDIRECTORY
|
||||
core_root_directory=
|
||||
baseline_core_root_directory=
|
||||
architecture=x64
|
||||
framework=net5.0
|
||||
compilation_mode=tiered
|
||||
repository=$BUILD_REPOSITORY_NAME
|
||||
branch=$BUILD_SOURCEBRANCH
|
||||
commit_sha=$BUILD_SOURCEVERSION
|
||||
build_number=$BUILD_BUILDNUMBER
|
||||
internal=false
|
||||
compare=false
|
||||
mono_dotnet=
|
||||
kind="micro"
|
||||
llvm=false
|
||||
monointerpreter=false
|
||||
monoaot=false
|
||||
run_categories="Libraries Runtime"
|
||||
csproj="src\benchmarks\micro\MicroBenchmarks.csproj"
|
||||
configurations="CompliationMode=$compilation_mode RunKind=$kind"
|
||||
run_from_perf_repo=false
|
||||
use_core_run=true
|
||||
use_baseline_core_run=true
|
||||
using_mono=false
|
||||
wasm_runtime_loc=
|
||||
using_wasm=false
|
||||
use_latest_dotnet=false
|
||||
|
||||
while (($# > 0)); do
|
||||
lowerI="$(echo $1 | awk '{print tolower($0)}')"
|
||||
case $lowerI in
|
||||
--sourcedirectory)
|
||||
source_directory=$2
|
||||
shift 2
|
||||
;;
|
||||
--corerootdirectory)
|
||||
core_root_directory=$2
|
||||
shift 2
|
||||
;;
|
||||
--baselinecorerootdirectory)
|
||||
baseline_core_root_directory=$2
|
||||
shift 2
|
||||
;;
|
||||
--architecture)
|
||||
architecture=$2
|
||||
shift 2
|
||||
;;
|
||||
--framework)
|
||||
framework=$2
|
||||
shift 2
|
||||
;;
|
||||
--compilationmode)
|
||||
compilation_mode=$2
|
||||
shift 2
|
||||
;;
|
||||
--repository)
|
||||
repository=$2
|
||||
shift 2
|
||||
;;
|
||||
--branch)
|
||||
branch=$2
|
||||
shift 2
|
||||
;;
|
||||
--commitsha)
|
||||
commit_sha=$2
|
||||
shift 2
|
||||
;;
|
||||
--buildnumber)
|
||||
build_number=$2
|
||||
shift 2
|
||||
;;
|
||||
--kind)
|
||||
kind=$2
|
||||
configurations="CompilationMode=$compilation_mode RunKind=$kind"
|
||||
shift 2
|
||||
;;
|
||||
--runcategories)
|
||||
run_categories=$2
|
||||
shift 2
|
||||
;;
|
||||
--csproj)
|
||||
csproj=$2
|
||||
shift 2
|
||||
;;
|
||||
--internal)
|
||||
internal=true
|
||||
shift 1
|
||||
;;
|
||||
--llvm)
|
||||
llvm=true
|
||||
shift 1
|
||||
;;
|
||||
--monointerpreter)
|
||||
monointerpreter=true
|
||||
shift 1
|
||||
;;
|
||||
--monoaot)
|
||||
monoaot=true
|
||||
shift 1
|
||||
;;
|
||||
--monodotnet)
|
||||
mono_dotnet=$2
|
||||
shift 2
|
||||
;;
|
||||
--wasm)
|
||||
wasm_runtime_loc=$2
|
||||
shift 2
|
||||
;;
|
||||
--compare)
|
||||
compare=true
|
||||
shift 1
|
||||
;;
|
||||
--configurations)
|
||||
configurations=$2
|
||||
shift 2
|
||||
;;
|
||||
--latestdotnet)
|
||||
use_latest_dotnet=true
|
||||
shift 1
|
||||
;;
|
||||
*)
|
||||
echo "Common settings:"
|
||||
echo " --corerootdirectory <value> Directory where Core_Root exists, if running perf testing with --corerun"
|
||||
echo " --architecture <value> Architecture of the testing being run"
|
||||
echo " --configurations <value> List of key=value pairs that will be passed to perf testing infrastructure."
|
||||
echo " ex: --configurations \"CompilationMode=Tiered OptimzationLevel=PGO\""
|
||||
echo " --help Print help and exit"
|
||||
echo ""
|
||||
echo "Advanced settings:"
|
||||
echo " --framework <value> The framework to run, if not running in master"
|
||||
echo " --compliationmode <value> The compilation mode if not passing --configurations"
|
||||
echo " --sourcedirectory <value> The directory of the sources. Defaults to env:BUILD_SOURCESDIRECTORY"
|
||||
echo " --repository <value> The name of the repository in the <owner>/<repository name> format. Defaults to env:BUILD_REPOSITORY_NAME"
|
||||
echo " --branch <value> The name of the branch. Defaults to env:BUILD_SOURCEBRANCH"
|
||||
echo " --commitsha <value> The commit sha1 to run against. Defaults to env:BUILD_SOURCEVERSION"
|
||||
echo " --buildnumber <value> The build number currently running. Defaults to env:BUILD_BUILDNUMBER"
|
||||
echo " --csproj The relative path to the benchmark csproj whose tests should be run. Defaults to src\benchmarks\micro\MicroBenchmarks.csproj"
|
||||
echo " --kind <value> Related to csproj. The kind of benchmarks that should be run. Defaults to micro"
|
||||
echo " --runcategories <value> Related to csproj. Categories of benchmarks to run. Defaults to \"coreclr corefx\""
|
||||
echo " --internal If the benchmarks are running as an official job."
|
||||
echo " --monodotnet Pass the path to the mono dotnet for mono performance testing."
|
||||
echo " --wasm Path to the unpacked wasm runtime pack."
|
||||
echo " --latestdotnet --dotnet-versions will not be specified. --dotnet-versions defaults to LKG version in global.json "
|
||||
echo ""
|
||||
exit 0
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ "$repository" == "dotnet/performance" ] || [ "$repository" == "dotnet-performance" ]; then
|
||||
run_from_perf_repo=true
|
||||
fi
|
||||
|
||||
if [ -z "$configurations" ]; then
|
||||
configurations="CompilationMode=$compilation_mode"
|
||||
fi
|
||||
|
||||
if [ -z "$core_root_directory" ]; then
|
||||
use_core_run=false
|
||||
fi
|
||||
|
||||
if [ -z "$baseline_core_root_directory" ]; then
|
||||
use_baseline_core_run=false
|
||||
fi
|
||||
|
||||
payload_directory=$source_directory/Payload
|
||||
performance_directory=$payload_directory/performance
|
||||
workitem_directory=$source_directory/workitem
|
||||
extra_benchmark_dotnet_arguments="--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart --stopOnFirstError true"
|
||||
perflab_arguments=
|
||||
queue=Ubuntu.1804.Amd64.Open
|
||||
creator=$BUILD_DEFINITIONNAME
|
||||
helix_source_prefix="pr"
|
||||
|
||||
if [[ "$compare" == true ]]; then
|
||||
extra_benchmark_dotnet_arguments=
|
||||
perflab_arguments=
|
||||
|
||||
# No open queues for arm64
|
||||
if [[ "$architecture" = "arm64" ]]; then
|
||||
echo "Compare not available for arm64"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
queue=Ubuntu.1804.Amd64.Tiger.Perf.Open
|
||||
fi
|
||||
|
||||
if [[ "$internal" == true ]]; then
|
||||
perflab_arguments="--upload-to-perflab-container"
|
||||
helix_source_prefix="official"
|
||||
creator=
|
||||
extra_benchmark_dotnet_arguments=
|
||||
|
||||
if [[ "$architecture" = "arm64" ]]; then
|
||||
queue=Ubuntu.1804.Arm64.Perf
|
||||
else
|
||||
queue=Ubuntu.1804.Amd64.Tiger.Perf
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "false" ]]; then
|
||||
configurations="$configurations LLVM=$llvm MonoInterpreter=$monointerpreter MonoAOT=$monoaot"
|
||||
extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoMono"
|
||||
fi
|
||||
|
||||
if [[ "$wasm_runtime_loc" != "" ]]; then
|
||||
configurations="CompilationMode=wasm RunKind=$kind"
|
||||
extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoWASM NoMono"
|
||||
fi
|
||||
|
||||
if [[ "$mono_dotnet" != "" ]] && [[ "$monointerpreter" == "true" ]]; then
|
||||
extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --category-exclusion-filter NoInterpreter NoMono"
|
||||
fi
|
||||
|
||||
common_setup_arguments="--channel master --queue $queue --build-number $build_number --build-configs $configurations --architecture $architecture"
|
||||
setup_arguments="--repository https://github.com/$repository --branch $branch --get-perf-hash --commit-sha $commit_sha $common_setup_arguments"
|
||||
|
||||
|
||||
if [[ "$use_latest_dotnet" = false ]]; then
|
||||
# Get the tools section from the global.json.
|
||||
# This grabs the LKG version number of dotnet and passes it to our scripts
|
||||
dotnet_version=`cat global.json | python3 -c 'import json,sys;obj=json.load(sys.stdin);print(obj["tools"]["dotnet"])'`
|
||||
setup_arguments="--dotnet-versions $dotnet_version $setup_arguments"
|
||||
fi
|
||||
|
||||
if [[ "$run_from_perf_repo" = true ]]; then
|
||||
payload_directory=
|
||||
workitem_directory=$source_directory
|
||||
performance_directory=$workitem_directory
|
||||
setup_arguments="--perf-hash $commit_sha $common_setup_arguments"
|
||||
else
|
||||
git clone --branch master --depth 1 --quiet https://github.com/dotnet/performance $performance_directory
|
||||
|
||||
docs_directory=$performance_directory/docs
|
||||
mv $docs_directory $workitem_directory
|
||||
fi
|
||||
|
||||
if [[ "$wasm_runtime_loc" != "" ]]; then
|
||||
using_wasm=true
|
||||
wasm_dotnet_path=$payload_directory/dotnet-wasm
|
||||
mv $wasm_runtime_loc $wasm_dotnet_path
|
||||
extra_benchmark_dotnet_arguments="$extra_benchmark_dotnet_arguments --wasmMainJS \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm/runtime-test.js --wasmEngine /home/helixbot/.jsvu/v8 --customRuntimePack \$HELIX_CORRELATION_PAYLOAD/dotnet-wasm"
|
||||
fi
|
||||
|
||||
if [[ "$mono_dotnet" != "" ]]; then
|
||||
using_mono=true
|
||||
mono_dotnet_path=$payload_directory/dotnet-mono
|
||||
mv $mono_dotnet $mono_dotnet_path
|
||||
fi
|
||||
|
||||
if [[ "$use_core_run" = true ]]; then
|
||||
new_core_root=$payload_directory/Core_Root
|
||||
mv $core_root_directory $new_core_root
|
||||
fi
|
||||
|
||||
if [[ "$use_baseline_core_run" = true ]]; then
|
||||
new_baseline_core_root=$payload_directory/Baseline_Core_Root
|
||||
mv $baseline_core_root_directory $new_baseline_core_root
|
||||
fi
|
||||
|
||||
ci=true
|
||||
|
||||
_script_dir=$(pwd)/eng/common
|
||||
. "$_script_dir/pipeline-logging-functions.sh"
|
||||
|
||||
# Make sure all of our variables are available for future steps
|
||||
Write-PipelineSetVariable -name "UseCoreRun" -value "$use_core_run" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "UseBaselineCoreRun" -value "$use_baseline_core_run" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "Architecture" -value "$architecture" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "PayloadDirectory" -value "$payload_directory" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "PerformanceDirectory" -value "$performance_directory" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "WorkItemDirectory" -value "$workitem_directory" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "Queue" -value "$queue" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "SetupArguments" -value "$setup_arguments" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "Python" -value "python3" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "PerfLabArguments" -value "$perflab_arguments" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "ExtraBenchmarkDotNetArguments" -value "$extra_benchmark_dotnet_arguments" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "BDNCategories" -value "$run_categories" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "TargetCsproj" -value "$csproj" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "RunFromPerfRepo" -value "$run_from_perf_repo" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "Creator" -value "$creator" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "HelixSourcePrefix" -value "$helix_source_prefix" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "Kind" -value "$kind" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "_BuildConfig" -value "$architecture.$kind.$framework" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "Compare" -value "$compare" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "MonoDotnet" -value "$using_mono" -is_multi_job_variable false
|
||||
Write-PipelineSetVariable -name "WasmDotnet" -value "$using_wasm" -is_multi_job_variable false
|
|
@@ -0,0 +1,242 @@
|
|||
# Source for this file was taken from https://github.com/microsoft/azure-pipelines-task-lib/blob/11c9439d4af17e6475d9fe058e6b2e03914d17e6/powershell/VstsTaskSdk/LoggingCommandFunctions.ps1 and modified.
|
||||
|
||||
# NOTE: You should not be calling these methods directly as they are likely to change. Instead you should be calling the Write-Pipeline* functions defined in tools.ps1
|
||||
|
||||
$script:loggingCommandPrefix = '##vso['
|
||||
$script:loggingCommandEscapeMappings = @( # TODO: WHAT ABOUT "="? WHAT ABOUT "%"?
|
||||
New-Object psobject -Property @{ Token = ';' ; Replacement = '%3B' }
|
||||
New-Object psobject -Property @{ Token = "`r" ; Replacement = '%0D' }
|
||||
New-Object psobject -Property @{ Token = "`n" ; Replacement = '%0A' }
|
||||
New-Object psobject -Property @{ Token = "]" ; Replacement = '%5D' }
|
||||
)
|
||||
# TODO: BUG: Escape % ???
|
||||
# TODO: Add test to verify don't need to escape "=".
|
||||
|
||||
# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
|
||||
function Write-PipelineTelemetryError {
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$Category,
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$Message,
|
||||
[Parameter(Mandatory = $false)]
|
||||
[string]$Type = 'error',
|
||||
[string]$ErrCode,
|
||||
[string]$SourcePath,
|
||||
[string]$LineNumber,
|
||||
[string]$ColumnNumber,
|
||||
[switch]$AsOutput,
|
||||
[switch]$Force)
|
||||
|
||||
$PSBoundParameters.Remove('Category') | Out-Null
|
||||
|
||||
if($Force -Or ((Test-Path variable:ci) -And $ci)) {
|
||||
$Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message"
|
||||
}
|
||||
$PSBoundParameters.Remove('Message') | Out-Null
|
||||
$PSBoundParameters.Add('Message', $Message)
|
||||
Write-PipelineTaskError @PSBoundParameters
|
||||
}
|
||||
|
||||
# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
|
||||
function Write-PipelineTaskError {
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$Message,
|
||||
[Parameter(Mandatory = $false)]
|
||||
[string]$Type = 'error',
|
||||
[string]$ErrCode,
|
||||
[string]$SourcePath,
|
||||
[string]$LineNumber,
|
||||
[string]$ColumnNumber,
|
||||
[switch]$AsOutput,
|
||||
[switch]$Force
|
||||
)
|
||||
|
||||
if(!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) {
|
||||
if($Type -eq 'error') {
|
||||
Write-Host $Message -ForegroundColor Red
|
||||
return
|
||||
}
|
||||
elseif ($Type -eq 'warning') {
|
||||
Write-Host $Message -ForegroundColor Yellow
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if(($Type -ne 'error') -and ($Type -ne 'warning')) {
|
||||
Write-Host $Message
|
||||
return
|
||||
}
|
||||
$PSBoundParameters.Remove('Force') | Out-Null
|
||||
if(-not $PSBoundParameters.ContainsKey('Type')) {
|
||||
$PSBoundParameters.Add('Type', 'error')
|
||||
}
|
||||
Write-LogIssue @PSBoundParameters
|
||||
}
|
||||
|
||||
function Write-PipelineSetVariable {
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$Name,
|
||||
[string]$Value,
|
||||
[switch]$Secret,
|
||||
[switch]$AsOutput,
|
||||
[bool]$IsMultiJobVariable=$true)
|
||||
|
||||
if((Test-Path variable:ci) -And $ci) {
|
||||
Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties @{
|
||||
'variable' = $Name
|
||||
'isSecret' = $Secret
|
||||
'isOutput' = $IsMultiJobVariable
|
||||
} -AsOutput:$AsOutput
|
||||
}
|
||||
}
|
||||
|
||||
function Write-PipelinePrependPath {
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory=$true)]
|
||||
[string]$Path,
|
||||
[switch]$AsOutput)
|
||||
|
||||
if((Test-Path variable:ci) -And $ci) {
|
||||
Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput
|
||||
}
|
||||
}
|
||||
|
||||
<########################################
|
||||
# Private functions.
|
||||
########################################>
|
||||
function Format-LoggingCommandData {
|
||||
[CmdletBinding()]
|
||||
param([string]$Value, [switch]$Reverse)
|
||||
|
||||
if (!$Value) {
|
||||
return ''
|
||||
}
|
||||
|
||||
if (!$Reverse) {
|
||||
foreach ($mapping in $script:loggingCommandEscapeMappings) {
|
||||
$Value = $Value.Replace($mapping.Token, $mapping.Replacement)
|
||||
}
|
||||
} else {
|
||||
for ($i = $script:loggingCommandEscapeMappings.Length - 1 ; $i -ge 0 ; $i--) {
|
||||
$mapping = $script:loggingCommandEscapeMappings[$i]
|
||||
$Value = $Value.Replace($mapping.Replacement, $mapping.Token)
|
||||
}
|
||||
}
|
||||
|
||||
return $Value
|
||||
}
|
||||
|
||||
function Format-LoggingCommand {
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$Area,
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$Event,
|
||||
[string]$Data,
|
||||
[hashtable]$Properties)
|
||||
|
||||
# Append the preamble.
|
||||
[System.Text.StringBuilder]$sb = New-Object -TypeName System.Text.StringBuilder
|
||||
$null = $sb.Append($script:loggingCommandPrefix).Append($Area).Append('.').Append($Event)
|
||||
|
||||
# Append the properties.
|
||||
if ($Properties) {
|
||||
$first = $true
|
||||
foreach ($key in $Properties.Keys) {
|
||||
[string]$value = Format-LoggingCommandData $Properties[$key]
|
||||
if ($value) {
|
||||
if ($first) {
|
||||
$null = $sb.Append(' ')
|
||||
$first = $false
|
||||
} else {
|
||||
$null = $sb.Append(';')
|
||||
}
|
||||
|
||||
$null = $sb.Append("$key=$value")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Append the tail and output the value.
|
||||
$Data = Format-LoggingCommandData $Data
|
||||
$sb.Append(']').Append($Data).ToString()
|
||||
}
|
||||
|
||||
function Write-LoggingCommand {
|
||||
[CmdletBinding(DefaultParameterSetName = 'Parameters')]
|
||||
param(
|
||||
[Parameter(Mandatory = $true, ParameterSetName = 'Parameters')]
|
||||
[string]$Area,
|
||||
[Parameter(Mandatory = $true, ParameterSetName = 'Parameters')]
|
||||
[string]$Event,
|
||||
[Parameter(ParameterSetName = 'Parameters')]
|
||||
[string]$Data,
|
||||
[Parameter(ParameterSetName = 'Parameters')]
|
||||
[hashtable]$Properties,
|
||||
[Parameter(Mandatory = $true, ParameterSetName = 'Object')]
|
||||
$Command,
|
||||
[switch]$AsOutput)
|
||||
|
||||
if ($PSCmdlet.ParameterSetName -eq 'Object') {
|
||||
Write-LoggingCommand -Area $Command.Area -Event $Command.Event -Data $Command.Data -Properties $Command.Properties -AsOutput:$AsOutput
|
||||
return
|
||||
}
|
||||
|
||||
$command = Format-LoggingCommand -Area $Area -Event $Event -Data $Data -Properties $Properties
|
||||
if ($AsOutput) {
|
||||
$command
|
||||
} else {
|
||||
Write-Host $command
|
||||
}
|
||||
}
|
||||
|
||||
function Write-LogIssue {
|
||||
[CmdletBinding()]
|
||||
param(
|
||||
[ValidateSet('warning', 'error')]
|
||||
[Parameter(Mandatory = $true)]
|
||||
[string]$Type,
|
||||
[string]$Message,
|
||||
[string]$ErrCode,
|
||||
[string]$SourcePath,
|
||||
[string]$LineNumber,
|
||||
[string]$ColumnNumber,
|
||||
[switch]$AsOutput)
|
||||
|
||||
$command = Format-LoggingCommand -Area 'task' -Event 'logissue' -Data $Message -Properties @{
|
||||
'type' = $Type
|
||||
'code' = $ErrCode
|
||||
'sourcepath' = $SourcePath
|
||||
'linenumber' = $LineNumber
|
||||
'columnnumber' = $ColumnNumber
|
||||
}
|
||||
if ($AsOutput) {
|
||||
return $command
|
||||
}
|
||||
|
||||
if ($Type -eq 'error') {
|
||||
$foregroundColor = $host.PrivateData.ErrorForegroundColor
|
||||
$backgroundColor = $host.PrivateData.ErrorBackgroundColor
|
||||
if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
|
||||
$foregroundColor = [System.ConsoleColor]::Red
|
||||
$backgroundColor = [System.ConsoleColor]::Black
|
||||
}
|
||||
} else {
|
||||
$foregroundColor = $host.PrivateData.WarningForegroundColor
|
||||
$backgroundColor = $host.PrivateData.WarningBackgroundColor
|
||||
if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
|
||||
$foregroundColor = [System.ConsoleColor]::Yellow
|
||||
$backgroundColor = [System.ConsoleColor]::Black
|
||||
}
|
||||
}
|
||||
|
||||
Write-Host $command -ForegroundColor $foregroundColor -BackgroundColor $backgroundColor
|
||||
}
|
|
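As a quick illustration of the logging-command format built above: with $ci set, the call below writes a ##vso setvariable command to the host (the order of the properties inside the brackets can vary because they are kept in a hashtable):

$ci = $true
Write-PipelineSetVariable -Name 'Queue' -Value 'Windows.10.Amd64.ClientRS5.Open' -IsMultiJobVariable $false
# emits something like:
# ##vso[task.setvariable variable=Queue;isSecret=False;isOutput=False]Windows.10.Amd64.ClientRS5.Open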
@@ -0,0 +1,182 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
function Write-PipelineTelemetryError {
|
||||
local telemetry_category=''
|
||||
local force=false
|
||||
local function_args=()
|
||||
local message=''
|
||||
while [[ $# -gt 0 ]]; do
|
||||
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
|
||||
case "$opt" in
|
||||
-category|-c)
|
||||
telemetry_category=$2
|
||||
shift
|
||||
;;
|
||||
-force|-f)
|
||||
force=true
|
||||
;;
|
||||
-*)
|
||||
function_args+=("$1 $2")
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
message=$*
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
if [[ $force != true ]] && [[ "$ci" != true ]]; then
|
||||
echo "$message" >&2
|
||||
return
|
||||
fi
|
||||
|
||||
if [[ $force == true ]]; then
|
||||
function_args+=("-force")
|
||||
fi
|
||||
message="(NETCORE_ENGINEERING_TELEMETRY=$telemetry_category) $message"
|
||||
function_args+=("$message")
|
||||
Write-PipelineTaskError ${function_args[@]}
|
||||
}
|
||||
|
||||
function Write-PipelineTaskError {
|
||||
local message_type="error"
|
||||
local sourcepath=''
|
||||
local linenumber=''
|
||||
local columnnumber=''
|
||||
local error_code=''
|
||||
local force=false
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
|
||||
case "$opt" in
|
||||
-type|-t)
|
||||
message_type=$2
|
||||
shift
|
||||
;;
|
||||
-sourcepath|-s)
|
||||
sourcepath=$2
|
||||
shift
|
||||
;;
|
||||
-linenumber|-ln)
|
||||
linenumber=$2
|
||||
shift
|
||||
;;
|
||||
-columnnumber|-cn)
|
||||
columnnumber=$2
|
||||
shift
|
||||
;;
|
||||
-errcode|-e)
|
||||
error_code=$2
|
||||
shift
|
||||
;;
|
||||
-force|-f)
|
||||
force=true
|
||||
;;
|
||||
*)
|
||||
break
|
||||
;;
|
||||
esac
|
||||
|
||||
shift
|
||||
done
|
||||
|
||||
if [[ $force != true ]] && [[ "$ci" != true ]]; then
|
||||
echo "$@" >&2
|
||||
return
|
||||
fi
|
||||
|
||||
local message="##vso[task.logissue"
|
||||
|
||||
message="$message type=$message_type"
|
||||
|
||||
if [ -n "$sourcepath" ]; then
|
||||
message="$message;sourcepath=$sourcepath"
|
||||
fi
|
||||
|
||||
if [ -n "$linenumber" ]; then
|
||||
message="$message;linenumber=$linenumber"
|
||||
fi
|
||||
|
||||
if [ -n "$columnnumber" ]; then
|
||||
message="$message;columnnumber=$columnnumber"
|
||||
fi
|
||||
|
||||
if [ -n "$error_code" ]; then
|
||||
message="$message;code=$error_code"
|
||||
fi
|
||||
|
||||
message="$message]$*"
|
||||
echo "$message"
|
||||
}
|
||||
|
||||
function Write-PipelineSetVariable {
|
||||
if [[ "$ci" != true ]]; then
|
||||
return
|
||||
fi
|
||||
|
||||
local name=''
|
||||
local value=''
|
||||
local secret=false
|
||||
local as_output=false
|
||||
local is_multi_job_variable=true
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
|
||||
case "$opt" in
|
||||
-name|-n)
|
||||
name=$2
|
||||
shift
|
||||
;;
|
||||
-value|-v)
|
||||
value=$2
|
||||
shift
|
||||
;;
|
||||
-secret|-s)
|
||||
secret=true
|
||||
;;
|
||||
-as_output|-a)
|
||||
as_output=true
|
||||
;;
|
||||
-is_multi_job_variable|-i)
|
||||
is_multi_job_variable=$2
|
||||
shift
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
value=${value/;/%3B}
|
||||
value=${value/\\r/%0D}
|
||||
value=${value/\\n/%0A}
|
||||
value=${value/]/%5D}
|
||||
|
||||
local message="##vso[task.setvariable variable=$name;isSecret=$secret;isOutput=$is_multi_job_variable]$value"
|
||||
|
||||
if [[ "$as_output" == true ]]; then
|
||||
$message
|
||||
else
|
||||
echo "$message"
|
||||
fi
|
||||
}
|
||||
|
||||
function Write-PipelinePrependPath {
|
||||
local prepend_path=''
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
opt="$(echo "${1/#--/-}" | awk '{print tolower($0)}')"
|
||||
case "$opt" in
|
||||
-path|-p)
|
||||
prepend_path=$2
|
||||
shift
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
export PATH="$prepend_path:$PATH"
|
||||
|
||||
if [[ "$ci" == true ]]; then
|
||||
echo "##vso[task.prependpath]$prepend_path"
|
||||
fi
|
||||
}
|
|
@@ -0,0 +1,48 @@
|
|||
param(
|
||||
[Parameter(Mandatory=$true)][int] $BuildId,
|
||||
[Parameter(Mandatory=$true)][int] $ChannelId,
|
||||
[Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
|
||||
[Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
|
||||
[Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
|
||||
)
|
||||
|
||||
try {
|
||||
. $PSScriptRoot\post-build-utils.ps1
|
||||
|
||||
# Check that the channel we are going to promote the build to exists
|
||||
$channelInfo = Get-MaestroChannel -ChannelId $ChannelId
|
||||
|
||||
if (!$channelInfo) {
|
||||
Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Channel with BAR ID $ChannelId was not found in BAR!"
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
# Get info about which channel(s) the build has already been promoted to
|
||||
$buildInfo = Get-MaestroBuild -BuildId $BuildId
|
||||
|
||||
if (!$buildInfo) {
|
||||
Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Build with BAR ID $BuildId was not found in BAR!"
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
# Find whether the build is already assigned to the channel or not
|
||||
if ($buildInfo.channels) {
|
||||
foreach ($channel in $buildInfo.channels) {
|
||||
if ($channel.Id -eq $ChannelId) {
|
||||
Write-Host "The build with BAR ID $BuildId is already on channel $ChannelId!"
|
||||
ExitWithExitCode 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Write-Host "Promoting build '$BuildId' to channel '$ChannelId'."
|
||||
|
||||
Assign-BuildToChannel -BuildId $BuildId -ChannelId $ChannelId
|
||||
|
||||
Write-Host 'done.'
|
||||
}
|
||||
catch {
|
||||
Write-Host $_
|
||||
Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to promote build '$BuildId' to channel '$ChannelId'"
|
||||
ExitWithExitCode 1
|
||||
}
|
|
@@ -0,0 +1,40 @@
|
|||
param(
|
||||
[Parameter(Mandatory=$true)][string] $PromoteToChannels, # List of channels that the build should be promoted to
|
||||
[Parameter(Mandatory=$true)][array] $AvailableChannelIds # List of channel IDs available in the YAML implementation
|
||||
)
|
||||
|
||||
try {
|
||||
. $PSScriptRoot\post-build-utils.ps1
|
||||
|
||||
if ($PromoteToChannels -eq "") {
|
||||
Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
|
||||
ExitWithExitCode 0
|
||||
}
|
||||
|
||||
# Check that every channel that Maestro told to promote the build to
|
||||
# is available in YAML
|
||||
$PromoteToChannelsIds = $PromoteToChannels -split "\D" | Where-Object { $_ }
|
||||
|
||||
$hasErrors = $false
|
||||
|
||||
foreach ($id in $PromoteToChannelsIds) {
|
||||
if (($id -ne 0) -and ($id -notin $AvailableChannelIds)) {
|
||||
Write-PipelineTaskError -Message "Channel $id is not present in the post-build YAML configuration! This is an error scenario. Please contact @dnceng."
|
||||
$hasErrors = $true
|
||||
}
|
||||
}
|
||||
|
||||
# The `Write-PipelineTaskError` doesn't error the script and we might report several errors
|
||||
# in the previous lines. The check below makes sure that we return an error state from the
|
||||
# script if we reported any validation error
|
||||
if ($hasErrors) {
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
Write-Host 'done.'
|
||||
}
|
||||
catch {
|
||||
Write-Host $_
|
||||
Write-PipelineTelemetryError -Category 'CheckChannelConsistency' -Message "There was an error while trying to check consistency of Maestro default channels for the build and post-build YAML configuration."
|
||||
ExitWithExitCode 1
|
||||
}
|
|
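The -split "\D" above breaks the channel list on any non-digit character, and Where-Object { $_ } drops the empty tokens it leaves behind, so a value such as the hypothetical one below parses as follows:

$PromoteToChannels = '529, 921'                       # hypothetical channel IDs
$PromoteToChannels -split "\D" | Where-Object { $_ }  # -> 529 and 921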
@@ -0,0 +1,24 @@
|
|||
# This script validates NuGet package metadata information using this
|
||||
# tool: https://github.com/NuGet/NuGetGallery/tree/jver-verify/src/VerifyMicrosoftPackage
|
||||
|
||||
param(
|
||||
[Parameter(Mandatory=$true)][string] $PackagesPath, # Path to where the packages to be validated are
|
||||
[Parameter(Mandatory=$true)][string] $ToolDestinationPath # Where the validation tool should be downloaded to
|
||||
)
|
||||
|
||||
try {
|
||||
. $PSScriptRoot\post-build-utils.ps1
|
||||
|
||||
$url = 'https://raw.githubusercontent.com/NuGet/NuGetGallery/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1'
|
||||
|
||||
New-Item -ItemType 'directory' -Path ${ToolDestinationPath} -Force
|
||||
|
||||
Invoke-WebRequest $url -OutFile ${ToolDestinationPath}\verify.ps1
|
||||
|
||||
& ${ToolDestinationPath}\verify.ps1 ${PackagesPath}\*.nupkg
|
||||
}
|
||||
catch {
|
||||
Write-Host $_.ScriptStackTrace
|
||||
Write-PipelineTelemetryError -Category 'NuGetValidation' -Message $_
|
||||
ExitWithExitCode 1
|
||||
}
|
|
@@ -0,0 +1,91 @@
|
|||
# Most of the functions in this file require the variables `MaestroApiEndPoint`,
|
||||
# `MaestroApiVersion` and `MaestroApiAccessToken` to be globally available.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
Set-StrictMode -Version 2.0
|
||||
|
||||
# `tools.ps1` checks $ci to perform some actions. Since the post-build
|
||||
# scripts don't necessarily execute in the same agent that runs the
|
||||
# build.ps1/sh script this variable isn't automatically set.
|
||||
$ci = $true
|
||||
$disableConfigureToolsetImport = $true
|
||||
. $PSScriptRoot\..\tools.ps1
|
||||
|
||||
function Create-MaestroApiRequestHeaders([string]$ContentType = 'application/json') {
|
||||
Validate-MaestroVars
|
||||
|
||||
$headers = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
|
||||
$headers.Add('Accept', $ContentType)
|
||||
$headers.Add('Authorization',"Bearer $MaestroApiAccessToken")
|
||||
return $headers
|
||||
}
|
||||
|
||||
function Get-MaestroChannel([int]$ChannelId) {
|
||||
Validate-MaestroVars
|
||||
|
||||
$apiHeaders = Create-MaestroApiRequestHeaders
|
||||
$apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}?api-version=$MaestroApiVersion"
|
||||
|
||||
$result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
|
||||
return $result
|
||||
}
|
||||
|
||||
function Get-MaestroBuild([int]$BuildId) {
|
||||
Validate-MaestroVars
|
||||
|
||||
$apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
|
||||
$apiEndpoint = "$MaestroApiEndPoint/api/builds/${BuildId}?api-version=$MaestroApiVersion"
|
||||
|
||||
$result = try { return Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
|
||||
return $result
|
||||
}
|
||||
|
||||
function Get-MaestroSubscriptions([string]$SourceRepository, [int]$ChannelId) {
|
||||
Validate-MaestroVars
|
||||
|
||||
$SourceRepository = [System.Web.HttpUtility]::UrlEncode($SourceRepository)
|
||||
$apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
|
||||
$apiEndpoint = "$MaestroApiEndPoint/api/subscriptions?sourceRepository=$SourceRepository&channelId=$ChannelId&api-version=$MaestroApiVersion"
|
||||
|
||||
$result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
|
||||
return $result
|
||||
}
|
||||
|
||||
function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) {
|
||||
Validate-MaestroVars
|
||||
|
||||
$apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
|
||||
$apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion"
|
||||
Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null
|
||||
}
|
||||
|
||||
function Trigger-Subscription([string]$SubscriptionId) {
|
||||
Validate-MaestroVars
|
||||
|
||||
$apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken
|
||||
$apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion"
|
||||
Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null
|
||||
}
|
||||
|
||||
function Validate-MaestroVars {
|
||||
try {
|
||||
Get-Variable MaestroApiEndPoint -Scope Global | Out-Null
|
||||
Get-Variable MaestroApiVersion -Scope Global | Out-Null
|
||||
Get-Variable MaestroApiAccessToken -Scope Global | Out-Null
|
||||
|
||||
if (!($MaestroApiEndPoint -Match '^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$')) {
|
||||
Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'"
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
if (!($MaestroApiVersion -Match '^[0-9]{4}-[0-9]{2}-[0-9]{2}$')) {
|
||||
Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'"
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
}
|
||||
catch {
|
||||
Write-PipelineTelemetryError -Category 'MaestroVars' -Message 'Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script.'
|
||||
Write-Host $_
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
}
|
|
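A minimal sketch of exercising the helpers above from an interactive prompt; the three Maestro variables are normally supplied by the pipeline, the file name is assumed from the Arcade layout, and the values are placeholders:

$MaestroApiEndPoint    = 'https://maestro-prod.westus2.cloudapp.azure.com'
$MaestroApiVersion     = '2019-01-16'
$MaestroApiAccessToken = '<token>'              # placeholder - never hard-code a real token

. .\post-build-utils.ps1                        # also dot-sources tools.ps1, as shown above
$build = Get-MaestroBuild -BuildId 12345        # hypothetical BAR build id
$build.channels | ForEach-Object { $_.Id }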
@@ -0,0 +1,74 @@
|
|||
param(
|
||||
[Parameter(Mandatory=$true)][int] $BuildId,
|
||||
[Parameter(Mandatory=$true)][int] $PublishingInfraVersion,
|
||||
[Parameter(Mandatory=$true)][string] $AzdoToken,
|
||||
[Parameter(Mandatory=$true)][string] $MaestroToken,
|
||||
[Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
|
||||
[Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
|
||||
[Parameter(Mandatory=$false)][string] $EnableSourceLinkValidation,
|
||||
[Parameter(Mandatory=$false)][string] $EnableSigningValidation,
|
||||
[Parameter(Mandatory=$false)][string] $EnableNugetValidation,
|
||||
[Parameter(Mandatory=$false)][string] $PublishInstallersAndChecksums,
|
||||
[Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
|
||||
[Parameter(Mandatory=$false)][string] $SigningValidationAdditionalParameters
|
||||
)
|
||||
|
||||
try {
|
||||
. $PSScriptRoot\post-build-utils.ps1
|
||||
# Hard-coding the darc version until the next arcade-services rollout, because this version has the required API changes for darc add-build-to-channel
|
||||
$darc = Get-Darc "1.1.0-beta.20418.1"
|
||||
|
||||
$optionalParams = [System.Collections.ArrayList]::new()
|
||||
|
||||
if ("" -ne $ArtifactsPublishingAdditionalParameters) {
|
||||
$optionalParams.Add("artifact-publishing-parameters") | Out-Null
|
||||
$optionalParams.Add($ArtifactsPublishingAdditionalParameters) | Out-Null
|
||||
}
|
||||
|
||||
if ("false" -eq $WaitPublishingFinish) {
|
||||
$optionalParams.Add("--no-wait") | Out-Null
|
||||
}
|
||||
|
||||
if ("false" -ne $PublishInstallersAndChecksums) {
|
||||
$optionalParams.Add("--publish-installers-and-checksums") | Out-Null
|
||||
}
|
||||
|
||||
if ("true" -eq $EnableNugetValidation) {
|
||||
$optionalParams.Add("--validate-nuget") | Out-Null
|
||||
}
|
||||
|
||||
if ("true" -eq $EnableSourceLinkValidation) {
|
||||
$optionalParams.Add("--validate-sourcelinkchecksums") | Out-Null
|
||||
}
|
||||
|
||||
if ("true" -eq $EnableSigningValidation) {
|
||||
$optionalParams.Add("--validate-signingchecksums") | Out-Null
|
||||
|
||||
if ("" -ne $SigningValidationAdditionalParameters) {
|
||||
$optionalParams.Add("--signing-validation-parameters") | Out-Null
|
||||
$optionalParams.Add($SigningValidationAdditionalParameters) | Out-Null
|
||||
}
|
||||
}
|
||||
|
||||
& $darc add-build-to-channel `
|
||||
--id $buildId `
|
||||
--publishing-infra-version $PublishingInfraVersion `
|
||||
--default-channels `
|
||||
--source-branch master `
|
||||
--azdev-pat $AzdoToken `
|
||||
--bar-uri $MaestroApiEndPoint `
|
||||
--password $MaestroToken `
|
||||
@optionalParams
|
||||
|
||||
if ($LastExitCode -ne 0) {
|
||||
Write-Host "Problems using Darc to promote build ${buildId} to default channels. Stopping execution..."
|
||||
exit 1
|
||||
}
|
||||
|
||||
Write-Host 'done.'
|
||||
}
|
||||
catch {
|
||||
Write-Host $_
|
||||
Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels."
|
||||
ExitWithExitCode 1
|
||||
}
|
|
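One detail worth calling out in the invocation above: @optionalParams splats the ArrayList, so every element added with Add() becomes its own argument on the darc command line. With hypothetical inputs:

# $WaitPublishingFinish = 'false', $PublishInstallersAndChecksums = 'true', $EnableNugetValidation = 'true'
# => $optionalParams = '--no-wait', '--publish-installers-and-checksums', '--validate-nuget'
# => the darc call ends with: ... --password <MaestroToken> --no-wait --publish-installers-and-checksums --validate-nuget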
@@ -0,0 +1,276 @@
|
|||
param(
|
||||
[Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored
|
||||
[Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
|
||||
[Parameter(Mandatory=$false)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade
|
||||
[Parameter(Mandatory=$false)][string] $GHCommit, # GitHub commit SHA used to build the packages
|
||||
[Parameter(Mandatory=$true)][string] $SourcelinkCliVersion # Version of SourceLink CLI to use
|
||||
)
|
||||
|
||||
. $PSScriptRoot\post-build-utils.ps1
|
||||
|
||||
# Cache/HashMap (File -> Exist flag) used to consult whether a file exist
|
||||
# in the repository at a specific commit point. This is populated by inserting
|
||||
# all files present in the repo at a specific commit point.
|
||||
$global:RepoFiles = @{}
|
||||
|
||||
# Maximum number of jobs to run in parallel
|
||||
$MaxParallelJobs = 6
|
||||
|
||||
# Wait time between check for system load
|
||||
$SecondsBetweenLoadChecks = 10
|
||||
|
||||
$ValidatePackage = {
|
||||
param(
|
||||
[string] $PackagePath # Full path to a Symbols.NuGet package
|
||||
)
|
||||
|
||||
. $using:PSScriptRoot\..\tools.ps1
|
||||
|
||||
# Ensure input file exist
|
||||
if (!(Test-Path $PackagePath)) {
|
||||
Write-Host "Input file does not exist: $PackagePath"
|
||||
return 1
|
||||
}
|
||||
|
||||
# Extensions for which we'll look for SourceLink information
|
||||
# For now we'll only care about Portable & Embedded PDBs
|
||||
$RelevantExtensions = @('.dll', '.exe', '.pdb')
|
||||
|
||||
Write-Host -NoNewLine 'Validating ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
|
||||
|
||||
$PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
|
||||
$ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
|
||||
$FailedFiles = 0
|
||||
|
||||
Add-Type -AssemblyName System.IO.Compression.FileSystem
|
||||
|
||||
[System.IO.Directory]::CreateDirectory($ExtractPath) | Out-Null
|
||||
|
||||
try {
|
||||
$zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
|
||||
|
||||
$zip.Entries |
|
||||
Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
|
||||
ForEach-Object {
|
||||
$FileName = $_.FullName
|
||||
$Extension = [System.IO.Path]::GetExtension($_.Name)
|
||||
$FakeName = -Join((New-Guid), $Extension)
|
||||
$TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName
|
||||
|
||||
# We ignore resource DLLs
|
||||
if ($FileName.EndsWith('.resources.dll')) {
|
||||
return
|
||||
}
|
||||
|
||||
[System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
|
||||
|
||||
$ValidateFile = {
|
||||
param(
|
||||
[string] $FullPath, # Full path to the module that has to be checked
|
||||
[string] $RealPath,
|
||||
[ref] $FailedFiles
|
||||
)
|
||||
|
||||
$sourcelinkExe = "$env:USERPROFILE\.dotnet\tools"
|
||||
$sourcelinkExe = Resolve-Path "$sourcelinkExe\sourcelink.exe"
|
||||
$SourceLinkInfos = & $sourcelinkExe print-urls $FullPath | Out-String
|
||||
|
||||
if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) {
|
||||
$NumFailedLinks = 0
|
||||
|
||||
# We only care about Http addresses
|
||||
$Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches
|
||||
|
||||
if ($Matches.Count -ne 0) {
|
||||
$Matches.Value |
|
||||
ForEach-Object {
|
||||
$Link = $_
|
||||
$CommitUrl = "https://raw.githubusercontent.com/${using:GHRepoName}/${using:GHCommit}/"
|
||||
|
||||
$FilePath = $Link.Replace($CommitUrl, "")
|
||||
$Status = 200
|
||||
$Cache = $using:RepoFiles
|
||||
|
||||
if ( !($Cache.ContainsKey($FilePath)) ) {
|
||||
try {
|
||||
$Uri = $Link -as [System.URI]
|
||||
|
||||
# Only GitHub links are valid
|
||||
if ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
|
||||
$Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
|
||||
}
|
||||
else {
|
||||
$Status = 0
|
||||
}
|
||||
}
|
||||
catch {
|
||||
write-host $_
|
||||
$Status = 0
|
||||
}
|
||||
}
|
||||
|
||||
if ($Status -ne 200) {
|
||||
if ($NumFailedLinks -eq 0) {
|
||||
if ($FailedFiles.Value -eq 0) {
|
||||
Write-Host
|
||||
}
|
||||
|
||||
Write-Host "`tFile $RealPath has broken links:"
|
||||
}
|
||||
|
||||
Write-Host "`t`tFailed to retrieve $Link"
|
||||
|
||||
$NumFailedLinks++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ($NumFailedLinks -ne 0) {
|
||||
$FailedFiles.value++
|
||||
$global:LASTEXITCODE = 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
&$ValidateFile $TargetFile $FileName ([ref]$FailedFiles)
|
||||
}
|
||||
}
|
||||
catch {
|
||||
|
||||
}
|
||||
finally {
|
||||
$zip.Dispose()
|
||||
}
|
||||
|
||||
if ($FailedFiles -eq 0) {
|
||||
Write-Host 'Passed.'
|
||||
return [pscustomobject]@{
|
||||
result = 0
|
||||
packagePath = $PackagePath
|
||||
}
|
||||
}
|
||||
else {
|
||||
Write-PipelineTelemetryError -Category 'SourceLink' -Message "$PackagePath has broken SourceLink links."
|
||||
return [pscustomobject]@{
|
||||
result = 1
|
||||
packagePath = $PackagePath
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function CheckJobResult(
|
||||
$result,
|
||||
$packagePath,
|
||||
[ref]$ValidationFailures) {
|
||||
if ($jobResult.result -ne '0') {
|
||||
Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links."
|
||||
$ValidationFailures.Value++
|
||||
}
|
||||
}
|
||||
|
||||
function ValidateSourceLinkLinks {
|
||||
if ($GHRepoName -ne '' -and !($GHRepoName -Match '^[^\s\/]+/[^\s\/]+$')) {
|
||||
if (!($GHRepoName -Match '^[^\s-]+-[^\s]+$')) {
|
||||
Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHRepoName should be in the format <org>/<repo> or <org>-<repo>. '$GHRepoName'"
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
else {
|
||||
$GHRepoName = $GHRepoName -replace '^([^\s-]+)-([^\s]+)$', '$1/$2';
|
||||
}
|
||||
}
|
||||
|
||||
if ($GHCommit -ne '' -and !($GHCommit -Match '^[0-9a-fA-F]{40}$')) {
|
||||
Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'"
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
|
||||
if ($GHRepoName -ne '' -and $GHCommit -ne '') {
|
||||
$RepoTreeURL = -Join('http://api.github.com/repos/', $GHRepoName, '/git/trees/', $GHCommit, '?recursive=1')
|
||||
$CodeExtensions = @('.cs', '.vb', '.fs', '.fsi', '.fsx', '.fsscript')
|
||||
|
||||
try {
|
||||
# Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash
|
||||
$Data = Invoke-WebRequest $RepoTreeURL -UseBasicParsing | ConvertFrom-Json | Select-Object -ExpandProperty tree
|
||||
|
||||
foreach ($file in $Data) {
|
||||
$Extension = [System.IO.Path]::GetExtension($file.path)
|
||||
|
||||
if ($CodeExtensions.Contains($Extension)) {
|
||||
$RepoFiles[$file.path] = 1
|
||||
}
|
||||
}
|
||||
}
|
||||
catch {
|
||||
Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL . Execution will proceed without caching."
|
||||
}
|
||||
}
|
||||
elseif ($GHRepoName -ne '' -or $GHCommit -ne '') {
|
||||
Write-Host 'For using the http caching mechanism both GHRepoName and GHCommit should be informed.'
|
||||
}
|
||||
|
||||
if (Test-Path $ExtractPath) {
|
||||
Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
|
||||
}
|
||||
|
||||
$ValidationFailures = 0
|
||||
|
||||
# Process each NuGet package in parallel
|
||||
Get-ChildItem "$InputPath\*.symbols.nupkg" |
|
||||
ForEach-Object {
|
||||
Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null
|
||||
$NumJobs = @(Get-Job -State 'Running').Count
|
||||
|
||||
while ($NumJobs -ge $MaxParallelJobs) {
|
||||
Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again."
|
||||
sleep $SecondsBetweenLoadChecks
|
||||
$NumJobs = @(Get-Job -State 'Running').Count
|
||||
}
|
||||
|
||||
foreach ($Job in @(Get-Job -State 'Completed')) {
|
||||
$jobResult = Wait-Job -Id $Job.Id | Receive-Job
|
||||
CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures)
|
||||
Remove-Job -Id $Job.Id
|
||||
}
|
||||
}
|
||||
|
||||
foreach ($Job in @(Get-Job)) {
|
||||
$jobResult = Wait-Job -Id $Job.Id | Receive-Job
|
||||
if ($jobResult -ne '0') {
|
||||
$ValidationFailures++
|
||||
}
|
||||
Remove-Job -Id $Job.Id
|
||||
}
|
||||
if ($ValidationFailures -gt 0) {
|
||||
Write-PipelineTelemetryError -Category 'SourceLink' -Message "$ValidationFailures package(s) failed validation."
|
||||
ExitWithExitCode 1
|
||||
}
|
||||
}
|
||||
|
||||
function InstallSourcelinkCli {
|
||||
$sourcelinkCliPackageName = 'sourcelink'
|
||||
|
||||
$dotnetRoot = InitializeDotNetCli -install:$true
|
||||
$dotnet = "$dotnetRoot\dotnet.exe"
|
||||
$toolList = & "$dotnet" tool list --global
|
||||
|
||||
if (($toolList -like "*$sourcelinkCliPackageName*") -and ($toolList -like "*$sourcelinkCliVersion*")) {
|
||||
Write-Host "SourceLink CLI version $sourcelinkCliVersion is already installed."
|
||||
}
|
||||
else {
|
||||
Write-Host "Installing SourceLink CLI version $sourcelinkCliVersion..."
|
||||
Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
|
||||
& "$dotnet" tool install $sourcelinkCliPackageName --version $sourcelinkCliVersion --verbosity "minimal" --global
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
InstallSourcelinkCli
|
||||
|
||||
ValidateSourceLinkLinks
|
||||
}
|
||||
catch {
|
||||
Write-Host $_.Exception
|
||||
Write-Host $_.ScriptStackTrace
|
||||
Write-PipelineTelemetryError -Category 'SourceLink' -Message $_
|
||||
ExitWithExitCode 1
|
||||
}
|
|
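A hypothetical local invocation of this SourceLink validation script. The file name/path, the extract directory, the repo/commit, and the CLI version are illustrative assumptions, not values taken from this change.

# Assumes the script above is saved as eng\common\post-build\sourcelink-validation.ps1 (assumed path).
.\eng\common\post-build\sourcelink-validation.ps1 `
  -InputPath 'D:\artifacts\SymbolPackages' `
  -ExtractPath 'D:\temp\sourcelink-extract' `
  -GHRepoName 'dotnet/machinelearning' `
  -GHCommit '0123456789abcdef0123456789abcdef01234567' `
  -SourcelinkCliVersion '3.0.0'   # placeholder version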
@@ -0,0 +1,268 @@
param(
  [Parameter(Mandatory=$true)][string] $InputPath,            # Full path to directory where NuGet packages to be checked are stored
  [Parameter(Mandatory=$true)][string] $ExtractPath,          # Full path to directory where the packages will be extracted during validation
  [Parameter(Mandatory=$true)][string] $DotnetSymbolVersion,  # Version of dotnet symbol to use
  [Parameter(Mandatory=$false)][switch] $ContinueOnError,     # If we should keep checking symbols after an error
  [Parameter(Mandatory=$false)][switch] $Clean                # Clean extracted symbols directory after checking symbols
)

# Maximum number of jobs to run in parallel
$MaxParallelJobs = 6

# Wait time between checks for system load
$SecondsBetweenLoadChecks = 10

$CountMissingSymbols = {
  param(
    [string] $PackagePath  # Path to a NuGet package
  )

  . $using:PSScriptRoot\..\tools.ps1

  Add-Type -AssemblyName System.IO.Compression.FileSystem

  # Ensure input file exists
  if (!(Test-Path $PackagePath)) {
    Write-PipelineTaskError "Input file does not exist: $PackagePath"
    return -2
  }

  # Extensions for which we'll look for symbols
  $RelevantExtensions = @('.dll', '.exe', '.so', '.dylib')

  # How many files are missing symbol information
  $MissingSymbols = 0

  $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
  $PackageGuid = New-Guid
  $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageGuid
  $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath 'Symbols'

  try {
    [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
  }
  catch {
    Write-Host "Something went wrong extracting $PackagePath"
    Write-Host $_
    return [pscustomobject]@{
      result = -1
      packagePath = $PackagePath
    }
  }

  Get-ChildItem -Recurse $ExtractPath |
    Where-Object {$RelevantExtensions -contains $_.Extension} |
      ForEach-Object {
        $FileName = $_.FullName
        if ($FileName -Match '\\ref\\') {
          Write-Host "`t Ignoring reference assembly file " $FileName
          return
        }

        $FirstMatchingSymbolDescriptionOrDefault = {
          param(
            [string] $FullPath,           # Full path to the module that has to be checked
            [string] $TargetServerParam,  # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
            [string] $SymbolsPath
          )

          $FileName = [System.IO.Path]::GetFileName($FullPath)
          $Extension = [System.IO.Path]::GetExtension($FullPath)

          # Those below are potential symbol files that the `dotnet symbol` might
          # return. Which one will be returned depends on the type of file we are
          # checking and which type of file was uploaded.

          # The file itself is returned
          $SymbolPath = $SymbolsPath + '\' + $FileName

          # PDB file for the module
          $PdbPath = $SymbolPath.Replace($Extension, '.pdb')

          # PDB file for R2R module (created by crossgen)
          $NGenPdb = $SymbolPath.Replace($Extension, '.ni.pdb')

          # DBG file for a .so library
          $SODbg = $SymbolPath.Replace($Extension, '.so.dbg')

          # DWARF file for a .dylib
          $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf')

          $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools"
          $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe"

          & $dotnetSymbolExe --symbols --modules --windows-pdbs $TargetServerParam $FullPath -o $SymbolsPath | Out-Null

          if (Test-Path $PdbPath) {
            return 'PDB'
          }
          elseif (Test-Path $NGenPdb) {
            return 'NGen PDB'
          }
          elseif (Test-Path $SODbg) {
            return 'DBG for SO'
          }
          elseif (Test-Path $DylibDwarf) {
            return 'Dwarf for Dylib'
          }
          elseif (Test-Path $SymbolPath) {
            return 'Module'
          }
          else {
            return $null
          }
        }

        $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--microsoft-symbol-server' $SymbolsPath
        $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault $FileName '--internal-server' $SymbolsPath

        Write-Host -NoNewLine "`t Checking file " $FileName "... "

        if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
          Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)"
        }
        else {
          $MissingSymbols++

          if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
            Write-Host 'No symbols found on MSDL or SymWeb!'
          }
          else {
            if ($SymbolsOnMSDL -eq $null) {
              Write-Host 'No symbols found on MSDL!'
            }
            else {
              Write-Host 'No symbols found on SymWeb!'
            }
          }
        }
      }

  if ($using:Clean) {
    Remove-Item $ExtractPath -Recurse -Force
  }

  Pop-Location

  return [pscustomobject]@{
    result = $MissingSymbols
    packagePath = $PackagePath
  }
}

function CheckJobResult(
    $result,
    $packagePath,
    [ref]$DupedSymbols,
    [ref]$TotalFailures) {
  if ($result -eq '-1') {
    Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath has duplicated symbol files"
    $DupedSymbols.Value++
  }
  elseif ($result -ne '0') {
    Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $result modules in the package $packagePath"
    $TotalFailures.Value++
  }
}

function CheckSymbolsAvailable {
  if (Test-Path $ExtractPath) {
    Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
  }

  $TotalFailures = 0
  $DupedSymbols = 0

  Get-ChildItem "$InputPath\*.nupkg" |
    ForEach-Object {
      $FileName = $_.Name
      $FullName = $_.FullName

      # These packages from Arcade-Services include some native libraries that
      # our current symbol uploader can't handle. Below is a workaround until
      # we get issue: https://github.com/dotnet/arcade/issues/2457 sorted.
      if ($FileName -Match 'Microsoft\.DotNet\.Darc\.') {
        Write-Host "Ignoring Arcade-services file: $FileName"
        Write-Host
        return
      }
      elseif ($FileName -Match 'Microsoft\.DotNet\.Maestro\.Tasks\.') {
        Write-Host "Ignoring Arcade-services file: $FileName"
        Write-Host
        return
      }

      Write-Host "Validating $FileName "

      Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList $FullName | Out-Null

      $NumJobs = @(Get-Job -State 'Running').Count

      while ($NumJobs -ge $MaxParallelJobs) {
        Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again."
        sleep $SecondsBetweenLoadChecks
        $NumJobs = @(Get-Job -State 'Running').Count
      }

      foreach ($Job in @(Get-Job -State 'Completed')) {
        $jobResult = Wait-Job -Id $Job.Id | Receive-Job
        CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures)
        Remove-Job -Id $Job.Id
      }
      Write-Host
    }

  foreach ($Job in @(Get-Job)) {
    $jobResult = Wait-Job -Id $Job.Id | Receive-Job
    CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures)
  }

  if ($TotalFailures -gt 0 -or $DupedSymbols -gt 0) {
    if ($TotalFailures -gt 0) {
      Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures packages"
    }

    if ($DupedSymbols -gt 0) {
      Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols packages had duplicated symbol files"
    }

    ExitWithExitCode 1
  }
  else {
    Write-Host "All symbols validated!"
  }
}

function InstallDotnetSymbol {
  $dotnetSymbolPackageName = 'dotnet-symbol'

  $dotnetRoot = InitializeDotNetCli -install:$true
  $dotnet = "$dotnetRoot\dotnet.exe"
  $toolList = & "$dotnet" tool list --global

  if (($toolList -like "*$dotnetSymbolPackageName*") -and ($toolList -like "*$dotnetSymbolVersion*")) {
    Write-Host "dotnet-symbol version $dotnetSymbolVersion is already installed."
  }
  else {
    Write-Host "Installing dotnet-symbol version $dotnetSymbolVersion..."
    Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
    & "$dotnet" tool install $dotnetSymbolPackageName --version $dotnetSymbolVersion --verbosity "minimal" --global
  }
}

try {
  . $PSScriptRoot\post-build-utils.ps1

  InstallDotnetSymbol

  foreach ($Job in @(Get-Job)) {
    Remove-Job -Id $Job.Id
  }

  CheckSymbolsAvailable
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Category 'CheckSymbols' -Message $_
  ExitWithExitCode 1
}
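A sketch of how this symbol-checking script might be invoked locally. The file name/path, input and extract directories, and the dotnet-symbol version are assumptions for illustration only.

# Assumes the script above is saved as eng\common\post-build\symbols-validation.ps1 (assumed path).
.\eng\common\post-build\symbols-validation.ps1 `
  -InputPath 'D:\artifacts\packages\Release\Shipping' `
  -ExtractPath 'D:\temp\symbols-extract' `
  -DotnetSymbolVersion '1.0.0' `   # placeholder version
  -Clean                           # remove each package's extracted files after checking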
@@ -0,0 +1,64 @@
param(
  [Parameter(Mandatory=$true)][string] $SourceRepo,
  [Parameter(Mandatory=$true)][int] $ChannelId,
  [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
  [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
  [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
)

try {
  . $PSScriptRoot\post-build-utils.ps1

  # Get all the $SourceRepo subscriptions
  $normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '')
  $subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId

  if (!$subscriptions) {
    Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'"
    ExitWithExitCode 0
  }

  $subscriptionsToTrigger = New-Object System.Collections.Generic.List[string]
  $failedTriggeredSubscription = $false

  # Get all enabled subscriptions that need dependency flow on 'everyBuild'
  foreach ($subscription in $subscriptions) {
    if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) {
      Write-Host "Should trigger this subscription: $($subscription.id)"
      [void]$subscriptionsToTrigger.Add($subscription.id)
    }
  }

  foreach ($subscriptionToTrigger in $subscriptionsToTrigger) {
    try {
      Write-Host "Triggering subscription '$subscriptionToTrigger'."

      Trigger-Subscription -SubscriptionId $subscriptionToTrigger

      Write-Host 'done.'
    }
    catch {
      Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'"
      Write-Host $_
      Write-Host $_.ScriptStackTrace
      $failedTriggeredSubscription = $true
    }
  }

  if ($subscriptionsToTrigger.Count -eq 0) {
    Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'."
  }
  elseif ($failedTriggeredSubscription) {
    Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message 'At least one subscription failed to be triggered...'
    ExitWithExitCode 1
  }
  else {
    Write-Host 'All subscriptions were triggered successfully!'
  }
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message $_
  ExitWithExitCode 1
}
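A hypothetical invocation of this subscription-trigger script; the script path, channel id, repo URL, and token source are placeholders rather than values from this change.

# Assumes the script above is saved as eng\common\post-build\trigger-subscriptions.ps1 (assumed path).
.\eng\common\post-build\trigger-subscriptions.ps1 `
  -SourceRepo 'https://github.com/dotnet/machinelearning' `
  -ChannelId 123 `                             # placeholder channel id
  -MaestroApiAccessToken $env:MAESTRO_TOKEN    # token supplied by the pipeline, not hard-coded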
@@ -0,0 +1,97 @@
[CmdletBinding(PositionalBinding=$false)]
Param(
  [string] $configuration = 'Debug',
  [string] $task,
  [string] $verbosity = 'minimal',
  [string] $msbuildEngine = $null,
  [switch] $restore,
  [switch] $prepareMachine,
  [switch] $help,
  [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
)

$ci = $true
$binaryLog = $true
$warnAsError = $true

. $PSScriptRoot\tools.ps1

function Print-Usage() {
  Write-Host "Common settings:"
  Write-Host "  -task <value>           Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
  Write-Host "  -restore                Restore dependencies"
  Write-Host "  -verbosity <value>      Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
  Write-Host "  -help                   Print help and exit"
  Write-Host ""

  Write-Host "Advanced settings:"
  Write-Host "  -prepareMachine         Prepare machine for CI run"
  Write-Host "  -msbuildEngine <value>  Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
  Write-Host ""
  Write-Host "Command line arguments not listed above are passed thru to msbuild."
}

function Build([string]$target) {
  $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
  $log = Join-Path $LogDir "$task$logSuffix.binlog"
  $outputPath = Join-Path $ToolsetDir "$task\\"

  MSBuild $taskProject `
    /bl:$log `
    /t:$target `
    /p:Configuration=$configuration `
    /p:RepoRoot=$RepoRoot `
    /p:BaseIntermediateOutputPath=$outputPath `
    /v:$verbosity `
    @properties
}

try {
  if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
    Print-Usage
    exit 0
  }

  if ($task -eq "") {
    Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task <value>'" -ForegroundColor Red
    Print-Usage
    ExitWithExitCode 1
  }

  if( $msbuildEngine -eq "vs") {
    # Ensure desktop MSBuild is available for sdk tasks.
    if( -not ($GlobalJson.tools.PSObject.Properties.Name -contains "vs" )) {
      $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
    }
    if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
      $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "16.5.0-alpha" -MemberType NoteProperty
    }
    if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
      $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
    }
    if ($xcopyMSBuildToolsFolder -eq $null) {
      throw 'Unable to get xcopy downloadable version of msbuild'
    }

    $global:_MSBuildExe = "$($xcopyMSBuildToolsFolder)\MSBuild\Current\Bin\MSBuild.exe"
  }

  $taskProject = GetSdkTaskProject $task
  if (!(Test-Path $taskProject)) {
    Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task" -ForegroundColor Red
    ExitWithExitCode 1
  }

  if ($restore) {
    Build 'Restore'
  }

  Build 'Execute'
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Category 'Build' -Message $_
  ExitWithExitCode 1
}

ExitWithExitCode 0
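A minimal sketch of how this task runner might be used. The script path and the task name are assumptions for illustration; any SdkTasks project name from the Arcade SDK could be substituted.

# Assumes the script above is saved as eng\common\sdk-task.ps1 (assumed path).
# Restores and runs an Arcade SDK task project, passing an extra MSBuild property through.
.\eng\common\sdk-task.ps1 -task SigningValidation -restore /p:Configuration=Release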
@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
  <solution>
    <add key="disableSourceControlIntegration" value="true" />
  </solution>
  <packageSources>
    <clear />
    <add key="guardian" value="https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json" />
  </packageSources>
  <disabledPackageSources>
    <clear />
  </disabledPackageSources>
</configuration>
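This config clears all feeds and leaves only the Guardian feed enabled. As a rough sketch of how a package could be restored against it, assuming the config is saved next to the SDL scripts and using an assumed package id (the actual Guardian package name is not shown in this change):

# Hypothetical restore of the Guardian CLI using only the feed configured above.
nuget.exe install Microsoft.Guardian.Cli `   # assumed package id
  -ConfigFile .\eng\common\sdl\NuGet.config `
  -OutputDirectory "$env:BUILD_SOURCESDIRECTORY\.packages"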
@@ -0,0 +1,120 @@
Param(
  [string] $GuardianPackageName,                                  # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified)
  [string] $NugetPackageDirectory,                                # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified)
  [string] $GuardianCliLocation,                                  # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified
  [string] $Repository=$env:BUILD_REPOSITORY_NAME,                # Required: the name of the repository (e.g. dotnet/arcade)
  [string] $BranchName=$env:BUILD_SOURCEBRANCH,                   # Optional: name of branch or version of gdn settings; defaults to master
  [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY,          # Required: the directory where source files are located
  [string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located
  [string] $AzureDevOpsAccessToken,                               # Required: access token for dnceng; should be provided via KeyVault
  [string[]] $SourceToolsList,                                    # Optional: list of SDL tools to run on source code
  [string[]] $ArtifactToolsList,                                  # Optional: list of SDL tools to run on built artifacts
  [bool] $TsaPublish=$False,                                      # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
  [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH,                # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
  [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME,         # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
  [string] $BuildNumber=$env:BUILD_BUILDNUMBER,                   # Optional: required for TSA publish; defaults to $(Build.BuildNumber)
  [bool] $UpdateBaseline=$False,                                  # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed
  [bool] $TsaOnboard=$False,                                      # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs.
  [string] $TsaInstanceUrl,                                       # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs.
  [string] $TsaCodebaseName,                                      # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs.
  [string] $TsaProjectName,                                       # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs.
  [string] $TsaNotificationEmail,                                 # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs.
  [string] $TsaCodebaseAdmin,                                     # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs.
  [string] $TsaBugAreaPath,                                       # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
  [string] $TsaIterationPath,                                     # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
  [string] $GuardianLoggerLevel='Standard',                       # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error
  [string[]] $CrScanAdditionalRunConfigParams,                    # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
  [string[]] $PoliCheckAdditionalRunConfigParams,                 # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
  [bool] $BreakOnFailure=$False                                   # Optional: Fail the build if there were errors during the run
)

try {
  $ErrorActionPreference = 'Stop'
  Set-StrictMode -Version 2.0
  $disableConfigureToolsetImport = $true
  $LASTEXITCODE = 0

  # `tools.ps1` checks $ci to perform some actions. Since the SDL
  # scripts don't necessarily execute in the same agent that run the
  # build.ps1/sh script this variable isn't automatically set.
  $ci = $true
  . $PSScriptRoot\..\tools.ps1

  # Replace repo names to the format of org/repo
  if (!($Repository.contains('/'))) {
    $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2';
  }
  else {
    $RepoName = $Repository;
  }

  if ($GuardianPackageName) {
    $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path 'tools' 'guardian.cmd'))
  } else {
    $guardianCliLocation = $GuardianCliLocation
  }

  $workingDirectory = (Split-Path $SourceDirectory -Parent)
  $ValidPath = Test-Path $guardianCliLocation

  if ($ValidPath -eq $False) {
    Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Invalid Guardian CLI Location.'
    ExitWithExitCode 1
  }

  & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
  $gdnFolder = Join-Path $workingDirectory '.gdn'

  if ($TsaOnboard) {
    if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
      Write-Host "$guardianCliLocation tsa-onboard --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
      & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
      if ($LASTEXITCODE -ne 0) {
        Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
        ExitWithExitCode $LASTEXITCODE
      }
    } else {
      Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not onboard to TSA -- not all required values ($TsaCodebaseName, $TsaNotificationEmail, $TsaCodebaseAdmin, $TsaBugAreaPath) were specified.'
      ExitWithExitCode 1
    }
  }

  if ($ArtifactToolsList -and $ArtifactToolsList.Count -gt 0) {
    & $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $ArtifactsDirectory -GdnFolder $gdnFolder -ToolsList $ArtifactToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
  }
  if ($SourceToolsList -and $SourceToolsList.Count -gt 0) {
    & $(Join-Path $PSScriptRoot 'run-sdl.ps1') -GuardianCliLocation $guardianCliLocation -WorkingDirectory $workingDirectory -TargetDirectory $SourceDirectory -GdnFolder $gdnFolder -ToolsList $SourceToolsList -AzureDevOpsAccessToken $AzureDevOpsAccessToken -UpdateBaseline $UpdateBaseline -GuardianLoggerLevel $GuardianLoggerLevel -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams
  }

  if ($UpdateBaseline) {
    & (Join-Path $PSScriptRoot 'push-gdn.ps1') -Repository $RepoName -BranchName $BranchName -GdnFolder $GdnFolder -AzureDevOpsAccessToken $AzureDevOpsAccessToken -PushReason 'Update baseline'
  }

  if ($TsaPublish) {
    if ($TsaBranchName -and $BuildNumber) {
      if (-not $TsaRepositoryName) {
        $TsaRepositoryName = "$($Repository)-$($BranchName)"
      }
      Write-Host "$guardianCliLocation tsa-publish --all-tools --repository-name `"$TsaRepositoryName`" --branch-name `"$TsaBranchName`" --build-number `"$BuildNumber`" --codebase-name `"$TsaCodebaseName`" --notification-alias `"$TsaNotificationEmail`" --codebase-admin `"$TsaCodebaseAdmin`" --instance-url `"$TsaInstanceUrl`" --project-name `"$TsaProjectName`" --area-path `"$TsaBugAreaPath`" --iteration-path `"$TsaIterationPath`" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel"
      & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
      if ($LASTEXITCODE -ne 0) {
        Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE."
        ExitWithExitCode $LASTEXITCODE
      }
    } else {
      Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not publish to TSA -- not all required values ($TsaBranchName, $BuildNumber) were specified.'
      ExitWithExitCode 1
    }
  }

  if ($BreakOnFailure) {
    Write-Host "Failing the build in case of breaking results..."
    & $guardianCliLocation break
  }
}
catch {
  Write-Host $_.ScriptStackTrace
  Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
  exit 1
}
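A hedged sketch of one way this SDL driver might be invoked. The script path, Guardian CLI location, token variable, and tool names are illustrative assumptions; the actual values come from the pipeline templates, not from this snippet.

# Assumes the script above is saved as eng\common\sdl\execute-all-sdl-tools.ps1 (assumed path).
.\eng\common\sdl\execute-all-sdl-tools.ps1 `
  -GuardianCliLocation 'D:\tools\guardian\guardian.cmd' `   # placeholder CLI path
  -Repository 'dotnet/machinelearning' `
  -SourceDirectory $env:BUILD_SOURCESDIRECTORY `
  -AzureDevOpsAccessToken $env:DNCENG_PAT `                 # placeholder secret name
  -SourceToolsList @('credscan', 'policheck') `             # example tool names
  -TsaPublish $False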
@@ -0,0 +1,80 @@
param(
  [Parameter(Mandatory=$true)][string] $InputPath,    # Full path to directory where artifact packages are stored
  [Parameter(Mandatory=$true)][string] $ExtractPath   # Full path to directory where the packages will be extracted
)

$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0

$disableConfigureToolsetImport = $true

function ExtractArtifacts {
  if (!(Test-Path $InputPath)) {
    Write-Host "Input Path does not exist: $InputPath"
    ExitWithExitCode 0
  }
  $Jobs = @()
  Get-ChildItem "$InputPath\*.nupkg" |
    ForEach-Object {
      $Jobs += Start-Job -ScriptBlock $ExtractPackage -ArgumentList $_.FullName
    }

  foreach ($Job in $Jobs) {
    Wait-Job -Id $Job.Id | Receive-Job
  }
}

try {
  # `tools.ps1` checks $ci to perform some actions. Since the SDL
  # scripts don't necessarily execute in the same agent that run the
  # build.ps1/sh script this variable isn't automatically set.
  $ci = $true
  . $PSScriptRoot\..\tools.ps1

  $ExtractPackage = {
    param(
      [string] $PackagePath  # Full path to a NuGet package
    )

    if (!(Test-Path $PackagePath)) {
      Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath"
      ExitWithExitCode 1
    }

    $RelevantExtensions = @('.dll', '.exe', '.pdb')
    Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...'

    $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
    $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId

    Add-Type -AssemblyName System.IO.Compression.FileSystem

    [System.IO.Directory]::CreateDirectory($ExtractPath);

    try {
      $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)

      $zip.Entries |
        Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
          ForEach-Object {
            $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.Name

            [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
          }
    }
    catch {
      Write-Host $_
      Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
      ExitWithExitCode 1
    }
    finally {
      $zip.Dispose()
    }
  }
  Measure-Command { ExtractArtifacts }
}
catch {
  Write-Host $_
  Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
  ExitWithExitCode 1
}
Some files were not shown in this diff because too many files changed.