[release/3.0] Update dependencies from dotnet/arcade (#6938)
* Update dependencies from https://github.com/dotnet/arcade build 20231220.2: Microsoft.DotNet.Arcade.Sdk, Microsoft.DotNet.Build.Tasks.Feed, Microsoft.DotNet.Helix.Sdk, Microsoft.DotNet.SignTool, Microsoft.DotNet.SwaggerGenerator.MSBuild, Microsoft.DotNet.XUnitExtensions — from version 8.0.0-beta.23265.1 to version 8.0.0-beta.23620.2
* Fixed version update breaks.
* Update XUnitVersion.
* Update MicrosoftMLOnnxRuntimeVersion to 1.16.3.
* Roll back OnnxRuntime and suppress warning.
* Update to xUnit with fix for https://github.com/xunit/xunit/issues/2821.
* Update CentOS docker containers.
* Fix packaging step.
* Try including stdint.h to fix missing uint8_t on CentOS.
* Update CentOS test queue.
* Attempt to use runtime centos-stream8-helix container for tests.
* Use centos-stream8-mlnet-helix container for testing.
* Undo changes to test data.
* Make NETFRAMEWORK ifdef versionless.
* Only use semicolons for NoWarn.
* Fix assert by only accessing idx (#6924)

  Asserting on `_rowCount < Utils.Size(_valueBoundaries)` was catching a case where `_rowCount`'s update was reordered before `_valueBoundaries`. This was unnecessary, since this method doesn't need to use `_rowCount`. Instead, make the asserts use only `idx`, which is kept consistent with the waiter logic in this cache. Ensure we only ever use `_rowCount` from the caching thread, so write reordering won't matter.
* Don't include the SDK in our helix payload (#6918)

  I noticed that the tests included the latest SDK - including the host - in our helix payloads. This is a large amount of unnecessary downloads, and it also makes us use the latest host on the older frameworks, which can fail when the latest host drops support for distros. Since our tests shouldn't need the full CLI, remove it from our helix payloads. We'll instead get just the runtime we need through `AdditionalDotNetPackage`.
* Place Helix downloaded runtime on the PATH

  Helix only sets the path when the CLI is included, but we don't need the CLI.
* Make double assertions compare with tolerance instead of precision (#6923)

  Precision might cause small differences to round to a different number. Instead, compare with a tolerance, which is not sensitive to rounding. A sketch of the difference follows below.

---------

Co-authored-by: dotnet-maestro[bot] <dotnet-maestro[bot]@users.noreply.github.com>
Co-authored-by: Michael Sharp <misharp@microsoft.com>
Co-authored-by: Eric StJohn <ericstj@microsoft.com>
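For context, a minimal sketch of the tolerance-versus-precision distinction that #6923 relies on. The values and the test name here are illustrative placeholders, not taken from any specific test in the repository; both overloads shown are standard xUnit APIs (the tolerance overload is available in xunit 2.5+).

using Xunit;

public class ToleranceVsPrecisionExample
{
    [Fact]
    public void CompareDoubles()
    {
        double expected = 0.93;
        double actual = 0.934999;   // hypothetical measured value

        // Precision-based overload: both values are rounded to 2 decimal places
        // before comparing. 0.934999 rounds to 0.93 and passes, but 0.935001 would
        // round to 0.94 and fail, even though the two inputs differ by only 2e-6.
        Assert.Equal(expected, actual, 2);

        // Tolerance-based overload: passes whenever |expected - actual| <= 0.01,
        // regardless of where rounding would land.
        Assert.Equal(expected, actual, 0.01);
    }
}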
Parent: 4f268c7e52
Commit: f602581005
@@ -19,7 +19,7 @@ trigger:
resources:
  containers:
  - container: CentosContainer
    image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet
    image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet

  - container: UbuntuContainer
    image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet

@@ -86,7 +86,7 @@ jobs:
    pool:
      name: NetCore-Public
      demands: ImageOverride -equals build.ubuntu.1804.amd64.open
    helixQueue: Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet-helix
    helixQueue: Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet-helix

- template: /build/ci/job-template.yml
  parameters:
@@ -23,6 +23,9 @@
    <TargetArchitecture>$(TargetArchitecture.ToLower())</TargetArchitecture>
    <PlatformConfig>$(Platform).$(Configuration)</PlatformConfig>
    <StrongNameKeyId>Open</StrongNameKeyId>

    <!-- temporarily suppress NETSDK 1206 until we can update OnnxRuntime to fix https://github.com/dotnet/machinelearning/issues/6916 -->
    <NoWarn>$(NoWarn);NETSDK1206</NoWarn>
  </PropertyGroup>

  <PropertyGroup>

@@ -54,8 +57,7 @@
    <!-- Optional: Embed source files that are not tracked by the source control manager in the PDB -->
    <EmbedUntrackedSources>true</EmbedUntrackedSources>

    <!-- Optional: Build symbol package (.snupkg) to distribute the PDB containing Source Link -->
    <!-- Optional: Build symbol packages (.symbols.nupkg) to distribute the PDB containing Source Link -->
    <IncludeSymbols>true</IncludeSymbols>
    <SymbolPackageFormat>snupkg</SymbolPackageFormat>
  </PropertyGroup>
</Project>
@@ -1,4 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<?xml version="1.0" encoding="utf-8"?>
<configuration>
  <solution>
    <add key="disableSourceControlIntegration" value="true" />
@@ -22,7 +22,7 @@ schedules:
resources:
  containers:
  - container: CentosContainer
    image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet
    image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet

  - container: UbuntuContainer
    image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet
@@ -23,7 +23,7 @@ schedules:
resources:
  containers:
  - container: CentosContainer
    image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet
    image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet

  - container: UbuntuContainer
    image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet
@@ -11,7 +11,6 @@ parameters:
  WarnAsError: ''
  TestTargetFramework: ''
  HelixConfiguration: '' # optional -- additional property attached to a job
  IncludeDotNetCli: true # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
  EnableXUnitReporter: true # optional -- true enables XUnit result reporting to Mission Control
  WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
  HelixBaseUri: 'https://helix.dot.net' # optional -- sets the Helix API base URI (allows targeting int)

@@ -34,7 +33,6 @@ steps:
      /p:HelixBuild=${{ parameters.HelixBuild }}
      /p:HelixConfiguration="${{ parameters.HelixConfiguration }}"
      /p:HelixAccessToken="${{ parameters.HelixAccessToken }}"
      /p:IncludeDotNetCli=${{ parameters.IncludeDotNetCli }}
      /p:EnableXUnitReporter=${{ parameters.EnableXUnitReporter }}
      /p:WaitForWorkItemCompletion=${{ parameters.WaitForWorkItemCompletion }}
      /p:HelixBaseUri=${{ parameters.HelixBaseUri }}
@@ -5,7 +5,7 @@
resources:
  containers:
  - container: CentosContainer
    image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet
    image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet

  - container: UbuntuCrossArmContainer
    image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet-cross-arm
@@ -7,25 +7,25 @@
    </Dependency>
  </ProductDependencies>
  <ToolsetDependencies>
    <Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="8.0.0-beta.23265.1">
    <Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="8.0.0-beta.23620.2">
      <Uri>https://github.com/dotnet/arcade</Uri>
      <Sha>2d8d59065b5e090584a8e90c4371fc06ed60bdc5</Sha>
      <Sha>84129325171e65373edef24019e1171feeb19cbc</Sha>
    </Dependency>
    <Dependency Name="Microsoft.DotNet.Build.Tasks.Feed" Version="8.0.0-beta.23265.1">
    <Dependency Name="Microsoft.DotNet.Build.Tasks.Feed" Version="8.0.0-beta.23620.2">
      <Uri>https://github.com/dotnet/arcade</Uri>
      <Sha>2d8d59065b5e090584a8e90c4371fc06ed60bdc5</Sha>
      <Sha>84129325171e65373edef24019e1171feeb19cbc</Sha>
    </Dependency>
    <Dependency Name="Microsoft.DotNet.SignTool" Version="8.0.0-beta.23265.1">
    <Dependency Name="Microsoft.DotNet.SignTool" Version="8.0.0-beta.23620.2">
      <Uri>https://github.com/dotnet/arcade</Uri>
      <Sha>2d8d59065b5e090584a8e90c4371fc06ed60bdc5</Sha>
      <Sha>84129325171e65373edef24019e1171feeb19cbc</Sha>
    </Dependency>
    <Dependency Name="Microsoft.DotNet.Helix.Sdk" Version="8.0.0-beta.23265.1">
    <Dependency Name="Microsoft.DotNet.Helix.Sdk" Version="8.0.0-beta.23620.2">
      <Uri>https://github.com/dotnet/arcade</Uri>
      <Sha>2d8d59065b5e090584a8e90c4371fc06ed60bdc5</Sha>
      <Sha>84129325171e65373edef24019e1171feeb19cbc</Sha>
    </Dependency>
    <Dependency Name="Microsoft.DotNet.SwaggerGenerator.MSBuild" Version="8.0.0-beta.23265.1">
    <Dependency Name="Microsoft.DotNet.SwaggerGenerator.MSBuild" Version="8.0.0-beta.23620.2">
      <Uri>https://github.com/dotnet/arcade</Uri>
      <Sha>2d8d59065b5e090584a8e90c4371fc06ed60bdc5</Sha>
      <Sha>84129325171e65373edef24019e1171feeb19cbc</Sha>
    </Dependency>
    <Dependency Name="Microsoft.DotNet.Maestro.Client" Version="1.1.0-beta.20074.1">
      <Uri>https://github.com/dotnet/arcade-services</Uri>

@@ -39,9 +39,9 @@
      <Uri>https://github.com/dotnet/xharness</Uri>
      <Sha>89cb4b1d368e0f15b4df8e02a176dd1f1c33958b</Sha>
    </Dependency>
    <Dependency Name="Microsoft.DotNet.XUnitExtensions" Version="8.0.0-beta.23265.1">
    <Dependency Name="Microsoft.DotNet.XUnitExtensions" Version="8.0.0-beta.23620.2">
      <Uri>https://github.com/dotnet/arcade</Uri>
      <Sha>2d8d59065b5e090584a8e90c4371fc06ed60bdc5</Sha>
      <Sha>84129325171e65373edef24019e1171feeb19cbc</Sha>
    </Dependency>
    <Dependency Name="Microsoft.Net.Compilers.Toolset" Version="3.8.0-3.20460.2">
      <Uri>https://github.com/dotnet/roslyn</Uri>
@@ -81,7 +81,7 @@
    <DotNetRuntime80Version>8.0.0</DotNetRuntime80Version>
    <FluentAssertionVersion>5.10.2</FluentAssertionVersion>
    <MicrosoftCodeAnalysisTestingVersion>1.1.2-beta1.23431.1</MicrosoftCodeAnalysisTestingVersion>
    <MicrosoftDotNetXUnitExtensionsVersion>8.0.0-beta.23265.1</MicrosoftDotNetXUnitExtensionsVersion>
    <MicrosoftDotNetXUnitExtensionsVersion>8.0.0-beta.23620.2</MicrosoftDotNetXUnitExtensionsVersion>
    <MicrosoftExtensionsDependencyModelVersion>2.1.0</MicrosoftExtensionsDependencyModelVersion>
    <MicrosoftExtensionsTestVersion>3.0.1</MicrosoftExtensionsTestVersion>
    <MicrosoftMLOnnxTestModelsVersion>0.0.6-test</MicrosoftMLOnnxTestModelsVersion>

@@ -91,7 +91,7 @@
    <SystemDataSqlClientVersion>4.8.5</SystemDataSqlClientVersion>
    <SystemDataSQLiteCoreVersion>1.0.118</SystemDataSQLiteCoreVersion>
    <XunitCombinatorialVersion>1.2.7</XunitCombinatorialVersion>
    <XUnitVersion>2.4.2</XUnitVersion>
    <XUnitVersion>2.6.3</XUnitVersion>
    <!-- Opt-out repo features -->
    <UsingToolXliff>false</UsingToolXliff>
  </PropertyGroup>
@@ -153,7 +153,7 @@ if ($dotnet31Source -ne $null) {
    AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
}

$dotnetVersions = @('5','6','7')
$dotnetVersions = @('5','6','7','8')

foreach ($dotnetVersion in $dotnetVersions) {
    $feedPrefix = "dotnet" + $dotnetVersion;
@@ -105,7 +105,7 @@ if [ "$?" == "0" ]; then
    PackageSources+=('dotnet3.1-internal-transport')
fi

DotNetVersions=('5' '6' '7')
DotNetVersions=('5' '6' '7' '8')

for DotNetVersion in ${DotNetVersions[@]} ; do
    FeedPrefix="dotnet${DotNetVersion}";
@@ -207,6 +207,7 @@ elseif(ILLUMOS)
    set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
elseif(HAIKU)
    set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
    set(CMAKE_PROGRAM_PATH "${CMAKE_PROGRAM_PATH};${CROSS_ROOTFS}/cross-tools-x86_64/bin")

    set(TOOLSET_PREFIX ${TOOLCHAIN}-)
    function(locate_toolchain_exec exec var)

@@ -217,7 +218,6 @@ elseif(HAIKU)
        endif()

        find_program(EXEC_LOCATION_${exec}
            PATHS "${CROSS_ROOTFS}/cross-tools-x86_64/bin"
            NAMES
            "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
            "${TOOLSET_PREFIX}${exec}")
@@ -1,6 +1,6 @@
param (
    $darcVersion = $null,
    $versionEndpoint = 'https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16',
    $versionEndpoint = 'https://maestro.dot.net/api/assets/darc-version?api-version=2019-01-16',
    $verbosity = 'minimal',
    $toolpath = $null
)
@@ -2,7 +2,7 @@

source="${BASH_SOURCE[0]}"
darcVersion=''
versionEndpoint='https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16'
versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2019-01-16'
verbosity='minimal'

while [[ $# > 0 ]]; do
@@ -54,6 +54,10 @@ cpuname=$(uname -m)
case $cpuname in
    arm64|aarch64)
        buildarch=arm64
        if [ "$(getconf LONG_BIT)" -lt 64 ]; then
            # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
            buildarch=arm
        fi
        ;;
    loongarch64)
        buildarch=loongarch64
Binary data: eng/common/loc/P22DotNetHtmlLocalization.lss (binary file not shown)
@@ -63,7 +63,7 @@ if [ -z "$CLR_CC" ]; then
    # Set default versions
    if [ -z "$majorVersion" ]; then
        # note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero.
        if [ "$compiler" = "clang" ]; then versions="16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5"
        if [ "$compiler" = "clang" ]; then versions="17 16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5"
        elif [ "$compiler" = "gcc" ]; then versions="13 12 11 10 9 8 7 6 5 4.9"; fi

        for version in $versions; do
@@ -0,0 +1,130 @@
#!/usr/bin/env bash

# getNonPortableDistroRid
#
# Input:
#   targetOs: (str)
#   targetArch: (str)
#   rootfsDir: (str)
#
# Return:
#   non-portable rid
getNonPortableDistroRid()
{
    local targetOs="$1"
    local targetArch="$2"
    local rootfsDir="$3"
    local nonPortableRid=""

    if [ "$targetOs" = "linux" ]; then
        if [ -e "${rootfsDir}/etc/os-release" ]; then
            source "${rootfsDir}/etc/os-release"

            if [[ "${ID}" == "rhel" || "${ID}" == "rocky" || "${ID}" == "alpine" ]]; then
                # remove the last version digit
                VERSION_ID="${VERSION_ID%.*}"
            fi

            if [[ "${VERSION_ID:-}" =~ ^([[:digit:]]|\.)+$ ]]; then
                nonPortableRid="${ID}.${VERSION_ID}-${targetArch}"
            else
                # Rolling release distros either do not set VERSION_ID, set it as blank or
                # set it to non-version looking string (such as TEMPLATE_VERSION_ID on ArchLinux);
                # so omit it here to be consistent with everything else.
                nonPortableRid="${ID}-${targetArch}"
            fi

        elif [ -e "${rootfsDir}/android_platform" ]; then
            source "$rootfsDir"/android_platform
            nonPortableRid="$RID"
        fi
    fi

    if [ "$targetOs" = "freebsd" ]; then
        # $rootfsDir can be empty. freebsd-version is shell script and it should always work.
        __freebsd_major_version=$($rootfsDir/bin/freebsd-version | { read v; echo "${v%%.*}"; })
        nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}"
    elif command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then
        __android_sdk_version=$(getprop ro.build.version.sdk)
        nonPortableRid="android.$__android_sdk_version-${targetArch}"
    elif [ "$targetOs" = "illumos" ]; then
        __uname_version=$(uname -v)
        case "$__uname_version" in
            omnios-*)
                __omnios_major_version=$(echo "${__uname_version:8:2}")
                nonPortableRid=omnios."$__omnios_major_version"-"$targetArch"
                ;;
            joyent_*)
                __smartos_major_version=$(echo "${__uname_version:7:4}")
                nonPortableRid=smartos."$__smartos_major_version"-"$targetArch"
                ;;
            illumos_*)
                nonPortableRid=openindiana-"$targetArch"
                ;;
        esac
    elif [ "$targetOs" = "solaris" ]; then
        __uname_version=$(uname -v)
        __solaris_major_version=$(echo "${__uname_version%.*}")
        nonPortableRid=solaris."$__solaris_major_version"-"$targetArch"
    elif [ "$targetOs" = "haiku" ]; then
        __uname_release=$(uname -r)
        nonPortableRid=haiku.r"$__uname_release"-"$targetArch"
    fi

    echo "$(echo $nonPortableRid | tr '[:upper:]' '[:lower:]')"
}

# initDistroRidGlobal
#
# Input:
#   os: (str)
#   arch: (str)
#   rootfsDir?: (nullable:string)
#
# Return:
#   None
#
# Notes:
#
# It is important to note that the function does not return anything, but it
# exports the following variables on success:
#
#   __DistroRid : Non-portable rid of the target platform.
#   __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform.
#
initDistroRidGlobal()
{
    local targetOs="$1"
    local targetArch="$2"
    local rootfsDir=""
    if [ "$#" -ge 3 ]; then
        rootfsDir="$3"
    fi

    if [ -n "${rootfsDir}" ]; then
        # We may have a cross build. Check for the existence of the rootfsDir
        if [ ! -e "${rootfsDir}" ]; then
            echo "Error rootfsDir has been passed, but the location is not valid."
            exit 1
        fi
    fi

    __DistroRid=$(getNonPortableDistroRid "${targetOs}" "${targetArch}" "${rootfsDir}")

    if [ -z "${__PortableTargetOS:-}" ]; then
        __PortableTargetOS="$targetOs"

        STRINGS="$(command -v strings || true)"
        if [ -z "$STRINGS" ]; then
            STRINGS="$(command -v llvm-strings || true)"
        fi

        # Check for musl-based distros (e.g Alpine Linux, Void Linux).
        if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl ||
                ( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then
            __PortableTargetOS="linux-musl"
        fi
    fi

    export __DistroRid __PortableTargetOS
}
@@ -0,0 +1,80 @@
#!/usr/bin/env bash

# Use uname to determine what the OS is.
OSName=$(uname -s | tr '[:upper:]' '[:lower:]')

if command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then
    OSName="android"
fi

case "$OSName" in
    freebsd|linux|netbsd|openbsd|sunos|android|haiku)
        os="$OSName" ;;
    darwin)
        os=osx ;;
    *)
        echo "Unsupported OS $OSName detected!"
        exit 1 ;;
esac

# On Solaris, `uname -m` is discouraged, see https://docs.oracle.com/cd/E36784_01/html/E36870/uname-1.html
# and `uname -p` returns processor type (e.g. i386 on amd64).
# The appropriate tool to determine CPU is isainfo(1) https://docs.oracle.com/cd/E36784_01/html/E36870/isainfo-1.html.
if [ "$os" = "sunos" ]; then
    if uname -o 2>&1 | grep -q illumos; then
        os="illumos"
    else
        os="solaris"
    fi
    CPUName=$(isainfo -n)
else
    # For the rest of the operating systems, use uname(1) to determine what the CPU is.
    CPUName=$(uname -m)
fi

case "$CPUName" in
    arm64|aarch64)
        arch=arm64
        ;;

    loongarch64)
        arch=loongarch64
        ;;

    riscv64)
        arch=riscv64
        ;;

    amd64|x86_64)
        arch=x64
        ;;

    armv7l|armv8l)
        if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then
            arch=armel
        else
            arch=arm
        fi
        ;;

    armv6l)
        arch=armv6
        ;;

    i[3-6]86)
        echo "Unsupported CPU $CPUName detected, build might not succeed!"
        arch=x86
        ;;

    s390x)
        arch=s390x
        ;;

    ppc64le)
        arch=ppc64le
        ;;
    *)
        echo "Unknown CPU $CPUName detected!"
        exit 1
        ;;
esac
@@ -2,7 +2,7 @@ param(
    [Parameter(Mandatory=$true)][int] $BuildId,
    [Parameter(Mandatory=$true)][int] $ChannelId,
    [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
    [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
    [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
    [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
)

@@ -3,7 +3,7 @@ param(
    [Parameter(Mandatory=$true)][int] $PublishingInfraVersion,
    [Parameter(Mandatory=$true)][string] $AzdoToken,
    [Parameter(Mandatory=$true)][string] $MaestroToken,
    [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
    [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
    [Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
    [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
    [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters

@@ -2,7 +2,7 @@ param(
    [Parameter(Mandatory=$true)][string] $SourceRepo,
    [Parameter(Mandatory=$true)][int] $ChannelId,
    [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
    [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
    [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
    [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
)

@@ -64,7 +64,7 @@ try {
    $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
  }
  if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
    $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.4.1" -MemberType NoteProperty
    $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.8.1-2" -MemberType NoteProperty
  }
  if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
    $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
@@ -17,7 +17,9 @@ Param(
  # Optional: Additional params to add to any tool using PoliCheck.
  [string[]] $PoliCheckAdditionalRunConfigParams,
  # Optional: Additional params to add to any tool using CodeQL/Semmle.
  [string[]] $CodeQLAdditionalRunConfigParams
  [string[]] $CodeQLAdditionalRunConfigParams,
  # Optional: Additional params to add to any tool using Binskim.
  [string[]] $BinskimAdditionalRunConfigParams
)

$ErrorActionPreference = 'Stop'

@@ -69,22 +71,34 @@ try {
      $gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig"

      # For some tools, add default and automatic args.
      if ($tool.Name -eq 'credscan') {
        if ($targetDirectory) {
          $tool.Args += "`"TargetDirectory < $TargetDirectory`""
      switch -Exact ($tool.Name) {
        'credscan' {
          if ($targetDirectory) {
            $tool.Args += "`"TargetDirectory < $TargetDirectory`""
          }
          $tool.Args += "`"OutputType < pre`""
          $tool.Args += $CrScanAdditionalRunConfigParams
        }
        $tool.Args += "`"OutputType < pre`""
        $tool.Args += $CrScanAdditionalRunConfigParams
      } elseif ($tool.Name -eq 'policheck') {
        if ($targetDirectory) {
          $tool.Args += "`"Target < $TargetDirectory`""
        'policheck' {
          if ($targetDirectory) {
            $tool.Args += "`"Target < $TargetDirectory`""
          }
          $tool.Args += $PoliCheckAdditionalRunConfigParams
        }
        $tool.Args += $PoliCheckAdditionalRunConfigParams
      } elseif ($tool.Name -eq 'semmle' -or $tool.Name -eq 'codeql') {
        if ($targetDirectory) {
          $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`""
        {$_ -in 'semmle', 'codeql'} {
          if ($targetDirectory) {
            $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`""
          }
          $tool.Args += $CodeQLAdditionalRunConfigParams
        }
        'binskim' {
          if ($targetDirectory) {
            # Binskim crashes due to specific PDBs. GitHub issue: https://github.com/microsoft/binskim/issues/924.
            # We are excluding all `_.pdb` files from the scan.
            $tool.Args += "`"Target < $TargetDirectory\**;-:file|$TargetDirectory\**\_.pdb`""
          }
          $tool.Args += $BinskimAdditionalRunConfigParams
        }
        $tool.Args += $CodeQLAdditionalRunConfigParams
      }

      # Create variable pointing to the args array directly so we can use splat syntax later.
@@ -35,6 +35,7 @@ Param(
  [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
  [string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
  [string[]] $CodeQLAdditionalRunConfigParams, # Optional: Additional Params to custom build a Semmle/CodeQL run config in the format @("xyz < abc","sdf < 1")
  [string[]] $BinskimAdditionalRunConfigParams, # Optional: Additional Params to custom build a Binskim run config in the format @("xyz < abc","sdf < 1")
  [bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run
)

@@ -107,7 +108,8 @@ try {
      -GuardianLoggerLevel $GuardianLoggerLevel `
      -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams `
      -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams `
      -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams
      -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams `
      -BinskimAdditionalRunConfigParams $BinskimAdditionalRunConfigParams
    if ($BreakOnFailure) {
      Exit-IfNZEC "Sdl"
    }
@@ -35,31 +35,33 @@ try {
    param(
      [string] $PackagePath # Full path to a NuGet package
    )

    if (!(Test-Path $PackagePath)) {
      Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath"
      ExitWithExitCode 1
    }

    $RelevantExtensions = @('.dll', '.exe', '.pdb')
    Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...'

    $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
    $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId

    Add-Type -AssemblyName System.IO.Compression.FileSystem

    [System.IO.Directory]::CreateDirectory($ExtractPath);

    try {
      $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)

      $zip.Entries |
        Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
        ForEach-Object {
          $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.Name

          [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
          $TargetPath = Join-Path -Path $ExtractPath -ChildPath (Split-Path -Path $_.FullName)
          [System.IO.Directory]::CreateDirectory($TargetPath);

          $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.FullName
          [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile)
        }
    }
    catch {
@@ -0,0 +1,75 @@
<#
.SYNOPSIS
Install and run the 'Microsoft.DotNet.VersionTools.Cli' tool with the 'trim-artifacts-version' command to trim the version from the NuGet assets file name.

.PARAMETER InputPath
Full path to directory where artifact packages are stored

.PARAMETER Recursive
Search for NuGet packages recursively

#>

Param(
  [string] $InputPath,
  [bool] $Recursive = $true
)

$CliToolName = "Microsoft.DotNet.VersionTools.Cli"

function Install-VersionTools-Cli {
  param(
    [Parameter(Mandatory=$true)][string]$Version
  )

  Write-Host "Installing the package '$CliToolName' with a version of '$version' ..."
  $feed = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"

  $argumentList = @("tool", "install", "--local", "$CliToolName", "--add-source $feed", "--no-cache", "--version $Version", "--create-manifest-if-needed")
  Start-Process "$dotnet" -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
}

# -------------------------------------------------------------------

if (!(Test-Path $InputPath)) {
  Write-Host "Input Path '$InputPath' does not exist"
  ExitWithExitCode 1
}

$ErrorActionPreference = 'Stop'
Set-StrictMode -Version 2.0

$disableConfigureToolsetImport = $true
$global:LASTEXITCODE = 0

# `tools.ps1` checks $ci to perform some actions. Since the SDL
# scripts don't necessarily execute in the same agent that run the
# build.ps1/sh script this variable isn't automatically set.
$ci = $true
. $PSScriptRoot\..\tools.ps1

try {
  $dotnetRoot = InitializeDotNetCli -install:$true
  $dotnet = "$dotnetRoot\dotnet.exe"

  $toolsetVersion = Read-ArcadeSdkVersion
  Install-VersionTools-Cli -Version $toolsetVersion

  $cliToolFound = (& "$dotnet" tool list --local | Where-Object {$_.Split(' ')[0] -eq $CliToolName})
  if ($null -eq $cliToolFound) {
    Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "The '$CliToolName' tool is not installed."
    ExitWithExitCode 1
  }

  Exec-BlockVerbosely {
    & "$dotnet" $CliToolName trim-assets-version `
      --assets-path $InputPath `
      --recursive $Recursive
    Exit-IfNZEC "Sdl"
  }
}
catch {
  Write-Host $_
  Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
  ExitWithExitCode 1
}
@@ -105,6 +105,11 @@ jobs:
        downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
        checkDownloadedFiles: true

    - powershell: eng/common/sdl/trim-assets-version.ps1
        -InputPath $(Build.ArtifactStagingDirectory)\artifacts
      displayName: Trim the version from the NuGet packages
      continueOnError: ${{ parameters.sdlContinueOnError }}

    - powershell: eng/common/sdl/extract-artifact-packages.ps1
        -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
        -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
@@ -75,6 +75,10 @@ jobs:
    - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
      - name: EnableRichCodeNavigation
        value: 'true'
    # Retry signature validation up to three times, waiting 2 seconds between attempts.
    # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
    - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
      value: 3,2000
    - ${{ each variable in parameters.variables }}:
      # handle name-value variable syntax
      # example:

@@ -83,7 +87,7 @@ jobs:
      - ${{ if ne(variable.name, '') }}:
        - name: ${{ variable.name }}
          value: ${{ variable.value }}

      # handle variable groups
      - ${{ if ne(variable.group, '') }}:
        - group: ${{ variable.group }}

@@ -132,7 +136,7 @@ jobs:
        condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))

    - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
      - task: NuGetAuthenticate@0
      - task: NuGetAuthenticate@1

    - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
      - task: DownloadPipelineArtifact@2

@@ -169,7 +173,7 @@ jobs:
    - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
      - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
        - task: MicroBuildCleanup@1
          displayName: Execute Microbuild cleanup tasks
          displayName: Execute Microbuild cleanup tasks
          condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
          continueOnError: ${{ parameters.continueOnError }}
          env:

@@ -219,7 +223,7 @@ jobs:
        displayName: Publish XUnit Test Results
        inputs:
          testResultsFormat: 'xUnit'
          testResultsFiles: '*.xml'
          testResultsFiles: '*.xml'
          searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
          testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
          mergeTestResults: ${{ parameters.mergeTestResults }}

@@ -230,7 +234,7 @@ jobs:
        displayName: Publish TRX Test Results
        inputs:
          testResultsFormat: 'VSTest'
          testResultsFiles: '*.trx'
          testResultsFiles: '*.trx'
          searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
          testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
          mergeTestResults: ${{ parameters.mergeTestResults }}
@@ -72,7 +72,7 @@ jobs:
      condition: ${{ parameters.condition }}
      continueOnError: ${{ parameters.continueOnError }}

    - task: NuGetAuthenticate@0
    - task: NuGetAuthenticate@1

    - task: PowerShell@2
      displayName: Publish Build Assets

@@ -81,7 +81,7 @@ jobs:
        arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
          /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
          /p:BuildAssetRegistryToken=$(MaestroAccessToken)
          /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
          /p:MaestroApiEndpoint=https://maestro.dot.net
          /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
          /p:OfficialBuildId=$(Build.BuildNumber)
      condition: ${{ parameters.condition }}
@@ -7,7 +7,7 @@ variables:

  # Default Maestro++ API Endpoint and API Version
  - name: MaestroApiEndPoint
    value: "https://maestro-prod.westus2.cloudapp.azure.com"
    value: "https://maestro.dot.net"
  - name: MaestroApiAccessToken
    value: $(MaestroAccessToken)
  - name: MaestroApiVersion
@@ -169,7 +169,7 @@ stages:
          # This is necessary whenever we want to publish/restore to an AzDO private feed
          # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
          # otherwise it'll complain about accessing a private feed.
          - task: NuGetAuthenticate@0
          - task: NuGetAuthenticate@1
            displayName: 'Authenticate to AzDO Feeds'

          # Signing validation will optionally work with the buildmanifest file which is downloaded from

@@ -266,7 +266,7 @@ stages:
              BARBuildId: ${{ parameters.BARBuildId }}
              PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}

          - task: NuGetAuthenticate@0
          - task: NuGetAuthenticate@1

          - task: PowerShell@2
            displayName: Publish Using Darc
@@ -33,7 +33,7 @@ steps:

- ${{ if ne(parameters.overrideParameters, '') }}:
  - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
    displayName: Execute SDL
    displayName: Execute SDL (Overridden)
    continueOnError: ${{ parameters.sdlContinueOnError }}
    condition: ${{ parameters.condition }}
@@ -68,6 +68,11 @@ steps:
        runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
      fi

      baseOsArgs=
      if [ '${{ parameters.platform.baseOS }}' != '' ]; then
        baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
      fi

      publishArgs=
      if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
        publishArgs='--publish'

@@ -86,6 +91,7 @@ steps:
        $internalRestoreArgs \
        $targetRidArgs \
        $runtimeOsArgs \
        $baseOsArgs \
        /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
        /p:ArcadeBuildFromSource=true \
        /p:AssetManifestFileName=$assetManifestFileName

@@ -112,3 +118,12 @@ steps:
    artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
    continueOnError: true
    condition: succeededOrFailed()

# Manually inject component detection so that we can ignore the source build upstream cache, which contains
# a nupkg cache of input packages (a local feed).
# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
- task: ComponentGovernanceComponentDetection@0
  displayName: Component Detection (Exclude upstream cache)
  inputs:
    ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache'
@@ -379,13 +379,13 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = $null) {
  }

  # Minimum VS version to require.
  $vsMinVersionReqdStr = '16.8'
  $vsMinVersionReqdStr = '17.7'
  $vsMinVersionReqd = [Version]::new($vsMinVersionReqdStr)

  # If the version of msbuild is going to be xcopied,
  # use this version. Version matches a package here:
  # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=17.4.1&view=overview
  $defaultXCopyMSBuildVersion = '17.4.1'
  # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/RoslynTools.MSBuild/versions/17.8.1-2
  $defaultXCopyMSBuildVersion = '17.8.1-2'

  if (!$vsRequirements) {
    if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {

@@ -671,6 +671,10 @@ function InitializeNativeTools() {
  }
}

function Read-ArcadeSdkVersion() {
  return $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
}

function InitializeToolset() {
  if (Test-Path variable:global:_ToolsetBuildProj) {
    return $global:_ToolsetBuildProj

@@ -678,7 +682,7 @@ function InitializeToolset() {

  $nugetCache = GetNuGetPackageCachePath

  $toolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
  $toolsetVersion = Read-ArcadeSdkVersion
  $toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt"

  if (Test-Path $toolsetLocationFile) {
@@ -99,6 +99,9 @@
    <HelixPreCommands Condition="$(IsPosixShell)">$(HelixPreCommands);export ML_TEST_DATADIR=$HELIX_CORRELATION_PAYLOAD;export MICROSOFTML_RESOURCE_PATH=$HELIX_WORKITEM_ROOT;sudo chmod -R 777 $HELIX_WORKITEM_ROOT;sudo chown -R $USER $HELIX_WORKITEM_ROOT</HelixPreCommands>
    <HelixPreCommands Condition="!$(IsPosixShell)">$(HelixPreCommands);set ML_TEST_DATADIR=%HELIX_CORRELATION_PAYLOAD%;set MICROSOFTML_RESOURCE_PATH=%HELIX_WORKITEM_ROOT%</HelixPreCommands>

    <HelixPreCommands Condition="$(IsPosixShell)">$(HelixPreCommands);export PATH=$HELIX_CORRELATION_PAYLOAD/$(DotNetCliDestination):$PATH</HelixPreCommands>
    <HelixPreCommands Condition="!$(IsPosixShell)">$(HelixPreCommands);set PATH=%HELIX_CORRELATION_PAYLOAD%\$(DotNetCliDestination)%3B%PATH%</HelixPreCommands>

    <HelixPreCommands Condition="$(HelixTargetQueues.ToLowerInvariant().Contains('osx'))">$(HelixPreCommands);export LD_LIBRARY_PATH=/opt/homebrew/opt/mono-libgdiplus/lib;</HelixPreCommands>

    <HelixPreCommands Condition="$(HelixTargetQueues.ToLowerInvariant().Contains('armarch'))">$(HelixPreCommands);sudo apt update;sudo apt-get install libomp-dev libomp5 -y</HelixPreCommands>
@@ -1,6 +1,6 @@
{
  "tools": {
    "dotnet": "8.0.100-preview.3.23178.7",
    "dotnet": "8.0.100",
    "runtimes": {
      "dotnet/x64": [
        "$(DotNetRuntime60Version)"

@@ -11,8 +11,8 @@
    }
  },
  "msbuild-sdks": {
    "Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.23265.1",
    "Microsoft.DotNet.Helix.Sdk": "8.0.0-beta.23265.1",
    "Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.23620.2",
    "Microsoft.DotNet.Helix.Sdk": "8.0.0-beta.23620.2",
    "Microsoft.Build.Traversal": "3.2.0",
    "Microsoft.SourceLink.GitHub": "1.1.0-beta-20206-02",
    "Microsoft.SourceLink.Common": "1.1.0-beta-20206-02"
@@ -1320,7 +1320,7 @@ namespace Microsoft.ML.Data

        private sealed class ImplVec<T> : ColumnCache<VBuffer<T>>
        {
            // The number of rows cached.
            // The number of rows cached. Only to be accesssed by the Caching thread.
            private int _rowCount;
            // For a given row [r], elements at [r] and [r+1] specify the inclusive
            // and exclusive range of values for the two big arrays. In the case

@@ -1384,10 +1384,10 @@ namespace Microsoft.ML.Data

            public override void Fetch(int idx, ref VBuffer<T> value)
            {
                Ctx.Assert(0 <= idx && idx < _rowCount);
                Ctx.Assert(_rowCount < Utils.Size(_indexBoundaries));
                Ctx.Assert(_rowCount < Utils.Size(_valueBoundaries));
                Ctx.Assert(_uniformLength > 0 || _rowCount <= Utils.Size(_lengths));
                Ctx.Assert(0 <= idx);
                Ctx.Assert((idx + 1) < Utils.Size(_indexBoundaries));
                Ctx.Assert((idx + 1) < Utils.Size(_valueBoundaries));
                Ctx.Assert(_uniformLength > 0 || idx < Utils.Size(_lengths));

                Ctx.Assert(_indexBoundaries[idx + 1] - _indexBoundaries[idx] <= int.MaxValue);
                int indexCount = (int)(_indexBoundaries[idx + 1] - _indexBoundaries[idx]);
@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Suppressions xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
  <!-- The previous version of this package had an empty assembly which was removed -->
  <Suppression>
    <DiagnosticId>PKV006</DiagnosticId>
    <Target>.NETStandard,Version=v2.0</Target>
  </Suppression>
</Suppressions>
@@ -7,6 +7,7 @@
#include <assert.h>
#include <cmath>
#include <cstring>
#include <stdint.h>

#define UNUSED(x) (void)(x)
#define DEBUG_ONLY(x) (void)(x)
@@ -18,7 +18,7 @@
    CS1591: Missing XML comment for publicly visible type or member 'Type_or_Member'
    CS1712: Type parameter 'parameter' has no matching typeparam tag in the XML comment on 'Type_or_Member' (but other type parameters do)
    -->
    <NoWarn>$(NoWarn),1573,1591,1712</NoWarn>
    <NoWarn>$(NoWarn);1573;1591;1712</NoWarn>
  </PropertyGroup>

  <PropertyGroup>
@@ -128,7 +128,7 @@ namespace Microsoft.Data.Analysis.Tests
        }

        [Fact]
        public void TestEmptyDataFrameRecordBatch()
        public async void TestEmptyDataFrameRecordBatch()
        {
            PrimitiveDataFrameColumn<int> ageColumn = new PrimitiveDataFrameColumn<int>("Age");
            PrimitiveDataFrameColumn<int> lengthColumn = new PrimitiveDataFrameColumn<int>("CharCount");

@@ -142,7 +142,7 @@ namespace Microsoft.Data.Analysis.Tests
                foundARecordBatch = true;
                MemoryStream stream = new MemoryStream();
                ArrowStreamWriter writer = new ArrowStreamWriter(stream, recordBatch.Schema);
                writer.WriteRecordBatchAsync(recordBatch).GetAwaiter().GetResult();
                await writer.WriteRecordBatchAsync(recordBatch);

                stream.Position = 0;
                ArrowStreamReader reader = new ArrowStreamReader(stream);
@@ -24,7 +24,7 @@ namespace Microsoft.Data.Analysis.Tests

            // Assert

            Assert.Equal(0, intersection.Count);
            Assert.Empty(intersection);
        }

        [Fact]

@@ -47,7 +47,7 @@ namespace Microsoft.Data.Analysis.Tests

            // Assert

            Assert.Equal(0, intersection.Count);
            Assert.Empty(intersection);
        }

        [Fact]

@@ -70,7 +70,7 @@ namespace Microsoft.Data.Analysis.Tests

            // Assert

            Assert.Equal(0, intersection.Count);
            Assert.Empty(intersection);
        }

        [Fact]

@@ -101,7 +101,7 @@ namespace Microsoft.Data.Analysis.Tests

            // Assert

            Assert.Equal(0, intersection.Count);
            Assert.Empty(intersection);
        }

        [Fact]
@@ -223,8 +223,8 @@ namespace Microsoft.ML.AutoML.Test

            // File has 3 columns: "id", "description" and "animal"
            Assert.NotNull(result.ColumnInformation.LabelColumnName);
            Assert.Equal(1, result.ColumnInformation.TextColumnNames.Count);
            Assert.Equal(1, result.ColumnInformation.CategoricalColumnNames.Count);
            Assert.Single(result.ColumnInformation.TextColumnNames);
            Assert.Single(result.ColumnInformation.CategoricalColumnNames);

            Assert.Equal("id", result.ColumnInformation.LabelColumnName);
            Assert.Equal("description", result.ColumnInformation.TextColumnNames.First());
@@ -46,7 +46,7 @@ namespace Microsoft.ML.AutoML.Test
        public void TrainerNotNeedNormalization()
        {
            var pipeline = BuildSuggestedPipeline(BuildLightGbmTrainer());
            Assert.Equal(0, pipeline.Transforms.Count);
            Assert.Empty(pipeline.Transforms);
        }

        private static void TestPipelineBuilderCaching(
@@ -32,19 +32,19 @@ namespace Microsoft.ML.RunTests
            {
                var tmp = type;
                if (dict.ContainsKey(tmp) && dict[tmp] != tmp.ToString())
                    Assert.True(false, dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
                    Assert.Fail(dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
                dict[tmp] = tmp.ToString();
                for (int size = 0; size < 5; size++)
                {
                    tmp1 = new VectorDataViewType(tmp, size);
                    if (dict.ContainsKey(tmp1) && dict[tmp1] != tmp1.ToString())
                        Assert.True(false, dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
                        Assert.Fail(dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
                    dict[tmp1] = tmp1.ToString();
                    for (int size1 = 0; size1 < 5; size1++)
                    {
                        tmp2 = new VectorDataViewType(tmp, size, size1);
                        if (dict.ContainsKey(tmp2) && dict[tmp2] != tmp2.ToString())
                            Assert.True(false, dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
                            Assert.Fail(dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
                        dict[tmp2] = tmp2.ToString();
                    }
                }

@@ -59,19 +59,19 @@ namespace Microsoft.ML.RunTests
            {
                tmp = new KeyDataViewType(rawType, count);
                if (dict.ContainsKey(tmp) && dict[tmp] != tmp.ToString())
                    Assert.True(false, dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
                    Assert.Fail(dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
                dict[tmp] = tmp.ToString();
                for (int size = 0; size < 5; size++)
                {
                    tmp1 = new VectorDataViewType(tmp, size);
                    if (dict.ContainsKey(tmp1) && dict[tmp1] != tmp1.ToString())
                        Assert.True(false, dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
                        Assert.Fail(dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
                    dict[tmp1] = tmp1.ToString();
                    for (int size1 = 0; size1 < 5; size1++)
                    {
                        tmp2 = new VectorDataViewType(tmp, size, size1);
                        if (dict.ContainsKey(tmp2) && dict[tmp2] != tmp2.ToString())
                            Assert.True(false, dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
                            Assert.Fail(dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
                        dict[tmp2] = tmp2.ToString();
                    }
                }

@@ -79,19 +79,19 @@ namespace Microsoft.ML.RunTests
                Assert.True(rawType.TryGetDataKind(out var kind));
                tmp = new KeyDataViewType(rawType, kind.ToMaxInt());
                if (dict.ContainsKey(tmp) && dict[tmp] != tmp.ToString())
                    Assert.True(false, dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
                    Assert.Fail(dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
                dict[tmp] = tmp.ToString();
                for (int size = 0; size < 5; size++)
                {
                    tmp1 = new VectorDataViewType(tmp, size);
                    if (dict.ContainsKey(tmp1) && dict[tmp1] != tmp1.ToString())
                        Assert.True(false, dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
                        Assert.Fail(dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
                    dict[tmp1] = tmp1.ToString();
                    for (int size1 = 0; size1 < 5; size1++)
                    {
                        tmp2 = new VectorDataViewType(tmp, size, size1);
                        if (dict.ContainsKey(tmp2) && dict[tmp2] != tmp2.ToString())
                            Assert.True(false, dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
                            Assert.Fail(dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
                        dict[tmp2] = tmp2.ToString();
                    }
                }

@@ -104,7 +104,7 @@ namespace Microsoft.ML.RunTests
                {
                    var tmp4 = new ImageDataViewType(height, width);
                    if (dict.ContainsKey(tmp4))
                        Assert.True(false, dict[tmp4] + " and " + tmp4.ToString() + " are duplicates.");
                        Assert.Fail(dict[tmp4] + " and " + tmp4.ToString() + " are duplicates.");
                    dict[tmp4] = tmp4.ToString();
                }
            }
@@ -94,7 +94,7 @@ namespace Microsoft.ML.RunTests
            int testRows = CountRows(splitOutput.TestData);

            Assert.Equal(totalRows, trainRows + testRows);
            Assert.Equal(0.9, (double)trainRows / totalRows, 1);
            Assert.Equal(0.9, (double)trainRows / totalRows, 0.1);
        }

        private static int CountRows(IDataView dataView)

@@ -5005,7 +5005,7 @@ namespace Microsoft.ML.RunTests
                Assert.True(b);
                double auc = 0;
                getter(ref auc);
                Assert.Equal(0.93, auc, 2);
                Assert.Equal(0.93, auc, 0.01);
                b = cursor.MoveNext();
                Assert.False(b);
            }

@@ -5210,7 +5210,7 @@ namespace Microsoft.ML.RunTests
                    if (w == 1)
                        Assert.Equal(1.585, stdev, .001);
                    else
                        Assert.Equal(1.39, stdev, 2);
                        Assert.Equal(1.39, stdev, 0.01);
                    isWeightedGetter(ref isWeighted);
                    Assert.True(isWeighted == (w == 1));
                }

@@ -5379,7 +5379,7 @@ namespace Microsoft.ML.RunTests
                getter(ref stdev);
                foldGetter(ref fold);
                Assert.True(ReadOnlyMemoryUtils.EqualsStr("Standard Deviation", fold));
                Assert.Equal(0.024809923969586353, stdev, 3);
                Assert.Equal(0.024809923969586353, stdev, 0.001);

                double sum = 0;
                double val = 0;

@@ -5788,7 +5788,7 @@ namespace Microsoft.ML.RunTests
                getter(ref stdev);
                foldGetter(ref fold);
                Assert.True(ReadOnlyMemoryUtils.EqualsStr("Standard Deviation", fold));
                Assert.Equal(0.02582, stdev, 5);
                Assert.Equal(0.02582, stdev, 0.00001);

                double sum = 0;
                double val = 0;

@@ -6089,9 +6089,9 @@ namespace Microsoft.ML.RunTests
                foldGetter(ref fold);
                Assert.True(ReadOnlyMemoryUtils.EqualsStr("Standard Deviation", fold));
                var stdevValues = stdev.GetValues();
                Assert.Equal(0.02462, stdevValues[0], 5);
                Assert.Equal(0.02763, stdevValues[1], 5);
                Assert.Equal(0.03273, stdevValues[2], 5);
                Assert.Equal(0.02462, stdevValues[0], 0.00001);
                Assert.Equal(0.02763, stdevValues[1], 0.00001);
                Assert.Equal(0.03273, stdevValues[2], 0.00001);

                var sumBldr = new BufferBuilder<double>(R8Adder.Instance);
                sumBldr.Reset(avg.Length, true);

@@ -6291,7 +6291,7 @@ namespace Microsoft.ML.RunTests
                Assert.True(b);
                double acc = 0;
                getter(ref acc);
                Assert.Equal(0.96, acc, 2);
                Assert.Equal(0.96, acc, 0.01);
                b = cursor.MoveNext();
                Assert.False(b);
            }

@@ -6463,7 +6463,7 @@ namespace Microsoft.ML.RunTests
                Assert.True(b);
                double acc = 0;
                getter(ref acc);
                Assert.Equal(0.71, acc, 2);
                Assert.Equal(0.71, acc, 0.01);
                b = cursor.MoveNext();
                Assert.False(b);
            }
@@ -39,8 +39,8 @@ namespace Microsoft.ML.RunTests
            {
                Double loss = lossFunc.Loss((float)output, (float)label);
                float derivative = lossFunc.Derivative((float)output, (float)label);
                Assert.Equal(expectedLoss, loss, 5);
                Assert.Equal(expectedUpdate, -derivative, 5);
                Assert.Equal(expectedLoss, loss, 0.00001);
                Assert.Equal(expectedUpdate, -derivative, 0.00001);

                if (differentiable)
                {

@@ -48,7 +48,7 @@ namespace Microsoft.ML.RunTests
                    // Use a simple finite difference method to see if it's in the right ballpark.
                    float almostOutput = Math.Max((float)output * (1 + _epsilon), (float)output + _epsilon);
                    Double almostLoss = lossFunc.Loss(almostOutput, (float)label);
                    Assert.Equal((almostLoss - loss) / (almostOutput - output), derivative, 1);
                    Assert.Equal((almostLoss - loss) / (almostOutput - output), derivative, 0.1);
                }
            }
@@ -293,7 +293,7 @@ namespace Microsoft.ML.Core.Tests.UnitTests
            var actualValues = a.GetValues().ToArray();
            Assert.Equal(expectedValues.Length, actualValues.Length);
            for (int i = 0; i < expectedValues.Length; i++)
                Assert.Equal(expectedValues[i], actualValues[i], precision: 6);
                Assert.Equal(expectedValues[i], actualValues[i], 0.000001);
        }

        /// <summary>
@@ -645,7 +645,7 @@ namespace Microsoft.ML.CpuMath.UnitTests
                }

                var actual = CpuMathUtils.Sum(src);
                Assert.Equal((double)expected, (double)actual, 2);
                Assert.Equal((double)expected, (double)actual, 0.01);
                return RemoteExecutor.SuccessExitCode;
            }, mode, test, options);
        }

@@ -668,7 +668,7 @@ namespace Microsoft.ML.CpuMath.UnitTests
                }

                var actual = CpuMathUtils.SumSq(src);
                Assert.Equal((double)expected, (double)actual, 2);
                Assert.Equal((double)expected, (double)actual, 0.01);
                return RemoteExecutor.SuccessExitCode;
            }, mode, test, options);
        }

@@ -693,7 +693,7 @@ namespace Microsoft.ML.CpuMath.UnitTests
                    expected += (src[i] - defaultScale) * (src[i] - defaultScale);
                }

                Assert.Equal((double)expected, (double)actual, 1);
                Assert.Equal((double)expected, (double)actual, 0.1);
                return RemoteExecutor.SuccessExitCode;
            }, mode, test, scale, options);
        }

@@ -716,7 +716,7 @@ namespace Microsoft.ML.CpuMath.UnitTests
                }

                var actual = CpuMathUtils.SumAbs(src);
                Assert.Equal((double)expected, (double)actual, 2);
                Assert.Equal((double)expected, (double)actual, 0.01);
                return RemoteExecutor.SuccessExitCode;
            }, mode, test, options);
        }

@@ -741,7 +741,7 @@ namespace Microsoft.ML.CpuMath.UnitTests
                    expected += Math.Abs(src[i] - defaultScale);
                }

                Assert.Equal((double)expected, (double)actual, 2);
                Assert.Equal((double)expected, (double)actual, 0.01);
                return RemoteExecutor.SuccessExitCode;
            }, mode, test, scale, options);
        }

@@ -769,7 +769,7 @@ namespace Microsoft.ML.CpuMath.UnitTests
                    }
                }

                Assert.Equal((double)expected, (double)actual, 2);
                Assert.Equal((double)expected, (double)actual, 0.01);
                return RemoteExecutor.SuccessExitCode;
            }, mode, test, options);
        }

@@ -797,7 +797,7 @@ namespace Microsoft.ML.CpuMath.UnitTests
                        expected = abs;
                    }
                }
                Assert.Equal((double)expected, (double)actual, 2);
                Assert.Equal((double)expected, (double)actual, 0.01);
                return RemoteExecutor.SuccessExitCode;
            }, mode, test, scale, options);
        }

@@ -827,7 +827,7 @@ namespace Microsoft.ML.CpuMath.UnitTests
                }

                var actual = CpuMathUtils.DotProductDense(src, dst, dst.Length);
                Assert.Equal((double)expected, (double)actual, 1);
                Assert.Equal((double)expected, (double)actual, 0.1);
                return RemoteExecutor.SuccessExitCode;
            }, mode, test, options);
        }

@@ -861,7 +861,7 @@ namespace Microsoft.ML.CpuMath.UnitTests
                }

                var actual = CpuMathUtils.DotProductSparse(src, dst, idx, limit);
                Assert.Equal((double)expected, (double)actual, 2);
                Assert.Equal((double)expected, (double)actual, 0.01);
                return RemoteExecutor.SuccessExitCode;
            }, mode, test, options);
        }
@@ -11,31 +11,31 @@
// environment, for example, see https://github.com/isaacabraham/ml-test-experiment/, but
// here we list them explicitly to avoid the dependency on a package loader,
//
// You should build Microsoft.ML.FSharp.Tests in Debug mode for framework net461
// You should build Microsoft.ML.FSharp.Tests in Debug mode for framework net462
// before running this as a script with F# Interactive by editing the project
// file to have:
// <TargetFrameworks>net6.0; net461</TargetFrameworks>
// <TargetFrameworks>net6.0; net462</TargetFrameworks>

#if INTERACTIVE
#r "netstandard"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.Core.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Google.Protobuf.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Newtonsoft.Json.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/System.CodeDom.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.CpuMath.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.Data.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.Transforms.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.ResultProcessor.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.PCA.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.KMeansClustering.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.FastTree.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.Api.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.Sweeper.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.StandardTrainers.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.PipelineInference.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/xunit.core.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/xunit.assert.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.Core.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Google.Protobuf.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Newtonsoft.Json.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/System.CodeDom.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.CpuMath.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.Data.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.Transforms.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.ResultProcessor.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.PCA.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.KMeansClustering.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.FastTree.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.Api.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.Sweeper.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.StandardTrainers.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.PipelineInference.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/xunit.core.dll"
#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/xunit.assert.dll"
#r "System"
#r "System.Core"
#r "System.Xml.Linq"
@ -38,17 +38,17 @@ namespace Microsoft.ML.Fairlearn.Tests
|
|||
{
|
||||
RegressionGroupMetric regressionMetric = mlContext.Fairlearn().Metric.Regression(eval: data, labelColumn: "Price", scoreColumn: "Score", sensitiveFeatureColumn: "Gender");
|
||||
var metricByGroup = regressionMetric.ByGroup();
|
||||
Assert.Equal(-2.30578, Convert.ToSingle(metricByGroup["RSquared"][0]), 3);
|
||||
Assert.Equal(-2039.81453, Convert.ToSingle(metricByGroup["RSquared"][1]), 3);
|
||||
Assert.Equal(1.00000, Convert.ToSingle(metricByGroup["RMS"][0]), 3);
|
||||
Assert.Equal(15.811388, Convert.ToSingle(metricByGroup["RMS"][1]), 3);
|
||||
Assert.Equal(-2.30578, Convert.ToSingle(metricByGroup["RSquared"][0]), 0.001);
|
||||
Assert.Equal(-2039.81453, Convert.ToSingle(metricByGroup["RSquared"][1]), 0.001);
|
||||
Assert.Equal(1.00000, Convert.ToSingle(metricByGroup["RMS"][0]), 0.001);
|
||||
Assert.Equal(15.811388, Convert.ToSingle(metricByGroup["RMS"][1]), 0.001);
|
||||
metricByGroup.Description();
|
||||
Dictionary<string, double> metricOverall = regressionMetric.Overall();
|
||||
Assert.Equal(125.5, metricOverall["MSE"], 1);
|
||||
Assert.Equal(11.202678, metricOverall["RMS"], 4);
|
||||
Assert.Equal(125.5, metricOverall["MSE"], 0.1);
|
||||
Assert.Equal(11.202678, metricOverall["RMS"], 0.0001);
|
||||
Dictionary<string, double> diff = regressionMetric.DifferenceBetweenGroups();
|
||||
Assert.Equal(14.81138, diff["RMS"], 4);
|
||||
Assert.Equal(2037.5, diff["RSquared"], 1);
|
||||
Assert.Equal(14.81138, diff["RMS"], 0.0001);
|
||||
Assert.Equal(2037.5, diff["RSquared"], 0.1);
|
||||
|
||||
}
|
||||
|
||||
|
@ -70,10 +70,10 @@ namespace Microsoft.ML.Fairlearn.Tests
|
|||
|
||||
BinaryGroupMetric metrics = mlContext.Fairlearn().Metric.BinaryClassification(eval: df, labelColumn: "label", predictedColumn: "PredictedLabel", sensitiveFeatureColumn: "group_id");
|
||||
var metricByGroup = metrics.ByGroup();
|
||||
Assert.Equal(0.8, Convert.ToSingle(metricByGroup["Accuracy"][0]), 1);
|
||||
Assert.Equal(0.6, Convert.ToSingle(metricByGroup["Accuracy"][1]), 1);
|
||||
Assert.Equal(0.8, Convert.ToSingle(metricByGroup["Accuracy"][0]), 0.1);
|
||||
Assert.Equal(0.6, Convert.ToSingle(metricByGroup["Accuracy"][1]), 0.1);
|
||||
var metricOverall = metrics.Overall();
|
||||
Assert.Equal(0.7, Convert.ToSingle(metricOverall["Accuracy"]), 1);
|
||||
Assert.Equal(0.7, Convert.ToSingle(metricOverall["Accuracy"]), 0.1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -31,10 +31,10 @@ namespace Microsoft.ML.Fairlearn.Tests
|
|||
PrimitiveDataFrameColumn<float> ypred = new PrimitiveDataFrameColumn<float>("pred", fl);
|
||||
var gSinged = dp.Gamma(ypred);
|
||||
|
||||
Assert.Equal(0.1, Convert.ToSingle(gSinged["value"][0]), 1);
|
||||
Assert.Equal(-0.1, Convert.ToSingle(gSinged["value"][1]), 1);
|
||||
Assert.Equal(-0.1, Convert.ToSingle(gSinged["value"][2]), 1);
|
||||
Assert.Equal(0.1, Convert.ToSingle(gSinged["value"][3]), 1);
|
||||
Assert.Equal(0.1, Convert.ToSingle(gSinged["value"][0]), 0.1);
|
||||
Assert.Equal(-0.1, Convert.ToSingle(gSinged["value"][1]), 0.1);
|
||||
Assert.Equal(-0.1, Convert.ToSingle(gSinged["value"][2]), 0.1);
|
||||
Assert.Equal(0.1, Convert.ToSingle(gSinged["value"][3]), 0.1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -59,8 +59,8 @@ namespace Microsoft.ML.IntegrationTests
|
|||
Assert.Equal(tree.SplitGains.Count, tree.NumberOfNodes);
|
||||
Assert.Equal(tree.NumericalSplitThresholds.Count, tree.NumberOfNodes);
|
||||
Assert.All(tree.CategoricalSplitFlags, flag => Assert.False(flag));
|
||||
Assert.Equal(0, tree.GetCategoricalSplitFeaturesAt(0).Count);
|
||||
Assert.Equal(0, tree.GetCategoricalCategoricalSplitFeatureRangeAt(0).Count);
|
||||
Assert.Empty(tree.GetCategoricalSplitFeaturesAt(0));
|
||||
Assert.Empty(tree.GetCategoricalCategoricalSplitFeatureRangeAt(0));
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -103,8 +103,8 @@ namespace Microsoft.ML.IntegrationTests
|
|||
Assert.Equal(tree.SplitGains.Count, tree.NumberOfNodes);
|
||||
Assert.Equal(tree.NumericalSplitThresholds.Count, tree.NumberOfNodes);
|
||||
Assert.All(tree.CategoricalSplitFlags, flag => Assert.False(flag));
|
||||
Assert.Equal(0, tree.GetCategoricalSplitFeaturesAt(0).Count);
|
||||
Assert.Equal(0, tree.GetCategoricalCategoricalSplitFeatureRangeAt(0).Count);
|
||||
Assert.Empty(tree.GetCategoricalSplitFeaturesAt(0));
|
||||
Assert.Empty(tree.GetCategoricalCategoricalSplitFeatureRangeAt(0));
|
||||
});
|
||||
|
||||
// Add baselines for the model.
|
||||
|
@ -119,8 +119,8 @@ namespace Microsoft.ML.IntegrationTests
|
|||
var expectedThresholds = new float[] { 0.0911167f, 0.06509889f, 0.019873254f, 0.0361835f };
|
||||
for (int i = 0; i < finalTree.NumberOfNodes; ++i)
|
||||
{
|
||||
Assert.Equal(expectedSplitGains[i], finalTree.SplitGains[i], 6);
|
||||
Assert.Equal((double)expectedThresholds[i], (double)finalTree.NumericalSplitThresholds[i], 6);
|
||||
Assert.Equal(expectedSplitGains[i], finalTree.SplitGains[i], 0.000001);
|
||||
Assert.Equal((double)expectedThresholds[i], (double)finalTree.NumericalSplitThresholds[i], 0.000001);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -71,7 +71,7 @@ namespace Microsoft.ML.IntegrationTests
|
|||
var originalPrediction = originalPredictionEngine.Predict(row);
|
||||
var onnxPrediction = onnxPredictionEngine.Predict(row);
|
||||
// Check that the predictions are identical.
|
||||
Assert.Equal(originalPrediction.Score, onnxPrediction.Score[0], precision: 4);
|
||||
Assert.Equal(originalPrediction.Score, onnxPrediction.Score[0], 0.0001);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -170,7 +170,7 @@ namespace Microsoft.ML.IntegrationTests
|
|||
var originalPrediction = originalPredictionEngine.Predict(row);
|
||||
var onnxPrediction = onnxPredictionEngine.Predict(row);
|
||||
// Check that the predictions are identical.
|
||||
Assert.Equal(originalPrediction.Score, onnxPrediction.Score[0], precision: 4);
|
||||
Assert.Equal(originalPrediction.Score, onnxPrediction.Score[0], 0.0001);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -498,7 +498,7 @@ namespace Microsoft.ML.IntegrationTests
|
|||
// Evaluate the model.
|
||||
var binaryClassificationMetrics = mlContext.MulticlassClassification.Evaluate(binaryClassificationPredictions);
|
||||
|
||||
Assert.Equal(0.4367, binaryClassificationMetrics.LogLoss, 4);
|
||||
Assert.Equal(0.4367, binaryClassificationMetrics.LogLoss, 0.0001);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -476,9 +476,9 @@ namespace Microsoft.ML.Tests
|
|||
var transformedValues = onnxTransformer.Transform(idv);
|
||||
var predictions = mlContext.Data.CreateEnumerable<PredictionNoneDimension>(transformedValues, reuseRowObject: false).ToArray();
|
||||
|
||||
Assert.Equal(-0.080, Math.Round(predictions[0].variable[0], 3));
|
||||
Assert.Equal(1.204, Math.Round(predictions[1].variable[0], 3));
|
||||
Assert.Equal(2.27, Math.Round(predictions[2].variable[0], 3));
|
||||
Assert.Equal(-0.080, predictions[0].variable[0], 0.001);
|
||||
Assert.Equal(1.204, predictions[1].variable[0], 0.001);
|
||||
Assert.Equal(2.27, predictions[2].variable[0], 0.001);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
|
|
@ -41,7 +41,7 @@ namespace Microsoft.ML.PerformanceTests
|
|||
TimeSpan timeout = TimeSpan.FromMinutes(5);
|
||||
|
||||
#if NETFRAMEWORK
|
||||
var tfm = "net461";
|
||||
var tfm = "net462";
|
||||
var csProj = CsProjClassicNetToolchain.From(tfm, timeout: timeout);
|
||||
#else
|
||||
var frameworkName = new FrameworkName(AppContext.TargetFrameworkName);
|
||||
|
|
|
@ -43,7 +43,7 @@ namespace Microsoft.ML.RunTests
|
|||
Assert.Equal(binUpperBounds.Length, gam.NumberOfShapeFunctions);
|
||||
|
||||
// Check the intercept
|
||||
Assert.Equal(intercept, gam.Bias, 6);
|
||||
Assert.Equal(intercept, gam.Bias, 0.000001);
|
||||
|
||||
// Check that the binUpperBounds were made correctly
|
||||
CheckArrayOfArrayEquality(binUpperBounds, gam.GetBinUpperBounds());
|
||||
|
|
|
@ -737,8 +737,8 @@ namespace Microsoft.ML.RunTests
|
|||
probGetters[i](ref probs[i]);
|
||||
predGetters[i](ref preds[i]);
|
||||
}
|
||||
Assert.Equal(score, 0.4 * scores.Sum() / predCount, 5);
|
||||
Assert.Equal(prob, 1 / (1 + Math.Exp(-score)), 6);
|
||||
Assert.Equal(score, 0.4 * scores.Sum() / predCount, 0.00001);
|
||||
Assert.Equal(prob, 1 / (1 + Math.Exp(-score)), 0.000001);
|
||||
Assert.True(pred == score > 0);
|
||||
}
|
||||
}
|
||||
|
@ -953,7 +953,7 @@ namespace Microsoft.ML.RunTests
|
|||
for (int j = 0; j < predCount; j++)
|
||||
sum += vectorScores[j].GetItemOrDefault(i);
|
||||
if (float.IsNaN(sum))
|
||||
Assert.Equal((double)vectorScore.GetItemOrDefault(i), (double)sum / predCount, 3);
|
||||
Assert.Equal((double)vectorScore.GetItemOrDefault(i), (double)sum / predCount, 0.001);
|
||||
}
|
||||
Assert.Equal(probs.Count(p => p >= prob), probs.Count(p => p <= prob));
|
||||
}
|
||||
|
|
|
@@ -120,14 +120,14 @@ namespace Microsoft.ML.Sweeper.RunTests
}
else
{
Assert.True(false, "Wrong parameter");
Assert.Fail("Wrong parameter");
}
}
}
}

[Fact]
public void TestSimpleSweeperAsync()
public async void TestSimpleSweeperAsync()
{
var random = new Random(42);
var env = new MLContext(42);
@@ -146,10 +146,11 @@ namespace Microsoft.ML.Sweeper.RunTests
for (int i = 0; i < sweeps; i++)
{
var task = sweeper.ProposeAsync();
var tResult = await task;
Assert.True(task.IsCompleted);
paramSets.Add(task.Result.ParameterSet);
var result = new RunResult(task.Result.ParameterSet, random.NextDouble(), true);
sweeper.Update(task.Result.Id, result);
paramSets.Add(tResult.ParameterSet);
var result = new RunResult(tResult.ParameterSet, random.NextDouble(), true);
sweeper.Update(tResult.Id, result);
}
Assert.Equal(sweeps, paramSets.Count);
CheckAsyncSweeperResult(paramSets);
@@ -167,8 +168,9 @@ namespace Microsoft.ML.Sweeper.RunTests
for (int i = 0; i < sweeps; i++)
{
var task = gridSweeper.ProposeAsync();
var tResult = await task;
Assert.True(task.IsCompleted);
paramSets.Add(task.Result.ParameterSet);
paramSets.Add(tResult.ParameterSet);
}
Assert.Equal(sweeps, paramSets.Count);
CheckAsyncSweeperResult(paramSets);
@@ -326,12 +328,12 @@ namespace Microsoft.ML.Sweeper.RunTests
int[] sleeps = new int[sweeps];
for (int i = 0; i < sleeps.Length; i++)
sleeps[i] = random.Next(10, 100);
var r = Task.Run(() => Parallel.For(0, sweeps, options, (int i) =>
var r = Task.Run(() => Parallel.For(0, sweeps, options, async (int i) =>
{
var task = sweeper.ProposeAsync();
task.Wait();
var tResult = await task;
Assert.Equal(TaskStatus.RanToCompletion, task.Status);
var paramWithId = task.Result;
var paramWithId = tResult;
if (paramWithId == null)
return;
Thread.Sleep(sleeps[i]);
@ -417,7 +419,7 @@ namespace Microsoft.ML.Sweeper.RunTests
|
|||
}
|
||||
else
|
||||
{
|
||||
Assert.True(false, "Wrong parameter");
|
||||
Assert.Fail("Wrong parameter");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -464,7 +466,7 @@ namespace Microsoft.ML.Sweeper.RunTests
|
|||
}
|
||||
else
|
||||
{
|
||||
Assert.True(false, "Wrong parameter");
|
||||
Assert.Fail("Wrong parameter");
|
||||
}
|
||||
}
|
||||
Assert.False(gridPoint[i][j]);
|
||||
|
@ -491,7 +493,7 @@ namespace Microsoft.ML.Sweeper.RunTests
|
|||
}
|
||||
else
|
||||
{
|
||||
Assert.True(false, "Wrong parameter");
|
||||
Assert.Fail("Wrong parameter");
|
||||
}
|
||||
}
|
||||
Assert.False(gridPoint[i][j]);
|
||||
|
@ -523,7 +525,7 @@ namespace Microsoft.ML.Sweeper.RunTests
|
|||
}
|
||||
else
|
||||
{
|
||||
Assert.True(false, "Wrong parameter");
|
||||
Assert.Fail("Wrong parameter");
|
||||
}
|
||||
}
|
||||
Assert.False(gridPoint[i][j]);
|
||||
|
@ -577,7 +579,7 @@ namespace Microsoft.ML.Sweeper.RunTests
|
|||
}
|
||||
else
|
||||
{
|
||||
Assert.True(false, "Wrong parameter");
|
||||
Assert.Fail("Wrong parameter");
|
||||
}
|
||||
}
|
||||
results.Add(new RunResult(parameterSet, random.NextDouble(), true));
|
||||
|
@ -625,7 +627,7 @@ namespace Microsoft.ML.Sweeper.RunTests
|
|||
}
|
||||
else
|
||||
{
|
||||
Assert.True(false, "Wrong parameter");
|
||||
Assert.Fail("Wrong parameter");
|
||||
}
|
||||
}
|
||||
results.Add(new RunResult(parameterSet, random.NextDouble(), true));
|
||||
|
@ -676,7 +678,7 @@ namespace Microsoft.ML.Sweeper.RunTests
|
|||
}
|
||||
else
|
||||
{
|
||||
Assert.True(false, "Wrong parameter");
|
||||
Assert.Fail("Wrong parameter");
|
||||
}
|
||||
}
|
||||
results.Add(new RunResult(parameterSet, random.NextDouble(), true));
|
||||
|
|
|
@@ -12,7 +12,7 @@ namespace Microsoft.ML.TestFramework.Attributes
#if DEBUG
private const string SkipMessage = "BenchmarkDotNet does not allow running the benchmarks in Debug, so this test is disabled for DEBUG";
private readonly bool _isEnvironmentSupported = false;
#elif NET461
#elif NETFRAMEWORK
private const string SkipMessage = "We are currently not running Benchmarks for FullFramework";
private readonly bool _isEnvironmentSupported = false;
#else
@@ -1611,7 +1611,7 @@ namespace Microsoft.ML.RunTests
return;
}

Assert.True(false, "The LDA transform does not throw expected error on empty documents.");
Assert.Fail("The LDA transform does not throw expected error on empty documents.");
}
}
}
@@ -77,7 +77,7 @@ namespace Microsoft.ML.Internal.Internallearn.Test
else
#endif
{
Assert.True(false, $"Assert failed: {msg}");
Assert.Fail($"Assert failed: {msg}");
}
}
@@ -155,7 +155,7 @@ namespace Microsoft.ML.TestFramework

private sealed class RemoteExecutionException : XunitException
{
internal RemoteExecutionException(string stackTrace) : base("Remote process failed with an unhandled exception.", stackTrace) { }
internal RemoteExecutionException(string stackTrace) : base($"Remote process failed with an unhandled exception. {stackTrace}") { }
}

private static MethodInfo GetMethodInfo(Delegate d)
@@ -985,7 +985,7 @@ namespace Microsoft.ML.RunTests
// FastTree internally fails if we try to run it simultaneously and if this happens we wouldn't get model file for training.
[TestCategory(Cat)]
[Fact]
public void CommandTrainFastTreeInDifferentThreads()
public async void CommandTrainFastTreeInDifferentThreads()
{
var dataPath = GetDataPath(TestDatasets.adult.testFilename);
var firstModelOutPath = DeleteOutputPath("TreeTransform-model2.zip");
@@ -1001,10 +1001,11 @@ namespace Microsoft.ML.RunTests
t[1] = new Task<int>(() => MainForTest(secondTrainArgs));
t[0].Start();
t[1].Start();
Task.WaitAll(t);
var t0 = await t[0];
var t1 = await t[1];

Assert.Equal(0, t[0].Result);
Assert.Equal(0, t[1].Result);
Assert.Equal(0, t0);
Assert.Equal(0, t1);
}

[TestCategory(Cat), TestCategory("FastTree")]
@ -33,8 +33,8 @@ namespace Microsoft.ML.Tests
|
|||
// Evaluate
|
||||
var metrics = ML.AnomalyDetection.Evaluate(transformedData, falsePositiveCount: 5);
|
||||
|
||||
Assert.Equal(0.98667, metrics.AreaUnderRocCurve, 5);
|
||||
Assert.Equal(0.90000, metrics.DetectionRateAtFalsePositiveCount, 5);
|
||||
Assert.Equal(0.98667, metrics.AreaUnderRocCurve, 0.00001);
|
||||
Assert.Equal(0.90000, metrics.DetectionRateAtFalsePositiveCount, 0.00001);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
|
|
@ -65,7 +65,7 @@ namespace Microsoft.ML.Tests
|
|||
var metrics2 = mlContext.MulticlassClassification.Evaluate(inputDV2, topKPredictionCount: 4);
|
||||
var output2 = metrics2.TopKAccuracyForAllK.ToArray();
|
||||
for (int i = 0; i < expectedTopKArray2.Length; i++)
|
||||
Assert.Equal(expectedTopKArray2[i], output2[i], precision: 7);
|
||||
Assert.Equal(expectedTopKArray2[i], output2[i], 0.0000001);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -48,7 +48,7 @@ namespace Microsoft.ML.Tests
|
|||
trainState.CurrentBatchIndex = i % trainState.BatchesPerEpoch;
|
||||
trainState.CurrentEpoch = i / trainState.BatchesPerEpoch;
|
||||
float decayedLR = learningRateScheduler.GetLearningRate(trainState);
|
||||
Assert.Equal((double)expectedValues[i], (double)decayedLR, 4);
|
||||
Assert.Equal((double)expectedValues[i], (double)decayedLR, 0.0001);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -91,7 +91,7 @@ namespace Microsoft.ML.Tests.Scenarios.Api
|
|||
try
|
||||
{
|
||||
filter2.GetRowCursorForAllColumns().MoveNext();
|
||||
Assert.True(false, "Throw an error if attribute is applied to a field that is not an IChannel.");
|
||||
Assert.Fail("Throw an error if attribute is applied to a field that is not an IChannel.");
|
||||
}
|
||||
catch (InvalidOperationException ex)
|
||||
{
|
||||
|
@ -114,7 +114,7 @@ namespace Microsoft.ML.Tests.Scenarios.Api
|
|||
try
|
||||
{
|
||||
filter3.GetRowCursorForAllColumns().MoveNext();
|
||||
Assert.True(false, "Throw an error if attribute is applied to a field that is not an IChannel.");
|
||||
Assert.Fail("Throw an error if attribute is applied to a field that is not an IChannel.");
|
||||
}
|
||||
catch (InvalidOperationException ex)
|
||||
{
|
||||
|
|
|
@ -87,7 +87,7 @@ namespace Microsoft.ML.Scenarios
|
|||
Assert.Equal(double.NaN, metrics.NormalizedMutualInformation);
|
||||
//Calculate dbi is false by default so Dbi would be 0
|
||||
Assert.Equal(0d, metrics.DaviesBouldinIndex);
|
||||
Assert.Equal(0d, metrics.AverageDistance, 5);
|
||||
Assert.Equal(0d, metrics.AverageDistance, 0.00001);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -55,9 +55,9 @@ namespace Microsoft.ML.Scenarios
|
|||
PetalWidth = 0.2f,
|
||||
});
|
||||
|
||||
Assert.Equal(1d, prediction.PredictedLabels[0], 2);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[1], 2);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[2], 2);
|
||||
Assert.Equal(1d, prediction.PredictedLabels[0], 0.01);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[1], 0.01);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[2], 0.01);
|
||||
|
||||
prediction = predictFunction.Predict(new IrisData()
|
||||
{
|
||||
|
@ -67,9 +67,9 @@ namespace Microsoft.ML.Scenarios
|
|||
PetalWidth = 2.2f,
|
||||
});
|
||||
|
||||
Assert.Equal(0d, prediction.PredictedLabels[0], 2);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[1], 2);
|
||||
Assert.Equal(1d, prediction.PredictedLabels[2], 2);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[0], 0.01);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[1], 0.01);
|
||||
Assert.Equal(1d, prediction.PredictedLabels[2], 0.01);
|
||||
|
||||
prediction = predictFunction.Predict(new IrisData()
|
||||
{
|
||||
|
@ -79,23 +79,23 @@ namespace Microsoft.ML.Scenarios
|
|||
PetalWidth = 1.2f,
|
||||
});
|
||||
|
||||
Assert.Equal(.2, prediction.PredictedLabels[0], 1);
|
||||
Assert.Equal(.8, prediction.PredictedLabels[1], 1);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[2], 2);
|
||||
Assert.Equal(.2, prediction.PredictedLabels[0], 0.1);
|
||||
Assert.Equal(.8, prediction.PredictedLabels[1], 0.1);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[2], 0.01);
|
||||
|
||||
// Evaluate the trained pipeline
|
||||
var predicted = trainedModel.Transform(testData);
|
||||
var metrics = mlContext.MulticlassClassification.Evaluate(predicted, topKPredictionCount: 3);
|
||||
|
||||
Assert.Equal(.98, metrics.MacroAccuracy);
|
||||
Assert.Equal(.98, metrics.MicroAccuracy, 2);
|
||||
Assert.Equal(.06, metrics.LogLoss, 2);
|
||||
Assert.Equal(.98, metrics.MicroAccuracy, 0.01);
|
||||
Assert.Equal(.06, metrics.LogLoss, 0.01);
|
||||
Assert.Equal(1, metrics.TopKAccuracy);
|
||||
|
||||
Assert.Equal(3, metrics.PerClassLogLoss.Count);
|
||||
Assert.Equal(0d, metrics.PerClassLogLoss[0], 1);
|
||||
Assert.Equal(.1, metrics.PerClassLogLoss[1], 1);
|
||||
Assert.Equal(.1, metrics.PerClassLogLoss[2], 1);
|
||||
Assert.Equal(0d, metrics.PerClassLogLoss[0], 0.1);
|
||||
Assert.Equal(.1, metrics.PerClassLogLoss[1], 0.1);
|
||||
Assert.Equal(.1, metrics.PerClassLogLoss[2], 0.1);
|
||||
}
|
||||
|
||||
public class IrisData
|
||||
|
|
|
@ -54,9 +54,9 @@ namespace Microsoft.ML.Scenarios
|
|||
PetalWidth = 0.2f,
|
||||
});
|
||||
|
||||
Assert.Equal(1d, prediction.PredictedScores[0], 2);
|
||||
Assert.Equal(0d, prediction.PredictedScores[1], 2);
|
||||
Assert.Equal(0d, prediction.PredictedScores[2], 2);
|
||||
Assert.Equal(1d, prediction.PredictedScores[0], 0.01);
|
||||
Assert.Equal(0d, prediction.PredictedScores[1], 0.01);
|
||||
Assert.Equal(0d, prediction.PredictedScores[2], 0.01);
|
||||
Assert.True(prediction.PredictedPlant == "Iris-setosa");
|
||||
|
||||
prediction = predictFunction.Predict(new IrisDataWithStringLabel()
|
||||
|
@ -67,9 +67,9 @@ namespace Microsoft.ML.Scenarios
|
|||
PetalWidth = 2.2f,
|
||||
});
|
||||
|
||||
Assert.Equal(0d, prediction.PredictedScores[0], 2);
|
||||
Assert.Equal(0d, prediction.PredictedScores[1], 2);
|
||||
Assert.Equal(1d, prediction.PredictedScores[2], 2);
|
||||
Assert.Equal(0d, prediction.PredictedScores[0], 0.01);
|
||||
Assert.Equal(0d, prediction.PredictedScores[1], 0.01);
|
||||
Assert.Equal(1d, prediction.PredictedScores[2], 0.01);
|
||||
Assert.True(prediction.PredictedPlant == "Iris-virginica");
|
||||
|
||||
prediction = predictFunction.Predict(new IrisDataWithStringLabel()
|
||||
|
@ -80,9 +80,9 @@ namespace Microsoft.ML.Scenarios
|
|||
PetalWidth = 1.2f,
|
||||
});
|
||||
|
||||
Assert.Equal(.2, prediction.PredictedScores[0], 1);
|
||||
Assert.Equal(.8, prediction.PredictedScores[1], 1);
|
||||
Assert.Equal(0d, prediction.PredictedScores[2], 2);
|
||||
Assert.Equal(.2, prediction.PredictedScores[0], 0.1);
|
||||
Assert.Equal(.8, prediction.PredictedScores[1], 0.1);
|
||||
Assert.Equal(0d, prediction.PredictedScores[2], 0.01);
|
||||
Assert.True(prediction.PredictedPlant == "Iris-versicolor");
|
||||
|
||||
// Evaluate the trained pipeline
|
||||
|
@ -90,15 +90,15 @@ namespace Microsoft.ML.Scenarios
|
|||
var metrics = mlContext.MulticlassClassification.Evaluate(predicted, topKPredictionCount: 3);
|
||||
|
||||
Assert.Equal(.98, metrics.MacroAccuracy);
|
||||
Assert.Equal(.98, metrics.MicroAccuracy, 2);
|
||||
Assert.Equal(.06, metrics.LogLoss, 2);
|
||||
Assert.Equal(.98, metrics.MicroAccuracy, 0.01);
|
||||
Assert.Equal(.06, metrics.LogLoss, 0.01);
|
||||
Assert.InRange(metrics.LogLossReduction, 0.94, 0.96);
|
||||
Assert.Equal(1, metrics.TopKAccuracy);
|
||||
|
||||
Assert.Equal(3, metrics.PerClassLogLoss.Count);
|
||||
Assert.Equal(0d, metrics.PerClassLogLoss[0], 1);
|
||||
Assert.Equal(.1, metrics.PerClassLogLoss[1], 1);
|
||||
Assert.Equal(.1, metrics.PerClassLogLoss[2], 1);
|
||||
Assert.Equal(0d, metrics.PerClassLogLoss[0], 0.1);
|
||||
Assert.Equal(.1, metrics.PerClassLogLoss[1], 0.1);
|
||||
Assert.Equal(.1, metrics.PerClassLogLoss[2], 0.1);
|
||||
}
|
||||
|
||||
private class IrisDataWithStringLabel
|
||||
|
|
|
@ -61,9 +61,9 @@ namespace Microsoft.ML.Scenarios
|
|||
PetalWidth = 0.2f,
|
||||
});
|
||||
|
||||
Assert.Equal(1d, prediction.PredictedLabels[0], 2);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[1], 2);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[2], 2);
|
||||
Assert.Equal(1d, prediction.PredictedLabels[0], 0.01);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[1], 0.01);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[2], 0.01);
|
||||
|
||||
prediction = model.Predict(new IrisData()
|
||||
{
|
||||
|
@ -73,9 +73,9 @@ namespace Microsoft.ML.Scenarios
|
|||
PetalWidth = 2.2f,
|
||||
});
|
||||
|
||||
Assert.Equal(0d, prediction.PredictedLabels[0], 2);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[1], 2);
|
||||
Assert.Equal(1d, prediction.PredictedLabels[2], 2);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[0], 0.01);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[1], 0.01);
|
||||
Assert.Equal(1d, prediction.PredictedLabels[2], 0.01);
|
||||
|
||||
prediction = model.Predict(new IrisData()
|
||||
{
|
||||
|
@ -85,22 +85,22 @@ namespace Microsoft.ML.Scenarios
|
|||
PetalWidth = 1.2f,
|
||||
});
|
||||
|
||||
Assert.Equal(.2, prediction.PredictedLabels[0], 1);
|
||||
Assert.Equal(.8, prediction.PredictedLabels[1], 1);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[2], 2);
|
||||
Assert.Equal(.2, prediction.PredictedLabels[0], 0.1);
|
||||
Assert.Equal(.8, prediction.PredictedLabels[1], 0.1);
|
||||
Assert.Equal(0d, prediction.PredictedLabels[2], 0.01);
|
||||
}
|
||||
|
||||
private void CompareMetrics(MulticlassClassificationMetrics metrics)
|
||||
{
|
||||
Assert.Equal(.98, metrics.MacroAccuracy);
|
||||
Assert.Equal(.98, metrics.MicroAccuracy, 2);
|
||||
Assert.Equal(.98, metrics.MicroAccuracy, 0.01);
|
||||
Assert.InRange(metrics.LogLoss, .05, .06);
|
||||
Assert.InRange(metrics.LogLossReduction, 0.94, 0.96);
|
||||
|
||||
Assert.Equal(3, metrics.PerClassLogLoss.Count);
|
||||
Assert.Equal(0d, metrics.PerClassLogLoss[0], 1);
|
||||
Assert.Equal(.1, metrics.PerClassLogLoss[1], 1);
|
||||
Assert.Equal(.1, metrics.PerClassLogLoss[2], 1);
|
||||
Assert.Equal(0d, metrics.PerClassLogLoss[0], 0.1);
|
||||
Assert.Equal(.1, metrics.PerClassLogLoss[1], 0.1);
|
||||
Assert.Equal(.1, metrics.PerClassLogLoss[2], 0.1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -161,24 +161,24 @@ namespace Microsoft.ML.Scenarios
|
|||
var predictions = transformer.Transform(data);
|
||||
|
||||
var metrics = _mlContext.MulticlassClassification.Evaluate(predictions);
|
||||
Assert.Equal(1, metrics.MicroAccuracy, 2);
|
||||
Assert.Equal(1, metrics.MicroAccuracy, 0.01);
|
||||
|
||||
var predictFunction = _mlContext.Model.CreatePredictionEngine<CifarData, CifarPrediction>(transformer);
|
||||
var prediction = predictFunction.Predict(new CifarData()
|
||||
{
|
||||
ImagePath = GetDataPath("images/banana.jpg")
|
||||
});
|
||||
Assert.Equal(0d, prediction.PredictedScores[0], 2);
|
||||
Assert.Equal(1d, prediction.PredictedScores[1], 2);
|
||||
Assert.Equal(0d, prediction.PredictedScores[2], 2);
|
||||
Assert.Equal(0d, prediction.PredictedScores[0], 0.01);
|
||||
Assert.Equal(1d, prediction.PredictedScores[1], 0.01);
|
||||
Assert.Equal(0d, prediction.PredictedScores[2], 0.01);
|
||||
|
||||
prediction = predictFunction.Predict(new CifarData()
|
||||
{
|
||||
ImagePath = GetDataPath("images/hotdog.jpg")
|
||||
});
|
||||
Assert.Equal(0d, prediction.PredictedScores[0], 2);
|
||||
Assert.Equal(0d, prediction.PredictedScores[1], 2);
|
||||
Assert.Equal(1d, prediction.PredictedScores[2], 2);
|
||||
Assert.Equal(0d, prediction.PredictedScores[0], 0.01);
|
||||
Assert.Equal(0d, prediction.PredictedScores[1], 0.01);
|
||||
Assert.Equal(1d, prediction.PredictedScores[2], 0.01);
|
||||
(transformer as IDisposable)?.Dispose();
|
||||
}
|
||||
|
||||
|
@ -677,7 +677,7 @@ namespace Microsoft.ML.Scenarios
|
|||
var metrics = _mlContext.MulticlassClassification.Evaluate(predicted);
|
||||
|
||||
Assert.Equal(0.99, metrics.MicroAccuracy, .01);
|
||||
Assert.Equal(0.93, metrics.MacroAccuracy, 2);
|
||||
Assert.Equal(0.93, metrics.MacroAccuracy, 0.01);
|
||||
|
||||
var oneSample = GetOneMNISTExample();
|
||||
|
||||
|
@ -902,7 +902,7 @@ namespace Microsoft.ML.Scenarios
|
|||
|
||||
// First group of checks
|
||||
Assert.Equal(0.99, metrics.MicroAccuracy, .01);
|
||||
Assert.Equal(.93, metrics.MacroAccuracy, 2);
|
||||
Assert.Equal(.93, metrics.MacroAccuracy, 0.01);
|
||||
|
||||
// An in-memory example. Its label is predicted below.
|
||||
var oneSample = GetOneMNISTExample();
|
||||
|
@ -1172,7 +1172,7 @@ namespace Microsoft.ML.Scenarios
|
|||
var outputSchema = transformer.GetOutputSchema(data.Schema);
|
||||
|
||||
var metrics = _mlContext.MulticlassClassification.Evaluate(transformedData);
|
||||
Assert.Equal(1, metrics.MicroAccuracy, 2);
|
||||
Assert.Equal(1, metrics.MicroAccuracy, 0.01);
|
||||
|
||||
var predictFunction = _mlContext.Model.CreatePredictionEngine<CifarData, CifarPrediction>(transformer);
|
||||
var predictions = new[]
|
||||
|
@ -1207,7 +1207,7 @@ namespace Microsoft.ML.Scenarios
|
|||
for (var i = 0; i < predictions.Length; i++)
|
||||
{
|
||||
for (var j = 0; j < predictions[i].PredictedScores.Length; j++)
|
||||
Assert.Equal((double)predictions[i].PredictedScores[j], (double)testPredictions[i].PredictedScores[j], 2);
|
||||
Assert.Equal((double)predictions[i].PredictedScores[j], (double)testPredictions[i].PredictedScores[j], 0.01);
|
||||
}
|
||||
(testTransformer as IDisposable)?.Dispose();
|
||||
testPredictFunction.Dispose();
|
||||
|
|
|
@ -109,7 +109,7 @@ namespace Microsoft.ML.EntryPoints.Tests
|
|||
return;
|
||||
}
|
||||
|
||||
Assert.True(false, "Test failed.");
|
||||
Assert.Fail("Test failed.");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
|
@ -133,7 +133,7 @@ namespace Microsoft.ML.EntryPoints.Tests
|
|||
return;
|
||||
}
|
||||
|
||||
Assert.True(false, "Test failed.");
|
||||
Assert.Fail("Test failed.");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -96,12 +96,12 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
|
||||
{
|
||||
if (RuntimeInformation.ProcessArchitecture == Architecture.Arm64)
|
||||
Assert.Equal(0.3041052520275116, leftMatrix[0], 4);
|
||||
Assert.Equal(0.3041052520275116, leftMatrix[0], 0.0001);
|
||||
else
|
||||
Assert.Equal(0.309137582778931, leftMatrix[0], 4);
|
||||
Assert.Equal(0.468956589698792, leftMatrix[leftMatrix.Count - 1], 4);
|
||||
Assert.Equal(0.303486406803131, rightMatrix[0], 4);
|
||||
Assert.Equal(0.503888845443726, rightMatrix[rightMatrix.Count - 1], 4);
|
||||
Assert.Equal(0.309137582778931, leftMatrix[0], 0.0001);
|
||||
Assert.Equal(0.468956589698792, leftMatrix[leftMatrix.Count - 1], 0.0001);
|
||||
Assert.Equal(0.303486406803131, rightMatrix[0], 0.0001);
|
||||
Assert.Equal(0.503888845443726, rightMatrix[rightMatrix.Count - 1], 0.0001);
|
||||
}
|
||||
// Read the test data set as an IDataView
|
||||
var testData = reader.Load(new MultiFileSource(GetDataPath(TestDatasets.trivialMatrixFactorization.testFilename)));
|
||||
|
@ -687,13 +687,13 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
|
||||
Assert.Equal(1u, firstElement.MatrixColumnIndex);
|
||||
Assert.Equal(1u, firstElement.MatrixRowIndex);
|
||||
Assert.Equal(0.987113833, firstElement.Score, 3);
|
||||
Assert.Equal(1d, firstElement.Value, 3);
|
||||
Assert.Equal(0.987113833, firstElement.Score, 0.001);
|
||||
Assert.Equal(1d, firstElement.Value, 0.001);
|
||||
|
||||
Assert.Equal(60u, lastElement.MatrixColumnIndex);
|
||||
Assert.Equal(100u, lastElement.MatrixRowIndex);
|
||||
Assert.Equal(0.149993762, lastElement.Score, 3);
|
||||
Assert.Equal(0.15, lastElement.Value, 3);
|
||||
Assert.Equal(0.149993762, lastElement.Score, 0.001);
|
||||
Assert.Equal(0.15, lastElement.Value, 0.001);
|
||||
|
||||
// Two columns with highest predicted score to the 2nd row (indexed by 1). If we view row index as user ID and column as game ID,
|
||||
// the following list contains the games recommended by the trained model. Note that sometime, you may want to exclude training
|
||||
|
@ -705,13 +705,13 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
|
||||
Assert.Equal(1u, firstElement.MatrixColumnIndex);
|
||||
Assert.Equal(1u, firstElement.MatrixRowIndex);
|
||||
Assert.Equal(0.987113833, firstElement.Score, 3);
|
||||
Assert.Equal(1d, firstElement.Value, 3);
|
||||
Assert.Equal(0.987113833, firstElement.Score, 0.001);
|
||||
Assert.Equal(1d, firstElement.Value, 0.001);
|
||||
|
||||
Assert.Equal(11u, lastElement.MatrixColumnIndex);
|
||||
Assert.Equal(1u, lastElement.MatrixRowIndex);
|
||||
Assert.Equal(0.987113833, lastElement.Score, 3);
|
||||
Assert.Equal(1d, lastElement.Value, 3);
|
||||
Assert.Equal(0.987113833, lastElement.Score, 0.001);
|
||||
Assert.Equal(1d, lastElement.Value, 0.001);
|
||||
}
|
||||
|
||||
// A data structure used to encode a single value in matrix
|
||||
|
@ -842,7 +842,7 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
|
||||
// Check if results computed by SSE code and MF predictor are the same.
|
||||
for (int i = 0; i < predictions.Count(); ++i)
|
||||
Assert.Equal((double)predictions[i].Score, (double)valuesAtSecondColumn[i], 3);
|
||||
Assert.Equal((double)predictions[i].Score, (double)valuesAtSecondColumn[i], 0.001);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -129,10 +129,10 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
// Verify the metrics produced are different.
|
||||
var metrics1 = mlContext.BinaryClassification.Evaluate(prediction1);
|
||||
var metrics2 = mlContext.BinaryClassification.Evaluate(prediction2);
|
||||
Assert.Equal(0.9658, metrics1.AreaUnderRocCurve, 4);
|
||||
Assert.Equal(0.3488, metrics1.LogLoss, 4);
|
||||
Assert.Equal(0.9596, metrics2.AreaUnderRocCurve, 4);
|
||||
Assert.Equal(0.3591, metrics2.LogLoss, 4);
|
||||
Assert.Equal(0.9658, metrics1.AreaUnderRocCurve, 0.0001);
|
||||
Assert.Equal(0.3488, metrics1.LogLoss, 0.0001);
|
||||
Assert.Equal(0.9596, metrics2.AreaUnderRocCurve, 0.0001);
|
||||
Assert.Equal(0.3591, metrics2.LogLoss, 0.0001);
|
||||
|
||||
// Verify the raw scores are different.
|
||||
var scores1 = prediction1.GetColumn<float>(prediction1.Schema["Score"]).ToArray();
|
||||
|
@ -188,10 +188,10 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
// Verify the metrics produced are different.
|
||||
var metrics1 = mlContext.MulticlassClassification.Evaluate(prediction1, labelColumnName: "LabelIndex", topKPredictionCount: 1);
|
||||
var metrics2 = mlContext.MulticlassClassification.Evaluate(prediction2, labelColumnName: "LabelIndex", topKPredictionCount: 1);
|
||||
Assert.Equal(0.9100, metrics1.TopKAccuracy, 4);
|
||||
Assert.Equal(0.2411, metrics1.LogLoss, 4);
|
||||
Assert.Equal(0.8800, metrics2.TopKAccuracy, 4);
|
||||
Assert.Equal(0.2464, metrics2.LogLoss, 4);
|
||||
Assert.Equal(0.9100, metrics1.TopKAccuracy, 0.0001);
|
||||
Assert.Equal(0.2411, metrics1.LogLoss, 0.0001);
|
||||
Assert.Equal(0.8800, metrics2.TopKAccuracy, 0.0001);
|
||||
Assert.Equal(0.2464, metrics2.LogLoss, 0.0001);
|
||||
|
||||
// Verify the raw scores are different.
|
||||
var scores1 = prediction1.GetColumn<float[]>(prediction1.Schema["Score"]).ToArray();
|
||||
|
|
|
@ -627,8 +627,8 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
var loadedPrediction = loadedModel.Transform(dataView);
|
||||
var loadedMetrics = ML.Regression.Evaluate(loadedPrediction);
|
||||
|
||||
Assert.Equal(metrics.MeanAbsoluteError, loadedMetrics.MeanAbsoluteError, 5);
|
||||
Assert.Equal(metrics.MeanSquaredError, loadedMetrics.MeanSquaredError, 5);
|
||||
Assert.Equal(metrics.MeanAbsoluteError, loadedMetrics.MeanAbsoluteError, 0.00001);
|
||||
Assert.Equal(metrics.MeanSquaredError, loadedMetrics.MeanSquaredError, 0.00001);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
|
@ -687,8 +687,8 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
var loadedMetrics = ML.Regression.Evaluate(loadedPrediction);
|
||||
|
||||
// Check if the loaded model produces the same result as the trained model.
|
||||
Assert.Equal(metrics.MeanAbsoluteError, loadedMetrics.MeanAbsoluteError, 5);
|
||||
Assert.Equal(metrics.MeanSquaredError, loadedMetrics.MeanSquaredError, 5);
|
||||
Assert.Equal(metrics.MeanAbsoluteError, loadedMetrics.MeanAbsoluteError, 0.00001);
|
||||
Assert.Equal(metrics.MeanSquaredError, loadedMetrics.MeanSquaredError, 0.00001);
|
||||
|
||||
var secondPipeline = ML.Transforms.CopyColumns("CopiedFeatures", "Features")
|
||||
.Append(ML.Transforms.NormalizeBinning("CopiedFeatures"))
|
||||
|
|
|
@ -557,7 +557,7 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
double sum = 0;
|
||||
for (int j = 0; j < _classNumber; ++j)
|
||||
{
|
||||
Assert.Equal(nativeResult0[j + i * _classNumber], mlnetPredictions[i].Score[j], 6);
|
||||
Assert.Equal(nativeResult0[j + i * _classNumber], mlnetPredictions[i].Score[j], 0.000001);
|
||||
if (float.IsNaN((float)nativeResult1[j + i * _classNumber]))
|
||||
continue;
|
||||
sum += MathUtils.SigmoidSlow(sigmoidScale * (float)nativeResult1[j + i * _classNumber]);
|
||||
|
@ -565,7 +565,7 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
for (int j = 0; j < _classNumber; ++j)
|
||||
{
|
||||
double prob = MathUtils.SigmoidSlow(sigmoidScale * (float)nativeResult1[j + i * _classNumber]);
|
||||
Assert.Equal(prob / sum, mlnetPredictions[i].Score[j], 6);
|
||||
Assert.Equal(prob / sum, mlnetPredictions[i].Score[j], 0.000001);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -593,7 +593,7 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
double sum = 0;
|
||||
for (int j = 0; j < _classNumber; ++j)
|
||||
{
|
||||
Assert.Equal(nativeResult0[j + i * _classNumber], mlnetPredictions[i].Score[j], 6);
|
||||
Assert.Equal(nativeResult0[j + i * _classNumber], mlnetPredictions[i].Score[j], 0.000001);
|
||||
if (float.IsNaN((float)nativeResult1[j + i * _classNumber]))
|
||||
continue;
|
||||
sum += MathUtils.SigmoidSlow((float)sigmoidScale * (float)nativeResult1[j + i * _classNumber]);
|
||||
|
@ -601,7 +601,7 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
for (int j = 0; j < _classNumber; ++j)
|
||||
{
|
||||
double prob = MathUtils.SigmoidSlow((float)sigmoidScale * (float)nativeResult1[j + i * _classNumber]);
|
||||
Assert.Equal(prob / sum, mlnetPredictions[i].Score[j], 6);
|
||||
Assert.Equal(prob / sum, mlnetPredictions[i].Score[j], 0.000001);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -664,13 +664,13 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
double sum = 0;
|
||||
for (int j = 0; j < _classNumber; ++j)
|
||||
{
|
||||
Assert.Equal(nativeResult0[j + i * _classNumber], mlnetPredictions[i].Score[j], 6);
|
||||
Assert.Equal(nativeResult0[j + i * _classNumber], mlnetPredictions[i].Score[j], 0.000001);
|
||||
sum += Math.Exp((float)nativeResult1[j + i * _classNumber]);
|
||||
}
|
||||
for (int j = 0; j < _classNumber; ++j)
|
||||
{
|
||||
double prob = Math.Exp(nativeResult1[j + i * _classNumber]);
|
||||
Assert.Equal(prob / sum, mlnetPredictions[i].Score[j], 6);
|
||||
Assert.Equal(prob / sum, mlnetPredictions[i].Score[j], 0.000001);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -693,13 +693,13 @@ namespace Microsoft.ML.Tests.TrainerEstimators
|
|||
double sum = 0;
|
||||
for (int j = 0; j < _classNumber; ++j)
|
||||
{
|
||||
Assert.Equal(nativeResult0[j + i * _classNumber], mlnetPredictions[i].Score[j], 6);
|
||||
Assert.Equal(nativeResult0[j + i * _classNumber], mlnetPredictions[i].Score[j], 0.000001);
|
||||
sum += Math.Exp((float)nativeResult1[j + i * _classNumber]);
|
||||
}
|
||||
for (int j = 0; j < _classNumber; ++j)
|
||||
{
|
||||
double prob = Math.Exp(nativeResult1[j + i * _classNumber]);
|
||||
Assert.Equal(prob / sum, mlnetPredictions[i].Score[j], 6);
|
||||
Assert.Equal(prob / sum, mlnetPredictions[i].Score[j], 0.000001);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -395,36 +395,36 @@ namespace Microsoft.ML.Tests.Transformers
|
|||
var robustScalerTransformer = robustScalerEstimator.Fit(data);
|
||||
|
||||
floatAffineModel = ((NormalizingTransformer)robustScalerTransformer).Columns[0].ModelParameters as NormalizingTransformer.AffineNormalizerModelParameters<float>;
|
||||
Assert.Equal(1 / 1.8, floatAffineModel.Scale, 2);
|
||||
Assert.Equal(5.8d, floatAffineModel.Offset, 2);
|
||||
Assert.Equal(1 / 1.8, floatAffineModel.Scale, 0.01);
|
||||
Assert.Equal(5.8d, floatAffineModel.Offset, 0.01);
|
||||
|
||||
floatAffineModelVec = ((NormalizingTransformer)robustScalerTransformer).Columns[1].ModelParameters as NormalizingTransformer.AffineNormalizerModelParameters<ImmutableArray<float>>;
|
||||
Assert.Equal(4, floatAffineModelVec.Scale.Length);
|
||||
Assert.Equal(.5555556, floatAffineModelVec.Scale[0], 2);
|
||||
Assert.Equal(.8333333, floatAffineModelVec.Scale[1], 2);
|
||||
Assert.Equal(.3389830, floatAffineModelVec.Scale[2], 2);
|
||||
Assert.Equal(.8333333, floatAffineModelVec.Scale[3], 2);
|
||||
Assert.Equal(.5555556, floatAffineModelVec.Scale[0], 0.01);
|
||||
Assert.Equal(.8333333, floatAffineModelVec.Scale[1], 0.01);
|
||||
Assert.Equal(.3389830, floatAffineModelVec.Scale[2], 0.01);
|
||||
Assert.Equal(.8333333, floatAffineModelVec.Scale[3], 0.01);
|
||||
|
||||
Assert.Equal(5.8, floatAffineModelVec.Offset[0], 2);
|
||||
Assert.Equal(3d, floatAffineModelVec.Offset[1], 2);
|
||||
Assert.Equal(4.4, floatAffineModelVec.Offset[2], 2);
|
||||
Assert.Equal(1.3, floatAffineModelVec.Offset[3], 2);
|
||||
Assert.Equal(5.8, floatAffineModelVec.Offset[0], 0.01);
|
||||
Assert.Equal(3d, floatAffineModelVec.Offset[1], 0.01);
|
||||
Assert.Equal(4.4, floatAffineModelVec.Offset[2], 0.01);
|
||||
Assert.Equal(1.3, floatAffineModelVec.Offset[3], 0.01);
|
||||
|
||||
doubleAffineModel = ((NormalizingTransformer)robustScalerTransformer).Columns[2].ModelParameters as NormalizingTransformer.AffineNormalizerModelParameters<double>;
|
||||
Assert.Equal(1 / 1.8, doubleAffineModel.Scale, 2);
|
||||
Assert.Equal(5.8, doubleAffineModel.Offset, 2);
|
||||
Assert.Equal(1 / 1.8, doubleAffineModel.Scale, 0.01);
|
||||
Assert.Equal(5.8, doubleAffineModel.Offset, 0.01);
|
||||
|
||||
doubleAffineModelVector = ((NormalizingTransformer)robustScalerTransformer).Columns[3].ModelParameters as NormalizingTransformer.AffineNormalizerModelParameters<ImmutableArray<double>>;
|
||||
Assert.Equal(4, doubleAffineModelVector.Scale.Length);
|
||||
Assert.Equal(.5555556, doubleAffineModelVector.Scale[0], 2);
|
||||
Assert.Equal(.8333333, doubleAffineModelVector.Scale[1], 2);
|
||||
Assert.Equal(.3389830, doubleAffineModelVector.Scale[2], 2);
|
||||
Assert.Equal(.8333333, doubleAffineModelVector.Scale[3], 2);
|
||||
Assert.Equal(.5555556, doubleAffineModelVector.Scale[0], 0.01);
|
||||
Assert.Equal(.8333333, doubleAffineModelVector.Scale[1], 0.01);
|
||||
Assert.Equal(.3389830, doubleAffineModelVector.Scale[2], 0.01);
|
||||
Assert.Equal(.8333333, doubleAffineModelVector.Scale[3], 0.01);
|
||||
|
||||
Assert.Equal(5.8, doubleAffineModelVector.Offset[0], 2);
|
||||
Assert.Equal(3, doubleAffineModelVector.Offset[1], 2);
|
||||
Assert.Equal(4.4, doubleAffineModelVector.Offset[2], 2);
|
||||
Assert.Equal(1.3, doubleAffineModelVector.Offset[3], 2);
|
||||
Assert.Equal(5.8, doubleAffineModelVector.Offset[0], 0.01);
|
||||
Assert.Equal(3, doubleAffineModelVector.Offset[1], 0.01);
|
||||
Assert.Equal(4.4, doubleAffineModelVector.Offset[2], 0.01);
|
||||
Assert.Equal(1.3, doubleAffineModelVector.Offset[3], 0.01);
|
||||
|
||||
// Robust scaler no offset
|
||||
robustScalerEstimator = context.Transforms.NormalizeRobustScaling(
|
||||
|
@ -435,28 +435,28 @@ namespace Microsoft.ML.Tests.Transformers
|
|||
robustScalerTransformer = robustScalerEstimator.Fit(data);
|
||||
|
||||
floatAffineModel = ((NormalizingTransformer)robustScalerTransformer).Columns[0].ModelParameters as NormalizingTransformer.AffineNormalizerModelParameters<float>;
|
||||
Assert.Equal(1 / 1.8, floatAffineModel.Scale, 2);
|
||||
Assert.Equal(0d, floatAffineModel.Offset, 2);
|
||||
Assert.Equal(1 / 1.8, floatAffineModel.Scale, 0.01);
|
||||
Assert.Equal(0d, floatAffineModel.Offset, 0.01);
|
||||
|
||||
floatAffineModelVec = ((NormalizingTransformer)robustScalerTransformer).Columns[1].ModelParameters as NormalizingTransformer.AffineNormalizerModelParameters<ImmutableArray<float>>;
|
||||
Assert.Equal(4, floatAffineModelVec.Scale.Length);
|
||||
Assert.Equal(.5555556, floatAffineModelVec.Scale[0], 2);
|
||||
Assert.Equal(.8333333, floatAffineModelVec.Scale[1], 2);
|
||||
Assert.Equal(.3389830, floatAffineModelVec.Scale[2], 2);
|
||||
Assert.Equal(.8333333, floatAffineModelVec.Scale[3], 2);
|
||||
Assert.Equal(.5555556, floatAffineModelVec.Scale[0], 0.01);
|
||||
Assert.Equal(.8333333, floatAffineModelVec.Scale[1], 0.01);
|
||||
Assert.Equal(.3389830, floatAffineModelVec.Scale[2], 0.01);
|
||||
Assert.Equal(.8333333, floatAffineModelVec.Scale[3], 0.01);
|
||||
|
||||
Assert.Empty(floatAffineModelVec.Offset);
|
||||
|
||||
doubleAffineModel = ((NormalizingTransformer)robustScalerTransformer).Columns[2].ModelParameters as NormalizingTransformer.AffineNormalizerModelParameters<double>;
|
||||
Assert.Equal(1 / 1.8, doubleAffineModel.Scale, 2);
|
||||
Assert.Equal(0, doubleAffineModel.Offset, 2);
|
||||
Assert.Equal(1 / 1.8, doubleAffineModel.Scale, 0.01);
|
||||
Assert.Equal(0, doubleAffineModel.Offset, 0.01);
|
||||
|
||||
doubleAffineModelVector = ((NormalizingTransformer)robustScalerTransformer).Columns[3].ModelParameters as NormalizingTransformer.AffineNormalizerModelParameters<ImmutableArray<double>>;
|
||||
Assert.Equal(4, doubleAffineModelVector.Scale.Length);
|
||||
Assert.Equal(.5555556, doubleAffineModelVector.Scale[0], 2);
|
||||
Assert.Equal(.8333333, doubleAffineModelVector.Scale[1], 2);
|
||||
Assert.Equal(.3389830, doubleAffineModelVector.Scale[2], 2);
|
||||
Assert.Equal(.8333333, doubleAffineModelVector.Scale[3], 2);
|
||||
Assert.Equal(.5555556, doubleAffineModelVector.Scale[0], 0.01);
|
||||
Assert.Equal(.8333333, doubleAffineModelVector.Scale[1], 0.01);
|
||||
Assert.Equal(.3389830, doubleAffineModelVector.Scale[2], 0.01);
|
||||
Assert.Equal(.8333333, doubleAffineModelVector.Scale[3], 0.01);
|
||||
|
||||
Assert.Empty(doubleAffineModelVector.Offset);
|
||||
|
||||
|
|
|
@ -195,10 +195,10 @@ namespace Microsoft.ML.Tests
|
|||
while (enumerator.MoveNext() && index < expectedValues.Count)
|
||||
{
|
||||
row = enumerator.Current;
|
||||
Assert.Equal(expectedValues[index++], row.Change[0], precision: 7); // Alert
|
||||
Assert.Equal(expectedValues[index++], row.Change[1], precision: 7); // Raw score
|
||||
Assert.Equal(expectedValues[index++], row.Change[2], precision: 7); // P-Value score
|
||||
Assert.Equal(expectedValues[index++], row.Change[3], precision: 7); // Martingale score
|
||||
Assert.Equal(expectedValues[index++], row.Change[0], 0.0000001); // Alert
|
||||
Assert.Equal(expectedValues[index++], row.Change[1], 0.0000001); // Raw score
|
||||
Assert.Equal(expectedValues[index++], row.Change[2], 0.0000001); // P-Value score
|
||||
Assert.Equal(expectedValues[index++], row.Change[3], 0.0000001); // Martingale score
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -255,10 +255,10 @@ namespace Microsoft.ML.Tests
|
|||
var engine2 = model2.CreateTimeSeriesEngine<Data, Prediction>(ml);
|
||||
var prediction2 = engine2.Predict(new Data(1));
|
||||
//Raw score after first input.
|
||||
Assert.Equal(1.1661833524703979, prediction2.Change[1], precision: 5); // Raw score
|
||||
Assert.Equal(1.1661833524703979, prediction2.Change[1], 0.00001); // Raw score
|
||||
prediction2 = engine2.Predict(new Data(1));
|
||||
//Raw score after second input.
|
||||
Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 5); // Raw score
|
||||
Assert.Equal(0.12216401100158691, prediction2.Change[1], 0.00001); // Raw score
|
||||
|
||||
//Even though time series column is not requested it will
|
||||
// pass the observation through time series transform and update the state with the first input.
|
||||
|
@ -275,7 +275,7 @@ namespace Microsoft.ML.Tests
|
|||
//and raw score should match the raw score obtained by passing the two input in the first model.
|
||||
var engine3 = model3.CreateTimeSeriesEngine<Data, Prediction>(ml);
|
||||
var prediction3 = engine3.Predict(new Data(1));
|
||||
Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 5); // Raw score
|
||||
Assert.Equal(0.12216401100158691, prediction2.Change[1], 0.00001); // Raw score
|
||||
}
|
||||
|
||||
[NativeDependencyFact("MklImports")]
|
||||
|
@ -318,10 +318,10 @@ namespace Microsoft.ML.Tests
|
|||
//Model 1: Prediction #1.
|
||||
var engine = model.CreateTimeSeriesEngine<Data, Prediction>(ml);
|
||||
var prediction = engine.Predict(new Data(1));
|
||||
Assert.Equal(0, prediction.Change[0], precision: 7); // Alert
|
||||
Assert.Equal(1.1661833524703979, prediction.Change[1], precision: 5); // Raw score
|
||||
Assert.Equal(0.5, prediction.Change[2], precision: 7); // P-Value score
|
||||
Assert.Equal(5.1200000000000114E-08, prediction.Change[3], precision: 7); // Martingale score
|
||||
Assert.Equal(0, prediction.Change[0], 0.0000001); // Alert
|
||||
Assert.Equal(1.1661833524703979, prediction.Change[1], 0.00001); // Raw score
|
||||
Assert.Equal(0.5, prediction.Change[2], 0.0000001); // P-Value score
|
||||
Assert.Equal(5.1200000000000114E-08, prediction.Change[3], 0.0000001); // Martingale score
|
||||
|
||||
//Model 1: Checkpoint.
|
||||
var modelPath = "temp.zip";
|
||||
|
@ -329,10 +329,10 @@ namespace Microsoft.ML.Tests
|
|||
|
||||
//Model 1: Prediction #2
|
||||
prediction = engine.Predict(new Data(1));
|
||||
Assert.Equal(0, prediction.Change[0], precision: 7); // Alert
|
||||
Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 5); // Raw score
|
||||
Assert.Equal(0.14823824685192111, prediction.Change[2], precision: 5); // P-Value score
|
||||
Assert.Equal(1.5292508189989167E-07, prediction.Change[3], precision: 7); // Martingale score
|
||||
Assert.Equal(0, prediction.Change[0], 0.0000001); // Alert
|
||||
Assert.Equal(0.12216401100158691, prediction.Change[1], 0.00001); // Raw score
|
||||
Assert.Equal(0.14823824685192111, prediction.Change[2], 0.00001); // P-Value score
|
||||
Assert.Equal(1.5292508189989167E-07, prediction.Change[3], 0.0000001); // Martingale score
|
||||
|
||||
// Load Model 1.
|
||||
ITransformer model2 = null;
|
||||
|
@ -342,10 +342,10 @@ namespace Microsoft.ML.Tests
|
|||
//Predict and expect the same result after checkpointing(Prediction #2).
|
||||
engine = model2.CreateTimeSeriesEngine<Data, Prediction>(ml);
|
||||
prediction = engine.Predict(new Data(1));
|
||||
Assert.Equal(0, prediction.Change[0], precision: 7); // Alert
|
||||
Assert.Equal(0.12216401100158691, prediction.Change[1], precision: 5); // Raw score
|
||||
Assert.Equal(0.14823824685192111, prediction.Change[2], precision: 5); // P-Value score
|
||||
Assert.Equal(1.5292508189989167E-07, prediction.Change[3], precision: 5); // Martingale score
|
||||
Assert.Equal(0, prediction.Change[0], 0.0000001); // Alert
|
||||
Assert.Equal(0.12216401100158691, prediction.Change[1], 0.00001); // Raw score
|
||||
Assert.Equal(0.14823824685192111, prediction.Change[2], 0.00001); // P-Value score
|
||||
Assert.Equal(1.5292508189989167E-07, prediction.Change[3], 0.00001); // Martingale score
|
||||
}
|
||||
|
||||
[NativeDependencyFact("MklImports")]
|
||||
|
@ -405,9 +405,9 @@ namespace Microsoft.ML.Tests
|
|||
|
||||
for (int localIndex = 0; localIndex < 4; localIndex++)
|
||||
{
|
||||
Assert.Equal(expectedForecast[localIndex], row.Forecast[localIndex], precision: 7);
|
||||
Assert.Equal(minCnf[localIndex], row.MinCnf[localIndex], precision: 7);
|
||||
Assert.Equal(maxCnf[localIndex], row.MaxCnf[localIndex], precision: 7);
|
||||
Assert.Equal(expectedForecast[localIndex], row.Forecast[localIndex], 0.0000001);
|
||||
Assert.Equal(minCnf[localIndex], row.MinCnf[localIndex], 0.0000001);
|
||||
Assert.Equal(maxCnf[localIndex], row.MaxCnf[localIndex], 0.0000001);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -645,7 +645,7 @@ namespace Microsoft.ML.Tests
|
|||
if (k == 20)
|
||||
{
|
||||
Assert.Equal(1, prediction.Prediction[0]);
|
||||
Assert.Equal(5.00, prediction.Prediction[3], 2);
|
||||
Assert.Equal(5.00, prediction.Prediction[3], 0.01);
|
||||
}
|
||||
else
|
||||
Assert.Equal(0, prediction.Prediction[0]);
|
||||
|
@ -655,10 +655,10 @@ namespace Microsoft.ML.Tests
|
|||
if (k == 20)
|
||||
{
|
||||
Assert.Equal(1, prediction.Prediction[0]);
|
||||
Assert.Equal(5.00, prediction.Prediction[3], 2);
|
||||
Assert.Equal(5.00, prediction.Prediction[4], 2);
|
||||
Assert.Equal(5.01, prediction.Prediction[5], 2);
|
||||
Assert.Equal(4.99, prediction.Prediction[6], 2);
|
||||
Assert.Equal(5.00, prediction.Prediction[3], 0.01);
|
||||
Assert.Equal(5.00, prediction.Prediction[4], 0.01);
|
||||
Assert.Equal(5.01, prediction.Prediction[5], 0.01);
|
||||
Assert.Equal(4.99, prediction.Prediction[6], 0.01);
|
||||
Assert.True(prediction.Prediction[6] > data[k].Value || data[k].Value > prediction.Prediction[5]);
|
||||
}
|
||||
else
|
||||
|
|
|
@ -66,10 +66,10 @@ namespace Microsoft.ML.Tests
|
|||
{
|
||||
row = enumerator.Current;
|
||||
|
||||
Assert.Equal(expectedValues[index++], row.Data[0], precision: 7);
|
||||
Assert.Equal(expectedValues[index++], row.Data[1], precision: 7);
|
||||
Assert.Equal(expectedValues[index++], row.Data[2], precision: 7);
|
||||
Assert.Equal(expectedValues[index++], row.Data[3], precision: 7);
|
||||
Assert.Equal(expectedValues[index++], row.Data[0], 0.0000001);
|
||||
Assert.Equal(expectedValues[index++], row.Data[1], 0.0000001);
|
||||
Assert.Equal(expectedValues[index++], row.Data[2], 0.0000001);
|
||||
Assert.Equal(expectedValues[index++], row.Data[3], 0.0000001);
|
||||
}
|
||||
}
|
||||
|