Merge branch 'master' into jotims/pr/fix-prefix-build

This commit is contained in:
Jonathan Tims 2020-03-19 10:32:31 +00:00 коммит произвёл GitHub
Родитель 205d26f4a1 7e4ec4dccf
Коммит 57f5a6c7b3
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 4AEE18F83AFDEB23
102 изменённых файлов: 7163 добавлений и 3894 удалений

Просмотреть файл

@ -34,17 +34,17 @@ Project("{6EC3EE1D-3C4E-46DD-8F32-0CC8E7565705}") = "TestFSharp", "test\TestFSha
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Examples", "Examples", "{DC5F5BC4-CDB0-41F7-8B03-CD4C38C8DEB2}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ClickThroughModel", "src\Examples\ClickThroughModel\ClickThroughModel.csproj", "{33D86EA2-2161-4EF0-8F17-59602296273C}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ClickThroughModel", "src\Examples\ClickThroughModel\ClickThroughModel.csproj", "{33D86EA2-2161-4EF0-8F17-59602296273C}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ClinicalTrial", "src\Examples\ClinicalTrial\ClinicalTrial.csproj", "{B517BBF2-60E6-4C69-885A-AE5C014D877B}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ClinicalTrial", "src\Examples\ClinicalTrial\ClinicalTrial.csproj", "{B517BBF2-60E6-4C69-885A-AE5C014D877B}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InferNET101", "src\Examples\InferNET101\InferNET101.csproj", "{52D174E7-2407-4FC1-9DDA-4D9D14F18618}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MontyHall", "src\Examples\MontyHall\MontyHall.csproj", "{6139CF19-0190-4ED5-AEE3-D3CE7458E517}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MontyHall", "src\Examples\MontyHall\MontyHall.csproj", "{6139CF19-0190-4ED5-AEE3-D3CE7458E517}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MotifFinder", "src\Examples\MotifFinder\MotifFinder.csproj", "{D2A7B5F5-8D33-45AC-9776-07C23F5859BB}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Image_Classifier", "src\Examples\ImageClassifier\Image_Classifier.csproj", "{87D09BD4-119E-49C1-B0B4-86DF962A00EE}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Image_Classifier", "src\Examples\ImageClassifier\Image_Classifier.csproj", "{87D09BD4-119E-49C1-B0B4-86DF962A00EE}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LDA", "src\Examples\LDA\LDA.csproj", "{6FF3E672-378C-4D61-B4CA-A5A5E01C2563}"
EndProject
@ -90,6 +90,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "RobustGaussianProcess", "sr
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LearnersNuGet", "src\Learners\LearnersNuGet\LearnersNuGet.csproj", "{6BC792EE-0436-494E-9F0C-63A3822320D0}"
EndProject
Project("{888888A0-9F3D-457C-B088-3A5042F75D52}") = "Tools.GenerateSeries", "src\Tools\GenerateSeries\Tools.GenerateSeries.pyproj", "{D7562CC3-D48A-481D-B40C-07EE51EBBF50}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@ -498,6 +500,12 @@ Global
{6BC792EE-0436-494E-9F0C-63A3822320D0}.ReleaseCore|Any CPU.Build.0 = Release|Any CPU
{6BC792EE-0436-494E-9F0C-63A3822320D0}.ReleaseFull|Any CPU.ActiveCfg = Release|Any CPU
{6BC792EE-0436-494E-9F0C-63A3822320D0}.ReleaseFull|Any CPU.Build.0 = Release|Any CPU
{D7562CC3-D48A-481D-B40C-07EE51EBBF50}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D7562CC3-D48A-481D-B40C-07EE51EBBF50}.DebugCore|Any CPU.ActiveCfg = Debug|Any CPU
{D7562CC3-D48A-481D-B40C-07EE51EBBF50}.DebugFull|Any CPU.ActiveCfg = Debug|Any CPU
{D7562CC3-D48A-481D-B40C-07EE51EBBF50}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D7562CC3-D48A-481D-B40C-07EE51EBBF50}.ReleaseCore|Any CPU.ActiveCfg = Release|Any CPU
{D7562CC3-D48A-481D-B40C-07EE51EBBF50}.ReleaseFull|Any CPU.ActiveCfg = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE

23
docs/_build/makeApiDocs.ps1 поставляемый
Просмотреть файл

@ -17,8 +17,9 @@ $destinationDirectory = [IO.Path]::GetFullPath((join-path $scriptDir '../../Infe
$dotnetExe = 'dotnet'
Write-Host $sourceDirectory
Write-Host "Copy src to InferNet_Copy_Temp directory"
Write-Host "Copy src and build to InferNet_Copy_Temp directory"
Copy-Item -Path "$sourceDirectory/src/" -Destination "$destinationDirectory/src/" -Recurse -Force
Copy-Item -Path "$sourceDirectory/build/" -Destination "$destinationDirectory/build/" -Recurse -Force
Write-Host "Copy root files to InferNet_Copy_Temp directory"
Get-ChildItem -Path $sourceDirectory -Filter "*.*" | Copy-Item -Destination $destinationDirectory -Force
@ -36,24 +37,20 @@ if (!(Test-Path $projPath)) {
Write-Error -Message ('ERROR: Failed to locate PrepareSource project file at ' + $projPath)
exit 1
}
$BuildArgs = @{
FilePath = $dotnetExe
ArgumentList = "build", $projPath, "/p:Configuration=Release"
}
Start-Process @BuildArgs -NoNewWindow -Wait
& "$dotnetExe" build "$projPath" /p:Configuration=Release
Write-Host "Run PrepareSource for InferNet_Copy_Temp folder"
$prepareSourcePath = [IO.Path]::GetFullPath((join-path $sourceDirectory 'src/Tools/PrepareSource/bin/Release/netcoreapp2.1/Microsoft.ML.Probabilistic.Tools.PrepareSource.dll'))
$prepareSourceCmd = "& $dotnetExe ""$prepareSourcePath"" ""$destinationDirectory"""
Invoke-Expression $prepareSourceCmd
& "$dotnetExe" "$prepareSourcePath" "$destinationDirectory"
Write-Host "Install nuget package docfx.console"
Install-Package -Name docfx.console -provider Nuget -Source https://nuget.org/api/v2 -RequiredVersion 2.38.0 -Destination $scriptDir\..\..\packages -Force
Install-Package -Name docfx.console -provider Nuget -Source https://nuget.org/api/v2 -RequiredVersion 2.48.1 -Destination $scriptDir\..\..\packages -Force
Write-Host "Run docfx"
$docFXPath = [IO.Path]::GetFullPath((join-path $scriptDir '../../packages/docfx.console.2.38.0/tools/docfx.exe'))
$docFXPath = [IO.Path]::GetFullPath((join-path $scriptDir '../../packages/docfx.console.2.48.1/tools/docfx.exe'))
$docFxJsonPath = "$scriptDir/../docfx.json"
$docFxCmd = "& ""$docFXPath"" ""$docFxJsonPath"""
if(!(Invoke-Expression $docFxCmd))
& "$docFXPath" "$docFxJsonPath"
if($LASTEXITCODE)
{
if(!(Invoke-Expression "& mono ""$docFXPath"" ""$docFxJsonPath"""))
{
@ -61,6 +58,8 @@ if(!(Invoke-Expression $docFxCmd))
}
}
Write-Warning "Three warnings about invalid file links in toc.yml are expected and benign, because those files don't exist yet. However, the links are still set up correctly."
if ((Test-Path $destinationDirectory)) {
Write-Host "Remove temp repository"
Remove-Item -Path $destinationDirectory -Recurse -Force

Просмотреть файл

@ -1,6 +1,6 @@
- name: Home Page
href: ../index.html
href: ../index.md
- name: User Guide
href: ../userguide/index.html
href: ../userguide/index.md
- name: API Documentation
href: api/index.html

Просмотреть файл

@ -4,6 +4,7 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;
using System.Linq;
using System.Reflection;
@ -351,10 +352,6 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
return Builder.StaticMethod(new Func<int>(GateAnalysisTransform.AnyIndex));
}
#if SUPPRESS_UNREACHABLE_CODE_WARNINGS
#pragma warning disable 162
#endif
/// <summary>
/// Post-process dependencies replacing message expressions with the operator
/// blocks that compute them.
@ -388,7 +385,8 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
}
var bounds = new Dictionary<IVariableDeclaration, CodeRecognizer.Bounds>(new IdentityComparer<IVariableDeclaration>());
Recognizer.AddLoopBounds(bounds, ist);
if (false)
bool debug = false;
if (debug)
{
// for debugging
IStatement innerStmt = ist;
@ -421,25 +419,36 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
Dictionary<IStatement, Set<IVariableDeclaration>> extraIndicesOfStmt = new Dictionary<IStatement, Set<IVariableDeclaration>>(new IdentityComparer<IStatement>());
foreach (IStatement exprStmt in di.DeclDependencies)
{
var mutatingStmts = GetStatementsThatMutate2(ist, exprStmt, true, false, ist, bounds, di2.offsetIndexOf, extraIndicesOfStmt);
var mutatingStmts = GetStatementsThatMutate(ist, exprStmt, true, false, ist, bounds, di2.offsetIndexOf, extraIndicesOfStmt);
foreach (IStatement mutatingStmt in mutatingStmts)
{
di2.Add(DependencyType.Declaration, mutatingStmt);
}
}
List<KeyValuePair<IStatement, OffsetInfo>> extraOffsetInfos = new List<KeyValuePair<IStatement, OffsetInfo>>();
bool allocationsOnly = isInitializer;
foreach (KeyValuePair<IStatement, DependencyType> entry in di.dependencyTypeOf)
{
IStatement exprStmt = entry.Key;
DependencyType type = entry.Value;
type &= ~DependencyType.Declaration; // declarations are handled above
if ((type & DependencyType.SkipIfUniform) > 0)
{
// Process SkipIfUniform dependencies specially
// Stmts with the same offsetInfo are put into Any
var mutatingStmtsSkip = GetStatementsThatMutate(ist, exprStmt, allocationsOnly, true, ist, bounds, di2.offsetIndexOf, extraIndicesOfStmt);
foreach (IStatement mutatingStmt in mutatingStmtsSkip)
{
di2.Add(DependencyType.SkipIfUniform, mutatingStmt);
}
type &= ~DependencyType.SkipIfUniform;
// Fall through
}
if (type == 0)
continue;
if ((type & DependencyType.Container) > 0)
type |= DependencyType.Dependency; // containers also become deps
bool allocationsOnly = isInitializer;
bool mustMutate = (type & DependencyType.SkipIfUniform) > 0;
var mutatingStmts = GetStatementsThatMutate2(ist, exprStmt, allocationsOnly, mustMutate, ist, bounds, di2.offsetIndexOf, extraIndicesOfStmt);
var mutatingStmts = GetStatementsThatMutate(ist, exprStmt, allocationsOnly, false, ist, bounds, di2.offsetIndexOf, extraIndicesOfStmt);
foreach (IStatement mutatingStmt in mutatingStmts)
{
DependencyType type2 = type;
@ -544,9 +553,20 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
stmts.AddRange(dummyStatements);
}
#if SUPPRESS_UNREACHABLE_CODE_WARNINGS
#pragma warning restore 162
#endif
class OffsetInfoComparer : IEqualityComparer<OffsetInfo>
{
public bool Equals(OffsetInfo x, OffsetInfo y)
{
if (x == null) return (y == null);
else if (y == null) return false;
else return x.Any(offset => y.ContainsKey(offset.loopVar));
}
public int GetHashCode(OffsetInfo obj)
{
return 0;
}
}
protected IExpression ConvertExpressionWithDependencyType(IExpression expr, DependencyType type)
{
@ -848,8 +868,37 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
return imie;
}
if (Recognizer.IsStaticMethod(imie, new Func<object[], object>(FactorManager.Any)) ||
Recognizer.IsStaticMethod(imie, new Func<object, object>(FactorManager.AnyItem)))
if (Recognizer.IsStaticMethod(imie, new Func<object[], object>(FactorManager.All)))
{
AllStatement allSt = new AllStatement();
DependencyInformation parentDepInfo = dependencyInformation;
dependencyInformation = new DependencyInformation();
foreach (IExpression arg in imie.Arguments)
{
dependencyInformation.IsUniform = false;
dependencyInformation.dependencyTypeOf.Clear();
ConvertExpression(arg);
if ((dependencyType & DependencyType.SkipIfUniform) > 0)
{
if (dependencyInformation.IsUniform)
{
// nothing to add
continue;
}
}
foreach (KeyValuePair<IStatement, DependencyType> entry in dependencyInformation.dependencyTypeOf)
{
DependencyType type = entry.Value & dependencyType;
if (type > 0)
allSt.Statements.Add(entry.Key);
}
}
dependencyInformation = parentDepInfo;
if (allSt.Statements.Count > 0)
AddDependencyOn(allSt);
return imie;
}
if (Recognizer.IsStaticMethod(imie, new Func<object[], object>(FactorManager.Any)))
{
AnyStatement anySt = new AnyStatement();
DependencyInformation parentDepInfo = dependencyInformation;
@ -1206,8 +1255,41 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
}
}
// same as GetStatementsThatMutate but handles AnyStatements
internal IEnumerable<IStatement> GetStatementsThatMutate2(
static IEnumerable<IStatement> LiftNestedAll(AnyStatement anySt)
{
// Convert Any(x,All(y,z)) to All(Any(x,y),Any(x,z))
// Convert Any(All(x,y),All(z,w)) to All(Any(x,z),Any(x,w),Any(y,z),Any(y,w))
List<AnyStatement> results = new List<AnyStatement>();
IEnumerable<AnyStatement> CopyAndAdd(IStatement newSt)
{
IStatement[] newSequence = new IStatement[] { newSt };
if (results.Count == 0) return new[] { new AnyStatement(newSequence) };
else return results.Select(a => new AnyStatement(a.Statements.Concat(newSequence).ToArray()));
}
foreach(var stmt in anySt.Statements)
{
if(stmt is AllStatement allSt)
{
results = allSt.Statements.SelectMany(CopyAndAdd).ToList();
}
else if(results.Count > 0)
{
// Add this stmt to all clauses
foreach(var clause in results)
{
clause.Statements.Add(stmt);
}
}
else
{
results.Add(new AnyStatement(stmt));
}
}
return results.Select(a => (a.Statements.Count == 1) ? a.Statements[0] : a);
}
// same as GetMutations but handles AnyStatements
internal IEnumerable<IStatement> GetStatementsThatMutate(
IStatement exclude,
IStatement exprStmt,
bool allocationsOnly,
@ -1217,72 +1299,87 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
Dictionary<IStatement, IOffsetInfo> offsetInfos,
Dictionary<IStatement, Set<IVariableDeclaration>> extraIndicesOfStmt)
{
if (exprStmt is AnyStatement)
if (mustMutate && exprStmt is ExpressionDependency) exprStmt = new AnyStatement(exprStmt);
if (exprStmt is AllStatement allSt)
{
List<ExpressionDependency> exprDeps = new List<ExpressionDependency>();
ForEachExpressionDependency(allSt.Statements, exprDeps.Add);
List<IStatement> results = new List<IStatement>();
foreach (ExpressionDependency ies in exprDeps)
{
IExpression expr = ies.Expression;
List<MutationInformation.Mutation> mutations = GetMutations(exclude, expr, allocationsOnly, mustMutate, bindings, bounds, offsetInfos, extraIndicesOfStmt);
IExpression prefixExpr = expr;
while (prefixExpr is IMethodInvokeExpression)
{
IMethodInvokeExpression imie = (IMethodInvokeExpression)prefixExpr;
prefixExpr = imie.Arguments[0];
}
var prefixes = Recognizer.GetAllPrefixes(prefixExpr);
// algorithm: find the prefix of each mutation, compute a graph of all prefix overlaps, make an All statement for each clique
// example: x[i] has mutations x[0][0], x[1][0], x[i][1]
// prefixes are: x[0], x[1], x[i]
// cliques are: (x[0], x[i]), (x[1], x[i])
// dependency is: (x[0][0] and x[i][1]) or (x[1][0] and x[i][1])
List<KeyValuePair<MutationInformation.Mutation, IExpression>> mutationsToCheck = new List<KeyValuePair<MutationInformation.Mutation, IExpression>>();
foreach (MutationInformation.Mutation m in mutations)
{
IExpression expr2 = m.expr;
bool isIncrement = context.InputAttributes.Has<IncrementStatement>(m.stmt);
if (isIncrement)
{
// ignore
}
else
{
var prefixes2 = Recognizer.GetAllPrefixes(expr2);
var prefix2 = prefixes2[System.Math.Min(prefixes2.Count, prefixes.Count) - 1];
mutationsToCheck.Add(new KeyValuePair<MutationInformation.Mutation, IExpression>(m, prefix2));
}
}
if (mutationsToCheck.Count == 1)
{
results.Add(mutationsToCheck[0].Key.stmt);
}
else // if (mutationsToCheck.Count > 1)
{
List<IReadOnlyList<IStatement>> cliques = new List<IReadOnlyList<IStatement>>();
AddCliques(mutationsToCheck, cliques);
AnyStatement anyBlock = new AnyStatement();
foreach (var clique in cliques)
{
if(clique.Count == 1)
{
anyBlock.Statements.Add(clique[0]);
}
else
{
anyBlock.Statements.Add(new AllStatement(clique.ToArray()));
}
}
results.AddRange(LiftNestedAll(anyBlock));
}
}
foreach (var result in results)
yield return result;
}
else if (exprStmt is AnyStatement anySt)
{
// Any(expr1, expr2) => Any(stmt1, stmt2)
// This becomes complicated when expr1 is modified by multiple statements.
// In that case, we would need Any(All(stmt1a, stmt1b), stmt2) but that form is not allowed so we convert it into
// All(Any(stmt1a, stmt2), Any(stmt1b, stmt2)).
// 'results' holds this set of Any statements.
AnyStatement anySt = (AnyStatement)exprStmt;
List<ExpressionDependency> exprDeps = new List<ExpressionDependency>();
ForEachExpressionDependency(anySt.Statements, exprDeps.Add);
List<AnyStatement> results = new List<AnyStatement>();
results.Add(new AnyStatement());
var newSt = new AnyStatement();
// For pruning based on SkipIfUniform, we only want to prune a statement if it must be uniform in all cases.
// This is only guaranteed when every dependency is uniform.
bool anyDependencySuffices = true;
foreach (ExpressionDependency ies in exprDeps)
{
IExpression expr = ies.Expression;
AllStatement allBlock = new AllStatement();
AnyStatement anyBlock = new AnyStatement();
List<MutationInformation.Mutation> mutations = GetMutations(exclude, expr, allocationsOnly, mustMutate, bindings, bounds, offsetInfos, extraIndicesOfStmt);
if (anyDependencySuffices)
foreach (MutationInformation.Mutation m in mutations)
{
foreach (MutationInformation.Mutation m in mutations)
{
anyBlock.Statements.Add(m.stmt);
}
}
else
{
IExpression prefixExpr = expr;
while (prefixExpr is IMethodInvokeExpression)
{
IMethodInvokeExpression imie = (IMethodInvokeExpression)prefixExpr;
prefixExpr = imie.Arguments[0];
}
var prefixes = Recognizer.GetAllPrefixes(prefixExpr);
// algorithm: find the prefix of each mutation, compute a graph of all prefix overlaps, make an Any statement for each clique
// example: x[i] has mutations x[0][0], x[1][0], x[i][1]
// prefixes are: x[0], x[1], x[i]
// cliques are: (x[0], x[i]), (x[1], x[i])
// dependency is: (x[0][0] or x[i][1]) and (x[1][0] or x[i][1])
List<KeyValuePair<MutationInformation.Mutation, IExpression>> mutationsToCheck = new List<KeyValuePair<MutationInformation.Mutation, IExpression>>();
foreach (MutationInformation.Mutation m in mutations)
{
IExpression expr2 = m.expr;
bool isIncrement = context.InputAttributes.Has<IncrementStatement>(m.stmt);
if (isIncrement)
{
// ignore
}
else
{
var prefixes2 = Recognizer.GetAllPrefixes(expr2);
var prefix2 = prefixes2[System.Math.Min(prefixes2.Count, prefixes.Count) - 1];
mutationsToCheck.Add(new KeyValuePair<MutationInformation.Mutation, IExpression>(m, prefix2));
}
}
if (mutationsToCheck.Count == 1)
{
allBlock.Statements.Add(mutationsToCheck[0].Key.stmt);
}
else if (mutationsToCheck.Count > 1)
{
AddCliques(mutationsToCheck, allBlock);
}
anyBlock.Statements.Add(m.stmt);
}
if (anyBlock.Statements.Count > 0)
allBlock.Statements.Add(anyBlock);
@ -1294,57 +1391,21 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
if (ist is AnyStatement)
{
AnyStatement group = (AnyStatement)ist;
foreach (AnyStatement newSt in results)
{
newSt.Statements.AddRange(group.Statements);
}
newSt.Statements.AddRange(group.Statements);
}
else
{
foreach (AnyStatement newSt in results)
{
newSt.Statements.Add(ist);
}
newSt.Statements.Add(ist);
}
}
else if (allBlock.Statements.Count > 1)
{
// Any(X, All(Y,Z)) = All(Any(X,Y), Any(X,Z))
List<AnyStatement> results2 = new List<AnyStatement>();
foreach (AnyStatement result in results)
{
// replace result with Any(result, groups) = All(Any(result, groups[0]), Any(result, groups[1]), ...)
foreach (IStatement ist in allBlock.Statements)
{
AnyStatement newSt = new AnyStatement();
newSt.Statements.AddRange(result.Statements);
if (ist is AnyStatement)
{
// flatten Any(result, Any(group)) = Any(result, group)
AnyStatement group = (AnyStatement)ist;
newSt.Statements.AddRange(group.Statements);
}
else
{
newSt.Statements.Add(ist);
}
results2.Add(newSt);
}
}
results = results2;
}
}
foreach (AnyStatement newSt in results)
{
if (newSt.Statements.Count == 1)
yield return newSt.Statements[0];
else if (newSt.Statements.Count > 0)
yield return newSt;
}
if (newSt.Statements.Count == 1)
yield return newSt.Statements[0];
else if (newSt.Statements.Count > 0)
yield return newSt;
}
else if (exprStmt is ExpressionDependency)
else if (exprStmt is ExpressionDependency ies)
{
ExpressionDependency ies = (ExpressionDependency)exprStmt;
IExpression expr = ies.Expression;
foreach (MutationInformation.Mutation m in GetMutations(exclude, expr, allocationsOnly, mustMutate, bindings, bounds, offsetInfos, extraIndicesOfStmt))
{
@ -1358,7 +1419,7 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
}
}
private void AddCliques(List<KeyValuePair<MutationInformation.Mutation, IExpression>> mutationsToCheck, AllStatement groups)
private void AddCliques(List<KeyValuePair<MutationInformation.Mutation, IExpression>> mutationsToCheck, IList<IReadOnlyList<IStatement>> groups)
{
// find overlapping mutations
Dictionary<int, List<int>> overlappingMutations = new Dictionary<int, List<int>>();
@ -1394,12 +1455,12 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
var cliqueFinder = new CliqueFinder<int>(i => overlappingMutations[i]);
cliqueFinder.ForEachClique(candidates, delegate (Stack<int> c)
{
AnyStatement group = new AnyStatement();
List<IStatement> group = new List<IStatement>();
foreach (int i in c)
{
group.Statements.Add(mutationsToCheck[i].Key.stmt);
group.Add(mutationsToCheck[i].Key.stmt);
}
groups.Statements.Add(group);
groups.Add(group);
});
}

Просмотреть файл

@ -205,6 +205,12 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
}
}
/// <summary>
/// Wraps an array-typed expression to indicate that all elements of the array are used except at the given index.
/// </summary>
/// <param name="list"></param>
/// <param name="index"></param>
/// <returns></returns>
public static ListType AllExcept<ListType, IndexType>(ListType list, IndexType index)
{
return list;
@ -215,7 +221,12 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
return values[0];
}
public static object AnyItem(object list)
/// <summary>
/// Wraps an array-typed expression to indicate that all elements of the array are non-uniform.
/// </summary>
/// <param name="list"></param>
/// <returns></returns>
public static object All(object list)
{
return list;
}
@ -360,7 +371,11 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
if (parameter.IsDefined(typeof(SkipIfAnyUniformAttribute), false)
)
{
info.Add(DependencyType.SkipIfUniform, dependencySt);
Type t = dependency.GetExpressionType();
IExpression requirement = Util.IsIList(t)
? Builder.StaticMethod(new Func<object, object>(FactorManager.All), dependency)
: dependency;
info.Add(DependencyType.SkipIfUniform, Builder.ExprStatement(requirement));
}
else if (parameter.IsDefined(typeof(SkipIfAllUniformAttribute), false)
|| parameter.IsDefined(typeof(SkipIfUniformAttribute), false)
@ -368,8 +383,7 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
|| parameter.IsDefined(typeof(IsReturnedInEveryElementAttribute), false)
)
{
IExpression requirement = Builder.StaticMethod(new Func<object, object>(FactorManager.AnyItem), dependency);
info.Add(DependencyType.SkipIfUniform, Builder.ExprStatement(requirement));
info.Add(DependencyType.SkipIfUniform, dependencySt);
}
else
{
@ -390,6 +404,7 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
new Func<PlaceHolder, PlaceHolder, PlaceHolder>(FactorManager.AllExcept<PlaceHolder, PlaceHolder>),
new Type[] { parameter.ParameterType, indexParameter.ParameterType },
paramRef, resultIndex);
requirement = Builder.StaticMethod(new Func<object, object>(FactorManager.All), requirement);
info.Add(DependencyType.SkipIfUniform, Builder.ExprStatement(requirement));
}
else if (parameter.IsDefined(typeof(SkipIfAllExceptIndexAreUniformAttribute), false))
@ -401,8 +416,6 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
new Func<PlaceHolder, PlaceHolder, PlaceHolder>(FactorManager.AllExcept<PlaceHolder, PlaceHolder>),
new Type[] { parameter.ParameterType, indexParameter.ParameterType },
paramRef, resultIndex);
//requirement = Builder.ArrayIndex(requirement, new Any());
requirement = Builder.StaticMethod(new Func<object, object>(FactorManager.AnyItem), requirement);
info.Add(DependencyType.SkipIfUniform, Builder.ExprStatement(requirement));
}
}
@ -486,10 +499,7 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
Type t = dependency.GetExpressionType();
if (!t.IsPrimitive)
{
IExpression message = (t.IsArray || (t.IsGenericType && t.GetGenericTypeDefinition().Equals(typeof(IList<>))))
? Builder.StaticMethod(new Func<object, object>(FactorManager.AnyItem), dependency)
: dependency;
messages.Add(message);
messages.Add(dependency);
}
}
IExpression requirement = Builder.StaticMethod(new Func<object[], object>(FactorManager.Any), messages.ToArray());
@ -506,12 +516,12 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
private static bool UniformIsProper(Type type)
{
// In some cases, we could construct a uniform instance of type and check if it is proper.
return type.Equals(typeof(Microsoft.ML.Probabilistic.Distributions.Bernoulli)) ||
type.Equals(typeof(Microsoft.ML.Probabilistic.Distributions.Beta)) ||
type.Equals(typeof(Microsoft.ML.Probabilistic.Distributions.Dirichlet)) ||
type.Equals(typeof(Microsoft.ML.Probabilistic.Distributions.Discrete)) ||
type.Equals(typeof(Microsoft.ML.Probabilistic.Distributions.DiscreteChar)) ||
(type.IsGenericType && type.GetGenericTypeDefinition().Equals(typeof(Microsoft.ML.Probabilistic.Distributions.DiscreteEnum<>)));
return type.Equals(typeof(Distributions.Bernoulli)) ||
type.Equals(typeof(Distributions.Beta)) ||
type.Equals(typeof(Distributions.Dirichlet)) ||
type.Equals(typeof(Distributions.Discrete)) ||
type.Equals(typeof(Distributions.DiscreteChar)) ||
(type.IsGenericType && type.GetGenericTypeDefinition().Equals(typeof(Distributions.DiscreteEnum<>)));
}
/// <summary>
@ -1128,8 +1138,8 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
conversionOptions.AllowImplicitConversions = true;
conversionOptions.IsImplicitConversion = delegate (Type fromType, Type toType)
{
bool isDomainType = Microsoft.ML.Probabilistic.Distributions.Distribution.IsDistributionType(toType) &&
Microsoft.ML.Probabilistic.Distributions.Distribution.GetDomainType(toType).IsAssignableFrom(fromType);
bool isDomainType = Distributions.Distribution.IsDistributionType(toType) &&
Distributions.Distribution.GetDomainType(toType).IsAssignableFrom(fromType);
if (isDomainType)
{
MethodInfo pointMassMethod = GetPointMassMethod(toType, fromType);

Просмотреть файл

@ -44,7 +44,7 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
protected override IStatement DoConvertStatement(IStatement ist)
{
bool wasOperatorStatement = this.isOperatorStatement;
if(context.InputAttributes.Has<OperatorStatement>(ist))
if (context.InputAttributes.Has<OperatorStatement>(ist))
{
this.isOperatorStatement = true;
}
@ -212,10 +212,8 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
IExpression[] args2 = new IExpression[] { marginalExpr, forwardExpr, backwardExpr, indicesExpr, indexExpr };
Type[] argTypes = Array.ConvertAll(args2, e => e.GetExpressionType());
Exception exception;
MethodInfo method =
(MethodInfo)
Microsoft.ML.Probabilistic.Compiler.Reflection.Invoker.GetBestMethod(itr.DotNetType, "MarginalIncrement",
BindingFlags.Static | BindingFlags.Public | BindingFlags.FlattenHierarchy, null, argTypes, out exception);
MethodInfo method = (MethodInfo)Reflection.Invoker.GetBestMethod(itr.DotNetType, "MarginalIncrement",
BindingFlags.Static | BindingFlags.Public | BindingFlags.FlattenHierarchy, null, argTypes, out exception);
if (method == null)
Error("Cannot find a compatible MarginalIncrement method for JaggedSubarrayOp", exception);
else
@ -250,10 +248,8 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
IExpression[] args2 = new IExpression[] { backwardExpr, forwardExpr, indicesExpr, indexExpr, marginalExpr };
Type[] argTypes = Array.ConvertAll(args2, e => e.GetExpressionType());
Exception exception;
MethodInfo method =
(MethodInfo)
Microsoft.ML.Probabilistic.Compiler.Reflection.Invoker.GetBestMethod(itr.DotNetType, "MarginalIncrementItems",
BindingFlags.Static | BindingFlags.Public | BindingFlags.FlattenHierarchy, null, argTypes, out exception);
MethodInfo method = (MethodInfo)Reflection.Invoker.GetBestMethod(itr.DotNetType, "MarginalIncrementItems",
BindingFlags.Static | BindingFlags.Public | BindingFlags.FlattenHierarchy, null, argTypes, out exception);
if (method == null)
Error("Cannot find a compatible MarginalIncrementItems method for JaggedSubarrayWithMarginalOp", exception);
else
@ -295,10 +291,8 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
IExpression[] args2 = new IExpression[] { forwardExpr, backwardExpr, marginalExpr };
Type[] argTypes = Array.ConvertAll(args2, e => e.GetExpressionType());
Exception exception;
MethodInfo method =
(MethodInfo)
Microsoft.ML.Probabilistic.Compiler.Reflection.Invoker.GetBestMethod(itr.DotNetType, "MarginalIncrementArray",
BindingFlags.Static | BindingFlags.Public | BindingFlags.FlattenHierarchy, null, argTypes, out exception);
MethodInfo method = (MethodInfo)Reflection.Invoker.GetBestMethod(itr.DotNetType, "MarginalIncrementArray",
BindingFlags.Static | BindingFlags.Public | BindingFlags.FlattenHierarchy, null, argTypes, out exception);
if (method == null)
Error("Cannot find a compatible MarginalIncrementArray method for JaggedSubarrayWithMarginalOp", exception);
else

Просмотреть файл

@ -1075,7 +1075,7 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
}
prefix = CodeBuilder.MakeValid(prefix);
IVariableDeclaration bufferDecl = Builder.VarDecl(VariableInformation.GenerateName(context, prefix + "_" + name), type);
context.OutputAttributes.Set(bufferDecl, new DescriptionAttribute("Buffer for " + StringUtil.MethodFullNameToString(fcninfo.Method)));
context.OutputAttributes.Set(bufferDecl, new DescriptionAttribute("Buffer for " + StringUtil.MethodFullNameToXmlString(fcninfo.Method)));
context.OutputAttributes.Set(bufferDecl, new Containers(context));
IExpression msg = Builder.VarRefExpr(bufferDecl);
MessageInfo mi = new MessageInfo();

Просмотреть файл

@ -1097,8 +1097,7 @@ namespace Microsoft.ML.Probabilistic.Compiler.Transforms
mpa.prototypeExpression = Builder.StaticMethod(new Func<Microsoft.ML.Probabilistic.Distributions.TruncatedGaussian>(Microsoft.ML.Probabilistic.Distributions.TruncatedGaussian.Uniform));
return mpa;
}
else if (Recognizer.IsStaticMethod(imie, new Func<double, double, double, double, double>(TruncatedGamma.Sample))
|| Recognizer.IsStaticMethod(imie, new Func<double, double, double, double, double>(Factor.TruncatedGammaFromShapeAndRate))
else if (Recognizer.IsStaticMethod(imie, new Func<double, double, double, double, double>(Factor.TruncatedGammaFromShapeAndRate))
)
{
MarginalPrototype mpa = new MarginalPrototype(null);

Просмотреть файл

@ -6,6 +6,7 @@ using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Globalization;
using System.Text;
using System.Windows.Forms;
using System.IO;
@ -138,7 +139,7 @@ namespace ImageClassifier
Vector v = Vector.Zero(entries.Length - 1);
for (int i = 0; i < v.Count; i++)
{
v[i] = double.Parse(entries[i + 1]);
v[i] = double.Parse(entries[i + 1], CultureInfo.InvariantCulture);
}
result.Add(v);

Просмотреть файл

@ -3,11 +3,12 @@
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.IO;
using System.Drawing;
using System.Globalization;
using System.IO;
using System.Linq;
using Microsoft.ML.Probabilistic.Math;
using Microsoft.ML.Probabilistic.Utilities;
using System.Linq;
namespace ImageClassifier
{
@ -33,7 +34,7 @@ namespace ImageClassifier
StreamWriter writer = new StreamWriter(folder + "Features.txt");
foreach (string filename in filenames)
{
writer.WriteLine(filename + "," + StringUtil.CollectionToString(features[filename], ","));
writer.WriteLine(filename + "," + StringUtil.CollectionToString(features[filename].Select(d => d.ToString("r", CultureInfo.InvariantCulture)), ","));
}
writer.Close();

Просмотреть файл

@ -2,7 +2,9 @@
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Globalization;
using System.IO;
using System.Threading;
using System.Windows.Forms;
namespace ImageClassifier
@ -15,7 +17,7 @@ namespace ImageClassifier
[STAThread]
static void Main()
{
// First compute features for all the images.
// First compute features for all the images.
// Comment the next 2 lines out if the images have not changed and you don't want to re-compute the features each run.
var features = new ImageFeatures();
features.ComputeImageFeatures();

Просмотреть файл

@ -46,10 +46,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
public bool Changed_numberOfIterationsDecreased_Init_FeatureCount_FeatureValues_InstanceCount_Labels_WeightConst9_isInitialised;
public Gamma CommonWeightPrecision_F;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_B_toDef;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_F_marginal;
/// <summary>True if Constant has executed. Set this to false to force re-execution of Constant</summary>
public bool Constant_isDone;
@ -110,10 +110,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
/// <summary>Message to marginal of 'WeightPrecisionRates'</summary>
public DistributionStructArray<Gamma,double> WeightPrecisionRates_marginal_F;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisionRates_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisionRates_rep_B_toDef;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisionRates_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisionRates_rep_F_marginal;
/// <summary>Message from use of 'WeightPrecisionRates'</summary>
public DistributionStructArray<Gamma,double> WeightPrecisionRates_use_B;
@ -130,10 +130,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
/// <summary>Message to marginal of 'Weights'</summary>
public DistributionStructArray<Gaussian,double> Weights_marginal_F;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> Weights_rep_B_toDef;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> Weights_rep_F_marginal;
/// <summary>Message from use of 'Weights'</summary>
public DistributionStructArray<Gaussian,double> Weights_use_B;

Просмотреть файл

@ -46,10 +46,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
public bool Changed_numberOfIterationsDecreased_Init_FeatureCount_FeatureValues_InstanceCount_Labels_WeightConst3_isInitialised;
public Gamma CommonWeightPrecision_F;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_B_toDef;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_F_marginal;
/// <summary>True if Constant has executed. Set this to false to force re-execution of Constant</summary>
public bool Constant_isDone;
@ -85,7 +85,7 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private DistributionStructArray<Gamma,double> weightPrecisionRateConstraints;
public Gamma WeightPrecisionRateRates_F_reduced;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisionRates_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisionRates_rep_B_toDef;
/// <summary>Message from use of 'WeightPrecisionRates'</summary>
public DistributionStructArray<Gamma,double> WeightPrecisionRates_use_B;
@ -101,10 +101,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
/// <summary>Message to marginal of 'Weights'</summary>
public DistributionStructArray<Gaussian,double> Weights_marginal_F;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> Weights_rep_B_toDef;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> Weights_rep_F_marginal;
/// <summary>Message from use of 'Weights'</summary>
public DistributionStructArray<Gaussian,double> Weights_use_B;

Просмотреть файл

@ -82,10 +82,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private int classCount;
public Gamma CommonWeightPrecision_F;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_B_toDef;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_F_marginal;
/// <summary>True if Constant has executed. Set this to false to force re-execution of Constant</summary>
public bool Constant_isDone;
@ -101,10 +101,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private int[] labels;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_0__B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_F_marginal;
/// <summary>Message to marginal of 'ModelSelector'</summary>
public Bernoulli ModelSelector_marginal_F;
@ -184,10 +184,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
/// <summary>Message to marginal of 'WeightPrecisionRates'</summary>
public DistributionStructArray<Gamma,double> WeightPrecisionRates_marginal_F;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisionRates_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisionRates_rep_B_toDef;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisionRates_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisionRates_rep_F_marginal;
/// <summary>Message from use of 'WeightPrecisionRates'</summary>
public DistributionStructArray<Gamma,double> WeightPrecisionRates_use_B;
@ -199,20 +199,20 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
public DistributionStructArray<Gamma,double> WeightPrecisions_B_FeatureRange__Q;
public DistributionStructArray<Gamma,double> WeightPrecisions_F;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisions_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisions_rep_B_toDef;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisions_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisions_rep_F_marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth0_F;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_F;
/// <summary>Message to marginal of 'Weights'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_marginal_F;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_F_marginal;
/// <summary>Message from use of 'Weights'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_use_B;

Просмотреть файл

@ -80,10 +80,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private int classCount;
public Gamma CommonWeightPrecision_F;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_B_toDef;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_F_marginal;
/// <summary>True if Constant has executed. Set this to false to force re-execution of Constant</summary>
public bool Constant_isDone;
@ -99,10 +99,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private int[] labels;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_0__B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_F_marginal;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> NoisyScoreDeltas_B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> NoisyScoreDeltas_F;
@ -140,7 +140,7 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private DistributionStructArray<Gamma,double> weightPrecisionRateConstraints;
public Gamma WeightPrecisionRateRates_F_reduced;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisionRates_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisionRates_rep_B_toDef;
/// <summary>Message from use of 'WeightPrecisionRates'</summary>
public DistributionStructArray<Gamma,double> WeightPrecisionRates_use_B;
@ -152,19 +152,19 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
public DistributionStructArray<Gamma,double> WeightPrecisions_B_FeatureRange__Q;
public DistributionStructArray<Gamma,double> WeightPrecisions_F;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisions_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisions_rep_B_toDef;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisions_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisions_rep_F_marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_F;
/// <summary>Message to marginal of 'Weights'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_marginal_F;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_F_marginal;
/// <summary>Message from use of 'Weights'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_use_B;

Просмотреть файл

@ -50,10 +50,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
public bool Changed_numberOfIterationsDecreased_Init_FeatureCount_FeatureIndexes_FeatureValues_InstanceCount_Ins12_isInitialised;
public Gamma CommonWeightPrecision_F;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_B_toDef;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_F_marginal;
/// <summary>True if Constant has executed. Set this to false to force re-execution of Constant</summary>
public bool Constant_isDone;

Просмотреть файл

@ -50,10 +50,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
public bool Changed_numberOfIterationsDecreased_Init_FeatureCount_FeatureIndexes_FeatureValues_InstanceCount_Ins6_isInitialised;
public Gamma CommonWeightPrecision_F;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_B_toDef;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_F_marginal;
/// <summary>True if Constant has executed. Set this to false to force re-execution of Constant</summary>
public bool Constant_isDone;

Просмотреть файл

@ -64,10 +64,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private int classCount;
public Gamma CommonWeightPrecision_F;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_B_toDef;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_F_marginal;
/// <summary>True if Constant has executed. Set this to false to force re-execution of Constant</summary>
public bool Constant_isDone;
@ -89,10 +89,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private int[] labels;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_0__B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_F_marginal;
/// <summary>Message to marginal of 'ModelSelector'</summary>
public Bernoulli ModelSelector_marginal_F;
@ -191,15 +191,15 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
public DistributionStructArray<Gamma,double> WeightPrecisions_B_FeatureRange__Q;
public DistributionStructArray<Gamma,double> WeightPrecisions_F;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisions_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisions_rep_B_toDef;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisions_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisions_rep_F_marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth0_F;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_B;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_F;
/// <summary>Buffer for JaggedSubarrayOp<double>.ItemsAverageConditional<DistributionStructArray<Gaussian, double>, Gaussian, DistributionStructArray<Gaussian, double>></summary>
/// <summary>Buffer for JaggedSubarrayOp&lt;double&gt;.ItemsAverageConditional&lt;DistributionStructArray&lt;Gaussian, double&gt;, Gaussian, DistributionStructArray&lt;Gaussian, double&gt;&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_F_ClassRange__marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_F;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_FeatureIndexes_B;

Просмотреть файл

@ -62,10 +62,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private int classCount;
public Gamma CommonWeightPrecision_F;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_B_toDef;
public DistributionStructArray<Gamma,double> CommonWeightPrecision_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public Gamma CommonWeightPrecision_rep_F_marginal;
/// <summary>True if Constant has executed. Set this to false to force re-execution of Constant</summary>
public bool Constant_isDone;
@ -87,10 +87,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private int[] labels;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_0__B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_F_marginal;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> NoisyScoreDeltas_B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> NoisyScoreDeltas_F;
@ -144,14 +144,14 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
public DistributionStructArray<Gamma,double> WeightPrecisions_B_FeatureRange__Q;
public DistributionStructArray<Gamma,double> WeightPrecisions_F;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisions_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisions_rep_B_toDef;
public DistributionRefArray<DistributionStructArray<Gamma,double>,double[]> WeightPrecisions_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gamma></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gamma&gt;</summary>
public DistributionStructArray<Gamma,double> WeightPrecisions_rep_F_marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_B;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_F;
/// <summary>Buffer for JaggedSubarrayOp<double>.ItemsAverageConditional<DistributionStructArray<Gaussian, double>, Gaussian, DistributionStructArray<Gaussian, double>></summary>
/// <summary>Buffer for JaggedSubarrayOp&lt;double&gt;.ItemsAverageConditional&lt;DistributionStructArray&lt;Gaussian, double&gt;, Gaussian, DistributionStructArray&lt;Gaussian, double&gt;&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_F_ClassRange__marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_F;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_FeatureIndexes_B;

Просмотреть файл

@ -52,10 +52,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
/// <summary>Field backing the WeightPriors property</summary>
private DistributionStructArray<Gaussian,double> weightPriors;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> Weights_depth1_rep_B_toDef;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> Weights_depth1_rep_F_marginal;
/// <summary>Messages from use of 'Weights'</summary>
public DistributionStructArray<Gaussian,double>[] Weights_uses_B;

Просмотреть файл

@ -67,10 +67,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
/// <summary>Field backing the WeightPriors property</summary>
private DistributionStructArray<Gaussian,double> weightPriors;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> Weights_depth1_rep_B_toDef;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> Weights_depth1_rep_F_marginal;
/// <summary>Message to marginal of 'Weights'</summary>
public DistributionStructArray<Gaussian,double> Weights_marginal_F;

Просмотреть файл

@ -56,10 +56,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
/// <summary>Field backing the WeightPriors property</summary>
private DistributionStructArray<Gaussian,double> weightPriors;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> Weights_depth1_rep_B_toDef;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> Weights_depth1_rep_F_marginal;
/// <summary>Message to marginal of 'Weights'</summary>
public DistributionStructArray<Gaussian,double> Weights_marginal_F;

Просмотреть файл

@ -62,10 +62,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
public DistributionRefArray<DistributionStructArray<Bernoulli,bool>,bool[]> Labels_InstanceRange__selector_cases_rep8_B_reduced;
public Bernoulli[][][] Labels_InstanceRange__selector_cases_uses_B;
public Discrete Labels_InstanceRange__selector_rep_B_reduced;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Discrete></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Discrete&gt;</summary>
public DistributionRefArray<Discrete,int> Labels_InstanceRange__selector_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<Discrete,int>,int[]> Labels_InstanceRange__selector_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Discrete></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Discrete&gt;</summary>
public DistributionRefArray<Discrete,int> Labels_InstanceRange__selector_rep_F_marginal;
public Discrete[][] Labels_InstanceRange__selector_uses_B;
public Discrete[][] Labels_InstanceRange__selector_uses_F;
@ -79,10 +79,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
public Discrete[][] Labels_uses_F;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_0__B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_F_marginal;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> NoisyScoreDeltas_B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> NoisyScoreDeltas_F;
@ -104,10 +104,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> weightPriors;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_F;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_F_marginal;
public Gaussian[][][] Weights_uses_B;
public Gaussian[][][] Weights_uses_F;
@ -544,6 +544,7 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
this.Labels_uses_F[InstanceRange][1] = ArrayHelper.MakeUniform<Discrete>(Discrete.Uniform(this.classCount));
}
if (this.instanceCount>0) {
this.vbool72_reduced = new bool[this.classCount][];
this.Labels_InstanceRange__selector_cases_rep8_B_reduced = new DistributionRefArray<DistributionStructArray<Bernoulli,bool>,bool[]>(this.classCount);
}
for(int ClassMaxNoisyScore = 0; ClassMaxNoisyScore<this.classCount; ClassMaxNoisyScore++) {
@ -555,11 +556,6 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
this.Labels_InstanceRange__selector_cases_rep8_B_reduced[ClassMaxNoisyScore][_a8] = Bernoulli.Uniform();
}
}
}
if (this.instanceCount>0) {
this.vbool72_reduced = new bool[this.classCount][];
}
for(int ClassMaxNoisyScore = 0; ClassMaxNoisyScore<this.classCount; ClassMaxNoisyScore++) {
if (this.instanceCount>0) {
this.vbool72_reduced[ClassMaxNoisyScore] = new bool[this.classCount];
}
@ -742,8 +738,8 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
this.Labels_uses_F[InstanceRange] = new Discrete[2];
}
this.Labels_InstanceRange__selector_cases_uses_B = new Bernoulli[this.instanceCount][][];
this.Labels_InstanceRange__selector_cases_rep8_B_reduced = default(DistributionRefArray<DistributionStructArray<Bernoulli,bool>,bool[]>);
this.vbool72_reduced = default(bool[][]);
this.Labels_InstanceRange__selector_cases_rep8_B_reduced = default(DistributionRefArray<DistributionStructArray<Bernoulli,bool>,bool[]>);
this.NoisyScoreDeltas_F = new DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]>(this.instanceCount);
this.NoisyScoreDeltas_B = new DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]>(this.instanceCount);
this.vdouble710_B = new DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]>(this.instanceCount);

Просмотреть файл

@ -82,10 +82,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private int[] labels;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_0__B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_F_marginal;
/// <summary>Message to marginal of 'ModelSelector'</summary>
public Bernoulli ModelSelector_marginal_F;
@ -139,10 +139,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
/// <summary>Message to marginal of 'Weights'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_marginal_F;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_F_marginal;
/// <summary>Message from use of 'Weights'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_use_B;

Просмотреть файл

@ -80,10 +80,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private int[] labels;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_0__B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_F_marginal;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> NoisyScoreDeltas_B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> NoisyScoreDeltas_F;
@ -111,10 +111,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
/// <summary>Message to marginal of 'Weights'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_marginal_F;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_rep_F_marginal;
/// <summary>Message from use of 'Weights'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_use_B;

Просмотреть файл

@ -67,10 +67,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
public DistributionRefArray<DistributionStructArray<Bernoulli,bool>,bool[]> Labels_InstanceRange__selector_cases_rep8_B_reduced;
public Bernoulli[][][] Labels_InstanceRange__selector_cases_uses_B;
public Discrete Labels_InstanceRange__selector_rep_B_reduced;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Discrete></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Discrete&gt;</summary>
public DistributionRefArray<Discrete,int> Labels_InstanceRange__selector_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<Discrete,int>,int[]> Labels_InstanceRange__selector_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Discrete></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Discrete&gt;</summary>
public DistributionRefArray<Discrete,int> Labels_InstanceRange__selector_rep_F_marginal;
public Discrete[][] Labels_InstanceRange__selector_uses_B;
public Discrete[][] Labels_InstanceRange__selector_uses_F;
@ -84,10 +84,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
public Discrete[][] Labels_uses_F;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_0__B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_F_marginal;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> NoisyScoreDeltas_B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> NoisyScoreDeltas_F;
@ -108,7 +108,7 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
/// <summary>Field backing the WeightPriors property</summary>
private DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> weightPriors;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_F;
/// <summary>Buffer for JaggedSubarrayOp<double>.ItemsAverageConditional<DistributionStructArray<Gaussian, double>, Gaussian, DistributionStructArray<Gaussian, double>></summary>
/// <summary>Buffer for JaggedSubarrayOp&lt;double&gt;.ItemsAverageConditional&lt;DistributionStructArray&lt;Gaussian, double&gt;, Gaussian, DistributionStructArray&lt;Gaussian, double&gt;&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_F_ClassRange__marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_F;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_FeatureIndexes_B;
@ -538,6 +538,7 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
this.Labels_uses_F[InstanceRange][1] = ArrayHelper.MakeUniform<Discrete>(Discrete.Uniform(this.classCount));
}
if (this.instanceCount>0) {
this.vbool76_reduced = new bool[this.classCount][];
this.Labels_InstanceRange__selector_cases_rep8_B_reduced = new DistributionRefArray<DistributionStructArray<Bernoulli,bool>,bool[]>(this.classCount);
}
for(int ClassMaxNoisyScore = 0; ClassMaxNoisyScore<this.classCount; ClassMaxNoisyScore++) {
@ -549,11 +550,6 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
this.Labels_InstanceRange__selector_cases_rep8_B_reduced[ClassMaxNoisyScore][_a8] = Bernoulli.Uniform();
}
}
}
if (this.instanceCount>0) {
this.vbool76_reduced = new bool[this.classCount][];
}
for(int ClassMaxNoisyScore = 0; ClassMaxNoisyScore<this.classCount; ClassMaxNoisyScore++) {
if (this.instanceCount>0) {
this.vbool76_reduced[ClassMaxNoisyScore] = new bool[this.classCount];
}
@ -786,8 +782,8 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
this.Labels_uses_F[InstanceRange] = new Discrete[2];
}
this.Labels_InstanceRange__selector_cases_uses_B = new Bernoulli[this.instanceCount][][];
this.Labels_InstanceRange__selector_cases_rep8_B_reduced = default(DistributionRefArray<DistributionStructArray<Bernoulli,bool>,bool[]>);
this.vbool76_reduced = default(bool[][]);
this.Labels_InstanceRange__selector_cases_rep8_B_reduced = default(DistributionRefArray<DistributionStructArray<Bernoulli,bool>,bool[]>);
this.NoisyScoreDeltas_F = new DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]>(this.instanceCount);
this.NoisyScoreDeltas_B = new DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]>(this.instanceCount);
this.vdouble740_B = new DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]>(this.instanceCount);

Просмотреть файл

@ -65,10 +65,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private int[] labels;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_0__B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_F_marginal;
/// <summary>Message to marginal of 'ModelSelector'</summary>
public Bernoulli ModelSelector_marginal_F;
@ -119,7 +119,7 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth0_F;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_B;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_F;
/// <summary>Buffer for JaggedSubarrayOp<double>.ItemsAverageConditional<DistributionStructArray<Gaussian, double>, Gaussian, DistributionStructArray<Gaussian, double>></summary>
/// <summary>Buffer for JaggedSubarrayOp&lt;double&gt;.ItemsAverageConditional&lt;DistributionStructArray&lt;Gaussian, double&gt;, Gaussian, DistributionStructArray&lt;Gaussian, double&gt;&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_F_ClassRange__marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_F;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_FeatureIndexes_B;

Просмотреть файл

@ -63,10 +63,10 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private int[] labels;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_0__B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_B_toDef;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> MaxNoisyScore_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> MaxNoisyScore_rep_F_marginal;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> NoisyScoreDeltas_B;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> NoisyScoreDeltas_F;
@ -92,7 +92,7 @@ namespace Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInterna
private DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> weightPriors;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_B;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_F;
/// <summary>Buffer for JaggedSubarrayOp<double>.ItemsAverageConditional<DistributionStructArray<Gaussian, double>, Gaussian, DistributionStructArray<Gaussian, double>></summary>
/// <summary>Buffer for JaggedSubarrayOp&lt;double&gt;.ItemsAverageConditional&lt;DistributionStructArray&lt;Gaussian, double&gt;, Gaussian, DistributionStructArray&lt;Gaussian, double&gt;&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_depth1_F_ClassRange__marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> Weights_F;
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> Weights_FeatureIndexes_B;

Просмотреть файл

@ -145,10 +145,10 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
public bool Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isDone;
/// <summary>True if Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23 has performed initialisation. Set this to false to force re-execution of Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23</summary>
public bool Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised;
/// <summary>True if Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22 has executed. Set this to false to force re-execution of Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22</summary>
public bool Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isDone;
/// <summary>True if Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22 has performed initialisation. Set this to false to force re-execution of Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22</summary>
public bool Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised;
/// <summary>True if Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21 has executed. Set this to false to force re-execution of Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21</summary>
public bool Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isDone;
/// <summary>True if Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21 has performed initialisation. Set this to false to force re-execution of Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21</summary>
public bool Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised;
/// <summary>True if Changed_ObservationCount has executed. Set this to false to force re-execution of Changed_ObservationCount</summary>
public bool Changed_ObservationCount_isDone;
/// <summary>True if Changed_ObservationCount_Ratings has executed. Set this to false to force re-execution of Changed_ObservationCount_Ratings</summary>
@ -209,7 +209,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
public DistributionStructArray<Gaussian,double>[] ItemBias_uses_B;
/// <summary>Messages to use of 'ItemBias'</summary>
public DistributionStructArray<Gaussian,double>[] ItemBias_uses_F;
/// <summary>Buffer for GetItemsOp<double>.ItemsAverageConditional<DistributionStructArray<Gaussian, double>, Gaussian></summary>
/// <summary>Buffer for GetItemsOp&lt;double&gt;.ItemsAverageConditional&lt;DistributionStructArray&lt;Gaussian, double&gt;, Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> ItemBias_uses_F_1__marginal;
/// <summary>Field backing the ItemBiasFeatureWeightPriorVariance property</summary>
private double itemBiasFeatureWeightPriorVariance;
@ -244,7 +244,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> ItemTraitFeatureWeights_NonZeroItemFeatureIndices_F;
/// <summary>Message from use of 'ItemTraitFeatureWeights'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> ItemTraitFeatureWeights_use_B;
/// <summary>Buffer for JaggedSubarrayOp<double>.ItemsAverageConditional<DistributionStructArray<Gaussian, double>, Gaussian, DistributionStructArray<Gaussian, double>></summary>
/// <summary>Buffer for JaggedSubarrayOp&lt;double&gt;.ItemsAverageConditional&lt;DistributionStructArray&lt;Gaussian, double&gt;, Gaussian, DistributionStructArray&lt;Gaussian, double&gt;&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> ItemTraitFeatureWeights_use_F_trait__marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> itemTraitMean_B;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> itemTraitMean_F;
@ -252,7 +252,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> ItemTraits_depth0_F;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>[] ItemTraits_depth0_uses_B;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>[] ItemTraits_depth0_uses_F;
/// <summary>Buffer for GetItemsOp<double[]>.ItemsAverageConditional<DistributionRefArray<DistributionStructArray<Gaussian, double>, double[]>, DistributionStructArray<Gaussian, double>></summary>
/// <summary>Buffer for GetItemsOp&lt;double[]&gt;.ItemsAverageConditional&lt;DistributionRefArray&lt;DistributionStructArray&lt;Gaussian, double&gt;, double[]&gt;, DistributionStructArray&lt;Gaussian, double&gt;&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> ItemTraits_depth0_uses_F_1__marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> ItemTraits_F;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> ItemTraits_index10_index10_0__index10__B;
@ -323,7 +323,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
public DistributionStructArray<Gaussian,double>[] UserBias_uses_B;
/// <summary>Messages to use of 'UserBias'</summary>
public DistributionStructArray<Gaussian,double>[] UserBias_uses_F;
/// <summary>Buffer for GetItemsOp<double>.ItemsAverageConditional<DistributionStructArray<Gaussian, double>, Gaussian></summary>
/// <summary>Buffer for GetItemsOp&lt;double&gt;.ItemsAverageConditional&lt;DistributionStructArray&lt;Gaussian, double&gt;, Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> UserBias_uses_F_1__marginal;
/// <summary>Field backing the UserBiasFeatureWeightPriorVariance property</summary>
private double userBiasFeatureWeightPriorVariance;
@ -361,17 +361,17 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserThresholds_depth0_F;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>[] UserThresholds_depth0_uses_B;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>[] UserThresholds_depth0_uses_F;
/// <summary>Buffer for GetItemsOp<double[]>.ItemsAverageConditional<DistributionRefArray<DistributionStructArray<Gaussian, double>, double[]>, DistributionStructArray<Gaussian, double>></summary>
/// <summary>Buffer for GetItemsOp&lt;double[]&gt;.ItemsAverageConditional&lt;DistributionRefArray&lt;DistributionStructArray&lt;Gaussian, double&gt;, double[]&gt;, DistributionStructArray&lt;Gaussian, double&gt;&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserThresholds_depth0_uses_F_1__marginal;
public DistributionStructArray<Gaussian,double> UserThresholds_F_reduced;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserThresholds_itemUserIds_observation__F;
/// <summary>Message to marginal of 'UserThresholds'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserThresholds_marginal_F;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserThresholds_rep_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> UserThresholds_rep_B_toDef;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserThresholds_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> UserThresholds_rep_F_marginal;
/// <summary>Message from use of 'UserThresholds'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserThresholds_use_B;
@ -399,7 +399,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
public DistributionRefArray<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>,double[][]> UserTraitFeatureWeights_NonZeroUserFeatureIndices_F;
/// <summary>Message from use of 'UserTraitFeatureWeights'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserTraitFeatureWeights_use_B;
/// <summary>Buffer for JaggedSubarrayOp<double>.ItemsAverageConditional<DistributionStructArray<Gaussian, double>, Gaussian, DistributionStructArray<Gaussian, double>></summary>
/// <summary>Buffer for JaggedSubarrayOp&lt;double&gt;.ItemsAverageConditional&lt;DistributionStructArray&lt;Gaussian, double&gt;, Gaussian, DistributionStructArray&lt;Gaussian, double&gt;&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserTraitFeatureWeights_use_F_trait__marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> userTraitMean_B;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> userTraitMean_F;
@ -415,7 +415,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>[] UserTraits_uses_B;
/// <summary>Messages to use of 'UserTraits'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>[] UserTraits_uses_F;
/// <summary>Buffer for GetItemsOp<double[]>.ItemsAverageConditional<DistributionRefArray<DistributionStructArray<Gaussian, double>, double[]>, DistributionStructArray<Gaussian, double>></summary>
/// <summary>Buffer for GetItemsOp&lt;double[]&gt;.ItemsAverageConditional&lt;DistributionRefArray&lt;DistributionStructArray&lt;Gaussian, double&gt;, double[]&gt;, DistributionStructArray&lt;Gaussian, double&gt;&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserTraits_uses_F_1__marginal;
/// <summary>Field backing the UserTraitsInitializer property</summary>
private DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> userTraitsInitializer;
@ -481,7 +481,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -522,7 +522,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -562,7 +562,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -601,7 +601,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -640,7 +640,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -688,7 +688,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -732,7 +732,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -774,7 +774,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -814,7 +814,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -854,7 +854,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -893,7 +893,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -932,7 +932,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -975,7 +975,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_MiddleUserThresholdIndex_UserCount_UserThresholdCount_UserThresholdPriorMean_UserThresholdPr58_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1019,7 +1019,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1060,7 +1060,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1101,7 +1101,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1143,7 +1143,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_TraitCount_UserCount_isDone = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isDone = false;
@ -1185,7 +1185,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1226,7 +1226,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1279,7 +1279,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1322,7 +1322,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1371,7 +1371,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_TraitCount_UserCount_isDone = false;
this.Changed_TraitCount_UserCount_UserTraitsInitializer_isDone = false;
@ -1416,7 +1416,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1456,7 +1456,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1495,7 +1495,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_UserBiasMessage_UserCount_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1534,7 +1534,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1582,7 +1582,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isDone = false;
this.Changed_UserCount_UserThresholdCount_UserThresholdsMessage_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isDone = false;
this.Changed_TraitCount_UserCount_isDone = false;
this.Changed_TraitCount_UserCount_UserTraitsInitializer_isDone = false;
@ -1630,7 +1630,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1672,7 +1672,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1720,7 +1720,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isDone = false;
this.Changed_UserCount_UserThresholdCount_UserThresholdsMessage_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1760,7 +1760,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1803,7 +1803,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_MiddleUserThresholdIndex_UserCount_UserThresholdCount_UserThresholdPriorMean_UserThresholdPr58_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1844,7 +1844,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_MiddleUserThresholdIndex_UserCount_UserThresholdCount_UserThresholdPriorMean_UserThresholdPr58_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1884,7 +1884,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_UserCount_UserThresholdCount_UserThresholdsInitializer_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1923,7 +1923,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_UserCount_UserThresholdCount_UserThresholdsMessage_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -1963,7 +1963,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -2002,7 +2002,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_TraitCount_UserCount_UserTraitsInitializer_isDone = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
@ -2041,7 +2041,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -2081,7 +2081,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -2125,7 +2125,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isInitialised = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isInitialised = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isInitialised = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isInitialised = false;
@ -2978,6 +2978,10 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
if (this.Changed_ItemFeatureCount_isDone) {
return ;
}
this.ItemBiasFeatureWeights_marginal_F = new DistributionStructArray<Gaussian,double>(this.itemFeatureCount);
for(int itemFeature = 0; itemFeature<this.itemFeatureCount; itemFeature++) {
this.ItemBiasFeatureWeights_marginal_F[itemFeature] = Gaussian.Uniform();
}
this.ItemBiasFeatureWeights_F = new DistributionStructArray<Gaussian,double>(this.itemFeatureCount);
this.ItemBiasFeatureWeights_F_reduced = default(Gaussian);
if (this.itemFeatureCount>0) {
@ -2986,10 +2990,6 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
for(int itemFeature = 0; itemFeature<this.itemFeatureCount; itemFeature++) {
this.ItemBiasFeatureWeights_F[itemFeature] = this.ItemBiasFeatureWeights_F_reduced;
}
this.ItemBiasFeatureWeights_marginal_F = new DistributionStructArray<Gaussian,double>(this.itemFeatureCount);
for(int itemFeature = 0; itemFeature<this.itemFeatureCount; itemFeature++) {
this.ItemBiasFeatureWeights_marginal_F[itemFeature] = Gaussian.Uniform();
}
this.Changed_ItemFeatureCount_isDone = true;
}
@ -3427,9 +3427,9 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
/// <summary>Computations that depend on the observed value of numberOfIterationsDecreased and UserCount and UserThresholdCount and UseSharedUserThresholds and must reset on changes to AffinityNoiseVariance and ItemBiasFeatureWeightPriorVariance and ItemBiasInitializer and ItemBiasMessage and ItemBiasVariance and ItemCount and ItemFeatureCount and ItemIds and ItemTraitFeatureWeightPriorVariance and ItemTraitsInitializer and ItemTraitsMessage and ItemTraitVariance and MiddleUserThresholdIndex and NonZeroItemFeatureCounts and NonZeroItemFeatureIndices and NonZeroItemFeatureValues and NonZeroUserFeatureCounts and NonZeroUserFeatureIndices and NonZeroUserFeatureValues and ObservationCount and Ratings and TraitCount and UserBiasFeatureWeightPriorVariance and UserBiasInitializer and UserBiasMessage and UserBiasVariance and UserFeatureCount and UserIds and UserThresholdNoiseVariance and UserThresholdPriorMean and UserThresholdPriorVariance and UserThresholdsInitializer and UserThresholdsMessage and UserTraitFeatureWeightPriorVariance and UserTraitsInitializer and UserTraitsMessage and UserTraitVariance</summary>
/// <param name="initialise">If true, reset messages that initialise loops</param>
private void Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22(bool initialise)
private void Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21(bool initialise)
{
if (this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isDone&&((!initialise)||this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised)) {
if (this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isDone&&((!initialise)||this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised)) {
return ;
}
for(int user = 0; user<this.userCount; user++) {
@ -3449,8 +3449,8 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.UserThresholds_depth0_uses_F_1__marginal = GetItemsOp<double[]>.MarginalInit<DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>>(this.UserThresholds_depth0_uses_F[1]);
}
}
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isDone = true;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isInitialised = true;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isDone = true;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isInitialised = true;
}
/// <summary>Computations that depend on the observed value of ObservationCount</summary>
@ -3767,8 +3767,8 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.UserBias_use_F = new DistributionStructArray<Gaussian,double>(this.userCount);
this.UserBias_uses_B[1] = new DistributionStructArray<Gaussian,double>(this.userCount);
this.UserThresholds_depth0_B = new DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>(this.userCount);
this.UserThresholds_depth0_uses_B[0] = new DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>(this.userCount);
this.UserThresholds_depth0_uses_B[1] = new DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>(this.userCount);
this.UserThresholds_depth0_uses_B[0] = new DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>(this.userCount);
this.UserThresholds_uses_B = new Gaussian[this.userCount][][];
this.UserThresholds_uses_F = new Gaussian[this.userCount][][];
this.UserThresholds_use_F = new DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]>(this.userCount);
@ -3814,11 +3814,11 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
}
for(int user = 0; user<this.userCount; user++) {
this.UserThresholds_depth0_B[user] = new DistributionStructArray<Gaussian,double>(this.userThresholdCount);
this.UserThresholds_depth0_uses_B[1][user] = new DistributionStructArray<Gaussian,double>(this.userThresholdCount);
this.UserThresholds_depth0_uses_B[0][user] = new DistributionStructArray<Gaussian,double>(this.userThresholdCount);
for(int userThreshold = 0; userThreshold<this.userThresholdCount; userThreshold++) {
this.UserThresholds_depth0_uses_B[0][user][userThreshold] = Gaussian.Uniform();
}
this.UserThresholds_depth0_uses_B[1][user] = new DistributionStructArray<Gaussian,double>(this.userThresholdCount);
this.UserThresholds_uses_B[user] = new Gaussian[this.userThresholdCount][];
for(int userThreshold = 0; userThreshold<this.userThresholdCount; userThreshold++) {
this.UserThresholds_uses_B[user][userThreshold] = new Gaussian[2];
@ -3960,7 +3960,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserBiasInitializer_UserCount_Init_AffinityNoiseVariance_ItemBia17_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21_isDone = false;
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7_isDone = false;
this.Changed_NonZeroUserFeatureCounts_numberOfIterationsDecreased_TraitCount_UserCount_Init_AffinityNoise42_isDone = false;
this.Changed_numberOfIterationsDecreased_TraitCount_UserCount_UserTraitsInitializer_Init_AffinityNoiseVar27_isDone = false;
@ -4024,7 +4024,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UserThresholdsInitializer_Init_Affi23(initialise);
this.Changed_UserCount_UserThresholdCount_UserThresholdsMessage();
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_Init_AffinityNoiseVariance_ItemBias20(initialise);
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini22(initialise);
this.Changed_numberOfIterationsDecreased_UserCount_UserThresholdCount_UseSharedUserThresholds_Init_Affini21(initialise);
this.Changed_numberOfIterationsDecreased_UserCount_Init_AffinityNoiseVariance_ItemBiasFeatureWeightPriorV7(initialise);
this.Changed_TraitCount_UserCount();
this.Changed_TraitCount_UserCount_UserTraitsInitializer();

Просмотреть файл

@ -79,7 +79,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
public DistributionStructArray<Gaussian,double> ItemBias_itemItemIds_observation__F;
/// <summary>Message from use of 'ItemBias'</summary>
public DistributionStructArray<Gaussian,double> ItemBias_use_B;
/// <summary>Buffer for GetItemsOp<double>.ItemsAverageConditional<DistributionStructArray<Gaussian, double>, Gaussian></summary>
/// <summary>Buffer for GetItemsOp&lt;double&gt;.ItemsAverageConditional&lt;DistributionStructArray&lt;Gaussian, double&gt;, Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> ItemBias_use_F_marginal;
public DistributionStructArray<Gaussian,double> itemBiasObs_B;
/// <summary>Field backing the ItemBiasPrior property</summary>
@ -92,7 +92,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> ItemTraits_itemItemIds_observation__F;
/// <summary>Message from use of 'ItemTraits'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> ItemTraits_use_B;
/// <summary>Buffer for GetItemsOp<double[]>.ItemsAverageConditional<DistributionRefArray<DistributionStructArray<Gaussian, double>, double[]>, DistributionStructArray<Gaussian, double>></summary>
/// <summary>Buffer for GetItemsOp&lt;double[]&gt;.ItemsAverageConditional&lt;DistributionRefArray&lt;DistributionStructArray&lt;Gaussian, double&gt;, double[]&gt;, DistributionStructArray&lt;Gaussian, double&gt;&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> ItemTraits_use_F_marginal;
/// <summary>Field backing the ItemTraitsPrior property</summary>
private DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> itemTraitsPrior;
@ -107,7 +107,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
public DistributionRefArray<DistributionStructArray<Bernoulli,bool>,bool[]> Ratings_observation__selector_cases_B;
public Bernoulli[][][] Ratings_observation__selector_cases_uses_B;
public Discrete[][] Ratings_observation__selector_uses_B;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Discrete></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Discrete&gt;</summary>
public DistributionRefArray<Discrete,int> Ratings_observation__selector_uses_B_toDef;
/// <summary>Field backing the TraitCount property</summary>
private int traitCount;
@ -116,7 +116,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
public DistributionStructArray<Gaussian,double> UserBias_itemUserIds_observation__F;
/// <summary>Message from use of 'UserBias'</summary>
public DistributionStructArray<Gaussian,double> UserBias_use_B;
/// <summary>Buffer for GetItemsOp<double>.ItemsAverageConditional<DistributionStructArray<Gaussian, double>, Gaussian></summary>
/// <summary>Buffer for GetItemsOp&lt;double&gt;.ItemsAverageConditional&lt;DistributionStructArray&lt;Gaussian, double&gt;, Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> UserBias_use_F_marginal;
public DistributionStructArray<Gaussian,double> userBiasObs_B;
/// <summary>Field backing the UserBiasPrior property</summary>
@ -130,15 +130,15 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
/// <summary>Field backing the UserThresholdNoiseVariance property</summary>
private double userThresholdNoiseVariance;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserThresholds_0__gi_0__F;
/// <summary>Buffer for ReplicateOp_Divide.Marginal<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.Marginal&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> UserThresholds_depth2_rep_B_toDef;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserThresholds_depth2_rep_F;
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional<Gaussian></summary>
/// <summary>Buffer for ReplicateOp_Divide.UsesAverageConditional&lt;Gaussian&gt;</summary>
public DistributionStructArray<Gaussian,double> UserThresholds_depth2_rep_F_marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserThresholds_itemUserIds_observation__F;
/// <summary>Message from use of 'UserThresholds'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserThresholds_use_B;
/// <summary>Buffer for GetItemsOp<double[]>.ItemsAverageConditional<DistributionRefArray<DistributionStructArray<Gaussian, double>, double[]>, DistributionStructArray<Gaussian, double>></summary>
/// <summary>Buffer for GetItemsOp&lt;double[]&gt;.ItemsAverageConditional&lt;DistributionRefArray&lt;DistributionStructArray&lt;Gaussian, double&gt;, double[]&gt;, DistributionStructArray&lt;Gaussian, double&gt;&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserThresholds_use_F_marginal;
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserThresholdsObs_B;
public Gaussian UserThresholdsObs_B_reduced;
@ -149,7 +149,7 @@ namespace Microsoft.ML.Probabilistic.Learners.MatchboxRecommenderInternal
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserTraits_itemUserIds_observation__F;
/// <summary>Message from use of 'UserTraits'</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserTraits_use_B;
/// <summary>Buffer for GetItemsOp<double[]>.ItemsAverageConditional<DistributionRefArray<DistributionStructArray<Gaussian, double>, double[]>, DistributionStructArray<Gaussian, double>></summary>
/// <summary>Buffer for GetItemsOp&lt;double[]&gt;.ItemsAverageConditional&lt;DistributionRefArray&lt;DistributionStructArray&lt;Gaussian, double&gt;, double[]&gt;, DistributionStructArray&lt;Gaussian, double&gt;&gt;</summary>
public DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> UserTraits_use_F_marginal;
/// <summary>Field backing the UserTraitsPrior property</summary>
private DistributionRefArray<DistributionStructArray<Gaussian,double>,double[]> userTraitsPrior;

Просмотреть файл

@ -31,7 +31,7 @@ namespace Microsoft.ML.Probabilistic.Learners.Runners.MovieLens
/// <summary>
/// Converts raiting file.
/// </summary>
/// /// <param name="reader">The input file reader.</param>
/// <param name="reader">The input file reader.</param>
/// <param name="writer">The output file writer.</param>
private static void ConvertRating(TextReader reader, TextWriter writer)
{

Просмотреть файл

@ -8,21 +8,23 @@ namespace Microsoft.ML.Probabilistic.Collections
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.Serialization;
using Microsoft.ML.Probabilistic.Serialization;
using Microsoft.ML.Probabilistic.Utilities;
/// <summary>
/// Represents a read only array.
/// Represents an immutable array.
/// </summary>
/// <remarks>
/// It is implemented as struct because it avoids extra allocations on heap.
/// <see cref="ReadOnlyArray{T}"/> doesn't have space overhead compared to regular arrays.
/// This is a partial reimplementation of System.Collections.Immutable.ImmutableArray.
/// Once we can move to netcore-only codebase, this type can be removed.
/// API is supposed to be a subset of the real thing to ease migration in future.
/// </remarks>
[Serializable]
[DataContract]
public struct ReadOnlyArray<T> : IReadOnlyList<T>
public struct ImmutableArray<T> : IReadOnlyList<T>
{
/// <summary>
/// Regular array that holds data.
@ -31,18 +33,22 @@ namespace Microsoft.ML.Probabilistic.Collections
private readonly T[] array;
/// <summary>
/// Initializes a new instance of the <see cref="ReadOnlyArray{T}"/> structure.
/// Initializes a new instance of the <see cref="ImmutableArray{T}"/> structure.
/// </summary>
[Construction("CloneArray")]
public ReadOnlyArray(T[] array)
private ImmutableArray(T[] array)
{
this.array = array;
}
/// <summary>
/// Gets a boolean value which is true if this ReadOnlyArray wraps null array.
/// Creates a new instance of <see cref="ImmutableArray{T}"/> by copying elements of
/// <paramref name="sequence"/>.
/// </summary>
public bool IsNull => this.array == null;
[Construction("CloneArray")]
public static ImmutableArray<T> CreateCopy(IEnumerable<T> sequence) =>
new ImmutableArray<T>(sequence.ToArray());
public static ImmutableArray<T> Empty => new ImmutableArray<T>(Array.Empty<T>());
/// <inheritdoc/>
public T this[int index] => this.array[index];
@ -61,32 +67,60 @@ namespace Microsoft.ML.Probabilistic.Collections
/// <remarks>
/// This is value-type non-virtual version of enumerator that is used by compiler in foreach loops.
/// </remarks>
public ReadOnlyArraySegmentEnumerator<T> GetEnumerator() =>
new ReadOnlyArraySegmentEnumerator<T>(this, 0, this.array.Length);
public ImmutableArraySegmentEnumerator<T> GetEnumerator() =>
new ImmutableArraySegmentEnumerator<T>(this, 0, this.array.Length);
/// <inheritdoc/>
IEnumerator<T> IEnumerable<T>.GetEnumerator() =>
new ReadOnlyArraySegmentEnumerator<T>(this, 0, this.array.Length);
new ImmutableArraySegmentEnumerator<T>(this, 0, this.array.Length);
/// <inheritdoc/>
IEnumerator IEnumerable.GetEnumerator() =>
new ReadOnlyArraySegmentEnumerator<T>(this, 0, this.array.Length);
new ImmutableArraySegmentEnumerator<T>(this, 0, this.array.Length);
/// <summary>
/// Helper method which allows to cast regular arrays to read only versions implicitly.
/// </summary>
public static implicit operator ReadOnlyArray<T>(T[] array) => new ReadOnlyArray<T>(array);
public override bool Equals(object o) => o is ImmutableArray<T> that && this == that;
public override int GetHashCode() => this.array.GetHashCode();
public static bool operator ==(ImmutableArray<T> left, ImmutableArray<T> right) =>
left.array == right.array;
public static bool operator !=(ImmutableArray<T> left, ImmutableArray<T> right) =>
left.array != right.array;
public class Builder
{
private T[] array;
public Builder(int size) =>
this.array = new T[size];
public T this[int index]
{
get => this.array[index];
set => this.array[index] = value;
}
public int Count => this.array.Length;
public ImmutableArray<T> MoveToImmutable()
{
var result = new ImmutableArray<T>(this.array);
this.array = Array.Empty<T>();
return result;
}
}
}
/// <summary>
/// A version if <see cref="ArraySegment{T}"/> which can not be mutated.
/// </summary>
public struct ReadOnlyArraySegment<T> : IReadOnlyList<T>
public struct ImmutableArraySegment<T> : IReadOnlyList<T>
{
/// <summary>
/// Underlying read-only array.
/// </summary>
private readonly ReadOnlyArray<T> array;
private readonly ImmutableArray<T> array;
/// <summary>
/// Index of the first element which belongs to this segment.
@ -99,11 +133,10 @@ namespace Microsoft.ML.Probabilistic.Collections
private readonly int length;
/// <summary>
/// Initializes a new instance of <see cref="ReadOnlyArraySegment{T}"/> structure.
/// Initializes a new instance of <see cref="ImmutableArraySegment{T}"/> structure.
/// </summary>
public ReadOnlyArraySegment(ReadOnlyArray<T> array, int begin, int length)
public ImmutableArraySegment(ImmutableArray<T> array, int begin, int length)
{
Argument.CheckIfValid(!array.IsNull, nameof(array));
Argument.CheckIfInRange(begin >= 0 && begin <= array.Count, nameof(begin), "Segment begin should be in the range [0, array.Count]");
Argument.CheckIfInRange(length >= 0 && length <= array.Count - begin, nameof(length), "Segment length should be in the range [0, array.Count - begin]");
@ -131,27 +164,27 @@ namespace Microsoft.ML.Probabilistic.Collections
/// <remarks>
/// This is value-type non-virtual version of enumerator that is used by compiler in foreach loops.
/// </remarks>
public ReadOnlyArraySegmentEnumerator<T> GetEnumerator() =>
new ReadOnlyArraySegmentEnumerator<T>(this.array, this.begin, this.begin + this.length);
public ImmutableArraySegmentEnumerator<T> GetEnumerator() =>
new ImmutableArraySegmentEnumerator<T>(this.array, this.begin, this.begin + this.length);
/// <inheritdoc/>
IEnumerator<T> IEnumerable<T>.GetEnumerator() =>
new ReadOnlyArraySegmentEnumerator<T>(this.array, this.begin, this.begin + this.length);
new ImmutableArraySegmentEnumerator<T>(this.array, this.begin, this.begin + this.length);
/// <inheritdoc/>
IEnumerator IEnumerable.GetEnumerator() =>
new ReadOnlyArraySegmentEnumerator<T>(this.array, this.begin, this.begin + this.length);
new ImmutableArraySegmentEnumerator<T>(this.array, this.begin, this.begin + this.length);
}
/// <summary>
/// Enumerator for read only arrays and read only array segments.
/// Enumerator for immutable arrays and immutable array segments.
/// </summary>
public struct ReadOnlyArraySegmentEnumerator<T> : IEnumerator<T>
public struct ImmutableArraySegmentEnumerator<T> : IEnumerator<T>
{
/// <summary>
/// Underlying read-only array.
/// Underlying immutable array.
/// </summary>
private readonly ReadOnlyArray<T> array;
private readonly ImmutableArray<T> array;
/// <summary>
/// Index of the first element which belongs segment begin enumerated.
@ -169,9 +202,9 @@ namespace Microsoft.ML.Probabilistic.Collections
private int pointer;
/// <summary>
/// Initializes a new instance of <see cref="ReadOnlyArraySegment{T}"/> structure.
/// Initializes a new instance of <see cref="ImmutableArraySegment{T}"/> structure.
/// </summary>
internal ReadOnlyArraySegmentEnumerator(ReadOnlyArray<T> array, int begin, int end)
internal ImmutableArraySegmentEnumerator(ImmutableArray<T> array, int begin, int end)
{
this.array = array;
this.begin = begin;
@ -203,4 +236,30 @@ namespace Microsoft.ML.Probabilistic.Collections
this.pointer = this.begin - 1;
}
}
public static class ImmutableArray
{
public static ImmutableArray<T>.Builder CreateBuilder<T>(int size) =>
new ImmutableArray<T>.Builder(size);
public static ImmutableArray<T> Create<T>() => ImmutableArray<T>.Empty;
public static ImmutableArray<T> Create<T>(T elem)
{
var builder = new ImmutableArray<T>.Builder(1) {[0] = elem};
return builder.MoveToImmutable();
}
public static ImmutableArray<T> Create<T>(T elem1, T elem2)
{
var builder = new ImmutableArray<T>.Builder(2) {[0] = elem1, [1] = elem2};
return builder.MoveToImmutable();
}
/// <summary>
/// Syntactic sugar for `ReadOnlyArray{T}.CreateCopy(sequence)`
/// </summary>
public static ImmutableArray<T> ToImmutableArray<T>(this IEnumerable<T> sequence) =>
ImmutableArray<T>.CreateCopy(sequence);
}
}

Просмотреть файл

@ -36,7 +36,7 @@ namespace Microsoft.ML.Probabilistic.Core.Maths
public override string ToString()
{
return $"{Mantissa:r}*exp({Exponent:r})";
return $"{Mantissa:g17}*exp({Exponent:g17})";
}
public static ExtendedDouble Zero()

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -259,6 +259,20 @@ namespace Microsoft.ML.Probabilistic.Utilities
}
}
/// <summary>
/// Get a string of the form "typeName.methodName&amp;lt;types&amp;gt;", suitable
/// for use as an XML element value.
/// </summary>
/// <param name="method">A method.</param>
/// <returns>A string.</returns>
public static string MethodFullNameToXmlString(MethodBase method)
{
    // '&' is deliberately left unescaped: it cannot appear in a method name,
    // so only the angle brackets of generic type lists need escaping.
    var fullName = MethodFullNameToString(method);
    return fullName
        .Replace("<", "&lt;")
        .Replace(">", "&gt;");
}
/// <summary>
/// Get a string of the form "typeName.methodName&lt;types&gt;".
/// </summary>

Просмотреть файл

@ -307,8 +307,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
var secondStartState = this[oldStateCount + automaton.Start.Index];
if (avoidEpsilonTransitions &&
(AllEndStatesHaveNoTransitions() || !automaton.Start.HasIncomingTransitions))
if (avoidEpsilonTransitions && CanMergeEndAndStart())
{
// Remove start state of appended automaton and copy all its transitions to previous end states
for (var i = 0; i < oldStateCount; ++i)
@ -359,7 +358,10 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
}
}
bool AllEndStatesHaveNoTransitions()
bool CanMergeEndAndStart() =>
AllOldEndStatesHaveNoOutgoingTransitions() || !SecondStartStateHasIncomingTransitions();
bool AllOldEndStatesHaveNoOutgoingTransitions()
{
for (var i = 0; i < oldStateCount; ++i)
{
@ -372,6 +374,19 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
return true;
}
// Returns true if any transition in the appended automaton targets its own
// start state, i.e. that start state has at least one incoming transition.
// NOTE(review): 'automaton' is captured from the enclosing method scope
// (this is a local function inside an append operation not fully visible here).
bool SecondStartStateHasIncomingTransitions()
{
    foreach (var transition in automaton.Data.Transitions)
    {
        if (transition.DestinationStateIndex == automaton.Data.StartStateIndex)
        {
            return true;
        }
    }

    return false;
}
}
#endregion
@ -397,11 +412,14 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
var hasEpsilonTransitions = false;
var usesGroups = false;
var resultStates = new StateData[this.states.Count];
var resultTransitions = new Transition[this.transitions.Count - this.numRemovedTransitions];
var hasSelfLoops = false;
var hasOnlyForwardTransitions = true;
var resultStates = ImmutableArray.CreateBuilder<StateData>(this.states.Count);
var resultTransitions = ImmutableArray.CreateBuilder<Transition>(this.transitions.Count - this.numRemovedTransitions);
var nextResultTransitionIndex = 0;
for (var i = 0; i < resultStates.Length; ++i)
for (var i = 0; i < resultStates.Count; ++i)
{
var firstResultTransitionIndex = nextResultTransitionIndex;
var transitionIndex = this.states[i].FirstTransitionIndex;
@ -410,13 +428,22 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
var node = this.transitions[transitionIndex];
var transition = node.Transition;
Debug.Assert(
transition.DestinationStateIndex < resultStates.Length,
transition.DestinationStateIndex < resultStates.Count,
"Destination indexes must be in valid range");
resultTransitions[nextResultTransitionIndex] = transition;
++nextResultTransitionIndex;
hasEpsilonTransitions = hasEpsilonTransitions || transition.IsEpsilon;
usesGroups = usesGroups || (transition.Group != 0);
if (transition.DestinationStateIndex == i)
{
hasSelfLoops = true;
}
else if (transition.DestinationStateIndex < i)
{
hasOnlyForwardTransitions = false;
}
transitionIndex = node.Next;
}
@ -427,17 +454,24 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
}
Debug.Assert(
nextResultTransitionIndex == resultTransitions.Length,
nextResultTransitionIndex == resultTransitions.Count,
"number of copied transitions must match result array size");
// Detect two very common automata shapes
var isEnumerable =
hasSelfLoops ? false :
hasOnlyForwardTransitions ? true :
(bool?)null;
return new DataContainer(
this.StartStateIndex,
resultStates,
resultTransitions,
resultStates.MoveToImmutable(),
resultTransitions.MoveToImmutable(),
!hasEpsilonTransitions,
usesGroups,
isDeterminized,
isZero: null);
isZero: null,
isEnumerable: isEnumerable);
}
#endregion
@ -552,7 +586,6 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
state.LastTransitionIndex = transitionIndex;
this.builder.states[this.Index] = state;
return new StateBuilder(this.builder, transition.DestinationStateIndex);
}

Просмотреть файл

@ -41,9 +41,8 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
public Condensation ComputeCondensation(State root, Func<Transition, bool> transitionFilter, bool useApproximateClosure)
{
Argument.CheckIfNotNull(transitionFilter, nameof(transitionFilter));
Argument.CheckIfValid(ReferenceEquals(root.Owner, this), nameof(root), "The given node belongs to a different automaton.");
return new Condensation(root, transitionFilter, useApproximateClosure);
return new Condensation(this, root, transitionFilter, useApproximateClosure);
}
/// <summary>
@ -52,6 +51,11 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// </summary>
public class Condensation
{
/// <summary>
/// Automaton to which <see cref="Root"/> belongs.
/// </summary>
private readonly Automaton<TSequence, TElement, TElementDistribution, TSequenceManipulator, TThis> automaton;
/// <summary>
/// A function specifying whether the transition should be treated as an edge
/// of the automaton graph while building the condensation.
@ -90,6 +94,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// <summary>
/// Initializes a new instance of the <see cref="Condensation"/> class.
/// </summary>
/// <param name="automaton">The automaton.</param>
/// <param name="root">The root of the condensation DAG.</param>
/// <param name="transitionFilter">
/// A function specifying whether the transition should be treated as an edge
@ -99,10 +104,14 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// Specifies whether <see cref="Weight.ApproximateClosure"/> should be used
/// instead of <see cref="Weight.Closure"/> in semiring computations.
/// </param>
internal Condensation(State root, Func<Transition, bool> transitionFilter, bool useApproximateClosure)
internal Condensation(
Automaton<TSequence, TElement, TElementDistribution, TSequenceManipulator, TThis> automaton,
State root,
Func<Transition, bool> transitionFilter, bool useApproximateClosure)
{
Debug.Assert(transitionFilter != null, "A valid transition filter must be provided.");
this.automaton = automaton;
this.Root = root;
this.transitionFilter = transitionFilter;
this.useApproximateClosure = useApproximateClosure;
@ -168,8 +177,6 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// <returns>The computed total weight.</returns>
public Weight GetWeightFromRoot(State state)
{
Argument.CheckIfValid(ReferenceEquals(state.Owner, this.Root.Owner), "state", "The given state belongs to a different automaton.");
if (!this.weightsFromRootComputed)
{
this.ComputeWeightsFromRoot();
@ -191,7 +198,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
{
var components = new List<StronglyConnectedComponent>();
var states = this.Root.Owner.States;
var states = this.automaton.States;
var stateIdStack = new Stack<int>();
var stateIdToStateInfo = new Dictionary<int, TarjanStateInfo>();
int traversalIndex = 0;
@ -269,7 +276,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
} while (stateIndex != currentStateIndex);
components.Add(new StronglyConnectedComponent(
this.transitionFilter, statesInComponent, this.useApproximateClosure));
this.automaton, this.transitionFilter, statesInComponent, this.useApproximateClosure));
}
}
@ -297,7 +304,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
Weight weightToAdd = state.EndWeight;
foreach (var transition in state.Transitions)
{
State destState = state.Owner.States[transition.DestinationStateIndex];
State destState = this.automaton.States[transition.DestinationStateIndex];
if (this.transitionFilter(transition) && !currentComponent.HasState(destState))
{
weightToAdd += transition.Weight * this.stateIdToInfo[transition.DestinationStateIndex].WeightToEnd;
@ -371,7 +378,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
// Aggregate weights of all the outgoing transitions from this state
foreach (var transition in srcState.Transitions)
{
State destState = srcState.Owner.States[transition.DestinationStateIndex];
State destState = this.automaton.States[transition.DestinationStateIndex];
if (this.transitionFilter(transition) && !currentComponent.HasState(destState))
{
CondensationStateInfo destStateInfo = this.stateIdToInfo[destState.Index];

Просмотреть файл

@ -7,7 +7,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
using System;
using System.Diagnostics;
using System.Runtime.Serialization;
using Microsoft.ML.Probabilistic.Collections;
using Microsoft.ML.Probabilistic.Serialization;
@ -28,27 +28,27 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// Index of start state of automaton.
/// </summary>
public readonly int StartStateIndex;
/// <summary>
/// All automaton states.
/// </summary>
public readonly ReadOnlyArray<StateData> States;
public readonly ImmutableArray<StateData> States;
/// <summary>
/// All automaton transitions. Transitions for the same state are stored as a contiguous block
/// inside this array.
/// </summary>
public readonly ReadOnlyArray<Transition> Transitions;
public readonly ImmutableArray<Transition> Transitions;
/// <summary>
/// Gets value indicating whether this automaton is epsilon-free.
/// </summary>
public bool IsEpsilonFree => (this.flags & Flags.IsEpsilonFree) != 0;
public bool IsEpsilonFree => this.flags.HasFlag(Flags.IsEpsilonFree);
/// <summary>
/// Get value indicating whether this automaton uses groups.
/// </summary>
public bool UsesGroups => (this.flags & Flags.UsesGroups) != 0;
public bool UsesGroups => this.flags.HasFlag(Flags.UsesGroups);
/// <summary>
/// Gets value indicating whether this automaton is determinized
@ -58,8 +58,8 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// False value means that this automaton can not be determinized
/// </remarks>
public bool? IsDeterminized =>
(this.flags & Flags.DeterminizationStateKnown) != 0
? (this.flags & Flags.IsDeterminized) != 0
this.flags.HasFlag(Flags.DeterminizationStateKnown)
? this.flags.HasFlag(Flags.IsDeterminized)
: (bool?)null;
/// <summary>
@ -69,22 +69,28 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// Null value means that this property is unknown.
/// </remarks>
public bool? IsZero =>
((this.flags & Flags.IsZeroStateKnown) != 0)
? (this.flags & Flags.IsZero) != 0
this.flags.HasFlag(Flags.IsZeroStateKnown)
? this.flags.HasFlag(Flags.IsZero)
: (bool?)null;
public bool? IsEnumerable =>
this.flags.HasFlag(Flags.IsEnumerableStateKnown)
? this.flags.HasFlag(Flags.IsEnumerable)
: (bool?)null;
/// <summary>
/// Initializes instance of <see cref="DataContainer"/>.
/// </summary>
[Construction("StartStateIndex", "States", "Transitions", "IsEpsilonFree", "UsesGroups", "IsDeterminized", "IsZero")]
[Construction("StartStateIndex", "States", "Transitions", "IsEpsilonFree", "UsesGroups", "IsDeterminized", "IsZero", "IsEnumerable")]
public DataContainer(
int startStateIndex,
ReadOnlyArray<StateData> states,
ReadOnlyArray<Transition> transitions,
ImmutableArray<StateData> states,
ImmutableArray<Transition> transitions,
bool isEpsilonFree,
bool usesGroups,
bool? isDeterminized,
bool? isZero)
bool? isZero,
bool? isEnumerable)
{
this.flags =
(isEpsilonFree ? Flags.IsEpsilonFree : 0) |
@ -92,7 +98,9 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
(isDeterminized.HasValue ? Flags.DeterminizationStateKnown : 0) |
(isDeterminized == true ? Flags.IsDeterminized : 0) |
(isZero.HasValue ? Flags.IsZeroStateKnown : 0) |
(isZero == true ? Flags.IsZero : 0);
(isZero == true ? Flags.IsZero : 0) |
(isEnumerable.HasValue ? Flags.IsEnumerableStateKnown : 0) |
(isEnumerable == true ? Flags.IsEnumerable : 0);
this.StartStateIndex = startStateIndex;
this.States = states;
this.Transitions = transitions;
@ -100,11 +108,13 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
public DataContainer With(
bool? isDeterminized = null,
bool? isZero= null)
bool? isZero = null,
bool? isEnumerable = null)
{
// Can't overwrite known properties
Debug.Assert(isDeterminized.HasValue != this.IsDeterminized.HasValue || isDeterminized == this.IsDeterminized);
Debug.Assert(isZero.HasValue != this.IsZero.HasValue || isZero == this.IsZero);
Debug.Assert(isEnumerable.HasValue != this.IsEnumerable.HasValue || isEnumerable == this.IsEnumerable);
return new DataContainer(
this.StartStateIndex,
@ -113,7 +123,8 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
this.IsEpsilonFree,
this.UsesGroups,
isDeterminized ?? this.IsDeterminized,
isZero ?? this.IsZero);
isZero ?? this.IsZero,
isEnumerable ?? this.IsEnumerable);
}
/// <summary>
@ -165,10 +176,10 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
{
this.flags = (Flags)info.GetValue(nameof(this.flags), typeof(Flags));
this.StartStateIndex = (int)info.GetValue(nameof(this.StartStateIndex), typeof(int));
this.States = (StateData[])info.GetValue(nameof(this.States), typeof(StateData[]));
this.Transitions = (Transition[])info.GetValue(nameof(this.Transitions), typeof(Transition[]));
this.States = ((StateData[])info.GetValue(nameof(this.States), typeof(StateData[]))).ToImmutableArray();
this.Transitions = ((Transition[])info.GetValue(nameof(this.Transitions), typeof(Transition[]))).ToImmutableArray();
if (!IsConsistent())
if (!this.IsConsistent())
{
throw new Exception("Deserialized automaton is inconsistent!");
}
@ -194,6 +205,8 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
IsDeterminized = 0x8,
IsZeroStateKnown = 0x10,
IsZero = 0x20,
IsEnumerableStateKnown = 0x40,
IsEnumerable = 0x80,
}
}
}

Просмотреть файл

@ -329,24 +329,24 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// <summary>
/// A mapping from state ids to weights. This array is sorted by state Id.
/// </summary>
private readonly ReadOnlyArray<WeightedState> weightedStates;
private readonly ImmutableArray<WeightedState> weightedStates;
private readonly int singleStateIndex;
public WeightedStateSet(int stateIndex)
{
this.weightedStates = null;
this.weightedStates = default(ImmutableArray<WeightedState>);
this.singleStateIndex = stateIndex;
}
public WeightedStateSet(ReadOnlyArray<WeightedState> weightedStates)
public WeightedStateSet(ImmutableArray<WeightedState> weightedStates)
{
Debug.Assert(weightedStates.Count > 0);
Debug.Assert(IsSorted(weightedStates));
if (weightedStates.Count == 1)
{
Debug.Assert(weightedStates[0].Weight == Weight.One);
this.weightedStates = null;
this.weightedStates = default(ImmutableArray<WeightedState>);
this.singleStateIndex = weightedStates[0].Index;
}
else
@ -357,12 +357,12 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
}
public int Count =>
this.weightedStates.IsNull
(this.weightedStates == default(ImmutableArray<WeightedState>))
? 1
: this.weightedStates.Count;
public WeightedState this[int index] =>
this.weightedStates.IsNull
(this.weightedStates == default(ImmutableArray<WeightedState>))
? new WeightedState(this.singleStateIndex, Weight.One)
: this.weightedStates[index];
@ -444,7 +444,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// <summary>
/// Checks weather states array is sorted in ascending order by Index.
/// </summary>
private static bool IsSorted(ReadOnlyArray<WeightedState> array)
private static bool IsSorted(ImmutableArray<WeightedState> array)
{
for (var i = 1; i < array.Count; ++i)
{
@ -520,7 +520,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
MergeRepeatedEntries(weightsClone);
var maxWeight = NormalizeWeights(weightsClone);
return (new WeightedStateSet(weightsClone.ToArray()), maxWeight);
return (new WeightedStateSet(weightsClone.ToImmutableArray()), maxWeight);
}
private static void MergeRepeatedEntries(List<WeightedState> weightedStates)

Просмотреть файл

@ -31,10 +31,13 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// <summary>
/// Initializes a new instance of the <see cref="EpsilonClosure"/> class.
/// </summary>
/// <param name="automaton">The automaton from to which <paramref name="state"/> belongs.</param>
/// <param name="state">The state, which epsilon closure this instance will represent.</param>
internal EpsilonClosure(State state)
internal EpsilonClosure(
Automaton<TSequence, TElement, TElementDistribution, TSequenceManipulator, TThis> automaton,
State state)
{
weightedStates = new List<(State, Weight)>(DefaultStateListCapacity);
this.weightedStates = new List<(State, Weight)>(DefaultStateListCapacity);
// Optimize for a very common case: a single-node closure
bool singleNodeClosure = true;
@ -61,7 +64,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
}
else
{
Condensation condensation = state.Owner.ComputeCondensation(state, tr => tr.IsEpsilon, true);
Condensation condensation = automaton.ComputeCondensation(state, tr => tr.IsEpsilon, true);
for (int i = 0; i < condensation.ComponentCount; ++i)
{
StronglyConnectedComponent component = condensation.GetComponent(i);

Просмотреть файл

@ -1,4 +1,4 @@
// Licensed to the .NET Foundation under one or more agreements.
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
@ -18,37 +18,30 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// Represents a reference to a state of automaton for exposure in public API.
/// </summary>
/// <remarks>
/// Acts as a "fat reference" to state in automaton. In addition to reference to actual StateData it carries
/// 3 additional properties for convenience: <see cref="Owner"/> automaton, <see cref="Index"/> of the state
/// and full <see cref="transitions"/> table.
/// Acts as a "fat reference" to state in automaton. In addition to reference to actual
/// StateData it carries 2 additional properties for convenience: <see cref="Index"/>
/// of the state and full <see cref="transitions"/> table.
/// </remarks>
public struct State : IEquatable<State>
{
private readonly ReadOnlyArray<StateData> states;
private readonly ImmutableArray<StateData> states;
private readonly ReadOnlyArray<Transition> transitions;
private readonly ImmutableArray<Transition> transitions;
/// <summary>
/// Initializes a new instance of <see cref="State"/> class. Used internally by automaton implementation
/// to wrap StateData for use in public Automaton APIs.
/// </summary>
internal State(
Automaton<TSequence, TElement, TElementDistribution, TSequenceManipulator, TThis> owner,
ReadOnlyArray<StateData> states,
ReadOnlyArray<Transition> transitions,
ImmutableArray<StateData> states,
ImmutableArray<Transition> transitions,
int index)
{
this.Owner = owner;
this.states = states;
this.transitions = transitions;
this.Index = index;
}
/// <summary>
/// Gets automaton to which this state belongs.
/// </summary>
public Automaton<TSequence, TElement, TElementDistribution, TSequenceManipulator, TThis> Owner { get; }
/// <summary>
/// Gets the index of the state.
/// </summary>
@ -58,14 +51,14 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// Gets the ending weight of the state.
/// </summary>
public Weight EndWeight => this.Data.EndWeight;
/// <summary>
/// Gets a value indicating whether the ending weight of this state is greater than zero.
/// </summary>
public bool CanEnd => this.Data.CanEnd;
public ReadOnlyArraySegment<Transition> Transitions =>
new ReadOnlyArraySegment<Transition>(
public ImmutableArraySegment<Transition> Transitions =>
new ImmutableArraySegment<Transition>(
this.transitions,
this.Data.FirstTransitionIndex,
this.Data.TransitionsCount);
@ -75,8 +68,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// <summary>
/// Compares 2 states for equality.
/// </summary>
public static bool operator ==(State a, State b) =>
ReferenceEquals(a.Owner, b.Owner) && a.Index == b.Index;
public static bool operator ==(State a, State b) => a.Index == b.Index;
/// <summary>
/// Compares 2 states for inequality.
@ -100,17 +92,10 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// <returns>A string that represents the state.</returns>
public override string ToString()
{
const string StartStateMarker = "START ->";
const string TransitionSeparator = ",";
var sb = new StringBuilder();
var isStartState = this.Owner != null && this.Owner.Start == this;
if (isStartState)
{
sb.Append(StartStateMarker);
}
var firstTransition = true;
foreach (var transition in this.Transitions)
{
@ -139,34 +124,6 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
return sb.ToString();
}
/// <summary>
/// Gets the epsilon closure of this state.
/// </summary>
/// <returns>The epsilon closure of this state.</returns>
public EpsilonClosure GetEpsilonClosure() => new EpsilonClosure(this);
/// <summary>
/// Whether there are incoming transitions to this state
/// </summary>
public bool HasIncomingTransitions
{
get
{
foreach (var state in this.Owner.States)
{
foreach (var transition in state.Transitions)
{
if (transition.DestinationStateIndex == this.Index)
{
return true;
}
}
}
return false;
}
}
#region Serialization
public void Write(Action<double> writeDouble, Action<int> writeInt32, Action<TElementDistribution> writeElementDistribution)

Просмотреть файл

@ -25,17 +25,12 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// <summary>
/// Cached value of this.owner.Data.states. Cached for performance.
/// </summary>
internal readonly ReadOnlyArray<StateData> states;
internal readonly ImmutableArray<StateData> states;
/// <summary>
/// Cached value of this.owner.Data.states. Cached for performance.
/// </summary>
internal readonly ReadOnlyArray<Transition> transitions;
/// <summary>
/// Owner automaton of all states in collection.
/// </summary>
private readonly Automaton<TSequence, TElement, TElementDistribution, TSequenceManipulator, TThis> owner;
internal readonly ImmutableArray<Transition> transitions;
/// <summary>
/// Initializes instance of <see cref="StateCollection"/>.
@ -43,7 +38,6 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
internal StateCollection(
Automaton<TSequence, TElement, TElementDistribution, TSequenceManipulator, TThis> owner)
{
this.owner = owner;
this.states = owner.Data.States;
this.transitions = owner.Data.Transitions;
}
@ -51,7 +45,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
#region IReadOnlyList<State> methods
/// <inheritdoc/>
public State this[int index] => new State(this.owner, this.states, this.transitions, index);
public State this[int index] => new State(this.states, this.transitions, index);
/// <inheritdoc/>
public int Count => this.states.Count;

Просмотреть файл

@ -55,6 +55,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// <summary>
/// Initializes a new instance of the <see cref="StronglyConnectedComponent"/> class.
/// </summary>
/// <param name="automaton">The automaton to which all states belong</param>
/// <param name="transitionFilter">The transition filter used to build the condensation this component belongs to.</param>
/// <param name="statesInComponent">The list of states in the component.</param>
/// <param name="useApproximateClosure">
@ -62,6 +63,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// instead of <see cref="Weight.Closure"/> in semiring computations.
/// </param>
internal StronglyConnectedComponent(
Automaton<TSequence, TElement, TElementDistribution, TSequenceManipulator, TThis> automaton,
Func<Transition, bool> transitionFilter,
List<State> statesInComponent,
bool useApproximateClosure)
@ -69,15 +71,18 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
Debug.Assert(
statesInComponent.Count > 0,
"There must be at least one state in the strongly connected component.");
Debug.Assert(
statesInComponent.All(s => ReferenceEquals(s.Owner, statesInComponent[0].Owner)),
"All the states must be valid and belong to the same automaton.");
this.Automaton = automaton;
this.transitionFilter = transitionFilter;
this.statesInComponent = statesInComponent;
this.useApproximateClosure = useApproximateClosure;
}
/// <summary>
/// Automaton to which all states belong.
/// </summary>
public Automaton<TSequence, TElement, TElementDistribution, TSequenceManipulator, TThis> Automaton { get; }
/// <summary>
/// Gets the number of states in the component.
/// </summary>
@ -117,8 +122,6 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// </returns>
public int GetIndexByState(State state)
{
Argument.CheckIfValid(ReferenceEquals(state.Owner, this.statesInComponent[0].Owner), "state", "The given state belongs to other automaton.");
if (this.statesInComponent.Count == 1)
{
return this.statesInComponent[0].Index == state.Index ? 0 : -1;
@ -198,7 +201,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
State state = this.statesInComponent[srcStateIndexInComponent];
foreach (var transition in state.Transitions)
{
State destState = state.Owner.States[transition.DestinationStateIndex];
State destState = this.Automaton.States[transition.DestinationStateIndex];
int destStateIndexInComponent;
if (this.transitionFilter(transition) && (destStateIndexInComponent = this.GetIndexByState(destState)) != -1)
{

Просмотреть файл

@ -1,4 +1,4 @@
// Licensed to the .NET Foundation under one or more agreements.
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
@ -65,12 +65,8 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// <summary>
/// Cached states representation of states for zero automaton.
/// </summary>
private static readonly ReadOnlyArray<StateData> ZeroStates = new[] { new StateData(0, 0, Weight.Zero) };
/// <summary>
/// Cached states representation of transitions for zero automaton.
/// </summary>
private static readonly ReadOnlyArray<Transition> ZeroTransitions = new Transition[] { };
private static readonly ImmutableArray<StateData> SingleState =
ImmutableArray.Create(new StateData(0, 0, Weight.Zero));
/// <summary>
/// The maximum number of states an automaton can have.
@ -603,6 +599,36 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
return result;
}
/// <summary>
/// Creates an automaton which is the concatenation of given automata.
/// </summary>
/// <param name="automata">The automata to concatenate.</param>
/// <returns>The created automaton.</returns>
public static TThis Concatenate(params TThis[] automata)
{
    // Delegate to the IEnumerable<TThis> overload.
    return Concatenate((IEnumerable<TThis>)automata);
}
/// <summary>
/// Creates an automaton which is the concatenation of given automata.
/// </summary>
/// <param name="automata">The automata to concatenate.</param>
/// <returns>The created automaton.</returns>
public static TThis Concatenate(IEnumerable<TThis> automata)
{
    Argument.CheckIfNotNull(automata, "automata");

    // Start from an automaton accepting only the empty sequence with weight 1,
    // then append each automaton in order.
    var builder = new Builder(1);
    builder.Start.SetEndWeight(Weight.One);

    foreach (var automaton in automata)
    {
        builder.Append(automaton);
    }

    return builder.GetAutomaton();
}
/// <summary>
/// Creates an automaton which has given values on given sequences and is zero everywhere else.
/// </summary>
@ -784,6 +810,12 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
}
var builder = new StringBuilder();
if (this.LogValueOverride.HasValue)
{
builder.Append(this.LogValueOverride);
builder.Append(":");
}
var visitedStates = new HashSet<int>();
var stack = new Stack<(string prefix, Option<TElementDistribution> prefixDistribution, int state)>();
stack.Push((string.Empty, Option.None, Start.Index));
@ -1410,7 +1442,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
// Iterate over transitions in state1
foreach (var transition1 in state1.Transitions)
{
var destState1 = state1.Owner.States[transition1.DestinationStateIndex];
var destState1 = automaton1.States[transition1.DestinationStateIndex];
if (transition1.IsEpsilon)
{
@ -1426,7 +1458,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
Debug.Assert(
!transition2.IsEpsilon,
"The second argument of the product operation must be epsilon-free.");
var destState2 = state2.Owner.States[transition2.DestinationStateIndex];
var destState2 = automaton2.States[transition2.DestinationStateIndex];
var productLogNormalizer = Distribution<TElement>.GetLogAverageOf(
transition1.ElementDistribution.Value, transition2.ElementDistribution.Value,
out var product);
@ -1645,12 +1677,13 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
{
this.Data = new DataContainer(
0,
ZeroStates,
ZeroTransitions,
SingleState,
ImmutableArray<Transition>.Empty,
isEpsilonFree: true,
usesGroups: false,
isDeterminized: true,
isZero: true);
isZero: true,
isEnumerable: true);
}
/// <summary>
@ -1826,7 +1859,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
}
else
{
var closure = this.States[stateIndex].GetEpsilonClosure();
var closure = new EpsilonClosure(this, this.States[stateIndex]);
if (sequencePos == sequenceLength)
{
@ -2014,32 +2047,21 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// <returns>The sequences in the support of this automaton</returns>
public IEnumerable<TSequence> EnumerateSupport(int maxCount = 1000000, bool tryDeterminize = true)
{
if (tryDeterminize && this is StringAutomaton)
int idx = 0;
foreach (var seq in this.EnumerateSupportInternal(tryDeterminize))
{
this.TryDeterminize();
}
// Lazily return sequences until the count is exceeded.
var enumeration = this.EnumerateSupport(
new Stack<TElement>(),
new ArrayDictionary<bool>(),
this.Start.Index);
if (!tryDeterminize) enumeration = enumeration.Distinct();
var result = enumeration.Select(
(seq, idx) =>
if (seq == null)
{
if (idx < maxCount)
{
return seq;
}
else
{
throw new AutomatonEnumerationCountException(maxCount);
}
});
throw new NotSupportedException("Infinite loops cannot be enumerated");
}
return result;
if (++idx > maxCount)
{
throw new AutomatonEnumerationCountException(maxCount);
}
yield return seq;
}
}
/// <summary>
@ -2052,16 +2074,20 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// <returns>True if successful, false otherwise</returns>
public bool TryEnumerateSupport(int maxCount, out IEnumerable<TSequence> result, bool tryDeterminize = true)
{
if (tryDeterminize && this is StringAutomaton)
var limitedResult = new List<TSequence>();
foreach (var seq in this.EnumerateSupportInternal(tryDeterminize))
{
this.TryDeterminize();
if (seq == null || limitedResult.Count >= maxCount)
{
result = null;
return false;
}
limitedResult.Add(seq);
}
result = this.EnumerateSupport(new Stack<TElement>(), new ArrayDictionary<bool>(), this.Start.Index);
if (!tryDeterminize) result = result.Distinct();
result = result.Take(maxCount + 1).ToList();
return result.Count() <= maxCount;
result = limitedResult;
return true;
}
/// <summary>
@ -2227,8 +2253,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
while (stack.Count > 0)
{
var oldStateIndex = stack.Pop();
var oldState = automaton.States[oldStateIndex];
var closure = oldState.GetEpsilonClosure();
var closure = new EpsilonClosure(automaton, automaton.States[oldStateIndex]);
var resultState = builder[oldToNewState[oldStateIndex].Value];
resultState.SetEndWeight(closure.EndWeight);
@ -2255,7 +2280,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
this.Data = builder.GetData();
this.LogValueOverride = automaton.LogValueOverride;
this.PruneStatesWithLogEndWeightLessThan = automaton.LogValueOverride;
this.PruneStatesWithLogEndWeightLessThan = automaton.PruneStatesWithLogEndWeightLessThan;
}
#endregion
@ -2276,7 +2301,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
return automaton2.IsZero() ? double.NegativeInfinity : 1;
}
TThis theConverger = GetConverger(automaton1, automaton2);
TThis theConverger = GetConverger(new TThis[] {automaton1, automaton2});
var automaton1conv = automaton1.Product(theConverger);
var automaton2conv = automaton2.Product(theConverger);
@ -2310,8 +2335,20 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// Gets an automaton such that every given automaton, if multiplied by it, becomes normalizable.
/// </summary>
/// <param name="automata">The automata.</param>
/// <param name="decayWeight">The decay weight.</param>
/// <returns>An automaton, product with which will make every given automaton normalizable.</returns>
public static TThis GetConverger(params TThis[] automata)
public static TThis GetConverger(TThis automata, double decayWeight = 0.99)
{
return GetConverger(new TThis[] {automata}, decayWeight);
}
/// <summary>
/// Gets an automaton such that every given automaton, if multiplied by it, becomes normalizable.
/// </summary>
/// <param name="automata">The automata.</param>
/// <param name="decayWeight">The decay weight.</param>
/// <returns>An automaton, product with which will make every given automaton normalizable.</returns>
public static TThis GetConverger(TThis[] automata, double decayWeight = 0.99)
{
// TODO: This method might not work in the presence of non-trivial loops.
@ -2347,7 +2384,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
Weight transitionWeight = Weight.Product(
Weight.FromLogValue(-uniformDist.GetLogAverageOf(uniformDist)),
Weight.FromLogValue(-maxLogTransitionWeightSum),
Weight.FromValue(0.99));
Weight.FromValue(decayWeight));
theConverger.Start.AddSelfTransition(uniformDist, transitionWeight);
theConverger.Start.SetEndWeight(Weight.One);
@ -2510,71 +2547,182 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
}
/// <summary>
/// Recursively enumerate support of this automaton
/// Enumerate support of this automaton
/// </summary>
/// <param name="prefix">The prefix at this point</param>
/// <param name="visitedStates">The states visited at this point</param>
/// <param name="stateIndex">The index of the next state to process</param>
/// <returns>The strings supporting this automaton</returns>
private IEnumerable<TSequence> EnumerateSupport(Stack<TElement> prefix, ArrayDictionary<bool> visitedStates, int stateIndex)
private IEnumerable<TSequence> EnumerateSupportInternal(bool tryDeterminize)
{
if (visitedStates.ContainsKey(stateIndex) && visitedStates[stateIndex])
var isEnumerable = this.Data.IsEnumerable;
if (isEnumerable != null && isEnumerable.Value == false)
{
throw new NotSupportedException("Infinite loops cannot be enumerated");
// This automaton is definitely not enumerable
return new TSequence[] { null };
}
var currentState = this.States[stateIndex];
if (currentState.CanEnd)
if (tryDeterminize && this is StringAutomaton)
{
yield return SequenceManipulator.ToSequence(prefix.Reverse());
this.TryDeterminize();
}
visitedStates[stateIndex] = true;
foreach (var transition in currentState.Transitions)
var enumeration = this.EnumerateSupportInternalWithDuplicates();
if (!tryDeterminize)
{
if (transition.Weight.IsZero)
{
continue;
}
enumeration = enumeration.Distinct();
}
if (transition.IsEpsilon)
return enumeration;
}
/// <summary>
/// Stores information needed for backtracking during support enumeration.
/// </summary>
private struct StateEnumerationState
{
// Index of the automaton state this entry corresponds to.
public int StateIndex;
// Length of the accumulated sequence prefix when this state was entered;
// used to truncate the prefix when backtracking to this entry.
public int PrefixLength;
// Index (into Data.Transitions) of the transition currently being explored.
public int TransitionIndex;
// Number of transitions of this state that have not been explored yet.
public int RemainingTransitionsCount;
// Enumerator over the remaining support elements of the current transition's
// element distribution; null once the current element has been consumed.
public IEnumerator<TElement> ElementEnumerator;
}
/// <summary>
/// Enumerate support of this automaton without elimination of duplicate elements
/// </summary>
/// <returns>
/// The sequences supporting this automaton. Sequences may be non-distinct if
/// automaton is not determinized. A `null` value in enumeration means that
/// an infinite loop was reached. Public `EnumerateSupport()` / `TryEnumerateSupport()`
/// methods handle null value differently.
/// </returns>
private IEnumerable<TSequence> EnumerateSupportInternalWithDuplicates()
{
var visited = new bool[this.States.Count];
var prefix = new List<TElement>();
var stack = new Stack<StateEnumerationState>();
var current = default(StateEnumerationState);
TryMoveTo(this.Data.StartStateIndex);
if (this.States[current.StateIndex].CanEnd)
{
yield return SequenceManipulator.ToSequence(prefix);
}
while (true)
{
// Backtrack while needed
while (current.ElementEnumerator == null && current.RemainingTransitionsCount == 0)
{
foreach (var support in this.EnumerateSupport(prefix, visitedStates, transition.DestinationStateIndex))
if (stack.Count == 0)
{
yield return support;
}
}
else if (transition.ElementDistribution.Value.IsPointMass)
{
prefix.Push(transition.ElementDistribution.Value.Point);
foreach (var support in this.EnumerateSupport(prefix, visitedStates, transition.DestinationStateIndex))
{
yield return support;
// Nowhere to backtrack, enumerated everything
if (this.Data.IsEnumerable == null)
{
this.Data = this.Data.With(isEnumerable: true);
}
yield break;
}
prefix.Pop();
visited[current.StateIndex] = false;
current = stack.Pop();
prefix.RemoveRange(current.PrefixLength, prefix.Count - current.PrefixLength);
}
if (current.ElementEnumerator != null)
{
// Advance to next element in current transition
prefix.Add(current.ElementEnumerator.Current);
if (!current.ElementEnumerator.MoveNext())
{
// Element done, move to next transition
current.ElementEnumerator = null;
}
}
else if (current.RemainingTransitionsCount != 0)
{
// Advance to next transition
++current.TransitionIndex;
--current.RemainingTransitionsCount;
var transition = this.Data.Transitions[current.TransitionIndex];
if (!transition.IsEpsilon)
{
// Add next element to sequence
var elementDistribution = transition.ElementDistribution.Value;
if (!(transition.ElementDistribution.Value is CanEnumerateSupport<TElement> supportEnumerator))
{
throw new NotImplementedException(
"Only point mass element distributions or distributions for which we can enumerate support are currently implemented");
}
var enumerator = supportEnumerator.EnumerateSupport().GetEnumerator();
if (enumerator.MoveNext())
{
prefix.Add(enumerator.Current);
if (enumerator.MoveNext())
{
current.ElementEnumerator = enumerator;
}
}
}
}
if (!TryMoveTo(this.Data.Transitions[current.TransitionIndex].DestinationStateIndex))
{
// Found a loop, signal that automaton is not enumerable
this.Data = this.Data.With(isEnumerable: false);
yield return null;
yield break;
}
if (this.States[current.StateIndex].CanEnd)
{
yield return SequenceManipulator.ToSequence(prefix);
}
}
// Return false if loop was encountered
bool TryMoveTo(int index)
{
if (index >= current.StateIndex &&
current.ElementEnumerator == null &&
current.RemainingTransitionsCount == 0)
{
// Fastpath: if we move forward and current state has 0 elements left to traverse,
// we can omit the backtracking logic entirely
visited[current.StateIndex] = false;
}
else
{
if (!(transition.ElementDistribution.Value is CanEnumerateSupport<TElement> supportEnumerator))
// Slowpath: Store information needed for backtracking
stack.Push(current);
if (index <= current.StateIndex)
{
throw new NotImplementedException("Only point mass element distributions or distributions for which we can enumerate support are currently implemented");
}
foreach (var elt in supportEnumerator.EnumerateSupport())
{
prefix.Push(elt);
foreach (var support in this.EnumerateSupport(prefix, visitedStates, transition.DestinationStateIndex))
{
yield return support;
}
prefix.Pop();
// Tracking the visited states only on backward transitions is enough for
// loop detection. By not setting "visited" to true for forward transitions
// we can backtrack with less overhead in simple cases
visited[current.StateIndex] = true;
}
}
}
visitedStates[stateIndex] = false;
if (visited[index])
{
// Loop encountered
return false;
}
var state = this.Data.States[index];
current = new StateEnumerationState
{
StateIndex = index,
TransitionIndex = state.FirstTransitionIndex - 1,
RemainingTransitionsCount = state.TransitionsCount,
PrefixLength = prefix.Count,
};
return true;
}
}
/// <summary>
@ -2634,14 +2782,14 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// </summary>
public class UnlimitedStatesComputation : IDisposable
{
private readonly int originalMaxStateCount;
private readonly int originalThreadMaxStateCount;
/// <summary>
/// Initializes a new instance of the <see cref="UnlimitedStatesComputation"/> class.
/// </summary>
public UnlimitedStatesComputation()
{
originalMaxStateCount = threadMaxStateCountOverride;
this.originalThreadMaxStateCount = threadMaxStateCountOverride;
threadMaxStateCountOverride = int.MaxValue;
}
@ -2650,15 +2798,18 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// </summary>
public void CheckStateCount(TThis automaton)
{
if (automaton.States.Count > originalMaxStateCount)
var limit = this.originalThreadMaxStateCount != 0
? this.originalThreadMaxStateCount
: maxStateCount;
if (automaton.States.Count > limit)
{
throw new AutomatonTooLargeException(originalMaxStateCount);
throw new AutomatonTooLargeException(limit);
}
}
public void Dispose()
{
threadMaxStateCountOverride = originalMaxStateCount;
threadMaxStateCountOverride = this.originalThreadMaxStateCount;
}
}
#endregion
@ -2676,11 +2827,11 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
{
var propertyMask = new BitVector32();
var idx = 0;
propertyMask[1 << idx++] = true; // isEpsilonFree is alway known
propertyMask[1 << idx++] = true; // isEpsilonFree is always known
propertyMask[1 << idx++] = this.Data.IsEpsilonFree;
propertyMask[1 << idx++] = this.LogValueOverride.HasValue;
propertyMask[1 << idx++] = this.PruneStatesWithLogEndWeightLessThan.HasValue;
propertyMask[1 << idx++] = true; // start state is alway serialized
propertyMask[1 << idx++] = true; // start state is always serialized
writeInt32(propertyMask.Data);
@ -2708,8 +2859,8 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
/// Reads an automaton from.
/// </summary>
/// <remarks>
/// Serializtion format is a bit unnatural, but we do it for compatiblity with old serialized data.
/// So we don't have to maintain 2 versions of derserialization
/// Serialization format is a bit unnatural, but we do it for compatibility with old serialized data.
/// So we don't have to maintain 2 versions of deserialization.
/// </remarks>
public static TThis Read(Func<double> readDouble, Func<int> readInt32, Func<TElementDistribution> readElementDistribution)
{

Просмотреть файл

@ -199,7 +199,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
{
var s = currentComponent.GetStateByIndex(i);
var sIdx = s.Index;
foreach (var s1 in s.Owner.States)
foreach (var s1 in automaton.States)
{
if (currentComponent.HasState(s1))
{
@ -381,7 +381,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
}
int destStateIndex;
if ((destStateIndex = component.GetIndexByState(state.Owner.States[transition.DestinationStateIndex])) != -1)
if ((destStateIndex = component.GetIndexByState(component.Automaton.States[transition.DestinationStateIndex])) != -1)
{
var destStateRegexp = transition.IsEpsilon
? RegexpTreeNode<TElement>.Empty()

Просмотреть файл

@ -10,6 +10,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
/// <summary>
/// Represents a weighted finite state automaton defined on <see cref="string"/>.
@ -20,6 +21,19 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
public StringAutomaton()
{
}
/// <summary>
/// Gets a value indicating whether any transition of this automaton carries an
/// element distribution with a log probability override.
/// </summary>
public bool HasElementLogValueOverrides =>
    this.States.transitions.Any(
        transition => transition.ElementDistribution.HasValue
                      && transition.ElementDistribution.Value.HasLogProbabilityOverride);
/// <summary>
/// Computes a set of outgoing transitions from a given state of the determinization result.

Просмотреть файл

@ -1,4 +1,4 @@
// Licensed to the .NET Foundation under one or more agreements.
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
@ -208,6 +208,28 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
return new TThis { sequencePairToWeight = PairListAutomaton.Sum(transducers.Select(t => t.sequencePairToWeight)) };
}
/// <summary>
/// Creates a transducer which is the concatenation of the given transducers.
/// </summary>
/// <param name="transducers">The transducers to concatenate.</param>
/// <returns>The created transducer.</returns>
public static TThis Concatenate(params TThis[] transducers)
{
// Delegate to the IEnumerable overload; the cast avoids infinite recursion into this params overload.
return Concatenate((IEnumerable<TThis>)transducers);
}
/// <summary>
/// Creates a transducer which is the concatenation of the given transducers.
/// </summary>
/// <param name="transducers">The transducers to concatenate. Must not be null.</param>
/// <returns>The created transducer.</returns>
public static TThis Concatenate(IEnumerable<TThis> transducers)
{
// nameof keeps the argument name in the exception message refactor-safe.
Argument.CheckIfNotNull(transducers, nameof(transducers));

// Concatenate the underlying pair automata of all transducers.
return new TThis { sequencePairToWeight = PairListAutomaton.Concatenate(transducers.Select(t => t.sequencePairToWeight)) };
}
/// <summary>
/// Creates a transducer <c>T'(a, b) = sum_{k=Kmin}^{Kmax} sum_{a1 a2 ... ak = a} sum_{b1 b2 ... bk = b} T(a1, b1)T(a2, b2)...T(ak, bk)</c>,
/// where <c>T(a, b)</c> is a given transducer, and <c>Kmin</c> and <c>Kmax</c> are the minimum
@ -347,6 +369,8 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
// Populate the stack with start destination state
result.StartStateIndex = CreateDestState(mappingAutomaton.Start, srcAutomaton.Start);
var stringAutomaton = srcAutomaton as StringAutomaton;
var sourceDistributionHasLogProbabilityOverrides = stringAutomaton?.HasElementLogValueOverrides ?? false;
while (stack.Count > 0)
{
@ -359,7 +383,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
// Iterate over transitions from mappingState
foreach (var mappingTransition in mappingState.Transitions)
{
var childMappingState = mappingState.Owner.States[mappingTransition.DestinationStateIndex];
var childMappingState = mappingAutomaton.States[mappingTransition.DestinationStateIndex];
// Epsilon transition case
if (IsSrcEpsilon(mappingTransition))
@ -378,7 +402,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
{
Debug.Assert(!srcTransition.IsEpsilon, "The automaton being projected must be epsilon-free.");
var srcChildState = srcState.Owner.States[srcTransition.DestinationStateIndex];
var srcChildState = srcAutomaton.States[srcTransition.DestinationStateIndex];
var projectionLogScale = mappingTransition.ElementDistribution.Value.ProjectFirst(
srcTransition.ElementDistribution.Value, out var destElementDistribution);
@ -387,7 +411,32 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
continue;
}
var destWeight = Weight.Product(mappingTransition.Weight, srcTransition.Weight, Weight.FromLogValue(projectionLogScale));
// In the special case of a log probability override in a DiscreteChar element distribution,
// we need to compensate for the fact that the distribution is not normalized.
if (destElementDistribution.HasValue && sourceDistributionHasLogProbabilityOverrides)
{
var discreteChar =
(DiscreteChar)(IDistribution<char>)srcTransition.ElementDistribution.Value;
if (discreteChar.HasLogProbabilityOverride)
{
var totalMass = discreteChar.Ranges.EnumerableSum(rng =>
rng.Probability.Value * (rng.EndExclusive - rng.StartInclusive));
projectionLogScale -= System.Math.Log(totalMass);
}
}
var destWeight =
sourceDistributionHasLogProbabilityOverrides && destElementDistribution.HasNoValue
? Weight.One
: Weight.Product(mappingTransition.Weight, srcTransition.Weight,
Weight.FromLogValue(projectionLogScale));
// We don't want an unnormalizable distribution to become normalizable due to a rounding error.
if (Math.Abs(destWeight.LogValue) < 1e-12)
{
destWeight = Weight.One;
}
var childDestStateIndex = CreateDestState(childMappingState, srcChildState);
destState.AddTransition(destElementDistribution, destWeight, childDestStateIndex, mappingTransition.Group);
}
@ -465,7 +514,7 @@ namespace Microsoft.ML.Probabilistic.Distributions.Automata
// Enumerate transitions from the current mapping state
foreach (var mappingTransition in mappingState.Transitions)
{
var destMappingState = mappingState.Owner.States[mappingTransition.DestinationStateIndex];
var destMappingState = mappingAutomaton.States[mappingTransition.DestinationStateIndex];
// Epsilon transition case
if (IsSrcEpsilon(mappingTransition))

Просмотреть файл

@ -2,6 +2,8 @@
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
namespace Microsoft.ML.Probabilistic.Distributions
{
using System;
@ -140,7 +142,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// The probabilities need to be normalized. The character ranges need to be sorted.
/// The created objects takes ownership of the character range list.
/// </remarks>
private DiscreteChar(ReadOnlyArray<CharRange> ranges, int rangeCount) =>
private DiscreteChar(ImmutableArray<CharRange> ranges, int rangeCount) =>
this.data_ = Storage.Create(ranges);
private DiscreteChar(Storage storage) => this.data_ = storage;
@ -183,6 +185,22 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// </summary>
public bool IsWordChar => this.Data.IsWordChar;
/// <summary>
/// Gets a value indicating whether this distribution is broad - i.e. a general class of values.
/// </summary>
public bool IsBroad => this.Data.IsBroad;
/// <summary>
/// Gets a value indicating whether this distribution is partial uniform with a log probability override.
/// </summary>
public bool HasLogProbabilityOverride => this.Data.HasLogProbabilityOverride;
/// <summary>
/// Gets the log probability override value, or null when no override is present.
/// </summary>
public double? LogProbabilityOverride
{
    get
    {
        if (this.HasLogProbabilityOverride)
        {
            // When an override is in effect, every range carries the same probability,
            // so the first range's log value is the override value.
            return this.Ranges.First().Probability.LogValue;
        }

        return null;
    }
}
#endregion
#region Distribution properties
@ -530,13 +548,34 @@ namespace Microsoft.ML.Probabilistic.Distributions
}
var builder = StorageBuilder.Create();
foreach (var pair in CharRangePair.IntersectRanges(distribution1, distribution2))
{
var probProduct = pair.Probability1 * pair.Probability2;
builder.AddRange(new CharRange(pair.StartInclusive, pair.EndExclusive, probProduct));
}
this.Data = builder.GetResult();
double? logProbabilityOverride = null;
var distribution1LogProbabilityOverride = distribution1.LogProbabilityOverride;
var distribution2LogProbabilityOverride = distribution2.LogProbabilityOverride;
if (distribution1LogProbabilityOverride.HasValue)
{
if (distribution2LogProbabilityOverride.HasValue)
{
throw new ArgumentException("Only one distribution in a DiscreteChar product may have a log probability override");
}
if (distribution2.IsBroad)
{
logProbabilityOverride = distribution1LogProbabilityOverride;
}
}
else if (distribution2LogProbabilityOverride.HasValue && distribution1.IsBroad)
{
logProbabilityOverride = distribution2LogProbabilityOverride;
}
this.Data = builder.GetResult(logProbabilityOverride);
}
/// <summary>
@ -629,8 +668,23 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// <summary>
/// Sets the distribution to be uniform over the support of a given distribution.
/// </summary>
/// <param name="distribution">The distribution whose support will be used to set up the current distribution.</param>
public void SetToPartialUniformOf(DiscreteChar distribution) =>
    this.SetToPartialUniformOf(distribution, null);
/// <summary>
/// Sets the distribution to be uniform over the support of a given distribution.
/// </summary>
/// <param name="distribution">The distribution whose support will be used to setup the current distribution.</param>
/// <param name="logProbabilityOverride">An optional value to override for the log probability calculation
/// against this distribution. If this is set, then the distribution will not be normalized;
/// i.e. the probabilities will not sum to 1 over the support.</param>
/// <remarks>Overriding the log probability calculation in this way is useful within the context of using <see cref="StringDistribution"/>
/// to create more realistic language model priors. Distributions with this override are always uncached.
/// </remarks>
public void SetToPartialUniformOf(DiscreteChar distribution, double? logProbabilityOverride)
{
var builder = StorageBuilder.Create();
foreach (var range in distribution.Ranges)
@ -642,7 +696,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
range.Probability.IsZero ? Weight.Zero : Weight.One));
}
this.Data = builder.GetResult();
this.Data = builder.GetResult(logProbabilityOverride);
}
/// <summary>
@ -852,7 +906,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// Gets an array of character ranges with associated probabilities.
/// </summary>
/// <value>An array of character ranges with associated probabilities.</value>
public ReadOnlyArray<CharRange> Ranges => this.Data.Ranges;
public ImmutableArray<CharRange> Ranges => this.Data.Ranges;
/// <summary>
/// Creates a distribution which is uniform over all characters
@ -1293,7 +1347,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
}
internal static IEnumerable<CharRangePair> CombineRanges(
ReadOnlyArray<CharRange> ranges1, ReadOnlyArray<CharRange> ranges2)
ImmutableArray<CharRange> ranges1, ImmutableArray<CharRange> ranges2)
{
var rangeIndex1 = 0;
var rangeIndex2 = 0;
@ -1320,7 +1374,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
}
Weight ProcessRange(
ReadOnlyArray<CharRange> ranges,
ImmutableArray<CharRange> ranges,
int startInclusive,
ref int index,
ref int endExclusive)
@ -1380,7 +1434,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// <remarks>
/// This class is serializable but is not marked with <see cref="SerializableAttribute"/> and
/// <see cref="DataContractAttribute"/> because we have to implement serialization manually
/// due to Newtonsoft.Json not deserializing <see cref="ReadOnlyArray{T}"/> properly without
/// due to Newtonsoft.Json not deserializing <see cref="ImmutableArray{T}"/> properly without
/// "JsonObjectAttribute". Which can't be added because Infer.NET has no explicit dependency
/// on Newtonsoft.Json.
/// </remarks>
@ -1394,21 +1448,48 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// <remarks>
/// The character probabilities must be kept normalized by applying <see cref="StorageBuilder.NormalizeProbabilities"/> when necessary.
/// </remarks>
public ReadOnlyArray<CharRange> Ranges { get; }
public ImmutableArray<CharRange> Ranges { get; }
public char? Point { get; }
// Following 3 members are not immutable and can be recalculated on-demand
// Following members are not immutable and can be recalculated on-demand
public CharClasses CharClasses { get; private set; }
private string regexRepresentation;
private string symbolRepresentation;
/// <summary>
/// Flags derived from ranges (see <see cref="Flags"/>); computed once in the constructor.
/// </summary>
private readonly Flags flags;
/// <summary>
/// Gets a value indicating whether this distribution has broad support. We want to be able to
/// distinguish between specific distributions and general distributions.
/// The number of characters with non-zero probability is used as the threshold.
/// </summary>
public bool IsBroad => (this.flags & Flags.IsBroad) != 0;
/// <summary>
/// Gets a value indicating whether this distribution is partial uniform with a log probability override,
/// i.e. its probabilities do not sum to 1 over the support.
/// </summary>
public bool HasLogProbabilityOverride => (this.flags & Flags.HasLogProbabilityOverride) != 0;
/// <summary>
/// Bit flags describing properties derived from the character ranges.
/// </summary>
[Flags]
private enum Flags
{
// Set when the range probabilities do not sum to 1 (a log probability override is in effect).
HasLogProbabilityOverride = 0x01,
// Set when the support is broad, i.e. covers at least 9 characters with non-zero probability.
IsBroad = 0x02
}
#endregion
#region Constructor and factory methods
private Storage(
ReadOnlyArray<CharRange> ranges,
ImmutableArray<CharRange> ranges,
char? point,
CharClasses charClasses,
string regexRepresentation,
@ -1419,10 +1500,25 @@ namespace Microsoft.ML.Probabilistic.Distributions
this.CharClasses = charClasses;
this.regexRepresentation = regexRepresentation;
this.symbolRepresentation = symbolRepresentation;
var supportCount = this.Ranges.Where(range => !range.Probability.IsZero).Sum(range => range.EndExclusive - range.StartInclusive);
var isBroad = supportCount >= 9; // Number of non-zero digits.
var totalMass = this.Ranges.Sum(range =>
range.Probability.Value * (range.EndExclusive - range.StartInclusive));
var isScaled = Math.Abs(totalMass - 1.0) > 1e-10;
this.flags = 0;
if (isBroad)
{
flags |= Flags.IsBroad;
}
if (isScaled)
{
flags |= Flags.HasLogProbabilityOverride;
}
}
public static Storage CreateUncached(
ReadOnlyArray<CharRange> ranges,
ImmutableArray<CharRange> ranges,
char? point,
CharClasses charClasses = CharClasses.Unknown,
string regexRepresentation = null,
@ -1433,7 +1529,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
}
public static Storage Create(
ReadOnlyArray<CharRange> ranges,
ImmutableArray<CharRange> ranges,
CharClasses charClasses = CharClasses.Unknown,
string regexRepresentation = null,
string symbolRepresentation = null)
@ -1443,11 +1539,11 @@ namespace Microsoft.ML.Probabilistic.Distributions
: CreateUncached(ranges, null, charClasses, regexRepresentation, symbolRepresentation);
}
public static Storage CreatePoint(char point, ReadOnlyArray<CharRange> ranges) =>
public static Storage CreatePoint(char point, ImmutableArray<CharRange> ranges) =>
StorageCache.GetPointMass(point, ranges);
public static Storage CreatePoint(char point) =>
StorageCache.GetPointMass(point, new ReadOnlyArray<CharRange>(null));
StorageCache.GetPointMass(point, null);
public static Storage CreateUniformInRanges(
IEnumerable<char> startEndPairs,
@ -1528,7 +1624,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
#region Properties
// TODO: Assumes that there are no ranges with zero probability
private static bool IsRangesPointMass(ReadOnlyArray<CharRange> ranges) =>
private static bool IsRangesPointMass(ImmutableArray<CharRange> ranges) =>
ranges.Count > 0 && Math.Abs(ranges[0].Probability.LogValue - Weight.One.LogValue) < Eps;
/// <summary>
@ -1561,10 +1657,12 @@ namespace Microsoft.ML.Probabilistic.Distributions
#region Serialization
public static Storage FromSerializationInfo(SerializationInfo info) =>
Storage.Create(
(CharRange[]) info.GetValue(nameof(Ranges), typeof(CharRange[])),
(CharClasses) info.GetValue(nameof(CharClasses), typeof(CharClasses)));
/// <summary>
/// Reconstructs a <see cref="Storage"/> instance from serialization info
/// containing the character ranges and the precomputed character classes.
/// </summary>
public static Storage FromSerializationInfo(SerializationInfo info) =>
    Storage.Create(
        ((CharRange[])info.GetValue(nameof(Ranges), typeof(CharRange[]))).ToImmutableArray(),
        (CharClasses)info.GetValue(nameof(CharClasses), typeof(CharClasses)));
public void GetObjectData(SerializationInfo info)
{
@ -1584,10 +1682,8 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// </summary>
public static Storage Read(Func<int> readInt32, Func<double> readDouble)
{
CharRange[] ranges = null;
var nRanges = readInt32();
ranges = new CharRange[nRanges];
var ranges = new CharRange[nRanges];
for (var i = 0; i < nRanges; i++)
{
ranges[i] = CharRange.Read(readInt32, readDouble);
@ -1595,7 +1691,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
var charClasses = (CharClasses)readInt32();
return Storage.Create(ranges, charClasses);
return Storage.Create(ranges.ToImmutableArray(), charClasses);
}
#endregion
@ -1777,7 +1873,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
string WordCharRanges(string baseRange) => baseRange + "09__";
Uniform = Storage.CreateUncached(
new CharRange[] { new CharRange(char.MinValue, CharRangeEndExclusive, UniformProb) },
ImmutableArray.Create(new CharRange(char.MinValue, CharRangeEndExclusive, UniformProb)),
null,
CharClasses.Uniform,
UniformRegexRepresentation,
@ -1810,14 +1906,14 @@ namespace Microsoft.ML.Probabilistic.Distributions
PointMasses = new Storage[CharRangeEndExclusive];
}
public static Storage GetPointMass(char point, ReadOnlyArray<CharRange> ranges)
public static Storage GetPointMass(char point, ImmutableArray<CharRange>? ranges)
{
if (PointMasses[point] == null)
{
PointMasses[point] = Storage.CreateUncached(
ranges.IsNull
? new ReadOnlyArray<CharRange>(new[] { new CharRange(point, point + 1, Weight.One) })
: ranges,
ranges.HasValue
? ranges.Value
: ImmutableArray.Create(new CharRange(point, point + 1, Weight.One)),
point);
}
@ -1896,7 +1992,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
private readonly List<CharRange> ranges;
/// <summary>
/// Precomuted character class.
/// Precomputed character class.
/// </summary>
private readonly CharClasses charClasses;
@ -1971,15 +2067,23 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// <summary>
/// Normalizes probabilities in ranges and returns build Storage.
/// </summary>
public Storage GetResult()
public Storage GetResult(double? maximumProbability = null)
{
this.MergeNeighboringRanges();
this.NormalizeProbabilities();
return Storage.Create(
this.ranges.ToArray(),
this.charClasses,
this.regexRepresentation,
this.symbolRepresentation);
NormalizeProbabilities(this.ranges, maximumProbability);
return
maximumProbability.HasValue
? Storage.CreateUncached(
this.ranges.ToImmutableArray(),
null,
this.charClasses,
this.regexRepresentation,
this.symbolRepresentation)
: Storage.Create(
this.ranges.ToImmutableArray(),
this.charClasses,
this.regexRepresentation,
this.symbolRepresentation);
}
#endregion
@ -2015,16 +2119,39 @@ namespace Microsoft.ML.Probabilistic.Distributions
}
/// <summary>
/// Normalizes probabilities in ranges
/// Normalizes probabilities in ranges.
/// </summary>
private void NormalizeProbabilities()
/// <param name="ranges">The ranges.</param>
/// <param name="logProbabilityOverride">Ignores the probabilities in the ranges and creates a non-normalized partial uniform distribution.</param>
/// <exception cref="ArgumentException">Thrown if logProbabilityOverride has value corresponding to a non-probability.</exception>
public static void NormalizeProbabilities(IList<CharRange> ranges, double? logProbabilityOverride = null)
{
var normalizer = this.ComputeInvNormalizer();
for (int i = 0; i < this.ranges.Count; ++i)
if (logProbabilityOverride.HasValue)
{
var range = this.ranges[i];
this.ranges[i] = new CharRange(
range.StartInclusive, range.EndExclusive, range.Probability * normalizer);
var weight = Weight.FromLogValue(logProbabilityOverride.Value);
if (weight.IsZero || weight.Value > 1)
{
throw new ArgumentException("Invalid log probability override.");
}
for (var i = 0; i < ranges.Count; ++i)
{
var range = ranges[i];
ranges[i] = new CharRange(
range.StartInclusive, range.EndExclusive, weight);
}
}
else
{
var normalizer = ComputeInvNormalizer(ranges);
for (var i = 0; i < ranges.Count; ++i)
{
var range = ranges[i];
var probability = range.Probability * normalizer;
ranges[i] = new CharRange(
range.StartInclusive, range.EndExclusive, probability);
}
}
}
@ -2032,11 +2159,11 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// Computes the normalizer of this distribution.
/// </summary>
/// <returns>The computed normalizer.</returns>
private Weight ComputeInvNormalizer()
private static Weight ComputeInvNormalizer(IEnumerable<CharRange> ranges)
{
Weight normalizer = Weight.Zero;
var normalizer = Weight.Zero;
foreach (var range in this.ranges)
foreach (var range in ranges)
{
normalizer += Weight.FromValue(range.EndExclusive - range.StartInclusive) * range.Probability;
}

Просмотреть файл

@ -13,6 +13,19 @@ namespace Microsoft.ML.Probabilistic.Distributions
using Utilities;
using Factors.Attributes;
/// <summary>
/// Base type for all distributions; it does not specify the domain over which the distribution is defined.
/// </summary>
/// <remarks>
/// This interface is useful in generic code where distributions of different types have to be stored
/// in a single container. A container of <see cref="IDistribution"/> is a more specific type than
/// a container of <see cref="object"/> and adds some type-safety in these cases.
/// </remarks>
[Quality(QualityBand.Mature)]
public interface IDistribution : ICloneable, Diffable, SettableToUniform
{
}
/// <summary>Distribution interface</summary>
/// <typeparam name="T">The type of objects in the domain, e.g. Vector or Matrix.</typeparam>
/// <remarks><para>
@ -29,21 +42,25 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// CanGetLogAverageOf, CanGetAverageLog</c>
/// </para></remarks>
[Quality(QualityBand.Mature)]
public interface IDistribution<T> : ICloneable,
HasPoint<T>, Diffable, SettableToUniform, CanGetLogProb<T>
public interface IDistribution<T> : IDistribution, HasPoint<T>, CanGetLogProb<T>
{
}
/// <summary>
/// Interface to allow untyped access to collection distribution
/// </summary>
public interface ICollectionDistribution
public interface ICollectionDistribution : IDistribution
{
/// <summary>
/// Returns the count of known elements in collection distribution.
/// </summary>
int GetElementsCount();
/// <summary>
/// Returns the list of elements' distributions.
/// </summary>
List<IDistribution> GetElementsUntyped();
/// <summary>
/// Product of two collection distributions which also return element mapping information.
/// </summary>

Просмотреть файл

@ -174,15 +174,17 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// <param name="rate">rate = 1/scale</param>
public void SetShapeAndRate(double shape, double rate)
{
if (rate > double.MaxValue)
this.Shape = shape;
this.Rate = rate;
CheckForPointMass();
}
private void CheckForPointMass()
{
if (!IsPointMass && Rate > double.MaxValue)
{
Point = 0;
}
else
{
this.Shape = shape;
this.Rate = rate;
}
}
/// <summary>
@ -225,14 +227,8 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// <param name="scale">Scale</param>
public void SetShapeAndScale(double shape, double scale)
{
if (scale == 0)
{
Point = 0;
}
else
{
SetShapeAndRate(shape, 1.0 / scale);
}
if (double.IsPositiveInfinity(shape)) throw new ArgumentOutOfRangeException(nameof(shape), "shape is infinite. To create a point mass, set the Point property.");
SetShapeAndRate(shape, 1.0 / scale);
}
/// <summary>
@ -537,15 +533,6 @@ namespace Microsoft.ML.Probabilistic.Distributions
get { return (Shape == Double.PositiveInfinity); }
}
/// <summary>
/// Sets this instance to a point mass. The location of the
/// point mass is the existing Rate parameter
/// </summary>
private void SetToPointMass()
{
Shape = Double.PositiveInfinity;
}
/// <summary>
/// Sets/gets the instance as a point mass
/// </summary>
@ -560,7 +547,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
}
set
{
SetToPointMass();
Shape = Double.PositiveInfinity;
Rate = value;
}
}
@ -599,7 +586,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
{
if (x < 0) return double.NegativeInfinity;
if (x > double.MaxValue) // Avoid subtracting infinities below
{
{
if (rate > 0) return -x;
else if (rate < 0) return x;
// fall through when rate == 0

Просмотреть файл

@ -206,8 +206,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
{
if (!IsPointMass && Rate > double.MaxValue)
{
Rate = Math.Pow(0, Power);
SetToPointMass();
Point = Math.Pow(0, Power);
}
}
@ -283,7 +282,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
double oldShape = shape;
logRate = MMath.RisingFactorialLnOverN(shape, power) - logMeanOverPower;
shape = Math.Exp(meanLogOverPower + logRate) + 0.5;
//Console.WriteLine($"shape = {shape:r}, logRate = {logRate:r}");
//Console.WriteLine($"shape = {shape:g17}, logRate = {logRate:g17}");
if (MMath.AreEqual(oldLogRate, logRate) && MMath.AreEqual(oldShape, shape)) break;
if (double.IsNaN(shape)) throw new Exception("Failed to converge");
}
@ -450,15 +449,6 @@ namespace Microsoft.ML.Probabilistic.Distributions
get { return (Shape == Double.PositiveInfinity); }
}
/// <summary>
/// Sets this instance to a point mass. The location of the
/// point mass is the existing Rate parameter
/// </summary>
private void SetToPointMass()
{
Shape = Double.PositiveInfinity;
}
/// <summary>
/// Sets/gets the instance as a point mass
/// </summary>
@ -472,7 +462,8 @@ namespace Microsoft.ML.Probabilistic.Distributions
}
set
{
SetToPointMass();
// Change this instance to a point mass.
Shape = Double.PositiveInfinity;
Rate = value;
}
}

Просмотреть файл

@ -244,19 +244,37 @@ namespace Microsoft.ML.Probabilistic.Distributions
}
/// <summary>
/// Creates a distribution over sequences induced by a given list of distributions over sequence elements.
/// Creates a distribution over sequences induced by a given list of distributions over sequence elements
/// where the sequence can optionally end at any length, and the last element can optionally repeat without limit.
/// </summary>
/// <param name="sequence">Enumerable of distributions over sequence elements.</param>
/// <param name="elementDistributions">Enumerable of distributions over sequence elements and the transition weights.</param>
/// <param name="allowEarlyEnd">Allow the sequence to end at any point.</param>
/// <param name="repeatLastElement">Repeat the last element.</param>
/// <returns>The created distribution.</returns>
public static TThis Concatenate(IEnumerable<TElementDistribution> sequence)
public static TThis Concatenate(IEnumerable<TElementDistribution> elementDistributions, bool allowEarlyEnd = false, bool repeatLastElement = false)
{
var result = new Automaton<TSequence, TElement, TElementDistribution, TSequenceManipulator, TWeightFunction>.Builder();
var last = result.Start;
foreach (var elem in sequence)
var elementDistributionArray = elementDistributions.ToArray();
for (var i = 0; i < elementDistributionArray.Length - 1; i++)
{
last = last.AddTransition(elem, Weight.One);
last = last.AddTransition(elementDistributionArray[i], Weight.One);
if (allowEarlyEnd)
{
last.SetEndWeight(Weight.One);
}
}
var lastElement = elementDistributionArray[elementDistributionArray.Length - 1];
if (repeatLastElement)
{
last.AddSelfTransition(lastElement, Weight.One);
}
else
{
last = last.AddTransition(lastElement, Weight.One);
}
last.SetEndWeight(Weight.One);
return FromWorkspace(result.GetAutomaton());
}
@ -1602,6 +1620,23 @@ namespace Microsoft.ML.Probabilistic.Distributions
return !this.IsPointMass && this.sequenceToWeight.IsZero();
}
/// <summary>
/// Converges an improper sequence distribution by taking its product with a converger automaton.
/// </summary>
/// <param name="dist">The original (possibly improper) distribution.</param>
/// <param name="decayWeight">The decay weight passed to the converger.</param>
/// <returns>The converged distribution.</returns>
public static TThis Converge(TThis dist, double decayWeight = 0.99)
{
    // Build a converger automaton for the distribution's underlying weight function,
    // then multiply it into the distribution.
    var weightFunctions = new TWeightFunction[] { dist.sequenceToWeight };
    var convergerAutomaton =
        Automaton<TSequence, TElement, TElementDistribution, TSequenceManipulator, TWeightFunction>
            .GetConverger(weightFunctions, decayWeight);
    return dist.Product(FromWorkspace(convergerAutomaton));
}
/// <summary>
/// Checks if <paramref name="obj"/> equals to this distribution (i.e. represents the same distribution over sequences).
/// </summary>

Просмотреть файл

@ -29,7 +29,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
Sampleable<double>, SettableToWeightedSum<TruncatedGamma>,
CanGetMean<double>, CanGetVariance<double>, CanGetMeanAndVarianceOut<double, double>,
CanGetLogNormalizer, CanGetLogAverageOf<TruncatedGamma>, CanGetLogAverageOfPower<TruncatedGamma>,
CanGetAverageLog<TruncatedGamma>
CanGetAverageLog<TruncatedGamma>, CanGetMode<double>
{
/// <summary>
/// Untruncated Gamma
@ -249,7 +249,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
return double.NegativeInfinity;
else
{
return this.Gamma.GetLogProb(value) + this.Gamma.GetLogNormalizer() - GetLogNormalizer();
return this.Gamma.GetLogProb(value) + (this.Gamma.GetLogNormalizer() - GetLogNormalizer());
}
}
@ -259,9 +259,11 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// <returns></returns>
public double GetNormalizer()
{
if (IsProper())
if (IsProper() && !IsPointMass)
{
return this.Gamma.GetProbLessThan(UpperBound) - this.Gamma.GetProbLessThan(LowerBound);
// Equivalent but less accurate:
//return this.Gamma.GetProbLessThan(UpperBound) - this.Gamma.GetProbLessThan(LowerBound);
return GammaProbBetween(this.Gamma.Shape, this.Gamma.Rate, LowerBound, UpperBound);
}
else
{
@ -275,8 +277,22 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// <returns></returns>
public double GetLogNormalizer()
{
// TODO: make this more accurate.
return Math.Log(GetNormalizer());
if (IsProper() && !IsPointMass)
{
if (this.Gamma.Shape < 1 && (double)(this.Gamma.Rate * LowerBound) > 0)
{
// When Shape < 1, Gamma(Shape) > 1 so use the unregularized version to avoid underflow.
return Math.Log(GammaProbBetween(this.Gamma.Shape, this.Gamma.Rate, LowerBound, UpperBound, false)) - MMath.GammaLn(this.Gamma.Shape);
}
else
{
return Math.Log(GammaProbBetween(this.Gamma.Shape, this.Gamma.Rate, LowerBound, UpperBound));
}
}
else
{
return 0.0;
}
}
/// <summary>
@ -448,12 +464,12 @@ namespace Microsoft.ML.Probabilistic.Distributions
/// </summary>
/// <returns>The sample value</returns>
[Stochastic]
public static double Sample(double shape, double scale, double lowerBound, double upperBound)
public static double Sample(Gamma gamma, double lowerBound, double upperBound)
{
double sample;
do
{
sample = Gamma.Sample(shape, scale);
sample = gamma.Sample();
} while (sample < lowerBound || sample > upperBound);
return sample;
}
@ -471,7 +487,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
}
else
{
return Sample(Gamma.Shape, 1 / Gamma.Rate, LowerBound, UpperBound);
return Sample(Gamma, LowerBound, UpperBound);
}
}
@ -486,29 +502,58 @@ namespace Microsoft.ML.Probabilistic.Distributions
return Sample();
}
/// <summary>
/// Get the mode (highest density point) of this distribution
/// </summary>
/// <returns>The mode of the untruncated Gamma, clamped to the truncation interval.</returns>
public double GetMode()
{
    // Clamp the untruncated mode into [LowerBound, UpperBound].
    double clampedBelow = Math.Max(this.Gamma.GetMode(), this.LowerBound);
    return Math.Min(clampedBelow, this.UpperBound);
}
/// <summary>
/// Returns the mean (first moment) of the distribution
/// </summary>
/// <returns></returns>
public double GetMean()
{
if (this.Gamma.IsPointMass)
return this.Gamma.Point;
else if (!IsProper())
throw new ImproperDistributionException(this);
else
double mean, variance;
GetMeanAndVariance(out mean, out variance);
return mean;
}
/// <summary>
/// Get the variance of this distribution
/// </summary>
/// <returns>The variance.</returns>
public double GetVariance()
{
    // Delegate to the joint mean/variance computation; only the variance is needed here.
    GetMeanAndVariance(out double unusedMean, out double variance);
    return variance;
}
/// <summary>
/// Computes <c>GammaUpper(s,x)/(x^(s-1)*exp(-x)) - 1</c> to high accuracy
/// </summary>
/// <param name="s"></param>
/// <param name="x">A real number &gt;= 45 and &gt; <paramref name="s"/>/0.99</param>
/// <param name="regularized"></param>
/// <returns></returns>
public static double GammaUpperRatio(double s, double x, bool regularized = true)
{
if (s >= x * 0.99) throw new ArgumentOutOfRangeException(nameof(s), s, "s >= x*0.99");
if (x < 45) throw new ArgumentOutOfRangeException(nameof(x), x, "x < 45");
double term = (s - 1) / x;
double sum = term;
for (int i = 2; i < 1000; i++)
{
double Z = GetNormalizer();
if (Z == 0)
{
double mean = this.Gamma.GetMean();
return Math.Min(UpperBound, Math.Max(LowerBound, mean));
}
// if Z is not zero, then Z1 cannot be zero.
double Z1 = MMath.GammaLower(this.Gamma.Shape + 1, this.Gamma.Rate * UpperBound) - MMath.GammaLower(this.Gamma.Shape + 1, this.Gamma.Rate * LowerBound);
double sum = this.Gamma.Shape / this.Gamma.Rate * Z1;
return sum / Z;
term *= (s - i) / x;
double oldSum = sum;
sum += term;
if (MMath.AreEqual(sum, oldSum)) return regularized ? sum / MMath.Gamma(s) : sum;
}
throw new Exception($"GammaUpperRatio not converging for s={s:g17}, x={x:g17}, regularized={regularized}");
}
/// <summary>
@ -535,34 +580,50 @@ namespace Microsoft.ML.Probabilistic.Distributions
throw new ImproperDistributionException(this);
else
{
double Z = GetNormalizer();
if (Z == 0)
{
mean = Math.Min(UpperBound, Math.Max(LowerBound, this.Gamma.GetMean()));
variance = 0.0;
return;
}
// Apply the recurrence GammaUpper(s+1,x,false) = s*GammaUpper(s,x,false) + x^s*exp(-x)
double rl = this.Gamma.Rate * LowerBound;
double ru = this.Gamma.Rate * UpperBound;
double m = this.Gamma.Shape / this.Gamma.Rate;
// t = x * Rate
// dt = dx * Rate
double Z1 = MMath.GammaLower(this.Gamma.Shape + 1, this.Gamma.Rate * UpperBound) - MMath.GammaLower(this.Gamma.Shape + 1, this.Gamma.Rate * LowerBound);
mean = m * Z1 / Z;
double sum2 = m * (this.Gamma.Shape + 1) / this.Gamma.Rate * (MMath.GammaLower(this.Gamma.Shape + 2, this.Gamma.Rate * UpperBound) - MMath.GammaLower(this.Gamma.Shape + 2, this.Gamma.Rate * LowerBound));
variance = sum2 / Z - mean * mean;
double offset, offset2;
if (ru > double.MaxValue)
{
double logZ = GetLogNormalizer();
if (logZ < double.MinValue)
{
mean = GetMode();
variance = 0.0;
return;
}
offset = Math.Exp(MMath.GammaUpperLogScale(this.Gamma.Shape, rl) - logZ);
offset2 = (rl - this.Gamma.Shape) / this.Gamma.Rate * offset;
}
else
{
// This fails when GammaUpperScale underflows to 0
double Z = GetNormalizer();
if (Z == 0)
{
mean = GetMode();
variance = 0.0;
return;
}
double gammaUpperScaleLower = MMath.GammaUpperScale(this.Gamma.Shape, rl);
double gammaUpperScaleUpper = MMath.GammaUpperScale(this.Gamma.Shape, ru);
offset = (gammaUpperScaleLower - gammaUpperScaleUpper) / Z;
offset2 = ((rl - this.Gamma.Shape) / this.Gamma.Rate * gammaUpperScaleLower - (ru - this.Gamma.Shape) / this.Gamma.Rate * gammaUpperScaleUpper) / Z;
}
if (rl == this.Gamma.Shape) mean = LowerBound + offset / this.Gamma.Rate;
else
{
mean = (this.Gamma.Shape + offset) / this.Gamma.Rate;
if (mean < LowerBound) mean = MMath.NextDouble(mean);
if (mean < LowerBound) mean = MMath.NextDouble(mean);
}
if (mean > double.MaxValue) variance = mean;
else variance = (m + offset2 + (1 - offset) * offset / this.Gamma.Rate) / this.Gamma.Rate;
}
}
/// <summary>
/// Get the variance of this distribution
/// </summary>
/// <returns></returns>
public double GetVariance()
{
double mean, var;
GetMeanAndVariance(out mean, out var);
return var;
}
/// <summary>
/// Computes E[x^power]
/// </summary>
@ -570,6 +631,7 @@ namespace Microsoft.ML.Probabilistic.Distributions
public double GetMeanPower(double power)
{
if (power == 0.0) return 1.0;
else if (power == 1.0) return GetMean();
else if (IsPointMass) return Math.Pow(Point, power);
//else if (Rate == 0.0) return (power > 0) ? Double.PositiveInfinity : 0.0;
else if (!IsProper()) throw new ImproperDistributionException(this);
@ -577,23 +639,94 @@ namespace Microsoft.ML.Probabilistic.Distributions
{
throw new ArgumentException("Cannot compute E[x^" + power + "] for " + this + " (shape <= " + (-power) + ")");
}
else
else if (power != 1)
{
double Z = GetNormalizer();
// Large powers lead to overflow
power = Math.Min(Math.Max(power, -1e300), 1e300);
double logZ = GetLogNormalizer();
if (logZ < double.MinValue)
{
return Math.Pow(GetMode(), power);
}
double shapePlusPower = this.Gamma.Shape + power;
double Z1;
double logZ1;
bool regularized = shapePlusPower >= 1;
if (regularized)
{
Z1 = Math.Exp(MMath.GammaLn(shapePlusPower) - MMath.GammaLn(this.Gamma.Shape)) *
(MMath.GammaLower(shapePlusPower, this.Gamma.Rate * UpperBound) - MMath.GammaLower(shapePlusPower, this.Gamma.Rate * LowerBound));
// This formula cannot be used when shapePlusPower <= 0
logZ1 = (power * MMath.RisingFactorialLnOverN(this.Gamma.Shape, power)) +
Math.Log(GammaProbBetween(shapePlusPower, this.Gamma.Rate, LowerBound, UpperBound, regularized));
}
else
{
Z1 = Math.Exp(- MMath.GammaLn(this.Gamma.Shape)) *
(MMath.GammaUpper(shapePlusPower, this.Gamma.Rate * LowerBound, regularized) - MMath.GammaUpper(shapePlusPower, this.Gamma.Rate * UpperBound, regularized));
logZ1 = -MMath.GammaLn(this.Gamma.Shape) +
Math.Log(GammaProbBetween(shapePlusPower, this.Gamma.Rate, LowerBound, UpperBound, regularized));
}
return Math.Pow(this.Gamma.Rate, -power) * Z1 / Z;
return Math.Exp(-power * Math.Log(this.Gamma.Rate) + logZ1 - logZ);
}
else
{
double Z = GetNormalizer();
if (Z == 0.0)
{
return Math.Pow(GetMode(), power);
}
double shapePlusPower = this.Gamma.Shape + power;
double Z1;
double gammaLnShapePlusPower = MMath.GammaLn(shapePlusPower);
double gammaLnShape = MMath.GammaLn(this.Gamma.Shape);
bool regularized = true; // (gammaLnShapePlusPower - gammaLnShape <= 700);
if (regularized)
{
// If shapePlusPower is large and Gamma.Rate * UpperBound is small, then this can lead to Inf * 0
Z1 = Math.Exp(power * MMath.RisingFactorialLnOverN(this.Gamma.Shape, power)) *
GammaProbBetween(shapePlusPower, this.Gamma.Rate, LowerBound, UpperBound, regularized);
}
else
{
Z1 = Math.Exp(-gammaLnShape) *
GammaProbBetween(shapePlusPower, this.Gamma.Rate, LowerBound, UpperBound, regularized);
}
return Z1 / (Math.Pow(this.Gamma.Rate, power) * Z);
}
}
/// <summary>
/// Computes GammaLower(shape, rate*upperBound) - GammaLower(shape, rate*lowerBound) to high accuracy.
/// </summary>
/// <param name="shape">The Gamma shape parameter.</param>
/// <param name="rate">The Gamma rate parameter.</param>
/// <param name="lowerBound">The lower bound of the interval.</param>
/// <param name="upperBound">The upper bound of the interval.</param>
/// <param name="regularized">If true, the result is regularized (divided by Gamma(shape)).</param>
/// <returns>The (possibly unregularized) incomplete-Gamma mass between the two bounds.</returns>
public static double GammaProbBetween(double shape, double rate, double lowerBound, double upperBound, bool regularized = true)
{
    double rl = rate * lowerBound;
    // Use the criterion from Gautschi (1979) to determine whether GammaLower(a,x) or GammaUpper(a,x) is smaller.
    bool lowerIsSmaller;
    if (rl > 0.25)
        lowerIsSmaller = (shape > rl + 0.25);
    else
        lowerIsSmaller = (shape > -MMath.Ln2 / Math.Log(rl));
    if (!lowerIsSmaller)
    {
        // GammaUpper is the smaller quantity here, so subtract in that form to avoid cancellation.
        double logl = Math.Log(lowerBound);
        if (rate * upperBound < 1e-16 && shape < -1e-16 / (Math.Log(rate) + logl))
        {
            // Small-argument, small-shape regime: use the limiting form shape*log(u/l)
            // (presumably the leading term of the series expansion — confirm against MMath.GammaUpper behavior).
            double logu = Math.Log(upperBound);
            return shape * (logu - logl);
        }
        else
        {
            // This is inaccurate when lowerBound is close to upperBound. In that case, use a Taylor expansion of lowerBound around upperBound.
            return MMath.GammaUpper(shape, rl, regularized) - MMath.GammaUpper(shape, rate * upperBound, regularized);
        }
    }
    else
    {
        // GammaLower is the smaller quantity; subtract lower incomplete Gammas and
        // rescale by Gamma(shape) at the end if an unregularized result was requested.
        double diff = MMath.GammaLower(shape, rate * upperBound) - MMath.GammaLower(shape, rl);
        return regularized ? diff : (MMath.Gamma(shape) * diff);
    }
}

Просмотреть файл

@ -10,7 +10,7 @@ namespace Microsoft.ML.Probabilistic.Factors
using Microsoft.ML.Probabilistic.Math;
using Microsoft.ML.Probabilistic.Factors.Attributes;
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="BinomialOp"]/doc/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="BinomialOp"]/doc/*'/>
/// <remarks>The factor is f(sample,n,p) = choose(n,sample) p^sample (1-p)^(n-sample)</remarks>
[FactorMethod(new string[] { "sample", "trialCount", "p" }, typeof(Rand), "Binomial", typeof(int), typeof(double))]
[Quality(QualityBand.Preview)]

Просмотреть файл

@ -683,14 +683,14 @@ namespace Microsoft.ML.Probabilistic.Factors
}
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace"]/doc/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_LaplaceProp"]/doc/*'/>
[FactorMethod(typeof(Math), "Exp", typeof(double))]
[Quality(QualityBand.Experimental)]
public static class ExpOp_LaplaceProp
{
public static bool ForceProper;
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace"]/message_doc[@name="LogAverageFactor(Gamma, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_LaplaceProp"]/message_doc[@name="LogAverageFactor(Gamma, Gaussian, Gaussian)"]/*'/>
public static double LogAverageFactor([SkipIfUniform] Gamma exp, [Proper] Gaussian d, Gaussian to_d)
{
Gaussian dPost = d * to_d;
@ -702,13 +702,13 @@ namespace Microsoft.ML.Probabilistic.Factors
return exp.GetLogProb(expx) + d.GetLogProb(x) + MMath.LnSqrt2PI + 0.5 * Math.Log(v);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace"]/message_doc[@name="LogEvidenceRatio(Gamma, Gaussian, Gaussian, Gamma)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_LaplaceProp"]/message_doc[@name="LogEvidenceRatio(Gamma, Gaussian, Gaussian, Gamma)"]/*'/>
public static double LogEvidenceRatio([SkipIfUniform] Gamma exp, [Proper] Gaussian d, Gaussian to_d, Gamma to_exp)
{
return LogAverageFactor(exp, d, to_d) - to_exp.GetLogAverageOf(exp);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace"]/message_doc[@name="DAverageConditional(Gamma, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_LaplaceProp"]/message_doc[@name="DAverageConditional(Gamma, Gaussian, Gaussian)"]/*'/>
public static Gaussian DAverageConditional([SkipIfUniform] Gamma exp, [Proper] Gaussian d, Gaussian to_d)
{
if (exp.IsPointMass)
@ -726,7 +726,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return Gaussian.FromNatural(r * dhat + dlogf, r);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace"]/message_doc[@name="ExpAverageConditional(Gamma, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_LaplaceProp"]/message_doc[@name="ExpAverageConditional(Gamma, Gaussian, Gaussian)"]/*'/>
public static Gamma ExpAverageConditional(Gamma exp, Gaussian d, Gaussian to_d)
{
if (d.IsPointMass)
@ -752,19 +752,19 @@ namespace Microsoft.ML.Probabilistic.Factors
}
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace2"]/doc/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace"]/doc/*'/>
[FactorMethod(typeof(Math), "Exp", typeof(double))]
[Buffers("x")]
[Quality(QualityBand.Experimental)]
public static class ExpOp_Laplace
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace2"]/message_doc[@name="XInit(Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace"]/message_doc[@name="XInit(Gaussian)"]/*'/>
public static double XInit([SkipIfUniform] Gaussian d)
{
return d.GetMean();
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace2"]/message_doc[@name="X(Gamma, Gaussian, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace"]/message_doc[@name="X2(Gamma, Gaussian, double)"]/*'/>
public static double X2([SkipIfUniform] Gamma exp, [Proper] Gaussian d, double x)
{
// perform one Newton update of X
@ -778,6 +778,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return (t + d.MeanTimesPrecision) / (r + d.Precision);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace"]/message_doc[@name="X(Gamma, Gaussian)"]/*'/>
public static double X([SkipIfUniform] Gamma exp, [Proper] Gaussian d)
{
double x = 0;
@ -790,7 +791,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return x;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace2"]/message_doc[@name="LogAverageFactor(Gamma, Gaussian, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace"]/message_doc[@name="LogAverageFactor(Gamma, Gaussian, double)"]/*'/>
public static double LogAverageFactor([SkipIfUniform] Gamma exp, [Proper] Gaussian d, double x)
{
double expx = Math.Exp(x);
@ -800,13 +801,13 @@ namespace Microsoft.ML.Probabilistic.Factors
return exp.GetLogProb(expx) + d.GetLogProb(x) + MMath.LnSqrt2PI + 0.5 * Math.Log(v);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace2"]/message_doc[@name="LogEvidenceRatio(Gamma, Gaussian, double, Gamma)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace"]/message_doc[@name="LogEvidenceRatio(Gamma, Gaussian, double, Gamma)"]/*'/>
public static double LogEvidenceRatio([SkipIfUniform] Gamma exp, [Proper] Gaussian d, double x, Gamma to_exp)
{
return LogAverageFactor(exp, d, x) - to_exp.GetLogAverageOf(exp);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace2"]/message_doc[@name="DAverageConditional(Gamma, Gaussian, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace"]/message_doc[@name="DAverageConditional(Gamma, Gaussian, double)"]/*'/>
public static Gaussian DAverageConditional([SkipIfUniform] Gamma exp, [Proper] Gaussian d, double x)
{
if (exp.IsPointMass)
@ -828,7 +829,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace2"]/message_doc[@name="ExpAverageConditional(Gamma, Gaussian, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ExpOp_Laplace"]/message_doc[@name="ExpAverageConditional(Gamma, Gaussian, double)"]/*'/>
public static Gamma ExpAverageConditional(Gamma exp, Gaussian d, double x)
{
if (d.IsPointMass)

Просмотреть файл

@ -81,7 +81,7 @@ namespace Microsoft.ML.Probabilistic.Factors
[ParameterNames("sample", "shape", "rate", "lowerBound", "upperBound")]
public static double TruncatedGammaFromShapeAndRate(double shape, double rate, double lowerBound, double upperBound)
{
return TruncatedGamma.Sample(shape, 1 / rate, lowerBound, upperBound);
return TruncatedGamma.Sample(Gamma.FromShapeAndRate(shape, rate), lowerBound, upperBound);
}
/// <summary>

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -536,9 +536,27 @@ namespace Microsoft.ML.Probabilistic.Factors
double x = sample.Point;
double shape2 = shape + rate.Shape;
double xrr = x + rate.Rate;
double dlogf = (shape - 1) / x - shape2 / xrr;
double ddlogf = -(shape - 1) / (x * x) + shape2 / (xrr * xrr);
return Gamma.FromDerivatives(x, dlogf, ddlogf, GammaFromShapeAndRateOp.ForceProper);
if (x == 0)
{
if (shape == 1)
{
double dlogf = -shape2 / xrr;
double ddlogf = shape2 / (xrr * xrr);
return Gamma.FromDerivatives(x, dlogf, ddlogf, GammaFromShapeAndRateOp.ForceProper);
}
else
{
// a = -x*x*ddLogP
// b = a / x - dLogP
return Gamma.FromShapeAndRate(shape, shape2 / xrr);
}
}
else
{
double dlogf = (shape - 1) / x - shape2 / xrr;
double ddlogf = -(shape - 1) / (x * x) + shape2 / (xrr * xrr);
return Gamma.FromDerivatives(x, dlogf, ddlogf, GammaFromShapeAndRateOp.ForceProper);
}
}
double sampleMean, sampleVariance;
if (sample.Rate == 0)
@ -681,12 +699,16 @@ namespace Microsoft.ML.Probabilistic.Factors
/// <returns></returns>
internal static double FindMaximum(double shape1, double shape2, double yRate, double rateRate)
{
if (shape2 == 0)
{
return shape1 / rateRate;
}
if (yRate < 0)
throw new ArgumentException("yRate < 0");
// f = shape1*log(rs) - shape2*log(rs+by) - br*rs
// df = shape1/rs - shape2/(rs + by) - br
// df=0 when shape1*(rs+by) - shape2*rs - br*rs*(rs+by) = 0
// -br*rs^2 + (shape1-shape2-br*by)*rs + shape1*by = 0
throw new ArgumentOutOfRangeException(nameof(yRate), yRate, "yRate < 0");
// f = shape1*log(x) - shape2*log(x+yRate) - x*rateRate
// df = shape1/x - shape2/(x + yrate) - rateRate
// df=0 when shape1*(x+yRate) - shape2*x - rateRate*x*(x+yRate) = 0
// -rateRate*x^2 + (shape1-shape2-rateRate*yRate)*x + shape1*yRate = 0
double a = -rateRate;
double b = shape1 - shape2 - yRate * rateRate;
double c = shape1 * yRate;
@ -714,11 +736,11 @@ namespace Microsoft.ML.Probabilistic.Factors
// compute the derivative wrt log(rs)
double sum = r0 + yRate;
double p = r0 / sum;
double df = shape1 - shape2*p - rateRate*r0;
double df = shape1 - shape2 * p - rateRate * r0;
if (Math.Abs(df) > 1)
{
// take a Newton step for extra accuracy
double ddf = shape2*p*(p-1) - rateRate*r0;
double ddf = shape2 * p * (p - 1) - rateRate * r0;
r0 *= Math.Exp(-df / ddf);
}
if (double.IsNaN(r0))
@ -814,7 +836,7 @@ namespace Microsoft.ML.Probabilistic.Factors
{
if (hasInflection)
rmax = r * 1.1; // restart closer to the stationary point
else
else
throw new Exception("rmax < r");
}
if (MMath.AreEqual(rmax, r))
@ -908,12 +930,19 @@ namespace Microsoft.ML.Probabilistic.Factors
// dlogf = s/r - (s+xs-1)/(r+xr)
// ddlogf = -s/r^2 + (s+xs-1)/(r+xr)^2
r = rate.Point;
double v = 1 / r;
double r2 = r + sample.Rate;
double v2 = 1 / r2;
double dlogf = shape * v - shape2 * v2;
double ddlogf = -shape * v * v + shape2 * v2 * v2;
return Gamma.FromDerivatives(r, dlogf, ddlogf, GammaFromShapeAndRateOp.ForceProper);
if (r == 0)
{
// a = -r*r*ddLogP
// b = a / r - dLogP
return Gamma.FromShapeAndRate(shape + 1, shape2 / r2);
}
else
{
double dlogf = shape / r - shape2 / r2;
double ddlogf = -shape / (r * r) + shape2 / (r2 * r2);
return Gamma.FromDerivatives(r, dlogf, ddlogf, GammaFromShapeAndRateOp.ForceProper);
}
}
double shape1 = shape + rate.Shape;
double rateMean, rateVariance;
@ -1043,8 +1072,8 @@ namespace Microsoft.ML.Probabilistic.Factors
double p = r / (r + y.Rate);
double p2 = p * p;
double shape2 = GammaFromShapeAndRateOp_Slow.AddShapesMinus1(y.Shape, shape);
double dlogf = shape - shape2 * p;
double ddlogf = -shape + shape2 * p2;
double dlogf = shape - shape2 * p;
double ddlogf = -shape + shape2 * p2;
double dddlogf = 2 * shape - 2 * shape2 * p * p2;
double d4logf = -6 * shape + 6 * shape2 * p2 * p2;
return new double[] { dlogf, ddlogf, dddlogf, d4logf };

Просмотреть файл

@ -10,7 +10,7 @@ namespace Microsoft.ML.Probabilistic.Factors
using Microsoft.ML.Probabilistic.Math;
using Microsoft.ML.Probabilistic.Factors.Attributes;
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GammaProductOp_Laplace"]/doc/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GammaPowerProductOp_Laplace"]/doc/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
[Buffers("Q")]
[Quality(QualityBand.Experimental)]

Просмотреть файл

@ -476,7 +476,7 @@ namespace Microsoft.ML.Probabilistic.Factors
[Quality(QualityBand.Mature)]
public static class IntCasesOp
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="IntCasesOp"]/message_doc[@name="CasesAverageConditional{BernoulliList}(Discrete, BernoulliList)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="IntCasesOp"]/message_doc[@name="CasesAverageConditional(Discrete, int)"]/*'/>
public static Bernoulli CasesAverageConditional(Discrete i, int resultIndex)
{
return Bernoulli.FromLogOdds(i.GetLogProb(resultIndex));
@ -541,7 +541,7 @@ namespace Microsoft.ML.Probabilistic.Factors
//-- VMP --------------------------------------------------------------------------------------------
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="IntCasesOp"]/message_doc[@name="CasesAverageLogarithm{BernoulliList}(Discrete, BernoulliList)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="IntCasesOp"]/message_doc[@name="CasesAverageLogarithm(Discrete, int)"]/*'/>
public static Bernoulli CasesAverageLogarithm(Discrete i, int resultIndex)
{
return CasesAverageConditional(i, resultIndex);

Просмотреть файл

@ -21,7 +21,7 @@ namespace Microsoft.ML.Probabilistic.Factors
[Quality(QualityBand.Mature)]
public static class GateEnterPartialOp<T>
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GateEnterPartialOp{T}"]/message_doc[@name="LogEvidenceRatio()"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GateEnterPartialOp{T}"]/message_doc[@name="LogEvidenceRatio{TDist}(IList{TDist})"]/*'/>
[Skip]
public static double LogEvidenceRatio<TDist>(IList<TDist> enterPartial)
where TDist : IDistribution<T>
@ -818,7 +818,7 @@ namespace Microsoft.ML.Probabilistic.Factors
[Quality(QualityBand.Mature)]
public static class GateEnterOneOp<T>
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GateEnterOneOp{T}"]/message_doc[@name="LogEvidenceRatio()"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GateEnterOneOp{T}"]/message_doc[@name="LogEvidenceRatio{TDist}(TDist)"]/*'/>
[Skip]
public static double LogEvidenceRatio<TDist>(TDist enterOne)
where TDist : IDistribution<T>
@ -1047,7 +1047,7 @@ namespace Microsoft.ML.Probabilistic.Factors
/// </summary>
public static bool ForceProper = true;
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GateEnterOp{T}"]/message_doc[@name="LogEvidenceRatio()"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GateEnterOp{T}"]/message_doc[@name="LogEvidenceRatio{TDist}(IList{TDist})"]/*'/>
[Skip]
public static double LogEvidenceRatio<TDist>(IList<TDist> enter)
where TDist : IDistribution<T>

Просмотреть файл

@ -230,7 +230,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GateExitOp{T}"]/message_doc[@name="CasesAverageConditional{TDist, TBernoulliList}(TDist, IList{TDist}, TBernoulliList)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GateExitOp{T}"]/message_doc[@name="CasesAverageConditional{TDist}(TDist, TDist, int)"]/*'/>
/// <typeparam name="TDist">The type of the distribution over the variable exiting the gate.</typeparam>
public static Bernoulli CasesAverageConditional<TDist>(
[SkipIfUniform] TDist exit, [Indexed] TDist values, [IgnoreDependency] int resultIndex)
@ -416,7 +416,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GateExitOp{T}"]/message_doc[@name="CasesAverageConditional{TDist, TBernoulliList}(TDist, IList{T}, TBernoulliList)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GateExitOp{T}"]/message_doc[@name="CasesAverageConditional{TDist}(TDist, IList{T}, int)"]/*'/>
/// <typeparam name="TDist">The type of the distribution over the variable exiting the gate.</typeparam>
public static Bernoulli CasesAverageConditional<TDist>(TDist exit, IList<T> values, int resultIndex)
where TDist : CanGetLogProb<T>
@ -501,7 +501,7 @@ namespace Microsoft.ML.Probabilistic.Factors
#if true
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GateExitOp{T}"]/message_doc[@name="CasesAverageLogarithm{TDist, TBernoulliList}(TDist, IList{TDist}, TBernoulliList)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GateExitOp{T}"]/message_doc[@name="CasesAverageLogarithm{TDist}(TDist, IList{TDist}, int)"]/*'/>
/// <typeparam name="TDist">The type of the distribution over the variable exiting the gate.</typeparam>
[NoTriggers] // see VmpTests.GateExitTriggerTest
public static Bernoulli CasesAverageLogarithm<TDist>(

Просмотреть файл

@ -957,10 +957,12 @@ namespace Microsoft.ML.Probabilistic.Factors
/// This class defines specializations for the case where variance is a point mass.
/// These methods have fewer inputs, allowing more efficient schedules.
/// </summary>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianFromMeanAndVarianceOp_PointVariance"]/doc/*'/>
[FactorMethod(typeof(Factor), "GaussianFromMeanAndVariance", Default = false)]
[Quality(QualityBand.Preview)]
public static class GaussianFromMeanAndVarianceOp_PointVariance
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianFromMeanAndVarianceOp_PointVariance"]/message_doc[@name="LogEvidenceRatio(double, Gaussian, Gamma)"]/*'/>
public static double LogEvidenceRatio(
double sample, [SkipIfUniform] Gaussian mean, [SkipIfUniform] Gamma variance)
{
@ -969,6 +971,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return GaussianFromMeanAndVarianceOp.LogEvidenceRatio(sample, mean, variance.Point);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianFromMeanAndVarianceOp_PointVariance"]/message_doc[@name="LogEvidenceRatio(Gaussian, Gaussian, Gamma)"]/*'/>
[Skip]
public static double LogEvidenceRatio(
[SkipIfUniform] Gaussian sample, [SkipIfUniform] Gaussian mean, [SkipIfUniform] Gamma variance)
@ -978,6 +981,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return GaussianFromMeanAndVarianceOp.LogEvidenceRatio(sample, mean, variance.Point);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianFromMeanAndVarianceOp_PointVariance"]/message_doc[@name="VarianceAverageConditional(Gaussian, Gaussian, Gamma)"]/*'/>
public static Gamma VarianceAverageConditional([SkipIfUniform] Gaussian sample, [SkipIfUniform] Gaussian mean, [Proper] Gamma variance)
{
if (!variance.IsPointMass)
@ -985,6 +989,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return GaussianFromMeanAndVarianceOp.VarianceAverageConditional(sample, mean, variance);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianFromMeanAndVarianceOp_PointVariance"]/message_doc[@name="SampleAverageConditional(Gaussian, Gamma)"]/*'/>
public static Gaussian SampleAverageConditional([SkipIfUniform] Gaussian mean, [Proper] Gamma variance)
{
if (!variance.IsPointMass)
@ -992,6 +997,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return GaussianFromMeanAndVarianceOp.SampleAverageConditional(mean, variance.Point);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianFromMeanAndVarianceOp_PointVariance"]/message_doc[@name="MeanAverageConditional(Gaussian, Gamma)"]/*'/>
public static Gaussian MeanAverageConditional([SkipIfUniform] Gaussian sample, [Proper] Gamma variance)
{
return SampleAverageConditional(sample, variance);

Просмотреть файл

@ -11,14 +11,14 @@ namespace Microsoft.ML.Probabilistic.Factors
using Attributes;
using Utilities;
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/doc/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/doc/*'/>
/// <typeparam name="T">The type of an item.</typeparam>
[FactorMethod(typeof(Factor), "GetDeepJaggedItems<>", Default = true)]
[Quality(QualityBand.Mature)]
[Buffers("marginal")]
public static class GetDeepJaggedItemsOp<T>
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogAverageFactor(IList{T}, IList{T}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="LogAverageFactor(IList{IList{IList{T}}}, IList{T}, IList{IList{IList{int}}})"]/*'/>
public static double LogAverageFactor(IList<IList<IList<T>>> items, IList<T> array, IList<IList<IList<int>>> indices)
{
IEqualityComparer<T> equalityComparer = Utilities.Util.GetEqualityComparer<T>();
@ -41,19 +41,19 @@ namespace Microsoft.ML.Probabilistic.Factors
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogEvidenceRatio(IList{T}, IList{T}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="LogEvidenceRatio(IList{IList{IList{T}}}, IList{T}, IList{IList{IList{int}}})"]/*'/>
public static double LogEvidenceRatio(IList<IList<IList<T>>> items, IList<T> array, IList<IList<IList<int>>> indices)
{
return LogAverageFactor(items, array, indices);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="AverageLogFactor(IList{T}, IList{T}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="AverageLogFactor(IList{IList{IList{T}}}, IList{T}, IList{IList{IList{int}}})"]/*'/>
public static double AverageLogFactor(IList<IList<IList<T>>> items, IList<T> array, IList<IList<IList<int>>> indices)
{
return LogAverageFactor(items, array, indices);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogAverageFactor{DistributionType}(IList{DistributionType}, IList{DistributionType}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="LogAverageFactor{ItemType, ItemType2,DistributionType}(IList{ItemType}, IList{DistributionType}, IList{IList{IList{int}}})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType2">The type of a sub-sub-array.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
@ -92,7 +92,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return z;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="AverageLogFactor{DistributionType}(IList{DistributionType}, IList{DistributionType})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="AverageLogFactor{ItemType, ItemType2, DistributionType}(IList{ItemType}, IList{DistributionType})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType2">The type of a sub-sub-array.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
@ -105,7 +105,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogAverageFactor{DistributionType}(IList{T}, IList{DistributionType}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="LogAverageFactor{DistributionType}(IList{IList{IList{T}}}, IList{DistributionType}, IList{IList{IList{int}}})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
public static double LogAverageFactor<DistributionType>(IList<IList<IList<T>>> items, IList<DistributionType> array, IList<IList<IList<int>>> indices)
where DistributionType : HasPoint<T>, CanGetLogProb<T>
@ -140,7 +140,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return z;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogEvidenceRatio{DistributionType}(IList{T}, IList{DistributionType}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="LogEvidenceRatio{DistributionType}(IList{IList{IList{T}}}, IList{DistributionType}, IList{IList{IList{int}}})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
public static double LogEvidenceRatio<DistributionType>(IList<IList<IList<T>>> items, IList<DistributionType> array, IList<IList<IList<int>>> indices)
where DistributionType : HasPoint<T>, CanGetLogProb<T>
@ -148,7 +148,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return LogAverageFactor<DistributionType>(items, array, indices);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="AverageLogFactor{DistributionType}(IList{T}, IList{DistributionType})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="AverageLogFactor{DistributionType}(IList{IList{IList{T}}}, IList{DistributionType})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
[Skip]
public static double AverageLogFactor<DistributionType>(IList<IList<IList<T>>> items, IList<DistributionType> array)
@ -157,7 +157,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogAverageFactor{DistributionType}(IList{DistributionType}, IList{T}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="LogAverageFactor{ItemType, ItemType2, DistributionType}(IList{ItemType}, IList{T}, IList{IList{IList{int}}})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType2">The type of a sub-sub-array.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
@ -187,7 +187,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return z;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogEvidenceRatio{DistributionType}(IList{DistributionType}, IList{T}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="LogEvidenceRatio{ItemType, ItemType2,DistributionType}(IList{ItemType}, IList{T}, IList{IList{IList{int}}})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType2">The type of a sub-sub-array.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
@ -200,7 +200,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="AverageLogFactor{DistributionType}(IList{DistributionType}, IList{T})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="AverageLogFactor{ItemType, ItemType2,DistributionType}(IList{ItemType}, IList{T})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType2">The type of a sub-sub-array.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
@ -214,7 +214,7 @@ namespace Microsoft.ML.Probabilistic.Factors
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogEvidenceRatio{DistributionType}(IList{DistributionType}, IList{DistributionType}, IList{int}, IList{DistributionType})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="LogEvidenceRatio{ItemType, ItemType2,DistributionType}(IList{ItemType}, IList{DistributionType}, IList{IList{IList{int}}}, IList{ItemType})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType2">The type of a sub-sub-array.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
@ -260,7 +260,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return z;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="MarginalInit{ArrayType}(ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="MarginalInit{ArrayType}(ArrayType)"]/*'/>
/// <typeparam name="ArrayType">The type of an array for the marginal.</typeparam>
public static ArrayType MarginalInit<ArrayType>([SkipIfUniform] ArrayType array)
where ArrayType : ICloneable
@ -268,6 +268,9 @@ namespace Microsoft.ML.Probabilistic.Factors
return (ArrayType)array.Clone();
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="Marginal{ArrayType,DistributionType}(ArrayType,ArrayType,ArrayType)"]/*'/>
/// <typeparam name="ArrayType">The type of an array for the marginal.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
[SkipIfAllUniform("array", "to_array")]
[MultiplyAll]
public static ArrayType Marginal<ArrayType, DistributionType>(
@ -284,7 +287,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="MarginalIncrement{ArrayType, DistributionType}(ArrayType, DistributionType, DistributionType, IList{int}, int)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="MarginalIncrement{ArrayType, ItemType, ItemType2, DistributionType}(ArrayType, ItemType, ItemType, IList{IList{IList{int}}}, int)"]/*'/>
/// <typeparam name="ArrayType">The type of an array for the marginal.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType2">The type of a sub-sub-array.</typeparam>
@ -316,7 +319,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="ItemsAverageConditional{ArrayType, DistributionType}(DistributionType, ArrayType, ArrayType, IList{int}, int, DistributionType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="ItemsAverageConditional{ArrayType, ItemType, ItemType2, DistributionType}(ItemType, ArrayType, ArrayType, IList{IList{IList{int}}}, int, ItemType)"]/*'/>
/// <typeparam name="ArrayType">The type of an array for the marginal.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType2">The type of a sub-sub-array.</typeparam>
@ -353,7 +356,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="ArrayAverageConditional{DistributionType, ArrayType}(IList{DistributionType}, IList{int}, ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="ArrayAverageConditional{DistributionType, ArrayType}(IList{ItemType}, IList{IList{IList{int}}}, ArrayType)"]/*'/>
/// <typeparam name="ItemType2">The type of a sub-sub-array.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
@ -389,7 +392,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="ArrayAverageConditional{DistributionType, ArrayType}(IList{T}, IList{int}, ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="ArrayAverageConditional{DistributionType, ArrayType}(IList{IList{IList{T}}}, IList{IList{IList{int}}}, ArrayType)"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ArrayType">The type of the resulting array.</typeparam>
public static ArrayType ArrayAverageConditional<DistributionType, ArrayType>(
@ -423,7 +426,7 @@ namespace Microsoft.ML.Probabilistic.Factors
//-- VMP -------------------------------------------------------------------------------------------------------------
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="ItemsAverageLogarithm{DistributionType}(IList{DistributionType}, IList{int}, int, DistributionType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="ItemsAverageLogarithm{ItemType, ItemType2, DistributionType}(IList{DistributionType}, IList{IList{IList{int}}}, int, ItemType)"]/*'/>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
/// <typeparam name="ItemType2">The type of a sub-sub-array.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
@ -451,7 +454,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="ArrayAverageLogarithm{DistributionType, ArrayType}(IList{DistributionType}, IList{int}, ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="ArrayAverageLogarithm{ItemType, ItemType2, DistributionType, ArrayType}(IList{ItemType}, IList{IList{IList{int}}}, ArrayType)"]/*'/>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
/// <typeparam name="ItemType2">The type of a sub-sub-array.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
@ -466,7 +469,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return ArrayAverageConditional<ItemType, ItemType2, DistributionType, ArrayType>(items, indices, result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="ArrayAverageLogarithm{DistributionType, ArrayType}(IList{T}, IList{int}, ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsOp{T}"]/message_doc[@name="ArrayAverageLogarithm{DistributionType, ArrayType}(IList{IList{IList{T}}}, IList{IList{IList{int}}}, ArrayType)"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ArrayType">The type of the resulting array.</typeparam>
public static ArrayType ArrayAverageLogarithm<DistributionType, ArrayType>(IList<IList<IList<T>>> items, IList<IList<IList<int>>> indices, ArrayType result)

Просмотреть файл

@ -231,7 +231,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return (ArrayType)array.Clone();
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetItemsOp{T}"]/message_doc[@name="Marginal{ArrayType, DistributionType}(ArrayType, IList{DistributionType}, IList{int}, ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetItemsOp{T}"]/message_doc[@name="Marginal2{ArrayType, DistributionType}(ArrayType, IList{DistributionType}, IList{int}, ArrayType)"]/*'/>
/// <typeparam name="ArrayType">The type of an array for the marginal.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
[SkipIfAllUniform("array", "items")]
@ -251,6 +251,9 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetItemsOp{T}"]/message_doc[@name="Marginal{ArrayType, DistributionType}(ArrayType, ArrayType, ArrayType)"]/*'/>
/// <typeparam name="ArrayType">The type of an array for the marginal.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
[SkipIfAllUniform("array", "to_array")]
[MultiplyAll]
public static ArrayType Marginal<ArrayType, DistributionType>(

Просмотреть файл

@ -15,7 +15,7 @@ namespace Microsoft.ML.Probabilistic.Factors
[Quality(QualityBand.Experimental)]
public static class GetItemsPointOp<T>
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetItemsPointOp{T}"]/message_doc[@name="ItemsAverageConditional{ArrayType, DistributionType}(DistributionType, ArrayType, ArrayType, IList{int}, int, DistributionType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetItemsPointOp{T}"]/message_doc[@name="ItemsAverageConditional{DistributionType}(IList{DistributionType}, IList{int}, int, DistributionType)"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
public static DistributionType ItemsAverageConditional<DistributionType>(
[SkipIfAllUniform] IList<DistributionType> array,
@ -37,7 +37,7 @@ namespace Microsoft.ML.Probabilistic.Factors
[Quality(QualityBand.Experimental)]
public static class GetItemsFromJaggedPointOp<T>
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetItemsFromJaggedPointOp{T}"]/message_doc[@name="ItemsAverageConditional{ArrayType, DistributionType}(DistributionType, ArrayType, ArrayType, IList{int}, int, DistributionType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetItemsFromJaggedPointOp{T}"]/message_doc[@name="ItemsAverageConditional{DistributionArrayType, DistributionType}(IList{DistributionArrayType}, IList{int}, IList{int}, int, DistributionType)"]/*'/>
/// <typeparam name="DistributionArrayType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over inner array elements.</typeparam>
public static DistributionType ItemsAverageConditional<DistributionArrayType, DistributionType>(
@ -62,7 +62,7 @@ namespace Microsoft.ML.Probabilistic.Factors
[Quality(QualityBand.Experimental)]
public static class GetItemsFromDeepJaggedPointOp<T>
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetItemsFromDeepJaggedPointOp{T}"]/message_doc[@name="ItemsAverageConditional{ArrayType, DistributionType}(DistributionType, ArrayType, ArrayType, IList{int}, int, DistributionType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetItemsFromDeepJaggedPointOp{T}"]/message_doc[@name="ItemsAverageConditional{DistributionArrayArrayType, DistributionArrayType, DistributionType}(IList{DistributionArrayArrayType}, IList{int}, IList{int}, IList{int}, int, DistributionType)"]/*'/>
/// <typeparam name="DistributionArrayArrayType">The type of a distribution over depth 1 array elements.</typeparam>
/// <typeparam name="DistributionArrayType">The type of a distribution over depth 2 array elements.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over depth 3 array elements.</typeparam>
@ -84,11 +84,13 @@ namespace Microsoft.ML.Probabilistic.Factors
}
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsPointOp{T}"]/doc/*'/>
/// <typeparam name="T">The type of an item.</typeparam>
[FactorMethod(typeof(Factor), "GetJaggedItems<>", Default = false)]
[Quality(QualityBand.Experimental)]
public static class GetJaggedItemsPointOp<T>
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsPointOp{T}"]/message_doc[@name="ItemsAverageConditional{ItemType, DistributionType}(DistributionType, ArrayType, ArrayType, IList{int}, int, DistributionType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsPointOp{T}"]/message_doc[@name="ItemsAverageConditional{ItemType, DistributionType}(IList{DistributionType}, IList{IList{int}}, int, ItemType)"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
public static ItemType ItemsAverageConditional<ItemType, DistributionType>(
@ -114,11 +116,13 @@ namespace Microsoft.ML.Probabilistic.Factors
}
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsPointOp{T}"]/doc/*'/>
/// <typeparam name="T">The type of an item.</typeparam>
[FactorMethod(typeof(Factor), "GetDeepJaggedItems<>", Default = false)]
[Quality(QualityBand.Experimental)]
public static class GetDeepJaggedItemsPointOp<T>
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsPointOp{T}"]/message_doc[@name="ItemsAverageConditional{ItemType, DistributionType}(DistributionType, ArrayType, ArrayType, IList{int}, int, DistributionType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetDeepJaggedItemsPointOp{T}"]/message_doc[@name="ItemsAverageConditional{ItemType, ItemType2, DistributionType}(IList{DistributionType}, IList{IList{IList{int}}}, int, ItemType)"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
/// <typeparam name="ItemType2">The type of a sub-sub-array.</typeparam>
@ -152,11 +156,13 @@ namespace Microsoft.ML.Probabilistic.Factors
}
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedPointOp{T}"]/doc/*'/>
/// <typeparam name="T">The type of an item.</typeparam>
[FactorMethod(typeof(Factor), "GetJaggedItemsFromJagged<>", Default = false)]
[Quality(QualityBand.Experimental)]
public static class GetJaggedItemsFromJaggedPointOp<T>
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsPointOp{T}"]/message_doc[@name="ItemsAverageConditional{ItemType, DistributionType}(DistributionType, ArrayType, ArrayType, IList{int}, int, DistributionType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedPointOp{T}"]/message_doc[@name="ItemsAverageConditional{ItemType, DistributionType}(IList{ItemType}, IList{IList{int}}, IList{IList{int}}, int, ItemType)"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
public static ItemType ItemsAverageConditional<ItemType, DistributionType>(

Просмотреть файл

@ -11,14 +11,14 @@ namespace Microsoft.ML.Probabilistic.Factors
using Attributes;
using Utilities;
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/doc/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/doc/*'/>
/// <typeparam name="T">The type of an item.</typeparam>
[FactorMethod(typeof(Factor), "GetJaggedItemsFromJagged<>", Default = true)]
[Quality(QualityBand.Mature)]
[Buffers("marginal")]
public static class GetJaggedItemsFromJaggedOp<T>
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogAverageFactor(IList{T}, IList{T}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="LogAverageFactor(IList{IList{T}}, IList{IList{T}}, IList{IList{int}})"]/*'/>
public static double LogAverageFactor(IList<IList<T>> items, IList<IList<T>> array, IList<IList<int>> indices, IList<IList<int>> indices2)
{
IEqualityComparer<T> equalityComparer = Utilities.Util.GetEqualityComparer<T>();
@ -37,19 +37,19 @@ namespace Microsoft.ML.Probabilistic.Factors
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogEvidenceRatio(IList{T}, IList{T}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="LogEvidenceRatio(IList{IList{T}}, IList{IList{T}}, IList{IList{int}}, IList{IList{int}})"]/*'/>
public static double LogEvidenceRatio(IList<IList<T>> items, IList<IList<T>> array, IList<IList<int>> indices, IList<IList<int>> indices2)
{
return LogAverageFactor(items, array, indices, indices2);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="AverageLogFactor(IList{T}, IList{T}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="AverageLogFactor(IList{IList{T}}, IList{IList{T}}, IList{IList{int}}, IList{IList{int}})"]/*'/>
public static double AverageLogFactor(IList<IList<T>> items, IList<IList<T>> array, IList<IList<int>> indices, IList<IList<int>> indices2)
{
return LogAverageFactor(items, array, indices, indices2);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogAverageFactor{DistributionType}(IList{DistributionType}, IList{DistributionType}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="LogAverageFactor{ItemType,DistributionType}(IList{ItemType}, IList{ItemType}, IList{IList{int}}, IList{IList{int}})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
public static double LogAverageFactor<ItemType, DistributionType>(IList<ItemType> items, IList<ItemType> array, IList<IList<int>> indices, IList<IList<int>> indices2)
@ -83,7 +83,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return z;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="AverageLogFactor{DistributionType}(IList{DistributionType}, IList{DistributionType})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="AverageLogFactor{ItemType,DistributionType}(IList{ItemType}, IList{ItemType})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
[Skip]
@ -94,7 +94,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogAverageFactor{DistributionType}(IList{T}, IList{DistributionType}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="LogAverageFactor{DistributionType}(IList{IList{T}}, IList{ItemType}, IList{IList{int}}, IList{IList{int}})"]/*'/>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
public static double LogAverageFactor<ItemType, DistributionType>(IList<IList<T>> items, IList<ItemType> array, IList<IList<int>> indices, IList<IList<int>> indices2)
@ -128,7 +128,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return z;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogEvidenceRatio{DistributionType}(IList{T}, IList{DistributionType}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="LogEvidenceRatio{ItemType,DistributionType}(IList{IList{T}}, IList{ItemType}, IList{IList{int}}, IList{IList{int}})"]/*'/>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
public static double LogEvidenceRatio<ItemType, DistributionType>(IList<IList<T>> items, IList<ItemType> array, IList<IList<int>> indices, IList<IList<int>> indices2)
@ -138,7 +138,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return LogAverageFactor<ItemType, DistributionType>(items, array, indices, indices2);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="AverageLogFactor{DistributionType}(IList{T}, IList{DistributionType})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="AverageLogFactor{ItemType,DistributionType}(IList{IList{T}}, IList{ItemType})"]/*'/>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
[Skip]
@ -149,7 +149,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogAverageFactor{DistributionType}(IList{DistributionType}, IList{T}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="LogAverageFactor{ItemType,DistributionType}(IList{ItemType}, IList{IList{T}}, IList{IList{int}}, IList{IList{int}})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
public static double LogAverageFactor<ItemType, DistributionType>(IList<ItemType> items, IList<IList<T>> array, IList<IList<int>> indices, IList<IList<int>> indices2)
@ -171,7 +171,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return z;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogEvidenceRatio{DistributionType}(IList{DistributionType}, IList{T}, IList{int})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="LogEvidenceRatio{ItemType,DistributionType}(IList{ItemType}, IList{IList{T}}, IList{IList{int}}, IList{IList{int}})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
[Skip]
@ -182,7 +182,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="AverageLogFactor{DistributionType}(IList{DistributionType}, IList{T})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="AverageLogFactor{ItemType,DistributionType}(IList{ItemType}, IList{IList{T}})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
[Skip]
@ -194,7 +194,7 @@ namespace Microsoft.ML.Probabilistic.Factors
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="LogEvidenceRatio{DistributionType}(IList{DistributionType}, IList{DistributionType}, IList{int}, IList{DistributionType})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="LogEvidenceRatio{ItemType,DistributionType}(IList{ItemType}, IList{ItemType}, IList{IList{int}}, IList{IList{int}}, IList{ItemType})"]/*'/>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
public static double LogEvidenceRatio<ItemType, DistributionType>(
@ -234,7 +234,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return z;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="MarginalInit{ArrayType}(ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="MarginalInit{ArrayType}(ArrayType)"]/*'/>
/// <typeparam name="ArrayType">The type of an array for the marginal.</typeparam>
public static ArrayType MarginalInit<ArrayType>([SkipIfUniform] ArrayType array)
where ArrayType : ICloneable
@ -242,6 +242,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return (ArrayType)array.Clone();
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="Marginal{ArrayType,DistributionType}(ArrayType,ArrayType,ArrayType)"]/*'/>
[SkipIfAllUniform("array", "to_array")]
[MultiplyAll]
public static ArrayType Marginal<ArrayType, DistributionType>(
@ -258,7 +259,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="MarginalIncrement{ArrayType, DistributionType}(ArrayType, DistributionType, DistributionType, IList{int}, int)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="MarginalIncrement{ArrayType, ItemType, DistributionType}(ArrayType, ItemType, ItemType, IList{IList{int}}, IList{IList{int}}, int)"]/*'/>
/// <typeparam name="ArrayType">The type of an array for the marginal.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
@ -283,7 +284,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="ItemsAverageConditional{ArrayType, DistributionType}(DistributionType, ArrayType, ArrayType, IList{int}, int, DistributionType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="ItemsAverageConditional{ArrayType, ItemType, DistributionType}(ItemType, ArrayType, ArrayType, IList{IList{int}}, IList{IList{int}}, int, ItemType)"]/*'/>
/// <typeparam name="ArrayType">The type of an array for the marginal.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
@ -313,7 +314,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="ArrayAverageConditional{DistributionType, ArrayType}(IList{DistributionType}, IList{int}, ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="ArrayAverageConditional{ItemType, DistributionType, ArrayType}(IList{ItemType}, IList{IList{int}}, IList{IList{int}}, ArrayType)"]/*'/>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ArrayType">The type of the resulting array.</typeparam>
@ -343,7 +344,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="ArrayAverageConditional{DistributionType, ArrayType}(IList{T}, IList{int}, ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="ArrayAverageConditional{ItemType, DistributionType, ArrayType}(IList{IList{T}}, IList{IList{int}}, IList{IList{int}}, ArrayType)"]/*'/>
/// <typeparam name="ArrayType">The type of the resulting array.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
@ -375,7 +376,7 @@ namespace Microsoft.ML.Probabilistic.Factors
//-- VMP -------------------------------------------------------------------------------------------------------------
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="ItemsAverageLogarithm{DistributionType}(IList{DistributionType}, IList{int}, int, DistributionType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="ItemsAverageLogarithm{ItemType, DistributionType}(IList{ItemType}, IList{IList{int}}, IList{IList{int}}, int, ItemType)"]/*'/>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
public static ItemType ItemsAverageLogarithm<ItemType, DistributionType>(
@ -396,7 +397,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="ArrayAverageLogarithm{DistributionType, ArrayType}(IList{DistributionType}, IList{int}, ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="ArrayAverageLogarithm{ItemType, DistributionType, ArrayType}(IList{ItemType}, IList{IList{int}},IList{IList{int}}, ArrayType)"]/*'/>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ArrayType">The type of the resulting array.</typeparam>
@ -409,7 +410,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return ArrayAverageConditional<ItemType, DistributionType, ArrayType>(items, indices, indices2, result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsOp{T}"]/message_doc[@name="ArrayAverageLogarithm{DistributionType, ArrayType}(IList{T}, IList{int}, ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GetJaggedItemsFromJaggedOp{T}"]/message_doc[@name="ArrayAverageLogarithm{ItemType, DistributionType, ArrayType}(IList{IList{T}}, IList{IList{int}}, IList{IList{int}}, ArrayType)"]/*'/>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ArrayType">The type of the resulting array.</typeparam>

Просмотреть файл

@ -152,7 +152,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return Buffers;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="IndexOfMaximumStochasticOp"]/message_doc[@name="listAverageConditional{GaussianList}(IndexOfMaximumBuffer[], GaussianList, Discrete, GaussianList)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="IndexOfMaximumStochasticOp"]/message_doc[@name="ListAverageConditional{GaussianList}(IndexOfMaximumBuffer[], GaussianList, Discrete, GaussianList)"]/*'/>
/// <typeparam name="GaussianList">The type of an incoming message from <c>list</c>.</typeparam>
public static GaussianList ListAverageConditional<GaussianList>(
[SkipIfUniform] IndexOfMaximumBuffer[] Buffers, GaussianList list, [SkipIfUniform] Discrete IndexOfMaximumDouble, GaussianList result)

Просмотреть файл

@ -72,18 +72,19 @@ namespace Microsoft.ML.Probabilistic.Factors
}
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/doc/*'/>
public class InnerProductOpBase
{
private const string NotSupportedMessage = "Variational Message Passing does not support an InnerProduct factor with fixed output.";
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AAverageLogarithm(double, VectorGaussian, VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="AAverageLogarithm(double, VectorGaussian, VectorGaussian)"]/*'/>
[NotSupported(InnerProductOp.NotSupportedMessage)]
public static VectorGaussian AAverageLogarithm(double innerProduct, [SkipIfUniform] VectorGaussian B, VectorGaussian result)
{
throw new NotSupportedException(InnerProductOp.NotSupportedMessage);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BAverageLogarithm(double, VectorGaussian, VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="BAverageLogarithm(double, VectorGaussian, VectorGaussian)"]/*'/>
[NotSupported(InnerProductOp.NotSupportedMessage)]
public static VectorGaussian BAverageLogarithm(double innerProduct, [SkipIfUniform] VectorGaussian A, VectorGaussian result)
{
@ -92,7 +93,7 @@ namespace Microsoft.ML.Probabilistic.Factors
private const string LowRankNotSupportedMessage = "A InnerProduct factor with fixed output is not yet implemented.";
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AAverageConditional(double, Vector, VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="AAverageConditional(double, Vector, VectorGaussian)"]/*'/>
[NotSupported(InnerProductOp.LowRankNotSupportedMessage)]
public static VectorGaussian AAverageConditional(double innerProduct, Vector B, VectorGaussian result)
{
@ -115,21 +116,21 @@ namespace Microsoft.ML.Probabilistic.Factors
else throw new NotImplementedException(LowRankNotSupportedMessage);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BAverageConditional(double, Vector, VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="BAverageConditional(double, Vector, VectorGaussian)"]/*'/>
[NotSupported(InnerProductOp.LowRankNotSupportedMessage)]
public static VectorGaussian BAverageConditional(double innerProduct, Vector A, VectorGaussian result)
{
return AAverageConditional(innerProduct, A, result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AAverageLogarithm(double, Vector, VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="AAverageLogarithm(double, Vector, VectorGaussian)"]/*'/>
[NotSupported(InnerProductOp.LowRankNotSupportedMessage)]
public static VectorGaussian AAverageLogarithm(double innerProduct, Vector B, VectorGaussian result)
{
return AAverageConditional(innerProduct, B, result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BAverageLogarithm(double, Vector, VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="BAverageLogarithm(double, Vector, VectorGaussian)"]/*'/>
[NotSupported(InnerProductOp.LowRankNotSupportedMessage)]
public static VectorGaussian BAverageLogarithm(double innerProduct, Vector A, VectorGaussian result)
{
@ -138,14 +139,14 @@ namespace Microsoft.ML.Probabilistic.Factors
//-- VMP ---------------------------------------------------------------------------------------------
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AverageLogFactor()"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="AverageLogFactor()"]/*'/>
[Skip]
public static double AverageLogFactor()
{
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageLogarithm(Vector, PositiveDefiniteMatrix, Vector, PositiveDefiniteMatrix)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="InnerProductAverageLogarithm(DenseVector, PositiveDefiniteMatrix, DenseVector, PositiveDefiniteMatrix)"]/*'/>
public static Gaussian InnerProductAverageLogarithm(DenseVector AMean, PositiveDefiniteMatrix AVariance, DenseVector BMean, PositiveDefiniteMatrix BVariance)
{
Gaussian result = new Gaussian();
@ -156,7 +157,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageLogarithm(Vector, Vector, PositiveDefiniteMatrix)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="InnerProductAverageLogarithm(Vector, DenseVector, PositiveDefiniteMatrix)"]/*'/>
public static Gaussian InnerProductAverageLogarithm(Vector A, DenseVector BMean, PositiveDefiniteMatrix BVariance)
{
Gaussian result = new Gaussian();
@ -167,76 +168,76 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageLogarithmInit()"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="InnerProductAverageLogarithmInit()"]/*'/>
[Skip]
public static Gaussian InnerProductAverageLogarithmInit()
{
return new Gaussian();
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageLogarithm(Vector, PositiveDefiniteMatrix, Vector)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="InnerProductAverageLogarithm(DenseVector, PositiveDefiniteMatrix, Vector)"]/*'/>
public static Gaussian InnerProductAverageLogarithm(DenseVector AMean, PositiveDefiniteMatrix AVariance, Vector B)
{
return InnerProductAverageLogarithm(B, AMean, AVariance);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BVarianceInit(VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="BVarianceInit(VectorGaussian)"]/*'/>
[Skip]
public static PositiveDefiniteMatrix BVarianceInit([IgnoreDependency] VectorGaussian B)
{
return new PositiveDefiniteMatrix(B.Dimension, B.Dimension);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BVariance(VectorGaussian, PositiveDefiniteMatrix)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="BVariance(VectorGaussian, PositiveDefiniteMatrix)"]/*'/>
[Fresh]
public static PositiveDefiniteMatrix BVariance([Proper] VectorGaussian B, PositiveDefiniteMatrix result)
{
return B.GetVariance(result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BMeanInit(VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="BMeanInit(VectorGaussian)"]/*'/>
[Skip]
public static DenseVector BMeanInit([IgnoreDependency] VectorGaussian B)
{
return DenseVector.Zero(B.Dimension);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BMean(VectorGaussian, PositiveDefiniteMatrix, Vector)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="BMean(VectorGaussian, PositiveDefiniteMatrix, DenseVector)"]/*'/>
[Fresh]
public static DenseVector BMean([Proper] VectorGaussian B, PositiveDefiniteMatrix BVariance, DenseVector result)
{
return (DenseVector)B.GetMean(result, BVariance);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AVarianceInit(VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="AVarianceInit(VectorGaussian)"]/*'/>
[Skip]
public static PositiveDefiniteMatrix AVarianceInit([IgnoreDependency] VectorGaussian A)
{
return new PositiveDefiniteMatrix(A.Dimension, A.Dimension);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AVariance(VectorGaussian, PositiveDefiniteMatrix)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="AVariance(VectorGaussian, PositiveDefiniteMatrix)"]/*'/>
[Fresh]
public static PositiveDefiniteMatrix AVariance([Proper] VectorGaussian A, PositiveDefiniteMatrix result)
{
return A.GetVariance(result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AMeanInit(VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="AMeanInit(VectorGaussian)"]/*'/>
[Skip]
public static DenseVector AMeanInit([IgnoreDependency] VectorGaussian A)
{
return DenseVector.Zero(A.Dimension);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AMean(VectorGaussian, PositiveDefiniteMatrix, Vector)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="AMean(VectorGaussian, PositiveDefiniteMatrix, DenseVector)"]/*'/>
[Fresh]
public static DenseVector AMean([Proper] VectorGaussian A, PositiveDefiniteMatrix AVariance, DenseVector result)
{
return (DenseVector)A.GetMean(result, AVariance);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AAverageLogarithm(Gaussian, VectorGaussian, Vector, PositiveDefiniteMatrix, VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="AAverageLogarithm(Gaussian, VectorGaussian, DenseVector, PositiveDefiniteMatrix, VectorGaussian)"]/*'/>
public static VectorGaussian AAverageLogarithm(
[SkipIfUniform] Gaussian innerProduct, [SkipIfUniform] VectorGaussian B, DenseVector BMean, PositiveDefiniteMatrix BVariance, VectorGaussian result)
{
@ -255,7 +256,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AAverageLogarithm(Gaussian, Vector, VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="AAverageLogarithm(Gaussian, Vector, VectorGaussian)"]/*'/>
public static VectorGaussian AAverageLogarithm([SkipIfUniform] Gaussian innerProduct, Vector B, VectorGaussian result)
{
if (innerProduct.IsPointMass)
@ -268,14 +269,14 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BAverageLogarithm(Gaussian, VectorGaussian, Vector, PositiveDefiniteMatrix, VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="BAverageLogarithm(Gaussian, VectorGaussian, DenseVector, PositiveDefiniteMatrix, VectorGaussian)"]/*'/>
public static VectorGaussian BAverageLogarithm(
[SkipIfUniform] Gaussian innerProduct, [SkipIfUniform] VectorGaussian A, DenseVector AMean, PositiveDefiniteMatrix AVariance, VectorGaussian result)
{
return AAverageLogarithm(innerProduct, A, AMean, AVariance, result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BAverageLogarithm(Gaussian, Vector, VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="BAverageLogarithm(Gaussian, Vector, VectorGaussian)"]/*'/>
public static VectorGaussian BAverageLogarithm([SkipIfUniform] Gaussian innerProduct, Vector A, VectorGaussian result)
{
return AAverageLogarithm(innerProduct, A, result);
@ -283,26 +284,26 @@ namespace Microsoft.ML.Probabilistic.Factors
// ----------------------- AverageConditional ------------------------------
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageConditional(Vector, Vector, PositiveDefiniteMatrix)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="InnerProductAverageConditional(Vector, DenseVector, PositiveDefiniteMatrix)"]/*'/>
public static Gaussian InnerProductAverageConditional(Vector A, DenseVector BMean, PositiveDefiniteMatrix BVariance)
{
return InnerProductAverageLogarithm(A, BMean, BVariance);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageConditionalInit()"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="InnerProductAverageConditionalInit()"]/*'/>
[Skip]
public static Gaussian InnerProductAverageConditionalInit()
{
return new Gaussian();
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageConditional(Vector, PositiveDefiniteMatrix, Vector)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="InnerProductAverageConditional(DenseVector, PositiveDefiniteMatrix, Vector)"]/*'/>
public static Gaussian InnerProductAverageConditional(DenseVector AMean, PositiveDefiniteMatrix AVariance, Vector B)
{
return InnerProductAverageConditional(B, AMean, AVariance);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AAverageConditional(Gaussian, Vector, VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="AAverageConditional(Gaussian, Vector, VectorGaussian)"]/*'/>
public static VectorGaussian AAverageConditional([SkipIfUniform] Gaussian innerProduct, Vector B, VectorGaussian result)
{
if (innerProduct.IsPointMass)
@ -316,6 +317,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="AAverageConditional(Gaussian, Vector, DenseVector, PositiveDefiniteMatrix, VectorGaussian)"]/*'/>
public static VectorGaussian AAverageConditional([SkipIfUniform] Gaussian innerProduct, Vector A, DenseVector BMean, PositiveDefiniteMatrix BVariance, VectorGaussian result)
{
if (innerProduct.IsUniform())
@ -342,63 +344,65 @@ namespace Microsoft.ML.Probabilistic.Factors
return VectorGaussian.FromDerivatives(A, dlogZ, negativeHessian, GaussianProductOp.ForceProper);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BAverageConditional(Gaussian, Vector, VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="BAverageConditional(Gaussian, Vector, VectorGaussian)"]/*'/>
public static VectorGaussian BAverageConditional([SkipIfUniform] Gaussian innerProduct, Vector A, VectorGaussian result)
{
return AAverageConditional(innerProduct, A, result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogEvidenceRatio(Gaussian, Vector, VectorGaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="LogEvidenceRatio(Gaussian, Vector, VectorGaussian)"]/*'/>
[Skip]
public static double LogEvidenceRatio(Gaussian innerProduct, Vector A, VectorGaussian b)
{
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogEvidenceRatio(double, Vector, Vector, PositiveDefiniteMatrix)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="LogEvidenceRatio(double, Vector, DenseVector, PositiveDefiniteMatrix)"]/*'/>
public static double LogEvidenceRatio(double innerProduct, Vector A, DenseVector BMean, PositiveDefiniteMatrix BVariance)
{
return LogAverageFactor(innerProduct, A, BMean, BVariance);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogEvidenceRatio(Gaussian, VectorGaussian, Vector)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="LogEvidenceRatio(Gaussian, VectorGaussian, Vector)"]/*'/>
[Skip]
public static double LogEvidenceRatio(Gaussian innerProduct, VectorGaussian a, Vector B)
{
return LogEvidenceRatio(innerProduct, B, a);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogEvidenceRatio(double, Vector, PositiveDefiniteMatrix, Vector)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="LogEvidenceRatio(double, DenseVector, PositiveDefiniteMatrix, Vector)"]/*'/>
public static double LogEvidenceRatio(double innerProduct, DenseVector AMean, PositiveDefiniteMatrix AVariance, Vector B)
{
return LogEvidenceRatio(innerProduct, B, AMean, AVariance);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogAverageFactor(Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="LogAverageFactor(Gaussian, Gaussian)"]/*'/>
public static double LogAverageFactor(Gaussian innerProduct, [Fresh] Gaussian to_innerProduct)
{
return to_innerProduct.GetLogAverageOf(innerProduct);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogAverageFactor(double, Vector, Vector, PositiveDefiniteMatrix)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="LogAverageFactor(double, Vector, DenseVector, PositiveDefiniteMatrix)"]/*'/>
public static double LogAverageFactor(double innerProduct, Vector A, DenseVector BMean, PositiveDefiniteMatrix BVariance)
{
Gaussian to_innerProduct = InnerProductAverageConditional(A, BMean, BVariance);
return to_innerProduct.GetLogProb(innerProduct);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogAverageFactor(double, Vector, PositiveDefiniteMatrix, Vector)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="LogAverageFactor(double, DenseVector, PositiveDefiniteMatrix, Vector)"]/*'/>
public static double LogAverageFactor(double innerProduct, DenseVector AMean, PositiveDefiniteMatrix AVariance, Vector B)
{
return LogAverageFactor(innerProduct, B, AMean, AVariance);
}
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp_PointB"]/doc/*'/>
[FactorMethod(typeof(Vector), "InnerProduct", Default = false)]
[Buffers("AVariance", "AMean")]
[Quality(QualityBand.Experimental)]
public class InnerProductOp_PointB : InnerProductOpBase
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp_PointB"]/message_doc[@name="InnerProductAverageConditional(DenseVector, PositiveDefiniteMatrix, VectorGaussian)"]/*'/>
public static Gaussian InnerProductAverageConditional(DenseVector AMean, PositiveDefiniteMatrix AVariance, [SkipIfUniform] VectorGaussian B)
{
if (!B.IsPointMass)
@ -406,6 +410,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return InnerProductOp.InnerProductAverageConditional(AMean, AVariance, B.Point);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp_PointB"]/message_doc[@name="AAverageConditional(Gaussian, VectorGaussian, VectorGaussian)"]/*'/>
public static VectorGaussian AAverageConditional([SkipIfUniform] Gaussian innerProduct, [SkipIfUniform] VectorGaussian B, VectorGaussian result)
{
if (!B.IsPointMass)
@ -413,6 +418,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return InnerProductOp.AAverageConditional(innerProduct, B.Point, result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp_PointB"]/message_doc[@name="BAverageConditional(Gaussian, DenseVector, PositiveDefiniteMatrix, VectorGaussian)"]/*'/>
public static VectorGaussian BAverageConditional([SkipIfUniform] Gaussian innerProduct, DenseVector AMean, PositiveDefiniteMatrix AVariance, VectorGaussian B, VectorGaussian result)
{
if (!B.IsPointMass)

Просмотреть файл

@ -10,21 +10,21 @@ namespace Microsoft.ML.Probabilistic.Factors
using Microsoft.ML.Probabilistic.Math;
using Microsoft.ML.Probabilistic.Factors.Attributes;
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/doc/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/doc/*'/>
[FactorMethod(typeof(Factor), "InnerProduct", typeof(double[]), typeof(double[]))]
[Quality(QualityBand.Experimental)]
public static class InnerProductArrayOp
{
//-- VMP -------------------------------------------------------------------------------------------------------------
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VectorMultiplyOp"]/message_doc[@name="AverageLogFactor()"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="AverageLogFactor()"]/*'/>
[Skip]
public static double AverageLogFactor()
{
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageLogarithm(DistributionArray2D{Gaussian}, DistributionArray2D{Gaussian}, DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="InnerProductAverageLogarithm(IList{Gaussian}, IList{Gaussian}, Gaussian)"]/*'/>
public static Gaussian InnerProductAverageLogarithm(
[SkipIfUniform] IList<Gaussian> A, [SkipIfUniform] IList<Gaussian> B, Gaussian result)
{
@ -54,40 +54,40 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageLogarithmInit(DistributionArray2D{Gaussian}, DistributionArray2D{Gaussian})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="InnerProductAverageLogarithmInit(IList{Gaussian}, IList{Gaussian})"]/*'/>
[Skip]
public static Gaussian InnerProductAverageLogarithmInit([IgnoreDependency] IList<Gaussian> A, [IgnoreDependency] IList<Gaussian> B)
{
return new Gaussian();
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageLogarithmInit(double[,], DistributionArray2D{Gaussian})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="InnerProductAverageLogarithmInit(double[], IList{Gaussian})"]/*'/>
[Skip]
public static Gaussian InnerProductAverageLogarithmInit([IgnoreDependency] double[] A, [IgnoreDependency] IList<Gaussian> B)
{
return new Gaussian();
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageLogarithmInit(DistributionArray2D{Gaussian}, double[,])"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="InnerProductAverageLogarithmInit(IList{Gaussian}, double[])"]/*'/>
[Skip]
public static Gaussian InnerProductAverageLogarithmInit([IgnoreDependency] IList<Gaussian> A, [IgnoreDependency] double[] B)
{
return new Gaussian();
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageLogarithm(double[,], DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="InnerProductAverageLogarithm(double[], IList{Gaussian})"]/*'/>
public static Gaussian InnerProductAverageLogarithm(double[] A, [SkipIfUniform] IList<Gaussian> B)
{
return InnerProductAverageConditional(A, B);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageLogarithm(DistributionStructArray2D{Gaussian, double}, double[,], DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="InnerProductAverageLogarithm(IList{Gaussian}, double[])"]/*'/>
public static Gaussian InnerProductAverageLogarithm([SkipIfUniform] IList<Gaussian> A, double[] B)
{
return InnerProductAverageConditional(A, B);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AAverageLogarithm(DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="AAverageLogarithm{GaussianList}(Gaussian, IList{Gaussian}, IList{Gaussian}, GaussianList)"]/*'/>
public static GaussianList AAverageLogarithm<GaussianList>(
[SkipIfUniform] Gaussian innerProduct, [Stochastic] IList<Gaussian> A, [SkipIfUniform] IList<Gaussian> B, GaussianList to_A)
where GaussianList : IList<Gaussian>
@ -135,7 +135,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AAverageLogarithm(DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double}, double[,], DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="AAverageLogarithm{GaussianList}(Gaussian, IList{Gaussian}, double[], GaussianList)"]/*'/>
public static GaussianList AAverageLogarithm<GaussianList>(
[SkipIfUniform] Gaussian innerProduct, [Proper, Stochastic] IList<Gaussian> A, double[] B, GaussianList to_A)
where GaussianList : IList<Gaussian>
@ -181,7 +181,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BAverageLogarithm(DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="BAverageLogarithm{GaussianList}(Gaussian, IList{Gaussian}, IList{Gaussian}, GaussianList)"]/*'/>
public static GaussianList BAverageLogarithm<GaussianList>(
[SkipIfUniform] Gaussian innerProduct, [SkipIfUniform] IList<Gaussian> A, [Proper, Stochastic] IList<Gaussian> B, GaussianList to_B)
where GaussianList : IList<Gaussian>
@ -189,7 +189,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return AAverageLogarithm(innerProduct, B, A, to_B);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BAverageLogarithm(DistributionStructArray2D{Gaussian, double}, double[,], DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="BAverageLogarithm{GaussianList}(Gaussian, double[], IList{Gaussian}, GaussianList)"]/*'/>
public static GaussianList BAverageLogarithm<GaussianList>(
[SkipIfUniform] Gaussian innerProduct, double[] A, [Proper, Stochastic] IList<Gaussian> B, GaussianList to_B)
where GaussianList : IList<Gaussian>
@ -199,7 +199,7 @@ namespace Microsoft.ML.Probabilistic.Factors
private const string NotSupportedMessage = "Variational Message Passing does not support a InnerProduct factor with fixed output.";
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AAverageLogarithm(double[,])"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="AAverageLogarithm{GaussianList}(double)"]/*'/>
[NotSupported(InnerProductArrayOp.NotSupportedMessage)]
public static GaussianList AAverageLogarithm<GaussianList>(double innerProduct)
where GaussianList : IList<Gaussian>
@ -207,7 +207,7 @@ namespace Microsoft.ML.Probabilistic.Factors
throw new NotSupportedException(InnerProductArrayOp.NotSupportedMessage);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BAverageLogarithm(double[,])"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="BAverageLogarithm(double)"]/*'/>
[NotSupported(InnerProductArrayOp.NotSupportedMessage)]
public static IList<Gaussian> BAverageLogarithm(double innerProduct)
{
@ -221,14 +221,14 @@ namespace Microsoft.ML.Probabilistic.Factors
private const string BothRandomNotSupportedMessage =
"A InnerProduct factor between two Gaussian arrays is not yet implemented for Expectation Propagation. Try using Variational Message Passing.";
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageConditional(DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="InnerProductAverageConditional(IList{Gaussian}, IList{Gaussian}, IList{Gaussian})"]/*'/>
[NotSupported(InnerProductArrayOp.BothRandomNotSupportedMessage)]
public static Gaussian InnerProductAverageConditional([SkipIfUniform] IList<Gaussian> A, [SkipIfUniform] IList<Gaussian> B, IList<Gaussian> result)
{
throw new NotSupportedException(InnerProductArrayOp.BothRandomNotSupportedMessage);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AAverageConditional(DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="AAverageConditional{GaussianList}(Gaussian, IList{Gaussian}, GaussianList)"]/*'/>
[NotSupported(InnerProductArrayOp.BothRandomNotSupportedMessage)]
public static GaussianList AAverageConditional<GaussianList>(Gaussian innerProduct, [SkipIfUniform] IList<Gaussian> B, GaussianList result)
where GaussianList : IList<Gaussian>
@ -236,7 +236,7 @@ namespace Microsoft.ML.Probabilistic.Factors
throw new NotSupportedException(InnerProductArrayOp.BothRandomNotSupportedMessage);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BAverageConditional(DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="BAverageConditional{GaussianList}(Gaussian, IList{Gaussian}, GaussianList)"]/*'/>
[NotSupported(InnerProductArrayOp.BothRandomNotSupportedMessage)]
public static GaussianList BAverageConditional<GaussianList>(Gaussian innerProduct, [SkipIfUniform] IList<Gaussian> A, GaussianList result)
where GaussianList : IList<Gaussian>
@ -244,7 +244,7 @@ namespace Microsoft.ML.Probabilistic.Factors
throw new NotSupportedException(InnerProductArrayOp.BothRandomNotSupportedMessage);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogAverageFactor(double[,], double[,], double[,])"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="LogAverageFactor(double, double[], double[])"]/*'/>
public static double LogAverageFactor(double innerProduct, double[] A, double[] B)
{
@ -260,74 +260,73 @@ namespace Microsoft.ML.Probabilistic.Factors
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogEvidenceRatio(double[,], double[,], double[,])"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="LogEvidenceRatio(double, double[], double[])"]/*'/>
public static double LogEvidenceRatio(double innerProduct, double[] A, double[] B)
{
return LogAverageFactor(innerProduct, A, B);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AverageLogFactor(double[,], double[,], double[,])"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="AverageLogFactor(double, double[], double[])"]/*'/>
public static double AverageLogFactor(double innerProduct, double[] A, double[] B)
{
return LogAverageFactor(innerProduct, A, B);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogAverageFactor(DistributionStructArray2D{Gaussian, double}, double[,], DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="LogAverageFactor(Gaussian, double[], IList{Gaussian})"]/*'/>
public static double LogAverageFactor(Gaussian innerProduct, double[] A, IList<Gaussian> B)
{
Gaussian to_innerProduct = InnerProductAverageConditional(A, B);
return to_innerProduct.GetLogAverageOf(innerProduct);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogAverageFactor(double[,], double[,], DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="LogAverageFactor(double, double[], IList{Gaussian})"]/*'/>
public static double LogAverageFactor(double innerProduct, double[] A, IList<Gaussian> B)
{
Gaussian to_innerProduct = InnerProductAverageConditional(A, B);
return to_innerProduct.GetLogProb(innerProduct);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogAverageFactor(DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double}, double[,])"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="LogAverageFactor(Gaussian, IList{Gaussian}, double[])"]/*'/>
public static double LogAverageFactor(Gaussian innerProduct, IList<Gaussian> A, double[] B)
{
Gaussian to_innerProduct = InnerProductAverageConditional(A, B);
return to_innerProduct.GetLogAverageOf(innerProduct);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogAverageFactor(double[,], DistributionStructArray2D{Gaussian, double}, double[,])"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="LogAverageFactor(double, IList{Gaussian}, double[])"]/*'/>
public static double LogAverageFactor(double innerProduct, IList<Gaussian> A, double[] B)
{
Gaussian to_innerProduct = InnerProductAverageConditional(A, B);
return to_innerProduct.GetLogProb(innerProduct);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogEvidenceRatio(DistributionStructArray2D{Gaussian, double}, double[,], DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="LogEvidenceRatio(Gaussian, double[], IList{Gaussian})"]/*'/>
[Skip]
public static double LogEvidenceRatio(Gaussian innerProduct, double[] A, IList<Gaussian> B)
{
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogEvidenceRatio(DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double}, double[,])"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="LogEvidenceRatio(Gaussian, IList{Gaussian}, double[])"]/*'/>
[Skip]
public static double LogEvidenceRatio(Gaussian innerProduct, IList<Gaussian> A, double[] B)
{
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogEvidenceRatio(double[,], double[,], DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="LogEvidenceRatio(double, double[], IList{Gaussian})"]/*'/>
public static double LogEvidenceRatio(double innerProduct, double[] A, IList<Gaussian> B)
{
return LogAverageFactor(innerProduct, A, B);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="LogEvidenceRatio(double[,], DistributionStructArray2D{Gaussian, double}, double[,])"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="LogEvidenceRatio(double, IList{Gaussian}, double[])"]/*'/>
public static double LogEvidenceRatio(double innerProduct, IList<Gaussian> A, double[] B)
{
return LogAverageFactor(innerProduct, A, B);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageConditional(double[,], DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="InnerProductAverageConditional(double[], IList{Gaussian})"]/*'/>
public static Gaussian InnerProductAverageConditional(double[] A, [SkipIfUniform] IList<Gaussian> B)
{
double xMean = 0, xVariance = 0,
@ -343,13 +342,13 @@ namespace Microsoft.ML.Probabilistic.Factors
return Gaussian.FromMeanAndVariance(xMean, xVariance);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="InnerProductAverageConditional(DistributionStructArray2D{Gaussian, double}, double[,], DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="InnerProductAverageConditional(IList{Gaussian}, double[])"]/*'/>
public static Gaussian InnerProductAverageConditional([SkipIfUniform] IList<Gaussian> A, double[] B)
{
return InnerProductAverageConditional(B, A);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AAverageConditional(DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double}, double[,], DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="AAverageConditional{GaussianList}(Gaussian, IList{Gaussian}, double[], Gaussian, GaussianList)"]/*'/>
public static GaussianList AAverageConditional<GaussianList>(
[SkipIfUniform] Gaussian innerProduct, IList<Gaussian> A, double[] B, [Fresh] Gaussian to_innerProduct, GaussianList result)
where GaussianList : IList<Gaussian>
@ -390,7 +389,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BAverageConditional(DistributionStructArray2D{Gaussian, double}, double[,], DistributionStructArray2D{Gaussian, double}, DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="BAverageConditional{GaussianList}(Gaussian, double[], IList{Gaussian}, Gaussian, GaussianList)"]/*'/>
public static GaussianList BAverageConditional<GaussianList>(
[SkipIfUniform] Gaussian innerProduct, double[] A, [SkipIfUniform] IList<Gaussian> B, Gaussian to_innerProduct, GaussianList result)
where GaussianList : IList<Gaussian>
@ -398,7 +397,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return AAverageConditional(innerProduct, B, A, to_innerProduct, result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="AAverageConditional(double[,], double[,], DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="AAverageConditional{GaussianList}(double, double[], GaussianList)"]/*'/>
[NotSupported(InnerProductArrayOp.LowRankNotSupportedMessage)]
public static GaussianList AAverageConditional<GaussianList>(double innerProduct, double[] B, GaussianList result)
where GaussianList : IList<Gaussian>
@ -406,9 +405,9 @@ namespace Microsoft.ML.Probabilistic.Factors
throw new NotImplementedException(InnerProductArrayOp.LowRankNotSupportedMessage);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOp"]/message_doc[@name="BAverageConditional(double[,], double[,], DistributionStructArray2D{Gaussian, double})"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="BAverageConditional{GaussianList}(double, double[], GaussianList)"]/*'/>
[NotSupported(InnerProductArrayOp.LowRankNotSupportedMessage)]
public static GaussianList BAverageConditional<GaussianList>(double innerProduct, double A, GaussianList result)
public static GaussianList BAverageConditional<GaussianList>(double innerProduct, double[] A, GaussianList result)
where GaussianList : IList<Gaussian>
{
throw new NotImplementedException(InnerProductArrayOp.LowRankNotSupportedMessage);

Просмотреть файл

@ -282,7 +282,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return (ArrayType)array.Clone();
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="JaggedSubarrayOp{T}"]/message_doc[@name="Marginal{ArrayType, DistributionType, Object, ItemType}(ArrayType, IList{ItemType}, IList{IList{int}}, ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="JaggedSubarrayOp{T}"]/message_doc[@name="Marginal{ArrayType, DistributionType, ItemType}(ArrayType, IList{ItemType}, IList{IList{int}}, ArrayType)"]/*'/>
/// <typeparam name="ArrayType">The type of a message from <c>array</c>.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
@ -810,7 +810,7 @@ namespace Microsoft.ML.Probabilistic.Factors
}
#endif
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="JaggedSubarrayWithMarginalOp{T}"]/message_doc[@name="Marginal{ArrayType, DistributionType, Object, ItemType}(ArrayType, IList{ItemType}, IList{IList{int}}, ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="JaggedSubarrayWithMarginalOp{T}"]/message_doc[@name="Marginal{ArrayType, DistributionType, ItemType}(ArrayType, IList{ItemType}, IList{IList{int}}, ArrayType)"]/*'/>
/// <typeparam name="ArrayType">The type of a message from <c>array</c>.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
@ -840,7 +840,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="JaggedSubarrayWithMarginalOp{T}"]/message_doc[@name="Marginal{ArrayType, DistributionType, Object, ItemType}(ArrayType, IList{ItemType}, IList{IList{int}}, ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="JaggedSubarrayWithMarginalOp{T}"]/message_doc[@name="Marginal{ArrayType, DistributionType, ItemArrayType, ItemType}(ArrayType, IList{ItemType}, IList{IList{int}}, ArrayType)"]/*'/>
/// <typeparam name="ArrayType">The type of a message from <c>array</c>.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemArrayType">The type of an incoming message from <c>items</c>.</typeparam>
@ -871,7 +871,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="JaggedSubarrayWithMarginalOp{T}"]/message_doc[@name="MarginalIncrement{ArrayType, DistributionType, ItemType}(ArrayType, ItemType, ItemType, IList{IList{int}}, int)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="JaggedSubarrayWithMarginalOp{T}"]/message_doc[@name="MarginalIncrementItems{ArrayType, DistributionType, ItemType}(ItemType, ItemType, IList{IList{int}}, int, ArrayType)"]/*'/>
/// <typeparam name="ArrayType">The type of the outgoing message.</typeparam>
/// <typeparam name="DistributionType">The type of a distribution over array elements.</typeparam>
/// <typeparam name="ItemType">The type of a sub-array.</typeparam>
@ -928,7 +928,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="JaggedSubarrayWithMarginalOp{T}"]/message_doc[@name="ArrayAverageConditional{DistributionType, ArrayType, ItemType}(IList{ItemType}, IList{IList{int}}, ArrayType)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="JaggedSubarrayWithMarginalOp{T}"]/message_doc[@name="ArrayAverageConditional{ArrayType}(ArrayType, ArrayType, ArrayType)"]/*'/>
/// <typeparam name="ArrayType">The type of the outgoing message.</typeparam>
public static ArrayType ArrayAverageConditional<ArrayType>(
[Cancels] ArrayType array,
@ -940,6 +940,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="JaggedSubarrayWithMarginalOp{T}"]/message_doc[@name="MarginalIncrementArray{ArrayType}(ArrayType, ArrayType, ArrayType)"]/*'/>
/// <typeparam name="ArrayType">The type of the outgoing message.</typeparam>
public static ArrayType MarginalIncrementArray<ArrayType>(
[SkipIfUniform] ArrayType array, // SkipIfUniform on 'array' causes this line to be pruned when the incoming message isn't changing
@ -978,6 +979,8 @@ namespace Microsoft.ML.Probabilistic.Factors
//-- VMP -------------------------------------------------------------------------------------------------------------
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="JaggedSubarrayWithMarginalOp{T}"]/message_doc[@name="MarginalAverageLogarithm{ArrayType}(ArrayType, ArrayType)"]/*'/>
/// <typeparam name="ArrayType">The type of the outgoing message.</typeparam>
public static ArrayType MarginalAverageLogarithm<ArrayType>(
ArrayType array, ArrayType result)
where ArrayType : SettableTo<ArrayType>

Просмотреть файл

@ -134,7 +134,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return Beta.Uniform();
}
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="LogisticOp"]/message_doc[@name="LogisticAverageConditional(Beta, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="LogisticOp"]/message_doc[@name="LogisticAverageConditional(Beta, Gaussian, Gaussian, Gaussian)"]/*'/>
public static Beta LogisticAverageConditional(Beta logistic, [Proper] Gaussian x, Gaussian falseMsg, Gaussian to_x)
{
if (x.IsPointMass)
@ -294,6 +294,7 @@ namespace Microsoft.ML.Probabilistic.Factors
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="LogisticOp"]/message_doc[@name="FalseMsg(Beta, Gaussian, Gaussian)"]/*'/>
public static Gaussian FalseMsg([SkipIfUniform] Beta logistic, [Proper] Gaussian x, Gaussian falseMsg)
{
if (x.IsUniform()) throw new ArgumentException("x is uniform", nameof(x));
// falseMsg approximates sigma(-x)
// logistic(sigma(x)) N(x;m,v)
// = sigma(x)^(a-1) sigma(-x)^(b-1) N(x;m,v)

Просмотреть файл

@ -9,7 +9,7 @@ namespace Microsoft.ML.Probabilistic.Factors
using Microsoft.ML.Probabilistic.Math;
using Microsoft.ML.Probabilistic.Factors.Attributes;
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="MaxGaussianOp"]/doc/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="MaxGaussianOp"]/doc/*'/>
[FactorMethod(new string[] { "max", "a", "b" }, typeof(Math), "Max", typeof(double), typeof(double))]
[Quality(QualityBand.Stable)]
public static class MaxGaussianOp

Просмотреть файл

@ -11,7 +11,7 @@ namespace Microsoft.ML.Probabilistic.Factors
using Microsoft.ML.Probabilistic.Math;
using Microsoft.ML.Probabilistic.Factors.Attributes;
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="MultinomialOp"]/doc/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="MultinomialOp"]/doc/*'/>
/// <remarks>The factor is f(sample,p,n) = n!/prod_k sample[k]! prod_k p[k]^sample[k]</remarks>
[FactorMethod(new string[] { "sample", "trialCount", "p" }, typeof(Rand), "Multinomial", typeof(int), typeof(Vector))]
[FactorMethod(typeof(Factor), "MultinomialList", typeof(int), typeof(Vector))]

Просмотреть файл

@ -9,11 +9,194 @@ namespace Microsoft.ML.Probabilistic.Factors
using Microsoft.ML.Probabilistic.Factors.Attributes;
using Microsoft.ML.Probabilistic.Math;
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PlusGammaVmpOp"]/doc/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PlusGammaOp"]/doc/*'/>
[FactorMethod(typeof(Factor), "Plus", typeof(double), typeof(double), Default = true)]
[Quality(QualityBand.Experimental)]
public static class PlusGammaOp
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PlusGammaOp"]/message_doc[@name="SumAverageConditional(GammaPower, GammaPower)"]/*'/>
public static GammaPower SumAverageConditional([SkipIfUniform] GammaPower a, [SkipIfUniform] GammaPower b, GammaPower result)
{
// EP message to 'sum': moment-match a + b.  Means and variances of independent
// addends add; the result is projected onto a GammaPower with result.Power.
a.GetMeanAndVariance(out double aMean, out double aVariance);
b.GetMeanAndVariance(out double bMean, out double bVariance);
double mean = aMean + bMean;
double variance = aVariance + bVariance;
return GammaPower.FromMeanAndVariance(mean, variance, result.Power);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="JaggedSubarrayWithMarginalOp{T}"]/message_doc[@name="ArrayAverageConditional{DistributionType, ArrayType, ItemType}(IList{ItemType}, IList{IList{int}}, ArrayType)"]/*'/>
public static GammaPower AAverageConditional([SkipIfUniform] GammaPower sum, [SkipIfUniform] GammaPower a, [SkipIfUniform] GammaPower b, GammaPower result)
{
if (sum.IsUniform()) return sum;
// Crude estimate of 'a' by moment subtraction: mean(a) ~= mean(sum) - mean(b),
// clipped at 0; variances add under the (approximate) subtraction.
sum.GetMeanAndVariance(out double sumMean, out double sumVariance);
b.GetMeanAndVariance(out double bMean, out double bVariance);
double rMean = Math.Max(0, sumMean - bMean);
double rVariance = sumVariance + bVariance;
double aVariance = a.GetVariance();
// If the subtraction estimate is weaker (larger variance) than the incoming message
// on 'a', switch to the derivative-of-logZ update for the powers it supports (+1, -1).
if (rVariance > aVariance)
{
if (sum.Power == 1)
{
// Moments of toSum = a + b and their derivatives wrt a.Rate (b is held constant,
// so its mean/variance shift the moments but not the derivatives).
GetGammaMomentDerivs(a, out double mean, out double dmean, out double ddmean, out double variance, out double dvariance, out double ddvariance);
mean += b.GetMean();
variance += b.GetVariance();
// Convert moment derivatives into shape/rate derivatives of the toSum message.
GetGammaDerivs(mean, dmean, ddmean, variance, dvariance, ddvariance, out double ds, out double dds, out double dr, out double ddr);
GetDerivLogZ(sum, GammaPower.FromMeanAndVariance(mean, variance, sum.Power), ds, dds, dr, ddr, out double dlogZ, out double ddlogZ);
return GammaPowerFromDerivLogZ(a, dlogZ, ddlogZ);
}
else if (sum.Power == -1)
{
GetInverseGammaMomentDerivs(a, out double mean, out double dmean, out double ddmean, out double variance, out double dvariance, out double ddvariance);
mean += b.GetMean();
variance += b.GetVariance();
// An overflowing variance carries no usable information; return uniform rather than fail.
if (variance > double.MaxValue) return GammaPower.Uniform(a.Power); //throw new NotSupportedException();
GetInverseGammaDerivs(mean, dmean, ddmean, variance, dvariance, ddvariance, out double ds, out double dds, out double dr, out double ddr);
// sum fixed at 0 pins 'a' at 0 (presumably because both addends are non-negative — verify).
if (sum.IsPointMass && sum.Point == 0) return GammaPower.PointMass(0, a.Power);
GetDerivLogZ(sum, GammaPower.FromMeanAndVariance(mean, variance, sum.Power), ds, dds, dr, ddr, out double dlogZ, out double ddlogZ);
return GammaPowerFromDerivLogZ(a, dlogZ, ddlogZ);
}
}
// Fallback: project the subtraction estimate onto result.Power.
return GammaPower.FromMeanAndVariance(rMean, rVariance, result.Power);
}
/// <summary>
/// Converts the first and second derivatives of log(Z) with respect to a.Rate
/// into the EP message to 'a' (the marginal divided by the incoming message).
/// </summary>
/// <param name="a">Incoming message from 'a'.</param>
/// <param name="dlogZ">d(log Z)/d(a.Rate).</param>
/// <param name="ddlogZ">d^2(log Z)/d(a.Rate)^2.</param>
public static GammaPower GammaPowerFromDerivLogZ(GammaPower a, double dlogZ, double ddlogZ)
{
// method1 selects an alternative moment-based computation, kept here (disabled)
// as a reference implementation of the same update.
bool method1 = false;
if (method1)
{
GetPosteriorMeanAndVariance(Gamma.FromShapeAndRate(a.Shape, a.Rate), dlogZ, ddlogZ, out double iaMean, out double iaVariance);
Gamma ia = Gamma.FromMeanAndVariance(iaMean, iaVariance);
return GammaPower.FromShapeAndRate(ia.Shape, ia.Rate, a.Power) / a;
}
else
{
// alpha/beta parameterization of the derivative information (see GaussianOp.GammaFromAlphaBeta).
double alpha = -a.Rate * dlogZ;
// dalpha/dr = -dlogZ - r*ddlogZ
// beta = -r * dalpha/dr
double beta = a.Rate * dlogZ + a.Rate * a.Rate * ddlogZ;
Gamma prior = Gamma.FromShapeAndRate(a.Shape, a.Rate);
// ia is the marginal of a^(1/a.Power)
Gamma ia = GaussianOp.GammaFromAlphaBeta(prior, alpha, beta, true) * prior;
// Divide the marginal by the incoming message to get the outgoing message.
return GammaPower.FromShapeAndRate(ia.Shape, ia.Rate, a.Power) / a;
}
}
/// <summary>
/// Gets first and second derivatives of the moments with respect to the rate parameter of the distribution.
/// </summary>
/// <param name="gammaPower">The distribution; must have Power == -1 (inverse Gamma) and Shape &gt; 2 so that the variance exists.</param>
/// <param name="mean">On return, the mean, Rate/(Shape-1).</param>
/// <param name="dmean">On return, d(mean)/d(Rate) = 1/(Shape-1).</param>
/// <param name="ddmean">On return, d^2(mean)/d(Rate)^2 (zero, since the mean is linear in Rate).</param>
/// <param name="variance">On return, the variance, mean^2/(Shape-2).</param>
/// <param name="dvariance">On return, d(variance)/d(Rate).</param>
/// <param name="ddvariance">On return, d^2(variance)/d(Rate)^2.</param>
public static void GetInverseGammaMomentDerivs(GammaPower gammaPower, out double mean, out double dmean, out double ddmean, out double variance, out double dvariance, out double ddvariance)
{
if (gammaPower.Power != -1) throw new ArgumentException();
if (gammaPower.Shape <= 2) throw new ArgumentOutOfRangeException($"gammaPower.Shape <= 2");
mean = gammaPower.Rate / (gammaPower.Shape - 1);
dmean = 1 / (gammaPower.Shape - 1);
ddmean = 0;
variance = mean * mean / (gammaPower.Shape - 2);
// Chain rule: variance depends on Rate only through mean.
dvariance = 2 * mean * dmean / (gammaPower.Shape - 2);
ddvariance = 2 * dmean * dmean / (gammaPower.Shape - 2);
}
/// <summary>
/// Given the mean and variance of an inverse-Gamma distribution and their first and second
/// derivatives (with respect to some parameter), computes the first and second derivatives
/// of the matching shape (ds, dds) and rate (dr, ddr), using shape = 2 + mean^2/variance
/// and rate = mean*(shape-1).
/// </summary>
public static void GetInverseGammaDerivs(double mean, double dmean, double ddmean, double variance, double dvariance, double ddvariance, out double ds, out double dds, out double dr, out double ddr)
{
double shape = 2 + mean * mean / variance;
double v2 = variance * variance;
// d(shape) = d(mean^2/variance) by the quotient rule.
ds = 2 * mean * dmean / variance - mean * mean / v2 * dvariance;
dds = 2 * mean * ddmean / variance - mean * mean / v2 * ddvariance + 2 * dmean * dmean / variance - 4 * mean * dmean / v2 * dvariance + 2 * mean * mean / (v2 * variance) * dvariance * dvariance;
// rate = mean*(shape-1), differentiated by the product rule.
dr = dmean * (shape - 1) + mean * ds;
ddr = ddmean * (shape - 1) + 2 * dmean * ds + mean * dds;
}
/// <summary>
/// Gets the mean and variance of a Gamma distribution (Power == 1) and their first and
/// second derivatives with respect to the rate parameter:
/// mean = Shape/Rate, variance = Shape/Rate^2.
/// </summary>
/// <param name="gammaPower">The distribution; must have Power == 1.</param>
/// <param name="mean">On return, Shape/Rate.</param>
/// <param name="dmean">On return, d(mean)/d(Rate) = -Shape/Rate^2.</param>
/// <param name="ddmean">On return, d^2(mean)/d(Rate)^2 = 2*Shape/Rate^3.</param>
/// <param name="variance">On return, Shape/Rate^2.</param>
/// <param name="dvariance">On return, d(variance)/d(Rate) = -2*Shape/Rate^3.</param>
/// <param name="ddvariance">On return, d^2(variance)/d(Rate)^2 = 6*Shape/Rate^4.</param>
public static void GetGammaMomentDerivs(GammaPower gammaPower, out double mean, out double dmean, out double ddmean, out double variance, out double dvariance, out double ddvariance)
{
if (gammaPower.Power != 1) throw new ArgumentException();
mean = gammaPower.Shape / gammaPower.Rate;
variance = mean / gammaPower.Rate;
// Each extra derivative in Rate multiplies by -(k)/Rate; the identities below exploit that.
dmean = -variance;
ddmean = 2 * variance / gammaPower.Rate;
dvariance = -ddmean;
ddvariance = -3 * dvariance / gammaPower.Rate;
}
/// <summary>
/// Given the mean and variance of a Gamma distribution and their first and second
/// derivatives (with respect to some parameter), computes the first and second derivatives
/// of the matching shape (ds, dds) and rate (dr, ddr), using rate = mean/variance and
/// shape = mean*rate.
/// </summary>
public static void GetGammaDerivs(double mean, double dmean, double ddmean, double variance, double dvariance, double ddvariance, out double ds, out double dds, out double dr, out double ddr)
{
double rate = mean / variance;
//double shape = mean * rate;
double v2 = variance * variance;
// Quotient rule on rate = mean/variance.
dr = dmean / variance - mean / v2 * dvariance;
ddr = ddmean / variance - mean / v2 * ddvariance - 2 * dmean / v2 * dvariance + 2 * mean / (v2 * variance) * dvariance * dvariance;
// Product rule on shape = mean*rate.
ds = dmean * rate + mean * dr;
dds = ddmean * rate + 2 * dmean * dr + mean * ddr;
}
/// <summary>
/// Computes the first and second derivatives of log(Z) with respect to the parameter that
/// toSum's shape and rate depend on (a.Rate in this factor), where Z is the normalizer of
/// sum(x)*toSum(x).  ds/dds are the first/second derivatives of toSum.Shape and dr/ddr the
/// first/second derivatives of toSum.Rate with respect to that parameter.
/// </summary>
/// <param name="sum">Incoming message from 'sum'.</param>
/// <param name="toSum">Moment-matched message to 'sum'; must have the same Power as <paramref name="sum"/>.</param>
/// <param name="ds">d(toSum.Shape)/d(parameter).</param>
/// <param name="dds">d^2(toSum.Shape)/d(parameter)^2.</param>
/// <param name="dr">d(toSum.Rate)/d(parameter).</param>
/// <param name="ddr">d^2(toSum.Rate)/d(parameter)^2.</param>
/// <param name="dlogZ">On return, d(log Z)/d(parameter).</param>
/// <param name="ddlogZ">On return, d^2(log Z)/d(parameter)^2.</param>
/// <exception cref="ArgumentException">If the powers of <paramref name="sum"/> and <paramref name="toSum"/> differ.</exception>
/// <exception cref="NotSupportedException">If <paramref name="toSum"/> is a point mass, or <paramref name="sum"/> is a point mass at zero.</exception>
public static void GetDerivLogZ(GammaPower sum, GammaPower toSum, double ds, double dds, double dr, double ddr, out double dlogZ, out double ddlogZ)
{
    if (sum.Power != toSum.Power) throw new ArgumentException($"sum.Power ({sum.Power}) != toSum.Power ({toSum.Power})");
    if (toSum.IsPointMass) throw new NotSupportedException();
    if (toSum.IsUniform())
    {
        // A uniform toSum makes Z constant in the parameter, so both derivatives vanish.
        dlogZ = 0;
        ddlogZ = 0;
        return;
    }
    if (sum.IsPointMass)
    {
        // Z = toSum.GetLogProb(sum.Point)
        // log(Z) = (toSum.Shape/toSum.Power - 1)*log(sum.Point) - toSum.Rate*sum.Point^(1/toSum.Power) + toSum.Shape*log(toSum.Rate) - GammaLn(toSum.Shape)
        if (sum.Point == 0) throw new NotSupportedException();
        double logSumOverPower = Math.Log(sum.Point) / toSum.Power;
        double powSum = Math.Exp(logSumOverPower);
        double logRate = Math.Log(toSum.Rate);
        double digammaShape = MMath.Digamma(toSum.Shape);
        double shapeOverRate = toSum.Shape / toSum.Rate;
        // Chain rule on log(Z) above, with Shape and Rate functions of the parameter.
        dlogZ = ds * logSumOverPower - dr * powSum + ds * logRate + shapeOverRate * dr - digammaShape * ds;
        // Second derivative.  d2/dp2 of Shape*log(Rate) = dds*logRate + 2*ds*dr/Rate
        // - Shape*dr^2/Rate^2 + (Shape/Rate)*ddr, and d2/dp2 of -GammaLn(Shape)
        // = -Trigamma(Shape)*ds^2 - Digamma(Shape)*dds.
        // BUGFIX: the previous expression omitted the -Shape*dr^2/Rate^2 term and used
        // ds instead of ds*ds in the trigamma term (the non-point-mass branch below
        // correctly uses ds*ds in its quadratic terms).
        ddlogZ = dds * logSumOverPower - ddr * powSum + dds * logRate + 2 * ds * dr / toSum.Rate - toSum.Shape * dr * dr / (toSum.Rate * toSum.Rate) + shapeOverRate * ddr - MMath.Trigamma(toSum.Shape) * ds * ds - digammaShape * dds;
    }
    else
    {
        // Express the derivatives through the change in expected sufficient statistics
        // between the prior message (toSum) and the combined distribution (sum*toSum).
        GammaPower product = sum * toSum;
        double cs = (MMath.Digamma(product.Shape) - Math.Log(product.Shape)) - (MMath.Digamma(toSum.Shape) - Math.Log(toSum.Shape));
        double cr = toSum.Shape / toSum.Rate - product.Shape / product.Rate;
        double css = MMath.Trigamma(product.Shape) - MMath.Trigamma(toSum.Shape);
        double csr = 1 / toSum.Rate - 1 / product.Rate;
        double crr = product.Shape / (product.Rate * product.Rate) - toSum.Shape / (toSum.Rate * toSum.Rate);
        dlogZ = cs * ds + cr * dr;
        ddlogZ = cs * dds + cr * ddr + css * ds * ds + 2 * csr * ds * dr + crr * dr * dr;
    }
}
/// <summary>
/// Computes the posterior mean and variance from the prior moments and the first and
/// second derivatives of log(Z) with respect to the prior rate parameter.
/// </summary>
/// <param name="prior">The prior Gamma distribution.</param>
/// <param name="dlogZ">d(log Z)/d(prior rate).</param>
/// <param name="ddlogZ">d^2(log Z)/d(prior rate)^2.</param>
/// <param name="mean">On return, the posterior mean.</param>
/// <param name="variance">On return, the posterior variance.</param>
public static void GetPosteriorMeanAndVariance(Gamma prior, double dlogZ, double ddlogZ, out double mean, out double variance)
{
// dlogZ is derivative of log(Z) wrt prior rate parameter
prior.GetMeanAndVariance(out double priorMean, out double priorVariance);
mean = priorMean - dlogZ;
variance = priorVariance + ddlogZ;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PlusGammaOp"]/message_doc[@name="BAverageConditional(GammaPower, GammaPower)"]/*'/>
public static GammaPower BAverageConditional([SkipIfUniform] GammaPower sum, [SkipIfUniform] GammaPower a, [SkipIfUniform] GammaPower b, GammaPower result)
{
// Addition is symmetric in its arguments, so the message to 'b' is the
// message to 'a' with the roles of a and b swapped.
return AAverageConditional(sum, b, a, result);
}
/// <summary>
/// Computes the log of the average factor value: the log inner product of the incoming
/// 'sum' message with the moment-matched message to 'sum' computed from a and b.
/// </summary>
public static double LogAverageFactor([SkipIfUniform] GammaPower sum, [SkipIfUniform] GammaPower a, [SkipIfUniform] GammaPower b)
{
GammaPower toSum = SumAverageConditional(a, b, sum);
return toSum.GetLogAverageOf(sum);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PlusGammaOp"]/message_doc[@name="LogEvidenceRatio(GammaPower, GammaPower, GammaPower)"]/*'/>
public static double LogEvidenceRatio([SkipIfUniform] GammaPower sum, [SkipIfUniform] GammaPower a, [SkipIfUniform] GammaPower b)
{
// Always zero — presumably the evidence contribution cancels for this factor
// under EP (standard for factors whose output message is moment-matched); verify
// against other EP operators in this assembly.
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PlusGammaOp"]/message_doc[@name="SumAverageConditional(GammaPower, double)"]/*'/>
public static GammaPower SumAverageConditional([SkipIfUniform] GammaPower a, double b)
{
@ -75,7 +258,7 @@ namespace Microsoft.ML.Probabilistic.Factors
}
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PlusGammaOp"]/message_doc[@name="SumAverageConditional(GammaPower, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PlusGammaOp"]/message_doc[@name="SumAverageConditional(double, GammaPower)"]/*'/>
public static GammaPower SumAverageConditional(double a, [SkipIfUniform] GammaPower b)
{
return SumAverageConditional(b, a);

Просмотреть файл

@ -15,8 +15,41 @@ namespace Microsoft.ML.Probabilistic.Factors
[Quality(QualityBand.Experimental)]
public static class PowerOp
{
/// <summary>
/// Constructs a Gamma distribution whose mean is <paramref name="mean"/> and whose
/// mean inverse E[1/x] is <paramref name="meanInverse"/>, falling back to a point mass
/// at the mean when no proper Gamma has those moments.
/// </summary>
/// <param name="mean">Desired E[x]; must be non-negative.</param>
/// <param name="meanInverse">Desired E[1/x]; must be non-negative.</param>
/// <returns>A Gamma with the requested moments, or a point mass at <paramref name="mean"/> when the implied shape or rate is out of range.</returns>
/// <exception cref="ArgumentOutOfRangeException">If either argument is negative.</exception>
public static Gamma GammaFromMeanAndMeanInverse(double mean, double meanInverse)
{
    if (mean < 0)
        throw new ArgumentOutOfRangeException(nameof(mean), mean, "mean < 0");
    if (meanInverse < 0)
        throw new ArgumentOutOfRangeException(nameof(meanInverse), meanInverse, "meanInverse < 0");
    // For Gamma(a,b): mean = a/b and meanInverse = b/(a-1), which solve to
    //   a = mean*meanInverse / (mean*meanInverse - 1),  b = a/mean.
    double impliedRate = meanInverse / (mean * meanInverse - 1);
    // Keep the original comparison form: NaN rates fall through to the shape check.
    if (impliedRate < 0 || impliedRate > double.MaxValue)
        return Gamma.PointMass(mean);
    double impliedShape = mean * impliedRate;
    return (impliedShape > double.MaxValue)
        ? Gamma.PointMass(mean)
        : Gamma.FromShapeAndRate(impliedShape, impliedRate);
}
/// <summary>
/// Projects a GammaPower message onto a Gamma distribution.
/// </summary>
/// <param name="message">The GammaPower to convert.</param>
/// <returns>A Gamma with the same mean and variance (exact when message.Power == 1 or the message is uniform).</returns>
public static Gamma GammaFromGammaPower(GammaPower message)
{
    // Power == 1 means the message already is a Gamma; copy its parameters directly.
    if (message.Power == 1)
        return Gamma.FromShapeAndRate(message.Shape, message.Rate); // same as below, but faster
    if (message.IsUniform())
        return Gamma.Uniform();
    // Otherwise match the first two moments.
    message.GetMeanAndVariance(out double projectedMean, out double projectedVariance);
    return Gamma.FromMeanAndVariance(projectedMean, projectedVariance);
}
/// <summary>
/// Constructs a Gamma distribution from the mean of x^power and the mean of log(x).
/// </summary>
/// <param name="meanPower">Presumably E[x^power] (passed through to GammaPower.FromMeanAndMeanLog — verify).</param>
/// <param name="meanLog">Desired E[log(x)].</param>
/// <param name="power">The power used by the intermediate GammaPower fit.</param>
public static Gamma FromMeanPowerAndMeanLog(double meanPower, double meanLog, double power)
{
    // GammaPower.FromMeanAndMeanLog fixes E[log(x^power)], so pass meanLog scaled by
    // power to obtain E[log(x)] = meanLog on the underlying Gamma.
    GammaPower fitted = GammaPower.FromMeanAndMeanLog(meanPower, power * meanLog, power);
    return Gamma.FromShapeAndRate(fitted.Shape, fitted.Rate);
}
// Gamma = TruncatedGamma ^ y /////////////////////////////////////////////////////////
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PowerOp"]/message_doc[@name="PowAverageConditional(TruncatedGamma, double)"]/*'/>
public static Gamma PowAverageConditional([SkipIfUniform] TruncatedGamma x, double y)
{
double mean = x.GetMeanPower(y);
@ -32,17 +65,6 @@ namespace Microsoft.ML.Probabilistic.Factors
}
}
public static Gamma GammaFromMeanAndMeanInverse(double mean, double meanInverse)
{
// mean = a/b
// meanInverse = b/(a-1)
// a = mean*meanInverse / (mean*meanInverse - 1)
// b = a/mean
double rate = meanInverse / (mean * meanInverse - 1);
double shape = mean * rate;
return Gamma.FromShapeAndRate(shape, rate);
}
public static TruncatedGamma XAverageConditional([SkipIfUniform] Gamma pow, TruncatedGamma x, double y)
{
// message computed below should be uniform when pow is uniform, but may not due to roundoff error.
@ -56,7 +78,48 @@ namespace Microsoft.ML.Probabilistic.Factors
var powMarginal = pow * toPow;
// xMarginal2 is the exact distribution of pow^(1/y) where pow has distribution powMarginal
GammaPower xMarginal2 = GammaPower.FromShapeAndRate(powMarginal.Shape, powMarginal.Rate, power);
var xMarginal = new TruncatedGamma(GammaFromGammaPower(xMarginal2));
var xMarginal = new TruncatedGamma(GammaFromGammaPower(xMarginal2), x.LowerBound, x.UpperBound);
var result = xMarginal;
result.SetToRatio(xMarginal, x, GammaProductOp_Laplace.ForceProper);
return result;
}
// GammaPower = TruncatedGamma ^ y /////////////////////////////////////////////////////////
/// <summary>
/// EP message to 'pow' for pow = x^y where x is TruncatedGamma and the result is a
/// GammaPower with Power == 1 or Power == -1.
/// </summary>
/// <param name="x">Incoming message from 'x'.</param>
/// <param name="y">The exponent.</param>
/// <param name="result">Determines the Power of the returned GammaPower.</param>
/// <exception cref="ArgumentException">If result.Power is neither 1 nor -1.</exception>
public static GammaPower PowAverageConditional([SkipIfUniform] TruncatedGamma x, double y, GammaPower result)
{
// A GammaPower with Power == -1 represents the inverse, so negate the exponent.
if (result.Power == -1) y = -y;
else if (result.Power != 1) throw new ArgumentException($"result.Power ({result.Power}) is not 1 or -1", nameof(result));
double mean = x.GetMeanPower(y);
if (x.LowerBound > 0)
{
// Bounded away from zero: E[x^-y] is finite, so match mean and mean-inverse.
double meanInverse = x.GetMeanPower(-y);
Gamma result1 = GammaFromMeanAndMeanInverse(mean, meanInverse);
return GammaPower.FromShapeAndRate(result1.Shape, result1.Rate, result.Power);
}
else
{
// Otherwise match mean and variance via E[x^2y] - E[x^y]^2.
double variance = x.GetMeanPower(2 * y) - mean * mean;
Gamma result1 = Gamma.FromMeanAndVariance(mean, variance);
return GammaPower.FromShapeAndRate(result1.Shape, result1.Rate, result.Power);
}
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PowerOp"]/message_doc[@name="XAverageConditional(GammaPower, TruncatedGamma, double)"]/*'/>
public static TruncatedGamma XAverageConditional([SkipIfUniform] GammaPower pow, TruncatedGamma x, double y)
{
// message computed below should be uniform when pow is uniform, but may not due to roundoff error.
if (pow.IsUniform()) return TruncatedGamma.Uniform();
// Factor is (x^y)^(pow.Shape/pow.Power - 1) * exp(-pow.Rate*(x^y)^1/pow.Power)
// =propto x^(pow.Shape/(pow.Power/y) - y) * exp(-pow.Rate*x^y/pow.Power)
// newShape/(pow.Power/y) - 1 = pow.Shape/(pow.Power/y) - y
// newShape = pow.Shape + (1-y)*(pow.Power/y)
double power = pow.Power / y;
var toPow = PowAverageConditional(x, y, pow);
var powMarginal = pow * toPow;
// xMarginal2 is the exact distribution of pow^(1/y) where pow has distribution powMarginal
GammaPower xMarginal2 = GammaPower.FromShapeAndRate(powMarginal.Shape, powMarginal.Rate, power);
var xMarginal = new TruncatedGamma(GammaFromGammaPower(xMarginal2), x.LowerBound, x.UpperBound);
var result = xMarginal;
result.SetToRatio(xMarginal, x, GammaProductOp_Laplace.ForceProper);
return result;
@ -64,12 +127,14 @@ namespace Microsoft.ML.Probabilistic.Factors
// Gamma = Gamma ^ y /////////////////////////////////////////////////////////
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PowerOp"]/message_doc[@name="PowAverageConditional(Gamma, double, Gamma)"]/*'/>
public static Gamma PowAverageConditional([SkipIfUniform] Gamma x, double y, Gamma result)
{
GammaPower message = GammaPower.FromShapeAndRate(x.Shape, x.Rate, y);
return GammaFromGammaPower(message);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PowerOp"]/message_doc[@name="XAverageConditional(Gamma, Gamma, double, Gamma)"]/*'/>
public static Gamma XAverageConditional([SkipIfUniform] Gamma pow, Gamma x, double y, Gamma result)
{
// message computed below should be uniform when pow is uniform, but may not due to roundoff error.
@ -88,16 +153,9 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
public static Gamma GammaFromGammaPower(GammaPower message)
{
if (message.Power == 1) return Gamma.FromShapeAndRate(message.Shape, message.Rate); // same as below, but faster
if (message.IsUniform()) return Gamma.Uniform();
message.GetMeanAndVariance(out double mean, out double variance);
return Gamma.FromMeanAndVariance(mean, variance);
}
// GammaPower = GammaPower ^ y /////////////////////////////////////////////////////////
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PowerOp"]/message_doc[@name="LogAverageFactor(GammaPower, GammaPower, double)"]/*'/>
public static double LogAverageFactor(GammaPower pow, GammaPower x, double y)
{
// GetLogAverageOf =
@ -110,12 +168,14 @@ namespace Microsoft.ML.Probabilistic.Factors
return toPow.GetLogAverageOf(pow);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PowerOp"]/message_doc[@name="PowAverageConditional(GammaPower, double, GammaPower)"]/*'/>
public static GammaPower PowAverageConditional([SkipIfUniform] GammaPower x, double y, GammaPower result)
{
GammaPower message = GammaPower.FromShapeAndRate(x.Shape, x.Rate, y * x.Power);
return GammaPowerFromDifferentPower(message, result.Power);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PowerOp"]/message_doc[@name="XAverageConditional(GammaPower, GammaPower, double, GammaPower)"]/*'/>
public static GammaPower XAverageConditional([SkipIfUniform] GammaPower pow, GammaPower x, double y, GammaPower result)
{
// message computed below should be uniform when pow is uniform, but may not due to roundoff error.
@ -134,13 +194,6 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
public static Gamma FromMeanPowerAndMeanLog(double meanPower, double meanLog, double power)
{
// We want E[log(x)] = meanLog but this sets E[log(x^power)] = meanLog, so we scale meanLog
var gammaPower = GammaPower.FromMeanAndMeanLog(meanPower, meanLog * power, power);
return Gamma.FromShapeAndRate(gammaPower.Shape, gammaPower.Rate);
}
public static GammaPower GammaPowerFromDifferentPower(GammaPower message, double newPower)
{
if (message.Power == newPower) return message; // same as below, but faster
@ -184,11 +237,13 @@ namespace Microsoft.ML.Probabilistic.Factors
}
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PowerOp"]/message_doc[@name="PowAverageLogarithm(GammaPower, double, GammaPower)"]/*'/>
public static GammaPower PowAverageLogarithm([SkipIfUniform] GammaPower x, double y, GammaPower result)
{
return PowAverageConditional(x, y, result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PowerOp"]/message_doc[@name="XAverageLogarithm(GammaPower, GammaPower, double, GammaPower)"]/*'/>
public static GammaPower XAverageLogarithm([SkipIfUniform] GammaPower pow, GammaPower x, double y, GammaPower result)
{
return XAverageConditional(pow, x, y, result);
@ -196,11 +251,13 @@ namespace Microsoft.ML.Probabilistic.Factors
// GammaPower = Gamma ^ y //////////////////////////////////////////////////////////////
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PowerOp"]/message_doc[@name="PowAverageConditional(Gamma, double)"]/*'/>
public static GammaPower PowAverageConditional([SkipIfUniform] Gamma x, double y)
{
return GammaPower.FromShapeAndRate(x.Shape, x.Rate, y);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PowerOp"]/message_doc[@name="XAverageConditional(GammaPower, double)"]/*'/>
public static Gamma XAverageConditional([SkipIfUniform] GammaPower pow, double y)
{
if (y != pow.Power)
@ -208,11 +265,13 @@ namespace Microsoft.ML.Probabilistic.Factors
return Gamma.FromShapeAndRate(pow.Shape + (1 - y), pow.Rate);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PowerOp"]/message_doc[@name="PowAverageLogarithm(Gamma, double)"]/*'/>
public static GammaPower PowAverageLogarithm([SkipIfUniform] Gamma x, double y)
{
return PowAverageConditional(x, y);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PowerOp"]/message_doc[@name="XAverageLogarithm(GammaPower, double)"]/*'/>
public static Gamma XAverageLogarithm([SkipIfUniform] GammaPower pow, double y)
{
return XAverageConditional(pow, y);

Просмотреть файл

@ -162,13 +162,14 @@ namespace Microsoft.ML.Probabilistic.Factors
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampBackwardOp"]/message_doc[@name="AverageLogFactor()"]/*'/>
[Skip]
public static double AverageLogFactor()
{
return 0.0;
}
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampBackwardOp"]/message_doc[@name="ValueAverageConditional{Distribution}(Distribution, double, Distribution)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampBackwardOp"]/message_doc[@name="ValueAverageConditional{Distribution}(Distribution, double, Distribution)"]/*'/>
/// <typeparam name="Distribution">The type of the distribution over the damped variable.</typeparam>
public static Distribution ValueAverageConditional<Distribution>(
[SkipIfUniform] Distribution backward, double stepsize, Distribution to_value)
@ -184,6 +185,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampBackwardOp"]/message_doc[@name="ValueAverageLogarithm{Distribution}(Distribution, double, Distribution)"]/*'/>
/// <typeparam name="Distribution">The type of the distribution over the damped variable.</typeparam>
public static Distribution ValueAverageLogarithm<Distribution>(
[SkipIfUniform] Distribution backward, double stepsize, Distribution to_value)
@ -199,6 +201,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return value;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampBackwardOp"]/message_doc[@name="BackwardAverageLogarithm{Distribution}(Distribution)"]/*'/>
/// <typeparam name="Distribution">The type of the distribution over the damped variable.</typeparam>
public static Distribution BackwardAverageLogarithm<Distribution>([IsReturned] Distribution value)
{
@ -206,25 +209,26 @@ namespace Microsoft.ML.Probabilistic.Factors
}
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampForwardOp"]/doc/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampForwardOp{T}"]/doc/*'/>
[FactorMethod(typeof(Damp), "Forward<>")]
[Quality(QualityBand.Preview)]
public static class DampForwardOp<T>
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampForwardOp"]/message_doc[@name="LogEvidenceRatio()"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampForwardOp{T}"]/message_doc[@name="LogEvidenceRatio()"]/*'/>
[Skip]
public static double LogEvidenceRatio()
{
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampForwardOp{T}"]/message_doc[@name="AverageLogFactor()"]/*'/>
[Skip]
public static double AverageLogFactor()
{
return 0.0;
}
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampForwardOp"]/message_doc[@name="ForwardAverageConditional{Distribution}(Distribution, double, Distribution)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampForwardOp{T}"]/message_doc[@name="ForwardAverageConditional{Distribution}(Distribution, double, Distribution)"]/*'/>
/// <typeparam name="Distribution">The type of the distribution over the damped variable.</typeparam>
public static Distribution ForwardAverageConditional<Distribution>(
[SkipIfUniform] Distribution value, double stepsize, Distribution to_forward)
@ -248,6 +252,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampForwardOp{T}"]/message_doc[@name="ForwardAverageLogarithm{Distribution}(Distribution,double,Distribution)"]/*'/>
/// <typeparam name="Distribution">The type of the distribution over the damped variable.</typeparam>
public static Distribution ForwardAverageLogarithm<Distribution>(
[SkipIfUniform] Distribution value, double stepsize, Distribution to_forward)
@ -256,13 +261,14 @@ namespace Microsoft.ML.Probabilistic.Factors
return ForwardAverageConditional(value, stepsize, to_forward);
}
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampForwardOp"]/message_doc[@name="ValueAverageConditional{Distribution}(Distribution, Distribution)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampForwardOp{T}"]/message_doc[@name="ValueAverageConditional{Distribution}(Distribution)"]/*'/>
/// <typeparam name="Distribution">The type of the distribution over the damped variable.</typeparam>
public static Distribution ValueAverageConditional<Distribution>([IsReturned] Distribution forward)
{
return forward;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DampForwardOp{T}"]/message_doc[@name="ValueAverageLogarithm{Distribution}(Distribution)"]/*'/>
/// <typeparam name="Distribution">The type of the distribution over the damped variable.</typeparam>
public static Distribution ValueAverageLogarithm<Distribution>([IsReturned] Distribution forward)
{

Просмотреть файл

@ -14,25 +14,25 @@ namespace Microsoft.ML.Probabilistic.Factors
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpBase"]/doc/*'/>
public class GaussianProductOpBase
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp"]/message_doc[@name="ProductAverageConditional(double, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpBase"]/message_doc[@name="ProductAverageConditional(double, Gaussian)"]/*'/>
public static Gaussian ProductAverageConditional(double A, [SkipIfUniform] Gaussian B)
{
return GaussianProductVmpOp.ProductAverageLogarithm(A, B);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp"]/message_doc[@name="ProductAverageConditional(Gaussian, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpBase"]/message_doc[@name="ProductAverageConditional(Gaussian, double)"]/*'/>
public static Gaussian ProductAverageConditional([SkipIfUniform] Gaussian A, double B)
{
return ProductAverageConditional(B, A);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp"]/message_doc[@name="ProductAverageConditional(double, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpBase"]/message_doc[@name="ProductAverageConditional(double, double)"]/*'/>
public static Gaussian ProductAverageConditional(double a, double b)
{
return Gaussian.PointMass(a * b);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp"]/message_doc[@name="AAverageConditional(Gaussian, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpBase"]/message_doc[@name="AAverageConditional(Gaussian, double)"]/*'/>
public static Gaussian AAverageConditional([SkipIfUniform] Gaussian Product, double B)
{
if (Product.IsPointMass)
@ -45,7 +45,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp"]/message_doc[@name="AAverageConditional(double, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpBase"]/message_doc[@name="AAverageConditional(double, double)"]/*'/>
public static Gaussian AAverageConditional(double Product, double B)
{
if (B == 0)
@ -58,13 +58,13 @@ namespace Microsoft.ML.Probabilistic.Factors
return Gaussian.PointMass(Product / B);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp"]/message_doc[@name="BAverageConditional(Gaussian, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpBase"]/message_doc[@name="BAverageConditional(Gaussian, double)"]/*'/>
public static Gaussian BAverageConditional([SkipIfUniform] Gaussian Product, double A)
{
return AAverageConditional(Product, A);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp"]/message_doc[@name="BAverageConditional(double, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpBase"]/message_doc[@name="BAverageConditional(double, double)"]/*'/>
public static Gaussian BAverageConditional(double Product, double A)
{
return AAverageConditional(Product, A);
@ -73,7 +73,7 @@ namespace Microsoft.ML.Probabilistic.Factors
public class GaussianProductOpEvidenceBase : GaussianProductOpBase
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp"]/message_doc[@name="LogAverageFactor(Gaussian, Gaussian, double, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpEvidenceBase"]/message_doc[@name="LogAverageFactor(Gaussian, Gaussian, double, Gaussian)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
public static double LogAverageFactor(Gaussian product, Gaussian a, double b, [Fresh] Gaussian to_product)
{
@ -81,14 +81,14 @@ namespace Microsoft.ML.Probabilistic.Factors
return to_product.GetLogAverageOf(product);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductEvidenceOp"]/message_doc[@name="LogAverageFactor(Gaussian, double, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpEvidenceBase"]/message_doc[@name="LogAverageFactor(Gaussian, double, Gaussian, Gaussian)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
public static double LogAverageFactor(Gaussian product, double a, Gaussian b, [Fresh] Gaussian to_product)
{
return LogAverageFactor(product, b, a, to_product);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductEvidenceOp"]/message_doc[@name="LogAverageFactor(double, Gaussian, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpEvidenceBase"]/message_doc[@name="LogAverageFactor(double, Gaussian, double)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
public static double LogAverageFactor(double product, Gaussian a, double b)
{
@ -96,35 +96,35 @@ namespace Microsoft.ML.Probabilistic.Factors
return to_product.GetLogProb(product);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductEvidenceOp"]/message_doc[@name="LogAverageFactor(double, double, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpEvidenceBase"]/message_doc[@name="LogAverageFactor(double, double, Gaussian)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
public static double LogAverageFactor(double product, double a, Gaussian b)
{
return LogAverageFactor(product, b, a);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductEvidenceOp"]/message_doc[@name="LogAverageFactor(double, double, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpEvidenceBase"]/message_doc[@name="LogAverageFactor(double, double, double)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
public static double LogAverageFactor(double product, double a, double b)
{
return (product == Factor.Product(a, b)) ? 0.0 : Double.NegativeInfinity;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductEvidenceOp"]/message_doc[@name="LogEvidenceRatio(double, double, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpEvidenceBase"]/message_doc[@name="LogEvidenceRatio(double, double, double)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
public static double LogEvidenceRatio(double product, double a, double b)
{
return LogAverageFactor(product, a, b);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductEvidenceOp"]/message_doc[@name="LogAverageFactor(Gaussian, double, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpEvidenceBase"]/message_doc[@name="LogAverageFactor(Gaussian, double, double)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
public static double LogAverageFactor(Gaussian product, double a, double b)
{
return product.GetLogProb(Factor.Product(a, b));
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductEvidenceOp"]/message_doc[@name="LogEvidenceRatio(Gaussian, Gaussian, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpEvidenceBase"]/message_doc[@name="LogEvidenceRatio(Gaussian, Gaussian, double)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
[Skip]
public static double LogEvidenceRatio(Gaussian product, Gaussian a, double b)
@ -132,7 +132,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductEvidenceOp"]/message_doc[@name="LogEvidenceRatio(Gaussian, double, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpEvidenceBase"]/message_doc[@name="LogEvidenceRatio(Gaussian, double, Gaussian)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
[Skip]
public static double LogEvidenceRatio(Gaussian product, double a, Gaussian b)
@ -140,7 +140,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return LogEvidenceRatio(product, b, a);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductEvidenceOp"]/message_doc[@name="LogEvidenceRatio(Gaussian, double, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpEvidenceBase"]/message_doc[@name="LogEvidenceRatio(Gaussian, double, double)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
[Skip]
public static double LogEvidenceRatio(Gaussian product, double a, double b)
@ -148,14 +148,14 @@ namespace Microsoft.ML.Probabilistic.Factors
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductEvidenceOp"]/message_doc[@name="LogEvidenceRatio(double, Gaussian, double)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpEvidenceBase"]/message_doc[@name="LogEvidenceRatio(double, Gaussian, double)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
public static double LogEvidenceRatio(double product, Gaussian a, double b)
{
return LogAverageFactor(product, a, b);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductEvidenceOp"]/message_doc[@name="LogEvidenceRatio(double, double, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOpEvidenceBase"]/message_doc[@name="LogEvidenceRatio(double, double, Gaussian)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
public static double LogEvidenceRatio(double product, double a, Gaussian b)
{
@ -314,7 +314,7 @@ namespace Microsoft.ML.Probabilistic.Factors
{
if (B.IsPointMass)
return AAverageConditional(Product, B.Point);
if (Product.IsUniform())
if (Product.IsUniform() || B.IsUniform())
return Gaussian.Uniform();
double mA, vA;
A.GetMeanAndVariance(out mA, out vA);
@ -941,7 +941,7 @@ namespace Microsoft.ML.Probabilistic.Factors
[Quality(QualityBand.Experimental)]
public class GaussianProductOp_LaplaceProp : GaussianProductOpEvidenceBase
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace2"]/message_doc[@name="ProductAverageConditional(Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_LaplaceProp"]/message_doc[@name="ProductAverageConditional(Gaussian, Gaussian)"]/*'/>
public static Gaussian ProductAverageConditional(Gaussian A, Gaussian B)
{
return GaussianProductVmpOp.ProductAverageLogarithm(A, B);
@ -974,7 +974,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return GaussianOp.GaussianFromAlphaBeta(Product, dlogz, -ddlogz, true);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace2"]/message_doc[@name="AAverageConditional(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_LaplaceProp"]/message_doc[@name="AAverageConditional(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
public static Gaussian AAverageConditional(Gaussian Product, Gaussian A, Gaussian B, Gaussian to_A)
{
Gaussian Apost = A * to_A;
@ -996,13 +996,13 @@ namespace Microsoft.ML.Probabilistic.Factors
return Gaussian.FromNatural(r * ahat + dlogf, r);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace2"]/message_doc[@name="BAverageConditional(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_LaplaceProp"]/message_doc[@name="BAverageConditional(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
public static Gaussian BAverageConditional(Gaussian Product, Gaussian A, Gaussian B, Gaussian to_B)
{
return AAverageConditional(Product, B, A, to_B);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace2"]/message_doc[@name="LogAverageFactor(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_LaplaceProp"]/message_doc[@name="LogAverageFactor(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
public static double LogAverageFactor([SkipIfUniform] Gaussian Product, [SkipIfUniform] Gaussian A, [SkipIfUniform] Gaussian B, Gaussian to_A)
{
@ -1017,7 +1017,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return Gaussian.GetLogProb(mx, ahat * mb, vx + ahat * ahat * vb) + A.GetLogProb(ahat) - Apost.GetLogProb(ahat);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace2"]/message_doc[@name="LogEvidenceRatio(Gaussian, Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_LaplaceProp"]/message_doc[@name="LogEvidenceRatio(Gaussian, Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
[FactorMethod(typeof(Factor), "Product", typeof(double), typeof(double))]
public static double LogEvidenceRatio([SkipIfUniform] Gaussian Product, [SkipIfUniform] Gaussian A, [SkipIfUniform] Gaussian B, Gaussian to_A, Gaussian to_product)
{
@ -1056,7 +1056,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return ahat;
}
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace"]/message_doc[@name="ProductAverageConditional(Gaussian, Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace"]/message_doc[@name="ProductAverageConditional(Gaussian, Gaussian, Gaussian, double)"]/*'/>
public static Gaussian ProductAverageConditional([NoInit] Gaussian Product, Gaussian A, Gaussian B, [Fresh] double ahat)
{
if (Product.IsUniform())
@ -1134,7 +1134,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace"]/message_doc[@name="AAverageConditional(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace"]/message_doc[@name="AAverageConditional(Gaussian, Gaussian, Gaussian, double)"]/*'/>
public static Gaussian AAverageConditional([SkipIfUniform] Gaussian Product, [NoInit] Gaussian A, Gaussian B, [Fresh] double ahat)
{
if (Product.IsUniform()) return Product;
@ -1143,7 +1143,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace"]/message_doc[@name="BAverageConditional(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace"]/message_doc[@name="BAverageConditional(Gaussian, Gaussian, Gaussian, double)"]/*'/>
public static Gaussian BAverageConditional([SkipIfUniform] Gaussian Product, Gaussian A, [NoInit] Gaussian B, [Fresh] double ahat)
{
if (Product.IsUniform()) return Product;
@ -1245,7 +1245,7 @@ namespace Microsoft.ML.Probabilistic.Factors
public static bool modified = true;
public static bool offDiagonal = false;
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace"]/message_doc[@name="ProductAverageConditional(Gaussian, Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace2"]/message_doc[@name="ProductAverageConditional(Gaussian, Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
public static Gaussian ProductAverageConditional([NoInit] Gaussian Product, Gaussian A, Gaussian B, Gaussian to_A, Gaussian to_B)
{
if (Product.IsUniform())
@ -1317,7 +1317,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return GaussianOp.GaussianFromAlphaBeta(Product, dlogz, -ddlogz, true);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace"]/message_doc[@name="AAverageConditional(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace2"]/message_doc[@name="AAverageConditional(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
public static Gaussian AAverageConditional([SkipIfUniform] Gaussian Product, [NoInit] Gaussian A, Gaussian B, Gaussian to_A)
{
if (A.IsPointMass)
@ -1374,13 +1374,13 @@ namespace Microsoft.ML.Probabilistic.Factors
return Gaussian.FromNatural(r * ahat + ga, r);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace"]/message_doc[@name="BAverageConditional(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace2"]/message_doc[@name="BAverageConditional(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
public static Gaussian BAverageConditional([SkipIfUniform] Gaussian Product, Gaussian A, [NoInit] Gaussian B, Gaussian to_B)
{
return AAverageConditional(Product, B, A, to_B);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace"]/message_doc[@name="LogAverageFactor(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace2"]/message_doc[@name="LogAverageFactor(Gaussian, Gaussian, Gaussian, Gaussian)"]/*'/>
public static double LogAverageFactor([SkipIfUniform] Gaussian Product, [SkipIfUniform] Gaussian A, [SkipIfUniform] Gaussian B, Gaussian to_A)
{
double mx, vx;
@ -1394,7 +1394,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return Gaussian.GetLogProb(mx, ahat * mb, vx + ahat * ahat * vb) + A.GetLogProb(ahat) - Apost.GetLogProb(ahat);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace"]/message_doc[@name="LogEvidenceRatio(Gaussian, Gaussian, Gaussian)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianProductOp_Laplace2"]/message_doc[@name="LogEvidenceRatio(Gaussian, Gaussian, Gaussian)"]/*'/>
[Skip]
public static double LogEvidenceRatio([SkipIfUniform] Gaussian Product, [SkipIfUniform] Gaussian A, [SkipIfUniform] Gaussian B)
{
@ -2193,7 +2193,7 @@ namespace Microsoft.ML.Probabilistic.Factors
[Quality(QualityBand.Mature)]
public static class GaussianRatioVmpOp
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianRatioVmpOp"]/message_doc[@name="AverageLogFactor()"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianRatioVmpOp"]/message_doc[@name="AverageLogFactor(Gaussian)"]/*'/>
/// <remarks><para>
/// Variational Message Passing does not support a Ratio factor with fixed output or random denominator
/// </para></remarks>

Просмотреть файл

@ -56,7 +56,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return Gamma.FromShapeAndRate(Product.Shape, Product.Rate * B);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GammaProductOp"]/message_doc[@name="AAverageConditional(Gamma, double, GammaPower)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GammaProductOp"]/message_doc[@name="AAverageConditional(GammaPower, double, GammaPower)"]/*'/>
public static GammaPower AAverageConditional([SkipIfUniform] GammaPower Product, double B, GammaPower result)
{
if (Product.IsPointMass)
@ -144,7 +144,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return AAverageConditional(Product, A);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GammaProductOp"]/message_doc[@name="BAverageConditional(Gamma, double, GammaPower)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GammaProductOp"]/message_doc[@name="BAverageConditional(GammaPower, double, GammaPower)"]/*'/>
public static GammaPower BAverageConditional([SkipIfUniform] GammaPower Product, double A, GammaPower result)
{
return AAverageConditional(Product, A, result);
@ -773,7 +773,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return AAverageLogarithm(Product, A);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GammaProductVmpOp"]/message_doc[@name="BAverageLogarithm(Gamma, double, GammaPower)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GammaProductVmpOp"]/message_doc[@name="BAverageLogarithm(GammaPower, double, GammaPower)"]/*'/>
public static GammaPower BAverageLogarithm([SkipIfUniform] GammaPower Product, double A, GammaPower result)
{
return AAverageLogarithm(Product, A, result);

Просмотреть файл

@ -41,7 +41,7 @@ namespace Microsoft.ML.Probabilistic.Factors
Vector resultLogProb = PiecewiseVector.Constant(char.MaxValue + 1, double.NegativeInfinity);
StringAutomaton probFunc = str.GetWorkspaceOrPoint();
StringAutomaton.EpsilonClosure startEpsilonClosure = probFunc.Start.GetEpsilonClosure();
StringAutomaton.EpsilonClosure startEpsilonClosure = new Automaton<string, char, DiscreteChar, StringManipulator, StringAutomaton>.EpsilonClosure(probFunc, probFunc.Start);
for (int stateIndex = 0; stateIndex < startEpsilonClosure.Size; ++stateIndex)
{
StringAutomaton.State state = startEpsilonClosure.GetStateByIndex(stateIndex);
@ -51,7 +51,7 @@ namespace Microsoft.ML.Probabilistic.Factors
if (!transition.IsEpsilon)
{
StringAutomaton.State destState = probFunc.States[transition.DestinationStateIndex];
StringAutomaton.EpsilonClosure destStateClosure = destState.GetEpsilonClosure();
StringAutomaton.EpsilonClosure destStateClosure = new Automaton<string, char, DiscreteChar, StringManipulator, StringAutomaton>.EpsilonClosure(probFunc, destState);
if (!destStateClosure.EndWeight.IsZero)
{
Weight weight = Weight.Product(stateLogWeight, transition.Weight, destStateClosure.EndWeight);

Просмотреть файл

@ -414,7 +414,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DerivedVariableOp"]/message_doc[@name="LogEvidenceRatio()"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DerivedVariableOp"]/message_doc[@name="LogEvidenceRatio{T}(T)"]/*'/>
[Skip]
public static double LogEvidenceRatio<T>(T use)
{

Просмотреть файл

@ -10,10 +10,11 @@ namespace Microsoft.ML.Probabilistic.Factors
using Microsoft.ML.Probabilistic.Math;
using Microsoft.ML.Probabilistic.Factors.Attributes;
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOpBase"]/doc/*'/>
[Quality(QualityBand.Preview)]
public class VariablePointOpBase
{
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp{T}"]/message_doc[@name="LogEvidenceRatio{TDist}(TDist, TDist, TDist)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOpBase"]/message_doc[@name="LogEvidenceRatio{TDist}(TDist, TDist, TDist)"]/*'/>
/// <typeparam name="TDist">The type of the marginal of the variable.</typeparam>
public static double LogEvidenceRatio<TDist>(TDist use, TDist def, TDist to_marginal)
where TDist : CanGetLogAverageOf<TDist>
@ -22,7 +23,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return def.GetLogAverageOf(use) - use.GetLogAverageOf(to_marginal);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp{T}"]/message_doc[@name="MarginalAverageConditionalInit{TDist}(TDist)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOpBase"]/message_doc[@name="MarginalAverageConditionalInit{TDist}(TDist)"]/*'/>
/// <typeparam name="TDist">The type of the marginal of the variable.</typeparam>
[Skip]
public static TDist MarginalAverageConditionalInit<TDist>([IgnoreDependency] TDist def)
@ -31,35 +32,35 @@ namespace Microsoft.ML.Probabilistic.Factors
return (TDist)def.Clone();
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp{T}"]/message_doc[@name="UseAverageConditional{TDist}(TDist)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOpBase"]/message_doc[@name="UseAverageConditional{TDist}(TDist)"]/*'/>
/// <typeparam name="TDist">The type of the marginal of the variable.</typeparam>
public static TDist UseAverageConditional<TDist>([IsReturned] TDist to_marginal)
{
return to_marginal;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp{T}"]/message_doc[@name="DefAverageConditional{TDist}(TDist)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOpBase"]/message_doc[@name="DefAverageConditional{TDist}(TDist)"]/*'/>
/// <typeparam name="TDist">The type of the marginal of the variable.</typeparam>
public static TDist DefAverageConditional<TDist>([IsReturned] TDist to_marginal)
{
return to_marginal;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp{T}"]/message_doc[@name="AverageLogFactor()"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOpBase"]/message_doc[@name="AverageLogFactor()"]/*'/>
[Skip]
public static double AverageLogFactor()
{
return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp{T}"]/message_doc[@name="UseAverageLogarithm{TDist}(TDist)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOpBase"]/message_doc[@name="UseAverageLogarithm{TDist}(TDist)"]/*'/>
/// <typeparam name="TDist">The type of the marginal of the variable.</typeparam>
public static TDist UseAverageLogarithm<TDist>([IsReturned] TDist to_marginal)
{
return to_marginal;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp{T}"]/message_doc[@name="DefAverageLogarithm{TDist}(TDist)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOpBase"]/message_doc[@name="DefAverageLogarithm{TDist}(TDist)"]/*'/>
/// <typeparam name="TDist">The type of the marginal of the variable.</typeparam>
public static TDist DefAverageLogarithm<TDist>([IsReturned] TDist to_marginal)
{
@ -94,11 +95,12 @@ namespace Microsoft.ML.Probabilistic.Factors
}
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_Mean{T}"]/doc/*'/>
[FactorMethod(typeof(Factor), "VariablePoint<>", Default = false)]
[Quality(QualityBand.Preview)]
public class VariablePointOp_Mean<T> : VariablePointOpBase
{
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp{T}"]/message_doc[@name="MarginalAverageConditional{TDist}(TDist, TDist, TDist)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_Mean{T}"]/message_doc[@name="MarginalAverageConditional{TDist}(TDist, TDist, TDist)"]/*'/>
/// <typeparam name="TDist">The type of the marginal of the variable.</typeparam>
[SkipIfAllUniform]
public static TDist MarginalAverageConditional<TDist>([NoInit] TDist use, TDist def, TDist result)
@ -109,7 +111,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp{T}"]/message_doc[@name="MarginalAverageLogarithm{TDist}(TDist, TDist, TDist)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_Mean{T}"]/message_doc[@name="MarginalAverageLogarithm{TDist}(TDist, TDist, TDist)"]/*'/>
/// <typeparam name="TDist">The type of the marginal of the variable.</typeparam>
[SkipIfAllUniform]
public static TDist MarginalAverageLogarithm<TDist>([NoInit] TDist use, TDist def, TDist result)
@ -152,7 +154,6 @@ namespace Microsoft.ML.Probabilistic.Factors
if (double.IsNaN(currPoint)) throw new ArgumentException("currPoint is NaN");
if (double.IsInfinity(currPoint)) throw new ArgumentException("currPoint is infinite");
if (double.IsNaN(currDeriv)) throw new ArgumentException("currDeriv is NaN");
if (double.IsInfinity(currDeriv)) throw new ArgumentException("currDeriv is infinite");
if (hasPrevious)
{
double prevStep = currPoint - prevPoint;
@ -270,7 +271,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return buffer;
}
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_Secant"]/message_doc[@name="MarginalAverageConditional{TDist}(TDist, TDist, TDist)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_Rprop"]/message_doc[@name="MarginalAverageConditional(Gaussian, Gaussian, RpropBufferData, Gaussian)"]/*'/>
public static Gaussian MarginalAverageConditional([IgnoreDependency] Gaussian use, [IgnoreDependency] Gaussian def, [RequiredArgument] RpropBufferData buffer, Gaussian result)
{
result.Point = buffer.nextPoint;
@ -320,7 +321,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return bufferTG;
}
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_Secant"]/message_doc[@name="MarginalAverageConditional{TDist}(TDist, TDist, TDist)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_RpropTruncatedGaussian"]/message_doc[@name="MarginalAverageConditional(TruncatedGaussian, TruncatedGaussian, RpropBufferData, TruncatedGaussian)"]/*'/>
public static TruncatedGaussian MarginalAverageConditional([IgnoreDependency] TruncatedGaussian use, [IgnoreDependency] TruncatedGaussian def, [RequiredArgument] RpropBufferData bufferTG, TruncatedGaussian result)
{
result.Point = bufferTG.nextPoint;
@ -389,7 +390,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return bufferTGa;
}
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_Secant"]/message_doc[@name="MarginalAverageConditional{TDist}(TDist, TDist, TDist)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_RpropTruncatedGamma"]/message_doc[@name="MarginalAverageConditional(TruncatedGamma, TruncatedGamma, RpropBufferData, TruncatedGamma)"]/*'/>
public static TruncatedGamma MarginalAverageConditional([IgnoreDependency] TruncatedGamma use, [IgnoreDependency] TruncatedGamma def, [RequiredArgument] RpropBufferData bufferTGa, TruncatedGamma result)
{
if (VariablePointOp_RpropGamma.UseMean)
@ -460,7 +461,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return buffer0;
}
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_Secant"]/message_doc[@name="MarginalAverageConditional{TDist}(TDist, TDist, TDist)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_RpropGamma"]/message_doc[@name="MarginalAverageConditional(Gamma, Gamma, RpropBufferData, Gamma)"]/*'/>
public static Gamma MarginalAverageConditional([IgnoreDependency] Gamma use, [IgnoreDependency] Gamma def, [RequiredArgument] RpropBufferData buffer0, Gamma result)
{
if (UseMean)
@ -513,7 +514,7 @@ namespace Microsoft.ML.Probabilistic.Factors
return bufferBeta;
}
// /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_Secant"]/message_doc[@name="MarginalAverageConditional{TDist}(TDist, TDist, TDist)"]/*'/>
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_RpropBeta"]/message_doc[@name="MarginalAverageConditional(Beta, Beta, RpropBufferData, Beta)"]/*'/>
public static Beta MarginalAverageConditional([IgnoreDependency] Beta use, [IgnoreDependency] Beta def, [RequiredArgument] RpropBufferData bufferBeta, Beta result)
{
result.Point = bufferBeta.nextPoint;

Просмотреть файл

@ -0,0 +1,331 @@
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the MIT license.
# See the LICENSE file in the project root for more information.
from __future__ import division
from sympy import zeta, evalf, bernoulli, symbols, Poly, series, factorial, factorial2, S, log, exp, gamma, digamma, sqrt
# configuration
decimal_precision = 30
evalf_inner_precision = 500
gamma_at_2_series_length = 26
gamma_at_2_variable_name = "dx"
gamma_at_2_indent = " "
digamma_at_1_series_length = 2
digamma_at_1_variable_name = "x"
digamma_at_1_indent = " "
digamma_at_2_series_length = 26
digamma_at_2_variable_name = "dx"
digamma_at_2_indent = " "
digamma_asymptotic_series_length = 8
digamma_asymptotic_variable_name = "invX2"
digamma_asymptotic_indent = " "
trigamma_at_1_series_length = 2
trigamma_at_1_variable_name = "x"
trigamma_at_1_indent = " "
trigamma_asymptotic_series_length = 9
trigamma_asymptotic_variable_name = "invX2"
trigamma_asymptotic_indent = " "
tetragamma_at_1_series_length = 2
tetragamma_at_1_variable_name = "x"
tetragamma_at_1_indent = " "
tetragamma_asymptotic_series_length = 9
tetragamma_asymptotic_variable_name = "invX2"
tetragamma_asymptotic_indent = " "
gammaln_asymptotic_series_length = 7
gammaln_asymptotic_variable_name = "invX2"
gammaln_asymptotic_indent = " "
log_1_plus_series_length = 13
log_1_plus_variable_name = "x"
log_1_plus_indent = " "
log_1_minus_series_length = 11
log_1_minus_variable_name = "expx"
log_1_minus_indent = " "
x_minus_log_1_plus_series_length = 7
x_minus_log_1_plus_variable_name = "x"
x_minus_log_1_plus_indent = " "
exp_minus_1_series_length = 9
exp_minus_1_variable_name = "x"
exp_minus_1_indent = " "
exp_minus_1_ratio_minus_1_ratio_minus_half_series_length = 13
exp_minus_1_ratio_minus_1_ratio_minus_half_variable_name = "x"
exp_minus_1_ratio_minus_1_ratio_minus_half_indent = " "
log_exp_minus_1_ratio_series_length = 5
log_exp_minus_1_ratio_variable_name = "x"
log_exp_minus_1_ratio_indent = " "
normcdfln_asymptotic_series_length = 8
normcdfln_asymptotic_variable_name = "z"
normcdfln_asymptotic_indent = " "
one_minus_sqrt_one_minus_series_length = 5
one_minus_sqrt_one_minus_variable_name = "x"
one_minus_sqrt_one_minus_indent = " "
reciprocal_factorial_minus_1_series_length = 17
reciprocal_factorial_minus_1_variable_name = "x"
reciprocal_factorial_minus_1_indent = " "
def print_heading_comment(indent, header):
print(f"{indent}// Truncated series {header}")
print(f"{indent}// Generated automatically by /src/Tools/GenerateSeries/GenerateSeries.py")
def format_real_coefficient(coefficient):
return str(coefficient)
def print_polynomial_with_real_coefficients(varname, coefficients, indent):
    """Print a polynomial with real coefficients in Horner form as C# source.

    coefficients[i] multiplies varname**i.  Zero coefficients are skipped
    (consecutive "varname * " factors accumulate instead) and trailing zero
    coefficients are dropped entirely.
    """
    if len(coefficients) <= 1:
        # Constant polynomial: single line, no parentheses.
        print(f"{indent}{format_real_coefficient(coefficients[0])}")
        return
    if coefficients[0] != 0.0:
        # Constant term goes first, on its own line.
        print(f"{indent}{format_real_coefficient(coefficients[0])} +")
    # Highest-order nonzero coefficient terminates the nesting.
    # NOTE(review): assumes at least one nonzero coefficient when
    # len(coefficients) > 1; an all-zero list would underflow the index.
    last_non_zero_idx = len(coefficients) - 1
    while coefficients[last_non_zero_idx] == 0.0:
        last_non_zero_idx = last_non_zero_idx - 1
    idx = 1
    parentheses = 0
    print(indent, end='')
    while idx < last_non_zero_idx:
        # One Horner step: multiply the remainder by varname.
        print(f"{varname} * ", end='')
        if coefficients[idx] != 0.0:
            # Open a "(c + ..." group; all groups are closed at the end.
            print(f"({format_real_coefficient(coefficients[idx])} +")
            print(indent, end='')
            parentheses = parentheses + 1
        idx = idx + 1
    # Innermost factor: the last nonzero coefficient, with no trailing "+".
    print(f"{varname} * {format_real_coefficient(coefficients[last_non_zero_idx])}", end='')
    for i in range(0, parentheses):
        print(")", end='')
    print()
def format_rational_coefficient(coefficient):
    """Render a rational coefficient as a C# double expression, e.g. "1.0 / 3.0"."""
    text = str(coefficient)
    text = text.replace("/", ".0 / ")
    return text + ".0"
def print_polynomial_with_rational_coefficients(varname, coefficients, indent):
    """Print a polynomial with rational coefficients in Horner form as C# source.

    Same layout as print_polynomial_with_real_coefficients, but coefficients
    are emitted as "p.0 / q.0" division expressions and the last nonzero
    coefficient is handled inside the main loop via the suffix variable.
    """
    if len(coefficients) <= 1:
        # Constant polynomial: single line, no parentheses.
        print(f"{indent}{format_rational_coefficient(coefficients[0])}")
        return
    if coefficients[0] != 0:
        # Constant term goes first, on its own line.
        # NOTE(review): if every higher-order coefficient were zero this
        # would leave a dangling "+"; callers never pass such input.
        print(f"{indent}{format_rational_coefficient(coefficients[0])} +")
    # Highest-order nonzero coefficient terminates the nesting.
    last_non_zero_idx = len(coefficients) - 1
    while coefficients[last_non_zero_idx] == 0:
        last_non_zero_idx = last_non_zero_idx - 1
    idx = 1
    parentheses = 0
    print(indent, end='')
    while idx <= last_non_zero_idx:
        # One Horner step: multiply the remainder by varname.
        print(f"{varname} * ", end='')
        if coefficients[idx] != 0:
            # The innermost (last) coefficient gets no trailing "+".
            if idx < last_non_zero_idx:
                suffix = ' +'
            else:
                suffix = ''
            print(f"({format_rational_coefficient(coefficients[idx])}{suffix}")
            print(indent, end='')
            parentheses = parentheses + 1
        idx = idx + 1
    for i in range(0, parentheses):
        print(")", end='')
    print()
def gamma_at_2_coefficient(k):
    """k-th Taylor coefficient of the series for Gamma at 2, evaluated numerically."""
    if k == 0:
        return 0.0
    sign = (-1) ** (k + 1)
    value = sign * (zeta(k + 1) - 1) / (k + 1)
    return value.evalf(decimal_precision, maxn=evalf_inner_precision)
def digamma_at_1_coefficient(k):
    """k-th Taylor coefficient of the series for digamma at 1, evaluated numerically."""
    if k == 0:
        return digamma(1).evalf(decimal_precision, maxn=evalf_inner_precision)
    term = (-1) ** (k + 1) * zeta(k + 1)
    return term.evalf(decimal_precision, maxn=evalf_inner_precision)
def digamma_at_2_coefficient(k):
    """k-th Taylor coefficient of the series for digamma at 2, evaluated numerically."""
    if k == 0:
        return 0.0
    term = (-1) ** (k + 1) * (zeta(k + 1) - 1)
    return term.evalf(decimal_precision, maxn=evalf_inner_precision)
def digamma_asymptotic_coefficient(k):
    """k-th coefficient of the digamma asymptotic expansion (exact rational)."""
    return 0.0 if k == 0 else bernoulli(2 * k) / (2 * k)
def trigamma_at_1_coefficient(k):
    """k-th Taylor coefficient of the series for trigamma at 1, evaluated numerically."""
    term = (-1) ** k * (k + 1) * zeta(k + 2)
    return term.evalf(decimal_precision, maxn=evalf_inner_precision)
def trigamma_asymptotic_coefficient(k):
    """k-th coefficient of the trigamma asymptotic expansion (exact rational)."""
    return 0.0 if k == 0 else bernoulli(2 * k)
def tetragamma_at_1_coefficient(k):
    """k-th Taylor coefficient of the series for tetragamma at 1, evaluated numerically."""
    term = (-1) ** (k + 1) * (k + 1) * (k + 2) * zeta(k + 3)
    return term.evalf(decimal_precision, maxn=evalf_inner_precision)
def tetragamma_asymptotic_coefficient(k):
    """k-th coefficient of the tetragamma asymptotic expansion (exact rational)."""
    return 0.0 if k == 0 else -(2 * k - 1) * bernoulli(2 * k - 2)
def gammaln_asymptotic_coefficient(k):
    """k-th coefficient of the GammaLn asymptotic expansion (exact rational)."""
    # With n = k + 1 this is bernoulli(2n) / (2n * (2n - 1)),
    # identical to bernoulli(2k + 2) / (2 * (k + 1) * (2k + 1)).
    n = k + 1
    return bernoulli(2 * n) / (2 * n * (2 * n - 1))
def log_1_plus_coefficient(k):
    """k-th Taylor coefficient of log(1 + x): 0, 1, -1/2, 1/3, -1/4, ..."""
    if k == 0:
        return 0
    sign = -1 if k % 2 == 0 else 1
    return S(sign) / k
def log_1_minus_coefficient(k):
    """k-th Taylor coefficient of log(1 - x): 0, -1, -1/2, -1/3, ..."""
    if k == 0:
        return S(0)
    return -S(1) / k
def x_minus_log_1_plus_coefficient(k):
    """k-th Taylor coefficient of x - log(1 + x): 0, 0, 1/2, -1/3, 1/4, ..."""
    if k <= 1:
        return S(0)
    sign = 1 if k % 2 == 0 else -1
    return S(sign) / k
def exp_minus_1_coefficient(k):
    """k-th Taylor coefficient of exp(x) - 1: 0, then 1/k!."""
    return S(0) if k == 0 else S(1) / factorial(k)
def exp_minus_1_ratio_minus_1_ratio_minus_half_coefficient(k):
    """k-th Taylor coefficient of ((exp(x) - 1) / x - 1) / x - 1/2: 0, then 1/(k+2)!."""
    return S(0) if k == 0 else S(1) / factorial(k + 2)
def get_log_exp_minus_1_ratio_coefficients(count):
    """Taylor coefficients of log((exp(x) - 1) / x) around 0, lowest order first."""
    x = symbols('x')
    truncated = log((exp(x) - 1) / x).series(x, 0, count).removeO()
    return list(reversed(Poly(truncated).all_coeffs()))
# Formula for mth coefficient of the normcdfln asymptotic:
# \sum_{n=1}^m (-1)^{n+m+1} / n * \sum_{l1, l2, ..., ln \in N, l1 + l2 + ... + ln = m} (2 * l1 - 1)!! * (2 * l2 - 1)!! * ... * (2 * ln - 1)!!
# Can be obtained by composing the Taylor expansion for log(1 + x) and asymptotic expansion for erfc
def normcdfln_asymptotic_coefficient(m):
    """m-th coefficient of the normcdfln asymptotic expansion.

    Implements the formula in the comment above: an alternating sum over n
    of sums over all compositions (l1, ..., ln) of m into n positive parts,
    each contributing the product of double factorials (2*li - 1)!!.
    """
    if m == 0:
        return 0
    def next(v):
        # Advance v to the next composition in the enumeration order;
        # returns False once all compositions have been visited.
        idx = 1
        while idx < len(v) and v[idx] == 0:
            idx = idx + 1
        if idx == len(v): return False
        v0 = v[0]
        v[0] = 0
        v[idx] = v[idx] - 1
        v[idx - 1] = v0 + 1
        return True
    # n = 1 term of the outer sum: the single part l1 = m.
    result = S((-1)**(2 + m)) * factorial2(2 * m - 1)
    for n in range(2,m+1):
        coef = S((-1)**(n + 1 + m)) / n
        # deltas[i] = l_{i+1} - 1 >= 0, so sum(deltas) == m - n enumerates
        # all compositions of m into n positive parts.
        deltas = []
        for k in range(0, n):
            deltas.append(0)
        deltas[-1] = m - n
        accSum = S(0)
        while True:
            accProd = S(1)
            for delta in deltas:
                accProd = accProd * factorial2(2 * (delta + 1) - 1)
            accSum = accSum + accProd
            if not next(deltas):
                break
        result = result + coef * accSum
    return result
def get_one_minus_sqrt_one_minus_coefficients(count):
    """Taylor coefficients of 1 - sqrt(1 - x) around 0, lowest order first."""
    x = symbols('x')
    truncated = (1 - sqrt(1 - x)).series(x, 0, count).removeO()
    return list(reversed(Poly(truncated).all_coeffs()))
def get_reciprocal_factorial_minus_1_coefficients(count):
    """Taylor coefficients of 1/Gamma(x + 1) - 1 around 0, lowest order first,
    evaluated numerically at the configured precision."""
    x = symbols('x')
    truncated = (1 / gamma(x + 1) - 1).series(x, 0, count).removeO()
    evaluated = truncated.evalf(decimal_precision, maxn=evalf_inner_precision)
    return list(reversed(Poly(evaluated, x).all_coeffs()))
def main():
    """Print every truncated series as ready-to-paste C# code fragments.

    Refactor: the original body repeated the same three-line stanza
    (heading, coefficient list, printer call) eighteen times; it is now a
    single data-driven loop.  Order and output are unchanged and are
    significant, since the result is copied verbatim into the C# source.
    """
    real = print_polynomial_with_real_coefficients
    rational = print_polynomial_with_rational_coefficients
    # Each entry: (header, C# variable name, indent, coefficients, printer).
    series_table = [
        ("1: Gamma at 2", gamma_at_2_variable_name, gamma_at_2_indent,
         [gamma_at_2_coefficient(k) for k in range(0, gamma_at_2_series_length)], real),
        ("2: Digamma at 1", digamma_at_1_variable_name, digamma_at_1_indent,
         [digamma_at_1_coefficient(k) for k in range(0, digamma_at_1_series_length)], real),
        ("3: Digamma at 2", digamma_at_2_variable_name, digamma_at_2_indent,
         [digamma_at_2_coefficient(k) for k in range(0, digamma_at_2_series_length)], real),
        ("4: Digamma asymptotic", digamma_asymptotic_variable_name, digamma_asymptotic_indent,
         [digamma_asymptotic_coefficient(k) for k in range(0, digamma_asymptotic_series_length)], rational),
        ("5: Trigamma at 1", trigamma_at_1_variable_name, trigamma_at_1_indent,
         [trigamma_at_1_coefficient(k) for k in range(0, trigamma_at_1_series_length)], real),
        ("6: Trigamma asymptotic", trigamma_asymptotic_variable_name, trigamma_asymptotic_indent,
         [trigamma_asymptotic_coefficient(k) for k in range(0, trigamma_asymptotic_series_length)], rational),
        ("7: Tetragamma at 1", tetragamma_at_1_variable_name, tetragamma_at_1_indent,
         [tetragamma_at_1_coefficient(k) for k in range(0, tetragamma_at_1_series_length)], real),
        ("8: Tetragamma asymptotic", tetragamma_asymptotic_variable_name, tetragamma_asymptotic_indent,
         [tetragamma_asymptotic_coefficient(k) for k in range(0, tetragamma_asymptotic_series_length)], rational),
        ("9: GammaLn asymptotic", gammaln_asymptotic_variable_name, gammaln_asymptotic_indent,
         [gammaln_asymptotic_coefficient(k) for k in range(0, gammaln_asymptotic_series_length)], rational),
        ("10: log(1 + x)", log_1_plus_variable_name, log_1_plus_indent,
         [log_1_plus_coefficient(k) for k in range(0, log_1_plus_series_length)], rational),
        ("11: log(1 - x)", log_1_minus_variable_name, log_1_minus_indent,
         [log_1_minus_coefficient(k) for k in range(0, log_1_minus_series_length)], rational),
        ("12: x - log(1 + x)", x_minus_log_1_plus_variable_name, x_minus_log_1_plus_indent,
         [x_minus_log_1_plus_coefficient(k) for k in range(0, x_minus_log_1_plus_series_length)], rational),
        ("13: exp(x) - 1", exp_minus_1_variable_name, exp_minus_1_indent,
         [exp_minus_1_coefficient(k) for k in range(0, exp_minus_1_series_length)], rational),
        ("14: ((exp(x) - 1) / x - 1) / x - 0.5",
         exp_minus_1_ratio_minus_1_ratio_minus_half_variable_name,
         exp_minus_1_ratio_minus_1_ratio_minus_half_indent,
         [exp_minus_1_ratio_minus_1_ratio_minus_half_coefficient(k)
          for k in range(0, exp_minus_1_ratio_minus_1_ratio_minus_half_series_length)], rational),
        ("15: log(exp(x) - 1) / x", log_exp_minus_1_ratio_variable_name, log_exp_minus_1_ratio_indent,
         get_log_exp_minus_1_ratio_coefficients(log_exp_minus_1_ratio_series_length), rational),
        ("16: normcdfln asymptotic", normcdfln_asymptotic_variable_name, normcdfln_asymptotic_indent,
         [normcdfln_asymptotic_coefficient(k) for k in range(0, normcdfln_asymptotic_series_length)], rational),
        ("17: 1 - sqrt(1 - x)", one_minus_sqrt_one_minus_variable_name, one_minus_sqrt_one_minus_indent,
         get_one_minus_sqrt_one_minus_coefficients(one_minus_sqrt_one_minus_series_length), rational),
        ("18: Reciprocal factorial minus 1", reciprocal_factorial_minus_1_variable_name,
         reciprocal_factorial_minus_1_indent,
         get_reciprocal_factorial_minus_1_coefficients(reciprocal_factorial_minus_1_series_length), real),
    ]
    for header, variable_name, indent, coefficients, print_polynomial in series_table:
        print_heading_comment(indent, header)
        print_polynomial(variable_name, coefficients, indent)
if __name__ == '__main__': main()

Просмотреть файл

@ -0,0 +1,36 @@
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>d7562cc3-d48a-481d-b40c-07ee51ebbf50</ProjectGuid>
<ProjectHome>
</ProjectHome>
<StartupFile>GenerateSeries.py</StartupFile>
<SearchPath>
</SearchPath>
<WorkingDirectory>.</WorkingDirectory>
<OutputPath>.</OutputPath>
<Name>Tools.GenerateSeries</Name>
<RootNamespace>GenerateSeries</RootNamespace>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
<DebugSymbols>true</DebugSymbols>
<EnableUnmanagedDebugging>false</EnableUnmanagedDebugging>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)' == 'Release' ">
<DebugSymbols>true</DebugSymbols>
<EnableUnmanagedDebugging>false</EnableUnmanagedDebugging>
</PropertyGroup>
<ItemGroup>
<Compile Include="GenerateSeries.py" />
</ItemGroup>
<Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.targets" />
<!-- Uncomment the CoreCompile target to enable the Build command in
Visual Studio and specify your pre- and post-build commands in
the BeforeBuild and AfterBuild targets below. -->
<!--<Target Name="CoreCompile" />-->
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
</Project>

Просмотреть файл

@ -67,19 +67,16 @@ namespace Microsoft.ML.Probabilistic.Tools.PrepareSource
while ((line = reader.ReadLine()) != null)
{
++lineNumber;
//// For simplicity this code assumes that the line with <include> has no other content on it.
//// This is currently the case for our codebase.
int indexOfDocStart = line.IndexOf("/// <include", StringComparison.InvariantCulture);
if (indexOfDocStart == -1)
string trimmedLine = line.Trim();
if (!trimmedLine.StartsWith("/// <include", StringComparison.InvariantCulture))
{
// Not a line with an include directive
writer.WriteLine(line);
continue;
}
string includeString = line.Substring(indexOfDocStart + "/// ".Length);
string includeString = trimmedLine.Substring("/// ".Length);
var includeDoc = XDocument.Parse(includeString);
XAttribute fileAttribute = includeDoc.Root.Attribute("file");
@ -104,6 +101,7 @@ namespace Microsoft.ML.Probabilistic.Tools.PrepareSource
}
else
{
int indexOfDocStart = line.IndexOf("/// <include", StringComparison.InvariantCulture);
foreach (XElement docElement in docElements)
{
string[] docElementStringLines = docElement.ToString().Split(new[] { Environment.NewLine }, StringSplitOptions.None);

Просмотреть файл

@ -0,0 +1,8 @@
{
"profiles": {
"Tools.PrepareSource": {
"commandName": "Project",
"commandLineArgs": "..\\..\\..\\..\\..\\..\\InferNet_Copy_Temp\\src\\Runtime\\Factors"
}
}
}

Просмотреть файл

@ -46,6 +46,12 @@
<DefineConstants>$(DefineConstants);DEBUG</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU' OR '$(Configuration)|$(Platform)'=='ReleaseFull|AnyCPU' OR '$(Configuration)|$(Platform)'=='ReleaseCore|AnyCPU'">
<DebugSymbols>true</DebugSymbols>
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\Compiler\Compiler.csproj" />
<ProjectReference Include="..\Runtime\Runtime.csproj" />

Просмотреть файл

@ -41,6 +41,7 @@
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU' OR '$(Configuration)|$(Platform)'=='ReleaseFull|AnyCPU' OR '$(Configuration)|$(Platform)'=='ReleaseCore|AnyCPU'">
<DebugType>pdbonly</DebugType>
<DebugSymbols>true</DebugSymbols>
<Optimize>true</Optimize>
</PropertyGroup>

Просмотреть файл

@ -44,6 +44,7 @@
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU' OR '$(Configuration)|$(Platform)'=='ReleaseFull|AnyCPU' OR '$(Configuration)|$(Platform)'=='ReleaseCore|AnyCPU'">
<DebugSymbols>true</DebugSymbols>
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
</PropertyGroup>

Просмотреть файл

@ -41,6 +41,12 @@
<DefineConstants>$(DefineConstants);DEBUG</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU' OR '$(Configuration)|$(Platform)'=='ReleaseFull|AnyCPU' OR '$(Configuration)|$(Platform)'=='ReleaseCore|AnyCPU'">
<DebugType>pdbonly</DebugType>
<DebugSymbols>true</DebugSymbols>
<Optimize>true</Optimize>
</PropertyGroup>
<ItemGroup>
<Compile Include="..\..\src\Shared\SharedAssemblyFileVersion.cs" />
<Compile Include="..\..\src\Shared\SharedAssemblyInfo.cs" />

Просмотреть файл

@ -33,6 +33,23 @@ namespace Microsoft.ML.Probabilistic.Tests
public double[,] pairs;
};
[Fact]
public void ToStringExactTest()
{
Assert.Equal("0", MMath.ToStringExact(0));
Assert.Equal("NaN", MMath.ToStringExact(double.NaN));
Assert.Equal(double.MaxValue, double.Parse(MMath.ToStringExact(double.MaxValue)));
Assert.Equal(double.MinValue, double.Parse(MMath.ToStringExact(double.MinValue)));
Assert.Equal("10.5", MMath.ToStringExact(10.5));
Assert.Equal(10.05, double.Parse(MMath.ToStringExact(10.05)));
Assert.Equal("0.100000000000000002505909183520875968569614680770370524992534231990046604318405148467630281218195010089496230627027825414891031146499880413081224609160619018271942662793458427551041478278701507022263926060379361392435977509403014386614147912551359088259101734169222292122040491862182202915561954185941852588326204092831631787205015401996986616948980410676557942431921652541808732242554300585073938340203330993157646467433638479065531661724812599598594906293782493759617177861888792970476530542335134710418229637566637950767497147854236589795152044892049176025289756709261767081824924720105632337755616538050643653812583050224659631159300563236507929025398878153811554013986009587978081167432804936359631140419153283449560376539011485874652862548828125e-299", MMath.ToStringExact(1e-300));
Assert.Equal("0.988131291682493088353137585736442744730119605228649528851171365001351014540417503730599672723271984759593129390891435461853313420711879592797549592021563756252601426380622809055691634335697964207377437272113997461446100012774818307129968774624946794546339230280063430770796148252477131182342053317113373536374079120621249863890543182984910658610913088802254960259419999083863978818160833126649049514295738029453560318710477223100269607052986944038758053621421498340666445368950667144166486387218476578691673612021202301233961950615668455463665849580996504946155275185449574931216955640746893939906729403594535543517025132110239826300978220290207572547633450191167477946719798732961988232841140527418055848553508913045817507736501283943653106689453125e-322", MMath.ToStringExact(1e-322));
Assert.Equal("0.4940656458412465441765687928682213723650598026143247644255856825006755072702087518652998363616359923797965646954457177309266567103559397963987747960107818781263007131903114045278458171678489821036887186360569987307230500063874091535649843873124733972731696151400317153853980741262385655911710266585566867681870395603106249319452715914924553293054565444011274801297099995419319894090804165633245247571478690147267801593552386115501348035264934720193790268107107491703332226844753335720832431936092382893458368060106011506169809753078342277318329247904982524730776375927247874656084778203734469699533647017972677717585125660551199131504891101451037862738167250955837389733598993664809941164205702637090279242767544565229087538682506419718265533447265625e-323", MMath.ToStringExact(double.Epsilon));
Assert.Equal(1e-300, double.Parse(MMath.ToStringExact(1e-300)));
Assert.Equal(1e-322, double.Parse(MMath.ToStringExact(1e-322)));
Assert.Equal(double.Epsilon, double.Parse(MMath.ToStringExact(double.Epsilon)));
}
[Fact]
public void GammaSpecialFunctionsTest()
{
@ -48,8 +65,22 @@ namespace Microsoft.ML.Probabilistic.Tests
};
CheckFunctionValues("BesselI", MMath.BesselI, BesselI_pairs);
/* In python mpmath:
from mpmath import *
mp.dps = 500
mp.pretty = True
gamma(mpf('8.5'))
*/
double[,] Gamma_pairs = new double[,]
{
{1e-18, 999999999999999999.422784335 },
{1e-17, 99999999999999999.422784335 },
{1e-16, 9999999999999999.422784335 },
{1e-15, 999999999999999.422784335 },
{1e-14, 99999999999999.422784335 },
{1e-13, 9999999999999.422784335 },
{1e-12, 999999999999.422784335 },
{1e-11, 99999999999.4227843351 },
{System.Math.Pow(0.5,20), 1048575.42278527833494202474 },
{System.Math.Pow(0.5,15), 32767.42281451784694671242432333 },
{System.Math.Pow(0.5,10), 1023.4237493455678303694987182399 },
@ -276,12 +307,44 @@ digamma(mpf('9.5'))
};
CheckFunctionValues("LogisticLn", MMath.LogisticLn, logisticln_pairs);
// In python mpmath:
// log(1+mpf('1e-3'))
/* In python mpmath:
from mpmath import *
mp.dps = 500
mp.pretty = True
log(1+mpf('1e-3'))
*/
double[,] log1plus_pairs = new double[,]
{
{0, 0},
{1e-1, 0.09531017980432486004395212328 },
{6.3e-2, 0.06109509935981087637675438 },
{6.1e-2, 0.059211859631846083 },
{5.5e-2, 0.05354076692802981828874 },
{5.3e-2, 0.051643233151838449580494 },
{5.1e-2, 0.0497420918948140735608097 },
{4.1e-2, 0.0401817896328318316949477 },
{3.1e-2, 0.0305292050348228732504307413911 },
{2.7e-2, 0.0266419309464211781724586 },
{2.5e-2, 0.0246926125903715010143 },
{2.3e-2, 0.0227394869694894293323787753769 },
{2.1e-2, 0.0207825391825285036221723649 },
{2e-2, 0.019802627296179713026029 },
{19e-3, 0.0188217542405877614354916031 },
{17e-3, 0.0168571170664228993186476034 },
{16e-3, 0.0158733491562901492451628 },
{15e-3, 0.014888612493750654835409744978 },
{14e-3, 0.013902905168991420865477877458 },
{13e-3, 0.0129162252665463284388365486353 },
{12e-3, 0.011928570865273801649186 },
{11e-3, 0.0109399400383343638461374275 },
{1e-2, .995033085316808284821535754426e-2},
{9e-3, 0.008959741371471904443146461327328 },
{8e-3, 0.007968169649176873510797339 },
{7e-3, 0.0069756137364252420995222 },
{6e-3, 0.005982071677547463782018873 },
{5e-3, 0.0049875415110390736121022 },
{4e-3, 0.003992021269537452999075117871513754627 },
{3e-3, 0.00299550897979847881161 },
{2e-3, 0.001998002662673056018253771 },
{1.9e-3, 0.001898197283080252703101569277 },
{1.5e-3, 0.00149887612373589185400014886673 },
@ -303,22 +366,42 @@ digamma(mpf('9.5'))
{1e-7, 1e-7 - 0.5e-14 + 1e-21/3 - 1e-28/4},
{1e-8, 1e-8 - 0.5e-16 + 1e-24/3},
{1e-14, 1e-14 - 0.5e-28 + 1e-42/3},
{-1e-2, -0.01005033585350144118},
{-1e-3, -0.0010005003335835335},
{-1e-4, -0.0001000050003333583353335},
{-1e-5, -0.0000100000500003333358333533335},
{-1e-8, -0.00000001000000005000000033333333583333},
{-1e-7, -0.0000001000000050000003333333583333353333335 },
{-1e-6, -0.0000010000005000003333335833335333335 },
{-1e-5, -0.0000100000500003333358333533335},
{-1e-4, -0.0001000050003333583353335},
{-1e-3, -0.0010005003335835335},
{-2e-3, -0.0020020026706730773516511 },
{-3e-3, -0.00300450902029872181325 },
{-4e-3, -0.00400802139753881834879266 },
{-5e-3, -0.0050125418235442820430937 },
{-6e-3, -0.0060180723255630162019349666387 },
{-1e-2, -0.01005033585350144118},
{-2e-2, -0.020202707317519448408045301 },
{-3e-2, -0.0304592074847085459192612876647667 },
{Double.PositiveInfinity, Double.PositiveInfinity},
{Double.NaN, Double.NaN}
};
CheckFunctionValues("Log1Plus", MMath.Log1Plus, log1plus_pairs);
// In python mpmath:
// log(1-exp(mpf('1e-3')))
/* In python mpmath:
from mpmath import *
mp.dps = 500
mp.pretty = True
log(1-exp(mpf('-3')))
*/
double[,] log1MinusExp_pairs = new double[,]
{
{0, Double.NegativeInfinity},
{-1, -0.4586751453870818910216436450673297 },
{-2, -0.145413457868859056972648150099474 },
{-2.5, -0.08565048374203818116996505081 },
{-3, -0.0510691809427015865387237405222592504 },
{-3.5, -0.0306627162554596336513956 },
{-3.75, -0.02379870175015311452567632265 },
{-4, -0.01848544682588656052892134374 },
{-4.5, -0.011171162268296935826765483324276 },
{-5, -0.006760749449488557825921134 },
{-6, -0.00248182936895952777979782 },
{-7, -0.9122979828390684565844296e-3},
@ -344,8 +427,28 @@ digamma(mpf('9.5'))
double[,] expminus1_pairs = new double[,]
{
{0, 0},
{1e-1, 0.1051709180756476248117 },
{6e-2, 0.061836546545359622224684877 },
{5.6e-2, 0.057597683736611251657434658737 },
{5.5e-2, 0.0565406146754942858469448477 },
{5.4e-2, 0.05548460215508004058489867657 },
{5.3e-2, 0.054429645119355907456004582 },
{5.2e-2, 0.05337574251336476282304 },
{5.1e-2, 0.05232289328320391286964 },
{5e-2, 0.0512710963760240396975 },
{1e-2, 0.0100501670841680575421654569 },
{3e-3, 0.003004504503377026012934 },
{2e-3, 0.00200200133400026675558 },
{1e-3, 0.001000500166708341668 },
{1e-4, 0.100005000166670833416668e-3},
{-1e-4, -0.9999500016666250008333e-4},
{-1e-3, -0.000999500166625008331944642832344 },
{-1e-2, -0.009950166250831946426094 },
{-2e-2, -0.01980132669324469777918589577469 },
{-3e-2, -0.029554466451491823067471648 },
{-4e-2, -0.0392105608476767905607893 },
{-5e-2, -0.04877057549928599090857468 },
{-1e-1, -0.09516258196404042683575 },
{Double.PositiveInfinity, Double.PositiveInfinity},
{Double.NegativeInfinity, -1},
{Double.NaN, Double.NaN}
@ -962,6 +1065,13 @@ ncdf(-12.2)
[Fact]
public void GammaUpperTest()
{
double[,] gammaUpperScale_pairs = new double[,]
{
{100,3, 2.749402805834002258937858149557e-110},
{1e30,1.0000000000000024E+30, 22798605571598.2221521928234647 },
};
CheckFunctionValues(nameof(MMath.GammaUpperScale), MMath.GammaUpperScale, gammaUpperScale_pairs);
double[,] gammaLower_pairs = new double[,] {
{1e-6,1e-1,0.9999981770769746499},
{0.05,3e-20,0.1085221036950261},
@ -985,7 +1095,7 @@ ncdf(-12.2)
{double.PositiveInfinity,1,0 },
{double.Epsilon,0,0 },
};
CheckFunctionValues("GammaLower", MMath.GammaLower, gammaLower_pairs);
CheckFunctionValues(nameof(MMath.GammaLower), MMath.GammaLower, gammaLower_pairs);
/* In python mpmath:
from mpmath import *
@ -1033,18 +1143,28 @@ gammainc(mpf('1'),mpf('1'),mpf('inf'),regularized=True)
{double.PositiveInfinity,1,1 },
{double.Epsilon,0,1 },
};
CheckFunctionValues("GammaUpperRegularized", (a,x) => MMath.GammaUpper(a, x, true), gammaUpperRegularized_pairs);
//CheckFunctionValues("GammaUpperRegularized", (a,x) => MMath.GammaUpper(a, x, true), gammaUpperRegularized_pairs);
/* In python mpmath:
from mpmath import *
mp.dps = 500
mp.pretty = True
gammainc(mpf('1'),mpf('1'),mpf('inf'),regularized=True)
gammainc(mpf('1'),mpf('1'),mpf('inf'),regularized=False)
*/
double[,] gammaUpper_pairs = new double[,] {
{1e-20,0.3,0.9056766516758467124267199175638778988963728798249333},
{1e-6,1e-1,1.8229219731321746872065707723366373632},
{2,1,0.73575888234288464319104754 },
{1, 1e-1, 0.9048374180359595731642490594464366211947 },
{0.5, 1e-1, 1.160462484793744246763365832264165338881 },
{1e-1, 1e-1, 1.6405876628018872105125369365484 },
{1e-2, 1e-1, 1.803241356902497279052687858810883 },
{1e-3, 1e-1, 1.8209403811279321641732411796 },
{1e-4, 1e-1, 1.8227254466517034567872146606649738 },
{1e-5, 1e-1, 1.822904105701365262441009216994 },
{1e-6, 1e-1, 1.8229219731321746872065707723366373632},
{1e-20,0.3,0.9056766516758467124267199175638778988963728798249333},
{1e-20, 1, 0.21938393439552027367814220743228835 },
{1e-20, 2, 0.048900510708061119567721436 },
{9.8813129168249309E-324, 4.94065645841247E-323, 741.5602711634856828468858990353816714074565 },
};
CheckFunctionValues("GammaUpper", (a, x) => MMath.GammaUpper(a, x, false), gammaUpper_pairs);
}
@ -1844,7 +1964,7 @@ exp(x*x/4)*pcfu(0.5+n,-x)
long ticks2 = watch.ElapsedTicks;
bool overtime = ticks > 10 * ticks2;
if (double.IsNaN(result1) /*|| overtime*/)
Trace.WriteLine($"({x:r},{y:r},{r:r},{x-r*y}): {good} {ticks} {ticks2} {result1} {result2}");
Trace.WriteLine($"({x:g17},{y:g17},{r:g17},{x-r*y}): {good} {ticks} {ticks2} {result1} {result2}");
}
}
}
@ -2258,7 +2378,7 @@ exp(x*x/4)*pcfu(0.5+n,-x)
r = 0.1;
Trace.WriteLine($"(x,y,r) = {x:r}, {y:r}, {r:r}");
Trace.WriteLine($"(x,y,r) = {x:g17}, {y:g17}, {r:g17}");
double intZOverZ;
try
@ -2269,7 +2389,7 @@ exp(x*x/4)*pcfu(0.5+n,-x)
{
intZOverZ = double.NaN;
}
Trace.WriteLine($"intZOverZ = {intZOverZ:r}");
Trace.WriteLine($"intZOverZ = {intZOverZ:g17}");
double intZ0 = NormalCdfIntegralBasic(x, y, r);
double intZ1 = 0; // NormalCdfIntegralFlip(x, y, r);
@ -2285,7 +2405,7 @@ exp(x*x/4)*pcfu(0.5+n,-x)
intZ = ExtendedDouble.NaN();
}
//double intZ = intZ0;
Trace.WriteLine($"intZ = {intZ:r} {intZ.ToDouble():r} {intZ0:r} {intZ1:r} {intZr:r}");
Trace.WriteLine($"intZ = {intZ:g17} {intZ.ToDouble():g17} {intZ0:g17} {intZ1:g17} {intZr:g17}");
if (intZ.Mantissa < 0) throw new Exception();
//double intZ2 = NormalCdfIntegralBasic(y, x, r);
//Trace.WriteLine($"intZ2 = {intZ2} {r*intZ}");
@ -2731,7 +2851,7 @@ exp(x*x/4)*pcfu(0.5+n,-x)
if (i % 2 == 1)
{
result = -numer / denom;
Console.WriteLine($"iter {i}: {result:r} {c:g4}");
Console.WriteLine($"iter {i}: {result:g17} {c:g4}");
if (double.IsInfinity(result) || double.IsNaN(result))
throw new Exception($"NormalCdfConFrac5 not converging for x={x} y={y} r={r}");
if (result == rOld)
@ -23948,11 +24068,23 @@ exp(x*x/4)*pcfu(0.5+n,-x)
x[k] = pairs[i, k];
args[k] = x[k];
}
bool showTiming = false;
if(showTiming)
{
Stopwatch watch = Stopwatch.StartNew();
int repetitionCount = 100000;
for (int repetition = 0; repetition < repetitionCount; repetition++)
{
Util.DynamicInvoke(fcn, args);
}
watch.Stop();
Trace.WriteLine($" ({watch.ElapsedTicks} ticks for {repetitionCount} calls)");
}
double fx = pairs[i, x.Count];
double result = (double)Util.DynamicInvoke(fcn, args);
if (!double.IsNaN(result) && System.Math.Sign(result) != System.Math.Sign(fx) && fx != 0)
{
string strMsg = $"{name}({x:r})\t has wrong sign (result = {result:r})";
string strMsg = $"{name}({x:g17})\t has wrong sign (result = {result:g17})";
Trace.WriteLine(strMsg);
Assert.True(false, strMsg);
}
@ -23969,11 +24101,11 @@ exp(x*x/4)*pcfu(0.5+n,-x)
}
if (err < TOLERANCE)
{
Trace.WriteLine($"{name}({x:r})\t ok");
Trace.WriteLine($"{name}({x:g17})\t ok");
}
else
{
string strMsg = $"{name}({x:r})\t wrong by {err.ToString("g2")} (result = {result:r})";
string strMsg = $"{name}({x:g17})\t wrong by {err.ToString("g2")} (result = {result:g17})";
Trace.WriteLine(strMsg);
if (err > assertTolerance || double.IsNaN(err))
Assert.True(false, strMsg);

Просмотреть файл

@ -133,7 +133,46 @@ namespace Microsoft.ML.Probabilistic.Tests
(mode <= double.Epsilon && gammaPower.GetLogProb(smallestNormalized) >= max)
);
Interlocked.Add(ref count, 1);
if(count % 100000 == 0)
if (count % 100000 == 0)
Trace.WriteLine($"{count} cases passed");
});
Trace.WriteLine($"{count} cases passed");
}
[Fact]
public void TruncatedGamma_GetMode_MaximizesGetLogProb()
{
long count = 0;
Parallel.ForEach(OperatorTests.TruncatedGammas().Take(100000), dist =>
{
double argmax = double.NaN;
double max = double.NegativeInfinity;
foreach (var x in OperatorTests.DoublesAtLeastZero())
{
double logProb = dist.GetLogProb(x);
Assert.False(double.IsNaN(logProb));
if (logProb > max)
{
max = logProb;
argmax = x;
}
}
double mode = dist.GetMode();
Assert.False(double.IsNaN(mode));
double logProbBelowMode = dist.GetLogProb(MMath.PreviousDouble(mode));
Assert.False(double.IsNaN(logProbBelowMode));
double logProbAboveMode = dist.GetLogProb(MMath.NextDouble(mode));
Assert.False(double.IsNaN(logProbAboveMode));
double logProbAtMode = dist.GetLogProb(mode);
Assert.False(double.IsNaN(logProbAtMode));
logProbAtMode = System.Math.Max(System.Math.Max(logProbAtMode, logProbAboveMode), logProbBelowMode);
const double smallestNormalized = 1e-308;
Assert.True(logProbAtMode >= max ||
MMath.AbsDiff(logProbAtMode, max, 1e-8) < 1e-8 ||
(mode <= double.Epsilon && dist.GetLogProb(smallestNormalized) >= max)
);
Interlocked.Add(ref count, 1);
if (count % 100000 == 0)
Trace.WriteLine($"{count} cases passed");
});
Trace.WriteLine($"{count} cases passed");
@ -188,7 +227,7 @@ namespace Microsoft.ML.Probabilistic.Tests
[Fact]
public void GammaPowerMeanAndVarianceFuzzTest()
{
foreach(var gammaPower in OperatorTests.GammaPowers().Take(100000))
foreach (var gammaPower in OperatorTests.GammaPowers().Take(100000))
{
gammaPower.GetMeanAndVariance(out double mean, out double variance);
Assert.False(double.IsNaN(mean));
@ -289,6 +328,9 @@ namespace Microsoft.ML.Probabilistic.Tests
g = new TruncatedGamma(2, 1, 3, 3);
Assert.True(g.IsPointMass);
Assert.Equal(3.0, g.Point);
g = new TruncatedGamma(Gamma.FromShapeAndRate(4.94065645841247E-324, 4.94065645841247E-324), 0, 1e14);
Assert.True(g.Sample() >= 0);
}
/// <summary>
@ -303,7 +345,7 @@ namespace Microsoft.ML.Probabilistic.Tests
{
TruncatedGamma g = new TruncatedGamma(1, System.Math.Exp(-i), target, double.PositiveInfinity);
var mean = g.GetMean();
Console.WriteLine($"GetNormalizer = {g.GetNormalizer()} GetMean = {g.GetMean()}");
//Trace.WriteLine($"GetNormalizer = {g.GetNormalizer()} GetMean = {g.GetMean()}");
Assert.False(double.IsInfinity(mean));
Assert.False(double.IsNaN(mean));
double diff = System.Math.Abs(mean - target);
@ -318,7 +360,7 @@ namespace Microsoft.ML.Probabilistic.Tests
{
TruncatedGamma g = new TruncatedGamma(System.Math.Exp(i), 1, 0, target);
var mean = g.GetMean();
Console.WriteLine($"GetNormalizer = {g.GetNormalizer()} GetMean = {g.GetMean()}");
//Trace.WriteLine($"GetNormalizer = {g.GetNormalizer()} GetMean = {g.GetMean()}");
Assert.False(double.IsInfinity(mean));
Assert.False(double.IsNaN(mean));
double diff = System.Math.Abs(mean - target);
@ -341,13 +383,81 @@ namespace Microsoft.ML.Probabilistic.Tests
for (int i = 0; i < 100; i++)
{
var meanPower = g.GetMeanPower(-i);
Trace.WriteLine($"GetMeanPower({-i}) = {meanPower}");
//Trace.WriteLine($"GetMeanPower({-i}) = {meanPower}");
Assert.False(double.IsNaN(meanPower));
Assert.False(double.IsInfinity(meanPower));
if (i == 1) Assert.Equal(MMath.GammaUpper(shape-1, 1, false)/MMath.GammaUpper(shape, 1, false), meanPower, 1e-8);
if (i == 1) Assert.Equal(MMath.GammaUpper(shape - 1, 1, false) / MMath.GammaUpper(shape, 1, false), meanPower, 1e-8);
}
}
[Fact]
public void TruncatedGamma_GetMeanAndVariance_WithinBounds()
{
    // Fuzz test: for a representative set of lower-truncated Gamma distributions,
    // the mean must lie within the truncation bounds, GetMean must agree with
    // GetMeanAndVariance, and the variance must be non-negative.
    long count = 0;
    Parallel.ForEach(OperatorTests.LowerTruncatedGammas()
        .Take(100000), dist =>
    {
        dist.GetMeanAndVariance(out double mean, out double variance);
        // Compiler.Quoter.Quote(dist)
        Assert.True(mean >= dist.LowerBound);
        Assert.True(mean <= dist.UpperBound);
        Assert.Equal(mean, dist.GetMean());
        Assert.True(variance >= 0);
        // Use the value returned by Interlocked.Increment instead of re-reading the
        // shared counter afterwards: another thread can change count between the add
        // and the read, causing progress messages to be skipped or duplicated.
        long newCount = Interlocked.Increment(ref count);
        if (newCount % 100000 == 0)
            Trace.WriteLine($"{newCount} cases passed");
    });
    Trace.WriteLine($"{count} cases passed");
}
[Fact]
[Trait("Category", "OpenBug")]
public void TruncatedGamma_GetMeanPower_WithinBounds()
{
    // Regression cases: a nearly-degenerate Gamma (shape and rate at the smallest
    // denormal) truncated to [0, upperBound] must return moments that respect the
    // upper bound, even for extreme powers.
    var g = new TruncatedGamma(Gamma.FromShapeAndRate(4.94065645841247E-324, 4.94065645841247E-324), 0, 1e14);
    Assert.True(g.GetMean() <= g.UpperBound);
    for (int i = 0; i < 308; i++)
    {
        double power = System.Math.Pow(10, i);
        //Trace.WriteLine($"GetMeanPower({power}) = {g.GetMeanPower(power)}");
        Assert.True(g.GetMeanPower(power) <= g.UpperBound);
    }
    Assert.True(g.GetMeanPower(1.7976931348623157E+308) <= g.UpperBound);
    Assert.True(new TruncatedGamma(Gamma.FromShapeAndRate(4.94065645841247E-324, 4.94065645841247E-324), 0, 1e9).GetMeanPower(1.7976931348623157E+308) <= 1e9);
    Assert.True(new TruncatedGamma(Gamma.FromShapeAndRate(4.94065645841247E-324, 4.94065645841247E-324), 0, 1e6).GetMeanPower(1.7976931348623157E+308) <= 1e6);
    Assert.True(new TruncatedGamma(Gamma.FromShapeAndRate(4.94065645841247E-324, 4.94065645841247E-324), 0, 100).GetMeanPower(4.94065645841247E-324) <= 100);
    // Fuzz test: E[x^power] must lie between LowerBound^power and UpperBound^power
    // (bounds swap for negative powers since x^power is then decreasing).
    long count = 0;
    Parallel.ForEach(OperatorTests.LowerTruncatedGammas()
        .Take(100000), dist =>
    {
        foreach (var power in OperatorTests.Doubles())
        {
            // E[x^power] diverges when shape <= -power and the support touches zero.
            if (dist.Gamma.Shape <= -power && dist.LowerBound == 0) continue;
            double meanPower = dist.GetMeanPower(power);
            if (power >= 0)
            {
                // Compiler.Quoter.Quote(dist)
                Assert.True(meanPower >= System.Math.Pow(dist.LowerBound, power));
                Assert.True(meanPower <= System.Math.Pow(dist.UpperBound, power));
            }
            else
            {
                Assert.True(meanPower <= System.Math.Pow(dist.LowerBound, power));
                Assert.True(meanPower >= System.Math.Pow(dist.UpperBound, power));
            }
            if (power == 1)
            {
                Assert.Equal(meanPower, dist.GetMean());
            }
        }
        // Use the value returned by Interlocked.Increment instead of re-reading the
        // shared counter afterwards: another thread can change count in between,
        // causing progress messages to be skipped or duplicated.
        long newCount = Interlocked.Increment(ref count);
        if (newCount % 100000 == 0)
            Trace.WriteLine($"{newCount} cases passed");
    });
    Trace.WriteLine($"{count} cases passed");
}
[Fact]
public void GaussianTest()
{

Просмотреть файл

@ -91,7 +91,7 @@ namespace Microsoft.ML.Probabilistic.Tests
public void BinaryFormatterTest()
{
var mc = new MyClass();
mc.Initialize();
mc.Initialize(skipStringDistributions: true);
var mc2 = CloneBinaryFormatter(mc);
mc.AssertEqualTo(mc2);
@ -185,7 +185,7 @@ namespace Microsoft.ML.Probabilistic.Tests
[DataMember] private StringDistribution stringDistribution1;
[DataMember] private StringDistribution stringDistribution2;
public void Initialize()
public void Initialize(bool skipStringDistributions = false)
{
// DO NOT make this a constructor, because it makes the test not notice complete lack of serialization as an empty object is set up exactly as the thing
// you are trying to deserialize.
@ -240,9 +240,16 @@ namespace Microsoft.ML.Probabilistic.Tests
this.outerQuantiles = OuterQuantiles.FromDistribution(3, this.quantileEstimator);
this.innerQuantiles = InnerQuantiles.FromDistribution(3, this.outerQuantiles);
this.stringDistribution1 = StringDistribution.String("aa").Append(StringDistribution.OneOf("b", "ccc")).Append("dddd");
this.stringDistribution2 = new StringDistribution();
this.stringDistribution2.SetToProduct(StringDistribution.OneOf("a", "b"), StringDistribution.OneOf("b", "c"));
if (!skipStringDistributions)
{
// String distributions can not be serialized by some formatters (namely BinaryFormatter)
// That is fine because this combination is never used in practice
this.stringDistribution1 = StringDistribution.String("aa")
.Append(StringDistribution.OneOf("b", "ccc")).Append("dddd");
this.stringDistribution2 = new StringDistribution();
this.stringDistribution2.SetToProduct(StringDistribution.OneOf("a", "b"),
StringDistribution.OneOf("b", "c"));
}
}
public void AssertEqualTo(MyClass that)
@ -280,8 +287,12 @@ namespace Microsoft.ML.Probabilistic.Tests
Assert.True(this.quantileEstimator.ValueEquals(that.quantileEstimator));
Assert.True(this.innerQuantiles.Equals(that.innerQuantiles));
Assert.True(this.outerQuantiles.Equals(that.outerQuantiles));
Assert.Equal(0, this.stringDistribution1.MaxDiff(that.stringDistribution1));
Assert.Equal(0, this.stringDistribution2.MaxDiff(that.stringDistribution2));
if (this.stringDistribution1 != null)
{
Assert.Equal(0, this.stringDistribution1.MaxDiff(that.stringDistribution1));
Assert.Equal(0, this.stringDistribution2.MaxDiff(that.stringDistribution2));
}
}
}

Просмотреть файл

@ -39,7 +39,15 @@ namespace Microsoft.ML.Probabilistic.Tests
}
[Fact]
public void GammaPower_ReturnsShapeGreaterThan1()
public void TruncatedGammaPowerTest()
{
    // Regression cases: PowAverageConditional must produce a proper message for each
    // of these lower-truncated Gamma inputs raised to a positive power, given a
    // uniform GammaPower(-1) result message.
    var cases = new[]
    {
        (dist: new TruncatedGamma(2.333, 0.02547, 1, double.PositiveInfinity), power: 1.1209480955953663),
        (dist: new TruncatedGamma(5.196e+48, 5.567e-50, 1, double.PositiveInfinity), power: 0.0016132617913803061),
        (dist: new TruncatedGamma(23.14, 0.06354, 1, double.PositiveInfinity), power: 1.5543122344752203E-15),
    };
    foreach (var testCase in cases)
    {
        Assert.True(PowerOp.PowAverageConditional(testCase.dist, testCase.power, GammaPower.Uniform(-1)).IsProper());
    }
}
[Fact]
public void TruncatedGammaPower_ReturnsGammaShapeGreaterThan1()
{
Variable<TruncatedGamma> xPriorVar = Variable.Observed(default(TruncatedGamma)).Named("xPrior");
Variable<double> x = Variable<double>.Random(xPriorVar).Named("x");
@ -58,12 +66,13 @@ namespace Microsoft.ML.Probabilistic.Tests
Gamma yLike = Gamma.Uniform();
yLikeVar.ObservedValue = yLike;
power.ObservedValue = powerValue;
var xActual = engine.Infer<TruncatedGamma>(x);
var yActual = engine.Infer<Gamma>(y);
// Importance sampling
GammaEstimator xEstimator = new GammaEstimator();
GammaEstimator yEstimator = new GammaEstimator();
MeanVarianceAccumulator mva = new MeanVarianceAccumulator();
MeanVarianceAccumulator yExpectedInverse = new MeanVarianceAccumulator();
int nSamples = 1000000;
for (int i = 0; i < nSamples; i++)
{
@ -73,13 +82,74 @@ namespace Microsoft.ML.Probabilistic.Tests
double weight = System.Math.Exp(logWeight);
xEstimator.Add(xSample, weight);
yEstimator.Add(ySample, weight);
mva.Add(1/ySample, weight);
yExpectedInverse.Add(1/ySample, weight);
}
Gamma xExpected = xEstimator.GetDistribution(new Gamma());
Gamma yExpected = yEstimator.GetDistribution(yLike);
double yActualMeanInverse = yActual.GetMeanPower(-1);
double meanInverseError = MMath.AbsDiff(mva.Mean, yActualMeanInverse, 1e-8);
Trace.WriteLine($"power = {powerValue}: y = {yActual}[E^-1={yActual.GetMeanPower(-1)}] should be {yExpected}[E^-1={mva.Mean}], error = {meanInverseError}");
double meanInverseError = MMath.AbsDiff(yExpectedInverse.Mean, yActualMeanInverse, 1e-8);
Trace.WriteLine($"power = {powerValue}:");
Trace.WriteLine($" x = {xActual} should be {xExpected}");
Trace.WriteLine($" y = {yActual}[E^-1={yActual.GetMeanPower(-1)}] should be {yExpected}[E^-1={yExpectedInverse.Mean}], E^-1 error = {meanInverseError}");
Assert.True(yActual.Shape > 1);
Assert.True(MMath.AbsDiff(yExpected.GetMean(), yActual.GetMean(), 1e-8) < 1);
Assert.True(meanInverseError < 1e-2);
}
}
[Fact]
public void TruncatedGammaPower_ReturnsGammaPowerShapeGreaterThan1()
{
var result = PowerOp.PowAverageConditional(new TruncatedGamma(0.4, 0.5, 1, double.PositiveInfinity), 0, GammaPower.PointMass(0, -1));
Assert.True(result.IsPointMass);
Assert.Equal(1.0, result.Point);
Variable<TruncatedGamma> xPriorVar = Variable.Observed(default(TruncatedGamma)).Named("xPrior");
Variable<double> x = Variable<double>.Random(xPriorVar).Named("x");
Variable<double> power = Variable.Observed(0.5).Named("power");
var y = x ^ power;
y.Name = nameof(y);
Variable<GammaPower> yLikeVar = Variable.Observed(default(GammaPower)).Named("yLike");
Variable.ConstrainEqualRandom(y, yLikeVar);
y.SetMarginalPrototype(yLikeVar);
InferenceEngine engine = new InferenceEngine();
foreach (var powerValue in linspace(1, 10, 10))
{
TruncatedGamma xPrior = new TruncatedGamma(Gamma.FromShapeAndRate(3, 3), 1, double.PositiveInfinity);
xPriorVar.ObservedValue = xPrior;
GammaPower yLike = GammaPower.Uniform(-1);
//GammaPower yLike = GammaPower.FromShapeAndRate(1, 0.5, -1);
yLikeVar.ObservedValue = yLike;
power.ObservedValue = powerValue;
var xActual = engine.Infer<TruncatedGamma>(x);
var yActual = engine.Infer<GammaPower>(y);
// Importance sampling
GammaEstimator xEstimator = new GammaEstimator();
GammaPowerEstimator yEstimator = new GammaPowerEstimator(yLike.Power);
MeanVarianceAccumulator yExpectedInverse = new MeanVarianceAccumulator();
MeanVarianceAccumulator yMva = new MeanVarianceAccumulator();
int nSamples = 1000000;
for (int i = 0; i < nSamples; i++)
{
double xSample = xPrior.Sample();
double ySample = System.Math.Pow(xSample, power.ObservedValue);
double logWeight = yLike.GetLogProb(ySample);
double weight = System.Math.Exp(logWeight);
xEstimator.Add(xSample, weight);
yEstimator.Add(ySample, weight);
yExpectedInverse.Add(1 / ySample, weight);
yMva.Add(ySample, weight);
}
Gamma xExpected = xEstimator.GetDistribution(new Gamma());
GammaPower yExpected = yEstimator.GetDistribution(yLike);
yExpected = GammaPower.FromMeanAndVariance(yMva.Mean, yMva.Variance, yLike.Power);
double yActualMeanInverse = yActual.GetMeanPower(-1);
double meanInverseError = MMath.AbsDiff(yExpectedInverse.Mean, yActualMeanInverse, 1e-8);
Trace.WriteLine($"power = {powerValue}:");
Trace.WriteLine($" x = {xActual} should be {xExpected}");
Trace.WriteLine($" y = {yActual}[E^-1={yActual.GetMeanPower(-1)}] should be {yExpected}[E^-1={yExpectedInverse.Mean}], error = {meanInverseError}");
Assert.True(yActual.Shape > 1);
Assert.True(MMath.AbsDiff(yExpected.GetMean(), yActual.GetMean(), 1e-8) < 1);
Assert.True(meanInverseError < 1e-2);
@ -106,6 +176,7 @@ namespace Microsoft.ML.Probabilistic.Tests
GammaPower yLike = GammaPower.Uniform(-1);
yLikeVar.ObservedValue = yLike;
power.ObservedValue = powerValue;
var xActual = engine.Infer<GammaPower>(x);
var yActual = engine.Infer<GammaPower>(y);
// Importance sampling
@ -127,7 +198,9 @@ namespace Microsoft.ML.Probabilistic.Tests
Gamma yExpected = yEstimator.GetDistribution(new Gamma());
double yActualMeanInverse = yActual.GetMeanPower(-1);
double meanInverseError = MMath.AbsDiff(mva.Mean, yActualMeanInverse, 1e-8);
Trace.WriteLine($"power = {powerValue}: y = {yActual}[E^-1={yActualMeanInverse}] should be {yExpected}[E^-1={mva.Mean}], error = {meanInverseError}");
Trace.WriteLine($"power = {powerValue}:");
Trace.WriteLine($" x = {xActual} should be {xExpected}");
Trace.WriteLine($" y = {yActual}[E^-1={yActualMeanInverse}] should be {yExpected}[E^-1={mva.Mean}], error = {meanInverseError}");
Assert.True(yActual.Shape > 2);
Assert.True(MMath.AbsDiff(yExpected.GetMean(), yActual.GetMean(), 1e-8) < 1);
//Assert.True(meanInverseError < 10);
@ -1900,7 +1973,7 @@ namespace Microsoft.ML.Probabilistic.Tests
double aError = aExpected.MaxDiff(aActual);
double productError = productExpected.MaxDiff(productActual);
double evError = MMath.AbsDiff(evExpected, evActual, 1e-6);
bool trace = false;
bool trace = true;
if (trace)
{
Trace.WriteLine($"b = {bActual} should be {bExpected}, error = {bError}");
@ -1916,6 +1989,212 @@ namespace Microsoft.ML.Probabilistic.Tests
}
}
// Manual entry point for exercising the evidence scale-invariance check below
// on a particular triple of GammaPower(-1) distributions.
internal static void TestLogEvidence()
{
    LogEvidenceScale(new GammaPower(100, 5.0 / 100, -1), new GammaPower(100, 2.0 / 100, -1), new GammaPower(100, 3.0 / 100, -1), 0.2);
}
// Prints both sides of a shape-shift identity for the evidence of sum = a + b:
// lhs decrements sum's shape, rhs decrements a's or b's shape, and each term is
// corrected by log(rate / (shape - 1)) — presumably the E[1/x] factor of a
// Gamma(shape, rate); TODO confirm. If the identity holds, lhs ≈ logsumexp(rhs1, rhs2).
internal static void LogEvidenceShift(GammaPower sum, GammaPower a, GammaPower b)
{
    // Evidence with one shape parameter decremented (sum, a, b respectively).
    double logz100 = PlusGammaOp.LogAverageFactor(GammaPower.FromShapeAndRate(sum.Shape-1, sum.Rate, sum.Power), GammaPower.FromShapeAndRate(a.Shape, a.Rate, a.Power), GammaPower.FromShapeAndRate(b.Shape, b.Rate, b.Power));
    double logz010 = PlusGammaOp.LogAverageFactor(GammaPower.FromShapeAndRate(sum.Shape, sum.Rate, sum.Power), GammaPower.FromShapeAndRate(a.Shape-1, a.Rate, a.Power), GammaPower.FromShapeAndRate(b.Shape, b.Rate, b.Power));
    double logz001 = PlusGammaOp.LogAverageFactor(GammaPower.FromShapeAndRate(sum.Shape, sum.Rate, sum.Power), GammaPower.FromShapeAndRate(a.Shape, a.Rate, a.Power), GammaPower.FromShapeAndRate(b.Shape-1, b.Rate, b.Power));
    double lhs = logz100 + System.Math.Log(sum.Rate / (sum.Shape - 1));
    double rhs1 = logz010 + System.Math.Log(a.Rate / (a.Shape - 1));
    double rhs2 = logz001 + System.Math.Log(b.Rate / (b.Shape - 1));
    // The two printed values should agree if the identity holds.
    Trace.WriteLine($"lhs = {lhs} rhs = {MMath.LogSumExp(rhs1, rhs2)}");
}
// Checks scale invariance of the evidence for sum = a + b: multiplying every rate
// by 'scale' (i.e. shrinking the variables by 'scale') should change the brute-force
// log-evidence by exactly -log(scale), so the two printed values should agree.
internal static void LogEvidenceScale(GammaPower sum, GammaPower a, GammaPower b, double scale)
{
    double logZ = LogEvidenceBrute(sum, a, b);
    double logZ2 = System.Math.Log(scale) + LogEvidenceBrute(GammaPower.FromShapeAndRate(sum.Shape, scale * sum.Rate, sum.Power), GammaPower.FromShapeAndRate(a.Shape, scale * a.Rate, a.Power), GammaPower.FromShapeAndRate(b.Shape, scale * b.Rate, b.Power));
    Trace.WriteLine($"logZ = {logZ} {logZ2}");
}
// Brute-force Monte Carlo estimate of the log-evidence of the model
// sum ~ sumPrior constrained to equal a + b with a ~ aPrior, b ~ bPrior:
// samples (a, b) from the priors and averages the sum prior's density at a + b.
internal static double LogEvidenceBrute(GammaPower sumPrior, GammaPower aPrior, GammaPower bPrior)
{
    double totalWeight = 0;
    int numIter = 1000000;
    for (int iter = 0; iter < numIter; iter++)
    {
        if (iter % 1000000 == 0) Trace.WriteLine($"iter = {iter}");
        double bSample = bPrior.Sample();
        double aSample = aPrior.Sample();
        if (sumPrior.Rate > 1e100)
        {
            // Huge rate: force both samples to 0, presumably treating sumPrior as a
            // point mass at zero to avoid weights underflowing — TODO confirm.
            bSample = 0;
            aSample = 0;
        }
        double sumSample = aSample + bSample;
        double logWeight = sumPrior.GetLogProb(sumSample);
        double weight = System.Math.Exp(logWeight);
        totalWeight += weight;
    }
    Trace.WriteLine($"totalWeight = {totalWeight}");
    // log of the average importance weight.
    return System.Math.Log(totalWeight / numIter);
}
// Estimates the log-evidence of sum = a + b. When b.Shape is above the threshold the
// value is computed directly (currently by brute force); otherwise it is obtained by
// recursing with b.Shape incremented and inverting a shape-shift identity:
// the term with sum's shape decremented equals the sum of the terms with a's or b's
// shape decremented, each corrected by log(rate / (shape - 1)).
internal static double LogEvidenceIncrementBShape(GammaPower sum, GammaPower a, GammaPower b)
{
    const double threshold = 0;
    if (b.Shape > threshold)
    {
        //return PlusGammaOp.LogAverageFactor(sum, a, b);
        return LogEvidenceBrute(sum, a, b);
    }
    // Both recursive calls use b.Shape + 1, where the direct computation is valid.
    double logz100 = LogEvidenceIncrementBShape(GammaPower.FromShapeAndRate(sum.Shape - 1, sum.Rate, sum.Power), GammaPower.FromShapeAndRate(a.Shape, a.Rate, a.Power), GammaPower.FromShapeAndRate(b.Shape + 1, b.Rate, b.Power));
    double logz010 = LogEvidenceIncrementBShape(GammaPower.FromShapeAndRate(sum.Shape, sum.Rate, sum.Power), GammaPower.FromShapeAndRate(a.Shape - 1, a.Rate, a.Power), GammaPower.FromShapeAndRate(b.Shape + 1, b.Rate, b.Power));
    double lhs = logz100 + System.Math.Log(sum.Rate / (sum.Shape - 1));
    double rhs1 = logz010 + System.Math.Log(a.Rate / (a.Shape - 1));
    double rhs2 = System.Math.Log(b.Rate / b.Shape);
    // Solve the identity for the desired term: (lhs - rhs1) / exp(rhs2) in log space.
    return MMath.LogDifferenceOfExp(lhs, rhs1) - rhs2;
}
[Fact]
[Trait("Category", "OpenBug")]
public void GammaPowerSumRRRTest()
{
    // Tests PlusGammaOp message passing for sum = a + b where a, b, and the sum all
    // have random GammaPower priors, comparing inferred marginals and evidence
    // against ground-truth values (recomputed here by importance sampling).
    //Assert.True(PlusGammaOp.AAverageConditional(GammaPower.FromShapeAndRate(299, 2135, -1), GammaPower.FromShapeAndRate(2.01, 10, -1), GammaPower.FromShapeAndRate(12, 22, -1), GammaPower.Uniform(-1)).Shape > 2);
    //Assert.True(PlusGammaOp.AAverageConditional(GammaPower.Uniform(-1), GammaPower.FromShapeAndRate(2.0095439611576689, 43.241375394505766, -1), GammaPower.FromShapeAndRate(12, 11, -1), GammaPower.Uniform(-1)).IsUniform());
    //Assert.False(double.IsNaN(PlusGammaOp.BAverageConditional(new GammaPower(287, 0.002132, -1), new GammaPower(1.943, 1.714, -1), new GammaPower(12, 0.09091, -1), GammaPower.Uniform(-1)).Shape));
    // Model: evidence gate around sum = a + b with a soft constraint on the sum.
    Variable<bool> evidence = Variable.Bernoulli(0.5).Named("evidence");
    IfBlock block = Variable.If(evidence);
    Variable<GammaPower> bPriorVar = Variable.Observed(default(GammaPower)).Named("bPrior");
    Variable<double> b = Variable<double>.Random(bPriorVar).Named("b");
    Variable<GammaPower> aPriorVar = Variable.Observed(default(GammaPower)).Named("aPrior");
    Variable<double> a = Variable<double>.Random(aPriorVar).Named("a");
    Variable<double> sum = (a + b).Named("sum");
    Variable<GammaPower> sumPriorVar = Variable.Observed(default(GammaPower)).Named("sumPrior");
    Variable.ConstrainEqualRandom(sum, sumPriorVar);
    block.CloseBlock();
    InferenceEngine engine = new InferenceEngine();
    // Each entry: ((bPrior, aPrior, sumPrior), (bExpected, aExpected, sumExpected, evExpected)).
    // Disabled entries document previously-investigated cases.
    var groundTruthArray = new[]
    {
        //((new GammaPower(12, 0.09091, -1), new GammaPower(1.943, 1.714, -1), new GammaPower(287, 0.002132, -1)),
        // (GammaPower.FromShapeAndRate(23.445316648707465, 25.094880573396285, -1.0), GammaPower.FromShapeAndRate(6.291922598211336, 2.6711637040924909, -1.0), GammaPower.FromShapeAndRate(297.59289156399706, 481.31323394825631, -1.0), -0.517002984399292)),
        //((GammaPower.FromShapeAndRate(12, 22, -1), GammaPower.FromShapeAndRate(2.01, 10, -1), GammaPower.FromShapeAndRate(299, 2135, -1)),
        // (GammaPower.FromShapeAndRate(12.4019151884055, 23.487535138993064, -1.0), GammaPower.FromShapeAndRate(47.605465737960976, 236.41203334327037, -1.0), GammaPower.FromShapeAndRate(303.94717779788243, 2160.7976040127091, -1.0), -2.26178042225837)),
        //((GammaPower.FromShapeAndRate(1, 2, 1), GammaPower.FromShapeAndRate(10, 10, 1), GammaPower.FromShapeAndRate(101, double.MaxValue, 1)),
        // (GammaPower.PointMass(0, 1.0), GammaPower.FromShapeAndScale(9, 0.1, 1), GammaPower.PointMass(5.6183114927306835E-307, 1), 0.79850769622135)),
        //((GammaPower.FromShapeAndRate(1, 2, 1), GammaPower.FromShapeAndRate(10, 10, 1), GammaPower.FromShapeAndRate(101, double.PositiveInfinity, 1)),
        // (GammaPower.PointMass(0, 1.0), GammaPower.PointMass(0, 1.0), GammaPower.FromShapeAndRate(101, double.PositiveInfinity, 1), double.NegativeInfinity)),
        //((GammaPower.FromShapeAndRate(2.25, 0.625, -1), GammaPower.FromShapeAndRate(100000002, 100000001, -1), GammaPower.PointMass(5, -1)),
        // (GammaPower.FromShapeAndRate(1599999864.8654146, 6399999443.0866585, -1.0), GammaPower.FromShapeAndRate(488689405.117356, 488689405.88170129, -1.0), GammaPower.FromShapeAndRate(double.PositiveInfinity, 5.0, -1.0), -4.80649551611576)),
        //((GammaPower.FromShapeAndRate(2.25, 0.625, -1), GammaPower.FromShapeAndRate(100000002, 100000001, -1), GammaPower.PointMass(0, -1)),
        // (GammaPower.FromShapeAndRate(5.25, 0.625, -1.0), GammaPower.PointMass(0, -1.0), GammaPower.PointMass(0, -1), double.NegativeInfinity)),
        ((GammaPower.FromShapeAndRate(0.83228652924877289, 0.31928405884349487, -1), GammaPower.FromShapeAndRate(1.7184321234630087, 0.709692740551586, -1), GammaPower.FromShapeAndRate(491, 1583.0722891566263, -1)),
        (GammaPower.FromShapeAndRate(5.6062357530254419, 8.7330355320375, -1.0), GammaPower.FromShapeAndRate(3.7704064465114597, 3.6618414405426956, -1.0), GammaPower.FromShapeAndRate(493.79911104976264, 1585.67297686381, -1.0), -2.62514943790608)),
        //((GammaPower.FromShapeAndRate(1, 1, 1), GammaPower.FromShapeAndRate(1, 1, 1), GammaPower.Uniform(1)),
        // (GammaPower.FromShapeAndRate(1, 1, 1), GammaPower.FromShapeAndRate(1, 1, 1), new GammaPower(2, 1, 1), 0)),
        //((GammaPower.FromShapeAndRate(1, 1, 1), GammaPower.FromShapeAndRate(1, 1, 1), GammaPower.FromShapeAndRate(10, 1, 1)),
        // (GammaPower.FromShapeAndRate(2.2, 0.8, 1), GammaPower.FromShapeAndRate(2.2, 0.8, 1), GammaPower.FromShapeAndRate(11, 2, 1), -5.32133409609914)),
        //((GammaPower.FromShapeAndRate(3, 1, -1), GammaPower.FromShapeAndRate(4, 1, -1), GammaPower.Uniform(-1)),
        // (GammaPower.FromShapeAndRate(3, 1, -1), GammaPower.FromShapeAndRate(4, 1, -1), GammaPower.FromShapeAndRate(4.311275674659143, 2.7596322350392035, -1.0), 0)),
        //((GammaPower.FromShapeAndRate(3, 1, -1), GammaPower.FromShapeAndRate(4, 1, -1), GammaPower.FromShapeAndRate(10, 1, -1)),
        // (new GammaPower(10.17, 0.6812, -1), new GammaPower(10.7, 0.7072, -1), new GammaPower(17.04, 0.2038, -1), -5.80097480415528)),
        //((GammaPower.FromShapeAndRate(2, 1, -1), GammaPower.FromShapeAndRate(2, 1, -1), GammaPower.Uniform(-1)),
        // (GammaPower.FromShapeAndRate(2, 1, -1), GammaPower.FromShapeAndRate(2, 1, -1), new GammaPower(2, 2, -1), 0)),
        //((GammaPower.FromShapeAndRate(1, 1, -1), GammaPower.FromShapeAndRate(1, 1, -1), GammaPower.FromShapeAndRate(30, 1, -1)),
        // (GammaPower.FromShapeAndRate(11.057594449558747, 2.0731054100295871, -1.0), GammaPower.FromShapeAndRate(11.213079710986863, 2.1031756133562678, -1.0), GammaPower.FromShapeAndRate(28.815751741667615, 1.0848182432207041, -1.0), -4.22210057295786)),
        //((GammaPower.FromShapeAndRate(1, 1, 2), GammaPower.FromShapeAndRate(1, 1, 2), GammaPower.Uniform(2)),
        // (GammaPower.FromShapeAndRate(1, 1, 2), GammaPower.FromShapeAndRate(1, 1, 2), GammaPower.FromShapeAndRate(0.16538410345846666, 0.219449497990138, 2.0), 0)),
        //((GammaPower.FromShapeAndRate(1, 1, 2), GammaPower.FromShapeAndRate(1, 1, 2), GammaPower.FromShapeAndRate(30, 1, 2)),
        // (GammaPower.FromShapeAndRate(8.72865708291647, 1.71734403810018, 2.0), GammaPower.FromShapeAndRate(8.5298603954575931, 1.6767026737490067, 2.0), GammaPower.FromShapeAndRate(25.831187278202215, 1.0852321896648485, 2.0), -14.5369973268808)),
    };
    //using (TestUtils.TemporarilyAllowGammaImproperProducts)
    {
        foreach (var groundTruth in groundTruthArray)
        {
            var (bPrior, aPrior, sumPrior) = groundTruth.Item1;
            var (bExpected, aExpected, sumExpected, evExpected) = groundTruth.Item2;
            bPriorVar.ObservedValue = bPrior;
            aPriorVar.ObservedValue = aPrior;
            sumPriorVar.ObservedValue = sumPrior;
            GammaPower bActual = engine.Infer<GammaPower>(b);
            GammaPower aActual = engine.Infer<GammaPower>(a);
            GammaPower sumActual = engine.Infer<GammaPower>(sum);
            double evActual = engine.Infer<Bernoulli>(evidence).LogOdds;
            double logZ = LogEvidenceIncrementBShape(sumPrior, aPrior, bPrior);
            Trace.WriteLine($"LogZ = {logZ}");
            // NOTE(review): this branch is hard-enabled, so the expected values from
            // groundTruthArray are always overwritten by a fresh importance-sampling
            // estimate; flip to false to compare against the recorded values instead.
            if (true)
            {
                // importance sampling
                Rand.Restart(0);
                double totalWeight = 0;
                GammaPowerEstimator bEstimator = new GammaPowerEstimator(bPrior.Power);
                GammaPowerEstimator aEstimator = new GammaPowerEstimator(aPrior.Power);
                GammaPowerEstimator sumEstimator = new GammaPowerEstimator(sumPrior.Power);
                MeanVarianceAccumulator bMva = new MeanVarianceAccumulator();
                MeanVarianceAccumulator aMva = new MeanVarianceAccumulator();
                MeanVarianceAccumulator sumMva = new MeanVarianceAccumulator();
                int numIter = 10000000;
                for (int iter = 0; iter < numIter; iter++)
                {
                    if (iter % 1000000 == 0) Trace.WriteLine($"iter = {iter}");
                    double bSample = bPrior.Sample();
                    double aSample = aPrior.Sample();
                    if (sumPrior.Rate > 1e100)
                    {
                        // Huge rate: force samples to 0, presumably treating sumPrior
                        // as a point mass at zero — TODO confirm.
                        bSample = 0;
                        aSample = 0;
                    }
                    double sumSample = aSample + bSample;
                    double logWeight = sumPrior.GetLogProb(sumSample);
                    double weight = System.Math.Exp(logWeight);
                    totalWeight += weight;
                    bEstimator.Add(bSample, weight);
                    aEstimator.Add(aSample, weight);
                    sumEstimator.Add(sumSample, weight);
                    bMva.Add(bSample, weight);
                    aMva.Add(aSample, weight);
                    sumMva.Add(sumSample, weight);
                }
                Trace.WriteLine($"totalWeight = {totalWeight}");
                evExpected = System.Math.Log(totalWeight / numIter);
                // The estimator results are immediately replaced by moment-matched
                // distributions below; kept here for comparison while debugging.
                bExpected = bEstimator.GetDistribution(bPrior);
                aExpected = aEstimator.GetDistribution(aPrior);
                sumExpected = sumEstimator.GetDistribution(sumPrior);
                bExpected = GammaPower.FromMeanAndVariance(bMva.Mean, bMva.Variance, bPrior.Power);
                aExpected = GammaPower.FromMeanAndVariance(aMva.Mean, aMva.Variance, aPrior.Power);
                sumExpected = GammaPower.FromMeanAndVariance(sumMva.Mean, sumMva.Variance, sumPrior.Power);
                Trace.WriteLine($"{Quoter.Quote(bExpected)}, {Quoter.Quote(aExpected)}, {Quoter.Quote(sumExpected)}, {evExpected}");
            }
            else Trace.WriteLine($"{Quoter.Quote(bActual)}, {Quoter.Quote(aActual)}, {Quoter.Quote(sumActual)}, {evActual}");
            double bError = MomentDiff(bExpected, bActual);
            double aError = MomentDiff(aExpected, aActual);
            double productError = MomentDiff(sumExpected, sumActual);
            double evError = MMath.AbsDiff(evExpected, evActual, 1e-6);
            bool trace = true;
            if (trace)
            {
                Trace.WriteLine($"b = {bActual} should be {bExpected}, error = {bError}");
                Trace.WriteLine($"a = {aActual}[variance={aActual.GetVariance()}] should be {aExpected}[variance={aExpected.GetVariance()}], error = {aError}");
                Trace.WriteLine($"product = {sumActual} should be {sumExpected}, error = {productError}");
                Trace.WriteLine($"evidence = {evActual} should be {evExpected}, error = {evError}");
            }
            Assert.True(bError < 3);
            Assert.True(aError < 1);
            Assert.True(productError < 1);
            Assert.True(evError < 0.3);
        }
    }
}
/// <summary>
/// Returns the larger of the relative differences between the means and between the
/// variances of two GammaPower distributions.
/// </summary>
public static double MomentDiff(GammaPower expected, GammaPower actual)
{
    const double rel = 1e-8;
    expected.GetMeanAndVariance(out double meanExpected, out double varianceExpected);
    actual.GetMeanAndVariance(out double meanActual, out double varianceActual);
    double meanDiff = MMath.AbsDiff(meanExpected, meanActual, rel);
    double varianceDiff = MMath.AbsDiff(varianceExpected, varianceActual, rel);
    return System.Math.Max(meanDiff, varianceDiff);
}
[Fact]
public void GammaCCRTest()
{

Просмотреть файл

@ -4791,6 +4791,30 @@ namespace Microsoft.ML.Probabilistic.Tests
Assert.Equal(actual, expected);
}
[Fact]
public void PartiallyUniformArrayTest2()
{
    // Sums over a jagged array where one inner array has proper Gaussian priors and
    // the other is entirely uniform: the first sum must be a proper Gaussian and the
    // second must stay uniform.
    var item = new Range(2).Named("item");
    var inner = new Range(2).Named("inner");
    var arrays = Variable.Array(Variable.Array<double>(inner), item).Named("arrays");
    arrays[0][0] = Variable.GaussianFromMeanAndPrecision(0, 1);
    arrays[0][1] = Variable.GaussianFromMeanAndPrecision(0, 1);
    arrays[1][0] = Variable.Random(Gaussian.Uniform());
    arrays[1][1] = Variable.Random(Gaussian.Uniform());
    var sums = Variable.Array<double>(item).Named("sums");
    using (Variable.ForEach(item))
    {
        sums[item] = Variable.Sum(arrays[item]);
    }
    InferenceEngine engine = new InferenceEngine();
    // Sum of two N(0,1) variables is N(0,2), i.e. precision 0.5.
    GaussianArray expected = new GaussianArray(2);
    expected[0] = Gaussian.FromMeanAndPrecision(0, 0.5);
    expected[1] = Gaussian.Uniform();
    IList<Gaussian> actual = engine.Infer<IList<Gaussian>>(sums);
    Console.WriteLine(actual);
    // xUnit convention is Assert.Equal(expected, actual); the previous argument order
    // produced misleading failure messages.
    Assert.Equal(expected, actual);
}
[Fact]
public void VectorGaussianFactorTest()
{

Просмотреть файл

@ -384,6 +384,9 @@ namespace Microsoft.ML.Probabilistic.Tests
[Fact]
public void GammaFromShapeAndRateOpTest()
{
Assert.False(double.IsNaN(GammaFromShapeAndRateOp_Slow.SampleAverageConditional(Gamma.PointMass(0), 2.0, new Gamma(1, 1)).Rate));
Assert.False(double.IsNaN(GammaFromShapeAndRateOp_Slow.RateAverageConditional(new Gamma(1, 1), 2.0, Gamma.PointMass(0)).Rate));
Gamma sample, rate, result;
double prevDiff;
double shape = 3;
@ -856,7 +859,7 @@ namespace Microsoft.ML.Probabilistic.Tests
{
double point = 3;
Gaussian toPoint = MaxGaussianOp.AAverageConditional(max, Gaussian.PointMass(point), b);
//Console.WriteLine($"{point} {toPoint} {toPoint.MeanTimesPrecision:r} {toPoint.Precision:r}");
//Console.WriteLine($"{point} {toPoint} {toPoint.MeanTimesPrecision:g17} {toPoint.Precision:g17}");
if (max.IsPointMass && b.IsPointMass)
{
Gaussian toUniform = MaxGaussianOp.AAverageConditional(max, Gaussian.Uniform(), b);
@ -874,7 +877,7 @@ namespace Microsoft.ML.Probabilistic.Tests
{
Gaussian a = Gaussian.FromMeanAndPrecision(point, System.Math.Pow(10, i));
Gaussian to_a = MaxGaussianOp.AAverageConditional(max, a, b);
//Console.WriteLine($"{a} {to_a} {to_a.MeanTimesPrecision:r} {to_a.Precision:r}");
//Console.WriteLine($"{a} {to_a} {to_a.MeanTimesPrecision:g17} {to_a.Precision:g17}");
double diff = toPoint.MaxDiff(to_a);
if (diff < 1e-14) diff = 0;
Assert.True(diff <= oldDiff);
@ -1362,16 +1365,6 @@ namespace Microsoft.ML.Probabilistic.Tests
});
}
[Fact]
[Trait("Category", "OpenBug")]
public void GammaUpper_IsDecreasingInX()
{
foreach (double a in DoublesGreaterThanZero())
{
IsIncreasingForAtLeastZero(x => -MMath.GammaUpper(a, x));
}
}
[Fact]
public void GammaLower_IsDecreasingInA()
{
@ -1382,13 +1375,21 @@ namespace Microsoft.ML.Probabilistic.Tests
}
[Fact]
[Trait("Category", "OpenBug")]
public void GammaUpper_IsDecreasingInX()
{
    // For every shape parameter a > 0, GammaUpper(a, x) must be non-increasing in x;
    // checked by negating it and asserting the result is non-decreasing.
    Parallel.ForEach(DoublesGreaterThanZero(), a =>
    {
        IsIncreasingForAtLeastZero(x => -MMath.GammaUpper(a, x));
    });
}
[Fact]
public void GammaUpper_IsIncreasingInA()
{
foreach (double x in DoublesAtLeastZero())
Parallel.ForEach(DoublesAtLeastZero(), x =>
{
IsIncreasingForAtLeastZero(a => MMath.GammaUpper(a + double.Epsilon, x));
}
IsIncreasingForAtLeastZero(a => MMath.GammaUpper(a + double.Epsilon, x), 2);
});
}
[Fact]
@ -1454,18 +1455,22 @@ namespace Microsoft.ML.Probabilistic.Tests
/// </summary>
/// <param name="func"></param>
/// <returns></returns>
public bool IsIncreasingForAtLeastZero(Func<double, double> func)
public bool IsIncreasingForAtLeastZero(Func<double, double> func, double ulpError = 0)
{
double scale = 1 + 2e-16 * ulpError;
foreach (var x in DoublesAtLeastZero())
{
double fx = func(x);
double smallFx;
if (fx >= 0) smallFx = fx / scale;
else smallFx = fx * scale;
foreach (var delta in DoublesGreaterThanZero())
{
double x2 = x + delta;
if (double.IsPositiveInfinity(delta)) x2 = delta;
double fx2 = func(x2);
// The cast here is important when running in 32-bit, Release mode.
Assert.True((double)fx2 >= fx);
Assert.True((double)fx2 >= smallFx);
}
}
return true;
@ -1552,7 +1557,7 @@ zL = (L - mx)*sqrt(prec)
//X = Gaussian.FromMeanAndPrecision(mx, X.Precision + 1.0000000000000011E-19);
Gaussian toX2 = DoubleIsBetweenOp.XAverageConditional(Bernoulli.PointMass(true), X, lowerBound, upperBound);
Gaussian xPost = X * toX2;
Console.WriteLine($"mx = {X.GetMean():r} mp = {xPost.GetMean():r} vp = {xPost.GetVariance():r} toX = {toX2}");
Console.WriteLine($"mx = {X.GetMean():g17} mp = {xPost.GetMean():g17} vp = {xPost.GetVariance():g17} toX = {toX2}");
//X.Precision *= 100;
//X.MeanTimesPrecision *= 0.999999;
//X.SetMeanAndPrecision(mx, X.Precision * 2);
@ -1671,7 +1676,7 @@ zL = (L - mx)*sqrt(prec)
// all we need is a good approx for (ZR/diff - 1)
double ZR5 = (1.0 / 6 * diffs * diffs * diffs * (-1 + zL * zL) + 0.5 * diffs * diffs * (-zL) + diffs) / sqrtPrec;
double ZR6 = (1.0 / 24 * diffs * diffs * diffs * diffs * (zL - zL * zL * zL + 2 * zL) + 1.0 / 6 * diffs * diffs * diffs * (-1 + zL * zL) + 0.5 * diffs * diffs * (-zL) + diffs) / sqrtPrec;
//Console.WriteLine($"zL = {zL:r} delta = {delta:r} (-zL-zU)/2*diffs={(-zL - zU) / 2 * diffs:r} diffs = {diffs:r} diffs*zL = {diffs * zL}");
//Console.WriteLine($"zL = {zL:g17} delta = {delta:g17} (-zL-zU)/2*diffs={(-zL - zU) / 2 * diffs:g17} diffs = {diffs:g17} diffs*zL = {diffs * zL}");
//Console.WriteLine($"Z/N = {ZR} {ZR2} {ZR2b} {ZR2c} asympt:{ZRasympt} {ZR4} {ZR5} {ZR6}");
// want to compute Z/X.Prob(L)/diffs + (exp(delta)-1)/delta
double expMinus1RatioMinus1RatioMinusHalf = MMath.ExpMinus1RatioMinus1RatioMinusHalf(delta);
@ -1741,7 +1746,7 @@ zL = (L - mx)*sqrt(prec)
// delta = 0.0002 diffs = 0.00014142135623731: bad
// delta = 2E-08 diffs = 1.4142135623731E-08: good
double numer5 = delta * diffs * diffs / 6 + diffs * diffs * diffs * diffs / 24 - 1.0 / 24 * delta * delta * delta - 1.0 / 120 * delta * delta * delta * delta + diffs * diffs / 12;
//Console.WriteLine($"numer = {numer} smallzL:{numer1SmallzL} largezL:{numerLargezL} {numerLargezL2} {numerLargezL3} {numerLargezL4:r} {numerLargezL5:r} {numerLargezL6:r} {numerLargezL7:r} {numerLargezL8:r} {numer1e} asympt:{numerAsympt} {numerAsympt2} {numer2} {numer3} {numer4} {numer5}");
//Console.WriteLine($"numer = {numer} smallzL:{numer1SmallzL} largezL:{numerLargezL} {numerLargezL2} {numerLargezL3} {numerLargezL4:g17} {numerLargezL5:g17} {numerLargezL6:g17} {numerLargezL7:g17} {numerLargezL8:g17} {numer1e} asympt:{numerAsympt} {numerAsympt2} {numer2} {numer3} {numer4} {numer5}");
double mp = mx - System.Math.Exp(logPhiL - logZ) * expMinus1 / X.Precision;
double mp2 = center + (delta / diff - System.Math.Exp(logPhiL - logZ) * expMinus1) / X.Precision;
double mp3 = center + (delta / diff * ZR2b - expMinus1) * System.Math.Exp(logPhiL - logZ) / X.Precision;
@ -1776,7 +1781,7 @@ zL = (L - mx)*sqrt(prec)
//WriteLast(mpSmallzUs);
double mp5 = center + numer5 * delta / diffs * alphaXcLprecDiffs;
//double mpBrute = Util.ArrayInit(10000000, i => X.Sample()).Where(sample => (sample > lowerBound) && (sample < upperBound)).Average();
//Console.WriteLine($"mp = {mp} {mp2} {mp3} {mpLargezL4:r} {mpLargezL5:r} {mpLargezL6:r} {mpLargezL7:r} {mpLargezL8:r} asympt:{mpAsympt} {mpAsympt2} {mp5}");
//Console.WriteLine($"mp = {mp} {mp2} {mp3} {mpLargezL4:g17} {mpLargezL5:g17} {mpLargezL6:g17} {mpLargezL7:g17} {mpLargezL8:g17} asympt:{mpAsympt} {mpAsympt2} {mp5}");
double cL = -1 / expMinus1;
// rU*diffs = rU*zU - rU*zL = r1U - 1 - rU*zL + rL*zL - rL*zL = r1U - 1 - drU*zL - (r1L-1) = dr1U - drU*zL
// zL = -diffs/2 - delta/diffs
@ -2226,6 +2231,43 @@ zL = (L - mx)*sqrt(prec)
}
}
/// <summary>
/// Produces a representative collection of proper TruncatedGamma distributions
/// whose upper bound is positive infinity.
/// </summary>
/// <returns>A lazily-generated sequence of lower-truncated Gamma distributions.</returns>
public static IEnumerable<TruncatedGamma> LowerTruncatedGammas()
{
    foreach (var baseGamma in Gammas())
    {
        foreach (var truncationPoint in DoublesAtLeastZero())
        {
            // A point mass below the truncation point would fall outside the support,
            // making the truncated distribution improper; skip that combination.
            bool pointMassOutsideSupport = baseGamma.IsPointMass && baseGamma.Point < truncationPoint;
            if (!pointMassOutsideSupport)
            {
                yield return new TruncatedGamma(baseGamma, truncationPoint, double.PositiveInfinity);
            }
        }
    }
}
/// <summary>
/// Produces a representative collection of proper TruncatedGamma distributions
/// with finite truncation intervals.
/// </summary>
/// <returns>A lazily-generated sequence of truncated Gamma distributions.</returns>
public static IEnumerable<TruncatedGamma> TruncatedGammas()
{
    foreach (var baseGamma in Gammas())
    {
        foreach (var lower in DoublesAtLeastZero())
        {
            foreach (var width in DoublesGreaterThanZero())
            {
                double upper = lower + width;
                // Guard against floating-point rounding: adding a tiny width to a
                // large lower bound can leave the two bounds exactly equal.
                if (upper == lower)
                {
                    continue;
                }

                // A point mass outside the truncation interval would make the
                // distribution improper; skip that combination.
                bool pointMassOutsideSupport =
                    baseGamma.IsPointMass && (baseGamma.Point < lower || baseGamma.Point > upper);
                if (!pointMassOutsideSupport)
                {
                    yield return new TruncatedGamma(baseGamma, lower, upper);
                }
            }
        }
    }
}
/// <summary>
/// Generates a representative set of proper Gaussian distributions.
/// </summary>
@ -2272,10 +2314,10 @@ zL = (L - mx)*sqrt(prec)
{
Parallel.ForEach(DoublesLessThanZero(), lowerBound =>
{
//Console.WriteLine($"isBetween = {isBetween}, lowerBound = {lowerBound:r}");
//Console.WriteLine($"isBetween = {isBetween}, lowerBound = {lowerBound:g17}");
foreach (var upperBound in new[] { -lowerBound }.Concat(UpperBounds(lowerBound)).Take(1))
{
//Console.WriteLine($"lowerBound = {lowerBound:r}, upperBound = {upperBound:r}");
//Console.WriteLine($"lowerBound = {lowerBound:g17}, upperBound = {upperBound:g17}");
double center = MMath.Average(lowerBound, upperBound);
if (double.IsNegativeInfinity(lowerBound) && double.IsPositiveInfinity(upperBound))
center = 0;
@ -2314,8 +2356,8 @@ zL = (L - mx)*sqrt(prec)
}
});
}
Console.WriteLine($"meanMaxUlpError = {meanMaxUlpError}, lowerBound = {meanMaxUlpErrorLowerBound:r}, upperBound = {meanMaxUlpErrorUpperBound:r}, isBetween = {meanMaxUlpErrorIsBetween}");
Console.WriteLine($"precMaxUlpError = {precMaxUlpError}, lowerBound = {precMaxUlpErrorLowerBound:r}, upperBound = {precMaxUlpErrorUpperBound:r}, isBetween = {precMaxUlpErrorIsBetween}");
Console.WriteLine($"meanMaxUlpError = {meanMaxUlpError}, lowerBound = {meanMaxUlpErrorLowerBound:g17}, upperBound = {meanMaxUlpErrorUpperBound:g17}, isBetween = {meanMaxUlpErrorIsBetween}");
Console.WriteLine($"precMaxUlpError = {precMaxUlpError}, lowerBound = {precMaxUlpErrorLowerBound:g17}, upperBound = {precMaxUlpErrorUpperBound:g17}, isBetween = {precMaxUlpErrorIsBetween}");
Assert.True(meanMaxUlpError == 0);
Assert.True(precMaxUlpError == 0);
}
@ -2331,7 +2373,7 @@ zL = (L - mx)*sqrt(prec)
for (int i = 0; i < 1000; i++)
{
double logProb = DoubleIsBetweenOp.LogProbBetween(x, lowerBound, upperBound);
Console.WriteLine($"{x.Precision:r} {logProb:r}");
Console.WriteLine($"{x.Precision:g17} {logProb:g17}");
x = Gaussian.FromMeanAndPrecision(x.GetMean(), x.Precision + 1000000000000 * MMath.Ulp(x.Precision));
}
}
@ -2348,7 +2390,7 @@ zL = (L - mx)*sqrt(prec)
{
foreach (double upperBound in new double[] { 1 }.Concat(UpperBounds(lowerBound)).Take(1))
{
if (trace) Trace.WriteLine($"lowerBound = {lowerBound:r}, upperBound = {upperBound:r}");
if (trace) Trace.WriteLine($"lowerBound = {lowerBound:g17}, upperBound = {upperBound:g17}");
foreach (var x in Gaussians())
{
if (x.IsPointMass) continue;
@ -2383,7 +2425,7 @@ zL = (L - mx)*sqrt(prec)
}
}
}
if (trace) Trace.WriteLine($"maxUlpError = {maxUlpError}, lowerBound = {maxUlpErrorLowerBound:r}, upperBound = {maxUlpErrorUpperBound:r}");
if (trace) Trace.WriteLine($"maxUlpError = {maxUlpError}, lowerBound = {maxUlpErrorLowerBound:g17}, upperBound = {maxUlpErrorUpperBound:g17}");
}
});
Assert.True(maxUlpError < 1e3);
@ -2404,7 +2446,7 @@ zL = (L - mx)*sqrt(prec)
{
foreach (double upperBound in new[] { -9999.9999999999982 }.Concat(UpperBounds(lowerBound)).Take(1))
{
if (trace) Trace.WriteLine($"lowerBound = {lowerBound:r}, upperBound = {upperBound:r}");
if (trace) Trace.WriteLine($"lowerBound = {lowerBound:g17}, upperBound = {upperBound:g17}");
Parallel.ForEach(Gaussians().Where(g => !g.IsPointMass), x =>
{
double mx = x.GetMean();
@ -2488,8 +2530,8 @@ zL = (L - mx)*sqrt(prec)
}
if (trace)
{
Trace.WriteLine($"meanMaxUlpError = {meanMaxUlpError}, lowerBound = {meanMaxUlpErrorLowerBound:r}, upperBound = {meanMaxUlpErrorUpperBound:r}");
Trace.WriteLine($"precMaxUlpError = {precMaxUlpError}, lowerBound = {precMaxUlpErrorLowerBound:r}, upperBound = {precMaxUlpErrorUpperBound:r}");
Trace.WriteLine($"meanMaxUlpError = {meanMaxUlpError}, lowerBound = {meanMaxUlpErrorLowerBound:g17}, upperBound = {meanMaxUlpErrorUpperBound:g17}");
Trace.WriteLine($"precMaxUlpError = {precMaxUlpError}, lowerBound = {precMaxUlpErrorLowerBound:g17}, upperBound = {precMaxUlpErrorUpperBound:g17}");
}
}
// meanMaxUlpError = 4271.53318407361, lowerBound = -1.0000000000000006E-12, upperBound = inf
@ -2513,7 +2555,7 @@ zL = (L - mx)*sqrt(prec)
{
foreach (double upperBound in new[] { 0.0 }.Concat(UpperBounds(lowerBound)).Take(1))
{
if (trace) Console.WriteLine($"lowerBound = {lowerBound:r}, upperBound = {upperBound:r}");
if (trace) Console.WriteLine($"lowerBound = {lowerBound:g17}, upperBound = {upperBound:g17}");
double center = (lowerBound + upperBound) / 2;
if (double.IsNegativeInfinity(lowerBound) && double.IsPositiveInfinity(upperBound))
center = 0;
@ -2586,8 +2628,8 @@ zL = (L - mx)*sqrt(prec)
}
if (trace)
{
Console.WriteLine($"meanMaxUlpError = {meanMaxUlpError}, lowerBound = {meanMaxUlpErrorLowerBound:r}, upperBound = {meanMaxUlpErrorUpperBound:r}");
Console.WriteLine($"precMaxUlpError = {precMaxUlpError}, lowerBound = {precMaxUlpErrorLowerBound:r}, upperBound = {precMaxUlpErrorUpperBound:r}");
Console.WriteLine($"meanMaxUlpError = {meanMaxUlpError}, lowerBound = {meanMaxUlpErrorLowerBound:g17}, upperBound = {meanMaxUlpErrorUpperBound:g17}");
Console.WriteLine($"precMaxUlpError = {precMaxUlpError}, lowerBound = {precMaxUlpErrorLowerBound:g17}, upperBound = {precMaxUlpErrorUpperBound:g17}");
}
}
// meanMaxUlpError = 104.001435643838, lowerBound = -1.0000000000000022E-37, upperBound = 9.9000000000000191E-36
@ -2612,7 +2654,7 @@ zL = (L - mx)*sqrt(prec)
{
foreach (double upperBound in new[] { 1.0 }.Concat(UpperBounds(lowerBound)).Take(1))
{
if (trace) Console.WriteLine($"lowerBound = {lowerBound:r}, upperBound = {upperBound:r}");
if (trace) Console.WriteLine($"lowerBound = {lowerBound:g17}, upperBound = {upperBound:g17}");
Parallel.ForEach(Gaussians(), x =>
{
Gaussian toX = DoubleIsBetweenOp.XAverageConditional(true, x, lowerBound, upperBound);
@ -2678,8 +2720,8 @@ zL = (L - mx)*sqrt(prec)
}
if (trace)
{
Console.WriteLine($"meanMaxUlpError = {meanMaxUlpError}, lowerBound = {meanMaxUlpErrorLowerBound:r}, upperBound = {meanMaxUlpErrorUpperBound:r}");
Console.WriteLine($"precMaxUlpError = {precMaxUlpError}, lowerBound = {precMaxUlpErrorLowerBound:r}, upperBound = {precMaxUlpErrorUpperBound:r}");
Console.WriteLine($"meanMaxUlpError = {meanMaxUlpError}, lowerBound = {meanMaxUlpErrorLowerBound:g17}, upperBound = {meanMaxUlpErrorUpperBound:g17}");
Console.WriteLine($"precMaxUlpError = {precMaxUlpError}, lowerBound = {precMaxUlpErrorLowerBound:g17}, upperBound = {precMaxUlpErrorUpperBound:g17}");
}
}
// meanMaxUlpError = 33584, lowerBound = -1E+30, upperBound = 9.9E+31
@ -3115,7 +3157,7 @@ weight * (tau + alphaX) + alphaX
Gaussian X = Gaussian.FromMeanAndPrecision(mean, System.Math.Pow(2, -i * 1 - 20));
Gaussian toX = DoubleIsBetweenOp.XAverageConditional_Slow(Bernoulli.PointMass(true), X, lowerBound, upperBound);
Gaussian toLowerBound = toLowerBoundPrev;// DoubleIsBetweenOp.LowerBoundAverageConditional_Slow(Bernoulli.PointMass(true), X, lowerBound, upperBound);
Trace.WriteLine($"{i} {X}: {toX.MeanTimesPrecision:r} {toX.Precision:r} {toLowerBound.MeanTimesPrecision:r} {toLowerBound.Precision:r}");
Trace.WriteLine($"{i} {X}: {toX.MeanTimesPrecision:g17} {toX.Precision:g17} {toLowerBound.MeanTimesPrecision:g17} {toLowerBound.Precision:g17}");
Assert.False(toLowerBound.IsPointMass);
if ((mean > 0 && toLowerBound.MeanTimesPrecision > toLowerBoundPrev.MeanTimesPrecision) ||
(mean < 0 && toLowerBound.MeanTimesPrecision < toLowerBoundPrev.MeanTimesPrecision))

Просмотреть файл

@ -130,8 +130,135 @@ namespace Microsoft.ML.Probabilistic.Tests
//}
}
/// <summary>
/// Test a model where inference can fail due to incorrect initial messages.
/// Failure only happens when ProductOp throws on uniform inputs.
/// Fails with "The distribution is improper" because Team1Perf_uses_F is uniform.
/// This happens because skill_uses_F[player][gameOfPlayer][1] is uniform for gameOfPlayer>0,
/// due to never being initialized in the initialization schedule.
/// </summary>
[Fact]
public void TrueSkill2Test()
{
    // Hyperparameters of the model. The commented-out ObservedValue lines toggle
    // whether each hyperparameter is inferred (as a point estimate) or held fixed;
    // the current mix is what triggers the bad initialization schedule.
    var performancePrecision = Variable.GammaFromMeanAndVariance(1, 1e-3);
    performancePrecision.Name = nameof(performancePrecision);
    //performancePrecision.ObservedValue = 1;
    var skillChangeWithMatchPrecision = Variable.GammaFromMeanAndVariance(1, 1e-3);
    skillChangeWithMatchPrecision.Name = nameof(skillChangeWithMatchPrecision);
    //skillChangeWithMatchPrecision.ObservedValue = 1;
    // Regression weights (p* = own performance, o* = opposing team's total performance)
    // and noise precisions for the kill/death likelihood terms below.
    var pKillW = Variable.GaussianFromMeanAndPrecision(0, 1);
    pKillW.Name = nameof(pKillW);
    pKillW.ObservedValue = 0;
    var oKillW = Variable.GaussianFromMeanAndPrecision(0, 1);
    oKillW.Name = nameof(oKillW);
    //oKillW.ObservedValue = 0;
    var killV = Variable.GammaFromMeanAndVariance(1, 1e-1);
    killV.Name = nameof(killV);
    killV.ObservedValue = 1;
    var pDeathW = Variable.GaussianFromMeanAndPrecision(0, 1);
    pDeathW.Name = nameof(pDeathW);
    //pDeathW.ObservedValue = 0;
    var oDeathW = Variable.GaussianFromMeanAndPrecision(0, 1);
    oDeathW.Name = nameof(oDeathW);
    //oDeathW.ObservedValue = 0;
    var deathV = Variable.GammaFromMeanAndVariance(1, 1e-1);
    deathV.Name = nameof(deathV);
    //deathV.ObservedValue = 1;
    // Each of the 4 players has a chain of per-game skills; player 0 plays 3 games,
    // the others play 1 each.
    Range player = new Range(4);
    player.Name = nameof(player);
    var gameCountOfPlayer = Variable.Observed(new int[] { 3, 1, 1, 1 }, player);
    gameCountOfPlayer.Name = nameof(gameCountOfPlayer);
    Range gameOfPlayer = new Range(gameCountOfPlayer[player]);
    gameOfPlayer.Name = nameof(gameOfPlayer);
    var priorSkill = Variable.Observed(new Gaussian[] { new Gaussian(0,1), new Gaussian(0, 1), new Gaussian(0, 1), new Gaussian(0, 1) }, player);
    var skill = Variable.Array(Variable.Array<double>(gameOfPlayer), player);
    skill.Name = nameof(skill);
    using (var playerBlock = Variable.ForEach(player))
    {
        var p = playerBlock.Index;
        using (var gameBlock = Variable.ForEach(gameOfPlayer))
        {
            var g = gameBlock.Index;
            // First game: skill is drawn from the player's prior.
            using (Variable.If(g == 0))
            {
                skill[player][gameOfPlayer] = Variable<double>.Random(priorSkill[player]);
            }
            // Later games: skill performs a Gaussian random walk from the previous game.
            // This offset dependency (g - 1) is what the init schedule mishandles.
            using (Variable.If(g > 0))
            {
                skill[player][gameOfPlayer] = Variable.GaussianFromMeanAndPrecision(skill[player][g - 1], skillChangeWithMatchPrecision);
            }
        }
    }
    // Observed match data: 3 one-vs-one games. Team*GameIndex maps each appearance
    // to the corresponding slot in that player's skill chain.
    Range game = new Range(3);
    game.Name = nameof(game);
    Range member = new Range(1);
    member.Name = nameof(member);
    var Team1Player = Variable.Observed(new int[][] { new int[] { 0 }, new int[] { 0 }, new int[] { 0 } }, game, member);
    Team1Player.Name = nameof(Team1Player);
    var Team1Kill = Variable.Observed(new double[][] { new double[] { 1 }, new double[] { 1 }, new double[] { 1 } }, game, member);
    var Team1Death = Variable.Observed(new double[][] { new double[] { 1 }, new double[] { 1 }, new double[] { 1 } }, game, member);
    var Team1GameIndex = Variable.Observed(new int[][] { new int[] { 0 }, new int[] { 1 }, new int[] { 2 } }, game, member);
    Team1GameIndex.Name = nameof(Team1GameIndex);
    var Team2Player = Variable.Observed(new int[][] { new int[] { 1 }, new int[] { 2 }, new int[] { 3 } }, game, member);
    Team2Player.Name = nameof(Team2Player);
    var Team2GameIndex = Variable.Observed(new int[][] { new int[] { 0 }, new int[] { 0 }, new int[] { 0 } }, game, member);
    Team2GameIndex.Name = nameof(Team2GameIndex);
    var Team1Perf = Variable.Array(Variable.Array<double>(member), game);
    Team1Perf.Name = nameof(Team1Perf);
    var Team2Perf = Variable.Array(Variable.Array<double>(member), game);
    Team2Perf.Name = nameof(Team2Perf);
    using (Variable.ForEach(game))
    {
        // Per-game performance is a noisy version of the player's skill at that game.
        using (var playerBlock = Variable.ForEach(member))
        {
            var PlayerIndex1 = Team1Player[game][member];
            var GameIndex1 = Team1GameIndex[game][member];
            Team1Perf[game][member] = Variable.GaussianFromMeanAndPrecision(skill[PlayerIndex1][GameIndex1], performancePrecision);
            var PlayerIndex2 = Team2Player[game][member];
            var GameIndex2 = Team2GameIndex[game][member];
            Team2Perf[game][member] = Variable.GaussianFromMeanAndPrecision(skill[PlayerIndex2][GameIndex2], performancePrecision);
        }
        Variable<double> Team1PerfSum = Variable.Sum(Team1Perf[game]);
        Team1PerfSum.Name = nameof(Team1PerfSum);
        Variable<double> Team2PerfSum = Variable.Sum(Team2Perf[game]);
        Team2PerfSum.Name = nameof(Team2PerfSum);
        // Kill/death counts are non-negative (Max with 0) noisy linear functions of
        // own performance and the opposing team's total performance.
        using (var playerBlock = Variable.ForEach(member))
        {
            Team1Kill[game][member] = Variable.Max(0.0, Variable.GaussianFromMeanAndPrecision(pKillW * Team1Perf[game][member] + oKillW * Team2PerfSum, killV));
            Team1Death[game][member] = Variable.Max(0.0, Variable.GaussianFromMeanAndPrecision(pDeathW * Team1Perf[game][member] + oDeathW * Team2PerfSum, deathV));
        }
    }
    // All hyperparameters are inferred as point estimates (expectation maximization).
    performancePrecision.AddAttribute(new PointEstimate());
    skillChangeWithMatchPrecision.AddAttribute(new PointEstimate());
    pKillW.AddAttribute(new PointEstimate());
    oKillW.AddAttribute(new PointEstimate());
    killV.AddAttribute(new PointEstimate());
    pDeathW.AddAttribute(new PointEstimate());
    oDeathW.AddAttribute(new PointEstimate());
    deathV.AddAttribute(new PointEstimate());
    game.AddAttribute(new Models.Attributes.Sequential()); // helps inference converge faster
    InferenceEngine engine = new InferenceEngine();
    engine.Infer(performancePrecision);
}
/// <summary>
/// Test a model where inference fails due to incorrect initial messages.
/// Fails with "The distribution is improper" because D_uses_F[4] is uniform.
/// This happens because D_uses_F[day][0] is not updated in the same loop as vdouble16_F which has an offset dependency on it.
/// </summary>
[Fact]
[Trait("Category", "OpenBug")]
@ -189,6 +316,7 @@ namespace Microsoft.ML.Probabilistic.Tests
/// <summary>
/// Tests a model with complex chain dependencies.
/// Fails with "enemyStrengthAfter is not defined in all cases"
/// </summary>
[Fact]
[Trait("Category", "OpenBug")]
@ -227,7 +355,7 @@ namespace Microsoft.ML.Probabilistic.Tests
{
var teamIndex = block.Index;
var isLast = (teamIndex == teamCount - 1).Named("isLast");
//using(Variable.If(isLast))
//using(Variable.If(isLast)) // this version works
using (Variable.If(teamIndex == lastIndex))
{
enemyStrengthAfter[teamIndex] = 0.0;
@ -344,9 +472,7 @@ namespace Microsoft.ML.Probabilistic.Tests
Console.WriteLine(engine.Infer(x));
}
// Fails with a bad init schedule, if the product operator is sensitive to uniform inputs
[Fact]
[Trait("Category", "OpenBug")]
public void ChangePointTest()
{
//double[] data = { 8, 7, -1, 7, 8, 6, 7, 9, 3, 1, 0, 4, 0, 0, 8, 9, 10, 7, 9, 10, 8, 10, 1, 1, 0, 5, 3, 6, 0, 0, 2 };
@ -5804,7 +5930,8 @@ namespace Microsoft.ML.Probabilistic.Tests
}
/// <summary>
/// This test fails due to a poor initialization schedule.
/// This test fails with "The distribution is improper" during initialization due to a poor initialization schedule.
/// receivingSkill_depth1_F[1] is uniform during the backward loop. It needs to be initialized by a forward loop.
/// Previously failed with "Internal: schedule splits group 278"
/// This happens due to a mistake by the repair algorithm (and possibly also rotation). Best seen via the TransformBrowser.
/// Also fails because servingSkill_depth1_F doesn't have its requirements. It has an Any requirement where the wrong option is chosen.

Просмотреть файл

@ -709,7 +709,7 @@ namespace Microsoft.ML.Probabilistic.Tests
// Make sure it doesn't contain epsilon transitions
Assert.True(automaton.States.All(s => s.Transitions.All(t => !t.IsEpsilon)));
}
}
}
/// <summary>
@ -732,7 +732,7 @@ namespace Microsoft.ML.Probabilistic.Tests
state.SetEndWeight(Weight.One);
var automaton = builder.GetAutomaton();
var closure = automaton.Start.GetEpsilonClosure();
var closure = new Automaton<string, char, DiscreteChar, StringManipulator, StringAutomaton>.EpsilonClosure(automaton, automaton.Start);
Assert.Equal(ChainLength + 1, closure.Size);
Assert.Equal(Math.Log(1 << ChainLength), closure.EndWeight.LogValue);
@ -884,6 +884,40 @@ namespace Microsoft.ML.Probabilistic.Tests
}
}
/// <summary>
/// Tests whether StringAutomaton.UnlimitedStatesComputation.CheckStateCount() works as expected.
/// </summary>
[Fact]
[Trait("Category", "StringInference")]
public void CheckStateCount()
{
    using (var unlimited = new StringAutomaton.UnlimitedStatesComputation())
    {
        var builder = new StringAutomaton.Builder();
        var current = builder.Start;

        // Extends the chain of 'a'-transitions by the given number of states.
        void ExtendChain(int count)
        {
            for (var added = 0; added < count; ++added)
            {
                current = current.AddTransition('a', Weight.One);
            }
        }

        ExtendChain(199999);
        var automaton = builder.GetAutomaton();

        // ~200k states is below the default limit, so this must not throw.
        unlimited.CheckStateCount(automaton);

        ExtendChain(199999);
        automaton = builder.GetAutomaton();

        // Doubling the chain (~400k states) pushes it over the default limit.
        Assert.Throws<AutomatonTooLargeException>(() => unlimited.CheckStateCount(automaton));
    }
}
/// <summary>
/// Tests creating an automaton from state and transition lists.
/// </summary>
@ -895,16 +929,16 @@ namespace Microsoft.ML.Probabilistic.Tests
var automaton1 = StringAutomaton.FromData(
new StringAutomaton.DataContainer(
0,
new[]
{
ImmutableArray.Create(
new StringAutomaton.StateData(0, 1, Weight.One),
new StringAutomaton.StateData(1, 0, Weight.One),
},
new[] { new StringAutomaton.Transition(DiscreteChar.PointMass('a'), Weight.One, 1) },
new StringAutomaton.StateData(1, 0, Weight.One)),
ImmutableArray.Create(
new StringAutomaton.Transition(DiscreteChar.PointMass('a'), Weight.One, 1)),
isEpsilonFree: true,
usesGroups: false,
isDeterminized: null,
isZero: null));
isZero: null,
isEnumerable: null));
StringInferenceTestUtilities.TestValue(automaton1, 1.0, string.Empty, "a");
StringInferenceTestUtilities.TestValue(automaton1, 0.0, "b");
@ -913,12 +947,13 @@ namespace Microsoft.ML.Probabilistic.Tests
var automaton2 = StringAutomaton.FromData(
new StringAutomaton.DataContainer(
0,
new[] { new StringAutomaton.StateData(0, 0, Weight.Zero) },
Array.Empty<StringAutomaton.Transition>(),
ImmutableArray.Create(new StringAutomaton.StateData(0, 0, Weight.Zero)),
ImmutableArray<StringAutomaton.Transition>.Empty,
isEpsilonFree: true,
usesGroups: false,
isDeterminized: true,
isZero: true));
isZero: true,
isEnumerable: true));
Assert.True(automaton2.IsZero());
// Bad start state index
@ -926,48 +961,52 @@ namespace Microsoft.ML.Probabilistic.Tests
() => StringAutomaton.FromData(
new StringAutomaton.DataContainer(
0,
Array.Empty<StringAutomaton.StateData>(),
Array.Empty<StringAutomaton.Transition>(),
ImmutableArray<StringAutomaton.StateData>.Empty,
ImmutableArray<StringAutomaton.Transition>.Empty,
isEpsilonFree: true,
usesGroups: false,
isDeterminized: false,
isZero: true)));
isZero: true,
isEnumerable: false)));
// automaton is actually epsilon-free, but data says that it is
Assert.Throws<ArgumentException>(
() => StringAutomaton.FromData(
new StringAutomaton.DataContainer(
0,
new[] { new StringAutomaton.StateData(0, 0, Weight.Zero) },
Array.Empty<StringAutomaton.Transition>(),
ImmutableArray.Create(new StringAutomaton.StateData(0, 0, Weight.Zero)),
ImmutableArray<StringAutomaton.Transition>.Empty,
isEpsilonFree: false,
usesGroups: false,
isDeterminized: null,
isZero: null)));
isZero: null,
isEnumerable: null)));
// automaton is not epsilon-free
Assert.Throws<ArgumentException>(
() => StringAutomaton.FromData(
new StringAutomaton.DataContainer(
0,
new[] { new StringAutomaton.StateData(0, 1, Weight.Zero) },
new[] { new StringAutomaton.Transition(Option.None, Weight.One, 1) },
ImmutableArray.Create(new StringAutomaton.StateData(0, 1, Weight.Zero)),
ImmutableArray.Create(new StringAutomaton.Transition(Option.None, Weight.One, 1)),
isEpsilonFree: false,
usesGroups: false,
isDeterminized: null,
isZero: null)));
isZero: null,
isEnumerable: null)));
// Incorrect transition index
Assert.Throws<ArgumentException>(
() => StringAutomaton.FromData(
new StringAutomaton.DataContainer(
0,
new[] { new StringAutomaton.StateData(0, 1, Weight.One) },
new[] { new StringAutomaton.Transition(Option.None, Weight.One, 2) },
ImmutableArray.Create(new StringAutomaton.StateData(0, 1, Weight.One)),
ImmutableArray.Create(new StringAutomaton.Transition(Option.None, Weight.One, 2)),
true,
false,
isDeterminized: null,
isZero: null)));
isZero: null,
isEnumerable: null)));
}
#region ToString tests
@ -2169,9 +2208,11 @@ namespace Microsoft.ML.Probabilistic.Tests
builder[0].AddTransition(DiscreteChar.UniformOver('c', 'd'), Weight.FromValue(1), 2);
builder[2].AddTransition(DiscreteChar.UniformOver('e', 'f'), Weight.FromValue(1), 3);
builder[2].AddTransition(DiscreteChar.UniformOver('g', 'h'), Weight.FromValue(1), 4);
builder[2].AddEpsilonTransition(Weight.FromValue(1), 4);
builder[4].AddTransition(DiscreteChar.UniformOver('i', 'j'), Weight.FromValue(1), 5);
builder[4].AddTransition(DiscreteChar.UniformOver('k', 'l'), Weight.FromValue(1), 5);
builder[0].SetEndWeight(Weight.FromValue(1));
builder[1].SetEndWeight(Weight.FromValue(1));
builder[3].SetEndWeight(Weight.FromValue(1));
builder[5].SetEndWeight(Weight.FromValue(1));
@ -2181,18 +2222,59 @@ namespace Microsoft.ML.Probabilistic.Tests
var expectedSupport = new HashSet<string>
{
"",
"a", "b",
"ce", "cf",
"cgi", "cgj", "cgk", "cgl",
"chi", "chj", "chk", "chl",
"ci", "cj", "ck", "cl",
"de", "df",
"dgi", "dgj", "dgk", "dgl",
"dhi", "dhj", "dhk", "dhl"
"dhi", "dhj", "dhk", "dhl",
"di", "dj", "dk", "dl"
};
var caclulatedSupport = new HashSet<string>(automaton.EnumerateSupport());
var calculatedSupport1= new HashSet<string>(automaton.EnumerateSupport(tryDeterminize: false));
Assert.True(calculatedSupport1.SetEquals(expectedSupport));
Assert.True(caclulatedSupport.SetEquals(expectedSupport));
var calculatedSupport2 = new HashSet<string>(automaton.EnumerateSupport(tryDeterminize: true));
Assert.True(calculatedSupport2.SetEquals(expectedSupport));
}
/// <summary>
/// Tests that enumerating the support of an automaton containing a loop throws.
/// </summary>
[Fact]
[Trait("Category", "StringInference")]
public void EnumerateSupportThrowsOnLoop()
{
    // Build a two-state automaton with a cycle: start -a-> middle -a-> start,
    // where the start state is also accepting, so the support is infinite.
    var builder = new StringAutomaton.Builder();
    var middle = builder.Start.AddTransition('a', Weight.One);
    var backAtStart = middle.AddTransition('a', Weight.One, 0);
    backAtStart.SetEndWeight(Weight.One);
    var automaton = builder.GetAutomaton();

    Assert.Throws<NotSupportedException>(() => automaton.EnumerateSupport().ToList());
}
/// <summary>
/// Tests that TryEnumerateSupport reports failure for an automaton containing a loop.
/// </summary>
[Fact]
[Trait("Category", "StringInference")]
public void TryEnumerateSupportReturnsFalseOnLoop()
{
    // Build a two-state automaton with a cycle: start -a-> middle -a-> start,
    // where the start state is also accepting, so the support is infinite.
    var builder = new StringAutomaton.Builder();
    var middle = builder.Start.AddTransition('a', Weight.One);
    var backAtStart = middle.AddTransition('a', Weight.One, 0);
    backAtStart.SetEndWeight(Weight.One);
    var automaton = builder.GetAutomaton();

    Assert.False(automaton.TryEnumerateSupport(Int32.MaxValue, out _));
}
[Trait("Category", "BadTest")] // Performance tests which look for exact timings are likely to fail on the build machine
@ -2214,6 +2296,10 @@ namespace Microsoft.ML.Probabilistic.Tests
builder[5].SetEndWeight(Weight.FromValue(1));
builder[6].SetEndWeight(Weight.FromValue(1));
var s = builder[5];
for (var i = 0; i < 100; ++i) s = s.AddTransition('a', Weight.One);
s.SetEndWeight(Weight.One);
var automaton = builder.GetAutomaton();
int numPasses = 10000;

Просмотреть файл

@ -2,7 +2,9 @@
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Reflection.Metadata.Ecma335;
using Microsoft.ML.Probabilistic.Math;
using Microsoft.ML.Probabilistic.Utilities;
namespace Microsoft.ML.Probabilistic.Tests
{
@ -10,13 +12,15 @@ namespace Microsoft.ML.Probabilistic.Tests
using System.Collections.Generic;
using Xunit;
using Microsoft.ML.Probabilistic.Distributions;
using Assert = Xunit.Assert;
using Assert = Microsoft.ML.Probabilistic.Tests.AssertHelper;
/// <summary>
/// Tests for <see cref="DiscreteChar"/>.
/// </summary>
public class DiscreteCharTest
{
const double Eps = 1e-10;
/// <summary>
/// Runs a set of common distribution tests for <see cref="DiscreteChar"/>.
/// </summary>
@ -63,7 +67,7 @@ namespace Microsoft.ML.Probabilistic.Tests
return;
}
Assert.True(false);
Xunit.Assert.True(false);
}
[Fact]
@ -88,7 +92,7 @@ namespace Microsoft.ML.Probabilistic.Tests
var unif = Vector.Constant(numChars, 1.0 / numChars);
var maxDiff = hist.MaxDiff(unif);
Assert.True(maxDiff < 0.01);
Xunit.Assert.True(maxDiff < 0.01);
}
[Fact]
@ -106,7 +110,7 @@ namespace Microsoft.ML.Probabilistic.Tests
ab.SetToSum(1, a, 2, b);
// 2 subsequent ranges
Assert.Equal(2, ab.Ranges.Count);
Xunit.Assert.Equal(2, ab.Ranges.Count);
TestComplement(ab);
void TestComplement(DiscreteChar dist)
@ -117,21 +121,123 @@ namespace Microsoft.ML.Probabilistic.Tests
var complement = dist.Complement();
// complement should always be partial uniform
Assert.True(complement.IsPartialUniform());
Xunit.Assert.True(complement.IsPartialUniform());
// overlap is zero
Assert.True(double.IsNegativeInfinity(dist.GetLogAverageOf(complement)));
Assert.True(double.IsNegativeInfinity(uniformDist.GetLogAverageOf(complement)));
Xunit.Assert.True(double.IsNegativeInfinity(dist.GetLogAverageOf(complement)));
Xunit.Assert.True(double.IsNegativeInfinity(uniformDist.GetLogAverageOf(complement)));
// the union covers the whole range
var sum = default(DiscreteChar);
sum.SetToSum(1, dist, 1, complement);
sum.SetToPartialUniform();
Assert.True(sum.IsUniform());
Xunit.Assert.True(sum.IsUniform());
// Doing complement again will cover the same set of characters
var complement2 = complement.Complement();
Assert.Equal(uniformDist, complement2);
Xunit.Assert.Equal(uniformDist, complement2);
}
}
[Fact]
public void PartialUniformWithLogProbabilityOverride()
{
    var source = DiscreteChar.LetterOrDigit();
    var originalProbLetter = Math.Exp(source.GetLogProb('j'));
    var originalProbDigit = Math.Exp(source.GetLogProb('5'));

    var overrideLogProb = Math.Log(0.7);
    var rescaled = DiscreteChar.Uniform();
    rescaled.SetToPartialUniformOf(source, overrideLogProb);

    // Every character in the support now carries the override probability.
    Assert.Equal(rescaled.GetLogProb('j'), overrideLogProb, Eps);
    Assert.Equal(rescaled.GetLogProb('5'), overrideLogProb, Eps);

    // The source distribution is unaffected (its cache has not been compromised).
    Assert.Equal(originalProbLetter, Math.Exp(source.GetLogProb('j')), Eps);
    Assert.Equal(originalProbDigit, Math.Exp(source.GetLogProb('5')), Eps);

    // A probability above 1 is an invalid override and must be rejected.
    Xunit.Assert.Throws<ArgumentException>(() =>
    {
        var invalid = DiscreteChar.Uniform();
        invalid.SetToPartialUniformOf(source, Math.Log(1.2));
    });
}
[Fact]
public void BroadAndNarrow()
{
    // All ten digits form a broad distribution...
    var digits = DiscreteChar.Digit();
    Xunit.Assert.True(digits.IsBroad);

    // ...while a four-character support does not.
    var fourChars = DiscreteChar.OneOf('1', '3', '5', '6');
    Xunit.Assert.False(fourChars.IsBroad);
}
[Fact]
public void HasLogOverride()
{
    // A freshly-built distribution carries no log-probability override...
    var dist = DiscreteChar.LetterOrDigit();
    Xunit.Assert.False(dist.HasLogProbabilityOverride);

    // ...and acquires one after SetToPartialUniformOf with an explicit log probability.
    dist.SetToPartialUniformOf(dist, Math.Log(0.9));
    Xunit.Assert.True(dist.HasLogProbabilityOverride);
}
[Fact]
public void ProductWithLogOverrideBroad()
{
    // Run both operand orders to check the product is symmetric in this respect.
    foreach (var swapOperands in new[] { false, true })
    {
        var overridden = DiscreteChar.LetterOrDigit();
        var broad = DiscreteChar.Digit();
        var overrideLogProb = Math.Log(0.9);
        overridden.SetToPartialUniformOf(overridden, overrideLogProb);
        Xunit.Assert.True(overridden.HasLogProbabilityOverride);
        Xunit.Assert.True(broad.IsBroad);

        if (swapOperands)
        {
            Util.Swap(ref overridden, ref broad);
        }

        var product = DiscreteChar.Uniform();
        product.SetToProduct(overridden, broad);

        // The override survives a product with a broad distribution.
        Xunit.Assert.True(product.HasLogProbabilityOverride);
        Assert.Equal(overrideLogProb, product.GetLogProb('5'), Eps);
        Xunit.Assert.True(double.IsNegativeInfinity(product.GetLogProb('a')));
    }
}
[Fact]
public void ProductWithLogOverrideNarrow()
{
    // Run both operand orders to check the product is symmetric in this respect.
    foreach (var swapOperands in new[] { false, true })
    {
        var overridden = DiscreteChar.LetterOrDigit();
        var narrow = DiscreteChar.OneOf('1', '3', '5', '6');
        var overrideLogProb = Math.Log(0.9);
        overridden.SetToPartialUniformOf(overridden, overrideLogProb);
        Xunit.Assert.True(overridden.HasLogProbabilityOverride);
        Xunit.Assert.False(narrow.IsBroad);

        if (swapOperands)
        {
            Util.Swap(ref overridden, ref narrow);
        }

        var product = DiscreteChar.Uniform();
        product.SetToProduct(overridden, narrow);

        // A narrow operand drops the override; the result is the ordinary product.
        Xunit.Assert.False(product.HasLogProbabilityOverride);
        Assert.Equal(Math.Log(0.25), product.GetLogProb('5'), Eps);
        Xunit.Assert.True(double.IsNegativeInfinity(product.GetLogProb('a')));
    }
}
@ -152,12 +258,12 @@ namespace Microsoft.ML.Probabilistic.Tests
foreach (var ch in included)
{
Assert.True(!double.IsNegativeInfinity(distribution.GetLogProb(ch)), distribution + " should contain " + ch);
Xunit.Assert.True(!double.IsNegativeInfinity(distribution.GetLogProb(ch)), distribution + " should contain " + ch);
}
foreach (var ch in excluded)
{
Assert.True(double.IsNegativeInfinity(distribution.GetLogProb(ch)), distribution + " should not contain " + ch);
Xunit.Assert.True(double.IsNegativeInfinity(distribution.GetLogProb(ch)), distribution + " should not contain " + ch);
}
}
}

Просмотреть файл

@ -629,14 +629,15 @@ namespace Microsoft.ML.Probabilistic.Tests
AssertStochastic(automaton);
StringAutomaton.EpsilonClosure startClosure = automaton.Start.GetEpsilonClosure();
StringAutomaton.EpsilonClosure startClosure = new Automaton<string, char, DiscreteChar, StringManipulator, StringAutomaton>.EpsilonClosure(automaton, automaton.Start);
Assert.Equal(3, startClosure.Size);
Assert.Equal(0.0, startClosure.EndWeight.LogValue, 1e-8);
for (int i = 0; i < startClosure.Size; ++i)
{
Weight weight = startClosure.GetStateWeightByIndex(i);
double expectedWeight = startClosure.GetStateByIndex(i) == automaton.Start ? 10 : 4;
var state = startClosure.GetStateByIndex(i);
double expectedWeight = state == automaton.Start ? 10 : 4;
Assert.Equal(expectedWeight, weight.Value, 1e-8);
}
}

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше