Merged PR 43330: Release 0.12.53.2

Release 0.12.53.2 to master

Related work items: #126416
This commit is contained in:
Nick Saw 2020-06-17 00:33:21 +00:00
Родитель dc11ff3f87
Коммит fc6bdfd5f4
467 изменённых файлов: 14733 добавлений и 9611 удалений

1
.gitattributes поставляемый Normal file
Просмотреть файл

@ -0,0 +1 @@
*.sh text eol=lf

Просмотреть файл

@ -1,5 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<RuleSet Name="Microsoft Psi Released Project Rules" Description="These rules are used for released Psi projects." ToolsVersion="15.0">
<RuleSet Name="Microsoft Psi Released Project Rules" Description="These rules are used for released Psi projects." ToolsVersion="16.0">
<Include Path="Security.ruleset" Action="Default" />
<Rules AnalyzerId="Microsoft.Analyzers.ManagedCodeAnalysis" RuleNamespace="Microsoft.Rules.Managed">
<Rule Id="CA1001" Action="Warning" />
<Rule Id="CA1009" Action="Warning" />
@ -8,6 +9,7 @@
<Rule Id="CA1049" Action="Warning" />
<Rule Id="CA1060" Action="Warning" />
<Rule Id="CA1061" Action="Warning" />
<Rule Id="CA1063" Action="None" />
<Rule Id="CA1065" Action="Warning" />
<Rule Id="CA1301" Action="Warning" />
<Rule Id="CA1400" Action="Warning" />
@ -71,7 +73,7 @@
<Rule Id="SA1009" Action="None" />
<Rule Id="SA1124" Action="None" />
<Rule Id="SA1305" Action="Warning" />
<Rule Id="SA1504" Action="None" />
<Rule Id="SA1407" Action="None" />
<Rule Id="SA1504" Action="None" />
</Rules>
</RuleSet>

Просмотреть файл

@ -1,9 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<RuleSet Name="Microsoft Psi Sample Project Rules" Description="These rules are used for Psi sample projects." ToolsVersion="10.0">
<Localization ResourceAssembly="Microsoft.VisualStudio.CodeAnalysis.RuleSets.Strings.dll" ResourceBaseName="Microsoft.VisualStudio.CodeAnalysis.RuleSets.Strings.Localized">
<Name Resource="MinimumRecommendedRules_Name" />
<Description Resource="MinimumRecommendedRules_Description" />
</Localization>
<RuleSet Name="Microsoft Psi Sample Project Rules" Description="These rules are used for Psi sample projects." ToolsVersion="15.0">
<Include Path="Security.ruleset" Action="Default" />
<Rules AnalyzerId="Microsoft.Analyzers.ManagedCodeAnalysis" RuleNamespace="Microsoft.Rules.Managed">
<Rule Id="CA1001" Action="Warning" />
<Rule Id="CA1009" Action="Warning" />
@ -12,7 +9,7 @@
<Rule Id="CA1049" Action="Warning" />
<Rule Id="CA1060" Action="Warning" />
<Rule Id="CA1061" Action="Warning" />
<Rule Id="CA1063" Action="Warning" />
<Rule Id="CA1063" Action="None" />
<Rule Id="CA1065" Action="Warning" />
<Rule Id="CA1301" Action="Warning" />
<Rule Id="CA1400" Action="Warning" />
@ -74,9 +71,9 @@
<Rule Id="SA1124" Action="None" />
<Rule Id="SA1305" Action="Warning" />
<Rule Id="SA1401" Action="None" />
<Rule Id="SA1402" Action="None" />
<Rule Id="SA1407" Action="None" />
<Rule Id="SA1504" Action="None" />
<Rule Id="SA1652" Action="None" />
<Rule Id="SA1402" Action="None" />
</Rules>
</RuleSet>

207
Build/Security.ruleset Normal file
Просмотреть файл

@ -0,0 +1,207 @@
<?xml version="1.0" encoding="utf-8"?>
<RuleSet Name="Microsoft Psi Security Rules" Description="This rule set contains required and recommended security rules." ToolsVersion="16.0">
<Rules AnalyzerId="Microsoft.CodeAnalysis.VersionCheckAnalyzer" RuleNamespace="Microsoft.CodeAnalysis.VersionCheckAnalyzer">
<Rule Id="CA9999" Action="None" />
</Rules>
<Rules AnalyzerId="Microsoft.CodeQuality.Analyzers" RuleNamespace="Microsoft.CodeQuality.Analyzers">
<Rule Id="CA1000" Action="None" />
<Rule Id="CA1010" Action="None" />
<Rule Id="CA1016" Action="None" />
<Rule Id="CA1018" Action="None" />
<Rule Id="CA1021" Action="None" />
<Rule Id="CA1028" Action="None" />
<Rule Id="CA1030" Action="None" />
<Rule Id="CA1031" Action="None" />
<Rule Id="CA1034" Action="None" />
<Rule Id="CA1036" Action="None" />
<Rule Id="CA1040" Action="None" />
<Rule Id="CA1041" Action="None" />
<Rule Id="CA1043" Action="None" />
<Rule Id="CA1044" Action="None" />
<Rule Id="CA1051" Action="None" />
<Rule Id="CA1052" Action="None" />
<Rule Id="CA1054" Action="None" />
<Rule Id="CA1055" Action="None" />
<Rule Id="CA1056" Action="None" />
<Rule Id="CA1061" Action="None" />
<Rule Id="CA1062" Action="None" />
<Rule Id="CA1063" Action="None" />
<Rule Id="CA1064" Action="None" />
<Rule Id="CA1066" Action="None" />
<Rule Id="CA1067" Action="None" />
<Rule Id="CA1068" Action="None" />
<Rule Id="CA1069" Action="None" />
<Rule Id="CA1707" Action="None" />
<Rule Id="CA1710" Action="None" />
<Rule Id="CA1712" Action="None" />
<Rule Id="CA1714" Action="None" />
<Rule Id="CA1715" Action="None" />
<Rule Id="CA1716" Action="None" />
<Rule Id="CA1717" Action="None" />
<Rule Id="CA1720" Action="None" />
<Rule Id="CA1721" Action="None" />
<Rule Id="CA1724" Action="None" />
<Rule Id="CA1801" Action="None" />
<Rule Id="CA1802" Action="None" />
<Rule Id="CA1806" Action="None" />
<Rule Id="CA1812" Action="None" />
<Rule Id="CA1814" Action="None" />
<Rule Id="CA1815" Action="None" />
<Rule Id="CA1819" Action="None" />
<Rule Id="CA1821" Action="None" />
<Rule Id="CA1822" Action="None" />
<Rule Id="CA1823" Action="None" />
<Rule Id="CA2007" Action="None" />
<Rule Id="CA2011" Action="None" />
<Rule Id="CA2119" Action="None" />
<Rule Id="CA2211" Action="None" />
<Rule Id="CA2214" Action="None" />
<Rule Id="CA2219" Action="None" />
<Rule Id="CA2225" Action="None" />
<Rule Id="CA2226" Action="None" />
<Rule Id="CA2227" Action="None" />
<Rule Id="CA2231" Action="None" />
<Rule Id="CA2244" Action="None" />
<Rule Id="CA2245" Action="None" />
<Rule Id="CA2246" Action="None" />
</Rules>
<Rules AnalyzerId="Microsoft.CodeQuality.CSharp.Analyzers" RuleNamespace="Microsoft.CodeQuality.CSharp.Analyzers">
<Rule Id="Async001" Action="None" />
<Rule Id="Async002" Action="None" />
<Rule Id="Async003" Action="None" />
<Rule Id="Async004" Action="None" />
<Rule Id="Async005" Action="None" />
<Rule Id="Async006" Action="None" />
<Rule Id="CA1001" Action="None" />
<Rule Id="CA1032" Action="None" />
<Rule Id="CA1065" Action="None" />
<Rule Id="CA1200" Action="None" />
<Rule Id="CA1507" Action="None" />
<Rule Id="CA2200" Action="None" />
<Rule Id="CA2234" Action="None" />
</Rules>
<Rules AnalyzerId="Microsoft.CodeQuality.VisualBasic.Analyzers" RuleNamespace="Microsoft.CodeQuality.VisualBasic.Analyzers">
<Rule Id="Async001" Action="None" />
<Rule Id="Async002" Action="None" />
<Rule Id="Async003" Action="None" />
<Rule Id="Async004" Action="None" />
<Rule Id="Async005" Action="None" />
<Rule Id="Async006" Action="None" />
<Rule Id="CA1001" Action="None" />
<Rule Id="CA1003" Action="None" />
<Rule Id="CA1019" Action="None" />
<Rule Id="CA1032" Action="None" />
<Rule Id="CA1065" Action="None" />
<Rule Id="CA1200" Action="None" />
<Rule Id="CA1507" Action="None" />
<Rule Id="CA2200" Action="None" />
<Rule Id="CA2218" Action="None" />
<Rule Id="CA2224" Action="None" />
<Rule Id="CA2234" Action="None" />
</Rules>
<Rules AnalyzerId="Microsoft.NetCore.Analyzers" RuleNamespace="Microsoft.NetCore.Analyzers">
<Rule Id="CA1303" Action="None" />
<Rule Id="CA1304" Action="None" />
<Rule Id="CA1305" Action="None" />
<Rule Id="CA1307" Action="None" />
<Rule Id="CA1308" Action="None" />
<Rule Id="CA1401" Action="None" />
<Rule Id="CA1816" Action="None" />
<Rule Id="CA1820" Action="None" />
<Rule Id="CA1826" Action="None" />
<Rule Id="CA1827" Action="None" />
<Rule Id="CA1828" Action="None" />
<Rule Id="CA2000" Action="None" />
<Rule Id="CA2002" Action="None" />
<Rule Id="CA2008" Action="None" />
<Rule Id="CA2009" Action="None" />
<Rule Id="CA2012" Action="None" />
<Rule Id="CA2013" Action="None" />
<Rule Id="CA2100" Action="None" />
<Rule Id="CA2101" Action="None" />
<Rule Id="CA2201" Action="None" />
<Rule Id="CA2208" Action="None" />
<Rule Id="CA2213" Action="None" />
<Rule Id="CA2215" Action="None" />
<Rule Id="CA2216" Action="None" />
<Rule Id="CA2229" Action="None" />
<Rule Id="CA2235" Action="None" />
<Rule Id="CA2237" Action="None" />
<Rule Id="CA2241" Action="None" />
<Rule Id="CA2242" Action="None" />
<Rule Id="CA2243" Action="None" />
<Rule Id="CA2301" Action="Warning" />
<Rule Id="CA2302" Action="Warning" />
<Rule Id="CA2305" Action="Warning" />
<Rule Id="CA2311" Action="Warning" />
<Rule Id="CA2312" Action="Warning" />
<Rule Id="CA2315" Action="Warning" />
<Rule Id="CA2321" Action="Warning" />
<Rule Id="CA2322" Action="Info" />
<Rule Id="CA2327" Action="Warning" />
<Rule Id="CA2328" Action="Warning" />
<Rule Id="CA2329" Action="Warning" />
<Rule Id="CA2330" Action="Warning" />
<Rule Id="CA3001" Action="Info" />
<Rule Id="CA3002" Action="Info" />
<Rule Id="CA3003" Action="Info" />
<Rule Id="CA3004" Action="Info" />
<Rule Id="CA3005" Action="Info" />
<Rule Id="CA3006" Action="Info" />
<Rule Id="CA3007" Action="Info" />
<Rule Id="CA3008" Action="Info" />
<Rule Id="CA3009" Action="Info" />
<Rule Id="CA3010" Action="Info" />
<Rule Id="CA3011" Action="Info" />
<Rule Id="CA3012" Action="Info" />
<Rule Id="CA5358" Action="Warning" />
<Rule Id="CA5359" Action="Info" />
<Rule Id="CA5360" Action="None" />
<Rule Id="CA5363" Action="None" />
<Rule Id="CA5365" Action="None" />
<Rule Id="CA5366" Action="None" />
<Rule Id="CA5368" Action="None" />
<Rule Id="CA5369" Action="None" />
<Rule Id="CA5370" Action="None" />
<Rule Id="CA5371" Action="None" />
<Rule Id="CA5372" Action="None" />
<Rule Id="CA5373" Action="None" />
<Rule Id="CA5374" Action="None" />
<Rule Id="CA5376" Action="None" />
<Rule Id="CA5377" Action="None" />
<Rule Id="CA5379" Action="None" />
<Rule Id="CA5380" Action="Info" />
<Rule Id="CA5381" Action="Info" />
<Rule Id="CA5384" Action="None" />
<Rule Id="CA5385" Action="None" />
<Rule Id="CA5386" Action="Info" />
<Rule Id="CA5391" Action="Info" />
<Rule Id="CA5395" Action="Info" />
<Rule Id="CA5396" Action="Info" />
<Rule Id="CA5398" Action="Info" />
</Rules>
<Rules AnalyzerId="Microsoft.NetCore.CSharp.Analyzers" RuleNamespace="Microsoft.NetCore.CSharp.Analyzers">
<Rule Id="CA1810" Action="None" />
<Rule Id="CA1824" Action="None" />
<Rule Id="CA1825" Action="None" />
<Rule Id="CA1829" Action="None" />
<Rule Id="CA2010" Action="None" />
<Rule Id="CA2207" Action="None" />
</Rules>
<Rules AnalyzerId="Microsoft.NetCore.VisualBasic.Analyzers" RuleNamespace="Microsoft.NetCore.VisualBasic.Analyzers">
<Rule Id="CA1309" Action="None" />
<Rule Id="CA1810" Action="None" />
<Rule Id="CA1824" Action="None" />
<Rule Id="CA1825" Action="None" />
<Rule Id="CA1829" Action="None" />
<Rule Id="CA2010" Action="None" />
<Rule Id="CA2207" Action="None" />
</Rules>
<Rules AnalyzerId="Microsoft.NetFramework.Analyzers" RuleNamespace="Microsoft.NetFramework.Analyzers">
<Rule Id="CA1058" Action="None" />
</Rules>
<Rules AnalyzerId="Microsoft.NetFramework.VisualBasic.Analyzers" RuleNamespace="Microsoft.NetFramework.VisualBasic.Analyzers">
<Rule Id="CA3076" Action="Warning" />
<Rule Id="CA3077" Action="Warning" />
</Rules>
</RuleSet>

Просмотреть файл

@ -1,18 +1,15 @@
<?xml version="1.0" encoding="utf-8"?>
<RuleSet Name="Microsoft Psi Test Project Rules" Description="These rules are used for Psi test projects." ToolsVersion="16.0">
<Localization ResourceAssembly="Microsoft.VisualStudio.CodeAnalysis.RuleSets.Strings.dll" ResourceBaseName="Microsoft.VisualStudio.CodeAnalysis.RuleSets.Strings.Localized">
<Name Resource="MinimumRecommendedRules_Name" />
<Description Resource="MinimumRecommendedRules_Description" />
</Localization>
<RuleSet Name="Microsoft Psi Test Project Rules" Description="These rules are used for Psi test projects." ToolsVersion="15.0">
<Include Path="Security.ruleset" Action="Default" />
<Rules AnalyzerId="Microsoft.Analyzers.ManagedCodeAnalysis" RuleNamespace="Microsoft.Rules.Managed">
<Rule Id="CA1001" Action="Warning" />
<Rule Id="CA1001" Action="None" />
<Rule Id="CA1009" Action="Warning" />
<Rule Id="CA1016" Action="Warning" />
<Rule Id="CA1033" Action="Warning" />
<Rule Id="CA1049" Action="Warning" />
<Rule Id="CA1060" Action="Warning" />
<Rule Id="CA1061" Action="Warning" />
<Rule Id="CA1063" Action="Warning" />
<Rule Id="CA1063" Action="None" />
<Rule Id="CA1065" Action="Warning" />
<Rule Id="CA1301" Action="Warning" />
<Rule Id="CA1400" Action="Warning" />
@ -70,15 +67,22 @@
</Rules>
<Rules AnalyzerId="StyleCop.Analyzers" RuleNamespace="StyleCop.Analyzers">
<Rule Id="SA0001" Action="None" />
<Rule Id="SA1005" Action="None" />
<Rule Id="SA1008" Action="None" />
<Rule Id="SA1009" Action="None" />
<Rule Id="SA1025" Action="None" />
<Rule Id="SA1108" Action="None" />
<Rule Id="SA1118" Action="None" />
<Rule Id="SA1124" Action="None" />
<Rule Id="SA1305" Action="Warning" />
<Rule Id="SA1201" Action="None" />
<Rule Id="SA1202" Action="None" />
<Rule Id="SA1401" Action="None" />
<Rule Id="SA1402" Action="None" />
<Rule Id="SA1407" Action="None" />
<Rule Id="SA1504" Action="None" />
<Rule Id="SA1512" Action="None" />
<Rule Id="SA1600" Action="None" />
<Rule Id="SA1602" Action="None" />
<Rule Id="SA1652" Action="None" />
</Rules>
</RuleSet>

Просмотреть файл

@ -1,6 +1,6 @@
# Contributing to Platform for Situated Intelligence
We welcome contributions from the community in a variety of forms: from simply using it and filing issues and bugs, to writing and releasing your own new components, to creating pull requests for bug fixes or new features, etc. This document describes some of the things you need to know if you are going to contribute to the Platform for Situated Intelligence ecosystem. Please read it carefully before making source code changes.
We welcome contributions from the community in a variety of forms: from simply using it and filing issues and bugs, to writing and releasing your own new components, to creating pull requests for bug fixes or new features, etc. This document describes some of the things you need to know if you are going to contribute to the Platform for Situated Intelligence ecosystem.
## Code of conduct
@ -60,12 +60,14 @@ Below is a description of the directory structure for the Platform for Situated
| Sources | Audio | Contains class libraries for audio components. |
| Sources | Calibration | Contains class libraries for calibrating cameras. |
| Sources | Common | Contains class libraries for common test support. |
| Sources | Extensions | Contains class libraries that extend the \psi runtime class libraries. |
| Sources | Data | Contains class libraries for creating and manipulating datasets. |
| Sources | Devices | Contains class libraries that support enumerating devices. |
| Sources | Imaging | Contains class libraries for \psi imaging, e.g. images, video capture, etc. |
| Sources | Integrations | Contains integrations - libraries that provide shims around 3rd party libraries. |
| Sources | Kinect | Contains class libraries for Kinect sensor components. |
| Sources | Kinect | Contains class libraries for Azure Kinect and Kinect V2 sensor components. |
| Sources | Language | Contains class libraries for natural language processing components. |
| Sources | Media | Contains class libraries for media components. |
| Sources | RealSense | Contains class libraries for RealSense sensor component. |
| Sources | Runtime | Contains class libraries for \psi runtime. |
| Sources | Speech | Contains class libraries for speech components. |
| Sources | Toolkits | Contains toolkits - e.g. Finite State Machine toolkit, etc. |
@ -74,10 +76,9 @@ Below is a description of the directory structure for the Platform for Situated
### Coding Style
Platform for Situated Intelligence is an organically grown codebase. The consistency of style reflects this.
For the most part, the team follows these [coding conventions](https://docs.microsoft.com/en-us/dotnet/csharp/programming-guide/inside-a-program/coding-conventions) along with these [design guidelines](https://docs.microsoft.com/en-us/dotnet/standard/design-guidelines/). Pull requests that reformat the code will not be accepted.
For the most part, the Platform for Situated Intelligence codebase follows these [coding conventions](https://docs.microsoft.com/en-us/dotnet/csharp/programming-guide/inside-a-program/coding-conventions) along with these [design guidelines](https://docs.microsoft.com/en-us/dotnet/standard/design-guidelines/).
In case you would like to add a new project to the `Psi.sln` we require that the project is set up in a similar way to the other projects to ensure a certain coding standard.
In case you would like to add a new project to the `Psi.sln` we require that the project is set up in a similar way to the other projects to ensure consistency.
### Build and Test
@ -85,9 +86,7 @@ To fully validate your changes, do a complete rebuild and test for both Debug an
### Pull Requests
We accept __bug fix pull requests__. Please make sure there is a corresponding tracking issue for the bug. When you submit a PR for a bug, please link to the issue.
We also accept __new feature pull requests__. We are available to discuss new features. We recommend you open an issue if you plan to develop new features.
We accept __bug fix pull requests__ as well as __new feature pull requests__. For bug fixes, please open a corresponding issue for the bug and link to it, if one does not already exist. We also recommend you open an issue if you plan to develop new features, which will help facilitate community discussions about the design, implementation, etc.
Pull requests should:

Просмотреть файл

@ -6,7 +6,7 @@
<Company>Microsoft Corporation</Company>
<Owners>microsoft,psi</Owners>
<Authors>Microsoft</Authors>
<AssemblyVersion>0.11.82.2</AssemblyVersion>
<AssemblyVersion>0.12.53.2</AssemblyVersion>
<FileVersion>$(AssemblyVersion)</FileVersion>
<Version>$(AssemblyVersion)-beta</Version>
<SignAssembly>false</SignAssembly>
@ -21,6 +21,7 @@
<!-- Workaround for arbitrary value in AssemblyInformationalVersionAttribute until it is fixed in VS 16 https://github.com/Microsoft/visualfsharp/issues/4822 -->
<NoWarn>FS2003</NoWarn>
<LangVersion>latest</LangVersion>
<RunCodeAnalysis>false</RunCodeAnalysis>
</PropertyGroup>
<!-- Support for building in Visual Studio versions 15.9 and up -->

29
Psi.sln
Просмотреть файл

@ -166,6 +166,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Build", "Build", "{4026C2BE
Build\MSLogoGreySmall.png = Build\MSLogoGreySmall.png
Build\RunDoxygen.ps1 = Build\RunDoxygen.ps1
Build\Sample.Psi.ruleset = Build\Sample.Psi.ruleset
Build\Security.ruleset = Build\Security.ruleset
Build\Test.Psi.ruleset = Build\Test.Psi.ruleset
EndProjectSection
EndProject
@ -197,6 +198,14 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Psi.DeviceManagem
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Psi.CognitiveServices.Face", "Sources\Integrations\CognitiveServices\Microsoft.Psi.CognitiveServices.Face\Microsoft.Psi.CognitiveServices.Face.csproj", "{084FB05C-4022-40FD-B00B-E3229B882F08}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Psi.AzureKinect.Visualization.Windows.x64", "Sources\Kinect\Microsoft.Psi.AzureKinect.Visualization\Microsoft.Psi.AzureKinect.Visualization.Windows.x64.csproj", "{8D33307F-0E96-491A-9D31-9025709310F6}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Psi.AzureKinect.x64", "Sources\Kinect\Microsoft.Psi.AzureKinect.x64\Microsoft.Psi.AzureKinect.x64.csproj", "{C91D0412-1BB2-40D2-8DCA-A48B6C5B7E67}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AzureKinectSample", "Samples\AzureKinectSample\AzureKinectSample.csproj", "{66639311-E7BE-4A5B-A35B-9BFF6D3F69F2}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.Psi.Kinect.Visualization.Windows", "Sources\Kinect\Microsoft.Psi.Kinect.Visualization.Windows\Microsoft.Psi.Kinect.Visualization.Windows.csproj", "{F31606FF-3737-45DC-8E89-6256AACD841F}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@ -435,6 +444,22 @@ Global
{084FB05C-4022-40FD-B00B-E3229B882F08}.Debug|Any CPU.Build.0 = Debug|Any CPU
{084FB05C-4022-40FD-B00B-E3229B882F08}.Release|Any CPU.ActiveCfg = Release|Any CPU
{084FB05C-4022-40FD-B00B-E3229B882F08}.Release|Any CPU.Build.0 = Release|Any CPU
{8D33307F-0E96-491A-9D31-9025709310F6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{8D33307F-0E96-491A-9D31-9025709310F6}.Debug|Any CPU.Build.0 = Debug|Any CPU
{8D33307F-0E96-491A-9D31-9025709310F6}.Release|Any CPU.ActiveCfg = Release|Any CPU
{8D33307F-0E96-491A-9D31-9025709310F6}.Release|Any CPU.Build.0 = Release|Any CPU
{C91D0412-1BB2-40D2-8DCA-A48B6C5B7E67}.Debug|Any CPU.ActiveCfg = Debug|x64
{C91D0412-1BB2-40D2-8DCA-A48B6C5B7E67}.Debug|Any CPU.Build.0 = Debug|x64
{C91D0412-1BB2-40D2-8DCA-A48B6C5B7E67}.Release|Any CPU.ActiveCfg = Release|x64
{C91D0412-1BB2-40D2-8DCA-A48B6C5B7E67}.Release|Any CPU.Build.0 = Release|x64
{66639311-E7BE-4A5B-A35B-9BFF6D3F69F2}.Debug|Any CPU.ActiveCfg = Debug|x64
{66639311-E7BE-4A5B-A35B-9BFF6D3F69F2}.Debug|Any CPU.Build.0 = Debug|x64
{66639311-E7BE-4A5B-A35B-9BFF6D3F69F2}.Release|Any CPU.ActiveCfg = Release|x64
{66639311-E7BE-4A5B-A35B-9BFF6D3F69F2}.Release|Any CPU.Build.0 = Release|x64
{F31606FF-3737-45DC-8E89-6256AACD841F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F31606FF-3737-45DC-8E89-6256AACD841F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F31606FF-3737-45DC-8E89-6256AACD841F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F31606FF-3737-45DC-8E89-6256AACD841F}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@ -520,6 +545,10 @@ Global
{8AEFDD4F-CF2E-4392-AF46-378DE96126A5} = {A0856299-D28A-4513-B964-3FA5290FF160}
{6B572F54-0E2F-4223-8283-14B3BAB7534A} = {8AEFDD4F-CF2E-4392-AF46-378DE96126A5}
{084FB05C-4022-40FD-B00B-E3229B882F08} = {05481E26-A4CA-4F7D-B6FC-671A8AAC18B1}
{8D33307F-0E96-491A-9D31-9025709310F6} = {CB8286F5-167B-4416-8FE9-9B97FCF146D5}
{C91D0412-1BB2-40D2-8DCA-A48B6C5B7E67} = {CB8286F5-167B-4416-8FE9-9B97FCF146D5}
{66639311-E7BE-4A5B-A35B-9BFF6D3F69F2} = {1AA38339-B349-4AA7-A0A9-F92ADCFDB2DF}
{F31606FF-3737-45DC-8E89-6256AACD841F} = {CB8286F5-167B-4416-8FE9-9B97FCF146D5}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {EAF15EE9-DCC5-411B-A9E5-7C2F3D132331}

Просмотреть файл

@ -3,47 +3,44 @@
![Build status](https://dev.azure.com/msresearch/psi/_apis/build/status/psi-github-ci?branchName=master)
[![Join the chat at https://gitter.im/Microsoft/psi](https://badges.gitter.im/Microsoft/psi.svg)](https://gitter.im/Microsoft/psi?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
**Platform for Situated Intelligence** is an open, extensible framework that enables the development, fielding and study of situated, integrative-AI systems.
**Platform for Situated Intelligence** is an open, extensible framework that enables the development, fielding and study of multimodal, integrative-AI systems.
In recent years, we have seen significant progress with machine learning techniques on various perceptual and control problems. At the same time, building end-to-end, multimodal, integrative-AI systems that leverage multiple technologies and act autonomously or interact with people in the open world remains a challenging, error-prone and time-consuming engineering task. Numerous challenges stem from the sheer complexity of these systems and are amplified by the lack of appropriate infrastructure and development tools.
The Platform for Situated Intelligence project aims to address these issues and provide a basis for developing, fielding and studying integrative-AI systems. The platform consists of three layers. The **Runtime** layer provides a parallel programming model centered around temporal streams of data, and enables easy development of components and applications using .NET, while retaining the performance properties of natively written, carefully tuned systems. A set of **Tools** enable multimodal data visualization, annotations, analytics, tuning and machine learning scenarios. Finally, an open ecosystem of **Components** encapsulate various AI technologies and allow for quick compositing of integrative-AI applications. For more information about the goals of the project, the types of systems that you can build using it, and the various layers see [Platform for Situated Intelligence Overview](https://github.com/microsoft/psi/wiki/Platform-Overview).
The Platform for Situated Intelligence project aims to address these issues and provide a basis for __developing, fielding and studying multimodal, integrative-AI systems__. The platform consists of three layers. The **Runtime** layer provides a parallel programming model centered around temporal streams of data, and enables easy development of components and applications using .NET, while retaining the performance properties of natively written, carefully tuned systems. A set of **Tools** enable multimodal data visualization, annotations, analytics, tuning and machine learning scenarios. Finally, an open ecosystem of **Components** encapsulate various AI technologies and allow for quick compositing of integrative-AI applications.
For more information about the goals of the project, the types of systems that you can build using it, and the various layers see [Platform for Situated Intelligence Overview](https://github.com/microsoft/psi/wiki/Platform-Overview).
# Using and Building
Platform for Situated Intelligence is built on the .NET Framework. Large parts of it are built on .NET Standard and therefore run both on Windows and Linux, whereas some components are specific and available only to one operating system (for instance the Kinect sensor component is available only for Windows.)
Platform for Situated Intelligence is built on the .NET Framework. Large parts of it are built on .NET Standard and therefore run both on Windows and Linux, whereas some components are specific and available only to one operating system.
You can build applications based on Platform for Situated Intelligence either by leveraging nuget packages, or by cloning and building the code. Below are instructions:
* [Using \\psi via Nuget packages](https://github.com/microsoft/psi/wiki/Using-via-NuGet-Packages)
* [Building the \\psi codebase](https://github.com/microsoft/psi/wiki/Building-the-Codebase)
# Getting Started
# Documentation and Getting Started
__Brief Introduction__. A number of [tutorials](https://github.com/microsoft/psi/wiki/Basic-Tutorials) are available to get you started with using Platform for Situated Intelligence. We recommend starting with the [Brief Introduction](https://github.com/microsoft/psi/wiki/Brief-Introduction), which provides a guided walk-through for some of the main concepts in \psi. It shows how to create a simple program, describes the core concept of a stream, and explains how to transform, synchronize, visualize, persist to and replay streams from disk. We recommend that you first work through the examples in this tutorial to familiarize yourself with these core concepts. The [Writing Components](https://github.com/microsoft/psi/wiki/Writing-Components) tutorial explains how to write new \psi components, and the [Delivery Policies](https://github.com/microsoft/psi/wiki/Delivery-Policies) tutorial describes how to control throughput on streams in your application.
The documentation for Platform for Situated Intelligence is available in the [github project wiki](https://github.com/microsoft/psi/wiki). The documentation is still under construction and in various phases of completion. If you need further explanation in any area, please open an issue and label it `documentation`, as this will help us target our documentation development efforts to the highest priority needs.
__Advanced Topics__. A number of documents on more [advanced topics](https://github.com/microsoft/psi/wiki/More-Advanced-Topics) describe in more detail various aspects of the framework, including [stream operators](https://github.com/microsoft/psi/wiki/Stream-Operators), [synchronization](https://github.com/microsoft/psi/wiki/Synchronization), [remoting](https://github.com/microsoft/psi/wiki/Remoting), [interop](https://github.com/microsoft/psi/wiki/Interop), [shared objects and memory management](https://github.com/microsoft/psi/wiki/Shared-Objects), etc.
__Getting Started__. We recommend starting with the [Brief Introduction](https://github.com/microsoft/psi/wiki/Brief-Introduction) tutorial, which provides a guided walk-through for some of the main concepts in \psi. It shows how to create a simple \\psi application, describes the core concept of a stream, and explains how to transform, synchronize, visualize, persist to and replay streams from disk. We recommend that you first work through the examples in the [Brief Introduction](https://github.com/microsoft/psi/wiki/Brief-Introduction) to familiarize yourself with these core concepts, before you peruse the other available [tutorials](https://github.com/microsoft/psi/wiki/Basic-Tutorials). Two other helpful tutorials if you are just getting started are the [Writing Components](https://github.com/microsoft/psi/wiki/Writing-Components) tutorial, which explains how to write new \psi components, and the [Delivery Policies](https://github.com/microsoft/psi/wiki/Delivery-Policies) tutorial, which describes how to control throughput on streams in your application.
__Advanced Topics__. A set of documents on more [advanced topics](https://github.com/microsoft/psi/wiki/More-Advanced-Topics) describe in more detail various aspects of the framework, including [stream fusion and merging](https://github.com/microsoft/psi/wiki/Stream-Fusion-and-Merging), [interpolation and sampling](https://github.com/microsoft/psi/wiki/Interpolation-and-Sampling), [windowing operators](https://github.com/microsoft/psi/wiki/Windowing-Operators), [remoting](https://github.com/microsoft/psi/wiki/Remoting), [interop](https://github.com/microsoft/psi/wiki/Interop), [shared objects and memory management](https://github.com/microsoft/psi/wiki/Shared-Objects), etc.
__Samples__. Besides the tutorials and topics, it may be helpful to look through the set of [Samples](https://github.com/microsoft/psi/wiki/Samples) provided. While some of the samples address specialized topics such as how to leverage speech recognition components or how to bridge to ROS, reading them will give you more insight into programming with \psi.
__Samples__. Besides the tutorials and topics, we also recommend looking through the set of [Samples](https://github.com/microsoft/psi/wiki/Samples) provided. While some of the samples address specialized topics such as how to leverage speech recognition components or how to bridge to ROS, reading them will give you more insight into programming with \psi.
__Components__. Additional useful information regarding available packages and components can be found in the [NuGet packages list](https://github.com/microsoft/psi/wiki/List-of-NuGet-Packages) and in the [component list](https://github.com/microsoft/psi/wiki/List-of-Components) pages. The latter page also has pointers to other repositories by third parties containing other \psi components.
__Documentation__. Like the rest of the codebase, the documentation available in the [wiki](https://github.com/microsoft/psi/wiki) is still under construction and in various phases of completion. If you need further explanation in any of these areas, please open an issue, label it `documentation`, as this will help us target our documentation development efforts to the highest priority needs.
# Disclaimer
The codebase is currently in beta and various aspects of the platform are at different levels of completion and robustness. There are probably still bugs in the code and we will likely be making breaking API changes. We plan to continuously improve the framework and we encourage the community to contribute.
The [Roadmap](https://github.com/microsoft/psi/wiki/Roadmap) document provides more information about our future plans.
__API Reference__. An additional [API Reference](https://microsoft.github.io/psi/api/classes.html) is also available.
# Getting Help
If you find a reproducible bug or if you would like to request a new feature or additional documentation, please file an [issue on the github repo](https://github.com/microsoft/psi/issues). If you do so, please make sure a corresponding issue has not already been filed. Use the [`bug`](https://github.com/microsoft/psi/labels/bug) label when filing issues that represent code defects, and provide enough information to reproduce. Use the [`feature request`](https://github.com/microsoft/psi/labels/feature%20request) label to request new features, and use the [`documentation`](https://github.com/microsoft/psi/labels/documentation) label to request additional documentation.
If you find a reproducible bug or if you would like to request a new feature or additional documentation, please file an [issue on the github repo](https://github.com/microsoft/psi/issues). If you do so, please first check whether a corresponding issue has already been filed. Use the [`bug`](https://github.com/microsoft/psi/labels/bug) label when filing issues that represent code defects, and provide enough information to reproduce. Use the [`feature request`](https://github.com/microsoft/psi/labels/feature%20request) label to request new features, and use the [`documentation`](https://github.com/microsoft/psi/labels/documentation) label to request additional documentation.
# Contributing
We hope the community can help improve and evolve Platform for Situated Intelligence. If you plan to contribute to the codebase, please read the [Contributing Guidelines](https://github.com/microsoft/psi/wiki/Contributing) page. It describes how the source code is organized and things you need to know before making any source code changes.
We hope the community can help improve and evolve Platform for Situated Intelligence, and we welcome contributions in a variety of forms: from simply using it and filing issues and bugs, to writing and releasing your own new components, to creating pull requests for bug fixes or new features, etc. The [Contributing Guidelines](https://github.com/microsoft/psi/wiki/Contributing) page in the wiki describes in more detail a variety of ways in which you can get involved, how the source code is organized, and other useful things to know before starting to make source code changes.
# Who is Using
@ -55,6 +52,12 @@ Platform for Situated Intelligence is currently being used in a number of indust
If you would like to be added to this list, just add a [GitHub issue](https://github.com/Microsoft/psi/issues) and label it with the [`whoisusing`](https://github.com/Microsoft/psi/labels/whoisusing) label. Add a url for your research lab, website or project that you would like us to link to.
# Disclaimer
The codebase is currently in beta and various aspects of the platform are at different levels of completion and robustness. There are probably still bugs in the code and we will likely be making breaking API changes. We plan to continuously improve the framework and we encourage the community to contribute.
The [Roadmap](https://github.com/microsoft/psi/wiki/Roadmap) document provides more information about our future plans.
# License
Platform for Situated Intelligence is available under an [MIT License](LICENSE.txt). See also [Third Party Notices](ThirdPartyNotices.txt).

Просмотреть файл

@ -0,0 +1,50 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp3.1</TargetFramework>
<Platforms>x64</Platforms>
<CodeAnalysisRuleSet>..\..\Build\Sample.Psi.ruleset</CodeAnalysisRuleSet>
<GeneratePackageOnBuild>false</GeneratePackageOnBuild>
<StartupObject>AzureKinectSample.Program</StartupObject>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<PlatformTarget>x64</PlatformTarget>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<WarningsAsErrors />
<DocumentationFile>bin\Debug\netcoreapp3.1\AzureKinectSample.xml</DocumentationFile>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
<PlatformTarget>x64</PlatformTarget>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<WarningsAsErrors />
<DocumentationFile>bin\Release\netcoreapp3.1\AzureKinectSample.xml</DocumentationFile>
</PropertyGroup>
<ItemGroup>
<None Remove="stylecop.json" />
</ItemGroup>
<ItemGroup>
<AdditionalFiles Include="stylecop.json" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Sources\Calibration\Microsoft.Psi.Calibration\Microsoft.Psi.Calibration.csproj" />
<ProjectReference Include="..\..\Sources\Kinect\Microsoft.Psi.AzureKinect.x64\Microsoft.Psi.AzureKinect.x64.csproj" />
</ItemGroup>
</Project>

Просмотреть файл

@ -0,0 +1,165 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license.
namespace AzureKinectSample
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using MathNet.Spatial.Euclidean;
using Microsoft.Azure.Kinect.BodyTracking;
using Microsoft.Azure.Kinect.Sensor;
using Microsoft.Psi;
using Microsoft.Psi.AzureKinect;
using Microsoft.Psi.Calibration;
using Microsoft.Psi.Imaging;
/// <summary>
/// Azure Kinect sample program.
/// </summary>
/// <remarks>
/// Renders the Azure Kinect color stream to the console as "ASCII art", using the
/// depth stream for background subtraction, the IMU to keep the rendering upright
/// as the device is rotated, and body tracking to overlay head positions.
/// </remarks>
public class Program
{
    /// <summary>
    /// Main entry point. Creates the \psi pipeline, wires the Azure Kinect streams
    /// together, and renders frames to the console until Enter is pressed.
    /// </summary>
    public static void Main()
    {
        // camera resolution settings
        const ColorResolution resolution = ColorResolution.R720p;
        const int widthSource = 1280;
        const int heightSource = 720;

        // down sampled resolution at which frames are rendered as ASCII art
        const int widthOutput = 80;
        const int heightOutput = 45;
        const double scaleFactorWidth = (double)widthOutput / widthSource;
        const double scaleFactorHeight = (double)heightOutput / heightSource;

        // background subtraction beyond this depth
        const double maxDepth = 1.0; // meters

        const SensorOrientation initialOrientation = SensorOrientation.Default;

        using (var pipeline = Pipeline.Create("AzureKinectSample", DeliveryPolicy.LatestMessage))
        {
            var azureKinect = new AzureKinectSensor(
                pipeline,
                new AzureKinectSensorConfiguration()
                {
                    OutputImu = true,
                    ColorResolution = resolution,
                    DepthMode = DepthMode.WFOV_Unbinned,
                    CameraFPS = FPS.FPS15,
                    BodyTrackerConfiguration = new AzureKinectBodyTrackerConfiguration()
                    {
                        CpuOnlyMode = true, // false if CUDA supported GPU available
                        SensorOrientation = initialOrientation,
                    },
                });

            StringBuilder sb = new StringBuilder();
            SensorOrientation lastOrientation = (SensorOrientation)(-1); // detect orientation changes

            // consuming color, depth, IMU, body tracking, calibration
            azureKinect.ColorImage.Resize(widthOutput, heightOutput)
                .Join(azureKinect.DepthImage)
                .Join(azureKinect.Imu, TimeSpan.FromMilliseconds(10))
                .Pair(azureKinect.Bodies)
                .Pair(azureKinect.DepthDeviceCalibrationInfo)
                .Do(message =>
                {
                    var (color, depth, imu, bodies, calib) = message;

                    // determine camera orientation from the pull of gravity on each IMU axis
                    static SensorOrientation ImuOrientation(ImuSample imu)
                    {
                        const double halfGravity = 9.8 / 2; // G ≈ 9.8 m/s²
                        return
                            (imu.AccelerometerSample.Z > halfGravity) ? SensorOrientation.Flip180 :
                            (imu.AccelerometerSample.Y > halfGravity) ? SensorOrientation.Clockwise90 :
                            (imu.AccelerometerSample.Y < -halfGravity) ? SensorOrientation.CounterClockwise90 :
                            SensorOrientation.Default; // upright
                    }

                    // enumerate image coordinates while correcting for orientation; the bool
                    // indicates whether x/y must be swapped (sideways orientations)
                    static (IEnumerable<int>, IEnumerable<int>, bool) EnumerateCoordinates(SensorOrientation orientation)
                    {
                        var w = Enumerable.Range(0, widthOutput);
                        var h = Enumerable.Range(0, heightOutput);
                        return orientation switch
                        {
                            SensorOrientation.Clockwise90 => (h.Reverse(), w, true),
                            SensorOrientation.Flip180 => (w.Reverse(), h.Reverse(), false),
                            SensorOrientation.CounterClockwise90 => (h, w.Reverse(), true),
                            _ => (w, h, false), // normal
                        };
                    }

                    // render color frame as "ASCII art"
                    sb.Clear();

                    // ToBitmap() creates a new bitmap for every frame; dispose it once the
                    // frame has been rendered so native bitmap resources don't leak
                    using var bitmap = color.Resource.ToBitmap();
                    var orientation = ImuOrientation(imu);
                    var (horizontal, vertical, swap) = EnumerateCoordinates(orientation);
                    foreach (var j in vertical.Where(n => n % 2 == 0)) // skip every other row (console chars are ~2x tall)
                    {
                        foreach (var i in horizontal)
                        {
                            var (x, y) = swap ? (j, i) : (i, j);

                            // subtract background beyond max depth
                            var d = DepthExtensions.ProjectToCameraSpace(calib, new Point2D(x / scaleFactorWidth, y / scaleFactorHeight), depth);
                            if (!d.HasValue || d.Value.Z < maxDepth)
                            {
                                // map pixel brightness (R+G+B in 0..765) onto a 10-char gray ramp
                                var p = bitmap.GetPixel(x, y);
                                sb.Append(" .:-=+*#%@"[(int)((p.R + p.G + p.B) / 76.5)]);
                            }
                            else
                            {
                                sb.Append(' '); // subtract background
                            }
                        }

                        sb.Append(Environment.NewLine);
                    }

                    // clear console when orientation changes (landscape/portrait extents differ)
                    if (orientation != lastOrientation)
                    {
                        Console.Clear();
                        lastOrientation = orientation;
                    }

                    Console.SetCursorPosition(0, 0);
                    Console.WriteLine(sb.ToString());

                    // overlay head tracking
                    if (orientation == initialOrientation)
                    {
                        // body tracking works only in initially configured orientation
                        Console.BackgroundColor = ConsoleColor.Red;
                        foreach (var body in bodies)
                        {
                            var p = calib.ToColorSpace(body.Joints[JointId.Head].Pose.Origin);
                            var x = (int)(p.X * scaleFactorWidth);

                            // NOTE(review): y is scaled by scaleFactorHeight / 2 here and divided
                            // by 2 again in SetCursorPosition below — looks like a double halving;
                            // confirm intended against the every-other-row rendering above
                            var y = (int)(p.Y * scaleFactorHeight / 2);
                            if (x > 0 && x < widthOutput && y > 0 && y < heightOutput)
                            {
                                Console.SetCursorPosition(x, y / 2);
                                Console.Write(' ');
                            }
                        }

                        Console.BackgroundColor = ConsoleColor.Black;
                    }
                });

            Console.BackgroundColor = ConsoleColor.Black;
            Console.ForegroundColor = ConsoleColor.White;
            Console.Clear();
            pipeline.RunAsync();
            Console.ReadLine(); // press Enter to end
        }
    }
}
}

Просмотреть файл

@ -0,0 +1,349 @@
# Azure Kinect Sample
This sample demonstrates how to use the Azure Kinect sensor with body tracking and how to use the `Join()` and `Pair()` operators to synchronize and fuse streams.
# Using the Color Image Stream
First, let's get a minimal application up and running. The `AzureKinectSensor` component gives us access to various image streams from the device (color, depth, infrared) as well as other information such as IMU and temperature readings. We will start with the `ColorImage` stream.
```csharp
using (var pipeline = Pipeline.Create("AzureKinectSample", DeliveryPolicy.LatestMessage))
{
var azureKinectSensor = new AzureKinectSensor(
pipeline,
new AzureKinectSensorConfiguration()
{
ColorResolution = ColorResolution.R720p,
CameraFPS = FPS.FPS15,
});
...
}
```
Notice that at construction time we can configure the frame rate (`CameraFPS`) and resolution (`ColorResolution`). A number of other configuration options are also available as part of the `AzureKinectSensorConfiguration`:
- **DeviceIndex:** The index of the device to open (default 0).
- **ColorResolution:** The resolution of the color camera (default 1080p).
- **DepthMode:** The depth camera mode (default NFOV unbinned).
- **CameraFPS:** The desired frame rate (default 30 FPS).
- **SynchronizedImagesOnly:** Whether color and depth captures should be strictly synchronized (default `true`).
- **OutputColor:** Whether the color stream is emitted (default `true`).
- **OutputDepth:** Whether the depth stream is emitted (default `true`).
- **OutputInfrared:** Whether the infrared stream is emitted (default `true`).
- **OutputImu:** Whether to use the Azure Kinect's IMU (default `false`).
- **OutputCalibration:** Whether the Azure Kinect outputs its calibration settings (default `true`).
- **BodyTrackerConfiguration:** The body tracker configuration (default null). If null, no body tracking is performed.
- **DeviceCaptureTimeout:** The timeout used for device capture (default 1 minute).
- **FrameRateReportingFrequency:** The frequency at which frame rate is reported on the `FrameRate` emitter (default 2 seconds).
For this demonstration we'll be resizing the color images to render as ASCII art at the console for a cross-platform solution. The `ColorImage` stream is of `Shared<Image>` on which several operators exist for cropping, transforming, encoding, etc. Below, we use the `Resize()` operator to scale the image down to 80 by 45 pixels, and then we apply a `Do()` operator in which we convert the image to ASCII art:
```csharp
// down sampled resolution
const int widthOutput = 80;
const int heightOutput = 45;
StringBuilder sb = new StringBuilder();
// consuming color
azureKinectSensor.ColorImage.Resize(widthOutput, heightOutput).Do(color =>
{
var bitmap = color.Resource.ToBitmap();
// render color frame as "ASCII art"
sb.Clear();
for (int y = 0; y < heightOutput; y += 2)
{
for (int x = 0; x < widthOutput; x++)
{
var p = bitmap.GetPixel(x, y);
sb.Append(" .:-=+*#%@"[(int)((p.R + p.G + p.B) / 76.5)]);
}
sb.Append(Environment.NewLine);
}
Console.SetCursorPosition(0, 0);
Console.WriteLine(sb.ToString());
});
Console.BackgroundColor = ConsoleColor.Black;
Console.ForegroundColor = ConsoleColor.White;
Console.Clear();
pipeline.RunAsync();
Console.ReadLine(); // press Enter to end
```
Here's an example output produced by this application:
![Sample output](./SampleOutput.png)
# Using the Depth Image Stream
A core feature of the Azure Kinect sensor is depth perception. Next we'll use the `DepthImage` stream to perform background subtraction, i.e. to remove pixels beyond a distance threshold.
```csharp
// background subtraction beyond this depth
const double maxDepth = 1.0; // meters
```
We will _not_ be resizing the depth image but will need to scale coordinates:
```csharp
// camera resolution settings
const ColorResolution resolution = ColorResolution.R720p;
const int widthSource = 1280;
const int heightSource = 720;
// down sampled resolution
const int widthOutput = 80;
const int heightOutput = 45;
const double scaleFactorWidth = (double)widthOutput / widthSource;
const double scaleFactorHeight = (double)heightOutput / heightSource;
```
The `ColorImage` and `DepthImage` streams emit independently. When the component is configured with `SynchronizedImagesOnly = true` (the default) then the images on each stream have matching originating times. However, they remain separate streams and may have varying latencies in the system. In a more complex system they may pass through different paths in the graph of components. We want to ensure that we receive pairs of color and depth images that correspond to the same originating time in the real world regardless of when they arrive in wall clock time at our block of code. To do this we use `Join()`. The operator buffers incoming messages and fuses them in pairs based on their originating times. We'll see later how `Join()` can also be used with a tolerance window to relax the requirement of _exactly_ matching originating times while still guaranteeing reproducibility.
Additionally, we need the `DepthDeviceCalibrationInfo` to correlate the physical poses of the two cameras involved. This comes as a single message on another stream. Here we use `Pair()` to fuse this with the other data we receive; this time _without_ ensuring synchronicity. To learn more about the different types of fusion and synchronization operators available you can [visit this in-depth tutorial.](https://github.com/microsoft/psi/wiki/Synchronization)
The result of the consecutive `Join()` and `Pair()` operators is a stream of tuple `message` which we can unpack with `var (color, depth, calib) = message`.
```csharp
// consuming color, depth, and calibration
azureKinectSensor.ColorImage
.Resize(widthOutput, heightOutput)
.Join(azureKinectSensor.DepthImage)
.Pair(azureKinectSensor.DepthDeviceCalibrationInfo)
.Do(message =>
{
var (color, depth, calib) = message;
...
});
```
The `Microsoft.Psi.Calibration` namespace provides a number of useful functions for dealing with depth information via the `DepthExtensions` static class. We'll use `ProjectToCameraSpace()` to get the depth at each color image pixel. Any pixel known to be beyond the `maxDepth` threshold will be rendered blank.
```csharp
var d = DepthExtensions.ProjectToCameraSpace(calib, new Point2D(x / scaleFactorWidth, y / scaleFactorHeight), depth);
if (!d.HasValue || d.Value.Z < maxDepth)
{
var p = bitmap.GetPixel(x, y);
sb.Append(" .:-=+*#%@"[(int)((p.R + p.G + p.B) / 76.5)]);
}
else
{
sb.Append(' '); // subtract background
}
```
# Using the Inertial Measurement Unit (IMU) Stream
The Azure Kinect provides inertial information as well. A gyro gives instantaneous angular speed when the device is physically rotated and an accelerometer gives linear acceleration.
We will assume that the device is relatively stationary and will use accelerometer values to measure the direction of gravity. With this we can rotate the output image to remain upright even as the device is physically turned on its side or upside down; much like mobile phones commonly do.
While `OutputColor` and `OutputDepth` are configured to `true` by default, `OutputImu` is not. We'll first enable this in the configuration passed in when constructing the sensor.
```csharp
var azureKinectSensor = new AzureKinectSensor(
pipeline,
new AzureKinectSensorConfiguration()
{
ColorResolution = resolution,
CameraFPS = FPS.FPS15,
OutputImu = true,
});
```
As with the other streams, we will `Join()` with the `Imu` stream. Unlike the color and depth streams, the IMU information flows at a higher rate. It does not obey the `CameraFPS` setting. By default `Join()` correlates messages by _exactly_ matching originating times. This will not work with the IMU because it's on a different cadence. We do want to take samples that are _reasonably_ near to each camera frame. The `Join()` operator allows us to specify what we mean by _reasonably near_; for instance, we can match messages within 10 milliseconds by using `.Join(azureKinectSensor.Imu, TimeSpan.FromMilliseconds(10))`.
```csharp
// consuming color, depth, IMU, and calibration
azureKinectSensor.ColorImage.Resize(widthOutput, heightOutput)
.Join(azureKinectSensor.DepthImage)
.Join(azureKinectSensor.Imu, TimeSpan.FromMilliseconds(10))
.Pair(azureKinectSensor.DepthDeviceCalibrationInfo)
.Do(message =>
{
var (color, depth, imu, calib) = message;
...
});
```
To determine the orientation we observe the pull of gravity along each axis.
```csharp
// determine camera orientation from IMU
SensorOrientation ImuOrientation(ImuSample imu)
{
const double halfGravity = 9.8 / 2; // G ≈ 9.8m/s²
return
(imu.AccelerometerSample.Z > halfGravity) ? SensorOrientation.Flip180 :
(imu.AccelerometerSample.Y > halfGravity) ? SensorOrientation.Clockwise90 :
(imu.AccelerometerSample.Y < -halfGravity) ? SensorOrientation.CounterClockwise90 :
SensorOrientation.Default; // upright
}
```
We will enumerate pixels in the order required to render upright (from right-to-left, bottom-to-top when the device is upside down for example).
```csharp
// enumerate image coordinates while correcting for orientation
(IEnumerable<int>, IEnumerable<int>, bool) EnumerateCoordinates(SensorOrientation orientation)
{
var w = Enumerable.Range(0, widthOutput);
var h = Enumerable.Range(0, heightOutput);
switch (orientation)
{
case SensorOrientation.Clockwise90: return (h.Reverse(), w, true);
case SensorOrientation.Flip180: return (w.Reverse(), h.Reverse(), false);
case SensorOrientation.CounterClockwise90: return (h, w.Reverse(), true);
default: return (w, h, false); // normal
}
}
```
Changing our nested `for` loops to this order, while swapping `x` and `y` when sideways.
```csharp
var orientation = ImuOrientation(imu);
var (horizontal, vertical, swap) = EnumerateCoordinates(orientation);
foreach (var j in vertical.Where(n => n % 2 == 0))
{
foreach (var i in horizontal)
{
var (x, y) = swap ? (j, i) : (i, j);
...
}
sb.Append(Environment.NewLine);
}
```
To prevent displaying characters from previous frames we'll keep track of the `lastOrientation` and clear the console when changing between landscape and portrait renderings.
```csharp
SensorOrientation lastOrientation = (SensorOrientation)(-1); // detect orientation changes
...
// clear console when orientation changes
if (orientation != lastOrientation)
{
Console.Clear();
lastOrientation = orientation;
}
```
# Body Tracking
Using the depth and infrared image streams, the Azure Kinect may be used to [track one or many human bodies, including detailed joint positions](https://docs.microsoft.com/en-us/azure/kinect-dk/body-joints).
The body tracker can make use of a CUDA supported GPU if one is available (and configured with `CpuOnlyMode = false`). The following other parameters are available as part of the `AzureKinectBodyTrackerConfiguration`:
- **TemporalSmoothing:** The temporal smoothing to use across frames for the body tracker. Set between 0 for no smoothing and 1 for full smoothing (default 0.5).
- **CpuOnlyMode:** Whether to perform body tracking computation only on the CPU. If false, the tracker requires CUDA hardware and drivers (default `false`).
- **SensorOrientation:** The sensor orientation used by body tracking (default upright).
We configure the Azure Kinect to perform body tracking by providing a body tracker configuration, as follows:
```csharp
var azureKinectSensor = new AzureKinectSensor(
pipeline,
new AzureKinectSensorConfiguration()
{
OutputImu = true,
ColorResolution = resolution,
DepthMode = DepthMode.WFOV_Unbinned,
CameraFPS = FPS.FPS15,
BodyTrackerConfiguration =
new AzureKinectBodyTrackerConfiguration()
{
CpuOnlyMode = true, // false if CUDA supported GPU available
},
});
```
Now we can fuse in and make use of the `Bodies` stream.
```csharp
// consuming color, depth, IMU, body tracking, calibration
azureKinectSensor.ColorImage.Resize(widthOutput, heightOutput)
.Join(azureKinectSensor.DepthImage)
.Join(azureKinectSensor.Imu, TimeSpan.FromMilliseconds(10))
.Pair(azureKinectSensor.Bodies)
.Pair(azureKinectSensor.DepthDeviceCalibrationInfo)
.Do(message =>
{
var (color, depth, imu, bodies, calib) = message;
...
});
```
Notice that we use `Pair()` to fuse in the `Bodies` stream. The `Bodies` stream generally comes at a lower frequency than the camera image streams. If we were to use a `Join()` here we would have perfectly synchronized data. That is, bodies in sync with color and depth image frames. However the frame rate would drop significantly (especially in `CpuOnlyMode`).
We could use a tolerance `TimeSpan` as we did with the `Imu` stream, but `Join()` has an interesting side effect to consider. Generally before a joined message may be emitted, the _next_ messages outside of the tolerance window must first be seen to ensure that the _best_ match within the window has been chosen. This necessarily introduces some latency. With the high frequency `Imu` stream, this was fine. With the much lower frequency `Bodies` stream this would cause a significant delay. Instead of `Join()` we choose to use `Pair()` which will _immediately_ fuse the last body message (in wall clock time). No latency, but also no synchronicity or reproducibility guarantees. Reproducibility is the primary difference between `Join()` and `Pair()` as explained in more detail in the [synchronization tutorial.](https://github.com/microsoft/psi/wiki/Synchronization)
Finally, we highlight each person's head with a red block; correlating the `Head` joint with the color image pixel coordinate using `ToColorSpace()`.
```csharp
// overlay head tracking
if (orientation == SensorOrientation.Default)
{
// body tracking works only in initially configured orientation
Console.BackgroundColor = ConsoleColor.Red;
foreach (var body in bodies)
{
var p = calib.ToColorSpace(body.Joints[JointId.Head].Pose.Origin);
var x = (int)(p.X * scaleFactorWidth);
var y = (int)(p.Y * scaleFactorHeight / 2);
if (x > 0 && x < widthOutput && y > 0 && y < heightOutput)
{
Console.SetCursorPosition(x, y / 2);
Console.Write(' ');
}
}
Console.BackgroundColor = ConsoleColor.Black;
}
```
## Decoupled Body Tracking Component
Body tracking can also be performed without a live, running Azure Kinect sensor, if the depth, IR, and calibration information streams are available. This functionality is implemented by the `AzureKinectBodyTracker` component.
```csharp
var bodyTracker = new AzureKinectBodyTracker(
pipeline,
new AzureKinectBodyTrackerConfiguration()
{
CpuOnlyMode = true, // false if CUDA supported GPU available
});
```
This component consumes `DepthImage` and `InfraredImage` streams as well as the `AzureKinectSensorCalibration` stream that contains the sensor calibration information; these streams are produced by the `AzureKinectSensor` component, and can be persisted and leveraged for running the tracker at a later time.
For instance, assuming these streams were persisted into a store, we can open them up as follows:
```csharp
var store = Store.Open(pipeline, "MyRecording", @"C:\Data");
var depth = store.OpenStream<Shared<DepthImage>>("DepthImage"); // DepthImage
var infrared = store.OpenStream<Shared<Image>>("InfraredStream"); // InfraredImage
var calibration = store.OpenStream<Calibration>("AzureKinectSensorCalibration"); // AzureKinectSensorCalibration
```
The depth and infrared streams are joined and piped to the body tracker. The calibration stream is also separately piped to the body tracker. The tracker generates the resulting bodies on its `Bodies` output stream.
```csharp
depth.Join(infrared).PipeTo(bodyTracker);
calibration.PipeTo(bodyTracker.AzureKinectSensorCalibration);
var bodies = bodyTracker.Bodies;
```

Двоичные данные
Samples/AzureKinectSample/SampleOutput.png Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 125 KiB

Просмотреть файл

@ -0,0 +1,3 @@
#!/usr/bin/env bash
dotnet build ./AzureKinectSample.csproj

Просмотреть файл

@ -0,0 +1,16 @@
{
// ACTION REQUIRED: This file was automatically added to your project, but it
// will not take effect until additional steps are taken to enable it. See the
// following page for additional information:
//
// https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/EnableConfiguration.md
"$schema": "https://raw.githubusercontent.com/DotNetAnalyzers/StyleCopAnalyzers/master/StyleCop.Analyzers/StyleCop.Analyzers/Settings/stylecop.schema.json",
"settings": {
"documentationRules": {
"companyName": "Microsoft Corporation",
"copyrightText": "Copyright (c) Microsoft Corporation. All rights reserved.\nLicensed under the MIT license.",
"xmlHeader": false
}
}
}

Просмотреть файл

@ -1,8 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net472</TargetFramework>
<RunAnalyzersDuringBuild>false</RunAnalyzersDuringBuild>
<RunAnalyzersDuringLiveAnalysis>false</RunAnalyzersDuringLiveAnalysis>
<CodeAnalysisRuleSet>../../Build/Sample.Psi.ruleset</CodeAnalysisRuleSet>
<ApplicationIcon />
<OutputType>Exe</OutputType>
<StartupObject>MultiModalSpeechDetection.Program</StartupObject>
@ -43,6 +42,10 @@
<Folder Include="Properties\" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>

Просмотреть файл

@ -1,6 +1,6 @@
# Kinect Sample
This sample demostrates how to use the Kinect sensor and how to use the `Join()` operator to synchronize streams. The sample uses the
This sample demonstrates how to use the Kinect sensor and how to use the `Join()` operator to synchronize streams. The sample uses the
Kinect's face tracking and the audio and video streams to detect when a user is speaking. The sample compiles and runs on Windows.
__NOTE__: In order to run this sample, you must have a valid Cognitive Services Speech subscription key. You may enter this key at runtime, or set it in the static `AzureSubscriptionKey` variable on the `OperatorExtensions` class. For more information on how to obtain a subscription key for the Azure Speech Service, see [https://docs.microsoft.com/en-us/azure/cognitive-services/cognitive-services-apis-create-account](https://docs.microsoft.com/en-us/azure/cognitive-services/cognitive-services-apis-create-account)

Просмотреть файл

@ -2,21 +2,21 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp2.0</TargetFramework>
<TargetFramework>netcoreapp3.1</TargetFramework>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
<CodeAnalysisRuleSet>../../Build/Sample.Psi.ruleset</CodeAnalysisRuleSet>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<WarningsAsErrors />
<DocumentationFile>bin\Release\netstandard2.0\LinuxSpeechSample.xml</DocumentationFile>
<DocumentationFile>bin\Release\netcoreapp3.1\LinuxSpeechSample.xml</DocumentationFile>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<CodeAnalysisRuleSet>../../Build/Sample.Psi.ruleset</CodeAnalysisRuleSet>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<WarningsAsErrors />
<DocumentationFile>bin\Debug\netstandard2.0\LinuxSpeechSample.xml</DocumentationFile>
<DocumentationFile>bin\Debug\netcoreapp3.1\LinuxSpeechSample.xml</DocumentationFile>
</PropertyGroup>
<ItemGroup>
@ -33,6 +33,10 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118" PrivateAssets="All" />
</ItemGroup>

Просмотреть файл

@ -1,6 +1,6 @@
# Speech Sample
This sample demostrates how to build a simple speech recognition application using the audio and speech components on Linux.
This sample demonstrates how to build a simple speech recognition application using the audio and speech components on Linux.
__NOTES:__

Просмотреть файл

@ -13,8 +13,8 @@ using namespace System::Security::Permissions;
[assembly:AssemblyProductAttribute(L"OpenCVSampleInterop")];
[assembly:AssemblyCompanyAttribute(L"Microsoft Corporation")];
[assembly:AssemblyCopyrightAttribute(L"Copyright (c) Microsoft Corporation. All rights reserved.")];
[assembly:AssemblyVersionAttribute("0.11.82.2")];
[assembly:AssemblyFileVersionAttribute("0.11.82.2")];
[assembly:AssemblyInformationalVersionAttribute("0.11.82.2-beta")];
[assembly:AssemblyVersionAttribute("0.12.53.2")];
[assembly:AssemblyFileVersionAttribute("0.12.53.2")];
[assembly:AssemblyInformationalVersionAttribute("0.12.53.2-beta")];
[assembly:ComVisible(false)];
[assembly:CLSCompliantAttribute(true)];

Просмотреть файл

@ -38,7 +38,7 @@
<UseDebugLibraries>true</UseDebugLibraries>
<CLRSupport>true</CLRSupport>
<CharacterSet>Unicode</CharacterSet>
<PlatformToolset>v141</PlatformToolset>
<PlatformToolset>v142</PlatformToolset>
<SpectreMitigation>Spectre</SpectreMitigation>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
@ -46,7 +46,7 @@
<UseDebugLibraries>false</UseDebugLibraries>
<CLRSupport>true</CLRSupport>
<CharacterSet>Unicode</CharacterSet>
<PlatformToolset>v141</PlatformToolset>
<PlatformToolset>v142</PlatformToolset>
<SpectreMitigation>Spectre</SpectreMitigation>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
@ -162,4 +162,4 @@ copy $(OpenCVDlls) $(OutDir)..\..\..\OpenCVSample\bin\Release</Command>
<OldBuildDependsOn>$(BuildDependsOn)</OldBuildDependsOn>
<BuildDependsOn>CheckVariable</BuildDependsOn>
</PropertyGroup>
</Project>
</Project>

Просмотреть файл

@ -42,7 +42,7 @@ namespace Microsoft.Psi.Samples.OpenCV
(srcImage, env, e) =>
{
// Our lambda here is called with each image sample from our stream and calls OpenCV to convert
// the image into a grayscale image. We then post the resulting gray scale image to our event queu
// the image into a grayscale image. We then post the resulting gray scale image to our event queue
// so that the Psi pipeline will send it to the next component.
// Have Psi allocate a new image. We will convert the current image ('srcImage') into this new image.

Просмотреть файл

@ -1,22 +1,40 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net472</TargetFramework>
<AppendTargetFrameworkToOutputPath>false</AppendTargetFrameworkToOutputPath>
<AssemblyName>OpenCVSample</AssemblyName>
<ApplicationIcon />
<OutputType>WinExe</OutputType>
<CodeAnalysisRuleSet>../../../Build/Sample.Psi.ruleset</CodeAnalysisRuleSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<OutputPath>bin\Debug\</OutputPath>
<PlatformTarget>x64</PlatformTarget>
<DocumentationFile>bin\Debug\OpenCVSample.xml</DocumentationFile>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
<OutputPath>bin\Release\</OutputPath>
<PlatformTarget>x64</PlatformTarget>
<DocumentationFile>bin\Release\OpenCVSample.xml</DocumentationFile>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<None Remove="stylecop.json" />
</ItemGroup>
<ItemGroup>
<AdditionalFiles Include="stylecop.json" />
</ItemGroup>
<ItemGroup>
<ApplicationDefinition Include="App.xaml" />
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<Page Include="MainWindow.xaml" />
</ItemGroup>
<ItemGroup>

Просмотреть файл

@ -2,21 +2,21 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp2.0</TargetFramework>
<TargetFramework>netcoreapp3.1</TargetFramework>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<CodeAnalysisRuleSet>../../Build/Sample.Psi.ruleset</CodeAnalysisRuleSet>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<WarningsAsErrors />
<DocumentationFile>bin\Debug\netstandard2.0\RosArmControlSample.xml</DocumentationFile>
<DocumentationFile>bin\Debug\netcoreapp3.1\RosArmControlSample.xml</DocumentationFile>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
<CodeAnalysisRuleSet>../../Build/Sample.Psi.ruleset</CodeAnalysisRuleSet>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<WarningsAsErrors />
<DocumentationFile>bin\Release\netstandard2.0\RosArmControlSample.xml</DocumentationFile>
<DocumentationFile>bin\Release\netcoreapp3.1\RosArmControlSample.xml</DocumentationFile>
</PropertyGroup>
<ItemGroup>
@ -33,6 +33,10 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118" PrivateAssets="All" />
</ItemGroup>
</Project>

Просмотреть файл

@ -113,7 +113,7 @@ namespace ArmControlROSSample
}
/// <summary>
/// Set absolute cartesian position.
/// Set absolute Cartesian position.
/// </summary>
/// <param name="x">Coordinate X.</param>
/// <param name="y">Coordinate Y.</param>
@ -124,7 +124,7 @@ namespace ArmControlROSSample
}
/// <summary>
/// Set relative cartesian position.
/// Set relative Cartesian position.
/// </summary>
/// <param name="x">Delta X.</param>
/// <param name="y">Delta Y.</param>

Просмотреть файл

@ -43,12 +43,12 @@ namespace ArmControlROSSample
public Receiver<bool> Pump { get; private set; }
/// <summary>
/// Gets receiver of absolute cartesian positions.
/// Gets receiver of absolute Cartesian positions.
/// </summary>
public Receiver<(float, float, float)> AbsolutePosition { get; private set; }
/// <summary>
/// Gets receiver of relative cartesian positions.
/// Gets receiver of relative Cartesian positions.
/// </summary>
public Receiver<(float, float, float)> RelativePosition { get; private set; }

Просмотреть файл

@ -2,21 +2,21 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp2.0</TargetFramework>
<TargetFramework>netcoreapp3.1</TargetFramework>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<CodeAnalysisRuleSet>../../Build/Sample.Psi.ruleset</CodeAnalysisRuleSet>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<WarningsAsErrors />
<DocumentationFile>bin\Debug\netstandard2.0\RosTurtleSample.xml</DocumentationFile>
<DocumentationFile>bin\Debug\netcoreapp3.1\RosTurtleSample.xml</DocumentationFile>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
<CodeAnalysisRuleSet>../../Build/Sample.Psi.ruleset</CodeAnalysisRuleSet>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<WarningsAsErrors />
<DocumentationFile>bin\Release\netstandard2.0\RosTurtleSample.xml</DocumentationFile>
<DocumentationFile>bin\Release\netcoreapp3.1\RosTurtleSample.xml</DocumentationFile>
</PropertyGroup>
<ItemGroup>
@ -34,6 +34,10 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118" PrivateAssets="All" />
</ItemGroup>
</Project>

Просмотреть файл

@ -116,7 +116,7 @@ namespace Microsoft.Psi.Samples.SpeechSample
{
// Create the AudioCapture component to capture audio from the default device in 16 kHz 1-channel
// PCM format as required by both the voice activity detector and speech recognition components.
audioInput = new AudioCapture(pipeline, new AudioCaptureConfiguration() { OutputFormat = WaveFormat.Create16kHz1Channel16BitPcm() });
audioInput = new AudioCapture(pipeline, WaveFormat.Create16kHz1Channel16BitPcm());
}
// Create System.Speech recognizer component
@ -202,7 +202,7 @@ namespace Microsoft.Psi.Samples.SpeechSample
{
// Create the AudioCapture component to capture audio from the default device in 16 kHz 1-channel
// PCM format as required by both the voice activity detector and speech recognition components.
audioInput = new AudioCapture(pipeline, new AudioCaptureConfiguration() { OutputFormat = WaveFormat.Create16kHz1Channel16BitPcm() });
audioInput = new AudioCapture(pipeline, WaveFormat.Create16kHz1Channel16BitPcm());
}
// Perform voice activity detection using the voice activity detector component

Просмотреть файл

@ -1,6 +1,6 @@
# Speech Sample
This sample demostrates how to build a simple speech recognition application using a number of different audio and speech components. In addition, it also demonstrates data logging and replay of logged data. The sample builds and runs on Windows.
This sample demonstrates how to build a simple speech recognition application using a number of different audio and speech components. In addition, it also demonstrates data logging and replay of logged data. The sample builds and runs on Windows.
__NOTES:__

Просмотреть файл

@ -1,8 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net472</TargetFramework>
<RunAnalyzersDuringBuild>false</RunAnalyzersDuringBuild>
<RunAnalyzersDuringLiveAnalysis>false</RunAnalyzersDuringLiveAnalysis>
<CodeAnalysisRuleSet>../../Build/Sample.Psi.ruleset</CodeAnalysisRuleSet>
<ApplicationIcon />
<OutputType>Exe</OutputType>
<StartupObject>Microsoft.Psi.Samples.SpeechSample.Program</StartupObject>
@ -38,6 +37,10 @@
<Folder Include="Properties\" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>

Просмотреть файл

@ -70,7 +70,7 @@ namespace Microsoft.Psi.Samples.WebcamWithAudioSample
MediaCapture webcam = new MediaCapture(pipeline, 1920, 1080, 30);
// Create the AudioCapture component to capture audio from the default device in 16 kHz 1-channel
IProducer<AudioBuffer> audioInput = new AudioCapture(pipeline, new AudioCaptureConfiguration() { OutputFormat = WaveFormat.Create16kHz1Channel16BitPcm() });
IProducer<AudioBuffer> audioInput = new AudioCapture(pipeline, WaveFormat.Create16kHz1Channel16BitPcm());
var images = webcam.Out.EncodeJpeg(90, DeliveryPolicy.LatestMessage).Out;

Просмотреть файл

@ -1,4 +1,4 @@
# WebCam + Audio Sample
This sample demostrates how to build a simple application that records audio and video from a webcam and displays the playback using
This sample demonstrates how to build a simple application that records audio and video from a webcam and displays the playback using
the Platform for Situated Intelligence Studio's visualization client. The sample builds and runs on Windows.

Просмотреть файл

@ -1,8 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net472</TargetFramework>
<RunAnalyzersDuringBuild>false</RunAnalyzersDuringBuild>
<RunAnalyzersDuringLiveAnalysis>false</RunAnalyzersDuringLiveAnalysis>
<CodeAnalysisRuleSet>../../Build/Sample.Psi.ruleset</CodeAnalysisRuleSet>
<ApplicationIcon />
<OutputType>Exe</OutputType>
<StartupObject>Microsoft.Psi.Samples.WebcamWithAudioSample.Program</StartupObject>
@ -40,6 +39,10 @@
<Folder Include="Properties\" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>

Просмотреть файл

@ -1,4 +1,4 @@
# WPF Sample
This sample demostrates how to build a simple application based on Windows Presentation Foundation (WPF). The application
This sample demonstrates how to build a simple application based on Windows Presentation Foundation (WPF). The application
connects to a web camera and displays the video stream. The sample builds and runs on Windows.

Просмотреть файл

@ -4,8 +4,7 @@
<ApplicationIcon />
<OutputType>WinExe</OutputType>
<StartupObject>PsiWpfSample.App</StartupObject>
<RunAnalyzersDuringBuild>false</RunAnalyzersDuringBuild>
<RunAnalyzersDuringLiveAnalysis>false</RunAnalyzersDuringLiveAnalysis>
<CodeAnalysisRuleSet>../../Build/Sample.Psi.ruleset</CodeAnalysisRuleSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
@ -33,6 +32,10 @@
<ApplicationDefinition Include="App.xaml" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>

Просмотреть файл

@ -126,11 +126,11 @@ namespace Microsoft.Psi.Audio
{
LinuxAudioInterop.Read(this.audioDevice, buf, blockSize);
}
catch (Exception ex)
catch
{
if (this.audioDevice != null)
{
throw ex;
throw;
}
}

Просмотреть файл

@ -10,7 +10,7 @@ namespace Microsoft.Psi.Audio
/// Structs, enums and static methods for interacting with Advanced Linux Sound Architecture (ALSA) drivers.
/// </summary>
/// <remarks>
/// This implimentation is based on this spec: http://www.alsa-project.org/alsa-doc/alsa-lib
/// This implementation is based on this spec: http://www.alsa-project.org/alsa-doc/alsa-lib
/// The only dependency is on `asound`, which comes with the system.
/// </remarks>
internal static class LinuxAudioInterop
@ -347,56 +347,24 @@ namespace Microsoft.Psi.Audio
internal static unsafe AudioDevice Open(string name, Mode mode, int rate = 44100, int channels = 1, Format format = Format.S16LE, Access access = Access.Interleaved)
{
void* handle;
if (Open(&handle, name, (int)mode, 0) != 0)
{
throw new ArgumentException("Open failed.");
}
CheckResult(NativeMethods.Open(&handle, name, (int)mode, 0), "Open failed");
void* param;
if (HardwareParamsMalloc(&param) != 0)
{
throw new ArgumentException("Hardware params malloc failed.");
}
if (HardwareParamsAny(handle, param) != 0)
{
throw new ArgumentException("Hardware params any failed.");
}
if (HardwareParamsSetAccess(handle, param, (int)access) != 0)
{
throw new ArgumentException("Hardware params set access failed.");
}
if (HardwareParamsSetFormat(handle, param, (int)format) != 0)
{
throw new ArgumentException("Hardware params set format failed.");
}
CheckResult(NativeMethods.HardwareParamsMalloc(&param), "Hardware params malloc failed");
CheckResult(NativeMethods.HardwareParamsAny(handle, param), "Hardware params any failed");
CheckResult(NativeMethods.HardwareParamsSetAccess(handle, param, (int)access), "Hardware params set access failed");
CheckResult(NativeMethods.HardwareParamsSetFormat(handle, param, (int)format), "Hardware params set format failed");
int* ratePtr = &rate;
int dir = 0;
int* dirPtr = &dir;
if (HardwareParamsSetRate(handle, param, ratePtr, dirPtr) != 0)
{
throw new ArgumentException("Hardware params set rate failed.");
}
CheckResult(NativeMethods.HardwareParamsSetRate(handle, param, ratePtr, dirPtr), "Hardware params set rate failed");
CheckResult(NativeMethods.HardwareParamsSetChannels(handle, param, (uint)channels), "Hardware params set channels failed");
CheckResult(NativeMethods.HardwareParams(handle, param), "Hardware set params failed");
if (HardwareParamsSetChannels(handle, param, (uint)channels) != 0)
{
throw new ArgumentException("Hardware params set channels failed.");
}
NativeMethods.HardwareParamsFree(param);
if (HardwareParams(handle, param) != 0)
{
throw new ArgumentException("Hardware set params failed.");
}
HardwareParamsFree(param);
if (PrepareHandle(handle) != 0)
{
throw new ArgumentException("Prepare handle failed.");
}
CheckResult(NativeMethods.PrepareHandle(handle), "Prepare handle failed");
return new AudioDevice(handle);
}
@ -411,18 +379,23 @@ namespace Microsoft.Psi.Audio
{
fixed (void* bufferPtr = buffer)
{
long err = Read(device.Handle, bufferPtr, (ulong)blockSize);
long err;
if (Environment.Is64BitOperatingSystem)
{
err = NativeMethods.Read64(device.Handle, bufferPtr, (ulong)blockSize);
}
else
{
err = NativeMethods.Read32(device.Handle, bufferPtr, (uint)blockSize);
}
if (err < 0)
{
err = Recover(device.Handle, (int)err, 1);
if (err < 0)
{
throw new ArgumentException("Read recovery failed.");
}
CheckResult(NativeMethods.Recover(device.Handle, (int)err, 1), "Read recovery failed");
}
else if (err != blockSize)
{
throw new ArgumentException("Read failed.");
throw new ArgumentException($"Read failed (ALSA error code: {err}).");
}
}
}
@ -441,14 +414,18 @@ namespace Microsoft.Psi.Audio
fixed (void* bufferPtr = buffer)
{
byte* pb = (byte*)bufferPtr + offset;
err = Write(device.Handle, pb, (ulong)blockSize);
if (Environment.Is64BitOperatingSystem)
{
err = NativeMethods.Write64(device.Handle, pb, (ulong)blockSize);
}
else
{
err = NativeMethods.Write32(device.Handle, pb, (uint)blockSize);
}
if (err < 0)
{
err = Recover(device.Handle, (int)err, 1);
if (err < 0)
{
throw new ArgumentException("Write recovery failed.");
}
CheckResult(NativeMethods.Recover(device.Handle, (int)err, 1), "Write recovery failed");
}
else if (err != blockSize)
{
@ -464,53 +441,24 @@ namespace Microsoft.Psi.Audio
/// <param name="device">Device handle.</param>
internal static unsafe void Close(AudioDevice device)
{
if (CloseHandle(device.Handle) != 0)
if (NativeMethods.CloseHandle(device.Handle) != 0)
{
throw new ArgumentException("Close failed.");
}
}
[DllImport("asound", EntryPoint = "snd_pcm_open")]
private static unsafe extern int Open(void** handle, [MarshalAs(UnmanagedType.LPStr)]string name, int capture, int mode);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_malloc")]
private static unsafe extern int HardwareParamsMalloc(void** param);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_any")]
private static unsafe extern int HardwareParamsAny(void* handle, void* param);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_set_access")]
private static unsafe extern int HardwareParamsSetAccess(void* handle, void* param, int access);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_set_format")]
private static unsafe extern int HardwareParamsSetFormat(void* handle, void* param, int format);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_set_rate_near")]
private static unsafe extern int HardwareParamsSetRate(void* handle, void* param, int* rate, int* dir);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_set_channels")]
private static unsafe extern int HardwareParamsSetChannels(void* handle, void* param, uint channels);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params")]
private static unsafe extern int HardwareParams(void* handle, void* param);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_free")]
private static unsafe extern void HardwareParamsFree(void* param);
[DllImport("asound", EntryPoint = "snd_pcm_prepare")]
private static unsafe extern int PrepareHandle(void* handle);
[DllImport("asound", EntryPoint = "snd_pcm_recover")]
private static unsafe extern int Recover(void* handle, int error, int silent);
[DllImport("asound", EntryPoint = "snd_pcm_readi")]
private static unsafe extern long Read(void* handle, void* buffer, ulong blockSize);
[DllImport("asound", EntryPoint = "snd_pcm_writei")]
private static unsafe extern long Write(void* handle, void* buffer, ulong blockSize);
[DllImport("asound", EntryPoint = "snd_pcm_close")]
private static unsafe extern int CloseHandle(void* handle);
/// <summary>
/// Check result code and throw argument exception upon failure from ALSA APIs.
/// </summary>
/// <param name="result">Result code returned by ALSA API.</param>
/// <param name="message">Error message in case of failure.</param>
private static void CheckResult(long result, string message)
{
if (result != 0)
{
throw new ArgumentException($"{message} (ALSA error code: {result}).");
}
}
/// <summary>
/// Audio device handle.
@ -532,5 +480,56 @@ namespace Microsoft.Psi.Audio
/// </summary>
public unsafe void* Handle { get; private set; }
}
private static class NativeMethods
{
[DllImport("asound", EntryPoint = "snd_pcm_open", BestFitMapping = false, ThrowOnUnmappableChar = true)]
internal static unsafe extern int Open(void** handle, [MarshalAs(UnmanagedType.LPStr)]string name, int capture, int mode);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_malloc")]
internal static unsafe extern int HardwareParamsMalloc(void** param);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_any")]
internal static unsafe extern int HardwareParamsAny(void* handle, void* param);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_set_access")]
internal static unsafe extern int HardwareParamsSetAccess(void* handle, void* param, int access);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_set_format")]
internal static unsafe extern int HardwareParamsSetFormat(void* handle, void* param, int format);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_set_rate_near")]
internal static unsafe extern int HardwareParamsSetRate(void* handle, void* param, int* rate, int* dir);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_set_channels")]
internal static unsafe extern int HardwareParamsSetChannels(void* handle, void* param, uint channels);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params")]
internal static unsafe extern int HardwareParams(void* handle, void* param);
[DllImport("asound", EntryPoint = "snd_pcm_hw_params_free")]
internal static unsafe extern void HardwareParamsFree(void* param);
[DllImport("asound", EntryPoint = "snd_pcm_prepare")]
internal static unsafe extern int PrepareHandle(void* handle);
[DllImport("asound", EntryPoint = "snd_pcm_recover")]
internal static unsafe extern int Recover(void* handle, int error, int silent);
[DllImport("asound", EntryPoint = "snd_pcm_readi")]
internal static unsafe extern int Read32(void* handle, void* buffer, uint blockSize);
[DllImport("asound", EntryPoint = "snd_pcm_readi")]
internal static unsafe extern long Read64(void* handle, void* buffer, ulong blockSize);
[DllImport("asound", EntryPoint = "snd_pcm_writei")]
internal static unsafe extern int Write32(void* handle, void* buffer, uint blockSize);
[DllImport("asound", EntryPoint = "snd_pcm_writei")]
internal static unsafe extern long Write64(void* handle, void* buffer, ulong blockSize);
[DllImport("asound", EntryPoint = "snd_pcm_close")]
internal static unsafe extern int CloseHandle(void* handle);
}
}
}

Просмотреть файл

@ -36,6 +36,10 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118" PrivateAssets="All" />
</ItemGroup>
</Project>

Просмотреть файл

@ -88,6 +88,16 @@ namespace Microsoft.Psi.Audio
{
}
/// <summary>
/// Initializes a new instance of the <see cref="AudioCapture"/> class with a specified output format.
/// </summary>
/// <param name="pipeline">The pipeline to add the component to.</param>
/// <param name="outputFormat">The output format to use.</param>
public AudioCapture(Pipeline pipeline, WaveFormat outputFormat)
: this(pipeline, new AudioCaptureConfiguration() { OutputFormat = outputFormat })
{
}
/// <summary>
/// Gets the output stream of audio buffers.
/// </summary>

Просмотреть файл

@ -17,15 +17,6 @@ namespace Microsoft.Psi.Audio
/// </summary>
public AudioCaptureConfiguration()
{
this.DeviceName = string.Empty;
this.TargetLatencyInMs = 20;
this.AudioEngineBufferInMs = 500;
this.AudioLevel = -1;
this.Gain = 1.0f;
this.OptimizeForSpeech = false;
this.UseEventDrivenCapture = true;
this.DropOutOfOrderPackets = false;
this.OutputFormat = null;
}
/// <summary>
@ -37,7 +28,7 @@ namespace Microsoft.Psi.Audio
/// <see cref="AudioCapture.GetAvailableDevices"/> static method. If not specified, the
/// default recording device will be selected.
/// </remarks>
public string DeviceName { get; set; }
public string DeviceName { get; set; } = string.Empty;
/// <summary>
/// Gets or sets the target audio latency (pull capture mode only).
@ -52,7 +43,7 @@ namespace Microsoft.Psi.Audio
/// <see cref="UseEventDrivenCapture"/> is set to true. In event-driven capture mode, the latency
/// is determined by the rate at which the audio engine signals that it has new data available.
/// </remarks>
public int TargetLatencyInMs { get; set; }
public int TargetLatencyInMs { get; set; } = 20;
/// <summary>
/// Gets or sets the audio engine buffer.
@ -66,7 +57,7 @@ namespace Microsoft.Psi.Audio
/// audio packets fast enough. Setting this to a larger value reduces the likelihood of
/// encountering glitches in the captured audio stream.
/// </remarks>
public int AudioEngineBufferInMs { get; set; }
public int AudioEngineBufferInMs { get; set; } = 500;
/// <summary>
/// Gets or sets the audio input level.
@ -76,7 +67,7 @@ namespace Microsoft.Psi.Audio
/// between 0.0 and 1.0 inclusive. If not specified, the current level of the selected
/// recording device will be left unchanged.
/// </remarks>
public double AudioLevel { get; set; }
public double AudioLevel { get; set; } = -1;
/// <summary>
/// Gets or sets the additional gain to be applied to the captured audio.
@ -86,7 +77,7 @@ namespace Microsoft.Psi.Audio
/// audio signal. Values greater than 1.0 boost the audio signal, while values in the range
/// of 0.0 to 1.0 attenuate it. The default value is 1.0 (no additional gain).
/// </remarks>
public float Gain { get; set; }
public float Gain { get; set; } = 1.0f;
/// <summary>
/// Gets or sets a value indicating whether the captured audio should be pre-processed for
@ -98,7 +89,7 @@ namespace Microsoft.Psi.Audio
/// the audio for speech recognition applications. By default, this option is set to false.
/// This feature may not be available for all capture devices.
/// </remarks>
public bool OptimizeForSpeech { get; set; }
public bool OptimizeForSpeech { get; set; } = false;
/// <summary>
/// Gets or sets a value indicating whether to use event-driven or pull capture mode. When using
@ -109,7 +100,7 @@ namespace Microsoft.Psi.Audio
/// by the audio engine (up to an amount equivalent to <see cref="AudioEngineBufferInMs"/>) should
/// the application be unable to consume the audio data quickly enough.
/// </summary>
public bool UseEventDrivenCapture { get; set; }
public bool UseEventDrivenCapture { get; set; } = true;
/// <summary>
/// Gets or sets a value indicating whether the component should
@ -118,7 +109,7 @@ namespace Microsoft.Psi.Audio
/// <remarks>
/// This is for internal use only and may be removed in future versions.
/// </remarks>
public bool DropOutOfOrderPackets { get; set; }
public bool DropOutOfOrderPackets { get; set; } = false;
/// <summary>
/// Gets or sets the desired format for the captured audio.
@ -128,6 +119,6 @@ namespace Microsoft.Psi.Audio
/// Use this to specify a different format for the <see cref="AudioBuffer"/> Out stream of
/// the <see cref="AudioCapture"/> component.
/// </remarks>
public WaveFormat OutputFormat { get; set; }
public WaveFormat OutputFormat { get; set; } = null;
}
}

Просмотреть файл

@ -17,14 +17,6 @@ namespace Microsoft.Psi.Audio
/// </summary>
public AudioPlayerConfiguration()
{
this.DeviceName = string.Empty;
this.TargetLatencyInMs = 20;
this.BufferLengthSeconds = 0.1;
this.AudioLevel = -1;
this.Gain = 1.0f;
// Defaults to 16 kHz, 16-bit, 1-channel PCM samples
this.InputFormat = WaveFormat.Create16kHz1Channel16BitPcm();
}
/// <summary>
@ -36,7 +28,7 @@ namespace Microsoft.Psi.Audio
/// <see cref="AudioPlayer.GetAvailableDevices"/> static method. If not specified, the
/// default playback device will be selected.
/// </remarks>
public string DeviceName { get; set; }
public string DeviceName { get; set; } = string.Empty;
/// <summary>
/// Gets or sets the target audio latency.
@ -46,9 +38,9 @@ namespace Microsoft.Psi.Audio
/// at a time. This in turn determines the latency of the audio output (i.e. the amount of lag
/// between when the audio was available and when the corresponding sound is produced). For
/// live audio playback, we normally want this to be small. By default, this value is set to
/// 20 milliseconds. Is is safe to leave this unchanged.
/// 20 milliseconds. It is safe to leave this unchanged.
/// </remarks>
public int TargetLatencyInMs { get; set; }
public int TargetLatencyInMs { get; set; } = 20;
/// <summary>
/// Gets or sets the maximum duration of audio that can be buffered for playback.
@ -57,7 +49,7 @@ namespace Microsoft.Psi.Audio
/// This controls the amount of audio that can be buffered while waiting for the playback
/// device to be ready to render it. The default value is 0.1 seconds.
/// </remarks>
public double BufferLengthSeconds { get; set; }
public double BufferLengthSeconds { get; set; } = 0.1;
/// <summary>
/// Gets or sets the audio output level.
@ -67,7 +59,7 @@ namespace Microsoft.Psi.Audio
/// between 0.0 and 1.0 inclusive. If not specified, the current level of the selected
/// playback device will be left unchanged.
/// </remarks>
public float AudioLevel { get; set; }
public float AudioLevel { get; set; } = -1;
/// <summary>
/// Gets or sets the additional gain to be applied to the audio data.
@ -77,7 +69,7 @@ namespace Microsoft.Psi.Audio
/// signal. Values greater than 1.0 boost the audio signal, while values in the range
/// of 0.0 to 1.0 attenuate it. The default value is 1.0 (no additional gain).
/// </remarks>
public float Gain { get; set; }
public float Gain { get; set; } = 1.0f;
/// <summary>
/// Gets or sets the input format of the audio stream.
@ -87,6 +79,6 @@ namespace Microsoft.Psi.Audio
/// set, the <see cref="AudioPlayer"/> component will attempt to infer the audio format
/// from the <see cref="AudioBuffer"/> messages arriving on the input stream.
/// </remarks>
public WaveFormat InputFormat { get; set; }
public WaveFormat InputFormat { get; set; } = WaveFormat.Create16kHz1Channel16BitPcm();
}
}

Просмотреть файл

@ -184,7 +184,7 @@ namespace Microsoft.Psi.Audio
timestamp + (10000000L * length / this.Configuration.OutputFormat.AvgBytesPerSec),
DateTimeKind.Utc);
if (originatingTime < this.lastOutputPostTime)
if (originatingTime <= this.lastOutputPostTime)
{
// If the input audio packet is larger than the output packet (as determined by the
// target latency), then the packet will be split into multiple packets for resampling.
@ -195,10 +195,10 @@ namespace Microsoft.Psi.Audio
// This could happen if the two consecutive input packets overlap in time, for example
// if an automatic system time adjustment occurred between the capture of the two packets.
// These adjustments occur from time to time to account for system clock drift w.r.t.
// UTC time. In order to ensure that this does not lead to resampled output sub-packets
// regressing in time, we manually enforce the output originating time to be no less than
// that of the previous packet.
originatingTime = this.lastOutputPostTime;
// UTC time. As this could in result in output message originating times not advancing
// or even regressing, we check for this and ensure that they are always monotonically
// increasing.
originatingTime = this.lastOutputPostTime + TimeSpan.FromTicks(1);
}
// post the data to the output stream

Просмотреть файл

@ -17,9 +17,6 @@ namespace Microsoft.Psi.Audio
/// </summary>
public AudioResamplerConfiguration()
{
this.TargetLatencyInMs = 20;
this.InputFormat = WaveFormat.Create16kHz1Channel16BitPcm();
this.OutputFormat = WaveFormat.Create16kHz1Channel16BitPcm();
}
/// <summary>
@ -30,9 +27,9 @@ namespace Microsoft.Psi.Audio
/// turn determines the latency of the audio output. The larger this value, the more audio
/// data is carried in each <see cref="AudioBuffer"/> and the longer the audio latency. For
/// live audio capture, we normally want this value to be small as possible. By default,
/// this value is set to 20 milliseconds. Is is safe to leave this unchanged.
/// this value is set to 20 milliseconds. It is safe to leave this unchanged.
/// </remarks>
public int TargetLatencyInMs { get; set; }
public int TargetLatencyInMs { get; set; } = 20;
/// <summary>
/// Gets or sets the input format of the audio stream to be resampled.
@ -42,11 +39,11 @@ namespace Microsoft.Psi.Audio
/// set, the <see cref="AudioResampler"/> component will attempt to infer the audio format
/// from the <see cref="AudioBuffer"/> messages arriving on the input stream.
/// </remarks>
public WaveFormat InputFormat { get; set; }
public WaveFormat InputFormat { get; set; } = WaveFormat.Create16kHz1Channel16BitPcm();
/// <summary>
/// Gets or sets the output format for the resampled audio.
/// </summary>
public WaveFormat OutputFormat { get; set; }
public WaveFormat OutputFormat { get; set; } = WaveFormat.Create16kHz1Channel16BitPcm();
}
}

Просмотреть файл

@ -30,6 +30,10 @@
<ProjectReference Include="..\Microsoft.Psi.Audio\Microsoft.Psi.Audio.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118" PrivateAssets="All" />
</ItemGroup>
</Project>

Просмотреть файл

@ -17,7 +17,7 @@ namespace Microsoft.Psi.Audio
/// </remarks>
public sealed class AcousticFeaturesExtractor : IConsumer<AudioBuffer>
{
private Connector<AudioBuffer> inAudio;
private readonly Connector<AudioBuffer> inAudio;
/// <summary>
/// Initializes a new instance of the <see cref="AcousticFeaturesExtractor"/> class.
@ -43,11 +43,9 @@ namespace Microsoft.Psi.Audio
float frameRate = configuration.FrameRateInHz;
int frameSize = (int)((configuration.InputFormat.SamplesPerSec * configuration.FrameDurationInSeconds) + 0.5);
int frameShift = (int)((configuration.InputFormat.SamplesPerSec / frameRate) + 0.5);
int frameOverlap = frameSize - frameShift;
int bytesPerSample = configuration.InputFormat.BlockAlign;
int bytesPerFrame = bytesPerSample * frameSize;
int bytesPerFrameShift = bytesPerSample * frameShift;
int bytesPerOverlap = bytesPerFrame - bytesPerFrameShift;
int fftSize = 2;
while (fftSize < frameSize)
{

Просмотреть файл

@ -34,23 +34,8 @@ namespace Microsoft.Psi.Audio
public AcousticFeaturesExtractorConfiguration()
{
// Default parameters for acoustic features computation
this.FrameDurationInSeconds = 0.025f;
this.FrameRateInHz = 100.0f;
this.AddDither = true;
this.DitherScaleFactor = 1.0f;
this.StartFrequency = 250.0f;
this.EndFrequency = 7000.0f;
this.LowEndFrequency = 3000.0f;
this.HighStartFrequency = 2500.0f;
this.EntropyBandwidth = 2500.0f;
this.ComputeLogEnergy = true;
this.ComputeZeroCrossingRate = true;
this.ComputeFrequencyDomainEnergy = true;
this.ComputeLowFrequencyEnergy = true;
this.ComputeHighFrequencyEnergy = true;
this.ComputeSpectralEntropy = true;
this.ComputeFFT = false;
this.ComputeFFTPower = false;
this.computeFFT = false;
this.computeFFTPower = false;
// Defaults to 16 kHz, 16-bit, 1-channel PCM samples
this.InputFormat = WaveFormat.Create16kHz1Channel16BitPcm();
@ -59,58 +44,78 @@ namespace Microsoft.Psi.Audio
/// <summary>
/// Gets or sets the duration of the frame of audio over which the acoustic features will be computed.
/// </summary>
public float FrameDurationInSeconds { get; set; }
public float FrameDurationInSeconds { get; set; } = 0.025f;
/// <summary>
/// Gets or sets the frame rate at which the acoustic features will be computed.
/// </summary>
public float FrameRateInHz { get; set; }
public float FrameRateInHz { get; set; } = 100.0f;
/// <summary>
/// Gets or sets a value indicating whether dither is to be applied to the audio data.
/// </summary>
public bool AddDither { get; set; }
public bool AddDither { get; set; } = true;
/// <summary>
/// Gets or sets the scale factor by which the dither to be applied will be multiplied.
/// A scale factor of 1.0 will result in a dither with a range of -1.0 to +1.0.
/// </summary>
public float DitherScaleFactor { get; set; }
public float DitherScaleFactor { get; set; } = 1.0f;
/// <summary>
/// Gets or sets the start frequency for frequency-domain features.
/// </summary>
public float StartFrequency { get; set; }
public float StartFrequency { get; set; } = 250.0f;
/// <summary>
/// Gets or sets the end frequency for frequency-domain features.
/// </summary>
public float EndFrequency { get; set; }
public float EndFrequency { get; set; } = 7000.0f;
/// <summary>
/// Gets or sets the end frequency for low-frequency features.
/// </summary>
public float LowEndFrequency { get; set; }
public float LowEndFrequency { get; set; } = 3000.0f;
/// <summary>
/// Gets or sets the start frequency for high-frequency features.
/// </summary>
public float HighStartFrequency { get; set; }
public float HighStartFrequency { get; set; } = 2500.0f;
/// <summary>
/// Gets or sets the bandwidth for entropy features.
/// </summary>
public float EntropyBandwidth { get; set; }
public float EntropyBandwidth { get; set; } = 2500.0f;
/// <summary>
/// Gets or sets a value indicating whether to compute the log energy stream.
/// </summary>
public bool ComputeLogEnergy { get; set; }
public bool ComputeLogEnergy { get; set; } = true;
/// <summary>
/// Gets or sets a value indicating whether to compute the zero-crossing rate stream.
/// </summary>
public bool ComputeZeroCrossingRate { get; set; }
public bool ComputeZeroCrossingRate { get; set; } = true;
/// <summary>
/// Gets or sets a value indicating whether to compute the frequency domain energy stream.
/// </summary>
public bool ComputeFrequencyDomainEnergy { get; set; } = true;
/// <summary>
/// Gets or sets a value indicating whether to compute the low frequency energy stream.
/// </summary>
public bool ComputeLowFrequencyEnergy { get; set; } = true;
/// <summary>
/// Gets or sets a value indicating whether to compute the high frequency energy stream.
/// </summary>
public bool ComputeHighFrequencyEnergy { get; set; } = true;
/// <summary>
/// Gets or sets a value indicating whether to compute the spectral entropy stream.
/// </summary>
public bool ComputeSpectralEntropy { get; set; } = true;
/// <summary>
/// Gets or sets a value indicating whether to compute the FFT stream.
@ -148,26 +153,6 @@ namespace Microsoft.Psi.Audio
}
}
/// <summary>
/// Gets or sets a value indicating whether to compute the frequency domain energy stream.
/// </summary>
public bool ComputeFrequencyDomainEnergy { get; set; }
/// <summary>
/// Gets or sets a value indicating whether to compute the low frequency energy stream.
/// </summary>
public bool ComputeLowFrequencyEnergy { get; set; }
/// <summary>
/// Gets or sets a value indicating whether to compute the high frequency energy stream.
/// </summary>
public bool ComputeHighFrequencyEnergy { get; set; }
/// <summary>
/// Gets or sets a value indicating whether to compute the spectral entropy stream.
/// </summary>
public bool ComputeSpectralEntropy { get; set; }
/// <summary>
/// Gets or sets the format of the audio stream.
/// </summary>

Просмотреть файл

@ -10,14 +10,12 @@ namespace Microsoft.Psi.Audio
/// </summary>
internal sealed class FastFourierTransform
{
private int fftSize; // FFT size
private int windowSize; // Size of the data window. FFTSize - WindowSize = ZeroPadSize
private int fftPow2; // FFT size in form of POW of 2
private int halfFftSize;
private readonly int fftSize; // FFT size
private readonly int windowSize; // Size of the data window. FFTSize - WindowSize = ZeroPadSize
private readonly int fftPow2; // FFT size in form of POW of 2
private float[] wriFactors; // SinCos(theta) array
private float[] alignedWriFactors; // SinCos(theta) array - 16 byte aligned
private short[] revMap;
private readonly float[] alignedWriFactors; // SinCos(theta) array - 16 byte aligned
private readonly short[] revMap;
/// <summary>
/// Initializes a new instance of the <see cref="FastFourierTransform"/> class.
@ -28,17 +26,15 @@ namespace Microsoft.Psi.Audio
{
this.fftSize = fftSize;
this.windowSize = windowSize;
this.halfFftSize = this.fftSize >> 1;
this.fftPow2 = 1;
int size = 2;
while (size < fftSize)
{
size = size << 1;
size <<= 1;
this.fftPow2++;
}
this.alignedWriFactors = new float[this.fftSize * 2];
this.wriFactors = new float[(this.fftSize * 2) + 20];
this.revMap = new short[this.fftSize / 2];
this.alignedWriFactors[0] = 1.0f;
this.alignedWriFactors[1] = -1.0f;
@ -68,7 +64,7 @@ namespace Microsoft.Psi.Audio
while (j >= k)
{
j -= k;
k = k >> 1;
k >>= 1;
}
j += k;
@ -240,7 +236,7 @@ namespace Microsoft.Psi.Audio
kk += limit;
limit = incr;
incr = incr + incr;
incr += incr;
}
float xr1, xi1, xr2, xi2;

Просмотреть файл

@ -10,8 +10,8 @@ namespace Microsoft.Psi.Audio
/// </summary>
public sealed class FrequencyDomainEnergy : ConsumerProducer<float[], float>
{
private int start;
private int end;
private readonly int start;
private readonly int end;
/// <summary>
/// Initializes a new instance of the <see cref="FrequencyDomainEnergy"/> class.

Просмотреть файл

@ -18,8 +18,8 @@ namespace Microsoft.Psi.Audio
/// </remarks>
public struct AudioBuffer
{
private WaveFormat format;
private byte[] data;
private readonly WaveFormat format;
private readonly byte[] data;
/// <summary>
/// Initializes a new instance of the <see cref="AudioBuffer"/> structure.

Просмотреть файл

@ -35,6 +35,10 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118" PrivateAssets="All" />
<PackageReference Update="NETStandard.Library" Version="2.0.3" />
</ItemGroup>

Просмотреть файл

@ -57,7 +57,7 @@ namespace Microsoft.Psi.Audio
}
/// <summary>
/// Reads the lenth in bytes of the data section of a Wave file.
/// Reads the length in bytes of the data section of a Wave file.
/// </summary>
/// <param name="br">The binary reader to read from.</param>
/// <returns>The number of byte of wave data that follow.</returns>

Просмотреть файл

@ -2,7 +2,7 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp2.0</TargetFramework>
<TargetFramework>netcoreapp3.1</TargetFramework>
<IsPackable>false</IsPackable>
<Description>Audio unit tests</Description>
<StartupObject>Test.Psi.Audio.ConsoleMain</StartupObject>
@ -27,10 +27,14 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="15.9.2" />
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.6.1" />
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118" PrivateAssets="All" />
<PackageReference Include="MSTest.TestAdapter" Version="2.1.0" />
<PackageReference Include="MSTest.TestFramework" Version="2.1.0" />
<PackageReference Include="MSTest.TestAdapter" Version="2.1.1" />
<PackageReference Include="MSTest.TestFramework" Version="2.1.1" />
</ItemGroup>
<ItemGroup>

Просмотреть файл

@ -3,6 +3,7 @@
namespace Microsoft.Psi.Calibration
{
using System.Runtime.Serialization;
using MathNet.Numerics.LinearAlgebra;
using MathNet.Spatial.Euclidean;
@ -13,6 +14,9 @@ namespace Microsoft.Psi.Calibration
{
private Matrix<double> transform;
[OptionalField]
private bool closedFormDistorts;
/// <summary>
/// Initializes a new instance of the <see cref="CameraIntrinsics"/> class.
/// </summary>
@ -21,12 +25,14 @@ namespace Microsoft.Psi.Calibration
/// <param name="transform">The intrinsics transform matrix.</param>
/// <param name="radialDistortion">The radial distortion parameters.</param>
/// <param name="tangentialDistortion">The tangential distortion parameters.</param>
/// <param name="closedFormDistorts">Indicates which direction the closed form equation for Brown-Conrady Distortion model goes. I.e. does it perform distortion or undistortion. Default is to distort (thus making projection simpler and unprojection more complicated).</param>
public CameraIntrinsics(
int imageWidth,
int imageHeight,
Matrix<double> transform,
Vector<double> radialDistortion = null,
Vector<double> tangentialDistortion = null)
Vector<double> tangentialDistortion = null,
bool closedFormDistorts = true)
{
this.ImageWidth = imageWidth;
this.ImageHeight = imageHeight;
@ -35,6 +41,7 @@ namespace Microsoft.Psi.Calibration
this.TangentialDistortion = tangentialDistortion ?? Vector<double>.Build.Dense(2, 0);
this.FocalLengthXY = new Point2D(this.Transform[0, 0], this.Transform[1, 1]);
this.PrincipalPoint = new Point2D(this.Transform[0, 2], this.Transform[1, 2]);
this.ClosedFormDistorts = closedFormDistorts;
}
/// <inheritdoc/>
@ -70,6 +77,20 @@ namespace Microsoft.Psi.Calibration
/// <inheritdoc/>
public Point2D PrincipalPoint { get; private set; }
/// <inheritdoc/>
public bool ClosedFormDistorts
{
get
{
return this.closedFormDistorts;
}
private set
{
this.closedFormDistorts = value;
}
}
/// <inheritdoc/>
public int ImageWidth { get; private set; }
@ -88,7 +109,7 @@ namespace Microsoft.Psi.Calibration
Point3D tmp = new Point3D(pixelPt.X, pixelPt.Y, 1.0);
tmp = tmp.TransformBy(this.transform);
return new Point2D(tmp.X, this.ImageHeight - tmp.Y);
return new Point2D(tmp.X, tmp.Y);
}
/// <inheritdoc/>
@ -98,33 +119,63 @@ namespace Microsoft.Psi.Calibration
Point3D tmp = new Point3D(pt.X, pt.Y, 1.0);
tmp = tmp.TransformBy(this.InvTransform);
// Undistort the pixel
// Distort the pixel
Point2D pixelPt = new Point2D(tmp.X, tmp.Y);
if (undistort)
{
pixelPt = this.UndistortPoint(pixelPt);
this.UndistortPoint(pixelPt, out pixelPt);
}
// X points in the depth dimension. Y points to the left, and Z points up.
return new Point3D(depth, -pixelPt.X * depth, -pixelPt.Y * depth);
}
/// <inheritdoc/>
public bool UndistortPoint(Point2D distortedPt, out Point2D undistortedPt)
{
if (this.ClosedFormDistorts)
{
return this.InverseOfClosedForm(distortedPt, out undistortedPt);
}
return this.ClosedForm(distortedPt, out undistortedPt);
}
/// <inheritdoc/>
public bool DistortPoint(Point2D undistortedPt, out Point2D distortedPt)
{
double x = undistortedPt.X;
double y = undistortedPt.Y;
if (this.ClosedFormDistorts)
{
return this.ClosedForm(undistortedPt, out distortedPt);
}
return this.InverseOfClosedForm(undistortedPt, out distortedPt);
}
private bool InverseOfClosedForm(Point2D inputPt, out Point2D outputPt)
{
double k1 = this.RadialDistortion[0];
double k2 = this.RadialDistortion[1];
double k3 = this.RadialDistortion[2];
double k4 = this.RadialDistortion[3];
double k5 = this.RadialDistortion[4];
double k6 = this.RadialDistortion[5];
double t0 = this.TangentialDistortion[0];
double t1 = this.TangentialDistortion[1];
double x = inputPt.X;
double y = inputPt.Y;
// Our distortion model is defined as:
// See https://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html?highlight=convertpointshomogeneous
// r^2 = x^2 + y^2
// (1+k1*r^2+k2*r^3+k3^r^6)
// (1+k1*r^2+k2*r^4+k3*r^6)
// Fx = x ------------------------ + t1*(r^2+ 2 * x^2) + 2 * t0 * x*y
// (1+k4*r^2+k5*r^3+k6^r^6)
// (1+k4*r^2+k5*r^4+k6*r^6)
//
// (1+k1*r^2+k2*r^3+k3^r^6)
// (1+k1*r^2+k2*r^4+k3*r^6)
// Fy = y ------------------------ + t0*(r^2+ 2 * y^2) + 2 * t1 * x*y
// (1+k4*r^2+k5*r^3+k6^r^6)
// (1+k4*r^2+k5*r^4+k6*r^6)
//
// We want to solve for:
// 1 | @Fy/@y -@Fx/@y |
@ -147,15 +198,6 @@ namespace Microsoft.Psi.Calibration
// @Fy/@x = y @d/@x + 2*t0*y + 2*t1*x
//
// In the code below @<x>/@<y> is named 'd<x>d<y>'.
double k1 = this.RadialDistortion[0];
double k2 = this.RadialDistortion[1];
double k3 = this.RadialDistortion[2];
double k4 = this.RadialDistortion[3];
double k5 = this.RadialDistortion[4];
double k6 = this.RadialDistortion[5];
double t0 = this.TangentialDistortion[0];
double t1 = this.TangentialDistortion[1];
#pragma warning disable SA1305
bool converged = false;
for (int j = 0; j < 100 && !converged; j++)
@ -169,7 +211,7 @@ namespace Microsoft.Psi.Calibration
double dr2dy = 2 * y;
double d = g / h;
double dgdr2 = k1 + 2 * k2 * radiusSq + 3 * k3 * radiusSqSq;
double dhdr2 = k4 * 2 * k5 * radiusSq + 3 * k6 * radiusSqSq;
double dhdr2 = k4 + 2 * k5 * radiusSq + 3 * k6 * radiusSqSq;
double dddr2 = (dgdr2 * h - g * dhdr2) / (h * h);
double dddx = dddr2 * 2 * x;
double dddy = dddr2 * 2 * y;
@ -183,7 +225,7 @@ namespace Microsoft.Psi.Calibration
if (System.Math.Abs(det) < 1E-16)
{
// Not invertible. Perform no distortion
distortedPt = new Point2D(undistortedPt.X, undistortedPt.Y);
outputPt = new Point2D(inputPt.X, inputPt.Y);
return false;
}
@ -199,31 +241,40 @@ namespace Microsoft.Psi.Calibration
// to be equal to 0:
// 0 = F(xp) - Xu
// 0 = F(yp) - Yu
xp -= undistortedPt.X;
yp -= undistortedPt.Y;
double errx = xp - inputPt.X;
double erry = yp - inputPt.Y;
if ((xp * xp) + (yp * yp) < 1E-16)
double err = (errx * errx) + (erry * erry);
if (err < 1.0e-16)
{
converged = true;
break;
}
// Update our new guess (i.e. x = x - J(F(x))^-1 * F(x))
x = x - ((dFydy * xp) - (dFxdy * yp)) / det;
y = y - ((-dFydx * xp) + (dFxdx * yp)) / det;
x = x - ((dFydy * errx) - (dFxdy * erry)) / det;
y = y - ((-dFydx * errx) + (dFxdx * erry)) / det;
#pragma warning restore SA1305
}
distortedPt = new Point2D(x, y);
return true;
if (converged)
{
outputPt = new Point2D(x, y);
}
else
{
outputPt = new Point2D(inputPt.X, inputPt.Y);
}
return converged;
}
/// <inheritdoc/>
public Point2D UndistortPoint(Point2D distortedPt)
private bool ClosedForm(Point2D inputPt, out Point2D outputPt)
{
// Apply the closed-form Brown-Conrady model to the point
// (distorts or undistorts depending on how ClosedFormDistorts is configured)
double xp, yp;
double radiusSquared = (distortedPt.X * distortedPt.X) + (distortedPt.Y * distortedPt.Y);
double radiusSquared = (inputPt.X * inputPt.X) + (inputPt.Y * inputPt.Y);
if (this.RadialDistortion != null)
{
double k1 = this.RadialDistortion[0];
@ -236,26 +287,27 @@ namespace Microsoft.Psi.Calibration
double h = 1 + k4 * radiusSquared + k5 * radiusSquared * radiusSquared + k6 * radiusSquared * radiusSquared * radiusSquared;
double d = g / h;
xp = distortedPt.X * d;
yp = distortedPt.Y * d;
xp = inputPt.X * d;
yp = inputPt.Y * d;
}
else
{
xp = distortedPt.X;
yp = distortedPt.Y;
xp = inputPt.X;
yp = inputPt.Y;
}
// If we are incorporating tangential distortion, include that here
if (this.TangentialDistortion != null && (this.TangentialDistortion[0] != 0.0 || this.TangentialDistortion[1] != 0.0))
{
double xy = 2.0 * distortedPt.X * distortedPt.Y;
double x2 = 2.0 * distortedPt.X * distortedPt.X;
double y2 = 2.0 * distortedPt.Y * distortedPt.Y;
double xy = 2.0 * inputPt.X * inputPt.Y;
double x2 = 2.0 * inputPt.X * inputPt.X;
double y2 = 2.0 * inputPt.Y * inputPt.Y;
xp += (this.TangentialDistortion[1] * (radiusSquared + x2)) + (this.TangentialDistortion[0] * xy);
yp += (this.TangentialDistortion[0] * (radiusSquared + y2)) + (this.TangentialDistortion[1] * xy);
}
return new Point2D(xp, yp);
outputPt = new Point2D(xp, yp);
return true;
}
}
}

Просмотреть файл

@ -60,6 +60,7 @@ namespace Microsoft.Psi.Calibration
Vector<double>.Build.DenseOfArray(colorTangentialDistortionCoefficients));
this.ColorExtrinsics = new CoordinateSystem(depthToColorTransform);
this.ColorPose = this.ColorExtrinsics.Invert();
this.DepthIntrinsics = new CameraIntrinsics(
depthWidth,
@ -69,24 +70,34 @@ namespace Microsoft.Psi.Calibration
Vector<double>.Build.DenseOfArray(depthTangentialDistortionCoefficients));
this.DepthExtrinsics = new CoordinateSystem(depthExtrinsics);
this.DepthPose = this.DepthExtrinsics.Invert();
}
/// <inheritdoc/>
public CoordinateSystem ColorExtrinsics { get; }
/// <inheritdoc/>
public CoordinateSystem ColorPose { get; }
/// <inheritdoc/>
public ICameraIntrinsics ColorIntrinsics { get; }
/// <inheritdoc/>
public CoordinateSystem DepthExtrinsics { get; }
/// <inheritdoc/>
public CoordinateSystem DepthPose { get; }
/// <inheritdoc/>
public ICameraIntrinsics DepthIntrinsics { get; }
/// <inheritdoc/>
public Point2D ToColorSpace(Point3D point3D)
{
// First convert the point into camera coordinates.
var point3DInColorCamera = this.ColorExtrinsics.Transform(point3D);
// Then convert to pixel space.
return this.ColorIntrinsics.ToPixelSpace(point3DInColorCamera, true);
}
}

Просмотреть файл

@ -4,7 +4,9 @@
namespace Microsoft.Psi.Calibration
{
using System;
using System.Collections.Generic;
using MathNet.Spatial.Euclidean;
using Microsoft.Psi;
using Microsoft.Psi.Imaging;
/// <summary>
@ -19,9 +21,9 @@ namespace Microsoft.Psi.Calibration
/// <param name="point2D">Pixel coordinates in the color camera.</param>
/// <param name="depthImage">Depth map.</param>
/// <returns>Point in camera coordinates.</returns>
internal static Point3D? ProjectToCameraSpace(IDepthDeviceCalibrationInfo depthDeviceCalibrationInfo, Point2D point2D, Shared<Image> depthImage)
public static Point3D? ProjectToCameraSpace(IDepthDeviceCalibrationInfo depthDeviceCalibrationInfo, Point2D point2D, Shared<DepthImage> depthImage)
{
var colorExtrinsicsInverse = depthDeviceCalibrationInfo.ColorExtrinsics.Inverse();
var colorExtrinsicsInverse = depthDeviceCalibrationInfo.ColorPose;
var pointInCameraSpace = depthDeviceCalibrationInfo.ColorIntrinsics.ToCameraSpace(point2D, 1.0, true);
double x = pointInCameraSpace.X * colorExtrinsicsInverse[0, 0] + pointInCameraSpace.Y * colorExtrinsicsInverse[0, 1] + pointInCameraSpace.Z * colorExtrinsicsInverse[0, 2] + colorExtrinsicsInverse[0, 3];
double y = pointInCameraSpace.X * colorExtrinsicsInverse[1, 0] + pointInCameraSpace.Y * colorExtrinsicsInverse[1, 1] + pointInCameraSpace.Z * colorExtrinsicsInverse[1, 2] + colorExtrinsicsInverse[1, 3];
@ -32,6 +34,20 @@ namespace Microsoft.Psi.Calibration
return IntersectLineWithDepthMesh(depthDeviceCalibrationInfo, rgbLine, depthImage.Resource, 0.1);
}
/// <summary>
/// Projects set of 2D image points into 3D.
/// </summary>
/// <param name="source">Tuple of depth image, list of points to project, and calibration information.</param>
/// <param name="deliveryPolicy">An optional delivery policy.</param>
/// <returns>Returns a producer that generates a list of corresponding 3D points in Kinect camera space.</returns>
public static IProducer<List<Point3D>> ProjectTo3D(
this IProducer<(Shared<DepthImage>, List<Point2D>, IDepthDeviceCalibrationInfo)> source, DeliveryPolicy<(Shared<DepthImage>, List<Point2D>, IDepthDeviceCalibrationInfo)> deliveryPolicy = null)
{
var projectTo3D = new ProjectTo3D(source.Out.Pipeline);
source.PipeTo(projectTo3D, deliveryPolicy);
return projectTo3D;
}
/// <summary>
/// Performs a ray/mesh intersection with the depth map.
/// </summary>
@ -41,7 +57,7 @@ namespace Microsoft.Psi.Calibration
/// <param name="skipFactor">Distance to march on each step along ray.</param>
/// <param name="undistort">Whether undistortion should be applied to the point.</param>
/// <returns>Returns point of intersection.</returns>
internal static Point3D? IntersectLineWithDepthMesh(IDepthDeviceCalibrationInfo calibration, Line3D line, Image depthImage, double skipFactor, bool undistort = true)
internal static Point3D? IntersectLineWithDepthMesh(IDepthDeviceCalibrationInfo calibration, Line3D line, DepthImage depthImage, double skipFactor, bool undistort = true)
{
// max distance to check for intersection with the scene
double totalDistance = 5;
@ -67,7 +83,7 @@ namespace Microsoft.Psi.Calibration
return null;
}
private static float GetMeshDepthAtPoint(IDepthDeviceCalibrationInfo calibration, Image depthImage, Point3D point, bool undistort)
private static float GetMeshDepthAtPoint(IDepthDeviceCalibrationInfo calibration, DepthImage depthImage, Point3D point, bool undistort)
{
Point2D depthSpacePoint = calibration.DepthIntrinsics.ToPixelSpace(point, undistort);

Просмотреть файл

@ -49,6 +49,15 @@ namespace Microsoft.Psi.Calibration
/// </summary>
Point2D PrincipalPoint { get; }
/// <summary>
/// Gets a value indicating whether the closed form equation of the Brown-Conrady Distortion model
/// distorts or undistorts. i.e. if true then:
/// Xdistorted = Xundistorted * (1+K1*R^2+K2*R^4+...
/// otherwise:
/// Xundistorted = Xdistorted * (1+K1*R^2+K2*R^4+...
/// </summary>
bool ClosedFormDistorts { get; }
/// <summary>
/// Gets the width of the camera's image (in pixels).
/// </summary>
@ -92,10 +101,10 @@ namespace Microsoft.Psi.Calibration
/// Newton's method is used to find the inverse of this. That is
/// Xd(n+1) = Xd(n) + J^-1 * F(Xd,Yd).
/// </summary>
/// <param name="undistortedPoint">The undistorted point in camera post-projection coordinates.</param>
/// <param name="distortedPoint">The distorted point.</param>
/// <returns>True if 'distortedPoint' contains the distorted point, or false if the algorithm did not converge.</returns>
bool DistortPoint(Point2D undistortedPoint, out Point2D distortedPoint);
/// <param name="undistortedPt">The undistorted point in camera post-projection coordinates.</param>
/// <param name="distortedPt">The distorted point.</param>
/// <returns>True if 'distortedPt' contains the distorted point, or false if the algorithm did not converge.</returns>
bool DistortPoint(Point2D undistortedPt, out Point2D distortedPt);
/// <summary>
/// Applies the camera's radial and tangential undistortion to the specified (distorted) point.
@ -110,8 +119,9 @@ namespace Microsoft.Psi.Calibration
/// T0,T1 - tangential distortion coefficients.
///
/// </summary>
/// <param name="distortedPoint">Distorted point in camera post-projection coordinates.</param>
/// <returns>Undistorted coordinates in camera post-projection coordinates.</returns>
Point2D UndistortPoint(Point2D distortedPoint);
/// <param name="distortedPt">Distorted point in camera post-projection coordinates.</param>
/// <param name="undistortedPt">Returns the undistorted point in camera post-projection coordinates.</param>
/// <returns>True if 'undistortedPoint' contains the undistorted point, or false if the algorithm did not converge.</returns>
bool UndistortPoint(Point2D distortedPt, out Point2D undistortedPt);
}
}

Просмотреть файл

@ -11,20 +11,30 @@ namespace Microsoft.Psi.Calibration
public interface IDepthDeviceCalibrationInfo
{
/// <summary>
/// Gets the extrinsics defining the color camera's position with respect to the depth camera.
/// Gets the extrinsics associated with the color camera, which describes how to transform points in world coordinates to color camera coordinates (world => camera).
/// </summary>
CoordinateSystem ColorExtrinsics { get; }
/// <summary>
/// Gets the pose of the color camera in the world, which is obtained by inverting the extrinsics matrix (camera => world).
/// </summary>
CoordinateSystem ColorPose { get; }
/// <summary>
/// Gets the intrinsics associated with the color camera.
/// </summary>
ICameraIntrinsics ColorIntrinsics { get; }
/// <summary>
/// Gets the extrinsics defining the depth camera's position in the world.
/// Gets the extrinsics associated with the depth camera, which describes how to transform points in world coordinates to depth camera coordinates (world => camera).
/// </summary>
CoordinateSystem DepthExtrinsics { get; }
/// <summary>
/// Gets the pose of the depth camera in the world, which is obtained by inverting the extrinsics matrix (camera => world).
/// </summary>
CoordinateSystem DepthPose { get; }
/// <summary>
/// Gets the intrinsics associated with the depth camera.
/// </summary>

Просмотреть файл

@ -1,15 +1,15 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license.
namespace Microsoft.Psi.Kinect
namespace Microsoft.Psi.Calibration
{
using System;
using System.IO;
using MathNet.Numerics.LinearAlgebra;
/// <summary>
/// Defines a class for performing Levenberg-Marquardt optimization.
/// </summary>
internal class LevenbergMarquardt
public class LevenbergMarquardt
{
private int maximumIterations = 100;
private double minimumReduction = 1.0e-5;
@ -18,9 +18,6 @@ namespace Microsoft.Psi.Kinect
private double initialLambda = 1.0e-3;
private Function function;
private Jacobian jacobianFunction;
private States state = States.Running;
private double rmsError;
private System.Diagnostics.Stopwatch stopWatch = new System.Diagnostics.Stopwatch();
/// <summary>
/// Initializes a new instance of the <see cref="LevenbergMarquardt"/> class.
@ -35,7 +32,7 @@ namespace Microsoft.Psi.Kinect
/// Initializes a new instance of the <see cref="LevenbergMarquardt"/> class.
/// </summary>
/// <param name="function">Cost function.</param>
/// <param name="jacobianFunction">Jacobian.</param>
/// <param name="jacobianFunction">Jacobian function.</param>
public LevenbergMarquardt(Function function, Jacobian jacobianFunction)
{
this.function = function;
@ -47,198 +44,116 @@ namespace Microsoft.Psi.Kinect
/// </summary>
/// <param name="parameters">Parameters.</param>
/// <returns>Matrix.</returns>
public delegate Matrix Function(Matrix parameters);
public delegate Vector<double> Function(Vector<double> parameters);
/// <summary>
/// J_ij, ith error from function, jth parameter.
/// </summary>
/// <param name="parameters">Parameters.</param>
/// <returns>Matrix.</returns>
public delegate Matrix Jacobian(Matrix parameters);
public delegate Matrix<double> Jacobian(Vector<double> parameters);
/// <summary>
/// States for optimization.
/// </summary>
public enum States
{
#pragma warning disable SA1602 // Enumeration items must be documented
/// <summary>
/// Running.
/// </summary>
Running,
/// <summary>
/// Maximum iterations.
/// </summary>
MaximumIterations,
/// <summary>
/// Lambda too large.
/// </summary>
LambdaTooLarge,
/// <summary>
/// Reduction step too small.
/// </summary>
ReductionStepTooSmall,
#pragma warning restore SA1602 // Enumeration items must be documented
}
/// <summary>
/// Gets the RMS error.
/// </summary>
public double RMSError
{
get { return this.rmsError; }
}
public double RMSError { get; private set; }
/// <summary>
/// Gets the optimization state.
/// </summary>
public States State
{
get { return this.state; }
}
/// <summary>
/// Performs unit test.
/// </summary>
public static void Test()
{
// generate x_i, y_i observations on test function
var random = new Random();
int n = 200;
var matX = new Matrix(n, 1);
var matY = new Matrix(n, 1);
double a = 100;
double b = 102;
for (int i = 0; i < n; i++)
{
double x = (random.NextDouble() / (Math.PI / 4.0)) - (Math.PI / 8.0);
double y = (a * Math.Cos(b * x)) + (b * Math.Sin(a * x)) + (random.NextDouble() * 0.1);
matX[i] = x;
matY[i] = y;
}
Function f = (Matrix parameters) =>
{
// return y_i - f(x_i, parameters) as column vector
var error = new Matrix(n, 1);
double a2 = parameters[0];
double b2 = parameters[1];
for (int i = 0; i < n; i++)
{
double y = (a2 * Math.Cos(b2 * matX[i])) + (b2 * Math.Sin(a2 * matX[i]));
error[i] = matY[i] - y;
}
return error;
};
var levenbergMarquardt = new LevenbergMarquardt(f);
var parameters0 = new Matrix(2, 1);
parameters0[0] = 90;
parameters0[1] = 96;
var rmsError = levenbergMarquardt.Minimize(parameters0);
}
public States State { get; private set; } = States.Running;
/// <summary>
/// Minimizes function.
/// </summary>
/// <param name="parameters">Parameters.</param>
/// <returns>Returns the RMS.</returns>
public double Minimize(Matrix parameters)
public double Minimize(Vector<double> parameters)
{
this.state = States.Running;
this.State = States.Running;
for (int iteration = 0; iteration < this.maximumIterations; iteration++)
{
this.MinimizeOneStep(parameters);
if (this.state != States.Running)
if (this.State != States.Running)
{
return this.RMSError;
}
}
this.state = States.MaximumIterations;
this.State = States.MaximumIterations;
return this.RMSError;
}
/// <summary>
/// Writes the specified matrix to the specified file.
/// </summary>
/// <param name="matA">Matrix to write.</param>
/// <param name="filename">Name of output file.</param>
public void WriteMatrixToFile(Matrix matA, string filename)
{
var file = new StreamWriter(filename);
for (int i = 0; i < matA.Rows; i++)
{
for (int j = 0; j < matA.Cols; j++)
{
file.Write(matA[i, j] + "\t");
}
file.WriteLine();
}
file.Close();
}
/// <summary>
/// Single step of the optimization.
/// </summary>
/// <param name="parameters">Parameters.</param>
/// <returns>Returns the error.</returns>
public double MinimizeOneStep(Matrix parameters)
public double MinimizeOneStep(Vector<double> parameters)
{
// initial value of the function; callee knows the size of the returned vector
var errorVector = this.function(parameters);
var error = errorVector.Dot(errorVector);
var error = errorVector.DotProduct(errorVector);
// Jacobian; callee knows the size of the returned matrix
var matJ = this.jacobianFunction(parameters);
// J'*J
var matJtJ = new Matrix(parameters.Size, parameters.Size);
// stopWatch.Restart();
// JtJ.MultATA(J, J); // this is the big calculation that could be parallelized
matJtJ.MultATAParallel(matJ, matJ);
// Console.WriteLine("JtJ: J size {0}x{1} {2}ms", J.Rows, J.Cols, stopWatch.ElapsedMilliseconds);
var matJtJ = matJ.TransposeThisAndMultiply(matJ);
// J'*error
var matJtError = new Matrix(parameters.Size, 1);
// stopWatch.Restart();
matJtError.MultATA(matJ, errorVector); // error vector must be a column vector
// Console.WriteLine("JtError: errorVector size {0}x{1} {2}ms", errorVector.Rows, errorVector.Cols, stopWatch.ElapsedMilliseconds);
var matJtError = matJ.TransposeThisAndMultiply(errorVector);
// allocate some space
var matJtJaugmented = new Matrix(parameters.Size, parameters.Size);
var matJtJinv = new Matrix(parameters.Size, parameters.Size);
var matDelta = new Matrix(parameters.Size, 1);
var matNewParameters = new Matrix(parameters.Size, 1);
var matJtJaugmented = Matrix<double>.Build.Dense(parameters.Count, parameters.Count);
// find a value of lambda that reduces error
double lambda = this.initialLambda;
while (true)
{
// augment J'*J: J'*J += lambda*(diag(J))
matJtJaugmented.Copy(matJtJ);
for (int i = 0; i < parameters.Size; i++)
matJtJ.CopyTo(matJtJaugmented);
for (int i = 0; i < parameters.Count; i++)
{
matJtJaugmented[i, i] = (1.0 + lambda) * matJtJ[i, i];
}
// WriteMatrixToFile(errorVector, "errorVector");
// WriteMatrixToFile(J, "J");
// WriteMatrixToFile(JtJaugmented, "JtJaugmented");
// WriteMatrixToFile(JtError, "JtError");
// solve for delta: (J'*J + lambda*(diag(J)))*delta = J'*error
matJtJinv.Inverse(matJtJaugmented);
matDelta.Mult(matJtJinv, matJtError);
var matJtJinv = matJtJaugmented.Inverse();
var matDelta = matJtJinv * matJtError;
// new parameters = parameters - delta [why not add?]
matNewParameters.Sub(parameters, matDelta);
var matNewParameters = parameters - matDelta;
// evaluate function, compute error
var newErrorVector = this.function(matNewParameters);
double newError = newErrorVector.Dot(newErrorVector);
double newError = newErrorVector.DotProduct(newErrorVector);
// if error is reduced, divide lambda by 10
bool improvement;
@ -255,20 +170,19 @@ namespace Microsoft.Psi.Kinect
// termination criteria:
// reduction in error is too small
var diff = new Matrix(errorVector.Size, 1);
diff.Sub(errorVector, newErrorVector);
double diffSq = diff.Dot(diff);
var diff = errorVector - newErrorVector;
double diffSq = diff.DotProduct(diff);
double errorDelta = Math.Sqrt(diffSq / error);
if (errorDelta < this.minimumReduction)
{
this.state = States.ReductionStepTooSmall;
this.State = States.ReductionStepTooSmall;
}
// lambda is too big
if (lambda > this.maximumLambda)
{
this.state = States.LambdaTooLarge;
this.State = States.LambdaTooLarge;
}
// change in parameters is too small [not implemented]
@ -276,20 +190,20 @@ namespace Microsoft.Psi.Kinect
// if we made an improvement, accept the new parameters
if (improvement)
{
parameters.Copy(matNewParameters);
matNewParameters.CopyTo(parameters);
error = newError;
break;
}
// if we meet termination criteria, break
if (this.state != States.Running)
if (this.State != States.Running)
{
break;
}
}
this.rmsError = Math.Sqrt(error / errorVector.Size);
return this.rmsError;
this.RMSError = Math.Sqrt(error / errorVector.Count);
return this.RMSError;
}
/// <summary>
@ -314,17 +228,17 @@ namespace Microsoft.Psi.Kinect
/// </summary>
/// <param name="parameters">Parameters.</param>
/// <returns>Returns Jacobian.</returns>
public Matrix Jacobian(Matrix parameters)
public Matrix<double> Jacobian(Vector<double> parameters)
{
const double deltaFactor = 1.0e-6;
const double minDelta = 1.0e-6;
// evaluate the function at the current solution
var errorVector0 = this.function(parameters);
var matJ = new Matrix(errorVector0.Size, parameters.Size);
var matJ = Matrix<double>.Build.Dense(errorVector0.Count, parameters.Count);
// vary each parameter
for (int j = 0; j < parameters.Size; j++)
for (int j = 0; j < parameters.Count; j++)
{
double parameterValue = parameters[j]; // save the original value
@ -338,9 +252,9 @@ namespace Microsoft.Psi.Kinect
// we only get error from function, but error(p + d) - error(p) = f(p + d) - f(p)
var errorVector = this.function(parameters);
errorVector.Sub(errorVector0);
errorVector -= errorVector0;
for (int i = 0; i < errorVector0.Rows; i++)
for (int i = 0; i < errorVector0.Count; i++)
{
matJ[i, j] = errorVector[i] / delta;
}

Просмотреть файл

@ -25,12 +25,19 @@
<AdditionalFiles Include="stylecop.json" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118" PrivateAssets="All" />
<PackageReference Include="MathNet.Numerics.Signed" Version="4.9.0" />
<PackageReference Include="MathNet.Spatial.Signed" Version="0.5.0" />
<PackageReference Include="MathNet.Numerics.Signed" Version="4.9.1" />
<PackageReference Include="MathNet.Spatial.Signed" Version="0.6.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Imaging\Microsoft.Psi.Imaging\Microsoft.Psi.Imaging.csproj" />
<ProjectReference Include="..\..\Runtime\Microsoft.Psi\Microsoft.Psi.csproj" />
</ItemGroup>
<ItemGroup>
<None Include="ThirdPartyNotices.txt" Pack="true" PackagePath="" />
</ItemGroup>
</Project>

Просмотреть файл

@ -132,14 +132,11 @@ namespace Microsoft.Psi.Calibration
/// Values are stored in column-major order and assumes column-vectors
/// (i.e. Matrix * Point versus Point * Matrix).
/// Units are millimeters.
/// OpenCV basis is asssumed here (Forward=Z, Right=X, Down=Y):
/// Z (forward)
/// /
/// /
/// +----> X (right)
/// |
/// |
/// Y (down).
/// MathNet basis is assumed here
/// Z ^ X
/// | /
/// |/
/// Y ----+.
/// </summary>
[XmlArray]
public double[] Extrinsics { get; set; }
@ -241,13 +238,7 @@ namespace Microsoft.Psi.Calibration
mtx.SetColumn(3, mtx.Column(3) / 1000.0);
mtx[3, 3] = 1;
// Extrinsics are stored in OpenCV basis, so convert here to MathNet basis.
var openCVBasis = new MathNet.Spatial.Euclidean.CoordinateSystem(
default,
MathNet.Spatial.Euclidean.UnitVector3D.ZAxis,
MathNet.Spatial.Euclidean.UnitVector3D.XAxis.Negate(),
MathNet.Spatial.Euclidean.UnitVector3D.YAxis.Negate());
return new MathNet.Spatial.Euclidean.CoordinateSystem(openCVBasis.Invert() * mtx.Inverse() * openCVBasis);
return new MathNet.Spatial.Euclidean.CoordinateSystem(mtx.Inverse());
}
}
}

Просмотреть файл

@ -15,7 +15,7 @@ namespace Microsoft.Psi.Calibration
/// Inputs are the depth image, list of 2D points from the color image, and the camera calibration.
/// Outputs the 3D points projected into the depth camera's coordinate system.
/// </remarks>
public sealed class ProjectTo3D : ConsumerProducer<(Shared<Image>, List<Point2D>, IDepthDeviceCalibrationInfo), List<Point3D>>
public sealed class ProjectTo3D : ConsumerProducer<(Shared<DepthImage>, List<Point2D>, IDepthDeviceCalibrationInfo), List<Point3D>>
{
/// <summary>
/// Initializes a new instance of the <see cref="ProjectTo3D"/> class.
@ -27,7 +27,7 @@ namespace Microsoft.Psi.Calibration
}
/// <inheritdoc/>
protected override void Receive((Shared<Image>, List<Point2D>, IDepthDeviceCalibrationInfo) data, Envelope e)
protected override void Receive((Shared<DepthImage>, List<Point2D>, IDepthDeviceCalibrationInfo) data, Envelope e)
{
var point2DList = data.Item2;
var depthImage = data.Item1;

Просмотреть файл

@ -0,0 +1,165 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license.
namespace Microsoft.Psi.Calibration
{
using System;
using MathNet.Numerics.LinearAlgebra;
/// <summary>
/// Provides extension methods for converting rotations between axis-angle
/// vector representation and 3x3 rotation-matrix representation.
/// </summary>
public static class RotationExtensions
{
    /// <summary>
    /// Use the Rodrigues formula for transforming a given rotation from axis-angle representation to a 3x3 matrix.
    /// Where 'r' is a rotation vector:
    /// theta = norm(r)
    /// M = skew(r/theta)
    /// R = I + M * sin(theta) + M*M * (1-cos(theta)).
    /// </summary>
    /// <param name="vectorRotation">Rotation in axis-angle vector representation,
    /// where the angle is represented by the length (L2-norm) of the vector.</param>
    /// <returns>Rotation in a 3x3 matrix representation.</returns>
    /// <exception cref="InvalidOperationException">Thrown when the input is not a 3-element vector.</exception>
    public static Matrix<double> AxisAngleToMatrix(Vector<double> vectorRotation)
    {
        if (vectorRotation.Count != 3)
        {
            throw new InvalidOperationException("The input must be a valid 3-element vector representing an axis-angle rotation.");
        }

        // The rotation angle is encoded as the length of the input vector.
        double theta = vectorRotation.L2Norm();

        var matR = Matrix<double>.Build.DenseIdentity(3, 3);

        // if there is no rotation (theta == 0) return identity rotation
        if (theta == 0)
        {
            return matR;
        }

        // Create a skew-symmetric matrix from the normalized axis vector
        var rn = vectorRotation.Normalize(2);
        var matM = Matrix<double>.Build.Dense(3, 3);
        matM[0, 0] = 0;
        matM[0, 1] = -rn[2];
        matM[0, 2] = rn[1];
        matM[1, 0] = rn[2];
        matM[1, 1] = 0;
        matM[1, 2] = -rn[0];
        matM[2, 0] = -rn[1];
        matM[2, 1] = rn[0];
        matM[2, 2] = 0;

        // Rodrigues formula: R = I + M * sin(theta) + M*M * (1 - cos(theta))
        var sinThetaM = matM * Math.Sin(theta);
        matR += sinThetaM;
        var matMM = matM * matM;
        var cosThetaMM = matMM * (1 - Math.Cos(theta));
        matR += cosThetaMM;
        return matR;
    }

    /// <summary>
    /// Convert a rotation matrix to axis-angle representation (a unit vector scaled by the angular distance to rotate).
    /// </summary>
    /// <param name="m">Input rotation matrix.</param>
    /// <returns>Same rotation in axis-angle representation (L2-Norm of the vector represents angular distance).</returns>
    /// <exception cref="InvalidOperationException">Thrown when the input is not a 3x3 matrix.</exception>
    public static Vector<double> MatrixToAxisAngle(Matrix<double> m)
    {
        if (m.RowCount != 3 || m.ColumnCount != 3)
        {
            throw new InvalidOperationException("The input must be a valid 3x3 rotation matrix in order to compute its axis-angle representation.");
        }

        // Tolerance used both for the "no rotation" check and for detecting the
        // 180-degree singularity below.
        double epsilon = 0.01;

        // theta = arccos((Trace(m) - 1) / 2)
        double angle = Math.Acos((m.Trace() - 1.0) / 2.0);

        // Create the axis vector.
        var v = Vector<double>.Build.Dense(3, 0);

        if (angle < epsilon)
        {
            // If the angular distance to rotate is 0 (or within epsilon of it),
            // we just return a vector of all zeroes.
            return v;
        }

        // Otherwise, the axis of rotation is extracted from the matrix as follows
        // (for a rotation matrix, m - m^T = 2*sin(theta)*skew(axis)).
        v[0] = m[2, 1] - m[1, 2];
        v[1] = m[0, 2] - m[2, 0];
        v[2] = m[1, 0] - m[0, 1];

        if (v.L2Norm() < epsilon)
        {
            // if the axis to rotate around has 0 length, we are in a singularity where the angle has to be 180 degrees.
            angle = Math.PI;

            // We can extract the axis of rotation, knowing that v*vT = (m + I) / 2;
            // First compute r = (m + I) / 2
            var r = Matrix<double>.Build.Dense(3, 3);
            m.CopyTo(r);
            r[0, 0] += 1;
            r[1, 1] += 1;
            r[2, 2] += 1;
            r /= 2.0;

            // r = v*vT =
            // | v_x*v_x, v_x*v_y, v_x*v_z |
            // | v_x*v_y, v_y*v_y, v_y*v_z |
            // | v_x*v_z, v_y*v_z, v_z*v_z |
            // Extract x, y, and z as the square roots of the diagonal elements.
            var x = Math.Sqrt(r[0, 0]);
            var y = Math.Sqrt(r[1, 1]);
            var z = Math.Sqrt(r[2, 2]);

            // Now we need to determine the signs of x, y, and z.
            // x is always assigned a positive sign; for a 180-degree rotation the
            // axis vectors v and -v describe the same rotation, so this appears to
            // be a canonicalization choice. The signs of y and z are then recovered
            // from the off-diagonal products x*y (r[0,1]) and x*z (r[0,2]).
            // NOTE(review): when x is (near) zero those products carry no sign
            // information and r[1,2] (= y*z) would be needed instead — confirm
            // whether axes with a zero x-component can reach this branch.
            double xsign;
            double ysign;
            double zsign;

            double xy = r[0, 1];
            double xz = r[0, 2];

            if (xy > 0)
            {
                if (xz > 0)
                {
                    xsign = 1;
                    ysign = 1;
                    zsign = 1;
                }
                else
                {
                    xsign = 1;
                    ysign = 1;
                    zsign = -1;
                }
            }
            else
            {
                if (xz > 0)
                {
                    xsign = 1;
                    ysign = -1;
                    zsign = 1;
                }
                else
                {
                    xsign = 1;
                    ysign = -1;
                    zsign = -1;
                }
            }

            v[0] = x * xsign;
            v[1] = y * ysign;
            v[2] = z * zsign;
        }

        // Return the unit rotation axis scaled by the rotation angle.
        return v.Normalize(2) * angle;
    }
}
}

Просмотреть файл

@ -0,0 +1,38 @@
Microsoft Platform for Situated Intelligence
THIRD-PARTY SOFTWARE NOTICES AND INFORMATION
Do Not Translate or Localize
This project is based on or incorporates material from the projects listed below (Third Party IP). The original copyright notice and the license under which Microsoft received such Third Party IP, are set forth below. Such licenses and notices are provided for informational purposes only. Microsoft licenses the Third Party IP to you under the licensing terms for the Microsoft Platform for Situated Intelligence product. Microsoft reserves all other rights not expressly granted under this agreement, whether by implication, estoppel or otherwise.
1. RoomAlive Toolkit (https://github.com/Microsoft/RoomAliveToolkit)
%% RoomAlive Toolkit NOTICES AND INFORMATION BEGIN HERE
=========================================
RoomAlive Toolkit
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF RoomAlive Toolkit NOTICES AND INFORMATION

Просмотреть файл

@ -5,9 +5,9 @@ namespace Test.Psi.Calibration
{
using System;
using System.IO;
using System.Net.Http.Headers;
using System.Runtime.InteropServices;
using MathNet.Numerics.LinearAlgebra;
using MathNet.Spatial.Euclidean;
using Microsoft.Psi.Calibration;
using Microsoft.VisualStudio.TestTools.UnitTesting;
@ -58,7 +58,11 @@ namespace Test.Psi.Calibration
public void TestDistortion()
{
// Create a checkerboard image
var img = Microsoft.Psi.Imaging.Image.Create(1024, 1024, Microsoft.Psi.Imaging.PixelFormat.BGR_24bpp);
bool useColor = false;
bool reverseDirection = true;
int width = useColor ? 1280 : 640;
int height = useColor ? 720 : 576;
var img = new Microsoft.Psi.Imaging.Image(width, height, Microsoft.Psi.Imaging.PixelFormat.BGR_24bpp);
unsafe
{
byte* row = (byte*)img.ImageData.ToPointer();
@ -69,15 +73,15 @@ namespace Test.Psi.Calibration
{
if ((i / 20 + j / 20) % 2 == 0)
{
col[0] = 255;
col[1] = 0;
col[0] = (byte)(255.0f * (float)j / (float)img.Width);
col[1] = (byte)(255.0f * (1.0f - (float)j / (float)img.Width));
col[2] = 0;
}
else
{
col[0] = 0;
col[1] = 0;
col[2] = 255;
col[1] = (byte)(255.0f * (float)i / (float)img.Height);
col[2] = (byte)(255.0f * (1.0f - (float)i / (float)img.Height));
}
col += img.BitsPerPixel / 8;
@ -90,19 +94,67 @@ namespace Test.Psi.Calibration
#endif // DUMP_IMAGES
}
double[] colorAzureDistortionCoefficients = new double[6]
{
0.609246314,
-2.84837151,
1.63566089,
0.483219713,
-2.66301942,
1.55776918,
};
double[] colorAzureTangentialCoefficients = new double[2]
{
-0.000216085638,
0.000744335062,
};
double[] colorIntrinsics = new double[4]
{
638.904968, // cx
350.822327, // cy
607.090698, // fx
607.030762, // fy
};
double[] depthIntrinsics = new double[4]
{
326.131775, // cx
324.755524, // cy
504.679749, // fx
504.865875, // fy
};
double[] depthAzureDistortionCoefficients = new double[6]
{
0.228193134,
-0.0650567561,
-0.000764187891,
0.568694472,
-0.0599768497,
-0.0119919786,
};
double[] depthAzureTangentialCoefficients = new double[2]
{
-9.04210319e-05,
-9.16166828e-05,
};
// Setup our distortion coefficients
double[] distortionCoefficients = new double[6] { 1.10156359448570129, -0.049757665717193485, -0.0018714899575029596, 0.0, 0.0, 0.0 };
double[] tangentialCoefficients = new double[2] { 0.0083588278483703853, 0.0 };
var distortionCoefficients = useColor ? colorAzureDistortionCoefficients : depthAzureDistortionCoefficients;
var tangentialCoefficients = useColor ? colorAzureTangentialCoefficients : depthAzureTangentialCoefficients;
// Next run distort on the image
var distortedImage = Microsoft.Psi.Imaging.Image.Create(img.Width, img.Height, img.PixelFormat);
var intrinsicMat = CreateMatrix.Dense<double>(3, 3, new double[9] { 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 });
var distortedImage = new Microsoft.Psi.Imaging.Image(img.Width, img.Height, img.PixelFormat);
double[] colorArray = new double[9] { colorIntrinsics[2], 0.0, 0.0, 0.0, colorIntrinsics[3], 0.0, colorIntrinsics[0], colorIntrinsics[1], 1.0, };
double[] depthArray = new double[9] { depthIntrinsics[2], 0.0, 0.0, 0.0, depthIntrinsics[3], 0.0, depthIntrinsics[0], depthIntrinsics[1], 1.0, };
var intrinsicMat = CreateMatrix.Dense<double>(3, 3, useColor ? colorArray : depthArray);
var ci = new CameraIntrinsics(
img.Width,
img.Height,
intrinsicMat,
Vector<double>.Build.DenseOfArray(distortionCoefficients),
Vector<double>.Build.DenseOfArray(tangentialCoefficients));
Vector<double>.Build.DenseOfArray(tangentialCoefficients),
reverseDirection);
unsafe
{
byte* dstrow = (byte*)distortedImage.ImageData.ToPointer();
@ -111,15 +163,18 @@ namespace Test.Psi.Calibration
byte* dstcol = dstrow;
for (int j = 0; j < distortedImage.Width; j++)
{
MathNet.Spatial.Euclidean.Point2D pixelCoord = new MathNet.Spatial.Euclidean.Point2D((i - 512.0) / 1024.0, (j - 512.0) / 1024.0);
MathNet.Spatial.Euclidean.Point2D distortedPixelCoord;
ci.DistortPoint(pixelCoord, out distortedPixelCoord);
MathNet.Spatial.Euclidean.Point2D pixelCoord = new MathNet.Spatial.Euclidean.Point2D(
((float)j - ci.PrincipalPoint.X) / ci.FocalLengthXY.X,
((float)i - ci.PrincipalPoint.Y) / ci.FocalLengthXY.Y);
int px = (int)(distortedPixelCoord.X * 1024.0 + 512.0);
int py = (int)(distortedPixelCoord.Y * 1024.0 + 512.0);
if (px >= 0 && px < img.Width && py >= 0 && py < img.Height)
Point2D undistortedPoint;
bool converged = ci.UndistortPoint(pixelCoord, out undistortedPoint);
int px = (int)(undistortedPoint.X * ci.FocalLengthXY.X + ci.PrincipalPoint.X);
int py = (int)(undistortedPoint.Y * ci.FocalLengthXY.Y + ci.PrincipalPoint.Y);
if (converged && px >= 0 && px < img.Width && py >= 0 && py < img.Height)
{
byte* src = (byte*)img.ImageData.ToPointer() + py * distortedImage.Stride + px * distortedImage.BitsPerPixel / 8;
byte* src = (byte*)img.ImageData.ToPointer() + py * img.Stride + px * img.BitsPerPixel / 8;
dstcol[0] = src[0];
dstcol[1] = src[1];
dstcol[2] = src[2];
@ -136,7 +191,7 @@ namespace Test.Psi.Calibration
}
// Finally run undistort on the result
var undistortedImage = Microsoft.Psi.Imaging.Image.Create(img.Width, img.Height, img.PixelFormat);
var undistortedImage = new Microsoft.Psi.Imaging.Image(img.Width, img.Height, img.PixelFormat);
unsafe
{
double err = 0.0;
@ -147,14 +202,16 @@ namespace Test.Psi.Calibration
byte* dstcol = dstrow;
for (int j = 0; j < undistortedImage.Width; j++)
{
MathNet.Spatial.Euclidean.Point2D pixelCoord = new MathNet.Spatial.Euclidean.Point2D((i - 512.0) / 1024.0, (j - 512.0) / 1024.0);
MathNet.Spatial.Euclidean.Point2D distortedPixelCoord;
MathNet.Spatial.Euclidean.Point2D pixelCoord = new MathNet.Spatial.Euclidean.Point2D(
((float)j - ci.PrincipalPoint.X) / ci.FocalLengthXY.X,
((float)i - ci.PrincipalPoint.Y) / ci.FocalLengthXY.Y);
MathNet.Spatial.Euclidean.Point2D distortedPixelCoord, undistortedPixelCoord;
ci.DistortPoint(pixelCoord, out distortedPixelCoord);
var undistortedPixelCoord = ci.UndistortPoint(distortedPixelCoord);
bool converged = ci.UndistortPoint(distortedPixelCoord, out undistortedPixelCoord);
int px = (int)(undistortedPixelCoord.X * 1024.0 + 512.0);
int py = (int)(undistortedPixelCoord.Y * 1024.0 + 512.0);
if (px >= 0 && px < img.Width && py >= 0 && py < img.Height)
int px = (int)(undistortedPixelCoord.X * ci.FocalLengthXY.X + ci.PrincipalPoint.X);
int py = (int)(undistortedPixelCoord.Y * ci.FocalLengthXY.Y + ci.PrincipalPoint.Y);
if (converged && px >= 0 && px < img.Width && py >= 0 && py < img.Height)
{
byte* src = (byte*)img.ImageData.ToPointer() + py * img.Stride + px * img.BitsPerPixel / 8;
dstcol[0] = src[0];

Просмотреть файл

@ -0,0 +1,65 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license.
namespace Test.Psi.Calibration
{
using System;
using MathNet.Numerics.LinearAlgebra;
using Microsoft.Psi.Calibration;
using Microsoft.VisualStudio.TestTools.UnitTesting;
/// <summary>
/// Tests for the <see cref="LevenbergMarquardt"/> optimizer.
/// </summary>
[TestClass]
public class LevenbergMarquardtTests
{
    /// <summary>
    /// Minimizes a nonlinear least-squares problem with known ground-truth
    /// parameters and verifies that the optimizer returns a usable RMS error.
    /// </summary>
    [TestMethod]
    [Timeout(60000)]
    public void TestOptimization()
    {
        // generate x_i, y_i observations on the test function
        // f(x) = a*cos(b*x) + b*sin(a*x), with uniform noise in [0, 0.1).
        // A fixed seed keeps the test deterministic across runs.
        var random = new Random(0);
        int n = 200;
        var matX = Vector<double>.Build.Dense(n);
        var matY = Vector<double>.Build.Dense(n);
        double a = 100;
        double b = 102;
        for (int i = 0; i < n; i++)
        {
            double x = (random.NextDouble() / (Math.PI / 4.0)) - (Math.PI / 8.0);
            double y = (a * Math.Cos(b * x)) + (b * Math.Sin(a * x)) + (random.NextDouble() * 0.1);
            matX[i] = x;
            matY[i] = y;
        }

        // Error function handed to the optimizer: residuals
        // y_i - f(x_i; parameters), returned as a column vector.
        LevenbergMarquardt.Function f = (Vector<double> parameters) =>
        {
            var error = Vector<double>.Build.Dense(n);
            double a2 = parameters[0];
            double b2 = parameters[1];
            for (int i = 0; i < n; i++)
            {
                double y = (a2 * Math.Cos(b2 * matX[i])) + (b2 * Math.Sin(a2 * matX[i]));
                error[i] = matY[i] - y;
            }

            return error;
        };

        // Start near (but not at) the ground-truth parameters and minimize.
        var levenbergMarquardt = new LevenbergMarquardt(f);
        var parameters0 = Vector<double>.Build.Dense(2);
        parameters0[0] = 90;
        parameters0[1] = 96;
        var rmsError = levenbergMarquardt.Minimize(parameters0);

        // The optimizer must produce a well-defined, non-negative RMS error
        // (previously this test computed the value but asserted nothing).
        Assert.IsFalse(double.IsNaN(rmsError) || double.IsInfinity(rmsError), "Minimize returned a non-finite RMS error.");
        Assert.IsTrue(rmsError >= 0, "RMS error must be non-negative.");
    }
}
}

Просмотреть файл

@ -2,7 +2,7 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp2.0</TargetFramework>
<TargetFramework>netcoreapp3.1</TargetFramework>
<IsPackable>false</IsPackable>
<Description>Calibration unit tests</Description>
<StartupObject>Test.Psi.Calibration.ConsoleMain</StartupObject>
@ -30,10 +30,10 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="15.9.2" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.6.1" />
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118" PrivateAssets="All" />
<PackageReference Include="MSTest.TestAdapter" Version="2.1.0" />
<PackageReference Include="MSTest.TestFramework" Version="2.1.0" />
<PackageReference Include="MSTest.TestAdapter" Version="2.1.1" />
<PackageReference Include="MSTest.TestFramework" Version="2.1.1" />
</ItemGroup>
<ItemGroup>

Просмотреть файл

@ -20,8 +20,6 @@
<PropertyGroup>
<AutoGenerateBindingRedirects>true</AutoGenerateBindingRedirects>
<GenerateBindingRedirectsOutputType>true</GenerateBindingRedirectsOutputType>
<DelaySign>false</DelaySign>
<Authors>Microsoft Corporation</Authors>
</PropertyGroup>
<ItemGroup>
@ -31,8 +29,12 @@
<AdditionalFiles Include="stylecop.json" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118" PrivateAssets="All" />
<PackageReference Include="MSTest.TestFramework" Version="2.1.0" />
<PackageReference Include="MSTest.TestFramework" Version="2.1.1" />
<PackageReference Update="NETStandard.Library" Version="2.0.3" />
</ItemGroup>
</Project>

Просмотреть файл

@ -195,7 +195,7 @@ namespace Test.Psi.Common
}
/// <summary>
/// Due to the runtime's asynchronous behaviour, we may try to
/// Due to the runtime's asynchronous behavior, we may try to
/// delete our test directory before the runtime has finished
/// messing with it. This method will keep trying to delete
/// the directory until the runtime shuts down.

Просмотреть файл

@ -21,10 +21,10 @@ namespace Microsoft.Psi.Data.Annotations
/// <param name="endTime">The end time of the annotated event.</param>
public AnnotatedEvent(DateTime startTime, DateTime endTime)
{
this.CheckArguments(startTime, endTime);
this.StartTime = startTime;
this.EndTime = endTime;
this.InternalAnnotations = new List<string>();
this.InitNew();
}
/// <summary>
@ -85,14 +85,11 @@ namespace Microsoft.Psi.Data.Annotations
}
/// <summary>
/// Overridable method to allow derived object to initialize properties as part of object construction or after deserialization.
/// Overridable method to allow derived object to initialize properties after deserialization.
/// </summary>
protected virtual void InitNew()
{
if (this.EndTime < this.StartTime)
{
throw new ArgumentException("startTime must preceed endTime.", "startTime");
}
this.CheckArguments(this.StartTime, this.EndTime);
}
[OnDeserialized]
@ -100,5 +97,13 @@ namespace Microsoft.Psi.Data.Annotations
{
this.InitNew();
}
private void CheckArguments(DateTime startTime, DateTime endTime)
{
if (endTime < startTime)
{
throw new ArgumentException("startTime must preceed endTime.", "startTime");
}
}
}
}

Просмотреть файл

@ -16,6 +16,7 @@ namespace Microsoft.Psi.Data.Annotations
private AnnotationPartition(Session session, string storeName, string storePath, string name)
: base(session, storeName, storePath, name, typeof(AnnotationSimpleReader))
{
this.Reader = new AnnotationSimpleReader(storeName, storePath);
}
private AnnotationPartition()
@ -36,7 +37,7 @@ namespace Microsoft.Psi.Data.Annotations
using (var writer = new AnnotationSimpleWriter(definition))
{
writer.CreateStore(storeName, storePath);
writer.CreateStream(new JsonStreamMetadata(definition.Name, 0, typeof(AnnotatedEvent).AssemblyQualifiedName, storeName, storePath), new List<Message<AnnotatedEvent>>());
writer.CreateStream(new JsonStreamMetadata(definition.Name, 0, typeof(AnnotatedEvent).AssemblyQualifiedName, null, storeName, storePath), new List<Message<AnnotatedEvent>>());
writer.WriteAll(ReplayDescriptor.ReplayAll);
}
@ -57,7 +58,7 @@ namespace Microsoft.Psi.Data.Annotations
}
/// <summary>
/// Overridable method to allow derived object to initialize properties as part of object construction or after deserialization.
/// Overridable method to allow derived object to initialize properties after deserialization.
/// </summary>
protected override void InitNew()
{

Просмотреть файл

@ -95,7 +95,7 @@ namespace Microsoft.Psi.Data.Annotations
/// <summary>
/// Removes an annotation schema value from the current annotation schema.
/// </summary>
/// <param name="schemaValue">The annotation schema value to remove from teh current annotation schema.</param>
/// <param name="schemaValue">The annotation schema value to remove from the current annotation schema.</param>
public void RemoveSchemaValue(AnnotationSchemaValue schemaValue)
{
this.InternalValues.Remove(schemaValue);

Просмотреть файл

@ -8,7 +8,7 @@ namespace Microsoft.Psi.Data.Annotations
using System.Runtime.Serialization;
/// <summary>
/// Provides a singleton resistry for annotation schemas.
/// Provides a singleton registry for annotation schemas.
/// </summary>
[DataContract(Namespace = "http://www.microsoft.com/psi")]
public class AnnotationSchemaRegistry
@ -22,7 +22,6 @@ namespace Microsoft.Psi.Data.Annotations
private AnnotationSchemaRegistry()
{
this.InternalSchemas = new List<AnnotationSchema>();
this.InitNew();
}
/// <summary>
@ -53,7 +52,7 @@ namespace Microsoft.Psi.Data.Annotations
}
/// <summary>
/// Overridable method to allow derived object to initialize properties as part of object construction or after deserialization.
/// Overridable method to allow derived object to initialize properties after deserialization.
/// </summary>
protected virtual void InitNew()
{

Просмотреть файл

@ -19,8 +19,9 @@ namespace Microsoft.Psi.Data.Annotations
/// <param name="name">The name of the application that generated the persisted files, or the root name of the files.</param>
/// <param name="path">The directory in which the main persisted file resides or will reside, or null to create a volatile data store.</param>
public AnnotationSimpleReader(string name, string path)
: base(name, path, AnnotationStoreCommon.DefaultExtension)
: this()
{
this.Reader = new AnnotationStoreReader(name, path);
}
/// <summary>
@ -36,7 +37,7 @@ namespace Microsoft.Psi.Data.Annotations
/// </summary>
/// <param name="that">Existing <see cref="AnnotationSimpleReader"/> used to initialize new instance.</param>
public AnnotationSimpleReader(AnnotationSimpleReader that)
: base(that)
: this(that.Name, that.Path)
{
}

Просмотреть файл

@ -22,9 +22,9 @@ namespace Microsoft.Psi.Data.Annotations
/// <param name="definition">The annotated event definition used to create and validate annotated events for this store.</param>
/// <param name="createSubdirectory">If true, a numbered subdirectory is created for this store.</param>
public AnnotationSimpleWriter(string name, string path, AnnotatedEventDefinition definition, bool createSubdirectory = true)
: base(name, path, createSubdirectory, AnnotationStoreCommon.DefaultExtension)
: this(definition)
{
this.definition = definition;
this.Writer = new AnnotationStoreWriter(name, path, definition, createSubdirectory);
}
/// <summary>

Просмотреть файл

@ -43,7 +43,7 @@ namespace Microsoft.Psi.Data
public string Name { get; set; }
/// <summary>
/// Gets the orginating time interval (earliest to latest) of the messages in this dataset.
/// Gets the originating time interval (earliest to latest) of the messages in this dataset.
/// </summary>
[IgnoreDataMember]
public TimeInterval OriginatingTimeInterval =>
@ -87,7 +87,7 @@ namespace Microsoft.Psi.Data
}
/// <summary>
/// Creates a new dataset from an exising data store.
/// Creates a new dataset from an existing data store.
/// </summary>
/// <param name="storeName">The name of the data store.</param>
/// <param name="storePath">The path of the data store.</param>
@ -227,7 +227,7 @@ namespace Microsoft.Psi.Data
/// <param name="outputStoreName">The name of the output data store. Default is null.</param>
/// <param name="outputStorePath">The path of the output data store. Default is null.</param>
/// <param name="replayDescriptor">The replay descriptor to us.</param>
/// <param name="cancellationToken">A token for cancelling the asynchronous task.</param>
/// <param name="cancellationToken">A token for canceling the asynchronous task.</param>
/// <returns>A task that represents the asynchronous operation.</returns>
public async Task CreateDerivedPartitionAsync(
Action<Pipeline, SessionImporter, Exporter> computeDerived,
@ -259,7 +259,7 @@ namespace Microsoft.Psi.Data
/// <param name="outputPathFunction">A function to determine output path from the given Session.</param>
/// <param name="replayDescriptor">The replay descriptor to us.</param>
/// <param name="progress">An object that can be used for reporting progress.</param>
/// <param name="cancellationToken">A token for cancelling the asynchronous task.</param>
/// <param name="cancellationToken">A token for canceling the asynchronous task.</param>
/// <returns>A task that represents the asynchronous operation.</returns>
public async Task CreateDerivedPartitionAsync(
Action<Pipeline, SessionImporter, Exporter> computeDerived,
@ -286,7 +286,7 @@ namespace Microsoft.Psi.Data
/// <summary>
/// Asynchronously computes a derived partition for each session in the dataset.
/// </summary>
/// <typeparam name="TParameter">The type of paramater passed to the action.</typeparam>
/// <typeparam name="TParameter">The type of parameter passed to the action.</typeparam>
/// <param name="computeDerived">The action to be invoked to derive partitions.</param>
/// <param name="parameter">The parameter to be passed to the action.</param>
/// <param name="outputPartitionName">The output partition name to be created.</param>
@ -295,7 +295,7 @@ namespace Microsoft.Psi.Data
/// <param name="outputPathFunction">A function to determine output path from the given Session.</param>
/// <param name="replayDescriptor">The replay descriptor to us.</param>
/// <param name="progress">An object that can be used for reporting progress.</param>
/// <param name="cancellationToken">A token for cancelling the asynchronous task.</param>
/// <param name="cancellationToken">A token for canceling the asynchronous task.</param>
/// <returns>A task that represents the asynchronous operation.</returns>
public async Task CreateDerivedPartitionAsync<TParameter>(
Action<Pipeline, SessionImporter, Exporter, TParameter> computeDerived,
@ -328,7 +328,7 @@ namespace Microsoft.Psi.Data
/// <summary>
/// Asynchronously computes a derived partition for each session in the dataset.
/// </summary>
/// <typeparam name="TParameter">The type of paramater passed to the action.</typeparam>
/// <typeparam name="TParameter">The type of parameter passed to the action.</typeparam>
/// <param name="computeDerived">The action to be invoked to derive partitions.</param>
/// <param name="parameter">The parameter to be passed to the action.</param>
/// <param name="outputPartitionName">The output partition name to be created.</param>
@ -336,7 +336,7 @@ namespace Microsoft.Psi.Data
/// <param name="outputStoreName">The name of the output data store. Default is null.</param>
/// <param name="outputStorePath">The path of the output data store. Default is null.</param>
/// <param name="replayDescriptor">The replay descriptor to us.</param>
/// <param name="cancellationToken">A token for cancelling the asynchronous task.</param>
/// <param name="cancellationToken">A token for canceling the asynchronous task.</param>
/// <returns>A task that represents the asynchronous operation.</returns>
public async Task CreateDerivedPartitionAsync<TParameter>(
Action<Pipeline, SessionImporter, Exporter, TParameter> computeDerived,
@ -372,7 +372,7 @@ namespace Microsoft.Psi.Data
/// Adds sessions from data stores located in the specified path.
/// </summary>
/// <param name="path">The path that contains the data stores.</param>
/// <param name="partitionName">The name of the partion to be added when adding a new session. Default is null.</param>
/// <param name="partitionName">The name of the partition to be added when adding a new session. Default is null.</param>
public void AddSessionsFromExistingStores(string path, string partitionName = null)
{
this.AddSessionsFromExistingStores(path, path, partitionName);

Просмотреть файл

@ -16,7 +16,7 @@ namespace Microsoft.Psi.Data
string Name { get; set; }
/// <summary>
/// Gets the orginating time interval (earliest to latest) of the messages in this partition.
/// Gets the originating time interval (earliest to latest) of the messages in this partition.
/// </summary>
TimeInterval OriginatingTimeInterval { get; }

Просмотреть файл

@ -69,6 +69,8 @@ namespace Microsoft.Psi.Data.Json
{
this.writer.Dispose();
}
this.throttle.Dispose();
}
/// <summary>

Просмотреть файл

@ -61,7 +61,7 @@ namespace Microsoft.Psi.Data.Json
public IEnumerable<IStreamMetadata> AvailableStreams => this.reader.AvailableStreams;
/// <summary>
/// Gets the orginating time interval (earliest to latest) of the messages in the underlying data store.
/// Gets the originating time interval (earliest to latest) of the messages in the underlying data store.
/// </summary>
public TimeInterval OriginatingTimeInterval => this.reader.OriginatingTimeInterval;
@ -90,7 +90,7 @@ namespace Microsoft.Psi.Data.Json
/// </summary>
/// <typeparam name="T">Type of data in underlying stream.</typeparam>
/// <param name="streamName">The name of the stream.</param>
/// <returns>The newly created emmitte that generates messages from the stream of type <typeparamref name="T"/>.</returns>
/// <returns>The newly created emitter that generates messages from the stream of type <typeparamref name="T"/>.</returns>
public Emitter<T> OpenStream<T>(string streamName)
{
// if stream already opened, return emitter

Просмотреть файл

@ -27,7 +27,7 @@ namespace Microsoft.Psi.Data.Json
public JsonSimpleReader(string name, string path, string extension = JsonStoreBase.DefaultExtension)
: this(extension)
{
this.OpenStore(name, path);
this.Reader = new JsonStoreReader(name, path, extension);
}
/// <summary>

Просмотреть файл

@ -28,7 +28,7 @@ namespace Microsoft.Psi.Data.Json
public JsonSimpleWriter(string name, string path, bool createSubdirectory = true, string extension = JsonStoreBase.DefaultExtension)
: this(extension)
{
this.CreateStore(name, path, createSubdirectory);
this.Writer = new JsonStoreWriter(name, path, createSubdirectory, extension);
}
/// <summary>

Просмотреть файл

@ -62,7 +62,7 @@ namespace Microsoft.Psi.Data.Json
public IEnumerable<JsonStreamMetadata> AvailableStreams => this.catalog;
/// <summary>
/// Gets the orginating time interval (earliest to latest) of the messages in the data store.
/// Gets the originating time interval (earliest to latest) of the messages in the data store.
/// </summary>
public TimeInterval OriginatingTimeInterval => this.originatingTimeInterval;
@ -113,6 +113,7 @@ namespace Microsoft.Psi.Data.Json
{
this.streamReader?.Dispose();
this.streamReader = null;
this.jsonReader?.Close();
this.jsonReader = null;
}

Просмотреть файл

@ -67,6 +67,7 @@ namespace Microsoft.Psi.Data.Json
this.jsonWriter.WriteEndArray();
this.streamWriter.Dispose();
this.streamWriter = null;
this.jsonWriter.Close();
this.jsonWriter = null;
}

Просмотреть файл

@ -18,7 +18,6 @@ namespace Microsoft.Psi.Data.Json
/// </summary>
public JsonStreamMetadata()
{
this.Reset();
}
/// <summary>
@ -26,15 +25,17 @@ namespace Microsoft.Psi.Data.Json
/// </summary>
/// <param name="name">The name of the stream the metadata represents.</param>
/// <param name="id">The id of the stream the metadata represents.</param>
/// <param name="typeName">The name of the type of data conatined in the stream the metadata represents.</param>
/// <param name="partitionName">The name of the partation where the stream is stored.</param>
/// <param name="partitionPath">The path of the partation where the stream is stored.</param>
public JsonStreamMetadata(string name, int id, string typeName, string partitionName, string partitionPath)
/// <param name="typeName">The name of the type of data contained in the stream the metadata represents.</param>
/// <param name="supplementalMetadataTypeName">The name of the type of supplemental metadata for the stream the metadata represents.</param>
/// <param name="partitionName">The name of the partition where the stream is stored.</param>
/// <param name="partitionPath">The path of the partition where the stream is stored.</param>
public JsonStreamMetadata(string name, int id, string typeName, string supplementalMetadataTypeName, string partitionName, string partitionPath)
: this()
{
this.Name = name;
this.Id = id;
this.TypeName = typeName;
this.SupplementalMetadataTypeName = supplementalMetadataTypeName;
this.PartitionName = partitionName;
this.PartitionPath = partitionPath;
}
@ -87,19 +88,9 @@ namespace Microsoft.Psi.Data.Json
[JsonProperty(Order = 12)]
public int MessageCount { get; set; }
/// <summary>
/// Reset statistics for this stream metadata instance.
/// </summary>
public virtual void Reset()
{
this.FirstMessageTime = default(DateTime);
this.LastMessageTime = default(DateTime);
this.FirstMessageOriginatingTime = default(DateTime);
this.LastMessageOriginatingTime = default(DateTime);
this.AverageMessageSize = 0;
this.AverageLatency = 0;
this.MessageCount = 0;
}
/// <inheritdoc />
[JsonProperty(Order = 13)]
public string SupplementalMetadataTypeName { get; set; }
/// <inheritdoc />
public void Update(Envelope envelope, int size)

Просмотреть файл

@ -29,6 +29,10 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118" PrivateAssets="All" />
<PackageReference Include="Newtonsoft.Json" Version="12.0.3" />
</ItemGroup>

Просмотреть файл

@ -33,7 +33,6 @@ namespace Microsoft.Psi.Data
this.StoreName = storeName;
this.StorePath = storePath;
this.Name = name ?? storeName;
this.InitNew();
}
/// <summary>
@ -100,7 +99,7 @@ namespace Microsoft.Psi.Data
public IEnumerable<IStreamMetadata> AvailableStreams => this.Reader?.AvailableStreams;
/// <summary>
/// Overridable method to allow derived object to initialize properties as part of object construction or after deserialization.
/// Overridable method to allow derived object to initialize properties after deserialization.
/// </summary>
protected virtual void InitNew()
{

Просмотреть файл

@ -71,7 +71,7 @@ namespace Microsoft.Psi.Data
}
/// <summary>
/// Gets the orginating time interval (earliest to latest) of the messages in this session.
/// Gets the originating time interval (earliest to latest) of the messages in this session.
/// </summary>
[IgnoreDataMember]
public TimeInterval OriginatingTimeInterval =>
@ -122,7 +122,7 @@ namespace Microsoft.Psi.Data
/// </summary>
/// <param name="storeName">The name of the annotation store.</param>
/// <param name="storePath">The path of the annotation store.</param>
/// <param name="definition">The annotated event definition to use when creating new annoted events in the newly created annotation partition.</param>
/// <param name="definition">The annotated event definition to use when creating new annotated events in the newly created annotation partition.</param>
/// <param name="partitionName">The partition name. Default is null.</param>
/// <returns>The newly added annotation partition.</returns>
public AnnotationPartition CreateAnnotationPartition(string storeName, string storePath, AnnotatedEventDefinition definition, string partitionName = null)
@ -135,7 +135,7 @@ namespace Microsoft.Psi.Data
/// <summary>
/// Asynchronously computes a derived partition for the session.
/// </summary>
/// <typeparam name="TParameter">The type of paramater passed to the action.</typeparam>
/// <typeparam name="TParameter">The type of parameter passed to the action.</typeparam>
/// <param name="computeDerived">The action to be invoked to derive partitions.</param>
/// <param name="parameter">The parameter to be passed to the action.</param>
/// <param name="outputPartitionName">The output partition name to be created.</param>
@ -144,7 +144,7 @@ namespace Microsoft.Psi.Data
/// <param name="outputPartitionPath">The path of the output partition. Default is null.</param>
/// <param name="replayDescriptor">The replay descriptor to use.</param>
/// <param name="progress">An object that can be used for reporting progress.</param>
/// <param name="cancellationToken">A token for cancelling the asynchronous task.</param>
/// <param name="cancellationToken">A token for canceling the asynchronous task.</param>
/// <returns>A task that represents the asynchronous operation.</returns>
public async Task CreateDerivedPartitionAsync<TParameter>(
Action<Pipeline, SessionImporter, Exporter, TParameter> computeDerived,
@ -274,7 +274,7 @@ namespace Microsoft.Psi.Data
}
/// <summary>
/// Due to the runtime's asynchronous behaviour, we may try to
/// Due to the runtime's asynchronous behavior, we may try to
/// delete our test directory before the runtime has finished
/// messing with it. This method will keep trying to delete
/// the directory until the runtime shuts down.

Просмотреть файл

@ -31,7 +31,7 @@ namespace Microsoft.Psi.Data
public string Name { get; private set; }
/// <summary>
/// Gets the orginating time interval (earliest to latest) of the messages in the session.
/// Gets the originating time interval (earliest to latest) of the messages in the session.
/// </summary>
public TimeInterval OriginatingTimeInterval { get; private set; }
@ -76,7 +76,7 @@ namespace Microsoft.Psi.Data
}
/// <summary>
/// Determines if a specicif importer contains the named stream.
/// Determines if a specific importer contains the named stream.
/// </summary>
/// <param name="partitionName">Partition name of the specific importer.</param>
/// <param name="streamName">The stream to search for.</param>

Просмотреть файл

@ -114,6 +114,18 @@ namespace Microsoft.Psi.Data
/// <returns>The metadata describing the specified stream.</returns>
public PsiStreamMetadata GetMetadata(string streamName) => this.reader.GetMetadata(streamName);
/// <summary>
/// Returns the supplemental metadata for a specified storage stream.
/// </summary>
/// <typeparam name="T">Type of supplemental metadata.</typeparam>
/// <param name="streamName">The name of the storage stream.</param>
/// <returns>The metadata associated with the storage stream.</returns>
public T GetSupplementalMetadata<T>(string streamName)
{
var meta = this.reader.GetMetadata(streamName);
return meta.GetSupplementalMetadata<T>(this.Serializers);
}
/// <summary>
/// Checks whether the specified storage stream exist in this store.
/// </summary>

Просмотреть файл

@ -15,6 +15,7 @@ namespace Microsoft.Psi.Data
private StorePartition(Session session, string storeName, string storePath, string name)
: base(session, storeName, storePath, name, typeof(SimpleReader))
{
this.Reader = new SimpleReader(storeName, storePath);
}
private StorePartition()
@ -40,7 +41,7 @@ namespace Microsoft.Psi.Data
}
/// <summary>
/// Creates a store partition from an exisitng data store.
/// Creates a store partition from an existing data store.
/// </summary>
/// <param name="session">The session that this partition belongs to.</param>
/// <param name="storeName">The store name of this partition.</param>
@ -60,7 +61,7 @@ namespace Microsoft.Psi.Data
}
/// <summary>
/// Overridable method to allow derived object to initialize properties as part of object construction or after deserialization.
/// Overridable method to allow derived object to initialize properties after deserialization.
/// </summary>
protected override void InitNew()
{

Просмотреть файл

@ -49,7 +49,7 @@ namespace Test.Psi.Data.Annotations
this.definition = new AnnotatedEventDefinition("Definition");
this.definition.AddSchema(this.booleanSchema);
this.metadata = new JsonStreamMetadata("Range", 1, typeof(AnnotatedEvent).AssemblyQualifiedName, this.name, this.path);
this.metadata = new JsonStreamMetadata("Range", 1, typeof(AnnotatedEvent).AssemblyQualifiedName, null, this.name, this.path);
}
/// <summary>

Просмотреть файл

@ -388,7 +388,7 @@ namespace Test.Psi.Data
var inputStream = importer.OpenStream<int>("Root");
var derivedStream = inputStream.Sample(TimeSpan.FromMinutes(1), RelativeTimeInterval.Infinite).Select(x => x * parameter).Write("DerivedStream", exporter);
// add a dummy source and propose a long time interval so that the operation will block (and eventually be cancelled)
// add a dummy source and propose a long time interval so that the operation will block (and eventually be canceled)
var generator = Generators.Repeat(pipeline, 0, int.MaxValue, TimeSpan.FromMilliseconds(1000));
var replayTimeInterval = TimeInterval.LeftBounded(importer.OriginatingTimeInterval.Left);
pipeline.ProposeReplayTime(replayTimeInterval);

Просмотреть файл

@ -117,8 +117,8 @@ namespace Test.Psi.Data
{
writer.CreateStore(StoreName, OutputPath, false);
IStreamMetadata metadata1 = new JsonStreamMetadata("Stream1", 1, TypeName, PartitionName, OutputPath);
IStreamMetadata metadata2 = new JsonStreamMetadata("Stream2", 2, TypeName, PartitionName, OutputPath);
IStreamMetadata metadata1 = new JsonStreamMetadata("Stream1", 1, TypeName, null, PartitionName, OutputPath);
IStreamMetadata metadata2 = new JsonStreamMetadata("Stream2", 2, TypeName, null, PartitionName, OutputPath);
List<Message<SimpleObject>> stream1 = new List<Message<SimpleObject>>();
stream1.Add(new Message<SimpleObject>(Data, FirstTime, FirstTime, 1, 0));
@ -134,7 +134,7 @@ namespace Test.Psi.Data
}
var escapedOutputPath = OutputPath.Replace(@"\", @"\\");
string expectedCatalog = "[{\"Name\":\"Stream1\",\"Id\":1,\"TypeName\":\"Test.Psi.Data.SimpleObject\",\"PartitionName\":\"JsonStore\",\"PartitionPath\":\"" + escapedOutputPath + "\",\"FirstMessageTime\":\"2017-11-01T09:15:30.12345Z\",\"LastMessageTime\":\"2017-11-01T09:15:34.12345Z\",\"FirstMessageOriginatingTime\":\"2017-11-01T09:15:30.12345Z\",\"LastMessageOriginatingTime\":\"2017-11-01T09:15:34.12345Z\",\"AverageMessageSize\":303,\"AverageLatency\":0,\"MessageCount\":2},{\"Name\":\"Stream2\",\"Id\":2,\"TypeName\":\"Test.Psi.Data.SimpleObject\",\"PartitionName\":\"JsonStore\",\"PartitionPath\":\"" + escapedOutputPath + "\",\"FirstMessageTime\":\"2017-11-01T09:15:30.12345Z\",\"LastMessageTime\":\"2017-11-01T09:15:34.12345Z\",\"FirstMessageOriginatingTime\":\"2017-11-01T09:15:30.12345Z\",\"LastMessageOriginatingTime\":\"2017-11-01T09:15:34.12345Z\",\"AverageMessageSize\":303,\"AverageLatency\":0,\"MessageCount\":2}]";
string expectedCatalog = "[{\"Name\":\"Stream1\",\"Id\":1,\"TypeName\":\"Test.Psi.Data.SimpleObject\",\"PartitionName\":\"JsonStore\",\"PartitionPath\":\"" + escapedOutputPath + "\",\"FirstMessageTime\":\"2017-11-01T09:15:30.12345Z\",\"LastMessageTime\":\"2017-11-01T09:15:34.12345Z\",\"FirstMessageOriginatingTime\":\"2017-11-01T09:15:30.12345Z\",\"LastMessageOriginatingTime\":\"2017-11-01T09:15:34.12345Z\",\"AverageMessageSize\":303,\"AverageLatency\":0,\"MessageCount\":2,\"SupplementalMetadataTypeName\":null},{\"Name\":\"Stream2\",\"Id\":2,\"TypeName\":\"Test.Psi.Data.SimpleObject\",\"PartitionName\":\"JsonStore\",\"PartitionPath\":\"" + escapedOutputPath + "\",\"FirstMessageTime\":\"2017-11-01T09:15:30.12345Z\",\"LastMessageTime\":\"2017-11-01T09:15:34.12345Z\",\"FirstMessageOriginatingTime\":\"2017-11-01T09:15:30.12345Z\",\"LastMessageOriginatingTime\":\"2017-11-01T09:15:34.12345Z\",\"AverageMessageSize\":303,\"AverageLatency\":0,\"MessageCount\":2,\"SupplementalMetadataTypeName\":null}]";
string expectedData = "[{\"Envelope\":{\"SourceId\":1,\"SequenceId\":0,\"OriginatingTime\":\"2017-11-01T09:15:30.12345Z\",\"Time\":\"2017-11-01T09:15:30.12345Z\"},\"Data\":{\"ArrayValue\":[0,1,2,3],\"BoolValue\":true,\"DateTimeValue\":\"2017-11-30T12:59:41.896745Z\",\"DoubleValue\":0.123456,\"IntValue\":123456,\"ListValue\":[4,5,6,7],\"StringValue\":\"abc\",\"TimeSpanValue\":\"01:02:03.4567890\"}},{\"Envelope\":{\"SourceId\":2,\"SequenceId\":0,\"OriginatingTime\":\"2017-11-01T09:15:30.12345Z\",\"Time\":\"2017-11-01T09:15:30.12345Z\"},\"Data\":{\"ArrayValue\":[0,1,2,3],\"BoolValue\":true,\"DateTimeValue\":\"2017-11-30T12:59:41.896745Z\",\"DoubleValue\":0.123456,\"IntValue\":123456,\"ListValue\":[4,5,6,7],\"StringValue\":\"abc\",\"TimeSpanValue\":\"01:02:03.4567890\"}},{\"Envelope\":{\"SourceId\":1,\"SequenceId\":1,\"OriginatingTime\":\"2017-11-01T09:15:34.12345Z\",\"Time\":\"2017-11-01T09:15:34.12345Z\"},\"Data\":{\"ArrayValue\":[0,1,2,3],\"BoolValue\":true,\"DateTimeValue\":\"2017-11-30T12:59:41.896745Z\",\"DoubleValue\":0.123456,\"IntValue\":123456,\"ListValue\":[4,5,6,7],\"StringValue\":\"abc\",\"TimeSpanValue\":\"01:02:03.4567890\"}},{\"Envelope\":{\"SourceId\":2,\"SequenceId\":1,\"OriginatingTime\":\"2017-11-01T09:15:34.12345Z\",\"Time\":\"2017-11-01T09:15:34.12345Z\"},\"Data\":{\"ArrayValue\":[0,1,2,3],\"BoolValue\":true,\"DateTimeValue\":\"2017-11-30T12:59:41.896745Z\",\"DoubleValue\":0.123456,\"IntValue\":123456,\"ListValue\":[4,5,6,7],\"StringValue\":\"abc\",\"TimeSpanValue\":\"01:02:03.4567890\"}}]";
string actualCatalog = string.Empty;
string actualData = string.Empty;

Просмотреть файл

@ -98,5 +98,30 @@ namespace Test.Psi.Data
Assert.AreEqual(result.Sum(x => x), probe * size);
}
}
[TestMethod]
[Timeout(60000)]
public void RetrieveStreamSupplementalMetadata()
{
var name = nameof(this.RetrieveStreamSupplementalMetadata);
// create store with supplemental meta
using (var p = Pipeline.Create("write"))
{
var store = Store.Create(p, name, this.path);
var stream0 = Generators.Range(p, 0, 10, TimeSpan.FromTicks(1));
var stream1 = Generators.Range(p, 0, 10, TimeSpan.FromTicks(1));
stream0.Write("NoMeta", store, true);
stream1.Write(("Favorite irrational number", Math.E), "WithMeta", store);
}
// read it back with a simple reader
var reader = new SimpleReader(name, this.path);
Assert.IsNull(reader.GetMetadata("NoMeta").SupplementalMetadataTypeName);
Assert.AreEqual(typeof(ValueTuple<string, double>).AssemblyQualifiedName, reader.GetMetadata("WithMeta").SupplementalMetadataTypeName);
var supplemental1 = reader.GetSupplementalMetadata<(string, double)>("WithMeta");
Assert.AreEqual("Favorite irrational number", supplemental1.Item1);
Assert.AreEqual(Math.E, supplemental1.Item2);
}
}
}

Просмотреть файл

@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netcoreapp2.0</TargetFramework>
<TargetFramework>netcoreapp3.1</TargetFramework>
<IsPackable>false</IsPackable>
<StartupObject>Test.Psi.Data.ConsoleMain</StartupObject>
<ApplicationIcon />
@ -31,10 +31,14 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="15.9.2" />
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.6.1" />
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118" PrivateAssets="All" />
<PackageReference Include="MSTest.TestAdapter" Version="2.1.0" />
<PackageReference Include="MSTest.TestFramework" Version="2.1.0" />
<PackageReference Include="MSTest.TestAdapter" Version="2.1.1" />
<PackageReference Include="MSTest.TestFramework" Version="2.1.1" />
</ItemGroup>
<ItemGroup>

Просмотреть файл

@ -48,6 +48,11 @@ namespace Microsoft.Psi.DeviceManagement
/// </summary>
public string DeviceName { get; set; }
/// <summary>
/// Gets or sets the device id (index of device from the system's perspective).
/// </summary>
public int DeviceId { get; set; }
/// <summary>
/// Gets or sets the serial number for this device. May be an empty string.
/// </summary>

Просмотреть файл

@ -31,6 +31,10 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.CodeAnalysis.FxCopAnalyzers" Version="2.9.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="StyleCop.Analyzers" Version="1.1.118" PrivateAssets="All" />
<PackageReference Update="NETStandard.Library" Version="2.0.3" />
</ItemGroup>

Просмотреть файл

@ -0,0 +1,67 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license.
namespace Microsoft.Psi.Imaging
{
using Microsoft.Psi.Common;
using Microsoft.Psi.Serialization;
/// <summary>
/// Implements a compressor used by the serialization layer for compressing streams
/// of depth images in a generic fashion. This object should not be called directly,
/// but instead is used by the <see cref="DepthImage.CustomSerializer"/> class.
/// </summary>
public class DepthImageCompressor : IDepthImageCompressor
{
    // Remains null when the compression method is None (or any unrecognized
    // value); in that case Serialize writes the depth image uncompressed.
    private readonly IDepthImageToStreamEncoder encoder = null;

    // Always constructed, regardless of the compression method.
    private readonly IDepthImageFromStreamDecoder decoder = null;

    /// <summary>
    /// Initializes a new instance of the <see cref="DepthImageCompressor"/> class.
    /// </summary>
    /// <param name="depthCompressionMethod">The depth compression method to be used.</param>
    public DepthImageCompressor(DepthCompressionMethod depthCompressionMethod)
    {
        this.DepthCompressionMethod = depthCompressionMethod;
        switch (this.DepthCompressionMethod)
        {
            case DepthCompressionMethod.Png:
                this.encoder = new DepthImageToPngStreamEncoder();
                break;
            case DepthCompressionMethod.None:
                // No encoder: depth images will be serialized uncompressed.
                break;
        }

        this.decoder = new DepthImageFromStreamDecoder();
    }

    /// <inheritdoc/>
    // NOTE(review): the public setter changes only this property value; it does
    // not re-create the encoder chosen in the constructor, so setting it after
    // construction has no effect on how images are serialized - confirm intended.
    public DepthCompressionMethod DepthCompressionMethod { get; set; } = DepthCompressionMethod.Png;

    /// <inheritdoc/>
    public void Serialize(BufferWriter writer, DepthImage depthImage, SerializationContext context)
    {
        if (this.encoder != null)
        {
            // Encode into a pooled encoded depth image, then serialize the encoded form.
            using var sharedEncodedDepthImage = EncodedDepthImagePool.GetOrCreate(depthImage.Width, depthImage.Height);
            sharedEncodedDepthImage.Resource.EncodeFrom(depthImage, this.encoder);
            Serializer.Serialize(writer, sharedEncodedDepthImage, context);
        }
        else
        {
            // No encoder configured (DepthCompressionMethod.None): serialize uncompressed.
            Serializer.Serialize(writer, depthImage, context);
        }
    }

    /// <inheritdoc/>
    public void Deserialize(BufferReader reader, ref DepthImage depthImage, SerializationContext context)
    {
        // NOTE(review): this path always reads a Shared<EncodedDepthImage>, which
        // is asymmetric with Serialize when DepthCompressionMethod is None (a raw
        // DepthImage is written in that case) - confirm that None-compressed
        // stores are read through a different path.
        Shared<EncodedDepthImage> sharedEncodedDepthImage = null;
        Serializer.Deserialize(reader, ref sharedEncodedDepthImage, context);
        using var sharedDepthImage = DepthImagePool.GetOrCreate(sharedEncodedDepthImage.Resource.Width, sharedEncodedDepthImage.Resource.Height);
        sharedDepthImage.Resource.DecodeFrom(sharedEncodedDepthImage.Resource, this.decoder);

        // Deep-clone so the caller owns the result independently of the pooled image.
        depthImage = sharedDepthImage.Resource.DeepClone();
        sharedEncodedDepthImage.Dispose();
    }
}
}

Просмотреть файл

@ -0,0 +1,22 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license.
namespace Microsoft.Psi.Imaging
{
using System.IO;
using System.Runtime.InteropServices;
using SkiaSharp;
/// <summary>
/// Implements a depth image decoder that decodes an encoded image stream
/// (any format supported by SkiaSharp) into an existing <see cref="DepthImage"/>.
/// </summary>
public class DepthImageFromStreamDecoder : IDepthImageFromStreamDecoder
{
    /// <inheritdoc/>
    public void DecodeFromStream(Stream stream, DepthImage depthImage)
    {
        // SKBitmap wraps native pixel memory and is IDisposable; dispose it
        // deterministically (the original chained call leaked the bitmap).
        using var decoded = SKBitmap.Decode(stream);

        // Copy the decoded pixel bytes into the target depth image buffer.
        // NOTE(review): assumes depthImage was allocated large enough to hold
        // decoded.ByteCount bytes - confirm callers guarantee matching dimensions.
        Marshal.Copy(decoded.Bytes, 0, depthImage.ImageData, decoded.ByteCount);
    }
}
}

Просмотреть файл

@ -0,0 +1,21 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license.
namespace Microsoft.Psi.Imaging
{
using System;
using System.IO;
using SkiaSharp;
/// <summary>
/// Implements a depth image encoder for PNG format.
/// </summary>
public class DepthImageToPngStreamEncoder : IDepthImageToStreamEncoder
{
    /// <inheritdoc/>
    public void EncodeToStream(DepthImage depthImage, Stream stream)
    {
        // SKImage and SKData are IDisposable wrappers over native memory;
        // dispose both deterministically (the original chained the calls and
        // leaked the intermediate image and encoded data objects).
        using var image = depthImage.AsSKImage();

        // The quality argument (100) is ignored for PNG, which is lossless,
        // but is required by the Encode API.
        using var data = image.Encode(SKEncodedImageFormat.Png, 100);
        data.SaveTo(stream);
    }
}
}

Просмотреть файл

@ -1,20 +0,0 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license.
namespace Microsoft.Psi.Imaging
{
using System.IO;
/// <summary>
/// Defines an encoder that writes an <see cref="Image"/> to a stream.
/// </summary>
public interface IBitmapEncoder
{
    /// <summary>
    /// Encodes the given image and writes the encoded bytes to the given stream.
    /// </summary>
    /// <param name="image">The image to encode.</param>
    /// <param name="stream">The stream to which the encoded image is written.</param>
    void Encode(Image image, Stream stream);
}

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше