This commit is contained in:
Amrutha Srinivasan 2021-11-03 13:54:09 -07:00
Родитель 48c177e51b
Коммит ddc5ace7c4
7 изменённых файлов: 165 добавлений и 146 удалений

Просмотреть файл

@ -9,24 +9,29 @@ using Windows.Graphics.Imaging;
using Windows.Media;
using Windows.Media.FaceAnalysis;
using Windows.Storage;
using Windows.Storage.Streams;
namespace CommunityToolkit.Labs.Intelligent.EmotionRecognition
{
public class DetectedEmotion
{
public int emotionIndex;
public string emotion;
}
public class EmotionRecognizer
{
private LearningModel _model = null;
private LearningModelSession _session = null;
int happinessEmotionIndex;
private LearningModelBinding _binding = null;
FaceDetector faceDetector;
private static EmotionRecognizer instance = null;
List<string> labels;
private static List<string> labels;
private async Task LoadModelAsync()
private async void InitModelAsync()
{
// load model file
var file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/model_emotion.onnx"));
var file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///IntelligentAPI_EmotionRecognizer/Assets/model_emotion.onnx"));
//Loads the model from the file
_model = await LearningModel.LoadFromStorageFileAsync(file);
@ -48,7 +53,6 @@ namespace CommunityToolkit.Labs.Intelligent.EmotionRecognition
"Fear",
"Contempt"
};
happinessEmotionIndex = 1; //happiness
}
LearningModelDeviceKind GetDeviceKind()
@ -56,14 +60,96 @@ namespace CommunityToolkit.Labs.Intelligent.EmotionRecognition
return LearningModelDeviceKind.Cpu;
}
private async Task<IList<DetectedFace>> DetectFacesInImageAsync(SoftwareBitmap bitmap)
private async static Task<IList<DetectedFace>> DetectFacesInImageAsync(SoftwareBitmap bitmap)
{
faceDetector = await FaceDetector.CreateAsync();
FaceDetector faceDetector = await FaceDetector.CreateAsync();
var convertedBitmap = SoftwareBitmap.Convert(bitmap, BitmapPixelFormat.Gray8);
return await faceDetector.DetectFacesAsync(convertedBitmap);
}
/// <summary>
/// Detects the dominant emotion in the supplied image, lazily creating the
/// shared <see cref="EmotionRecognizer"/> on first use.
/// </summary>
/// <param name="bitmap">Image to analyze.</param>
/// <returns>The detected emotion, or null when no face is found.</returns>
public async static Task<DetectedEmotion> DetectEmotion(SoftwareBitmap bitmap)
{
    // Lazy, non-thread-safe singleton initialization (same contract as before).
    instance = instance ?? new EmotionRecognizer();
    return await instance.EvaluateFrame(bitmap);
}
/// <summary>
/// Loads the model/labels, finds the first face in the frame, and evaluates
/// its emotion. Returns null when no face is detected.
/// </summary>
public async Task<DetectedEmotion> EvaluateFrame(SoftwareBitmap softwareBitmap)
{
// NOTE(review): InitModelAsync appears to be 'async void' and is NOT awaited,
// so _model/_session may still be loading when evaluation runs below —
// consider making it Task-returning and awaiting it; confirm intended timing.
// NOTE(review): model + labels are (re)initialized on every frame — presumably
// this should happen once; verify before changing.
InitModelAsync();
LoadLabels();
DetectedFace detectedFace = await DetectFace(softwareBitmap);
if (detectedFace != null)
{
return await EvaluateEmotionInFace(detectedFace, softwareBitmap);
}
// No face found in the frame.
return null;
}
/// <summary>
/// Crops the given face region out of the frame, runs the emotion model on it,
/// and returns the highest-scoring emotion with its label.
/// </summary>
/// <param name="detectedFace">Face whose bounding box will be evaluated.</param>
/// <param name="softwareBitmap">Full frame containing the face.</param>
public async Task<DetectedEmotion> EvaluateEmotionInFace(DetectedFace detectedFace, SoftwareBitmap softwareBitmap)
{
    var faceBox = detectedFace.FaceBox;
    var boundingBox = new Rect(faceBox.X, faceBox.Y, faceBox.Width, faceBox.Height);

    // Model input expects BGRA8; convert before cropping.
    softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8);
    var croppedFace = await Crop(softwareBitmap, boundingBox);

    LearningModelEvaluationResult emotionResults = await BindAndEvaluateModelAsync(croppedFace);

    // Raw scores only — a softmax would be needed for percentages, but the
    // argmax is the same either way.
    TensorFloat scoreTensor = emotionResults.Outputs["Plus692_Output_0"] as TensorFloat;
    var scores = scoreTensor.GetAsVectorView().ToList();
    var bestIndex = scores.IndexOf(scores.Max());

    return new DetectedEmotion() { emotionIndex = bestIndex, emotion = labels[bestIndex] };
}
/// <summary>
/// Runs face detection on the frame and returns the first detected face,
/// or null when the frame contains no face.
/// </summary>
private static async Task<DetectedFace> DetectFace(SoftwareBitmap softwareBitmap)
{
    IList<DetectedFace> faces = await DetectFacesInImageAsync(softwareBitmap);
    return faces.FirstOrDefault();
}
/// <summary>
/// Crops a <see cref="SoftwareBitmap"/> to the given bounds by round-tripping
/// through a <see cref="VideoFrame"/>.
/// </summary>
public static async Task<SoftwareBitmap> Crop(SoftwareBitmap softwareBitmap, Rect bounds)
{
    var frame = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);
    var cropped = await Crop(frame, bounds);
    return cropped.SoftwareBitmap;
}
/// <summary>
/// Copies the region described by <paramref name="bounds"/> out of the frame
/// into a new premultiplied BGRA8 <see cref="VideoFrame"/>.
/// </summary>
public static async Task<VideoFrame> Crop(VideoFrame videoFrame, Rect bounds)
{
    // Rect uses doubles; BitmapBounds wants unsigned integers.
    var cropRegion = new BitmapBounds()
    {
        X = (uint)bounds.X,
        Y = (uint)bounds.Y,
        Width = (uint)bounds.Width,
        Height = (uint)bounds.Height
    };

    var destination = new VideoFrame(BitmapPixelFormat.Bgra8,
                                     (int)cropRegion.Width,
                                     (int)cropRegion.Height,
                                     BitmapAlphaMode.Premultiplied);

    await videoFrame.CopyToAsync(destination, cropRegion, null);
    return destination;
}
private async Task<LearningModelEvaluationResult> BindAndEvaluateModelAsync(SoftwareBitmap croppedFace)
{

Просмотреть файл

@ -1,138 +1,24 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="15.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<Project Sdk="MSBuild.Sdk.Extras">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{6dc9254b-b4d9-4a30-92c9-8407ef653617}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>IntelligentAPI_EmotionRecognizer</RootNamespace>
<AssemblyName>IntelligentAPI_EmotionRecognizer</AssemblyName>
<DefaultLanguage>en-US</DefaultLanguage>
<TargetPlatformIdentifier>UAP</TargetPlatformIdentifier>
<TargetPlatformVersion Condition=" '$(TargetPlatformVersion)' == '' ">10.0.19041.0</TargetPlatformVersion>
<TargetPlatformMinVersion>10.0.17763.0</TargetPlatformMinVersion>
<MinimumVisualStudioVersion>14</MinimumVisualStudioVersion>
<FileAlignment>512</FileAlignment>
<ProjectTypeGuids>{A5A43C5B-DE2A-4C0C-9213-0A381AF9435A};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x86'">
<PlatformTarget>x86</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<OutputPath>bin\x86\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<NoWarn>;2008</NoWarn>
<DebugType>full</DebugType>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x86'">
<PlatformTarget>x86</PlatformTarget>
<OutputPath>bin\x86\Release\</OutputPath>
<DefineConstants>TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<Optimize>true</Optimize>
<NoWarn>;2008</NoWarn>
<DebugType>pdbonly</DebugType>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|ARM'">
<PlatformTarget>ARM</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<OutputPath>bin\ARM\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<NoWarn>;2008</NoWarn>
<DebugType>full</DebugType>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|ARM'">
<PlatformTarget>ARM</PlatformTarget>
<OutputPath>bin\ARM\Release\</OutputPath>
<DefineConstants>TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<Optimize>true</Optimize>
<NoWarn>;2008</NoWarn>
<DebugType>pdbonly</DebugType>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|ARM64'">
<PlatformTarget>ARM64</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<OutputPath>bin\ARM64\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<NoWarn>;2008</NoWarn>
<DebugType>full</DebugType>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|ARM64'">
<PlatformTarget>ARM64</PlatformTarget>
<OutputPath>bin\ARM64\Release\</OutputPath>
<DefineConstants>TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<Optimize>true</Optimize>
<NoWarn>;2008</NoWarn>
<DebugType>pdbonly</DebugType>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x64'">
<PlatformTarget>x64</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<OutputPath>bin\x64\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<NoWarn>;2008</NoWarn>
<DebugType>full</DebugType>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x64'">
<PlatformTarget>x64</PlatformTarget>
<OutputPath>bin\x64\Release\</OutputPath>
<DefineConstants>TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<Optimize>true</Optimize>
<NoWarn>;2008</NoWarn>
<DebugType>pdbonly</DebugType>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
<RootNamespace>IntelligentAPI.EmotionRecognition</RootNamespace>
</PropertyGroup>
<PropertyGroup>
<RestoreProjectStyle>PackageReference</RestoreProjectStyle>
<PackageId>CommunityToolkit.Labs.Intelligent.EmotionRecognition</PackageId>
<Description>
This package performs Emotion Recognition on an input image by using the Emotion FERPlus model.
</Description>
<Version>0.0.1</Version>
</PropertyGroup>
<ItemGroup>
<Compile Include="Class1.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<EmbeddedResource Include="Properties\IntelligentAPI_EmotionRecognizer.rd.xml" />
</ItemGroup>
<PropertyGroup Condition=" '$(VisualStudioVersion)' == '' or '$(VisualStudioVersion)' &lt; '14.0' ">
<VisualStudioVersion>14.0</VisualStudioVersion>
</PropertyGroup>
<Import Project="$(MSBuildExtensionsPath)\Microsoft\WindowsXaml\v$(VisualStudioVersion)\Microsoft.Windows.UI.Xaml.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
<ItemGroup>
<Content Include="Assets\model_emotion.onnx" Pack="True" PackagePath="lib/uap10.0.17763/Assets"/>
</ItemGroup>
<ItemGroup>
<None Include="ImageClassifier.licenseheader" />
</ItemGroup>
<Target Name="CustomBeforeBuild" BeforeTargets="BeforeBuild">
<Exec Command="powershell.exe –NonInteractive –ExecutionPolicy Unrestricted -command &quot;&amp; {.\Scripts\script.ps1 }&quot;" />
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>

Просмотреть файл

@ -5,12 +5,6 @@ using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("IntelligentAPI_EmotionRecognizer")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("IntelligentAPI_EmotionRecognizer")]
[assembly: AssemblyCopyright("Copyright © 2021")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
@ -24,6 +18,4 @@ using System.Runtime.InteropServices;
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
[assembly: ComVisible(false)]

Просмотреть файл

@ -0,0 +1,15 @@
# Downloads the Emotion FER+ ONNX model into ./Assets if it is not already present.
# Suppress the progress bar; it slows Invoke-WebRequest down considerably.
$ProgressPreference = 'SilentlyContinue'
$emotionferplusfile = "./Assets/model_emotion.onnx"
if (-not(Test-Path -Path $emotionferplusfile -PathType Leaf)) {
    try {
        Invoke-WebRequest -URI "https://github.com/onnx/models/raw/master/vision/body_analysis/emotion_ferplus/model/emotion-ferplus-8.onnx" -OutFile $emotionferplusfile
        Write-Host "The file [$emotionferplusfile] has been created."
    }
    catch {
        # Rethrow the original error record instead of a bare message string,
        # preserving the exception type and stack information for the caller.
        throw
    }
}
else {
    Write-Host "The file [$emotionferplusfile] exists."
}

Просмотреть файл

@ -9,6 +9,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "IntelligentAPI_ImageClassif
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "IntelligentLabsTest", "IntelligentAPIsTester\IntelligentLabsTest.csproj", "{47D87733-B357-4706-88BD-211FB7D8679D}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "IntelligentAPI_EmotionRecognizer", "IntelligentAPI_EmotionRecognizer\IntelligentAPI_EmotionRecognizer.csproj", "{2AAF76B9-2B93-4932-909C-09BBF54731FC}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@ -123,6 +125,36 @@ Global
{47D87733-B357-4706-88BD-211FB7D8679D}.Release|x86.ActiveCfg = Release|x86
{47D87733-B357-4706-88BD-211FB7D8679D}.Release|x86.Build.0 = Release|x86
{47D87733-B357-4706-88BD-211FB7D8679D}.Release|x86.Deploy.0 = Release|x86
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Debug|Any CPU.Build.0 = Debug|Any CPU
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Debug|ARM.ActiveCfg = Debug|ARM
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Debug|ARM.Build.0 = Debug|ARM
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Debug|ARM64.ActiveCfg = Debug|ARM64
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Debug|ARM64.Build.0 = Debug|ARM64
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Debug|x64.ActiveCfg = Debug|x64
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Debug|x64.Build.0 = Debug|x64
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Debug|x86.ActiveCfg = Debug|x86
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Debug|x86.Build.0 = Debug|x86
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Packages|Any CPU.ActiveCfg = Debug|Any CPU
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Packages|Any CPU.Build.0 = Debug|Any CPU
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Packages|ARM.ActiveCfg = Debug|ARM
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Packages|ARM.Build.0 = Debug|ARM
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Packages|ARM64.ActiveCfg = Debug|ARM64
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Packages|ARM64.Build.0 = Debug|ARM64
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Packages|x64.ActiveCfg = Debug|x64
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Packages|x64.Build.0 = Debug|x64
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Packages|x86.ActiveCfg = Debug|x86
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Packages|x86.Build.0 = Debug|x86
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Release|Any CPU.ActiveCfg = Release|Any CPU
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Release|Any CPU.Build.0 = Release|Any CPU
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Release|ARM.ActiveCfg = Release|ARM
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Release|ARM.Build.0 = Release|ARM
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Release|ARM64.ActiveCfg = Release|ARM64
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Release|ARM64.Build.0 = Release|ARM64
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Release|x64.ActiveCfg = Release|x64
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Release|x64.Build.0 = Release|x64
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Release|x86.ActiveCfg = Release|x86
{2AAF76B9-2B93-4932-909C-09BBF54731FC}.Release|x86.Build.0 = Release|x86
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE

Просмотреть файл

@ -170,6 +170,10 @@
<Project>{f705b499-d0b5-408c-b1d1-d3f379d0fcd6}</Project>
<Name>IntelligentAPI_ImageClassifier</Name>
</ProjectReference>
<ProjectReference Include="..\IntelligentAPI_EmotionRecognizer\IntelligentAPI_EmotionRecognizer.csproj">
<Project>{2aaf76b9-2b93-4932-909c-09bbf54731fc}</Project>
<Name>IntelligentAPI_EmotionRecognizer</Name>
</ProjectReference>
<ProjectReference Include="..\ObjectDetector\IntelligentAPI_ObjectDetector.csproj">
<Project>{6f985dc2-db68-49e5-add8-6d11f35e552e}</Project>
<Name>IntelligentAPI_ObjectDetector</Name>

Просмотреть файл

@ -1,6 +1,7 @@

using CommunityToolkit.Labs.Intelligent.ImageClassification;
using CommunityToolkit.Labs.Intelligent.ObjectDetection;
using CommunityToolkit.Labs.Intelligent.EmotionRecognition;
using System;
using System.Collections.Generic;
using System.IO;
@ -73,6 +74,8 @@ namespace IntelligentLabsTest
//Use Squeezenet model to classify image
List<ClassificationResult> imageClasses = await SqueezeNetImageClassifier.ClassifyImage(selectedStorageFile, 3 );
UpdateTextBox(imageClasses);
}
catch(Exception exc)
{
@ -83,7 +86,7 @@ namespace IntelligentLabsTest
}
}
//Use YOLOv4 to detect objects. WORKS ONLY IF YOU ARE RUNNING WINDOWS 11!!
if (CheckWindowsBuildNumber())
{
@ -124,6 +127,7 @@ namespace IntelligentLabsTest
// Get the SoftwareBitmap representation of the file in BGRA8 format
softwareBitmap = await decoder.GetSoftwareBitmapAsync();
softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
var stuff = await EmotionRecognizer.DetectEmotion(softwareBitmap);
}
// Display the image
SoftwareBitmapSource imageSource = new SoftwareBitmapSource();