UI fixes and code cleanup
This commit is contained in:
Parent: 61dad9ba39
Commit: a312b9c14c
@@ -0,0 +1,4 @@
+[*.cs]
+
+# CA1416: Validate platform compatibility
+dotnet_diagnostic.CA1416.severity = silent
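The new .editorconfig entry silences the CA1416 platform-compatibility analyzer for every C# file in the solution, which is why several per-file suppressions are deleted further down in this diff. For comparison, a minimal sketch of the per-file suppression style being retired (fragment for illustration only; the pragma text matches the lines removed below):

    // Sketch: the wrapper style .editorconfig now replaces.
    #pragma warning disable CA1416 // Validate platform compatibility
    var device = new LearningModelDevice(LearningModelDeviceKind.Cpu);
    #pragma warning restore CA1416 // Validate platform compatibility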
@@ -1,7 +1,7 @@
 
 Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio Version 16
-VisualStudioVersion = 16.0.31112.23
+# Visual Studio Version 17
+VisualStudioVersion = 17.0.31919.166
 MinimumVisualStudioVersion = 10.0.40219.1
 Project("{C7167F0D-BC9F-4E6E-AFE1-012C56B48DB5}") = "WinMLSamplesGallery (Package)", "WinMLSamplesGallery (Package)\WinMLSamplesGallery (Package).wapproj", "{DEA7791F-55CF-4ED5-BC99-3870997B1242}"
 EndProject
@@ -11,6 +11,11 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "WinMLSamplesGalleryNative",
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "WinMLSamplesGalleryNative.Interop", "WinMLSamplesGalleryNative.Interop\WinMLSamplesGalleryNative.Interop.csproj", "{8E1F2E33-58A3-4454-926E-1C98B93A2A30}"
 EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{7EDD189E-04E6-432A-80A6-7811E157E9F6}"
+	ProjectSection(SolutionItems) = preProject
+		.editorconfig = .editorconfig
+	EndProjectSection
+EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		Debug|arm64 = Debug|arm64
@@ -17,7 +17,9 @@
     AlwaysShowHeader="False"
     PaneTitle="Windows ML"
     FontFamily="Arial"
-    IsSettingsVisible="False">
+    IsSettingsVisible="False"
+    PaneClosing="nvSample_PaneClosing"
+    PaneOpening="nvSample_PaneOpening">
     <NavigationView.Resources>
         <SolidColorBrush x:Key="NavigationViewExpandedPaneBackground" Color="#f5f5f5"/>
         <SolidColorBrush x:Key="NavigationViewDefaultPaneBackground" Color="#f5f5f5" />
@@ -26,11 +28,13 @@
         <NavigationViewItem Icon="Flag" Content="Home" Tag="home" />
         <NavigationViewItem Icon="Library" Content="All samples" Tag="all_samples" />
         <NavigationViewItemSeparator />
-        <TextBlock>
-            <Hyperlink TextDecorations="None" NavigateUri="https://docs.microsoft.com/en-us/windows/ai/windows-ml/">
-                Learn more about Windows ML
-            </Hyperlink>
-        </TextBlock>
+        <Grid>
+            <TextBlock x:Name="LearnMoreLink">
+                <Hyperlink TextDecorations="None" NavigateUri="https://docs.microsoft.com/en-us/windows/ai/windows-ml/">
+                    Learn more about Windows ML
+                </Hyperlink>
+            </TextBlock>
+        </Grid>
     </NavigationView.MenuItems>
     <Frame x:Name="contentFrame" Navigated="contentFrame_Navigated" BorderBrush="#dbdbdb" BorderThickness="1,1,0,0" Margin="0,40,0,0"
            CornerRadius="5,0,0,0" Background="White" FontFamily="Arial"/>
@@ -110,5 +110,15 @@ namespace WinMLSamplesGallery
             // here to load the home page.
             NavView_Navigate("home", new Microsoft.UI.Xaml.Media.Animation.EntranceNavigationTransitionInfo());
         }
+
+        private void nvSample_PaneClosing(NavigationView sender, NavigationViewPaneClosingEventArgs args)
+        {
+            LearnMoreLink.Visibility = Visibility.Collapsed;
+        }
+
+        private void nvSample_PaneOpening(NavigationView sender, object args)
+        {
+            LearnMoreLink.Visibility = Visibility.Visible;
+        }
     }
 }
@@ -3,6 +3,7 @@ using Microsoft.UI.Xaml;
 using Microsoft.UI.Xaml.Controls;
 using System;
 using System.Collections.Generic;
+using System.IO;
 using System.Threading.Tasks;
 using Windows.Graphics.Imaging;
 using Windows.Media;
@@ -19,14 +20,15 @@ namespace WinMLSamplesGallery.Samples
 
     public sealed partial class Batching : Page
     {
-        const int numInputImages = 50;
-        const int numEvalIterations = 100;
+        const int NumInputImages = 50;
+        const int NumEvalIterations = 100;
 
-        private LearningModelSession nonBatchingSession_;
-        private LearningModelSession batchingSession_;
-
-        float avgNonBatchedDuration_ = 0;
-        float avgBatchDuration_ = 0;
+        private LearningModel _model = null;
+        private LearningModelSession _nonBatchingSession = null;
+        private LearningModelSession _batchingSession = null;
+
+        float _avgNonBatchedDuration = 0;
+        float _avgBatchDuration = 0;
 
         // Marked volatile since it's updated across threads
         static volatile bool navigatingAwayFromPage = false;
@@ -36,28 +38,35 @@ namespace WinMLSamplesGallery.Samples
         {
             this.InitializeComponent();
             // Ensure static variable is always false on page initialization
-            navigatingAwayFromPage = false;
+            navigatingAwayFromPage = false;
+
+            // Load the model
+            var modelName = "squeezenet1.1-7-batched.onnx";
+            var modelPath = Path.Join(Windows.ApplicationModel.Package.Current.InstalledLocation.Path, "Models", modelName);
+            _model = LearningModel.LoadFromFilePath(modelPath);
         }
 
         async private void StartInference(object sender, RoutedEventArgs e)
         {
-            ShowEvalUI();
-            ResetEvalMetrics();
+            ShowStatus();
+            ResetMetrics();
 
             var inputImages = await GetInputImages();
             int batchSize = GetBatchSizeFromBatchSizeSlider();
-            await CreateSessions(batchSize);
 
-            UpdateEvalText(false);
+            _nonBatchingSession = await CreateLearningModelSession(_model);
+            _batchingSession = await CreateLearningModelSession(_model, batchSize);
+
+            UpdateStatus(false);
             await Classify(inputImages);
 
-            UpdateEvalText(true);
+            UpdateStatus(true);
             await ClassifyBatched(inputImages, batchSize);
 
-            GenerateEvalResultAndUI();
+            ShowUI();
         }
 
-        private void ShowEvalUI()
+        private void ShowStatus()
         {
             StartInferenceBtn.IsEnabled = false;
             BatchSizeSlider.IsEnabled = false;
@@ -66,10 +75,10 @@ namespace WinMLSamplesGallery.Samples
             LoadingContainer.Visibility = Visibility.Visible;
         }
 
-        private void ResetEvalMetrics()
+        private void ResetMetrics()
         {
-            avgNonBatchedDuration_ = 0;
-            avgBatchDuration_ = 0;
+            _avgNonBatchedDuration = 0;
+            _avgBatchDuration = 0;
         }
 
         // Test input consists of 50 images (25 bird and 25 cat)
@@ -80,7 +89,7 @@ namespace WinMLSamplesGallery.Samples
             var birdImage = await CreateSoftwareBitmapFromStorageFile(birdFile);
             var catImage = await CreateSoftwareBitmapFromStorageFile(catFile);
             var inputImages = new List<VideoFrame>();
-            for (int i = 0; i < numInputImages / 2; i++)
+            for (int i = 0; i < NumInputImages / 2; i++)
             {
                 inputImages.Add(VideoFrame.CreateWithSoftwareBitmap(birdImage));
                 inputImages.Add(VideoFrame.CreateWithSoftwareBitmap(catImage));
@@ -96,30 +105,23 @@ namespace WinMLSamplesGallery.Samples
             return bitmap;
         }
 
-        private void UpdateEvalText(bool isBatchingEval)
+        private void UpdateStatus(bool isBatchingEval)
         {
-            if (isBatchingEval)
-                EvalText.Text = "Inferencing Batched Inputs:";
-            else
-                EvalText.Text = "Inferencing Non-Batched Inputs:";
+            if (isBatchingEval)
+            {
+                EvalText.Text = "Inferencing Batched Inputs:";
+            }
+            else
+            {
+                EvalText.Text = "Inferencing Non-Batched Inputs:";
+            }
         }
 
-        private async Task CreateSessions(int batchSizeOverride)
+        private async Task<LearningModelSession> CreateLearningModelSession(LearningModel model, int batchSizeOverride=-1)
         {
-            var modelPath = "ms-appx:///Models/squeezenet1.1-7-batched.onnx";
-            nonBatchingSession_ = await CreateLearningModelSession(modelPath);
-            batchingSession_ = await CreateLearningModelSession(modelPath, batchSizeOverride);
-        }
-
-        private async Task<LearningModelSession> CreateLearningModelSession(string modelPath, int batchSizeOverride=-1)
-        {
-            var model = await CreateLearningModel(modelPath);
             var deviceKind = DeviceComboBox.GetDeviceKind();
             var device = new LearningModelDevice(deviceKind);
-            var options = new LearningModelSessionOptions()
-            {
-                CloseModelOnSessionCreation = true // Close the model to prevent extra memory usage
-            };
+            var options = new LearningModelSessionOptions();
             if (batchSizeOverride > 0)
             {
                 options.BatchSizeOverride = (uint)batchSizeOverride;
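Taken together, the Batching changes load the ONNX model once from the package install path and then derive every session, batched or not, from that single LearningModel, using LearningModelSessionOptions.BatchSizeOverride to fix the batch dimension. A minimal sketch of the pattern, assuming the WinML types (LearningModel, LearningModelDevice, LearningModelSession, LearningModelSessionOptions) already used throughout the gallery; the helper name and batch size are illustrative:

    // Illustrative helper: one model instance, many sessions.
    static LearningModelSession CreateSession(LearningModel model,
                                              LearningModelDeviceKind deviceKind,
                                              int batchSizeOverride = -1)
    {
        var options = new LearningModelSessionOptions();
        if (batchSizeOverride > 0)
        {
            // Fix the batch dimension so one Evaluate call processes batchSizeOverride inputs.
            options.BatchSizeOverride = (uint)batchSizeOverride;
        }
        return new LearningModelSession(model, new LearningModelDevice(deviceKind), options);
    }

    // Usage, mirroring StartInference above:
    // var model = LearningModel.LoadFromFilePath(modelPath);
    // var nonBatchedSession = CreateSession(model, LearningModelDeviceKind.Cpu);
    // var batchedSession = CreateSession(model, LearningModelDeviceKind.Cpu, batchSizeOverride: 4);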
@@ -128,26 +130,21 @@ namespace WinMLSamplesGallery.Samples
             return session;
         }
 
-        private static async Task<LearningModel> CreateLearningModel(string modelPath)
-        {
-            var uri = new Uri(modelPath);
-            var file = await StorageFile.GetFileFromApplicationUriAsync(uri);
-            var model = await LearningModel.LoadFromStorageFileAsync(file);
-            return model;
-        }
-
         async private Task Classify(List<VideoFrame> inputImages)
         {
             float totalEvalDurations = 0;
-            for (int i = 0; i < numEvalIterations; i++)
+            for (int i = 0; i < NumEvalIterations; i++)
             {
-                if (navigatingAwayFromPage)
-                    break;
-                UpdateEvalProgressUI(i);
-                float evalDuration = await Task.Run(() => Evaluate(nonBatchingSession_, inputImages));
+                if (navigatingAwayFromPage)
+                {
+                    break;
+                }
+
+                UpdateProgress(i);
+                float evalDuration = await Task.Run(() => Evaluate(_nonBatchingSession, inputImages));
                 totalEvalDurations += evalDuration;
             }
-            avgNonBatchedDuration_ = totalEvalDurations / numEvalIterations;
+            _avgNonBatchedDuration = totalEvalDurations / NumEvalIterations;
         }
 
         private static float Evaluate(LearningModelSession session, List<VideoFrame> input)
@@ -157,8 +154,11 @@ namespace WinMLSamplesGallery.Samples
             var binding = new LearningModelBinding(session);
             for (int j = 0; j < input.Count; j++)
             {
-                if (navigatingAwayFromPage)
-                    break;
+                if (navigatingAwayFromPage)
+                {
+                    break;
+                }
+
                 var start = HighResolutionClock.UtcNow();
                 binding.Bind(inputName, input[j]);
                 session.Evaluate(binding, "");
@@ -172,15 +172,15 @@ namespace WinMLSamplesGallery.Samples
         async private Task ClassifyBatched(List<VideoFrame> inputImages, int batchSize)
         {
             float totalEvalDurations = 0;
-            for (int i = 0; i < numEvalIterations; i++)
+            for (int i = 0; i < NumEvalIterations; i++)
             {
                 if (navigatingAwayFromPage)
                     break;
-                UpdateEvalProgressUI(i);
-                float evalDuration = await Task.Run(() => EvaluateBatched(batchingSession_, inputImages, batchSize));
+                UpdateProgress(i);
+                float evalDuration = await Task.Run(() => EvaluateBatched(_batchingSession, inputImages, batchSize));
                 totalEvalDurations += evalDuration;
             }
-            avgBatchDuration_ = totalEvalDurations / numEvalIterations;
+            _avgBatchDuration = totalEvalDurations / NumEvalIterations;
         }
 
         private static float EvaluateBatched(LearningModelSession session, List<VideoFrame> input, int batchSize)
@@ -191,8 +191,11 @@ namespace WinMLSamplesGallery.Samples
             var binding = new LearningModelBinding(session);
             for (int i = 0; i < numBatches; i++)
             {
-                if (navigatingAwayFromPage)
-                    break;
+                if (navigatingAwayFromPage)
+                {
+                    break;
+                }
+
                 int rangeStart = batchSize * i;
                 List<VideoFrame> batch;
                 // Add padding to the last batch if necessary
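The hunk above ends at the comment about padding the last batch; the padding code itself is outside this diff. As a hypothetical sketch of what such a step typically does (not the sample's actual code), a short final batch is topped up with repeated frames so the bound input always matches the session's fixed batch size:

    // Hypothetical helper, for illustration only.
    static List<VideoFrame> MakeBatch(List<VideoFrame> input, int rangeStart, int batchSize)
    {
        int remaining = input.Count - rangeStart;
        var batch = input.GetRange(rangeStart, Math.Min(batchSize, remaining));
        while (batch.Count < batchSize)
        {
            // Pad with earlier frames so the batch dimension stays constant.
            batch.Add(input[batch.Count % input.Count]);
        }
        return batch;
    }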
@@ -222,19 +225,19 @@ namespace WinMLSamplesGallery.Samples
             return int.Parse(BatchSizeSlider.Value.ToString());
         }
 
-        private void UpdateEvalProgressUI(int attemptNumber)
+        private void UpdateProgress(int attemptNumber)
         {
-            EvalProgressText.Text = "Attempt " + attemptNumber.ToString() + "/" + numEvalIterations.ToString();
+            EvalProgressText.Text = "Attempt " + attemptNumber.ToString() + "/" + NumEvalIterations.ToString();
             EvalProgressBar.Value = attemptNumber + 1;
         }
 
-        private void GenerateEvalResultAndUI()
+        private void ShowUI()
         {
-            float ratio = (1 - (avgBatchDuration_ / avgNonBatchedDuration_)) * 100;
+            float ratio = (1 - (_avgBatchDuration / _avgNonBatchedDuration)) * 100;
             var evalResult = new EvalResult
             {
-                nonBatchedAvgTime = avgNonBatchedDuration_.ToString("0.00"),
-                batchedAvgTime = avgBatchDuration_.ToString("0.00"),
+                nonBatchedAvgTime = _avgNonBatchedDuration.ToString("0.00"),
+                batchedAvgTime = _avgBatchDuration.ToString("0.00"),
                 timeRatio = ratio.ToString("0.0")
             };
             List<EvalResult> results = new List<EvalResult>();
@@ -4,6 +4,7 @@ using Microsoft.UI.Xaml.Controls;
 using Microsoft.UI.Xaml.Data;
 using System;
 using System.Collections.Generic;
+using System.IO;
 using System.Linq;
 using System.Runtime.InteropServices.WindowsRuntime;
 using Windows.Foundation;
@@ -131,24 +132,25 @@ namespace WinMLSamplesGallery.Samples
         {
             if (_modelDictionary == null)
             {
+                var installPath = Windows.ApplicationModel.Package.Current.InstalledLocation.Path;
                 _modelDictionary = new Dictionary<Classifier, string>{
-                    { Classifier.DenseNet121, "ms-appx:///Models/densenet-9.onnx"},
-                    { Classifier.EfficientNetLite4, "ms-appx:///Models/efficientnet-lite4-11.onnx"},
-                    { Classifier.ShuffleNet_V1, "ms-appx:///Models/shufflenet-9.onnx"},
-                    { Classifier.SqueezeNet, "ms-appx:///Models/squeezenet1.1-7.onnx" },
+                    { Classifier.DenseNet121, Path.Combine(installPath, "Models\\densenet-9.onnx") },
+                    { Classifier.EfficientNetLite4, Path.Combine(installPath, "Models\\efficientnet-lite4-11.onnx") },
+                    { Classifier.ShuffleNet_V1, Path.Combine(installPath, "Models\\shufflenet-9.onnx") },
+                    { Classifier.SqueezeNet, Path.Combine(installPath, "Models\\squeezenet1.1-7.onnx") },
 #if USE_LARGE_MODELS
                     // Large Models
-                    { Classifier.AlexNet, "ms-appx:///LargeModels/bvlcalexnet-9.onnx"},
-                    { Classifier.CaffeNet, "ms-appx:///LargeModels/caffenet-9.onnx"},
-                    { Classifier.GoogleNet, "ms-appx:///LargeModels/googlenet-9.onnx"},
-                    { Classifier.Inception_V1, "ms-appx:///LargeModels/inception-v1-9.onnx"},
-                    { Classifier.Inception_V2, "ms-appx:///LargeModels/inception-v2-9.onnx"},
-                    { Classifier.MobileNet, "ms-appx:///LargeModels/mobilenetv2-7.onnx" },
-                    { Classifier.ShuffleNet_V2, "ms-appx:///LargeModels/shufflenet-v2-10.onnx"},
-                    { Classifier.RCNN_ILSVRC13, "ms-appx:///LargeModels/rcnn-ilsvrc13-9.onnx"},
-                    { Classifier.ResNet, "ms-appx:///LargeModels/resnet50-caffe2-v1-9.onnx"},
-                    { Classifier.VGG, "ms-appx:///LargeModels/vgg19-7.onnx"},
-                    { Classifier.ZFNet512, "ms-appx:///LargeModels/zfnet512-9.onnx"},
+                    { Classifier.AlexNet, Path.Combine(installPath, "LargeModels\\bvlcalexnet-9.onnx") },
+                    { Classifier.CaffeNet, Path.Combine(installPath, "LargeModels\\caffenet-9.onnx") },
+                    { Classifier.GoogleNet, Path.Combine(installPath, "LargeModels\\googlenet-9.onnx") },
+                    { Classifier.Inception_V1, Path.Combine(installPath, "LargeModels\\inception-v1-9.onnx") },
+                    { Classifier.Inception_V2, Path.Combine(installPath, "LargeModels\\inception-v2-9.onnx") },
+                    { Classifier.MobileNet, Path.Combine(installPath, "LargeModels\\mobilenetv2-7.onnx") },
+                    { Classifier.ShuffleNet_V2, Path.Combine(installPath, "LargeModels\\shufflenet-v2-10.onnx") },
+                    { Classifier.RCNN_ILSVRC13, Path.Combine(installPath, "LargeModels\\rcnn-ilsvrc13-9.onnx") },
+                    { Classifier.ResNet, Path.Combine(installPath, "LargeModels\\resnet50-caffe2-v1-9.onnx") },
+                    { Classifier.VGG, Path.Combine(installPath, "LargeModels\\vgg19-7.onnx") },
+                    { Classifier.ZFNet512, Path.Combine(installPath, "LargeModels\\zfnet512-9.onnx") },
 #endif
                 };
             }
@@ -156,10 +158,11 @@ namespace WinMLSamplesGallery.Samples
             if (_postProcessorDictionary == null)
             {
                 _postProcessorDictionary = new Dictionary<Classifier, Func<LearningModel>>{
-                    { Classifier.DenseNet121, () => TensorizationModels.ReshapeThenSoftmaxThenTopK(new long[] { BatchSize, ClassificationLabels.ImageNet.Count, 1, 1 },
-                        TopK,
-                        BatchSize,
-                        ClassificationLabels.ImageNet.Count) },
+                    { Classifier.DenseNet121, () => TensorizationModels.ReshapeThenSoftmaxThenTopK(
+                        new long[] { BatchSize, ClassificationLabels.ImageNet.Count, 1, 1 },
+                        TopK,
+                        BatchSize,
+                        ClassificationLabels.ImageNet.Count) },
                     { Classifier.EfficientNetLite4, () => TensorizationModels.SoftMaxThenTopK(TopK) },
                     { Classifier.ShuffleNet_V1, () => TensorizationModels.TopK(TopK) },
                     { Classifier.SqueezeNet, () => TensorizationModels.SoftMaxThenTopK(TopK) },
@@ -185,13 +188,15 @@ namespace WinMLSamplesGallery.Samples
                 // Preprocessing values are described in the ONNX Model Zoo:
                 // https://github.com/onnx/models/tree/master/vision/classification/mobilenet
                 _preProcessorDictionary = new Dictionary<Classifier, Func<LearningModel>>{
-                    { Classifier.DenseNet121, () => TensorizationModels.Normalize0_1ThenZScore(Height, Width, Channels,
-                        new float[] { 0.485f, 0.456f, 0.406f },
-                        new float[] { 0.229f, 0.224f, 0.225f}) },
+                    { Classifier.DenseNet121, () => TensorizationModels.Normalize0_1ThenZScore(
+                        Height, Width, Channels,
+                        new float[] { 0.485f, 0.456f, 0.406f },
+                        new float[] { 0.229f, 0.224f, 0.225f}) },
                     { Classifier.EfficientNetLite4, () => TensorizationModels.NormalizeMinusOneToOneThenTransposeNHWC() },
-                    { Classifier.ShuffleNet_V1, () => TensorizationModels.Normalize0_1ThenZScore(Height, Width, Channels,
-                        new float[] { 0.485f, 0.456f, 0.406f },
-                        new float[] { 0.229f, 0.224f, 0.225f}) },
+                    { Classifier.ShuffleNet_V1, () => TensorizationModels.Normalize0_1ThenZScore(
+                        Height, Width, Channels,
+                        new float[] { 0.485f, 0.456f, 0.406f },
+                        new float[] { 0.229f, 0.224f, 0.225f}) },
                     { Classifier.SqueezeNet, null }, // No preprocessing required
 #if USE_LARGE_MODELS
                     // Large Models
@@ -200,19 +205,23 @@ namespace WinMLSamplesGallery.Samples
                     { Classifier.GoogleNet, null },
                     { Classifier.Inception_V1, null }, // No preprocessing required
                     { Classifier.Inception_V2, null }, // ????
-                    { Classifier.MobileNet, () => TensorizationModels.Normalize0_1ThenZScore(Height, Width, Channels,
-                        new float[] { 0.485f, 0.456f, 0.406f },
-                        new float[] { 0.229f, 0.224f, 0.225f}) },
+                    { Classifier.MobileNet, () => TensorizationModels.Normalize0_1ThenZScore(
+                        Height, Width, Channels,
+                        new float[] { 0.485f, 0.456f, 0.406f },
+                        new float[] { 0.229f, 0.224f, 0.225f}) },
                     { Classifier.RCNN_ILSVRC13, null }, // No preprocessing required
-                    { Classifier.ResNet, () => TensorizationModels.Normalize0_1ThenZScore(224, 224, 4,
-                        new float[] { 0.485f, 0.456f, 0.406f },
-                        new float[] { 0.229f, 0.224f, 0.225f}) },
-                    { Classifier.ShuffleNet_V2, () => TensorizationModels.Normalize0_1ThenZScore(Height, Width, Channels,
-                        new float[] { 0.485f, 0.456f, 0.406f },
-                        new float[] { 0.229f, 0.224f, 0.225f}) },
-                    { Classifier.VGG, () => TensorizationModels.Normalize0_1ThenZScore(224, 224, 4,
-                        new float[] { 0.485f, 0.456f, 0.406f },
-                        new float[] { 0.229f, 0.224f, 0.225f}) },
+                    { Classifier.ResNet, () => TensorizationModels.Normalize0_1ThenZScore(
+                        224, 224, 4,
+                        new float[] { 0.485f, 0.456f, 0.406f },
+                        new float[] { 0.229f, 0.224f, 0.225f}) },
+                    { Classifier.ShuffleNet_V2, () => TensorizationModels.Normalize0_1ThenZScore(
+                        Height, Width, Channels,
+                        new float[] { 0.485f, 0.456f, 0.406f },
+                        new float[] { 0.229f, 0.224f, 0.225f}) },
+                    { Classifier.VGG, () => TensorizationModels.Normalize0_1ThenZScore(
+                        224, 224, 4,
+                        new float[] { 0.485f, 0.456f, 0.406f },
+                        new float[] { 0.229f, 0.224f, 0.225f}) },
                     { Classifier.ZFNet512, null }, // No preprocessing required
 #endif
                 };
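For reference, the Normalize0_1ThenZScore preprocessing reformatted above scales pixels to the [0, 1] range and then applies per-channel z-score normalization with the ImageNet mean and standard deviation that appear in the dictionary entries. A rough per-pixel sketch of that math (illustrative only; the sample expresses it as a WinML tensorization model rather than a loop):

    // Illustrative only: per-channel equivalent of Normalize0_1ThenZScore.
    float[] mean = { 0.485f, 0.456f, 0.406f };
    float[] std  = { 0.229f, 0.224f, 0.225f };
    float Normalize(byte pixel, int channel) => (pixel / 255.0f - mean[channel]) / std[channel];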
@@ -223,17 +232,18 @@ namespace WinMLSamplesGallery.Samples
 
         private void InitializeWindowsMachineLearning()
         {
-            _tensorizationSession = CreateLearningModelSession(TensorizationModels.BasicTensorization(
-                Height, Width,
-                BatchSize, Channels, CurrentImageDecoder.PixelHeight, CurrentImageDecoder.PixelWidth,
-                "nearest"),
-                LearningModelDeviceKind.Cpu);
+            var tensorizationModel = TensorizationModels.BasicTensorization(
+                Height, Width,
+                BatchSize, Channels, CurrentImageDecoder.PixelHeight, CurrentImageDecoder.PixelWidth,
+                "nearest");
+            _tensorizationSession = CreateLearningModelSession(tensorizationModel, LearningModelDeviceKind.Cpu);
 
             var model = SelectedModel;
             if (model != CurrentModel)
             {
                 var modelPath = _modelDictionary[model];
-                _inferenceSession = CreateLearningModelSession(modelPath);
+                var inferenceModel = LearningModel.LoadFromFilePath(modelPath);
+                _inferenceSession = CreateLearningModelSession(inferenceModel);
 
                 var preProcessor = _preProcessorDictionary[model];
                 var hasPreProcessor = preProcessor != null;
@@ -341,7 +351,7 @@ namespace WinMLSamplesGallery.Samples
             var binding = new LearningModelBinding(session);
 
             // Create an empty output that will keep the output resources on the GPU
-            // It will be chained into a the post processing on the GPU as well
+            // It will be chained into post processing on the GPU as well
             var output = TensorFloat.Create();
 
             // Bind inputs and outputs
@@ -358,13 +368,6 @@ namespace WinMLSamplesGallery.Samples
             return session.Evaluate(binding, "");
         }
 
-        private LearningModelSession CreateLearningModelSession(string modelPath)
-        {
-            var model = CreateLearningModel(modelPath);
-            var session = CreateLearningModelSession(model);
-            return session;
-        }
-
         private LearningModelSession CreateLearningModelSession(LearningModel model, Nullable<LearningModelDeviceKind> kind = null)
         {
             var device = new LearningModelDevice(kind ?? SelectedDeviceKind);
@@ -376,14 +379,7 @@ namespace WinMLSamplesGallery.Samples
             return session;
         }
 
-        private static LearningModel CreateLearningModel(string modelPath)
-        {
-            var uri = new Uri(modelPath);
-            var file = StorageFile.GetFileFromApplicationUriAsync(uri).GetAwaiter().GetResult();
-            return LearningModel.LoadFromStorageFileAsync(file).GetAwaiter().GetResult();
-        }
-#pragma warning restore CA1416 // Validate platform compatibility
 
         private void TryPerformInference()
         {
             if (CurrentImageDecoder != null)
@@ -20,6 +20,7 @@ using SixLabors.ImageSharp;
 using SixLabors.ImageSharp.PixelFormats;
 using ImageSharpExtensionMethods;
 using SixLabors.ImageSharp.Processing;
+using System.IO;
 
 namespace WinMLSamplesGallery.Samples
 {
@@ -40,7 +41,6 @@ namespace WinMLSamplesGallery.Samples
 
         private Image<Bgra32> CurrentImage { get; set; }
 
-#pragma warning disable CA1416 // Validate platform compatibility
         private LearningModelDeviceKind SelectedDeviceKind
         {
             get
@@ -50,7 +50,6 @@ namespace WinMLSamplesGallery.Samples
                     LearningModelDeviceKind.DirectXHighPerformance;
             }
         }
-#pragma warning restore CA1416 // Validate platform compatibility
 
         public ImageSharpInterop()
         {
@@ -58,13 +57,18 @@ namespace WinMLSamplesGallery.Samples
 
             var tensorizationModel = TensorizationModels.BasicTensorization(Height, Width, BatchSize, Channels, Height, Width, "nearest");
             _tensorizationSession = CreateLearningModelSession(tensorizationModel, SelectedDeviceKind);
-            _inferenceSession = CreateLearningModelSession("ms-appx:///Models/squeezenet1.1-7.onnx");
-            _postProcessingSession = CreateLearningModelSession(TensorizationModels.SoftMaxThenTopK(TopK));
-
+            var inferenceModelName = "squeezenet1.1-7.onnx";
+            var inferenceModelPath = Path.Join(Windows.ApplicationModel.Package.Current.InstalledLocation.Path, "Models", inferenceModelName);
+            var inferenceModel = LearningModel.LoadFromFilePath(inferenceModelPath);
+            _inferenceSession = CreateLearningModelSession(inferenceModel);
+
+            var postProcessingModel = TensorizationModels.SoftMaxThenTopK(TopK);
+            _postProcessingSession = CreateLearningModelSession(postProcessingModel);
+
             BasicGridView.SelectedIndex = 0;
         }
 
 #pragma warning disable CA1416 // Validate platform compatibility
         private (IEnumerable<string>, IReadOnlyList<float>) Classify(Image<Bgra32> image, float angle)
         {
             long start, stop;
@@ -139,13 +143,6 @@ namespace WinMLSamplesGallery.Samples
             return session.Evaluate(binding, "");
         }
 
-        private LearningModelSession CreateLearningModelSession(string modelPath)
-        {
-            var model = CreateLearningModel(modelPath);
-            var session = CreateLearningModelSession(model);
-            return session;
-        }
-
         private LearningModelSession CreateLearningModelSession(LearningModel model, Nullable<LearningModelDeviceKind> kind = null)
         {
             var device = new LearningModelDevice(kind ?? SelectedDeviceKind);
@@ -157,14 +154,6 @@ namespace WinMLSamplesGallery.Samples
             return session;
         }
 
-        private static LearningModel CreateLearningModel(string modelPath)
-        {
-            var uri = new Uri(modelPath);
-            var file = StorageFile.GetFileFromApplicationUriAsync(uri).GetAwaiter().GetResult();
-            return LearningModel.LoadFromStorageFileAsync(file).GetAwaiter().GetResult();
-        }
-#pragma warning restore CA1416 // Validate platform compatibility
-
         private void TryPerformInference(bool reloadImages = true)
         {
             if (CurrentImage != null)
@@ -6,6 +6,7 @@ using Microsoft.UI.Xaml.Media.Imaging;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
+using System.IO;
 using System.Linq;
 using System.Runtime.InteropServices;
 using System.Runtime.InteropServices.WindowsRuntime;
@@ -156,15 +157,12 @@ namespace WinMLSamplesGallery.Samples
             dmlDevice = new LearningModelDevice(LearningModelDeviceKind.DirectX);
             cpuDevice = new LearningModelDevice(LearningModelDeviceKind.Cpu);
 
-            _session = CreateLearningModelSession("ms-appx:///Models/yolov4.onnx");
-            initialized_ = true;
-        }
+            var modelName = "yolov4.onnx";
+            var modelPath = Path.Join(Windows.ApplicationModel.Package.Current.InstalledLocation.Path, "Models", modelName);
+            var model = LearningModel.LoadFromFilePath(modelPath);
+            _session = CreateLearningModelSession(model);
 
-        private LearningModelSession CreateLearningModelSession(string modelPath)
-        {
-            var model = CreateLearningModel(modelPath);
-            var session = CreateLearningModelSession(model);
-            return session;
+            initialized_ = true;
         }
 
         private LearningModelSession CreateLearningModelSession(LearningModel model)
@@ -182,13 +180,6 @@ namespace WinMLSamplesGallery.Samples
             return session;
         }
 
-        private static LearningModel CreateLearningModel(string modelPath)
-        {
-            var uri = new Uri(modelPath);
-            var file = StorageFile.GetFileFromApplicationUriAsync(uri).GetAwaiter().GetResult();
-            return LearningModel.LoadFromStorageFileAsync(file).GetAwaiter().GetResult();
-        }
-
         private void DeviceComboBox_SelectionChanged(object sender, SelectionChangedEventArgs e)
         {
         }
@@ -5,6 +5,7 @@ using Microsoft.UI.Xaml.Data;
 using Microsoft.UI.Xaml.Media;
 using System;
 using System.Collections.Generic;
+using System.IO;
 using System.Linq;
 using System.Runtime.InteropServices.WindowsRuntime;
 using Windows.Foundation;
@@ -96,7 +97,6 @@ namespace WinMLSamplesGallery.Samples
 
         private ClassifyChoice InferenceChoice { get; set; }
 
-#pragma warning disable CA1416 // Validate platform compatibility
         private LearningModelDeviceKind SelectedDeviceKind
         {
             get
@@ -106,20 +106,25 @@ namespace WinMLSamplesGallery.Samples
                     LearningModelDeviceKind.DirectXHighPerformance;
             }
         }
-#pragma warning restore CA1416 // Validate platform compatibility
 
         public OpenCVInterop()
         {
             this.InitializeComponent();
             CurrentImagePath = null;
             InferenceChoice = ClassifyChoice.Denoised;
-            _inferenceSession = CreateLearningModelSession("ms-appx:///Models/squeezenet1.1-7.onnx");
+
+            // Load inference session
+            var modelName = "squeezenet1.1-7.onnx";
+            var modelPath = Path.Join(Windows.ApplicationModel.Package.Current.InstalledLocation.Path, "Models", modelName);
+            var model = LearningModel.LoadFromFilePath(modelPath);
+            _inferenceSession = CreateLearningModelSession(model);
+
+            // Load post processing session
             _postProcessingSession = CreateLearningModelSession(TensorizationModels.SoftMaxThenTopK(TopK));
 
             BasicGridView.SelectedIndex = 0;
         }
 
 #pragma warning disable CA1416 // Validate platform compatibility
         private (IEnumerable<string>, IReadOnlyList<float>) Classify(WinMLSamplesGalleryNative.OpenCVImage image)
         {
             long start, stop;
@@ -190,13 +195,6 @@ namespace WinMLSamplesGallery.Samples
             return session.Evaluate(binding, "");
         }
 
-        private LearningModelSession CreateLearningModelSession(string modelPath)
-        {
-            var model = CreateLearningModel(modelPath);
-            var session = CreateLearningModelSession(model);
-            return session;
-        }
-
         private LearningModelSession CreateLearningModelSession(LearningModel model, Nullable<LearningModelDeviceKind> kind = null)
         {
             var device = new LearningModelDevice(kind ?? SelectedDeviceKind);
@@ -208,14 +206,6 @@ namespace WinMLSamplesGallery.Samples
             return session;
         }
 
-        private static LearningModel CreateLearningModel(string modelPath)
-        {
-            var uri = new Uri(modelPath);
-            var file = StorageFile.GetFileFromApplicationUriAsync(uri).GetAwaiter().GetResult();
-            return LearningModel.LoadFromStorageFileAsync(file).GetAwaiter().GetResult();
-        }
-#pragma warning restore CA1416 // Validate platform compatibility
-
         private void TryPerformInference(bool reloadImages = true)
         {
             if (CurrentImagePath != null)
@@ -86,6 +86,7 @@
   </ItemGroup>
 
   <ItemGroup>
+    <None Include="..\.editorconfig" Link=".editorconfig" />
     <None Include="Samples\ImageEffects\ImageEffects.xaml.cs" />
     <None Include="Samples\ImageClassifier\ImageClassifier.xaml.cs" />
     <None Include="Samples\OpenCVInterop\OpenCVInterop.xaml.cs" />