Merge pull request #377 from microsoft/user/sheilk/add-net5-sample
Add .NET5 Squeezenet Sample
This commit is contained in:
Коммит
86d6c0302c
|
@ -46,7 +46,6 @@ dlldata.c
|
|||
project.lock.json
|
||||
project.fragment.lock.json
|
||||
artifacts/
|
||||
**/Properties/launchSettings.json
|
||||
|
||||
*_i.c
|
||||
*_p.c
|
||||
|
|
|
@ -37,6 +37,7 @@ These generic examples show how to use various models and input feeds with Windo
|
|||
- **[SqueezeNetObjectDetection\UWP\cs](https://github.com/Microsoft/Windows-Machine-Learning/tree/master/Samples/SqueezeNetObjectDetection/UWP/cs)**: a UWP C# app that uses the SqueezeNet model to detect the predominant object in an image.
|
||||
- **[SqueezeNetObjectDetection\UWP\js](https://github.com/Microsoft/Windows-Machine-Learning/tree/master/Samples/SqueezeNetObjectDetection/UWP/js)**: a UWP Javascript app that uses the SqueezeNet model to detect the predominant object in an image.
|
||||
- **[SqueezeNetObjectDetection\Desktop\cpp](https://github.com/Microsoft/Windows-Machine-Learning/tree/master/Samples/SqueezeNetObjectDetection/Desktop/cpp)**: a classic desktop C++/WinRT app that uses the SqueezeNet model to detect the predominant object in an image.
|
||||
- **[SqueezeNetObjectDetection\NET5\cs](https://github.com/Microsoft/Windows-Machine-Learning/tree/master/Samples/SqueezeNetObjectDetection/NET5)**: a .NET5 application that uses the SqueezeNet model to detect the predominant object in an image.
|
||||
- **[SqueezeNetObjectDetection\NETCore\cs](https://github.com/Microsoft/Windows-Machine-Learning/tree/master/Samples/SqueezeNetObjectDetection/NETCore/cs)**: a .NET Core 2 application that uses the SqueezeNet model to detect the predominant object in an image.
|
||||
- **[StyleTransfer](https://github.com/Microsoft/Windows-Machine-Learning/tree/master/Samples/StyleTransfer)**: a UWP C# app that uses a custom C++ Video Effect to apply style transfer in real-time to videos.
|
||||
- **[MNIST\UWP\cs](https://github.com/Microsoft/Windows-Machine-Learning/tree/master/Samples/MNIST/Tutorial/cs)**: a UWP C# app that uses the MNIST model to detect handwritten numbers.
|
||||
|
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -0,0 +1,215 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using Windows.Storage;
|
||||
using Windows.Graphics.Imaging;
|
||||
using Windows.Storage.Streams;
|
||||
using Windows.Foundation;
|
||||
using Windows.Media;
|
||||
using Newtonsoft.Json;
|
||||
|
||||
/// To choose between Inbox WinML and Redist WinML, simply select the appropriate namespace via "using".
|
||||
using Microsoft.AI.MachineLearning;
|
||||
// using Windows.AI.MachineLearning;
|
||||
|
||||
namespace SqueezeNetObjectDetectionNC
|
||||
{
|
||||
class ImageInference
|
||||
{
|
||||
// globals
|
||||
private static LearningModelDeviceKind _deviceKind = LearningModelDeviceKind.Default;
|
||||
private static string _deviceName = "default";
|
||||
private static string _modelPath;
|
||||
private static string _imagePath;
|
||||
private static string _labelsFileName = "Labels.json";
|
||||
private static LearningModel _model = null;
|
||||
private static LearningModelSession _session;
|
||||
private static List<string> _labels = new List<string>();
|
||||
|
||||
// usage: SqueezeNet [modelfile] [imagefile] [cpu|directx]
static int Main(string[] args)
{
    if (!ParseArgs(args))
    {
        Console.WriteLine("Usage: [executable_name] [modelfile] [imagefile] [cpu|directx]");
        return -1;
    }

    // Load and create the model, timing the load with the coarse system tick counter.
    Console.WriteLine($"Loading modelfile '{_modelPath}' on the '{_deviceName}' device");

    int loadStart = Environment.TickCount;
    _model = LearningModel.LoadFromFilePath(_modelPath);
    int loadElapsed = Environment.TickCount - loadStart;
    Console.WriteLine($"model file loaded in { loadElapsed } ticks");

    // Create the evaluation session with the model and the selected device.
    _session = new LearningModelSession(_model, new LearningModelDevice(_deviceKind));

    Console.WriteLine("Getting color management mode...");
    ColorManagementMode colorManagementMode = GetColorManagementMode();

    Console.WriteLine("Loading the image...");
    ImageFeatureValue imageTensor = LoadImageFile(colorManagementMode);

    // Bind the image tensor to the model's first input feature.
    Console.WriteLine("Binding...");
    var binding = new LearningModelBinding(_session);
    binding.Bind(_model.InputFeatures.ElementAt(0).Name, imageTensor);

    Console.WriteLine("Running the model...");
    int runStart = Environment.TickCount;
    var results = _session.Evaluate(binding, "RunId");
    int runElapsed = Environment.TickCount - runStart;
    Console.WriteLine($"model run took { runElapsed } ticks");

    // Retrieve the first output feature as a float tensor and print the top matches.
    var resultTensor = results.Outputs[_model.OutputFeatures.ElementAt(0).Name] as TensorFloat;
    PrintResults(resultTensor.GetAsVectorView());
    return 0;
}
|
||||
|
||||
// Parses the command line: [modelfile] [imagefile] [cpu|directx].
// Returns false when the two required positional arguments are missing.
static bool ParseArgs(string[] args)
{
    if (args.Length < 2)
    {
        return false;
    }
    // get the model file
    _modelPath = args[0];
    // get the image file
    _imagePath = args[1];

    // optional third arg selects the evaluation device; default device otherwise
    if (args.Length > 2)
    {
        string deviceName = args[2];
        if (deviceName.Equals("cpu", StringComparison.OrdinalIgnoreCase))
        {
            _deviceKind = LearningModelDeviceKind.Cpu;
            // BUGFIX: keep _deviceName in sync so Main reports the device actually used
            // instead of always printing 'default'.
            _deviceName = deviceName;
        }
        else if (deviceName.Equals("directx", StringComparison.OrdinalIgnoreCase))
        {
            _deviceKind = LearningModelDeviceKind.DirectX;
            _deviceName = deviceName;
        }
    }
    return true;
}
|
||||
|
||||
// Reads the label file (Labels.json) and fills _labels with the class names.
// The file's entries are already sorted in index order, so the values can be
// collected as-is.
private static void LoadLabels()
{
    string json = File.ReadAllText(_labelsFileName);
    var entries = JsonConvert.DeserializeObject<Dictionary<string, string>>(json);
    _labels.AddRange(entries.Values);
}
|
||||
|
||||
|
||||
// Synchronously waits for a WinRT IAsyncOperation to complete and returns its
// result. Used because this sample keeps Main synchronous instead of async/await.
private static T AsyncHelper<T>(IAsyncOperation<T> operation)
{
    // BUGFIX: AutoResetEvent is IDisposable and was previously leaked on every call;
    // dispose it deterministically with a using block.
    using (AutoResetEvent waitHandle = new AutoResetEvent(false))
    {
        // The Completed delegate is invoked immediately if the operation has
        // already finished, so there is no race with WaitOne below.
        operation.Completed = new AsyncOperationCompletedHandler<T>((op, status) =>
        {
            waitHandle.Set();
        });
        waitHandle.WaitOne();
        // GetResults rethrows any exception the operation produced.
        return operation.GetResults();
    }
}
|
||||
|
||||
// Decodes the image at _imagePath, converts it to premultiplied BGRA8, and wraps
// it as an ImageFeatureValue ready for binding. Exits the process when the file
// cannot be read or the bitmap cannot be created in the requested colorspace.
private static ImageFeatureValue LoadImageFile(ColorManagementMode colorManagementMode)
{
    BitmapDecoder decoder = null;
    try
    {
        // Resolve to a fully qualified path and open a decoder over the stream.
        var imageFile = AsyncHelper(StorageFile.GetFileFromPathAsync(System.IO.Path.GetFullPath(_imagePath)));
        var stream = AsyncHelper(imageFile.OpenReadAsync());
        decoder = AsyncHelper(BitmapDecoder.CreateAsync(stream));
    }
    catch (Exception e)
    {
        Console.WriteLine("Failed to load image file! Make sure that fully qualified paths are used.");
        Console.WriteLine(" Exception caught.\n {0}", e);
        System.Environment.Exit(e.HResult);
    }

    SoftwareBitmap bitmap = null;
    try
    {
        // Decode honoring EXIF orientation and the caller-selected color management.
        bitmap = AsyncHelper(decoder.GetSoftwareBitmapAsync(
            decoder.BitmapPixelFormat,
            decoder.BitmapAlphaMode,
            new BitmapTransform(),
            ExifOrientationMode.RespectExifOrientation,
            colorManagementMode));
    }
    catch (Exception e)
    {
        Console.WriteLine("Failed to create SoftwareBitmap! Please make sure that input image is within the model's colorspace.");
        Console.WriteLine(" Exception caught.\n {0}", e);
        System.Environment.Exit(e.HResult);
    }

    // The model expects premultiplied BGRA8 pixels.
    bitmap = SoftwareBitmap.Convert(bitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
    return ImageFeatureValue.CreateFromVideoFrame(VideoFrame.CreateWithSoftwareBitmap(bitmap));
}
|
||||
|
||||
// Decides how the imaging pipeline should color-manage the input, driven by the
// model's optional "Image.ColorSpaceGamma" metadata. Defaults to sRGB when the
// metadata is absent or explicitly names SRGB.
private static ColorManagementMode GetColorManagementMode()
{
    if (!_model.Metadata.TryGetValue("Image.ColorSpaceGamma", out string gammaSpace))
    {
        Console.WriteLine(" Model does not have color space gamma information. Will color manage to sRGB by default...");
        return ColorManagementMode.ColorManageToSRgb;
    }
    if (gammaSpace.Equals("SRGB", StringComparison.CurrentCultureIgnoreCase))
    {
        return ColorManagementMode.ColorManageToSRgb;
    }
    // Due diligence should be done to make sure that the input image is within the model's colorspace. There are multiple non-sRGB color spaces.
    Console.WriteLine(" Model metadata indicates that color gamma space is : {0}. Will not manage color space to sRGB...", gammaSpace);
    return ColorManagementMode.DoNotColorManage;
}
|
||||
|
||||
// Prints the highest-confidence labels (top 3, or fewer if the model emitted
// fewer scores) in descending probability order.
private static void PrintResults(IReadOnlyList<float> resultVector)
{
    // load the labels
    LoadLabels();

    // Pair each score with its class index so sorting keeps the label mapping.
    List<(int index, float probability)> indexedResults = new List<(int, float)>();
    for (int i = 0; i < resultVector.Count; i++)
    {
        indexedResults.Add((index: i, probability: resultVector[i]));
    }
    // Descending by probability (replaces the verbose hand-rolled comparator).
    indexedResults.Sort((a, b) => b.probability.CompareTo(a.probability));

    // BUGFIX: the original fixed `i < 3` loop threw ArgumentOutOfRangeException
    // for models producing fewer than 3 outputs; clamp to the available count.
    int topK = Math.Min(3, indexedResults.Count);
    for (int i = 0; i < topK; i++)
    {
        Console.WriteLine($"\"{ _labels[indexedResults[i].index]}\" with confidence of { indexedResults[i].probability}");
    }
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"profiles": {
|
||||
"SqueezeNetObjectDetectionNET5": {
|
||||
"commandName": "Project",
|
||||
"commandLineArgs": "SqueezeNet.onnx kitten_224.png cpu",
|
||||
"workingDirectory": "$(OutDir)"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,46 @@
|
|||
# SqueezeNet Object Detection sample
|
||||
|
||||
This is a .NET 5 application that uses SqueezeNet, a pre-trained machine learning model, to detect the predominant object in an image selected by the user from a file.
|
||||
|
||||
Note: SqueezeNet was trained to work with image sizes of 224x224, so you must provide an image of size 224X224.
|
||||
Also, the asynchronous handlers defined in the code are required due to a limitation of .NET Core 2. With the release of .NET Core 3 you will be able to use the async/await pattern.
|
||||
|
||||
To get access to Windows.AI.MachineLearning and various other Windows classes an assembly reference needs to be added for Windows.winmd
|
||||
For this project the assembly reference is parametrized by the environment variable WINDOWS_WINMD, so you need to set this environment variable before building.
|
||||
The file path for the Windows.winmd file may be: ```C:\Program Files (x86)\Windows Kits\10\UnionMetadata\[version]\Windows.winmd```
|
||||
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- [Visual Studio 2017 Version 15.7.4 or Newer](https://developer.microsoft.com/en-us/windows/downloads)
|
||||
- [Windows 10 - Build 17763 or higher](https://www.microsoft.com/en-us/software-download/windowsinsiderpreviewiso)
|
||||
- [Windows SDK - Build 17763 or higher](https://www.microsoft.com/en-us/software-download/windowsinsiderpreviewSDK)
|
||||
|
||||
## Build the sample
|
||||
|
||||
1. If you download the samples ZIP, be sure to unzip the entire archive, not just the folder with the sample you want to build.
|
||||
2. Start Microsoft Visual Studio 2017 and select **File > Open > Project/Solution**.
|
||||
3. Starting in the folder where you unzipped the samples, go to the **Samples** subfolder, then the subfolder for this sample (**SqueezeNetObjectDetection). Double-click the Visual Studio solution file (.sln).
|
||||
4. Confirm that you are set for the right configuration and platform (for example: Debug, x64).
|
||||
5. Build the solution by right clicking the project in **Solution Explorer** and selecting Build (**Ctrl+Shift+B**).
|
||||
|
||||
## Running the sample
|
||||
|
||||
- To debug the sample and then run it, press F5 or select Debug > Start Debugging. To run the sample without debugging, press Ctrl+F5 or select Debug > Start Without Debugging.
|
||||
|
||||
- You should get output similar to the following:
|
||||
```
|
||||
Loading modelfile 'C:\Repos\Windows-Machine-Learning\SharedContent\models\SqueezeNet.onnx' on the 'default' device
|
||||
model file loaded in 421 ticks
|
||||
Loading the image...
|
||||
Binding...
|
||||
Running the model...
|
||||
model run took 31 ticks
|
||||
"tabby, tabby cat" with confidence of 0.931461
|
||||
"Egyptian cat" with confidence of 0.065307
|
||||
"tiger cat" with confidence of 0.002927
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
MIT. See [LICENSE file](https://github.com/Microsoft/Windows-Machine-Learning/blob/master/LICENSE).
|
|
@ -0,0 +1,45 @@
|
|||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<OutputType>Exe</OutputType>
|
||||
<TargetFramework>net5.0-windows10.0.19041.0</TargetFramework>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
|
||||
<DefineConstants>TRACE</DefineConstants>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<DefineConstants>USE_WINML_NUGET</DefineConstants>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<None Remove="Labels.json" />
|
||||
<None Remove="..\..\..\SharedContent\models\SqueezeNet.onnx" />
|
||||
<None Remove="..\..\..\SharedContent\media\kitten_224.png" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Content Include="Labels.json">
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</Content>
|
||||
<Content Include="..\..\..\SharedContent\models\SqueezeNet.onnx">
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</Content>
|
||||
<Content Include="..\..\..\SharedContent\media\kitten_224.png">
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</Content>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.AI.MachineLearning" Version="1.7.0" />
|
||||
<PackageReference Include="Newtonsoft.Json" Version="12.0.1" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<None Update="Labels.json">
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</None>
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
|
@ -1,7 +1,7 @@
|
|||
|
||||
Microsoft Visual Studio Solution File, Format Version 12.00
|
||||
# Visual Studio 15
|
||||
VisualStudioVersion = 15.0.27703.2026
|
||||
# Visual Studio Version 16
|
||||
VisualStudioVersion = 16.0.31019.35
|
||||
MinimumVisualStudioVersion = 10.0.40219.1
|
||||
Project("{262852C6-CD72-467D-83FE-5EEB1973A190}") = "SqueezeNetObjectDetectionJS", "UWP\js\SqueezeNetObjectDetectionJS.jsproj", "{2CF5F1AA-BF69-498C-8B74-04137B2F67F3}"
|
||||
EndProject
|
||||
|
@ -16,6 +16,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SqueezeNetObjectDetectionNC
|
|||
EndProject
|
||||
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "SampleSharedLib", "..\SampleSharedLib\SampleSharedLib\SampleSharedLib.vcxproj", "{12103A5B-677A-4286-83D2-54EAB9010C16}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SqueezeNetObjectDetectionNET5", "NET5\SqueezeNetObjectDetectionNET5.csproj", "{C475A99E-5976-4C5C-941F-1CE3E71F035C}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
|
@ -112,6 +114,22 @@ Global
|
|||
{12103A5B-677A-4286-83D2-54EAB9010C16}.Release|x64.Build.0 = Release|x64
|
||||
{12103A5B-677A-4286-83D2-54EAB9010C16}.Release|x86.ActiveCfg = Release|Win32
|
||||
{12103A5B-677A-4286-83D2-54EAB9010C16}.Release|x86.Build.0 = Release|Win32
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Debug|ARM.ActiveCfg = Debug|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Debug|ARM.Build.0 = Debug|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Release|ARM.ActiveCfg = Release|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Release|ARM.Build.0 = Release|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Release|x64.Build.0 = Release|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{C475A99E-5976-4C5C-941F-1CE3E71F035C}.Release|x86.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
|
|
Загрузка…
Ссылка в новой задаче