This change allows the user to specify a folder of images to bind and evaluate (#237)

* Evaluate folder of images

* Evaluate folder of images

* Updated usage

* Test change

* test changes back to test method cleanup

* Fix build break

* remove test

* Added test

* Fix comment typo

* Don't print out image path if using -terse flag

* Remove comments
Ryan Lai 2019-06-18 09:28:50 -07:00 committed by GitHub
Parent f5fec2fdac
Commit 99e9342593
No key matching this signature was found
GPG key ID: 4AEE18F83AFDEB23
9 changed files with 185 additions and 91 deletions
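For reference, the new flag can be exercised with a command along the lines of the test added in this commit (the executable name and folder path below are illustrative, not part of the change):

    WinMLRunner.exe -model SqueezeNet.onnx -InputImageFolder C:\test_folder_input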


@@ -641,6 +641,30 @@ namespace WinMLRunnerTest
// We need to expect one more line because of the header
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount(tensorDataPath + L"\\PerIterationData\\Summary.csv"));
}
TEST_METHOD(ProvidedImageInputFolder)
{
// Make test_folder_input folder before starting the tests
std::string mkFolderCommand = "mkdir " + std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end());
system(mkFolderCommand.c_str());
std::vector<std::string> images = { "fish.png", "kitten_224.png" };
// Copy images from list to test_folder_input
for (auto image : images)
{
std::string copyCommand = "Copy ";
copyCommand += image;
copyCommand += ' ' + std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end());
system(copyCommand.c_str());
}
const std::wstring command = BuildCommand({ EXE_PATH, L"-model", L"SqueezeNet.onnx", L"-InputImageFolder", INPUT_FOLDER_PATH });
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
std::string removeCommand = "rd /s /q ";
removeCommand += std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end());
system(removeCommand.c_str());
}
};
TEST_CLASS(CsvInputTest)


@@ -45,7 +45,8 @@ Required command-line arguments:
-Tensor : load the input as a tensor
-Perf [all]: capture performance measurements such as timing and memory usage. Specifying "all" will output all measurements
-Iterations : # times perf measurements will be run/averaged. (maximum: 1024 times)
-Input <fully qualified path>: binds image or CSV to model
-Input <path to input file>: binds image or CSV to model
-InputImageFolder <path to directory of images> : specify folder of images to bind to model
-TopK <number>: print top <number> values in the result. Default to 1
-BaseOutputPath [<fully qualified path>] : base output directory path for results, default to cwd
-PerfOutput [<path>] : fully qualified or relative path including csv filename for perf results


@@ -477,8 +477,8 @@ namespace BindingUtilities
}
else if (args.IsImageInput())
{
// Creating Tensors for Input Images haven't been added yet.
throw hresult_not_implemented(L"Creating Tensors for Input Images haven't been implemented yet!");
// Creating Tensors for Input Images haven't been added.
throw hresult_not_implemented(L"Creating Tensors for Input Images haven't been implemented!");
}
if (inputBindingType == InputBindingType::CPU)
@@ -605,7 +605,7 @@ namespace BindingUtilities
}
else
{
throw hresult_not_implemented(L"BitmapPixel format not yet handled by WinMLRunner.");
throw hresult_not_implemented(L"BitmapPixel format not handled by WinMLRunner.");
}
std::vector<int64_t> shape = { 1, channels, imageFeatureDescriptor.Height(),
imageFeatureDescriptor.Width() };


@@ -5,6 +5,7 @@
#include <ctime>
#include <iomanip>
#include <filesystem>
#include "Filehelper.h"
using namespace Windows::AI::MachineLearning;
@@ -35,7 +36,8 @@ void CommandLineArgs::PrintUsage()
"will output all measurements"
<< std::endl;
std::cout << " -Iterations : # times perf measurements will be run/averaged. (maximum: 1024 times)" << std::endl;
std::cout << " -Input <fully qualified path> : binds image or CSV to model" << std::endl;
std::cout << " -Input <path to input file>: binds image or CSV to model" << std::endl;
std::cout << " -InputImageFolder <path to directory of images> : specify folder of images to bind to model" << std::endl;
std::cout << " -TopK <number> : print top <number> values in the result. Default to 1" << std::endl;
std::cout << " -BaseOutputPath [<fully qualified path>] : base output directory path for results, default to cwd"
<< std::endl;
@@ -139,7 +141,13 @@ CommandLineArgs::CommandLineArgs(const std::vector<std::wstring>& args)
}
else if ((_wcsicmp(args[i].c_str(), L"-Input") == 0))
{
m_inputData = args[++i];
CheckNextArgument(args, i);
m_inputData = FileHelper::GetAbsolutePath(args[++i]);
}
else if ((_wcsicmp(args[i].c_str(), L"-InputImageFolder") == 0))
{
CheckNextArgument(args, i);
m_inputImageFolderPath = FileHelper::GetAbsolutePath(args[++i]);
}
else if ((_wcsicmp(args[i].c_str(), L"-PerfOutput") == 0))
{
@@ -326,7 +334,7 @@ CommandLineArgs::CommandLineArgs(const std::vector<std::wstring>& args)
if (m_inputData.find(L".png") != std::string::npos || m_inputData.find(L".jpg") != std::string::npos ||
m_inputData.find(L".jpeg") != std::string::npos)
{
m_imagePath = m_inputData;
m_imagePaths.push_back(m_inputData);
}
else if (m_inputData.find(L".csv") != std::string::npos)
{
@@ -339,12 +347,31 @@ CommandLineArgs::CommandLineArgs(const std::vector<std::wstring>& args)
throw hresult_invalid_argument(msg.c_str());
}
}
if (!m_inputImageFolderPath.empty())
{
PopulateInputImagePaths();
}
SetupOutputDirectories(sBaseOutputPath, sPerfOutputPath, sPerIterationDataPath);
CheckForInvalidArguments();
}
void CommandLineArgs::PopulateInputImagePaths()
{
for (auto& it : std::filesystem::directory_iterator(m_inputImageFolderPath))
{
std::string path = it.path().string();
if (it.path().string().find(".png") != std::string::npos ||
it.path().string().find(".jpg") != std::string::npos ||
it.path().string().find(".jpeg") != std::string::npos)
{
std::wstring fileName;
fileName.assign(path.begin(), path.end());
m_imagePaths.push_back(fileName);
}
}
}
void CommandLineArgs::SetupOutputDirectories(const std::wstring& sBaseOutputPath,
const std::wstring& sPerfOutputPath,
const std::wstring& sPerIterationDataPath)
@@ -420,4 +447,8 @@ void CommandLineArgs::CheckForInvalidArguments()
{
throw hresult_invalid_argument(L"Cannot save tensor output if no input data is provided!");
}
if (m_imagePaths.size() > 1 && IsSaveTensor())
{
throw hresult_not_implemented(L"Saving tensor output for multiple images isn't implemented.");
}
}


@@ -24,7 +24,7 @@ public:
BitmapInterpolationMode AutoScaleInterpMode() const { return m_autoScaleInterpMode; }
const std::wstring& ImagePath() const { return m_imagePath; }
const std::vector<std::wstring>& ImagePaths() const { return m_imagePaths; }
const std::wstring& CsvPath() const { return m_csvData; }
const std::wstring& OutputPath() const { return m_perfOutputPath; }
const std::wstring& FolderPath() const { return m_modelFolderPath; }
@@ -38,7 +38,7 @@ public:
bool UseRGB() const
{
// If an image is specified without flags, we load it as a BGR image by default
return m_useRGB || (!m_imagePath.empty() && !m_useBGR && !m_useTensor);
return m_useRGB || (!m_imagePaths.empty() && !m_useBGR && !m_useTensor);
}
bool UseTensor() const
@@ -70,10 +70,10 @@ public:
bool IsGarbageInput() const
{
// When there is no image or csv input provided, then garbage input binding is used.
return m_imagePath.empty() && m_csvData.empty();
return m_imagePaths.empty() && m_csvData.empty();
}
bool IsCSVInput() const { return m_imagePath.empty() && !m_csvData.empty(); }
bool IsImageInput() const { return !m_imagePath.empty() && m_csvData.empty(); }
bool IsCSVInput() const { return m_imagePaths.empty() && !m_csvData.empty(); }
bool IsImageInput() const { return !m_imagePaths.empty() && m_csvData.empty(); }
uint32_t NumIterations() const { return m_numIterations; }
uint32_t NumThreads() const { return m_numThreads; }
@@ -143,7 +143,8 @@ private:
std::wstring m_modelFolderPath;
std::wstring m_modelPath;
std::wstring m_imagePath;
std::vector<std::wstring> m_imagePaths;
std::wstring m_inputImageFolderPath;
std::wstring m_csvData;
std::wstring m_inputData;
#ifdef DXCORE_SUPPORTED_BUILD
@@ -161,4 +162,5 @@ private:
void CheckForInvalidArguments();
void SetupOutputDirectories(const std::wstring& sBaseOutputPath, const std::wstring& sPerfOutputPath,
const std::wstring& sPerIterationDataPath);
void PopulateInputImagePaths();
};


@@ -22,4 +22,15 @@ namespace FileHelper
return val;
}
std::wstring GetAbsolutePath(std::wstring relativePath)
{
TCHAR** lppPart = { NULL };
wchar_t absolutePath[MAX_PATH] = { 0 };
errno_t err = GetFullPathName(relativePath.c_str(), MAX_PATH, absolutePath, lppPart);
if (err == 0)
{
throw HRESULT_FROM_WIN32(GetLastError());
}
return absolutePath;
}
} // namespace FileHelper
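For illustration (the working directory here is an assumption, not part of the change): because GetFullPathName resolves a relative path against the current directory, launching the runner from C:\WinMLRunner and passing -InputImageFolder images would bind the folder C:\WinMLRunner\images, so both -Input and -InputImageFolder accept relative or absolute paths.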


@@ -4,4 +4,5 @@
namespace FileHelper
{
std::wstring GetModulePath();
std::wstring GetAbsolutePath(std::wstring relativePath);
}


@@ -578,7 +578,7 @@ public:
void SetCSVFileName(const std::wstring& fileName) { m_csvFileName = fileName; }
void WritePerIterationPerformance(const CommandLineArgs& args, std::wstring model)
void WritePerIterationPerformance(const CommandLineArgs& args, const std::wstring model, const std::wstring imagePath)
{
if (m_csvFileNamePerIterationSummary.length() > 0)
{
@@ -599,7 +599,7 @@ public:
std::string modelName = converter.to_bytes(model);
std::string fileNameResultDevice = converter.to_bytes(m_fileNameResultDevice);
std::string inputName = args.IsCSVInput() ? converter.to_bytes(args.CsvPath())
: args.IsImageInput() ? converter.to_bytes(args.ImagePath()) : "";
: args.IsImageInput() ? converter.to_bytes(imagePath) : "";
if (bNewFile)
{


@@ -11,10 +11,14 @@ using namespace winrt::Windows::Graphics::DirectX::Direct3D11;
std::vector<ILearningModelFeatureValue> GenerateInputFeatures(const LearningModel& model, const CommandLineArgs& args,
InputBindingType inputBindingType,
InputDataType inputDataType,
const IDirect3DDevice winrtDevice, uint32_t iterationNum)
const IDirect3DDevice winrtDevice, uint32_t iterationNum,
const std::wstring& imagePath)
{
std::vector<ILearningModelFeatureValue> inputFeatures;
if (!imagePath.empty() && (!args.TerseOutput() || args.TerseOutput() && iterationNum == 0))
{
std::wcout << L"Generating input feature(s) with image: " << imagePath << std::endl;
}
for (uint32_t i = 0; i < model.InputFeatures().Size(); i++)
{
auto&& description = model.InputFeatures().GetAt(i);
@@ -27,7 +31,7 @@ std::vector<ILearningModelFeatureValue> GenerateInputFeatures(const LearningMode
}
else
{
auto imageFeature = BindingUtilities::CreateBindableImage(description, args.ImagePath(), inputBindingType,
auto imageFeature = BindingUtilities::CreateBindableImage(description, imagePath, inputBindingType,
inputDataType, winrtDevice, args, iterationNum);
inputFeatures.push_back(imageFeature);
}
@@ -363,7 +367,7 @@ HRESULT BindInputs(LearningModelBinding& context, const LearningModel& model, co
OutputHelper& output, DeviceType deviceType, const CommandLineArgs& args,
InputBindingType inputBindingType, InputDataType inputDataType, const IDirect3DDevice& winrtDevice,
DeviceCreationLocation deviceCreationLocation, uint32_t iteration,
Profiler<WINML_MODEL_TEST_PERF>& profiler)
Profiler<WINML_MODEL_TEST_PERF>& profiler, const std::wstring& imagePath)
{
if (deviceType == DeviceType::CPU && inputDataType == InputDataType::Tensor &&
inputBindingType == InputBindingType::GPU)
@@ -381,7 +385,7 @@ HRESULT BindInputs(LearningModelBinding& context, const LearningModel& model, co
std::vector<ILearningModelFeatureValue> inputFeatures;
try
{
inputFeatures = GenerateInputFeatures(model, args, inputBindingType, inputDataType, winrtDevice, iteration);
inputFeatures = GenerateInputFeatures(model, args, inputBindingType, inputDataType, winrtDevice, iteration, imagePath);
}
catch (hresult_error hr)
{
@@ -444,7 +448,7 @@ HRESULT CheckIfModelAndConfigurationsAreSupported(LearningModel& model, const st
if (inputFeature.Kind() != LearningModelFeatureKind::Tensor &&
inputFeature.Kind() != LearningModelFeatureKind::Image)
{
std::wcout << L"Model: " + modelPath + L" has an input type that isn't supported by WinMLRunner yet."
std::wcout << L"Model: " + modelPath + L" has an input type that isn't supported by WinMLRunner."
<< std::endl;
return E_NOTIMPL;
}
@@ -505,12 +509,6 @@ HRESULT EvaluateModel(LearningModelEvaluationResult& result, const LearningModel
std::wcout << hr.message().c_str() << std::endl;
return hr.code();
}
// Only print eval results on the first iteration, iff it's not garbage data
if (!args.IsGarbageInput() || args.IsSaveTensor())
{
BindingUtilities::PrintOrSaveEvaluationResults(model, args, result.Outputs(), output, iterationNum);
}
return S_OK;
}
@@ -655,6 +653,85 @@ void PrintIfPIXToolAttached(OutputHelper& output)
}
}
#endif
void RunConfiguration(CommandLineArgs& args, OutputHelper& output, LearningModelSession& session, HRESULT& lastHr,
LearningModel& model, const DeviceType deviceType, const InputBindingType inputBindingType,
const InputDataType inputDataType, const IDirect3DDevice& winrtDevice,
const DeviceCreationLocation deviceCreationLocation, Profiler<WINML_MODEL_TEST_PERF>& profiler,
const std::wstring& modelPath, const std::wstring& imagePath)
{
for (uint32_t i = 0; i < args.NumIterations(); i++)
{
#if defined(_AMD64_)
// PIX markers only work on AMD64
// If PIX tool was attached then capture already began for the first iteration before
// session creation. This is to begin PIX capture for each iteration after the first
// iteration.
if (i > 0)
{
StartPIXCapture(output);
}
#endif
LearningModelBinding context(session);
lastHr = BindInputs(context, model, session, output, deviceType, args, inputBindingType, inputDataType,
winrtDevice, deviceCreationLocation, i, profiler, imagePath);
if (FAILED(lastHr))
{
break;
}
LearningModelEvaluationResult result = nullptr;
bool capture_perf = args.IsPerformanceCapture() || args.IsPerIterationCapture();
lastHr = EvaluateModel(result, model, context, session, args, output, capture_perf, i, profiler);
if (FAILED(lastHr))
{
output.PrintEvaluatingInfo(i + 1, deviceType, inputBindingType, inputDataType, deviceCreationLocation,
"[FAILED]");
break;
}
else if (!args.TerseOutput() || i == 0)
{
output.PrintEvaluatingInfo(i + 1, deviceType, inputBindingType, inputDataType, deviceCreationLocation,
"[SUCCESS]");
// Only print eval results on the first iteration, iff it's not garbage data
if (!args.IsGarbageInput() || args.IsSaveTensor())
{
BindingUtilities::PrintOrSaveEvaluationResults(model, args, result.Outputs(), output, i);
}
if (args.TerseOutput() && args.NumIterations() > 1)
{
printf("Binding and Evaluating %d more time%s...", args.NumIterations() - 1,
(args.NumIterations() == 2 ? "" : "s"));
}
}
#if defined(_AMD64_)
EndPIXCapture(output);
#endif
}
// print metrics after iterations
if (SUCCEEDED(lastHr) && args.IsPerformanceCapture())
{
output.PrintResults(profiler, args.NumIterations(), deviceType, inputBindingType, inputDataType,
deviceCreationLocation, args.IsPerformanceConsoleOutputVerbose());
if (args.IsOutputPerf())
{
std::string deviceTypeStringified = TypeHelper::Stringify(deviceType);
std::string inputDataTypeStringified = TypeHelper::Stringify(inputDataType);
std::string inputBindingTypeStringified = TypeHelper::Stringify(inputBindingType);
std::string deviceCreationLocationStringified = TypeHelper::Stringify(deviceCreationLocation);
output.WritePerformanceDataToCSV(profiler, args.NumIterations(), modelPath, deviceTypeStringified,
inputDataTypeStringified, inputBindingTypeStringified,
deviceCreationLocationStringified, args.GetPerformanceFileMetadata());
}
}
if (SUCCEEDED(lastHr) && args.IsPerIterationCapture())
{
output.WritePerIterationPerformance(args, model.Name().c_str(), imagePath);
}
}
int run(CommandLineArgs& args, Profiler<WINML_MODEL_TEST_PERF>& profiler) try
{
// Initialize COM in a multi-threaded environment.
@@ -728,74 +805,21 @@ int run(CommandLineArgs& args, Profiler<WINML_MODEL_TEST_PERF>& profiler) try
// Resets all values from profiler for bind and evaluate.
profiler.Reset(WINML_MODEL_TEST_PERF::BIND_VALUE, WINML_MODEL_TEST_PERF::COUNT);
}
for (uint32_t i = 0; i < args.NumIterations(); i++)
if (args.IsImageInput())
{
#if defined(_AMD64_)
// PIX markers only work on AMD64
// If PIX tool was attached then capture already began for the first iteration before
// session creation. This is to begin PIX capture for each iteration after the first
// iteration.
if (i > 0)
for (const std::wstring& inputImagePath : args.ImagePaths())
{
StartPIXCapture(output);
}
#endif
LearningModelBinding context(session);
lastHr = BindInputs(context, model, session, output, deviceType, args, inputBindingType,
inputDataType, winrtDevice, deviceCreationLocation, i, profiler);
if (FAILED(lastHr))
{
break;
}
LearningModelEvaluationResult result = nullptr;
bool capture_perf = args.IsPerformanceCapture() || args.IsPerIterationCapture();
lastHr = EvaluateModel(result, model, context, session, args, output, capture_perf, i,
profiler);
if (FAILED(lastHr))
{
output.PrintEvaluatingInfo(i + 1, deviceType, inputBindingType, inputDataType,
deviceCreationLocation, "[FAILED]");
break;
}
else if (!args.TerseOutput() || i == 0)
{
output.PrintEvaluatingInfo(i + 1, deviceType, inputBindingType, inputDataType,
deviceCreationLocation, "[SUCCESS]");
if (args.TerseOutput() && args.NumIterations() > 1)
{
printf("Binding and Evaluating %d more time%s...", args.NumIterations() - 1,
(args.NumIterations() == 2 ? "" : "s"));
}
}
#if defined(_AMD64_)
EndPIXCapture(output);
#endif
}
// print metrics after iterations
if (SUCCEEDED(lastHr) && args.IsPerformanceCapture())
{
output.PrintResults(profiler, args.NumIterations(), deviceType, inputBindingType,
inputDataType, deviceCreationLocation,
args.IsPerformanceConsoleOutputVerbose());
if (args.IsOutputPerf())
{
std::string deviceTypeStringified = TypeHelper::Stringify(deviceType);
std::string inputDataTypeStringified = TypeHelper::Stringify(inputDataType);
std::string inputBindingTypeStringified = TypeHelper::Stringify(inputBindingType);
std::string deviceCreationLocationStringified =
TypeHelper::Stringify(deviceCreationLocation);
output.WritePerformanceDataToCSV(profiler, args.NumIterations(), path,
deviceTypeStringified, inputDataTypeStringified,
inputBindingTypeStringified,
deviceCreationLocationStringified,
args.GetPerformanceFileMetadata());
RunConfiguration(args, output, session, lastHr, model, deviceType, inputBindingType,
inputDataType, winrtDevice, deviceCreationLocation, profiler, path,
inputImagePath);
}
}
if (SUCCEEDED(lastHr) && args.IsPerIterationCapture())
else
{
output.WritePerIterationPerformance(args, model.Name().c_str());
const std::wstring& inputImagePath = L"";
RunConfiguration(args, output, session, lastHr, model, deviceType, inputBindingType,
inputDataType, winrtDevice, deviceCreationLocation, profiler, path,
L"");
}
}
}