Initial commit of holographic remoting samples (#1)

This commit is contained in:
Benjamin Thaut 2019-07-26 14:02:46 +02:00 committed by GitHub
Parent 84a96f104b
Commit f3a6ce1645
No known key found for this signature
GPG Key ID: 4AEE18F83AFDEB23
163 changed files with 20021 additions and 1 deletion

43
.clang-format Normal file
View File

@@ -0,0 +1,43 @@
BasedOnStyle: LLVM
UseTab: Never
IndentWidth: 4
BreakBeforeBraces: Custom
BraceWrapping:
AfterClass: true
AfterControlStatement: true
AfterEnum: true
AfterFunction: true
AfterNamespace: true
AfterStruct: true
AfterUnion: true
AfterExternBlock: true
BeforeCatch: true
BeforeElse: true
IndentBraces: false
SplitEmptyFunction: true
SplitEmptyRecord: true
SplitEmptyNamespace: true
AfterCaseLabel: true
AllowShortIfStatementsOnASingleLine: false
ColumnLimit: 140
TabWidth: 4
AccessModifierOffset: -2
PointerAlignment: Left
IndentCaseLabels: true
AllowShortFunctionsOnASingleLine: Inline
MaxEmptyLinesToKeep: 3
IndentPPDirectives: AfterHash
BreakConstructorInitializers: BeforeComma
AlwaysBreakTemplateDeclarations: true
AlignAfterOpenBracket: AlwaysBreak
AllowAllParametersOfDeclarationOnNextLine: true
BinPackArguments: false
BinPackParameters: false
FixNamespaceComments: true
IndentWrappedFunctionNames: true
CompactNamespaces: true
NamespaceIndentation: All
AllowShortBlocksOnASingleLine: false
AllowShortFunctionsOnASingleLine: false
AllowShortCaseLabelsOnASingleLine: false
AccessModifierOffset: -4

18
.editorconfig Normal file
View File

@@ -0,0 +1,18 @@
# EditorConfig is awesome: http://EditorConfig.org
# To make use of this, simply install a plugin for Visual Studio 20XX, Visual Studio Code, Notepad++ or XCode
# Will work out of the box with Visual Studio 15+
# top-most EditorConfig file
root = true
# Default settings for all files
[*]
charset = utf-8
indent_style = space
trim_trailing_whitespace = true
# additional settings for code files
[*.{cpp,h,inl,c,cs}]
indent_size = 4
end_of_line = crlf
insert_final_newline = true

View File

@@ -1,5 +1,27 @@
# Contributing
# Holographic Remoting Samples
This repository hosts the two sample applications for Holographic Remoting (https://docs.microsoft.com/en-us/windows/mixed-reality/holographic-remoting-player).
* The hostsampleapp sample shows how to write an application that streams content to a Microsoft HoloLens 2.
* The player sample shows how to write an application that runs on your Microsoft HoloLens 2 and receives streamed content. The player sample is very similar to the Holographic Remoting Player available in the store.
## Prerequisites
* Visual Studio 2017
* With C++ Build Packages
* With C++ UWP Build Packages
* With Spectre Mitigation Libraries Packages (for release builds only)
* With ARM and ARM64 C++ Packages
* Windows SDK 10.0.18362.0 (for Windows 10, version 1903)
## Getting Started
1. Open one of the ```.sln``` files either under ```player/``` or ```hostsampleapp/```.
2. On first use, make sure to restore any missing NuGet packages.
3. Then build and run.
* When running the hostsampleapp, pass the IP address of your HoloLens device as the first argument to the application (see the example below).
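For example, from the build output folder of the desktop sample (the executable name shown here is illustrative and depends on the project you build, and ```192.168.0.1``` stands in for your device's IP address): ```SampleHostWin32.exe 192.168.0.1```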
## Contributing
This project welcomes contributions and suggestions. Most contributions require you to agree to a
Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us

Binary data
hostsampleapp/desktop/Assets/LockScreenLogo.scale-200.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary data
hostsampleapp/desktop/Assets/SplashScreen.scale-200.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.5 KiB

Binary data
hostsampleapp/desktop/Assets/Square150x150Logo.scale-200.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.9 KiB

Binary data
hostsampleapp/desktop/Assets/Square44x44Logo.scale-200.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary data
hostsampleapp/desktop/Assets/StoreLogo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary data
hostsampleapp/desktop/Assets/Wide310x150Logo.scale-200.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.1 KiB

View File

@@ -0,0 +1,251 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "CameraResources.h"
#include "DeviceResources.h"
#include "DirectXHelper.h"
#include <windows.graphics.directx.direct3d11.interop.h>
using namespace DirectX;
using namespace winrt::Windows::Graphics::Holographic;
using namespace winrt::Windows::Perception::Spatial;
DXHelper::CameraResources::CameraResources(const HolographicCamera& camera)
: m_holographicCamera(camera)
, m_isStereo(camera.IsStereo())
, m_d3dRenderTargetSize(camera.RenderTargetSize())
{
m_d3dViewport = CD3D11_VIEWPORT(0.f, 0.f, m_d3dRenderTargetSize.Width, m_d3dRenderTargetSize.Height);
}
// Updates resources associated with a holographic camera's swap chain.
// The app does not access the swap chain directly, but it does create
// resource views for the back buffer.
void DXHelper::CameraResources::CreateResourcesForBackBuffer(
DXHelper::DeviceResources* pDeviceResources, const HolographicCameraRenderingParameters& cameraParameters)
{
const auto device = pDeviceResources->GetD3DDevice();
// Get a DXGI interface for the holographic camera's back buffer.
// Holographic cameras do not provide the DXGI swap chain, which is owned
// by the system. The Direct3D back buffer resource is provided using WinRT
// interop APIs.
winrt::com_ptr<ID3D11Resource> resource;
{
winrt::com_ptr<::IInspectable> inspectable = cameraParameters.Direct3D11BackBuffer().as<::IInspectable>();
winrt::com_ptr<Windows::Graphics::DirectX::Direct3D11::IDirect3DDxgiInterfaceAccess> dxgiInterfaceAccess;
HRESULT hr = inspectable->QueryInterface(__uuidof(dxgiInterfaceAccess), dxgiInterfaceAccess.put_void());
if (FAILED(hr))
{
winrt::throw_hresult(hr);
}
hr = dxgiInterfaceAccess->GetInterface(__uuidof(resource), resource.put_void());
if (FAILED(hr))
{
winrt::throw_hresult(hr);
}
}
// Get a Direct3D interface for the holographic camera's back buffer.
winrt::com_ptr<ID3D11Texture2D> cameraBackBuffer;
resource.as(cameraBackBuffer);
// Determine if the back buffer has changed. If so, ensure that the render target view
// is for the current back buffer.
if (m_d3dBackBuffer != cameraBackBuffer)
{
// This can change every frame as the system moves to the next buffer in the
// swap chain. This mode of operation will occur when certain rendering modes
// are activated.
m_d3dBackBuffer = cameraBackBuffer;
// Get the DXGI format for the back buffer.
// This information can be accessed by the app using CameraResources::GetBackBufferDXGIFormat().
D3D11_TEXTURE2D_DESC backBufferDesc;
m_d3dBackBuffer->GetDesc(&backBufferDesc);
m_dxgiFormat = backBufferDesc.Format;
D3D11_RENDER_TARGET_VIEW_DESC viewDesc = {};
viewDesc.ViewDimension = backBufferDesc.ArraySize > 1 ? D3D11_RTV_DIMENSION_TEXTURE2DARRAY : D3D11_RTV_DIMENSION_TEXTURE2D;
viewDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;
if (backBufferDesc.ArraySize > 1)
{
viewDesc.Texture2DArray.ArraySize = backBufferDesc.ArraySize;
}
// Create a render target view of the back buffer.
// Creating this resource is inexpensive, and is better than keeping track of
// the back buffers in order to pre-allocate render target views for each one.
m_d3dRenderTargetView = nullptr;
winrt::check_hresult(device->CreateRenderTargetView(m_d3dBackBuffer.get(), &viewDesc, m_d3dRenderTargetView.put()));
// Check for render target size changes.
winrt::Windows::Foundation::Size currentSize = m_holographicCamera.RenderTargetSize();
if (m_d3dRenderTargetSize != currentSize)
{
// Set render target size.
m_d3dRenderTargetSize = currentSize;
// A new depth stencil view is also needed.
m_d3dDepthStencilView = nullptr;
}
}
// Refresh depth stencil resources, if needed.
if (m_d3dDepthStencilView == nullptr)
{
// Create a depth stencil view for use with 3D rendering if needed.
CD3D11_TEXTURE2D_DESC depthStencilDesc(
DXGI_FORMAT_D16_UNORM,
static_cast<UINT>(m_d3dRenderTargetSize.Width),
static_cast<UINT>(m_d3dRenderTargetSize.Height),
m_isStereo ? 2 : 1, // Create two textures when rendering in stereo.
1, // Use a single mipmap level.
D3D11_BIND_DEPTH_STENCIL);
winrt::com_ptr<ID3D11Texture2D> depthStencil;
winrt::check_hresult(device->CreateTexture2D(&depthStencilDesc, nullptr, depthStencil.put()));
CD3D11_DEPTH_STENCIL_VIEW_DESC depthStencilViewDesc(
m_isStereo ? D3D11_DSV_DIMENSION_TEXTURE2DARRAY : D3D11_DSV_DIMENSION_TEXTURE2D);
winrt::check_hresult(device->CreateDepthStencilView(depthStencil.get(), &depthStencilViewDesc, m_d3dDepthStencilView.put()));
}
// Create the constant buffer, if needed.
if (m_viewProjectionConstantBuffer == nullptr)
{
// Create a constant buffer to store view and projection matrices for the camera.
CD3D11_BUFFER_DESC constantBufferDesc(sizeof(ViewProjectionConstantBuffer), D3D11_BIND_CONSTANT_BUFFER);
winrt::check_hresult(device->CreateBuffer(&constantBufferDesc, nullptr, m_viewProjectionConstantBuffer.put()));
}
}
// Releases resources associated with a back buffer.
void DXHelper::CameraResources::ReleaseResourcesForBackBuffer(DXHelper::DeviceResources* pDeviceResources)
{
// Release camera-specific resources.
m_d3dBackBuffer = nullptr;
m_d3dRenderTargetView = nullptr;
m_d3dDepthStencilView = nullptr;
m_viewProjectionConstantBuffer = nullptr;
// Ensure system references to the back buffer are released by clearing the render
// target from the graphics pipeline state, and then flushing the Direct3D context.
pDeviceResources->UseD3DDeviceContext([](auto context) {
ID3D11RenderTargetView* nullViews[D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT] = {nullptr};
context->OMSetRenderTargets(ARRAYSIZE(nullViews), nullViews, nullptr);
context->Flush();
});
}
// Updates the view/projection constant buffer for a holographic camera.
void DXHelper::CameraResources::UpdateViewProjectionBuffer(
std::shared_ptr<DXHelper::DeviceResources> deviceResources,
const HolographicCameraPose& cameraPose,
const SpatialCoordinateSystem& coordinateSystem)
{
// The system changes the viewport on a per-frame basis for system optimizations.
m_d3dViewport =
CD3D11_VIEWPORT(cameraPose.Viewport().X, cameraPose.Viewport().Y, cameraPose.Viewport().Width, cameraPose.Viewport().Height);
// The projection transform for each frame is provided by the HolographicCameraPose.
auto cameraProjectionTransform = cameraPose.ProjectionTransform();
// Get a container object with the view and projection matrices for the given
// pose in the given coordinate system.
auto viewTransformContainer = cameraPose.TryGetViewTransform(coordinateSystem);
// If TryGetViewTransform returns a null pointer, that means the pose and coordinate
// system cannot be understood relative to one another; content cannot be rendered
// in this coordinate system for the duration of the current frame.
// This usually means that positional tracking is not active for the current frame, in
// which case it is possible to use a SpatialLocatorAttachedFrameOfReference to render
// content that is not world-locked instead.
DXHelper::ViewProjectionConstantBuffer viewProjectionConstantBufferData = {};
bool viewTransformAcquired = viewTransformContainer != nullptr;
if (viewTransformAcquired)
{
// Otherwise, the set of view transforms can be retrieved.
auto viewCoordinateSystemTransform = viewTransformContainer.Value();
// Update the view matrices. Holographic cameras (such as Microsoft HoloLens) are
// constantly moving relative to the world. The view matrices need to be updated
// every frame.
XMStoreFloat4x4(
&viewProjectionConstantBufferData.viewProjection[0],
XMMatrixTranspose(XMLoadFloat4x4(&viewCoordinateSystemTransform.Left) * XMLoadFloat4x4(&cameraProjectionTransform.Left)));
XMStoreFloat4x4(
&viewProjectionConstantBufferData.viewProjection[1],
XMMatrixTranspose(XMLoadFloat4x4(&viewCoordinateSystemTransform.Right) * XMLoadFloat4x4(&cameraProjectionTransform.Right)));
}
// Use the D3D device context to update Direct3D device-based resources.
deviceResources->UseD3DDeviceContext([&](auto context) {
// Loading is asynchronous. Resources must be created before they can be updated.
if (context == nullptr || m_viewProjectionConstantBuffer == nullptr || !viewTransformAcquired)
{
m_framePending = false;
}
else
{
// Update the view and projection matrices.
context->UpdateSubresource(m_viewProjectionConstantBuffer.get(), 0, nullptr, &viewProjectionConstantBufferData, 0, 0);
m_framePending = true;
}
});
}
// Gets the view-projection constant buffer for the HolographicCamera and attaches it
// to the shader pipeline.
bool DXHelper::CameraResources::AttachViewProjectionBuffer(std::shared_ptr<DXHelper::DeviceResources> deviceResources)
{
// This method uses Direct3D device-based resources.
return deviceResources->UseD3DDeviceContext([&](auto context) {
// Loading is asynchronous. Resources must be created before they can be updated.
// Cameras can also be added asynchronously, in which case they must be initialized
// before they can be used.
if (context == nullptr || m_viewProjectionConstantBuffer == nullptr || m_framePending == false)
{
return false;
}
// Set the viewport for this camera.
context->RSSetViewports(1, &m_d3dViewport);
// Send the constant buffer to the vertex shader.
ID3D11Buffer* pBuffer = m_viewProjectionConstantBuffer.get();
context->VSSetConstantBuffers(1, 1, &pBuffer);
// The template includes a pass-through geometry shader that is used by
// default on systems that don't support the D3D11_FEATURE_D3D11_OPTIONS3::
// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer extension. The shader
// will be enabled at run-time on systems that require it.
// If your app will also use the geometry shader for other tasks and those
// tasks require the view/projection matrix, uncomment the following line
// of code to send the constant buffer to the geometry shader as well.
/*context->GSSetConstantBuffers(
1,
1,
m_viewProjectionConstantBuffer.GetAddressOf()
);*/
m_framePending = false;
return true;
});
}

View File

@@ -0,0 +1,115 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <winrt/Windows.Graphics.Holographic.h>
#include <d3d11.h>
using namespace winrt::Windows::Graphics::Holographic;
using namespace winrt::Windows::Perception::Spatial;
namespace DXHelper
{
class DeviceResources;
// Constant buffer used to send the view-projection matrices to the shader pipeline.
struct ViewProjectionConstantBuffer
{
DirectX::XMFLOAT4X4 viewProjection[2];
};
// Assert that the constant buffer remains 16-byte aligned (best practice).
static_assert(
(sizeof(ViewProjectionConstantBuffer) % (sizeof(float) * 4)) == 0,
"ViewProjection constant buffer size must be 16-byte aligned (16 bytes is the length of four floats).");
// Manages DirectX device resources that are specific to a holographic camera, such as the
// back buffer, ViewProjection constant buffer, and viewport.
class CameraResources
{
public:
CameraResources(const HolographicCamera& holographicCamera);
void CreateResourcesForBackBuffer(
DXHelper::DeviceResources* pDeviceResources, const HolographicCameraRenderingParameters& cameraParameters);
void ReleaseResourcesForBackBuffer(DXHelper::DeviceResources* pDeviceResources);
void UpdateViewProjectionBuffer(
std::shared_ptr<DXHelper::DeviceResources> deviceResources,
const HolographicCameraPose& cameraPose,
const SpatialCoordinateSystem& coordinateSystem);
bool AttachViewProjectionBuffer(std::shared_ptr<DXHelper::DeviceResources> deviceResources);
// Direct3D device resources.
ID3D11RenderTargetView* GetBackBufferRenderTargetView() const
{
return m_d3dRenderTargetView.get();
}
ID3D11DepthStencilView* GetDepthStencilView() const
{
return m_d3dDepthStencilView.get();
}
ID3D11Texture2D* GetBackBufferTexture2D() const
{
return m_d3dBackBuffer.get();
}
D3D11_VIEWPORT GetViewport() const
{
return m_d3dViewport;
}
DXGI_FORMAT GetBackBufferDXGIFormat() const
{
return m_dxgiFormat;
}
// Render target properties.
winrt::Windows::Foundation::Size GetRenderTargetSize() const
{
return m_d3dRenderTargetSize;
}
bool IsRenderingStereoscopic() const
{
return m_isStereo;
}
// The holographic camera these resources are for.
const HolographicCamera& GetHolographicCamera() const
{
return m_holographicCamera;
}
private:
// Direct3D rendering objects. Required for 3D.
winrt::com_ptr<ID3D11RenderTargetView> m_d3dRenderTargetView;
winrt::com_ptr<ID3D11DepthStencilView> m_d3dDepthStencilView;
winrt::com_ptr<ID3D11Texture2D> m_d3dBackBuffer;
// Device resource to store view and projection matrices.
winrt::com_ptr<ID3D11Buffer> m_viewProjectionConstantBuffer;
// Direct3D rendering properties.
DXGI_FORMAT m_dxgiFormat;
winrt::Windows::Foundation::Size m_d3dRenderTargetSize;
D3D11_VIEWPORT m_d3dViewport;
// Indicates whether the camera supports stereoscopic rendering.
bool m_isStereo = false;
// Indicates whether this camera has a pending frame.
bool m_framePending = false;
// Pointer to the holographic camera these resources are for.
HolographicCamera m_holographicCamera = nullptr;
};
} // namespace DXHelper
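The methods above are typically driven once per frame for each holographic camera. A hedged sketch of that flow, assuming the caller already has deviceResources (a std::shared_ptr<DXHelper::DeviceResources>), the current HolographicFrame frame, the cameraPose, the rendering coordinateSystem, and this camera's CameraResources* cameraResources:

```cpp
// Hedged per-frame sketch; not part of the sample source.
cameraResources->CreateResourcesForBackBuffer(deviceResources.get(), frame.GetRenderingParameters(cameraPose));
cameraResources->UpdateViewProjectionBuffer(deviceResources, cameraPose, coordinateSystem);
if (cameraResources->AttachViewProjectionBuffer(deviceResources))
{
    // The view/projection constant buffer is now bound to vertex shader slot 1 and the viewport is set;
    // bind GetBackBufferRenderTargetView()/GetDepthStencilView() and issue draw calls for this camera here.
}
```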

View File

@@ -0,0 +1,37 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <strsafe.h>
static void DebugLog(_In_z_ LPCWSTR format, ...)
{
wchar_t buffer[1024];
LPWSTR bufEnd = nullptr;
va_list args;
va_start(args, format);
HRESULT hr =
StringCchVPrintfExW(buffer, _countof(buffer), &bufEnd, nullptr, STRSAFE_FILL_BEHIND_NULL | STRSAFE_FILL_ON_FAILURE, format, args);
if (SUCCEEDED(hr))
{
if (*bufEnd != L'\n')
{
StringCchCatW(buffer, _countof(buffer), L"\r\n");
}
OutputDebugStringW(buffer);
}
va_end(args);
}
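A minimal usage sketch for the helper above; the messages and values are purely illustrative:

```cpp
// DebugLog formats like wprintf and writes the result to the debugger output via OutputDebugStringW.
DebugLog(L"Holographic remoting host starting");
DebugLog(L"Connection attempt %d failed with HRESULT 0x%08X", 3, static_cast<unsigned int>(E_FAIL));
```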

View File

@@ -0,0 +1,304 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "DeviceResources.h"
#include "DirectXHelper.h"
#include <winrt/Windows.Graphics.Display.h>
using namespace D2D1;
using namespace Microsoft::WRL;
using namespace winrt::Windows::Graphics::DirectX::Direct3D11;
using namespace winrt::Windows::Graphics::Display;
using namespace winrt::Windows::Graphics::Holographic;
// Constructor for DeviceResources.
DXHelper::DeviceResources::DeviceResources()
{
CreateDeviceIndependentResources();
}
DXHelper::DeviceResources::~DeviceResources()
{
if (m_d3dContext)
{
m_d3dContext->Flush();
m_d3dContext->ClearState();
}
}
// Configures resources that don't depend on the Direct3D device.
void DXHelper::DeviceResources::CreateDeviceIndependentResources()
{
// Initialize Direct2D resources.
D2D1_FACTORY_OPTIONS options{};
#if defined(_DEBUG)
// If the project is in a debug build, enable Direct2D debugging via SDK Layers.
options.debugLevel = D2D1_DEBUG_LEVEL_INFORMATION;
#endif
// Initialize the Direct2D Factory.
winrt::check_hresult(D2D1CreateFactory(D2D1_FACTORY_TYPE_SINGLE_THREADED, __uuidof(ID2D1Factory2), &options, m_d2dFactory.put_void()));
// Initialize the DirectWrite Factory.
winrt::check_hresult(
DWriteCreateFactory(DWRITE_FACTORY_TYPE_SHARED, __uuidof(IDWriteFactory2), (::IUnknown**)m_dwriteFactory.put_void()));
// Initialize the Windows Imaging Component (WIC) Factory.
winrt::check_hresult(
CoCreateInstance(CLSID_WICImagingFactory2, nullptr, CLSCTX_INPROC_SERVER, __uuidof(m_wicFactory), m_wicFactory.put_void()));
}
void DXHelper::DeviceResources::SetHolographicSpace(HolographicSpace holographicSpace)
{
// Cache the holographic space. Used to re-initialize during device-lost scenarios.
m_holographicSpace = holographicSpace;
InitializeUsingHolographicSpace();
}
void DXHelper::DeviceResources::InitializeUsingHolographicSpace()
{
// The holographic space might need to determine which adapter supports
// holograms, in which case it will specify a non-zero PrimaryAdapterId.
LUID id = {m_holographicSpace.PrimaryAdapterId().LowPart, m_holographicSpace.PrimaryAdapterId().HighPart};
// When a primary adapter ID is given to the app, the app should find
// the corresponding DXGI adapter and use it to create Direct3D devices
// and device contexts. Otherwise, there is no restriction on the DXGI
// adapter the app can use.
if ((id.HighPart != 0) || (id.LowPart != 0))
{
UINT createFlags = 0;
#ifdef DEBUG
if (DXHelper::SdkLayersAvailable())
{
createFlags |= DXGI_CREATE_FACTORY_DEBUG;
}
#endif
// Create the DXGI factory.
winrt::com_ptr<IDXGIFactory1> dxgiFactory;
winrt::check_hresult(CreateDXGIFactory2(createFlags, __uuidof(dxgiFactory), dxgiFactory.put_void()));
winrt::com_ptr<IDXGIFactory4> dxgiFactory4;
dxgiFactory.as(dxgiFactory4);
// Retrieve the adapter specified by the holographic space.
winrt::check_hresult(dxgiFactory4->EnumAdapterByLuid(id, __uuidof(m_dxgiAdapter), m_dxgiAdapter.put_void()));
}
else
{
m_dxgiAdapter = nullptr;
}
CreateDeviceResources();
m_holographicSpace.SetDirect3D11Device(m_d3dInteropDevice);
}
// Configures the Direct3D device, and stores handles to it and the device context.
void DXHelper::DeviceResources::CreateDeviceResources()
{
// This flag adds support for surfaces with a different color channel ordering
// than the API default. It is required for compatibility with Direct2D.
UINT creationFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
#if defined(_DEBUG)
if (DXHelper::SdkLayersAvailable())
{
// If the project is in a debug build, enable debugging via SDK Layers with this flag.
creationFlags |= D3D11_CREATE_DEVICE_DEBUG;
}
#endif
// This array defines the set of DirectX hardware feature levels this app will support.
// Note the ordering should be preserved.
// Note that HoloLens supports feature level 11.1. The HoloLens emulator is also capable
// of running on graphics cards starting with feature level 10.0.
D3D_FEATURE_LEVEL featureLevels[] = {D3D_FEATURE_LEVEL_12_1,
D3D_FEATURE_LEVEL_12_0,
D3D_FEATURE_LEVEL_11_1,
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0};
// Create the Direct3D 11 API device object and a corresponding context.
winrt::com_ptr<ID3D11Device> device;
winrt::com_ptr<ID3D11DeviceContext> context;
const D3D_DRIVER_TYPE driverType = m_dxgiAdapter == nullptr ? D3D_DRIVER_TYPE_HARDWARE : D3D_DRIVER_TYPE_UNKNOWN;
const HRESULT hr = D3D11CreateDevice(
m_dxgiAdapter.get(), // Either nullptr, or the primary adapter determined by Windows Holographic.
driverType, // Create a device using the hardware graphics driver.
0, // Should be 0 unless the driver is D3D_DRIVER_TYPE_SOFTWARE.
creationFlags, // Set debug and Direct2D compatibility flags.
featureLevels, // List of feature levels this app can support.
ARRAYSIZE(featureLevels), // Size of the list above.
D3D11_SDK_VERSION, // Always set this to D3D11_SDK_VERSION for Windows Runtime apps.
device.put(), // Returns the Direct3D device created.
&m_d3dFeatureLevel, // Returns feature level of device created.
context.put() // Returns the device immediate context.
);
if (FAILED(hr))
{
// If the initialization fails, fall back to the WARP device.
// For more information on WARP, see:
// http://go.microsoft.com/fwlink/?LinkId=286690
winrt::check_hresult(D3D11CreateDevice(
nullptr, // Use the default DXGI adapter for WARP.
D3D_DRIVER_TYPE_WARP, // Create a WARP device instead of a hardware device.
0,
creationFlags,
featureLevels,
ARRAYSIZE(featureLevels),
D3D11_SDK_VERSION,
device.put(),
&m_d3dFeatureLevel,
context.put()));
}
// Store pointers to the Direct3D device and immediate context.
device.as(m_d3dDevice);
context.as(m_d3dContext);
// Acquire the DXGI interface for the Direct3D device.
winrt::com_ptr<IDXGIDevice3> dxgiDevice;
m_d3dDevice.as(dxgiDevice);
// Wrap the native device using a WinRT interop object.
winrt::com_ptr<::IInspectable> object;
winrt::check_hresult(CreateDirect3D11DeviceFromDXGIDevice(dxgiDevice.get(), reinterpret_cast<IInspectable**>(winrt::put_abi(object))));
m_d3dInteropDevice = object.as<IDirect3DDevice>();
// Cache the DXGI adapter.
// This is for the case of no preferred DXGI adapter, or fallback to WARP.
winrt::com_ptr<IDXGIAdapter> dxgiAdapter;
dxgiDevice->GetAdapter(dxgiAdapter.put());
dxgiAdapter.as(m_dxgiAdapter);
// Check for device support for the optional feature that allows setting the render target array index from the vertex shader stage.
D3D11_FEATURE_DATA_D3D11_OPTIONS3 options;
m_d3dDevice->CheckFeatureSupport(D3D11_FEATURE_D3D11_OPTIONS3, &options, sizeof(options));
if (options.VPAndRTArrayIndexFromAnyShaderFeedingRasterizer)
{
m_supportsVprt = true;
}
}
// Validates the back buffer for each HolographicCamera and recreates
// resources for back buffers that have changed.
// Locks the set of holographic camera resources until the function exits.
void DXHelper::DeviceResources::EnsureCameraResources(HolographicFrame frame, HolographicFramePrediction prediction)
{
UseHolographicCameraResources([this, frame, prediction](std::map<UINT32, std::unique_ptr<CameraResources>>& cameraResourceMap) {
for (HolographicCameraPose const& cameraPose : prediction.CameraPoses())
{
try
{
HolographicCameraRenderingParameters renderingParameters = frame.GetRenderingParameters(cameraPose);
CameraResources* pCameraResources = cameraResourceMap[cameraPose.HolographicCamera().Id()].get();
pCameraResources->CreateResourcesForBackBuffer(this, renderingParameters);
}
catch (const winrt::hresult_error&)
{
}
}
});
}
// Prepares to allocate resources and adds resource views for a camera.
// Locks the set of holographic camera resources until the function exits.
void DXHelper::DeviceResources::AddHolographicCamera(HolographicCamera camera)
{
UseHolographicCameraResources([this, camera](std::map<UINT32, std::unique_ptr<CameraResources>>& cameraResourceMap) {
cameraResourceMap[camera.Id()] = std::make_unique<CameraResources>(camera);
});
}
// Deallocates resources for a camera and removes the camera from the set.
// Locks the set of holographic camera resources until the function exits.
void DXHelper::DeviceResources::RemoveHolographicCamera(HolographicCamera camera)
{
UseHolographicCameraResources([this, camera](std::map<UINT32, std::unique_ptr<CameraResources>>& cameraResourceMap) {
CameraResources* pCameraResources = cameraResourceMap[camera.Id()].get();
if (pCameraResources != nullptr)
{
pCameraResources->ReleaseResourcesForBackBuffer(this);
cameraResourceMap.erase(camera.Id());
}
});
}
// Recreate all device resources and set them back to the current state.
// Locks the set of holographic camera resources until the function exits.
void DXHelper::DeviceResources::HandleDeviceLost()
{
if (m_deviceNotify != nullptr)
{
m_deviceNotify->OnDeviceLost();
}
UseHolographicCameraResources([this](std::map<UINT32, std::unique_ptr<CameraResources>>& cameraResourceMap) {
for (auto& pair : cameraResourceMap)
{
CameraResources* pCameraResources = pair.second.get();
pCameraResources->ReleaseResourcesForBackBuffer(this);
}
});
InitializeUsingHolographicSpace();
if (m_deviceNotify != nullptr)
{
m_deviceNotify->OnDeviceRestored();
}
}
// Register our DeviceNotify to be informed on device lost and creation.
void DXHelper::DeviceResources::RegisterDeviceNotify(DXHelper::IDeviceNotify* deviceNotify)
{
m_deviceNotify = deviceNotify;
}
// Call this method when the app suspends. It provides a hint to the driver that the app
// is entering an idle state and that temporary buffers can be reclaimed for use by other apps.
void DXHelper::DeviceResources::Trim()
{
m_d3dContext->ClearState();
winrt::com_ptr<IDXGIDevice3> dxgiDevice;
m_d3dDevice.as(dxgiDevice);
dxgiDevice->Trim();
}
// Present the contents of the swap chain to the screen.
// Locks the set of holographic camera resources until the function exits.
void DXHelper::DeviceResources::Present(HolographicFrame frame)
{
// By default, this API waits for the frame to finish before it returns.
// For Holographic Remoting we do not wait here, but instead wait in SampleHostMain::Update to ensure that CreateNextFrame is called
// with a delta of exactly 16.6 ms (in the 60 Hz case).
HolographicFramePresentResult presentResult =
frame.PresentUsingCurrentPrediction(HolographicFramePresentWaitBehavior::DoNotWaitForFrameToFinish);
// The PresentUsingCurrentPrediction API will detect when the graphics device
// changes or becomes invalid. When this happens, it is considered a Direct3D
// device lost scenario.
if (presentResult == HolographicFramePresentResult::DeviceRemoved)
{
// The Direct3D device, context, and resources should be recreated.
HandleDeviceLost();
}
}
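A hedged sketch of the expected setup and per-frame use of this class; holographicSpace, holographicFrame, and the enclosing app object are assumptions not shown in this file:

```cpp
// Hedged setup sketch: construct the resources, register for device-lost notifications,
// then hand over the HolographicSpace so the D3D device is created and attached to it.
auto deviceResources = std::make_shared<DXHelper::DeviceResources>();
deviceResources->RegisterDeviceNotify(this); // assumes 'this' implements DXHelper::IDeviceNotify
deviceResources->SetHolographicSpace(holographicSpace);

// Per frame, after rendering into the camera back buffers:
deviceResources->Present(holographicFrame);
```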

View File

@@ -0,0 +1,156 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "CameraResources.h"
#include <d2d1_2.h>
#include <d3d11_4.h>
#include <dwrite_2.h>
#include <wincodec.h>
#include <wrl/client.h>
namespace DXHelper
{
// Provides an interface for an application that owns DeviceResources to be notified of the device being lost or created.
interface IDeviceNotify
{
virtual void OnDeviceLost() = 0;
virtual void OnDeviceRestored() = 0;
};
// Creates and manages a Direct3D device and immediate context, Direct2D device and context (for debug), and the holographic swap chain.
class DeviceResources
{
public:
DeviceResources();
~DeviceResources();
// Public methods related to Direct3D devices.
void HandleDeviceLost();
void RegisterDeviceNotify(IDeviceNotify* deviceNotify);
void Trim();
void Present(winrt::Windows::Graphics::Holographic::HolographicFrame frame);
// Public methods related to holographic devices.
void SetHolographicSpace(winrt::Windows::Graphics::Holographic::HolographicSpace space);
void EnsureCameraResources(
winrt::Windows::Graphics::Holographic::HolographicFrame frame,
winrt::Windows::Graphics::Holographic::HolographicFramePrediction prediction);
void AddHolographicCamera(winrt::Windows::Graphics::Holographic::HolographicCamera camera);
void RemoveHolographicCamera(winrt::Windows::Graphics::Holographic::HolographicCamera camera);
// Holographic accessors.
template <typename LCallback>
void UseHolographicCameraResources(LCallback const& callback);
winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice GetD3DInteropDevice() const
{
return m_d3dInteropDevice;
}
// D3D accessors.
ID3D11Device4* GetD3DDevice() const
{
return m_d3dDevice.get();
}
template <typename F>
auto UseD3DDeviceContext(F func) const
{
std::scoped_lock lock(m_d3dContextMutex);
return func(m_d3dContext.get());
}
D3D_FEATURE_LEVEL GetDeviceFeatureLevel() const
{
return m_d3dFeatureLevel;
}
bool GetDeviceSupportsVprt() const
{
return m_supportsVprt;
}
// DXGI accessors.
IDXGIAdapter3* GetDXGIAdapter() const
{
return m_dxgiAdapter.get();
}
// D2D accessors.
ID2D1Factory2* GetD2DFactory() const
{
return m_d2dFactory.get();
}
IDWriteFactory2* GetDWriteFactory() const
{
return m_dwriteFactory.get();
}
IWICImagingFactory2* GetWicImagingFactory() const
{
return m_wicFactory.get();
}
protected:
void CreateDeviceResources();
private:
// Private methods related to the Direct3D device, and resources based on that device.
void CreateDeviceIndependentResources();
void InitializeUsingHolographicSpace();
protected:
// Direct3D objects.
winrt::com_ptr<ID3D11Device4> m_d3dDevice;
mutable std::recursive_mutex m_d3dContextMutex;
winrt::com_ptr<ID3D11DeviceContext3> m_d3dContext;
winrt::com_ptr<IDXGIAdapter3> m_dxgiAdapter;
// Direct3D interop objects.
winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice m_d3dInteropDevice;
// Direct2D factories.
winrt::com_ptr<ID2D1Factory2> m_d2dFactory;
winrt::com_ptr<IDWriteFactory2> m_dwriteFactory;
winrt::com_ptr<IWICImagingFactory2> m_wicFactory;
// The IDeviceNotify can be held directly as it owns the DeviceResources.
IDeviceNotify* m_deviceNotify = nullptr;
// Whether or not the current Direct3D device supports the optional feature
// for setting the render target array index from the vertex shader stage.
bool m_supportsVprt = false;
private:
// The holographic space provides a preferred DXGI adapter ID.
winrt::Windows::Graphics::Holographic::HolographicSpace m_holographicSpace = nullptr;
// Properties of the Direct3D device currently in use.
D3D_FEATURE_LEVEL m_d3dFeatureLevel = D3D_FEATURE_LEVEL_10_0;
// Back buffer resources, etc. for attached holographic cameras.
std::map<UINT32, std::unique_ptr<CameraResources>> m_cameraResources;
std::mutex m_cameraResourcesLock;
};
} // namespace DXHelper
// Device-based resources for holographic cameras are stored in a std::map. Access this list by providing a
// callback to this function, and the std::map will be guarded from add and remove
// events until the callback returns. The callback is processed immediately and must
// not contain any nested calls to UseHolographicCameraResources.
// The callback takes a parameter of type std::map<UINT32, std::unique_ptr<DXHelper::CameraResources>>&
// through which the list of cameras will be accessed.
template <typename LCallback>
void DXHelper::DeviceResources::UseHolographicCameraResources(LCallback const& callback)
{
std::lock_guard<std::mutex> guard(m_cameraResourcesLock);
callback(m_cameraResources);
}
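A hedged usage sketch of the two accessor templates; m_deviceResources is an assumed member of the calling class, and the callback bodies are illustrative:

```cpp
// Enumerate the per-camera resources while the internal lock is held.
m_deviceResources->UseHolographicCameraResources(
    [](std::map<UINT32, std::unique_ptr<DXHelper::CameraResources>>& cameraResourceMap) {
        for (auto& [cameraId, cameraResources] : cameraResourceMap)
        {
            // e.g. inspect cameraResources->GetRenderTargetSize() or render into its back buffer.
        }
    });

// Touch the immediate context under its mutex.
m_deviceResources->UseD3DDeviceContext([](ID3D11DeviceContext3* context) { context->Flush(); });
```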

View File

@@ -0,0 +1,87 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <wrl.h>
#include <future>
#include <winrt/Windows.Storage.Streams.h>
#include <winrt/Windows.Storage.h>
#include <winrt/Windows.Graphics.DirectX.Direct3D11.h>
#include <windows.graphics.directx.direct3d11.interop.h>
#include <d3d11.h>
#include <dxgi1_2.h>
#include <filesystem>
namespace DXHelper
{
// Function that reads from a binary file asynchronously.
inline std::future<std::vector<byte>> ReadDataAsync(const std::wstring_view& filename)
{
using namespace winrt::Windows::Storage;
using namespace winrt::Windows::Storage::Streams;
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
wchar_t moduleFullyQualifiedFilename[MAX_PATH] = {};
uint32_t moduleFileNameLength = GetModuleFileNameW(NULL, moduleFullyQualifiedFilename, _countof(moduleFullyQualifiedFilename));
moduleFullyQualifiedFilename[moduleFileNameLength] = L'\0';
std::filesystem::path modulePath = moduleFullyQualifiedFilename;
// winrt::hstring moduleFilename = modulePath.filename().c_str();
modulePath.replace_filename(filename);
winrt::hstring absoluteFilename = modulePath.c_str();
IBuffer fileBuffer = co_await PathIO::ReadBufferAsync(absoluteFilename);
#else
IBuffer fileBuffer = co_await PathIO::ReadBufferAsync(filename);
#endif
std::vector<byte> returnBuffer;
returnBuffer.resize(fileBuffer.Length());
DataReader::FromBuffer(fileBuffer).ReadBytes(winrt::array_view<uint8_t>(returnBuffer));
co_return returnBuffer;
}
// Converts a length in device-independent pixels (DIPs) to a length in physical pixels.
inline float ConvertDipsToPixels(float dips, float dpi)
{
static const float dipsPerInch = 96.0f;
return floorf(dips * dpi / dipsPerInch + 0.5f); // Round to nearest integer.
}
#if defined(_DEBUG)
// Check for SDK Layer support.
inline bool SdkLayersAvailable()
{
HRESULT hr = D3D11CreateDevice(
nullptr,
D3D_DRIVER_TYPE_NULL, // There is no need to create a real hardware device.
0,
D3D11_CREATE_DEVICE_DEBUG, // Check for the SDK layers.
nullptr, // Any feature level will do.
0,
D3D11_SDK_VERSION, // Always set this to D3D11_SDK_VERSION for Windows Runtime apps.
nullptr, // No need to keep the D3D device reference.
nullptr, // No need to know the feature level.
nullptr // No need to keep the D3D device context reference.
);
return SUCCEEDED(hr);
}
#endif
} // namespace DXHelper
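A hedged usage sketch for the helpers above; the shader file name is an assumption:

```cpp
// ReadDataAsync returns a std::future, so a caller can block on it with get() or co_await it from a coroutine.
std::vector<byte> vertexShaderBytes = DXHelper::ReadDataAsync(L"VertexShader.cso").get();

// 300 device-independent pixels at 192 DPI map to 600 physical pixels (rounded to the nearest integer).
float physicalPixels = DXHelper::ConvertDipsToPixels(300.0f, 192.0f);
```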

View File

@@ -0,0 +1,18 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "../pch.h"
// -- important: preserve the order of these two includes (initguid.h before PerceptionTypes.h)! Including <initguid.h> first makes
// the DEFINE_GUID macros in PerceptionTypes.h emit actual GUID definitions in this translation unit instead of extern declarations.
#include <initguid.h>
#include "PerceptionTypes.h"
// --

View File

@@ -0,0 +1,33 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <guiddef.h>
DEFINE_GUID(SPATIALPROP_QRTrackerObjectId, 0xf2e86326, 0xfbd8, 0x4088, 0xb5, 0xfb, 0xac, 0x61, 0xc, 0x3, 0x99, 0x7f);
DEFINE_GUID(SPATIALPROP_QRTracker_TrackingStatus, 0x270f00dc, 0xc0d9, 0x442c, 0x87, 0x25, 0x27, 0xb6, 0xbb, 0xd9, 0x43, 0xd);
DEFINE_GUID(SPATIALPROP_QRTracker_QRCodesList, 0x338b32f8, 0x1ce0, 0x4e75, 0x8d, 0xf4, 0x56, 0xd4, 0x2f, 0x73, 0x96, 0x74);
DEFINE_GUID(SPATIALPROP_QRCode_PhysicalSize, 0xcfb07ae5, 0x456a, 0x4aaf, 0x9d, 0x6a, 0xa2, 0x9d, 0x3, 0x9b, 0xc0, 0x56);
DEFINE_GUID(SPATIALPROP_QRCode_LastSeenTime, 0xb2b08c2d, 0xb531, 0x4f18, 0x87, 0x84, 0x44, 0x84, 0x46, 0x3d, 0x88, 0x53);
DEFINE_GUID(SPATIALPROP_QRCode_StreamInfo, 0x609143ea, 0x4ec5, 0x4b0e, 0xba, 0xef, 0x52, 0xa5, 0x5c, 0xfc, 0x23, 0x58);
#pragma pack(push, 1)
typedef struct SPATIAL_GRAPH_QR_CODE_STREAM_INFO
{
UINT32 Version;
ULONG StreamSize;
_Field_size_(StreamSize) BYTE StreamData[ANYSIZE_ARRAY];
} SPATIAL_GRAPH_QR_CODE_STREAM_INFO, *PSPATIAL_GRAPH_QR_CODE_STREAM_INFO;
#pragma pack(pop)

View File

@@ -0,0 +1,70 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "SampleHostMain.h"
#include "Speech.h"
#include <windows.h>
#include <winrt/Windows.ApplicationModel.h>
#include <winrt/Windows.Storage.h>
#include <filesystem>
namespace
{
winrt::Windows::Foundation::IAsyncOperation<winrt::Windows::Storage::StorageFile> LoadGrammarFileAsync()
{
const wchar_t* speechGrammarFile = L"SpeechGrammar.xml";
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
wchar_t executablePath[MAX_PATH];
if (GetModuleFileNameW(NULL, executablePath, ARRAYSIZE(executablePath)) == 0)
{
winrt::throw_last_error();
}
std::filesystem::path executableFolder(executablePath);
executableFolder.remove_filename();
auto rootFolder = co_await winrt::Windows::Storage::StorageFolder::GetFolderFromPathAsync(executableFolder.c_str());
auto file = co_await rootFolder.GetFileAsync(speechGrammarFile);
co_return file;
#else
auto rootFolder = winrt::Windows::ApplicationModel::Package::Current().InstalledLocation();
return rootFolder.GetFileAsync(speechGrammarFile);
#endif
}
} // namespace
winrt::fire_and_forget Speech::InitializeSpeechAsync(
winrt::Microsoft::Holographic::AppRemoting::IRemoteSpeech remoteSpeech,
winrt::Microsoft::Holographic::AppRemoting::IRemoteSpeech::OnRecognizedSpeech_revoker& onRecognizedSpeechRevoker,
std::weak_ptr<SampleHostMain> sampleHostMainWeak)
{
onRecognizedSpeechRevoker = remoteSpeech.OnRecognizedSpeech(
winrt::auto_revoke, [sampleHostMainWeak](const winrt::Microsoft::Holographic::AppRemoting::RecognizedSpeech& recognizedSpeech) {
if (auto sampleHostMain = sampleHostMainWeak.lock())
{
sampleHostMain->OnRecognizedSpeech(recognizedSpeech.RecognizedText);
}
});
auto grammarFile = co_await LoadGrammarFileAsync();
std::vector<winrt::hstring> dictionary;
dictionary.push_back(L"Red");
dictionary.push_back(L"Blue");
dictionary.push_back(L"Green");
dictionary.push_back(L"Default");
dictionary.push_back(L"Aquamarine");
remoteSpeech.ApplyParameters(L"en-US", grammarFile, dictionary);
}
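A hedged call-site sketch for InitializeSpeechAsync; remoteSpeech is assumed to be obtained from the remoting connection, m_onRecognizedSpeechRevoker is assumed to be a member so it outlives the call, and SampleHostMain is assumed to derive from std::enable_shared_from_this<SampleHostMain>:

```cpp
// Hedged sketch, called from inside SampleHostMain once the remote speech interface is available.
Speech::InitializeSpeechAsync(remoteSpeech, m_onRecognizedSpeechRevoker, weak_from_this());
```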

View File

@@ -0,0 +1,28 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <memory>
#include <winrt/Microsoft.Holographic.AppRemoting.h>
#include <winrt/Windows.Foundation.h>
class SampleHostMain;
namespace Speech
{
winrt::fire_and_forget InitializeSpeechAsync(
winrt::Microsoft::Holographic::AppRemoting::IRemoteSpeech remoteSpeech,
winrt::Microsoft::Holographic::AppRemoting::IRemoteSpeech::OnRecognizedSpeech_revoker& onRecognizedSpeechRevoker,
std::weak_ptr<SampleHostMain> sampleHostMainWeak);
}

View File

@@ -0,0 +1,31 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
// Comparison functor that allows GUID to be used as an ordered map or set key.
struct GUIDComparer
{
inline static int compare(const GUID& Left, const GUID& Right)
{
return memcmp(&Left, &Right, sizeof(GUID));
}
inline static bool equals(const GUID& Left, const GUID& Right)
{
return memcmp(&Left, &Right, sizeof(GUID)) == 0;
}
bool operator()(const GUID& Left, const GUID& Right) const
{
return compare(Left, Right) < 0;
}
};
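A short usage sketch: because GUIDComparer defines operator(), it supplies the strict weak ordering needed to key ordered standard containers by GUID (the container contents are illustrative):

```cpp
#include <guiddef.h>
#include <map>
#include <set>
#include <string>

std::map<GUID, std::wstring, GUIDComparer> objectNamesById; // ordered map keyed by GUID
std::set<GUID, GUIDComparer> knownIds;                      // same pattern as the QR code tracker below
```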

View File

@@ -0,0 +1,218 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "../pch.h"
#include "../Common/PerceptionTypes.h"
#include "../Common/Utils.h"
#include "PerceptionDeviceHandler.h"
#include "QRCodeTracker.h"
PerceptionRootObject::~PerceptionRootObject()
{
}
const GUID& PerceptionRootObject::GetPropertyId() const
{
return m_typeId;
}
const GUID& PerceptionRootObject::GetObjectId() const
{
return m_objectId;
}
PerceptionRootObject::PerceptionRootObject(IPerceptionDevice* device, const GUID& typeId, const GUID& objectId)
: m_typeId(typeId)
, m_objectId(objectId)
{
m_device.copy_from(device);
}
PerceptionDeviceHandler::PerceptionDeviceHandler()
{
}
PerceptionDeviceHandler::~PerceptionDeviceHandler()
{
Stop();
}
void PerceptionDeviceHandler::Start()
{
std::lock_guard stateLock(m_stateProtect);
if (m_running)
{
return;
}
HRESULT hr = PerceptionDeviceCreateFactory(IID_PPV_ARGS(m_perceptionDeviceFactory.put()));
if (FAILED(hr))
{
Stop();
return;
}
m_rootObjectChangeHandler = winrt::make_self<RootObjectChangeHandler>(*this);
std::array<const GUID, 2> rootObjectIds{QRCodeTracker::GetStaticPropertyId()};
for (size_t i = 0; i < rootObjectIds.size(); ++i)
{
winrt::com_ptr<IPerceptionDeviceRootObjectWatcher> watcher;
hr = m_perceptionDeviceFactory->CreateRootObjectWatcher(1, &rootObjectIds[i], PerceptionDeviceOptions::None, watcher.put());
if (FAILED(hr))
{
Stop();
return;
}
hr = watcher->SetAddedHandler(m_rootObjectChangeHandler.get());
if (FAILED(hr))
{
Stop();
return;
}
hr = watcher->SetRemovedHandler(m_rootObjectChangeHandler.get());
if (FAILED(hr))
{
Stop();
return;
}
m_rootObjectWatchers.emplace_back(std::move(watcher));
}
m_running = true;
for (auto& watcher : m_rootObjectWatchers)
{
hr = watcher->Start();
if (FAILED(hr))
{
Stop();
return;
}
}
}
void PerceptionDeviceHandler::Stop()
{
std::lock_guard stateLock(m_stateProtect);
m_running = false;
for (auto& watcher : m_rootObjectWatchers)
{
watcher->Stop();
}
m_rootObjectWatchers.clear();
m_rootObjectChangeHandler = nullptr;
m_perceptionDeviceFactory = nullptr;
}
HRESULT PerceptionDeviceHandler::HandleRootObjectAdded(IPerceptionDeviceRootObjectAddedEventArgs* args)
{
std::lock_guard stateLock(m_stateProtect);
if (!m_running)
{
return S_OK;
}
RootObjectKey key{args->GetPropertyId(), args->GetObjectId()};
if (m_rootObjects.find(key) != m_rootObjects.end())
{
return S_FALSE; // Already have that root object; don't add it twice
}
if (GUIDComparer::equals(key.propertyId, QRCodeTracker::GetStaticPropertyId()))
{
winrt::com_ptr<IPerceptionDevice> device;
args->GetDevice(device.put());
m_rootObjects.emplace(key, std::make_shared<QRCodeTracker>(device.get(), key.propertyId, key.objectId));
}
return S_OK;
}
HRESULT PerceptionDeviceHandler::HandleRootObjectRemoved(IPerceptionDeviceRootObjectRemovedEventArgs* args)
{
std::lock_guard stateLock(m_stateProtect);
if (!m_running)
{
return S_OK;
}
RootObjectKey key{args->GetPropertyId(), args->GetObjectId()};
auto it = m_rootObjects.find(key);
if (it != m_rootObjects.end())
{
m_rootObjects.erase(key);
}
return S_OK;
}
PerceptionDeviceHandler::RootObjectChangeHandler::RootObjectChangeHandler(PerceptionDeviceHandler& owner)
: m_weakOwner(owner.weak_from_this())
{
}
STDMETHODIMP PerceptionDeviceHandler::RootObjectChangeHandler::Invoke(
_In_ IPerceptionDeviceRootObjectWatcher* sender, _In_ IPerceptionDeviceRootObjectAddedEventArgs* args)
{
auto owner{m_weakOwner.lock()};
if (owner)
{
return owner->HandleRootObjectAdded(args);
}
return S_OK;
}
STDMETHODIMP PerceptionDeviceHandler::RootObjectChangeHandler::Invoke(
_In_ IPerceptionDeviceRootObjectWatcher* sender, _In_ IPerceptionDeviceRootObjectRemovedEventArgs* args)
{
auto owner{m_weakOwner.lock()};
if (owner)
{
return owner->HandleRootObjectRemoved(args);
}
return S_OK;
}
bool PerceptionDeviceHandler::RootObjectKey::operator<(const RootObjectKey& other) const
{
const auto typeIdRes = GUIDComparer::compare(propertyId, other.propertyId);
if (typeIdRes < 0)
{
return true;
}
else if (typeIdRes > 0)
{
return false;
}
else
{
return GUIDComparer::compare(objectId, other.objectId) < 0;
}
}

View File

@@ -0,0 +1,121 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <vector>
#include <winrt/Windows.UI.Input.Spatial.h>
#include <PerceptionDevice.h>
// Base class for perception root objects managed by the PerceptionDeviceHandler
class PerceptionRootObject
{
public:
virtual ~PerceptionRootObject();
const GUID& GetPropertyId() const;
const GUID& GetObjectId() const;
protected:
PerceptionRootObject(IPerceptionDevice* device, const GUID& typeId, const GUID& objectId);
protected:
winrt::com_ptr<IPerceptionDevice> m_device;
GUID m_typeId;
GUID m_objectId;
};
// Sample perception device handler. Listens to the availability of perception devices (more accurately:
// perception root objects of known types), and retrieves data from these root objects.
class PerceptionDeviceHandler : public std::enable_shared_from_this<PerceptionDeviceHandler>
{
public:
PerceptionDeviceHandler();
~PerceptionDeviceHandler();
// Starts monitoring for perception root object changes
void Start();
// Stops monitoring perception root object changes
void Stop();
// Iterates over all perception root objects currently known
template <typename Func>
void ForEachRootObject(Func& func)
{
std::lock_guard stateLock(m_stateProtect);
for (auto& rootObjectEntry : m_rootObjects)
{
func(*rootObjectEntry.second.get());
}
}
// Iterates over all root objects of a certain type
template <typename RootObjectType, typename Func>
void ForEachRootObjectOfType(Func& func)
{
std::lock_guard stateLock(m_stateProtect);
for (auto& rootObjectEntry : m_rootObjects)
{
PerceptionRootObject& rootObject = *rootObjectEntry.second.get();
if (GUIDComparer::equals(rootObject.GetPropertyId(), RootObjectType::GetStaticPropertyId()))
{
func(static_cast<RootObjectType&>(rootObject));
}
}
}
private:
struct RootObjectChangeHandler
: winrt::implements<RootObjectChangeHandler, IPerceptionDeviceRootObjectAddedHandler, IPerceptionDeviceRootObjectRemovedHandler>
{
RootObjectChangeHandler(PerceptionDeviceHandler& owner);
IFACEMETHOD(Invoke)
(_In_ IPerceptionDeviceRootObjectWatcher* sender, _In_ IPerceptionDeviceRootObjectAddedEventArgs* args) override;
IFACEMETHOD(Invoke)
(_In_ IPerceptionDeviceRootObjectWatcher* sender, _In_ IPerceptionDeviceRootObjectRemovedEventArgs* args) override;
private:
std::weak_ptr<PerceptionDeviceHandler> m_weakOwner;
};
friend RootObjectChangeHandler;
struct RootObjectKey
{
GUID propertyId;
GUID objectId;
bool operator<(const RootObjectKey& other) const;
};
using RootObjectMap = std::map<RootObjectKey, std::shared_ptr<PerceptionRootObject>>;
private:
HRESULT HandleRootObjectAdded(IPerceptionDeviceRootObjectAddedEventArgs* args);
HRESULT HandleRootObjectRemoved(IPerceptionDeviceRootObjectRemovedEventArgs* args);
private:
std::recursive_mutex m_stateProtect;
bool m_running{false};
winrt::com_ptr<IPerceptionDeviceFactory> m_perceptionDeviceFactory;
std::vector<winrt::com_ptr<IPerceptionDeviceRootObjectWatcher>> m_rootObjectWatchers;
winrt::com_ptr<RootObjectChangeHandler> m_rootObjectChangeHandler;
RootObjectMap m_rootObjects;
};
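A hedged lifecycle sketch for the handler; QRCodeTracker is assumed to be available via QRCodeTracker.h:

```cpp
// The handler must live in a std::shared_ptr because its internal change handler captures it via weak_from_this().
auto perceptionDeviceHandler = std::make_shared<PerceptionDeviceHandler>();
perceptionDeviceHandler->Start();

// ForEachRootObjectOfType takes the callback by non-const reference, so pass a named callable.
auto handleTracker = [](QRCodeTracker& tracker) { /* read tracked QR codes here */ };
perceptionDeviceHandler->ForEachRootObjectOfType<QRCodeTracker>(handleTracker);

perceptionDeviceHandler->Stop();
```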

View File

@@ -0,0 +1,107 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "../Common/DirectXHelper.h"
#include "../Common/PerceptionTypes.h"
#include "PerceptionDeviceHandler.h"
#include "QRCodeRenderer.h"
#include "QRCodeTracker.h"
namespace
{
using namespace DirectX;
void AppendColoredTriangle(
winrt::Windows::Foundation::Numerics::float3 p0,
winrt::Windows::Foundation::Numerics::float3 p1,
winrt::Windows::Foundation::Numerics::float3 p2,
winrt::Windows::Foundation::Numerics::float3 color,
std::vector<VertexPositionNormalColor>& vertices)
{
VertexPositionNormalColor vertex;
vertex.color = XMFLOAT3(&color.x);
vertex.normal = XMFLOAT3(0.0f, 0.0f, 0.0f);
vertex.pos = XMFLOAT3(&p0.x);
vertices.push_back(vertex);
vertex.pos = XMFLOAT3(&p1.x);
vertices.push_back(vertex);
vertex.pos = XMFLOAT3(&p2.x);
vertices.push_back(vertex);
}
} // namespace
QRCodeRenderer::QRCodeRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources)
: RenderableObject(deviceResources)
{
}
void QRCodeRenderer::Update(
PerceptionDeviceHandler& perceptionDeviceHandler,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem)
{
auto processQRCode = [this, renderingCoordinateSystem](QRCode& code) {
auto codeCS = code.GetCoordinateSystem();
float size = code.GetPhysicalSize();
auto codeToRendering = codeCS.TryGetTransformTo(renderingCoordinateSystem);
if (!codeToRendering)
{
return;
}
auto codeToRenderingV = codeToRendering.Value();
winrt::Windows::Foundation::Numerics::float3 positions[4] = {
{0.0f, 0.0f, 0.0f}, {0.0f, size, 0.0f}, {size, size, 0.0f}, {size, 0.0f, 0.0f}};
for (int i = 0; i < 4; ++i)
{
positions[i] = winrt::Windows::Foundation::Numerics::transform(positions[i], codeToRenderingV);
}
winrt::Windows::Foundation::Numerics::float3 col{1.0f, 1.0f, 0.0f};
AppendColoredTriangle(positions[0], positions[2], positions[1], col, m_vertices);
AppendColoredTriangle(positions[0], positions[3], positions[2], col, m_vertices);
};
auto processQRCodeTracker = [this, processQRCode](QRCodeTracker& tracker) { tracker.ForEachQRCode(processQRCode); };
m_vertices.clear();
perceptionDeviceHandler.ForEachRootObjectOfType<QRCodeTracker>(processQRCodeTracker);
auto modelTransform = winrt::Windows::Foundation::Numerics::float4x4::identity();
UpdateModelConstantBuffer(modelTransform);
}
void QRCodeRenderer::Draw(unsigned int numInstances)
{
if (m_vertices.empty())
{
return;
}
const UINT stride = sizeof(m_vertices[0]);
const UINT offset = 0;
D3D11_SUBRESOURCE_DATA vertexBufferData = {0};
vertexBufferData.pSysMem = m_vertices.data();
const CD3D11_BUFFER_DESC vertexBufferDesc(static_cast<UINT>(m_vertices.size() * stride), D3D11_BIND_VERTEX_BUFFER);
winrt::com_ptr<ID3D11Buffer> vertexBuffer;
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&vertexBufferDesc, &vertexBufferData, vertexBuffer.put()));
m_deviceResources->UseD3DDeviceContext([&](auto context) {
context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
ID3D11Buffer* pBuffer = vertexBuffer.get();
context->IASetVertexBuffers(0, 1, &pBuffer, &stride, &offset);
context->DrawInstanced(static_cast<UINT>(m_vertices.size()), numInstances, offset, 0);
});
}

View File

@@ -0,0 +1,35 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "RenderableObject.h"
#include <vector>
#include <winrt/Windows.UI.Input.Spatial.h>
class PerceptionDeviceHandler;
class QRCodeRenderer : public RenderableObject
{
public:
QRCodeRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources);
void Update(
PerceptionDeviceHandler& perceptionDeviceHandler,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem);
private:
void Draw(unsigned int numInstances) override;
private:
std::vector<VertexPositionNormalColor> m_vertices;
};

View File

@@ -0,0 +1,337 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "..\pch.h"
#include <winrt/Windows.Perception.Spatial.Preview.h>
#include "QRCodeTracker.h"
#include <set>
QRCode::QRCode(
const GUID& id,
PSPATIAL_GRAPH_QR_CODE_STREAM_INFO streamInfo,
const winrt::Windows::Perception::Spatial::SpatialCoordinateSystem& coordinateSystem)
: m_id(id)
, m_streamInfo(streamInfo)
, m_coordinateSystem(coordinateSystem)
{
}
QRCode::~QRCode()
{
if (m_streamInfo)
{
CoTaskMemFree(m_streamInfo);
}
}
const GUID& QRCode::GetId() const
{
return m_id;
}
float QRCode::GetPhysicalSize() const
{
return m_physicalSizeInMeters;
}
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem QRCode::GetCoordinateSystem() const
{
return m_coordinateSystem;
}
QRCodeTracker::QRCodeTracker(IPerceptionDevice* device, const GUID& typeId, const GUID& objectId)
: PerceptionRootObject(device, typeId, objectId)
{
Start();
}
QRCodeTracker::~QRCodeTracker()
{
Stop();
}
const GUID& QRCodeTracker::GetStaticPropertyId()
{
return SPATIALPROP_QRTrackerObjectId;
}
void QRCodeTracker::Start()
{
std::lock_guard stateLock(m_stateProtect);
if (m_running)
{
return;
}
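    // Subscribe to the QR code tracker object and register a listener for its QR code list property.
    // Any failure during setup tears the partially initialized state down again via Stop().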
HRESULT hr = m_device->CreateObjectSubscription(m_objectId, UINT(1), m_qrTrackerSubscription.put());
if (FAILED(hr))
{
Stop();
return;
}
hr = m_device->CreatePropertyListener(GetObjectId(), SPATIALPROP_QRTracker_QRCodesList, m_qrListChangeListener.put());
if (FAILED(hr))
{
Stop();
return;
}
m_propertyChangeHandler = winrt::make_self<PropertyChangeHandler>(*this);
hr = m_qrListChangeListener->SetPropertyChangedHandler(m_propertyChangeHandler.get());
if (FAILED(hr))
{
Stop();
return;
}
hr = m_qrListChangeListener->Start();
if (FAILED(hr))
{
Stop();
return;
}
m_running = true;
}
void QRCodeTracker::Stop()
{
std::lock_guard stateLock(m_stateProtect);
m_running = false;
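    // Stop the list change listener and every per-code property listener before releasing the shared handler.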
if (m_qrListChangeListener)
{
m_qrListChangeListener->Stop();
}
for (auto& qrByPointer : m_qrCodesByPointer)
{
QRCode* qrCode = qrByPointer.second.get();
if (qrCode->m_propertyChangedListener)
{
qrCode->m_propertyChangedListener->Stop();
qrCode->m_propertyChangedListener = nullptr;
}
}
if (m_propertyChangeHandler)
{
m_propertyChangeHandler->Dispose();
m_propertyChangeHandler = nullptr;
}
}
HRESULT QRCodeTracker::HandlePropertyChange(IPerceptionDevicePropertyListener* sender, IPerceptionDevicePropertyChangedEventArgs* args)
{
// Change event for QR code list?
if (sender == m_qrListChangeListener.get())
{
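        // The property value is a packed array of GUIDs, one entry per QR code currently known to the tracker.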
const GUID* guids = static_cast<const GUID*>(args->GetValue());
UINT numGuids = args->GetValueSize() / sizeof(GUID);
return HandleQRCodeListChange(guids, numGuids);
}
// Change event for single QR code?
{
std::lock_guard<std::recursive_mutex> stateLock(m_stateProtect);
auto byListenerPos = m_qrCodesByListener.find(sender);
if (byListenerPos != m_qrCodesByListener.end())
{
QRCode* qrCode = byListenerPos->second;
return UpdateQRCode(*qrCode);
}
}
return S_OK;
}
HRESULT QRCodeTracker::HandleQRCodeListChange(const GUID* guids, UINT numGuids)
{
std::lock_guard<std::recursive_mutex> stateLock(m_stateProtect);
if (!m_running)
{
return S_FALSE;
}
// Duplicate the list of known QR code IDs. We'll remove all entries from it that we see in
// the incoming list, and thus will end up with a list of the IDs of all removed QR codes.
std::set<GUID, GUIDComparer> codesNotInList;
for (auto& kv : m_qrCodesByGUID)
{
codesNotInList.insert(kv.first);
}
// Check each QR code on the incoming list, and update the local cache
// with new codes.
for (size_t qrIndex = 0; qrIndex < numGuids; ++qrIndex)
{
const GUID& qrCodeId = guids[qrIndex];
auto it = m_qrCodesByGUID.find(qrCodeId);
if (it != m_qrCodesByGUID.end())
{
// Code is already known.
codesNotInList.erase(qrCodeId);
continue;
}
// Code is new. Read initial state, and add to collections.
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem coordinateSystem{nullptr};
try
{
coordinateSystem =
winrt::Windows::Perception::Spatial::Preview::SpatialGraphInteropPreview::CreateCoordinateSystemForNode(qrCodeId);
}
catch (winrt::hresult_error const& ex)
{
return ex.to_abi();
}
if (coordinateSystem == nullptr)
{
return E_FAIL;
}
void* streamData{nullptr};
UINT streamDataSize{0};
HRESULT hr = m_device->ReadVariableSizeProperty(qrCodeId, SPATIALPROP_QRCode_StreamInfo, &streamDataSize, &streamData, nullptr);
if (FAILED(hr))
{
return hr;
}
if (streamDataSize == 0)
{
CoTaskMemFree(streamData);
return E_FAIL;
}
auto newCode =
std::make_unique<QRCode>(qrCodeId, reinterpret_cast<PSPATIAL_GRAPH_QR_CODE_STREAM_INFO>(streamData), coordinateSystem);
QRCode* qrCode = newCode.get();
m_qrCodesByPointer.emplace(qrCode, std::move(newCode));
m_qrCodesByGUID.emplace(qrCodeId, qrCode);
hr = UpdateQRCode(*qrCode);
if (FAILED(hr))
{
return hr;
}
hr = m_device->CreatePropertyListener(qrCodeId, SPATIALPROP_QRCode_LastSeenTime, qrCode->m_propertyChangedListener.put());
if (FAILED(hr))
{
return hr;
}
if (!m_propertyChangeHandler)
{
return E_UNEXPECTED;
}
hr = qrCode->m_propertyChangedListener->SetPropertyChangedHandler(
m_propertyChangeHandler.as<IPerceptionDevicePropertyChangedHandler>().get());
if (FAILED(hr))
{
return hr;
}
hr = qrCode->m_propertyChangedListener->Start();
if (FAILED(hr))
{
return hr;
}
m_qrCodesByListener.emplace(qrCode->m_propertyChangedListener.get(), qrCode);
}
// Remove all QR codes that have not been seen in this update
for (auto& qrCodeId : codesNotInList)
{
auto byCodeIdPos = m_qrCodesByGUID.find(qrCodeId);
if (byCodeIdPos == m_qrCodesByGUID.end())
{
// Not found (this should not ever happen)
continue;
}
QRCode* qrCode = byCodeIdPos->second;
m_qrCodesByGUID.erase(byCodeIdPos);
        if (qrCode->m_propertyChangedListener)
        {
            qrCode->m_propertyChangedListener->Stop();
            // Remove the listener from the lookup map before releasing it; erasing after resetting
            // the pointer would look up nullptr and leave a stale entry behind.
            m_qrCodesByListener.erase(qrCode->m_propertyChangedListener.get());
            qrCode->m_propertyChangedListener = nullptr;
        }
m_qrCodesByPointer.erase(qrCode);
}
return S_OK;
}
HRESULT QRCodeTracker::UpdateQRCode(QRCode& qrCode)
{
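    // Refresh the cached physical size and last seen time of the QR code from the perception device.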
float physicalSizeInMeters{0};
HRESULT hr =
m_device->ReadProperty(qrCode.m_id, SPATIALPROP_QRCode_PhysicalSize, sizeof(physicalSizeInMeters), &physicalSizeInMeters, nullptr);
if (FAILED(hr))
{
return hr;
}
qrCode.m_physicalSizeInMeters = physicalSizeInMeters;
LONGLONG lastSeenTime{0};
hr = m_device->ReadProperty(qrCode.m_id, SPATIALPROP_QRCode_LastSeenTime, sizeof(lastSeenTime), &lastSeenTime, nullptr);
if (FAILED(hr))
{
return hr;
}
qrCode.m_lastSeenTime = lastSeenTime;
return S_OK;
}
QRCodeTracker::PropertyChangeHandler::PropertyChangeHandler(QRCodeTracker& owner)
: m_owner(&owner)
{
}
void QRCodeTracker::PropertyChangeHandler::Dispose()
{
m_owner = nullptr;
}
STDMETHODIMP QRCodeTracker::PropertyChangeHandler::Invoke(
_In_ IPerceptionDevicePropertyListener* sender, _In_ IPerceptionDevicePropertyChangedEventArgs* eventArgs)
{
auto owner = m_owner;
if (owner)
{
return owner->HandlePropertyChange(sender, eventArgs);
}
return S_OK;
}

View file

@ -0,0 +1,108 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "../Common/PerceptionTypes.h"
#include "../Common/Utils.h"
#include "PerceptionDeviceHandler.h"
// Represents a single tracked QR code with position, size and last seen time.
class QRCode
{
public:
QRCode(
const GUID& id,
PSPATIAL_GRAPH_QR_CODE_STREAM_INFO streamInfo,
const winrt::Windows::Perception::Spatial::SpatialCoordinateSystem& coordinateSystem);
~QRCode();
const GUID& GetId() const;
float GetPhysicalSize() const;
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem GetCoordinateSystem() const;
private:
friend class QRCodeTracker;
private:
GUID m_id;
PSPATIAL_GRAPH_QR_CODE_STREAM_INFO m_streamInfo;
__int64 m_lastSeenTime{0};
float m_physicalSizeInMeters{0};
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem m_coordinateSystem{nullptr};
winrt::com_ptr<IPerceptionDevicePropertyListener> m_propertyChangedListener;
};
// Manages all active QR codes.
// Listens for events from the perception device to add, remove or update QR codes.
class QRCodeTracker : public PerceptionRootObject
{
public:
QRCodeTracker(IPerceptionDevice* device, const GUID& typeId, const GUID& objectId);
~QRCodeTracker();
// Helper function to iterate over all QR codes in a thread-safe manner.
template <typename Func>
void ForEachQRCode(Func& func)
{
std::lock_guard stateLock(m_stateProtect);
for (auto& qrCodeEntry : m_qrCodesByPointer)
{
func(*qrCodeEntry.second.get());
}
}
public:
static const GUID& GetStaticPropertyId();
private:
// Implementation of the IPerceptionDevicePropertyChangedHandler interface.
// Events from the perception device are propagated through an instance of this class.
struct PropertyChangeHandler : winrt::implements<PropertyChangeHandler, IPerceptionDevicePropertyChangedHandler>
{
PropertyChangeHandler(QRCodeTracker& owner);
void Dispose();
STDMETHOD(Invoke)
(_In_ IPerceptionDevicePropertyListener* sender, _In_ IPerceptionDevicePropertyChangedEventArgs* eventArgs) override;
private:
QRCodeTracker* m_owner;
};
friend PropertyChangeHandler;
using QRCodesByPointerMap = std::map<const QRCode*, std::unique_ptr<QRCode>>;
using QRCodesByGUIDMap = std::map<GUID, QRCode*, GUIDComparer>;
using QRCodesByListenerMap = std::map<IPerceptionDevicePropertyListener*, QRCode*>;
private:
void Start();
void Stop();
HRESULT HandlePropertyChange(IPerceptionDevicePropertyListener* sender, IPerceptionDevicePropertyChangedEventArgs* args);
HRESULT HandleQRCodeListChange(const GUID* guids, UINT numGuids);
HRESULT UpdateQRCode(QRCode& qrCode);
private:
std::recursive_mutex m_stateProtect;
bool m_running{false};
winrt::com_ptr<IPerceptionDeviceObjectSubscription> m_qrTrackerSubscription;
winrt::com_ptr<IPerceptionDevicePropertyListener> m_qrListChangeListener;
winrt::com_ptr<PropertyChangeHandler> m_propertyChangeHandler;
QRCodesByPointerMap m_qrCodesByPointer;
QRCodesByGUIDMap m_qrCodesByGUID;
QRCodesByListenerMap m_qrCodesByListener;
};

View file

@ -0,0 +1,148 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "RenderableObject.h"
#include "../Common/DirectXHelper.h"
RenderableObject::RenderableObject(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources)
: m_deviceResources(deviceResources)
{
CreateDeviceDependentResources();
}
void RenderableObject::UpdateModelConstantBuffer(const winrt::Windows::Foundation::Numerics::float4x4& modelTransform)
{
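    // Derive the normal transform from the model transform by dropping the translation. This is
    // sufficient as long as the model transform does not contain non-uniform scale.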
winrt::Windows::Foundation::Numerics::float4x4 normalTransform = modelTransform;
normalTransform.m41 = normalTransform.m42 = normalTransform.m43 = 0;
UpdateModelConstantBuffer(modelTransform, normalTransform);
}
void RenderableObject::UpdateModelConstantBuffer(
const winrt::Windows::Foundation::Numerics::float4x4& modelTransform,
const winrt::Windows::Foundation::Numerics::float4x4& normalTransform)
{
if (m_loadingComplete)
{
m_modelConstantBufferData.model = reinterpret_cast<DirectX::XMFLOAT4X4&>(transpose(modelTransform));
m_modelConstantBufferData.normal = reinterpret_cast<DirectX::XMFLOAT4X4&>(transpose(normalTransform));
// Update the model transform buffer for the hologram.
m_deviceResources->UseD3DDeviceContext(
[&](auto context) { context->UpdateSubresource(m_modelConstantBuffer.get(), 0, nullptr, &m_modelConstantBufferData, 0, 0); });
}
}
void RenderableObject::Render(bool isStereo)
{
if (!m_loadingComplete)
{
return;
}
// Use the D3D device context to update Direct3D device-based resources.
m_deviceResources->UseD3DDeviceContext([&](auto context) {
context->IASetInputLayout(m_inputLayout.get());
context->PSSetShader(m_pixelShader.get(), nullptr, 0);
// Attach the vertex shader.
context->VSSetShader(m_vertexShader.get(), nullptr, 0);
// Apply the model constant buffer to the vertex shader.
ID3D11Buffer* pBuffer = m_modelConstantBuffer.get();
context->VSSetConstantBuffers(0, 1, &pBuffer);
if (!m_usingVprtShaders)
{
// On devices that do not support the D3D11_FEATURE_D3D11_OPTIONS3::
// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature,
// a pass-through geometry shader is used to set the render target
// array index.
context->GSSetShader(m_geometryShader.get(), nullptr, 0);
}
Draw(isStereo ? 2 : 1);
});
}
std::future<void> RenderableObject::CreateDeviceDependentResources()
{
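    // On desktop the compiled shader blobs are loaded via a plain relative path; in a UWP package
    // they are addressed through the ms-appx URI scheme.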
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
std::wstring fileNamePrefix = L"";
#else
std::wstring fileNamePrefix = L"ms-appx:///";
#endif
m_usingVprtShaders = m_deviceResources->GetDeviceSupportsVprt();
// On devices that do support the D3D11_FEATURE_D3D11_OPTIONS3::
// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature
// we can avoid using a pass-through geometry shader to set the render
// target array index, thus avoiding any overhead that would be
// incurred by setting the geometry shader stage.
std::wstring vertexShaderFileName = m_usingVprtShaders ? L"hsa_VprtVertexShader.cso" : L"hsa_VertexShader.cso";
// Load shaders asynchronously.
std::vector<byte> vertexShaderFileData = co_await DXHelper::ReadDataAsync(fileNamePrefix + vertexShaderFileName);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateVertexShader(
vertexShaderFileData.data(), vertexShaderFileData.size(), nullptr, m_vertexShader.put()));
constexpr std::array<D3D11_INPUT_ELEMENT_DESC, 3> vertexDesc = {{
{"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"COLOR", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0},
}};
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateInputLayout(
vertexDesc.data(),
static_cast<UINT>(vertexDesc.size()),
vertexShaderFileData.data(),
static_cast<UINT>(vertexShaderFileData.size()),
m_inputLayout.put()));
std::vector<byte> pixelShaderFileData = co_await DXHelper::ReadDataAsync(fileNamePrefix + L"hsa_PixelShader.cso");
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreatePixelShader(
pixelShaderFileData.data(), pixelShaderFileData.size(), nullptr, m_pixelShader.put()));
const ModelConstantBuffer constantBuffer{
reinterpret_cast<DirectX::XMFLOAT4X4&>(winrt::Windows::Foundation::Numerics::float4x4::identity()),
reinterpret_cast<DirectX::XMFLOAT4X4&>(winrt::Windows::Foundation::Numerics::float4x4::identity()),
};
const CD3D11_BUFFER_DESC constantBufferDesc(sizeof(ModelConstantBuffer), D3D11_BIND_CONSTANT_BUFFER);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&constantBufferDesc, nullptr, m_modelConstantBuffer.put()));
if (!m_usingVprtShaders)
{
// Load the pass-through geometry shader.
std::vector<byte> geometryShaderFileData = co_await DXHelper::ReadDataAsync(fileNamePrefix + L"hsa_GeometryShader.cso");
// After the pass-through geometry shader file is loaded, create the shader.
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateGeometryShader(
geometryShaderFileData.data(), geometryShaderFileData.size(), nullptr, m_geometryShader.put()));
}
m_loadingComplete = true;
}
void RenderableObject::ReleaseDeviceDependentResources()
{
m_loadingComplete = false;
m_usingVprtShaders = false;
m_vertexShader = nullptr;
m_inputLayout = nullptr;
m_pixelShader = nullptr;
m_geometryShader = nullptr;
m_modelConstantBuffer = nullptr;
}

View file

@ -0,0 +1,61 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "..\Common\DeviceResources.h"
#include "ShaderStructures.h"
#include <future>
class RenderableObject
{
public:
RenderableObject(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources);
virtual ~RenderableObject()
{
}
virtual std::future<void> CreateDeviceDependentResources();
virtual void ReleaseDeviceDependentResources();
void Render(bool isStereo);
protected:
void UpdateModelConstantBuffer(const winrt::Windows::Foundation::Numerics::float4x4& modelTransform);
void UpdateModelConstantBuffer(
const winrt::Windows::Foundation::Numerics::float4x4& modelTransform,
const winrt::Windows::Foundation::Numerics::float4x4& normalTransform);
virtual void Draw(unsigned int numInstances) = 0;
// Cached pointer to device resources.
std::shared_ptr<DXHelper::DeviceResources> m_deviceResources;
private:
// Direct3D resources for geometry.
winrt::com_ptr<ID3D11InputLayout> m_inputLayout;
winrt::com_ptr<ID3D11VertexShader> m_vertexShader;
winrt::com_ptr<ID3D11GeometryShader> m_geometryShader;
winrt::com_ptr<ID3D11PixelShader> m_pixelShader;
winrt::com_ptr<ID3D11Buffer> m_modelConstantBuffer;
// System resources for geometry.
ModelConstantBuffer m_modelConstantBufferData;
uint32_t m_indexCount = 0;
// Variables used with the rendering loop.
bool m_loadingComplete = false;
// If the current D3D Device supports VPRT, we can avoid using a geometry
// shader just to set the render target array index.
bool m_usingVprtShaders = false;
};

View file

@ -0,0 +1,576 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "../Common/DbgLog.h"
#include "../Common/DirectXHelper.h"
#include "SceneUnderstandingRenderer.h"
#include <winrt/Windows.Perception.Spatial.Preview.h>
using namespace RemotingHostSample;
namespace
{
using namespace DirectX;
void AppendColoredTriangle(
winrt::Windows::Foundation::Numerics::float3 p0,
winrt::Windows::Foundation::Numerics::float3 p1,
winrt::Windows::Foundation::Numerics::float3 p2,
winrt::Windows::Foundation::Numerics::float3 color,
std::vector<VertexPositionNormalColor>& vertices)
{
VertexPositionNormalColor vertex;
vertex.color = XMFLOAT3(&color.x);
vertex.normal = XMFLOAT3(0.0f, 0.0f, 0.0f);
vertex.pos = XMFLOAT3(&p0.x);
vertices.push_back(vertex);
vertex.pos = XMFLOAT3(&p1.x);
vertices.push_back(vertex);
vertex.pos = XMFLOAT3(&p2.x);
vertices.push_back(vertex);
}
} // namespace
// Struct to hold one entity label type entry
struct Label
{
const wchar_t* const Name;
uint32_t Index;
uint8_t RGB[3];
};
// Entity label types
static const Label LabelStorage[] = {
{L"Background", 0, {243, 121, 223}},
{L"Ignore", 255, {255, 255, 255}},
{L"Wall", 1, {243, 126, 121}},
{L"Floor", 2, {187, 243, 121}},
{L"Ceiling", 3, {121, 152, 243}},
{L"Table", 4, {121, 243, 227}},
{L"Chair", 5, {243, 191, 121}},
{L"Window", 6, {121, 243, 146}},
{L"Door", 7, {156, 121, 243}},
{L"Monitor", 8, {2, 159, 253}},
{L"Pillar", 10, {253, 106, 2}},
{L"Couch", 11, {72, 197, 126}},
{L"Whiteboard", 12, {137, 159, 2}},
{L"Beanbag", 13, {206, 112, 74}},
{L"Cabinet", 14, {36, 43, 138}},
{L"Nightstands", 15, {78, 231, 210}},
{L"TVStands", 16, {26, 71, 66}},
{L"Countertops", 17, {13, 60, 55}},
{L"Dressers", 18, {29, 58, 55}},
{L"Bench", 19, {105, 54, 136}},
{L"Ottoman", 20, {99, 9, 44}},
{L"Stool", 21, {255, 204, 153}},
{L"GTEquipment", 22, {206, 199, 74}},
{L"Telephone", 23, {243, 217, 121}},
{L"Bookshelf", 24, {37, 117, 164}},
{L"Laptop", 25, {96, 147, 234}},
{L"Stanchion", 26, {29, 117, 40}},
{L"Markers", 27, {111, 93, 167}},
{L"Controller", 28, {230, 254, 251}},
{L"Stairs", 9, {43, 174, 100}},
{L"Empty", 254, {0, 0, 0}},
{L"Appliances-CeilingLight", 30, {250, 24, 180}},
{L"Appliances-DishWasher", 32, {38, 204, 168}},
{L"Appliances-FloorLamp", 34, {106, 134, 187}},
{L"Appliances-Lighting", 36, {156, 162, 56}},
{L"Appliances-Microwave", 37, {6, 44, 91}},
{L"Appliances-NotSpecified", 38, {35, 188, 199}},
{L"Appliances-Oven", 39, {153, 60, 52}},
{L"Appliances-SmallAppliances", 40, {255, 83, 112}},
{L"Appliances-Stove", 41, {76, 175, 147}},
{L"Appliances-Toaster", 42, {145, 58, 23}},
{L"Appliances-WashingMachine", 44, {46, 66, 12}},
{L"Appliances-DeskLamp", 45, {128, 86, 177}},
{L"Appliances-Dryer", 46, {239, 162, 164}},
{L"Appliances-Fridge", 47, {87, 243, 139}},
{L"Appliances-WallLight", 50, {222, 49, 1}},
{L"Bed-BunkBed", 51, {97, 174, 71}},
{L"Bed-DoubleBed", 52, {85, 195, 111}},
{L"Bed-NotSpecified", 53, {212, 26, 75}},
{L"Bed-SingleBed", 54, {200, 219, 241}},
{L"Ceiling-Unassigned", 55, {48, 120, 115}},
{L"Ceiling-NotSpecified", 56, {205, 144, 139}},
{L"Chair-Beanbag", 57, {136, 175, 192}},
{L"Chair-Bench", 58, {89, 41, 203}},
{L"Chair-ArmChair", 59, {192, 1, 27}},
{L"Chair-ArmOfAChair", 60, {194, 241, 101}},
{L"Chair-BarStool", 61, {146, 21, 8}},
{L"Chair-ChaiseLounge", 62, {178, 31, 121}},
{L"Chair-DiningChair", 63, {76, 10, 219}},
{L"Chair-LoungeChair", 64, {174, 165, 77}},
{L"Chair-NotSpecified", 65, {186, 217, 58}},
{L"Chair-OfficeChair", 66, {177, 29, 181}},
{L"Chair-Unknown", 67, {155, 128, 196}},
{L"Chair-Ottoman", 68, {28, 75, 247}},
{L"Chair-Stool", 69, {60, 243, 241}},
{L"Door-DoubleDoors", 70, {220, 101, 83}},
{L"Door-NotSpecified", 71, {219, 20, 187}},
{L"Door-Revolving", 72, {211, 229, 158}},
{L"Door-SingleDoor", 73, {10, 100, 12}},
{L"Door-Sliding", 74, {73, 197, 108}},
{L"Electronics-Desktop", 75, {181, 22, 191}},
{L"Electronics-DVDPlayer", 76, {5, 131, 13}},
{L"Electronics-Headphones", 77, {169, 60, 180}},
{L"Electronics-Keyboard", 78, {6, 92, 79}},
{L"Electronics-Laptop", 79, {252, 108, 50}},
{L"Electronics-Mobile", 80, {35, 73, 64}},
{L"Electronics-Mouse", 81, {3, 112, 214}},
{L"Electronics-Mousepad", 82, {106, 70, 62}},
{L"Electronics-NotSpecified", 83, {63, 100, 209}},
{L"Electronics-Phone", 84, {64, 32, 142}},
{L"Electronics-Printer", 85, {70, 188, 0}},
{L"Electronics-Projector", 86, {72, 100, 38}},
{L"Electronics-Speakers", 87, {202, 60, 135}},
{L"Electronics-Tablet", 88, {126, 2, 49}},
{L"Electronics-TVMonitor", 89, {188, 184, 46}},
{L"Electronics-Xbox", 90, {6, 218, 26}},
{L"Electronics-Monitor", 91, {179, 160, 177}},
{L"Floor-Unassigned", 92, {9, 42, 145}},
{L"Human-Female", 93, {52, 156, 230}},
{L"Human-Male", 94, {231, 88, 138}},
{L"Human-Other", 95, {0, 0, 255}},
{L"NotSpecified-Ax", 96, {230, 228, 24}},
{L"NotSpecified-Backpack", 97, {228, 104, 245}},
{L"NotSpecified-Bag", 98, {215, 41, 202}},
{L"NotSpecified-Barbell", 99, {100, 125, 112}},
{L"NotSpecified-BlackBoard", 100, {65, 166, 116}},
{L"NotSpecified-Bottle", 101, {140, 68, 191}},
{L"NotSpecified-box", 102, {145, 146, 89}},
{L"NotSpecified-Cable", 103, {170, 1, 118}},
{L"NotSpecified-Can", 104, {205, 195, 201}},
{L"NotSpecified-Cart", 105, {156, 159, 0}},
{L"NotSpecified-case", 106, {208, 70, 137}},
{L"NotSpecified-CeilingFan", 107, {9, 227, 245}},
{L"NotSpecified-Clothes", 108, {181, 123, 192}},
{L"NotSpecified-Coat", 109, {189, 249, 62}},
{L"NotSpecified-Coatrack", 110, {136, 15, 19}},
{L"NotSpecified-CorkBoard", 111, {167, 98, 139}},
{L"NotSpecified-CounterTop", 112, {6, 14, 93}},
{L"NotSpecified-Drawers", 113, {216, 156, 242}},
{L"NotSpecified-Drinkcontainer", 114, {238, 153, 75}},
{L"NotSpecified-Dumbbell", 115, {183, 111, 41}},
{L"NotSpecified-ElectricalOutlet", 116, {191, 199, 36}},
{L"NotSpecified-ElectricalSwitch", 117, {31, 81, 127}},
{L"NotSpecified-Elliptical", 118, {244, 92, 59}},
{L"NotSpecified-Food", 119, {221, 210, 211}},
{L"NotSpecified-Footwear", 120, {163, 245, 159}},
{L"NotSpecified-Hammer", 121, {118, 176, 85}},
{L"NotSpecified-LaptopBag", 122, {225, 32, 60}},
{L"NotSpecified-LIDAR", 123, {26, 105, 172}},
{L"NotSpecified-Mannequin", 124, {131, 135, 194}},
{L"NotSpecified-Markers", 125, {124, 23, 155}},
{L"NotSpecified-Microscope", 126, {128, 143, 248}},
{L"NotSpecified-NDI", 127, {220, 39, 237}},
{L"NotSpecified-Pinwheel", 128, {155, 24, 46}},
{L"NotSpecified-PunchingBag", 129, {152, 215, 122}},
{L"NotSpecified-Shower", 130, {78, 243, 86}},
{L"NotSpecified-Sign", 131, {29, 159, 136}},
{L"NotSpecified-Sink", 132, {209, 19, 236}},
{L"NotSpecified-Sissors", 133, {31, 229, 162}},
{L"NotSpecified-Sphere", 134, {151, 86, 155}},
{L"NotSpecified-StairClimber", 135, {52, 236, 130}},
{L"NotSpecified-stanchion", 136, {6, 76, 221}},
{L"NotSpecified-Stand", 137, {2, 12, 172}},
{L"NotSpecified-StationaryBike", 138, {69, 190, 196}},
{L"NotSpecified-Tape", 139, {176, 3, 131}},
{L"NotSpecified-Thermostat", 140, {33, 22, 47}},
{L"NotSpecified-Toilet", 141, {107, 45, 152}},
{L"NotSpecified-TrashCan", 142, {128, 72, 143}},
{L"NotSpecified-Tripod", 143, {225, 31, 162}},
{L"NotSpecified-Tub", 144, {110, 147, 77}},
{L"NotSpecified-Vent", 145, {137, 170, 110}},
{L"NotSpecified-WeightBench", 146, {183, 79, 90}},
{L"NotSpecified-Wire", 147, {0, 255, 38}},
{L"NotSpecified-Wrench", 148, {116, 3, 22}},
{L"NotSpecified-Pillar", 149, {128, 184, 144}},
{L"NotSpecified-Whiteboard", 150, {94, 240, 206}},
{L"Plant-Fake", 151, {216, 230, 169}},
{L"Plant-NotSpecified", 152, {182, 43, 63}},
{L"Plant-Organic", 153, {197, 86, 148}},
{L"Props-Book", 154, {247, 3, 157}},
{L"Props-Cushion", 155, {13, 94, 49}},
{L"Props-FloorVase", 156, {55, 213, 231}},
{L"Props-FlowerPot", 157, {239, 172, 43}},
{L"Props-Magazine", 158, {138, 164, 178}},
{L"Props-Mirror", 159, {116, 236, 157}},
{L"Props-NewsPaper", 160, {62, 80, 43}},
{L"Props-NotSpecified", 161, {9, 106, 45}},
{L"Props-Paintings", 162, {164, 117, 118}},
{L"Props-PaperSheet", 163, {85, 190, 229}},
{L"Props-PhotoFrame", 164, {18, 95, 80}},
{L"Props-Rug", 165, {192, 82, 167}},
{L"Props-Sculpture", 166, {130, 15, 64}},
{L"Props-Toys", 167, {136, 130, 225}},
{L"Sofa-ChaiseLounge", 168, {241, 154, 12}},
{L"Sofa-NotSpecified", 169, {113, 197, 139}},
{L"Sofa-Sectional", 170, {24, 132, 64}},
{L"Sofa-Straight", 171, {248, 137, 194}},
{L"Storage-Bookshelf", 172, {4, 69, 174}},
{L"Storage-ChinaCabinet", 173, {216, 165, 83}},
{L"Storage-Dresser", 174, {156, 24, 110}},
{L"Storage-FileCabinet", 175, {78, 78, 12}},
{L"Storage-MediaCabinet", 176, {168, 234, 45}},
{L"Storage-NotSpecified", 177, {29, 232, 238}},
{L"Storage-Rack", 178, {161, 36, 92}},
{L"Storage-Shelf", 179, {57, 187, 87}},
{L"Storage-Cabinet", 180, {164, 23, 45}},
{L"Storage-Stairs", 181, {10, 13, 61}},
{L"Table-CoffeeTable", 182, {178, 214, 30}},
{L"Table-ConferenceTable", 183, {25, 153, 182}},
{L"Table-Desk", 184, {171, 128, 231}},
{L"Table-DiningTable", 185, {12, 169, 156}},
{L"Table-Nightstand", 186, {247, 131, 122}},
{L"Table-NotSpecified", 187, {227, 214, 90}},
{L"Table-OfficeDesk", 188, {122, 253, 7}},
{L"Table-OfficeTable", 189, {6, 20, 5}},
{L"Table-SideTable", 190, {230, 211, 253}},
{L"Unassigned-Unassigned", 191, {141, 204, 180}},
{L"Utensils-Bowl", 192, {108, 89, 46}},
{L"Utensils-Cups", 193, {90, 250, 131}},
{L"Utensils-Knife", 194, {28, 67, 176}},
{L"Utensils-Mug", 195, {152, 218, 150}},
{L"Utensils-NotSpecified", 196, {211, 96, 157}},
{L"Utensils-Pans", 197, {73, 159, 109}},
{L"Utensils-Pots", 198, {7, 193, 112}},
{L"Utensils-Tray", 199, {60, 152, 1}},
{L"Vehicle-Car", 200, {189, 149, 61}},
{L"Vehicle-MotorCycle", 201, {2, 164, 102}},
{L"Vehicle-Segway", 202, {198, 165, 85}},
{L"Vehicle-Truck", 203, {134, 46, 106}},
{L"Wall-Blinds", 204, {9, 13, 13}},
{L"Wall-Curtain", 205, {52, 74, 241}},
{L"Wall-Unassigned", 206, {83, 158, 59}},
{L"Wall-Window", 207, {117, 162, 84}},
{L"Storage-BathroomVanity", 208, {127, 151, 35}},
{L"NotSpecified-Unassigned", 209, {143, 133, 123}},
{L"Storage-Nightstand", 210, {181, 112, 177}},
{L"Storage-Unassigned", 211, {73, 125, 140}},
{L"Props-Unassigned", 212, {156, 127, 134}},
{L"Storage-ArmChair", 213, {102, 111, 19}},
{L"NotSpecified-LaundryBasket", 214, {106, 168, 192}},
{L"Props-Decorations", 215, {49, 242, 177}},
{L"NotSpecified-Fireplace", 216, {96, 128, 236}},
{L"NotSpecified-Drinkware", 217, {6, 247, 22}},
{L"Sofa-LoungeChair", 218, {167, 92, 66}},
{L"NotSpecified-NotSpecified", 219, {174, 127, 40}},
{L"Mouse", 220, {65, 33, 210}},
{L"Bag", 221, {168, 71, 185}},
{L"Fridge", 222, {255, 127, 94}},
{L"Stand", 223, {246, 160, 193}},
{L"Sign", 224, {143, 221, 54}},
{L"Sphere", 225, {255, 207, 172}},
{L"Tripod", 227, {255, 235, 46}},
{L"PinWheel", 228, {13, 92, 139}},
{L"Kart", 229, {49, 3, 27}},
{L"Box", 230, {134, 215, 144}},
{L"Light", 231, {140, 3, 56}},
{L"Keyboard ", 232, {7, 66, 58}},
{L"Scupture", 233, {240, 191, 82}},
{L"Lamp", 234, {189, 8, 78}},
{L"Microscope ", 235, {255, 211, 112}},
{L"Case ", 236, {59, 155, 70}},
{L"Ax", 237, {157, 117, 29}},
{L"Manikin_Parts ", 238, {67, 141, 186}},
{L"Clothing ", 239, {4, 122, 55}},
{L"CoatRack", 240, {211, 52, 114}},
{L"DrinkContainer ", 241, {35, 23, 0}},
{L"MousePad", 242, {68, 28, 0}},
{L"Tape", 243, {107, 173, 211}},
{L"Sissors ", 245, {53, 24, 143}},
{L"Headphones ", 246, {45, 212, 189}},
};
constexpr auto NumLabels = sizeof(LabelStorage) / sizeof(Label);
// Dictionary to quickly access labels by numeric label ID
using LabelDictionary = std::map<uint32_t, const Label*>;
static LabelDictionary Labels;
SceneUnderstandingRenderer::SceneUnderstandingRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources)
: RenderableObject(deviceResources)
{
// If the label dictionary is still empty, build it from static data
if (Labels.empty())
{
for (size_t i = 0; i < NumLabels; ++i)
{
Labels[LabelStorage[i].Index] = &LabelStorage[i];
}
}
}
void SceneUnderstandingRenderer::Update(
winrt::SceneUnderstanding::SceneProcessor& sceneProcessor,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem,
winrt::Windows::Perception::Spatial::SpatialStationaryFrameOfReference lastUpdateLocation)
{
m_vertices.clear();
// Calculate the head position at the time of the last SU update in render space. This information can be
// used for debug rendering.
winrt::Windows::Foundation::IReference<winrt::Windows::Foundation::Numerics::float3> lastUpdatePosInRenderSpace;
if (lastUpdateLocation)
{
auto lastUpdateCS = lastUpdateLocation.CoordinateSystem();
auto lastUpdateLocationToRender = lastUpdateCS.TryGetTransformTo(renderingCoordinateSystem);
if (lastUpdateLocationToRender)
{
lastUpdatePosInRenderSpace = winrt::Windows::Foundation::Numerics::transform({0, 0, 0}, lastUpdateLocationToRender.Value());
}
}
// Lambda to execute for each quad returned by SU. Adds the quad to the vertex buffer for rendering, using
// the color indicated by the label dictionary for the quad's owner entity's type. Optionally draws debug
// rays from the last update position to each quad.
auto processQuadForRendering = [this, &renderingCoordinateSystem, &lastUpdatePosInRenderSpace](
const winrt::SceneUnderstanding::Entity& entity,
const winrt::SceneUnderstanding::Quad& quad,
const winrt::Windows::Foundation::Numerics::float4x4& entityToAnchorTransform,
const winrt::Windows::Perception::Spatial::SpatialCoordinateSystem& entityAnchorCS) {
// Determine the transform to go from entity space to rendering space
winrt::Windows::Foundation::IReference<winrt::Windows::Foundation::Numerics::float4x4> anchorToRenderingRef =
entityAnchorCS.TryGetTransformTo(renderingCoordinateSystem);
if (!anchorToRenderingRef)
{
return;
}
winrt::Windows::Foundation::Numerics::float4x4 anchorToRenderingTransform = anchorToRenderingRef.Value();
winrt::Windows::Foundation::Numerics::float4x4 entityToRenderingTransform = entityToAnchorTransform * anchorToRenderingTransform;
// Create the quad's corner points in entity space and transform them to rendering space
const float width = quad.WidthInMeters();
const float height = quad.HeightInMeters();
winrt::Windows::Foundation::Numerics::float3 positions[4] = {
{-width / 2, -height / 2, 0.0f}, {width / 2, -height / 2, 0.0f}, {-width / 2, height / 2, 0.0f}, {width / 2, height / 2, 0.0f}};
for (int i = 0; i < 4; ++i)
{
positions[i] = winrt::Windows::Foundation::Numerics::transform(positions[i], entityToRenderingTransform);
}
// Determine the color with which to draw the quad
winrt::Windows::Foundation::Numerics::float3 color{1.0f, 1.0f, 0.0f};
auto labelPos = Labels.find(static_cast<uint32_t>(entity.Label()));
if (labelPos != Labels.end())
{
const Label& label = *labelPos->second;
color = {label.RGB[0] / 255.0f, label.RGB[1] / 255.0f, label.RGB[2] / 255.0f};
}
// Add triangles to render the quad (both winding orders to guarantee double-sided rendering)
AppendColoredTriangle(positions[0], positions[3], positions[1], color, m_vertices);
AppendColoredTriangle(positions[0], positions[2], positions[3], color, m_vertices);
AppendColoredTriangle(positions[1], positions[3], positions[0], color, m_vertices);
AppendColoredTriangle(positions[3], positions[2], positions[0], color, m_vertices);
/** /
// Debug code: draw a ray from the last update position to the center of each visible quad
if (lastUpdatePosInRenderSpace)
{
float rayEndRadius = 0.05f;
winrt::Windows::Foundation::Numerics::float3 rayEndPositions[4] = {
{-rayEndRadius, 0.0f, 0.0f},
{rayEndRadius, 0.0f, 0.0f},
{0.0f, -rayEndRadius, 0.0f},
{0.0f, rayEndRadius, 0.0f},
};
for (int i = 0; i < 4; ++i)
{
rayEndPositions[i] = winrt::Windows::Foundation::Numerics::transform(rayEndPositions[i], entityToRenderingTransform);
}
AppendColoredTriangle(lastUpdatePosInRenderSpace.Value(), rayEndPositions[0], rayEndPositions[1], color, m_vertices);
AppendColoredTriangle(lastUpdatePosInRenderSpace.Value(), rayEndPositions[1], rayEndPositions[0], color, m_vertices);
AppendColoredTriangle(lastUpdatePosInRenderSpace.Value(), rayEndPositions[2], rayEndPositions[3], color, m_vertices);
AppendColoredTriangle(lastUpdatePosInRenderSpace.Value(), rayEndPositions[3], rayEndPositions[2], color, m_vertices);
}
/**/
};
// Execute the above lambda for each quad known by the SceneProcessor
ForEachQuad(sceneProcessor, processQuadForRendering);
// The geometry we added is already in rendering space, so the model transform must be identity.
auto modelTransform = winrt::Windows::Foundation::Numerics::float4x4::identity();
UpdateModelConstantBuffer(modelTransform);
}
void SceneUnderstandingRenderer::DebugLogState(
winrt::SceneUnderstanding::SceneProcessor& sceneProcessor,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem,
winrt::Windows::Perception::Spatial::SpatialStationaryFrameOfReference lastUpdateLocation)
{
// Calculate the head position at the time of the last SU update in render space. This information can be
// used for debug rendering.
    winrt::Windows::Perception::Spatial::SpatialCoordinateSystem lastUpdateCS{nullptr};
    winrt::Windows::Foundation::IReference<winrt::Windows::Foundation::Numerics::float3> lastUpdatePosInRenderSpaceRef;
    if (lastUpdateLocation)
    {
        // Only query the coordinate system once we know the update location is valid.
        lastUpdateCS = lastUpdateLocation.CoordinateSystem();
        auto lastUpdateLocationToRender = lastUpdateCS.TryGetTransformTo(renderingCoordinateSystem);
if (lastUpdateLocationToRender)
{
lastUpdatePosInRenderSpaceRef = winrt::Windows::Foundation::Numerics::transform({0, 0, 0}, lastUpdateLocationToRender.Value());
}
}
if (!lastUpdatePosInRenderSpaceRef)
{
return;
}
winrt::Windows::Foundation::Numerics::float3 lastUpdatePosInRenderSpace = lastUpdatePosInRenderSpaceRef.Value();
auto logQuad = [this, &lastUpdateCS](
const winrt::SceneUnderstanding::Entity& entity,
const winrt::SceneUnderstanding::Quad& quad,
const winrt::Windows::Foundation::Numerics::float4x4& entityToAnchorTransform,
const winrt::Windows::Perception::Spatial::SpatialCoordinateSystem& entityAnchorCS) {
// Determine transform from entity space to last update pose space
winrt::Windows::Foundation::IReference<winrt::Windows::Foundation::Numerics::float4x4> anchorToLastUpdateRef =
entityAnchorCS.TryGetTransformTo(lastUpdateCS);
if (!anchorToLastUpdateRef)
{
return;
}
winrt::Windows::Foundation::Numerics::float4x4 anchorToLastUpdateTransform = anchorToLastUpdateRef.Value();
winrt::Windows::Foundation::Numerics::float4x4 entityToLastUpdateTransform = entityToAnchorTransform * anchorToLastUpdateTransform;
// Determine various sizes, position, and distance from head
const float width = quad.WidthInMeters();
const float height = quad.HeightInMeters();
const float radius = sqrtf(width * width + height * height) / 2;
const winrt::Windows::Foundation::Numerics::float3 position = winrt::Windows::Foundation::Numerics::transform(
winrt::Windows::Foundation::Numerics::float3::zero(), entityToLastUpdateTransform);
const float distance = winrt::Windows::Foundation::Numerics::length(position);
const wchar_t* labelName = L"<unknown>";
auto labelPos = Labels.find(static_cast<uint32_t>(entity.Label()));
if (labelPos != Labels.end())
{
const Label& label = *labelPos->second;
labelName = label.Name;
}
DebugLog(
L" %s (%.2f x %.2f m, radius: %.2f m) at %.2f;%.2f;%.2f (distance: %.2f m)",
labelName,
width,
height,
radius,
position.x,
position.y,
position.z,
distance);
};
DebugLog(L"--- SU Update ---");
DebugLog(
L" Update position (in root space): (%.2f; %.2f; %.2f)",
lastUpdatePosInRenderSpace.x,
lastUpdatePosInRenderSpace.y,
lastUpdatePosInRenderSpace.z);
DebugLog(L" Quads (in head pose space):");
ForEachQuad(sceneProcessor, logQuad);
}
void SceneUnderstandingRenderer::Draw(unsigned int numInstances)
{
if (m_vertices.empty())
{
return;
}
const UINT stride = sizeof(m_vertices[0]);
const UINT offset = 0;
D3D11_SUBRESOURCE_DATA vertexBufferData = {0};
vertexBufferData.pSysMem = m_vertices.data();
const CD3D11_BUFFER_DESC vertexBufferDesc(static_cast<UINT>(m_vertices.size() * stride), D3D11_BIND_VERTEX_BUFFER);
winrt::com_ptr<ID3D11Buffer> vertexBuffer;
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&vertexBufferDesc, &vertexBufferData, vertexBuffer.put()));
auto context = m_deviceResources->GetD3DDeviceContext();
context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
ID3D11Buffer* pBuffer = vertexBuffer.get();
context->IASetVertexBuffers(0, 1, &pBuffer, &stride, &offset);
context->DrawInstanced(static_cast<UINT>(m_vertices.size()), numInstances, offset, 0);
}
template <typename Func>
void SceneUnderstandingRenderer::ForEachQuad(winrt::SceneUnderstanding::SceneProcessor& sceneProcessor, Func f)
{
// Collect all components, then iterate to find quad entities
winrt::com_array<winrt::SceneUnderstanding::Component> components;
sceneProcessor.GetAllComponents(components);
for (auto& component : components)
{
winrt::SceneUnderstanding::Entity entity = component.try_as<winrt::SceneUnderstanding::Entity>();
if (!entity)
{
continue;
}
winrt::SceneUnderstanding::Quad quad{nullptr};
winrt::SceneUnderstanding::Transform transform{nullptr};
winrt::SceneUnderstanding::SpatialCoordinateSystem spatialCS{nullptr};
winrt::com_array<winrt::SceneUnderstanding::Id> associatedComponentIds;
entity.GetAllAssociatedComponentIds(associatedComponentIds);
for (auto& id : associatedComponentIds)
{
winrt::SceneUnderstanding::Component ac = sceneProcessor.GetComponent(id);
if (auto q = ac.try_as<winrt::SceneUnderstanding::Quad>())
{
quad = q;
continue;
}
if (auto t = ac.try_as<winrt::SceneUnderstanding::Transform>())
{
transform = t;
continue;
}
if (auto s = ac.try_as<winrt::SceneUnderstanding::SpatialCoordinateSystem>())
{
spatialCS = s;
continue;
}
}
// Don't proceed if any essential bit of data is missing
if (!quad || !transform || !spatialCS)
{
continue;
}
// Determine the transform from the entity to its anchor
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem entityAnchorCS{nullptr};
try
{
entityAnchorCS = winrt::Windows::Perception::Spatial::Preview::SpatialGraphInteropPreview::CreateCoordinateSystemForNode(
spatialCS.SpatialCoordinateGuid());
}
catch (const winrt::hresult_error&)
{
continue;
}
winrt::Windows::Foundation::Numerics::float4x4 entityToAnchorTransform = transform.TransformationMatrix();
f(entity, quad, entityToAnchorTransform, entityAnchorCS);
}
}

View file

@ -0,0 +1,47 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "RenderableObject.h"
#include <vector>
#include <winrt/SceneUnderstanding.h>
#include <winrt/Windows.UI.Input.Spatial.h>
namespace RemotingHostSample
{
class SceneUnderstandingRenderer : public RenderableObject
{
public:
        SceneUnderstandingRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources);
void Update(
winrt::SceneUnderstanding::SceneProcessor& sceneProcessor,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem,
winrt::Windows::Perception::Spatial::SpatialStationaryFrameOfReference lastUpdateLocation);
void DebugLogState(
winrt::SceneUnderstanding::SceneProcessor& sceneProcessor,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem,
winrt::Windows::Perception::Spatial::SpatialStationaryFrameOfReference lastUpdateLocation);
private:
void Draw(unsigned int numInstances) override;
template <typename Func>
void ForEachQuad(winrt::SceneUnderstanding::SceneProcessor& sceneProcessor, Func f);
private:
std::vector<VertexPositionNormalColor> m_vertices;
};
} // namespace RemotingHostSample

View file

@ -0,0 +1,33 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
// Constant buffer used to send hologram position transform to the shader pipeline.
struct ModelConstantBuffer
{
DirectX::XMFLOAT4X4 model;
DirectX::XMFLOAT4X4 normal; // Normal transform matrix.
};
// Assert that the constant buffer remains 16-byte aligned (best practice).
static_assert(
(sizeof(ModelConstantBuffer) % (sizeof(float) * 4)) == 0,
"Model constant buffer size must be 16-byte aligned (16 bytes is the length of four floats).");
// Used to send per-vertex data to the vertex shader.
struct VertexPositionNormalColor
{
DirectX::XMFLOAT3 pos;
DirectX::XMFLOAT3 normal; // Normal.
DirectX::XMFLOAT3 color;
};

View file

@ -0,0 +1,193 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "SpatialInputHandler.h"
#include <winrt/Windows.UI.Input.Spatial.h>
// Creates and initializes a GestureRecognizer that listens for spatial input gestures.
SpatialInputHandler::SpatialInputHandler()
{
// The interaction manager provides an event that informs the app when spatial interactions are detected.
m_interactionManager = winrt::Windows::UI::Input::Spatial::SpatialInteractionManager::GetForCurrentView();
m_gestureRecognizer = winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer(
winrt::Windows::UI::Input::Spatial::SpatialGestureSettings::Tap |
winrt::Windows::UI::Input::Spatial::SpatialGestureSettings::ManipulationTranslate);
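    // Forward every detected interaction to the gesture recognizer; the resulting gesture events are
    // cached under a lock and polled by the app's update loop through the CheckFor* methods below.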
m_interactionDetectedEventToken =
m_interactionManager.InteractionDetected(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialInteractionManager,
winrt::Windows::UI::Input::Spatial::SpatialInteractionDetectedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialInteractionManager,
winrt::Windows::UI::Input::Spatial::SpatialInteractionDetectedEventArgs args) {
m_gestureRecognizer.CaptureInteraction(args.Interaction());
}));
m_tappedEventToken = m_gestureRecognizer.Tapped(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialTappedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer, winrt::Windows::UI::Input::Spatial::SpatialTappedEventArgs args) {
std::lock_guard _lg(m_manipulationStateLock);
m_tapped = args;
}));
m_manipulationStartedEventToken =
m_gestureRecognizer.ManipulationStarted(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationStartedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationStartedEventArgs args) {
std::lock_guard _lg(m_manipulationStateLock);
m_manipulationStarted = args;
}));
m_manipulationUpdatedEventToken =
m_gestureRecognizer.ManipulationUpdated(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationUpdatedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationUpdatedEventArgs args) {
std::lock_guard _lg(m_manipulationStateLock);
m_manipulationUpdated = args;
}));
m_manipulationCompletedEventToken =
m_gestureRecognizer.ManipulationCompleted(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationCompletedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationCompletedEventArgs) {
std::lock_guard _lg(m_manipulationStateLock);
m_manipulationResult = ManipulationResult::Completed;
}));
m_manipulationCanceledEventToken =
m_gestureRecognizer.ManipulationCanceled(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationCanceledEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationCanceledEventArgs) {
std::lock_guard _lg(m_manipulationStateLock);
m_manipulationResult = ManipulationResult::Canceled;
}));
m_navigationStartedEventToken =
m_gestureRecognizer.NavigationStarted(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationStartedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationStartedEventArgs args) {
char buf[128];
sprintf_s(
buf,
"NS: %d %d %d\n",
static_cast<int>(args.IsNavigatingX()),
static_cast<int>(args.IsNavigatingY()),
static_cast<int>(args.IsNavigatingZ()));
OutputDebugStringA(buf);
}));
m_navigationUpdatedEventToken =
m_gestureRecognizer.NavigationUpdated(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationUpdatedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationUpdatedEventArgs args) {
winrt::Windows::Foundation::Numerics::float3 offset = args.NormalizedOffset();
char buf[128];
sprintf_s(buf, "NU: %f %f %f\n", offset.x, offset.y, offset.z);
OutputDebugStringA(buf);
}));
m_navigationCompletedEventToken =
m_gestureRecognizer.NavigationCompleted(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationCompletedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationCompletedEventArgs args) {
winrt::Windows::Foundation::Numerics::float3 offset = args.NormalizedOffset();
char buf[128];
sprintf_s(buf, "NC: %f %f %f\n", offset.x, offset.y, offset.z);
OutputDebugStringA(buf);
}));
m_navigationCanceledEventToken =
m_gestureRecognizer.NavigationCanceled(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationCanceledEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationCanceledEventArgs args) {
char buf[128];
sprintf_s(buf, "N: canceled\n");
OutputDebugStringA(buf);
}));
}
SpatialInputHandler::~SpatialInputHandler()
{
    // Unregister the event handlers that were registered in the constructor.
    m_interactionManager.InteractionDetected(m_interactionDetectedEventToken);
    m_gestureRecognizer.Tapped(m_tappedEventToken);
    m_gestureRecognizer.ManipulationStarted(m_manipulationStartedEventToken);
    m_gestureRecognizer.ManipulationUpdated(m_manipulationUpdatedEventToken);
    m_gestureRecognizer.ManipulationCompleted(m_manipulationCompletedEventToken);
    m_gestureRecognizer.ManipulationCanceled(m_manipulationCanceledEventToken);
    m_gestureRecognizer.NavigationStarted(m_navigationStartedEventToken);
    m_gestureRecognizer.NavigationUpdated(m_navigationUpdatedEventToken);
    m_gestureRecognizer.NavigationCompleted(m_navigationCompletedEventToken);
    m_gestureRecognizer.NavigationCanceled(m_navigationCanceledEventToken);
}
// Checks if the user performed an input gesture since the last call to this method.
// Allows the main update loop to check for asynchronous changes to the user
// input state.
winrt::Windows::UI::Input::Spatial::SpatialTappedEventArgs SpatialInputHandler::CheckForTapped()
{
std::lock_guard _lg(m_manipulationStateLock);
auto tapped = m_tapped;
m_tapped = nullptr;
return tapped;
}
winrt::Windows::UI::Input::Spatial::SpatialManipulationStartedEventArgs SpatialInputHandler::CheckForManipulationStarted()
{
std::lock_guard _lg(m_manipulationStateLock);
auto manipulationStarted = m_manipulationStarted;
m_manipulationStarted = nullptr;
return manipulationStarted;
}
winrt::Windows::UI::Input::Spatial::SpatialManipulationUpdatedEventArgs SpatialInputHandler::CheckForManipulationUpdated()
{
std::lock_guard _lg(m_manipulationStateLock);
auto manipulationUpdated = m_manipulationUpdated;
m_manipulationUpdated = nullptr;
return manipulationUpdated;
}
SpatialInputHandler::ManipulationResult SpatialInputHandler::CheckForManipulationResult()
{
std::lock_guard _lg(m_manipulationStateLock);
auto manipulationResult = m_manipulationResult;
m_manipulationResult = ManipulationResult::Unknown;
return manipulationResult;
}

View file

@ -0,0 +1,62 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <vector>
#include <winrt/Windows.UI.Input.Spatial.h>
// Sample gesture handler.
// Hooks up events to recognize tap and manipulation gestures, and caches the resulting event data so the update loop can poll it.
class SpatialInputHandler
{
public:
SpatialInputHandler();
~SpatialInputHandler();
enum class ManipulationResult
{
Unknown = 0,
Completed,
Canceled,
};
winrt::Windows::UI::Input::Spatial::SpatialTappedEventArgs CheckForTapped();
winrt::Windows::UI::Input::Spatial::SpatialManipulationStartedEventArgs CheckForManipulationStarted();
winrt::Windows::UI::Input::Spatial::SpatialManipulationUpdatedEventArgs CheckForManipulationUpdated();
ManipulationResult CheckForManipulationResult();
private:
// API objects used to process gesture input, and generate gesture events.
winrt::Windows::UI::Input::Spatial::SpatialInteractionManager m_interactionManager = nullptr;
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer m_gestureRecognizer = nullptr;
// Event registration token.
winrt::event_token m_interactionDetectedEventToken;
winrt::event_token m_tappedEventToken;
winrt::event_token m_manipulationStartedEventToken;
winrt::event_token m_manipulationUpdatedEventToken;
winrt::event_token m_manipulationCompletedEventToken;
winrt::event_token m_manipulationCanceledEventToken;
winrt::event_token m_navigationStartedEventToken;
winrt::event_token m_navigationUpdatedEventToken;
winrt::event_token m_navigationCompletedEventToken;
winrt::event_token m_navigationCanceledEventToken;
    // Lock protecting the cached gesture state that the event handlers write and the CheckFor* methods consume.
std::recursive_mutex m_manipulationStateLock;
winrt::Windows::UI::Input::Spatial::SpatialTappedEventArgs m_tapped = nullptr;
winrt::Windows::UI::Input::Spatial::SpatialManipulationStartedEventArgs m_manipulationStarted = nullptr;
winrt::Windows::UI::Input::Spatial::SpatialManipulationUpdatedEventArgs m_manipulationUpdated = nullptr;
ManipulationResult m_manipulationResult = ManipulationResult::Unknown;
};

View file

@ -0,0 +1,238 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "SpatialInputRenderer.h"
#include <winrt/Windows.Foundation.Numerics.h>
#include <winrt/Windows.Perception.People.h>
#include <winrt/Windows.Perception.Spatial.h>
#include "../Common/DirectXHelper.h"
#include <algorithm>
namespace
{
using namespace DirectX;
void AppendColoredTriangle(XMFLOAT3 p0, XMFLOAT3 p1, XMFLOAT3 p2, XMFLOAT3 color, std::vector<VertexPositionNormalColor>& vertices)
{
VertexPositionNormalColor vertex;
vertex.color = color;
vertex.normal = XMFLOAT3(0.0f, 0.0f, 0.0f);
vertex.pos = p0;
vertices.push_back(vertex);
vertex.pos = p1;
vertices.push_back(vertex);
vertex.pos = p2;
vertices.push_back(vertex);
}
} // namespace
SpatialInputRenderer::SpatialInputRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources)
: RenderableObject(deviceResources)
{
m_manager = winrt::Windows::UI::Input::Spatial::SpatialInteractionManager::GetForCurrentView();
m_referenceFrame = winrt::Windows::Perception::Spatial::SpatialLocator::GetDefault().CreateAttachedFrameOfReferenceAtCurrentHeading();
}
void SpatialInputRenderer::Update(
winrt::Windows::Perception::PerceptionTimestamp timestamp,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem)
{
m_transforms.clear();
m_joints.clear();
auto headingAdjustment = m_referenceFrame.TryGetRelativeHeadingAtTimestamp(timestamp);
if (headingAdjustment)
{
// keep coordinate systems facing user
m_referenceFrame.AdjustHeading(-headingAdjustment.Value());
auto coordinateSystem = m_referenceFrame.GetStationaryCoordinateSystemAtTimestamp(timestamp);
auto spatialPointerPose = winrt::Windows::UI::Input::Spatial::SpatialPointerPose::TryGetAtTimestamp(coordinateSystem, timestamp);
if (spatialPointerPose)
{
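            // If eye tracking data is available, place a marker one meter along the gaze ray.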
if (auto eyesPose = spatialPointerPose.Eyes())
{
if (auto gaze = eyesPose.Gaze())
{
auto position = gaze.Value().Origin + gaze.Value().Direction;
quaternion orientation = quaternion::identity();
m_transforms.emplace_back(QTransform(position, orientation));
}
}
}
auto states = m_manager.GetDetectedSourcesAtTimestamp(timestamp);
m_transforms.reserve(states.Size());
for (const auto& state : states)
{
auto location = state.Properties().TryGetLocation(coordinateSystem);
if (location)
{
if (location.Position())
{
using namespace winrt::Windows::Foundation::Numerics;
auto position = location.Position().Value();
quaternion orientation = quaternion::identity();
if (location.Orientation())
{
orientation = location.Orientation().Value();
}
DirectX::XMVECTOR xmPosition = DirectX::XMLoadFloat3(&position);
DirectX::XMVECTOR xmOrientation = DirectX::XMLoadQuaternion(&orientation);
m_transforms.emplace_back(QTransform(xmPosition, xmOrientation));
}
if (auto sourcePose = location.SourcePointerPose())
{
m_joints.push_back({sourcePose.Position(), sourcePose.Orientation(), 1.0f, 0.01f});
}
}
auto handPose = state.TryGetHandPose();
if (handPose)
{
constexpr const winrt::Windows::Perception::People::HandJointKind jointKinds[] = {
winrt::Windows::Perception::People::HandJointKind::Palm,
winrt::Windows::Perception::People::HandJointKind::Wrist,
winrt::Windows::Perception::People::HandJointKind::ThumbMetacarpal,
winrt::Windows::Perception::People::HandJointKind::ThumbProximal,
winrt::Windows::Perception::People::HandJointKind::ThumbDistal,
winrt::Windows::Perception::People::HandJointKind::ThumbTip,
winrt::Windows::Perception::People::HandJointKind::IndexMetacarpal,
winrt::Windows::Perception::People::HandJointKind::IndexProximal,
winrt::Windows::Perception::People::HandJointKind::IndexIntermediate,
winrt::Windows::Perception::People::HandJointKind::IndexDistal,
winrt::Windows::Perception::People::HandJointKind::IndexTip,
winrt::Windows::Perception::People::HandJointKind::MiddleMetacarpal,
winrt::Windows::Perception::People::HandJointKind::MiddleProximal,
winrt::Windows::Perception::People::HandJointKind::MiddleIntermediate,
winrt::Windows::Perception::People::HandJointKind::MiddleDistal,
winrt::Windows::Perception::People::HandJointKind::MiddleTip,
winrt::Windows::Perception::People::HandJointKind::RingMetacarpal,
winrt::Windows::Perception::People::HandJointKind::RingProximal,
winrt::Windows::Perception::People::HandJointKind::RingIntermediate,
winrt::Windows::Perception::People::HandJointKind::RingDistal,
winrt::Windows::Perception::People::HandJointKind::RingTip,
winrt::Windows::Perception::People::HandJointKind::LittleMetacarpal,
winrt::Windows::Perception::People::HandJointKind::LittleProximal,
winrt::Windows::Perception::People::HandJointKind::LittleIntermediate,
winrt::Windows::Perception::People::HandJointKind::LittleDistal,
winrt::Windows::Perception::People::HandJointKind::LittleTip};
constexpr const size_t jointCount = _countof(jointKinds);
winrt::Windows::Perception::People::JointPose jointPoses[jointCount] = {};
if (handPose.TryGetJoints(coordinateSystem, jointKinds, jointPoses))
{
for (size_t jointIndex = 0; jointIndex < jointCount; ++jointIndex)
{
m_joints.push_back({jointPoses[jointIndex].Position,
jointPoses[jointIndex].Orientation,
jointPoses[jointIndex].Radius * 3,
jointPoses[jointIndex].Radius});
}
}
}
}
auto modelTransform = coordinateSystem.TryGetTransformTo(renderingCoordinateSystem);
if (modelTransform)
{
UpdateModelConstantBuffer(modelTransform.Value());
}
}
}
void SpatialInputRenderer::Draw(unsigned int numInstances)
{
if (!m_transforms.empty())
{
std::vector<VertexPositionNormalColor> vertices;
for (const auto& transform : m_transforms)
{
DirectX::XMFLOAT3 trianglePositions[3] = {
DirectX::XMFLOAT3(0.0f, 0.03f, 0.0f), DirectX::XMFLOAT3(0.01f, 0.0f, 0.0f), DirectX::XMFLOAT3(-0.01f, 0.0f, 0.0f)};
AppendColoredTriangle(
transform.TransformPosition(trianglePositions[0]),
transform.TransformPosition(trianglePositions[1]),
transform.TransformPosition(trianglePositions[2]),
DirectX::XMFLOAT3{0, 0, 1},
vertices);
}
for (const auto& joint : m_joints)
{
auto jointVertices = CalculateJointVisualizationVertices(joint.position, joint.orientation, joint.length, joint.radius);
vertices.insert(vertices.end(), jointVertices.begin(), jointVertices.end());
}
const UINT stride = sizeof(vertices[0]);
const UINT offset = 0;
D3D11_SUBRESOURCE_DATA vertexBufferData = {0};
vertexBufferData.pSysMem = vertices.data();
const CD3D11_BUFFER_DESC vertexBufferDesc(static_cast<UINT>(vertices.size() * stride), D3D11_BIND_VERTEX_BUFFER);
winrt::com_ptr<ID3D11Buffer> vertexBuffer;
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&vertexBufferDesc, &vertexBufferData, vertexBuffer.put()));
m_deviceResources->UseD3DDeviceContext([&](auto context) {
context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
ID3D11Buffer* pBuffer = vertexBuffer.get();
context->IASetVertexBuffers(0, 1, &pBuffer, &stride, &offset);
context->DrawInstanced(static_cast<UINT>(vertices.size()), numInstances, offset, 0);
});
}
}
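// The per-joint visualization built below is an eight-sided bipyramid: a square cross
// section of half-width jointRadius / sqrt(2) sits centerHeight behind the joint origin
// along -Z, one apex lies at the joint origin and the other at -jointLength, and each of
// the eight faces gets a distinct color so the joint orientation stays readable.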
std::vector<VertexPositionNormalColor> SpatialInputRenderer::CalculateJointVisualizationVertices(
float3 jointPosition, quaternion jointOrientation, float jointLength, float jointRadius)
{
using namespace DirectX;
constexpr const size_t verticesCount = 2 * 4 * 3;
std::vector<VertexPositionNormalColor> vertices;
vertices.reserve(verticesCount);
float centerHeight = std::min<float>(jointRadius, 0.5f * jointLength);
float centerXandY = jointRadius / sqrtf(2.0f);
QTransform jointTransform = QTransform(jointPosition, jointOrientation);
XMFLOAT3 baseVertexPosition = jointTransform.TransformPosition(XMFLOAT3(0.0f, 0.0f, 0.0f));
XMFLOAT3 centerVertexPositions[4] = {
jointTransform.TransformPosition(XMFLOAT3(-centerXandY, -centerXandY, -centerHeight)),
jointTransform.TransformPosition(XMFLOAT3(-centerXandY, +centerXandY, -centerHeight)),
jointTransform.TransformPosition(XMFLOAT3(+centerXandY, +centerXandY, -centerHeight)),
jointTransform.TransformPosition(XMFLOAT3(+centerXandY, -centerXandY, -centerHeight)),
};
XMFLOAT3 topVertexPosition = jointTransform.TransformPosition(XMFLOAT3(0.0f, 0.0f, -jointLength));
AppendColoredTriangle(baseVertexPosition, centerVertexPositions[0], centerVertexPositions[1], XMFLOAT3(0.0f, 0.0f, 0.4f), vertices);
AppendColoredTriangle(baseVertexPosition, centerVertexPositions[1], centerVertexPositions[2], XMFLOAT3(0.0f, 0.4f, 0.0f), vertices);
AppendColoredTriangle(baseVertexPosition, centerVertexPositions[2], centerVertexPositions[3], XMFLOAT3(0.4f, 0.0f, 0.0f), vertices);
AppendColoredTriangle(baseVertexPosition, centerVertexPositions[3], centerVertexPositions[0], XMFLOAT3(0.4f, 0.4f, 0.0f), vertices);
AppendColoredTriangle(topVertexPosition, centerVertexPositions[1], centerVertexPositions[0], XMFLOAT3(0.0f, 0.0f, 0.6f), vertices);
AppendColoredTriangle(topVertexPosition, centerVertexPositions[2], centerVertexPositions[1], XMFLOAT3(0.0f, 0.6f, 0.0f), vertices);
AppendColoredTriangle(topVertexPosition, centerVertexPositions[3], centerVertexPositions[2], XMFLOAT3(0.6f, 0.0f, 0.0f), vertices);
AppendColoredTriangle(topVertexPosition, centerVertexPositions[0], centerVertexPositions[3], XMFLOAT3(0.6f, 0.6f, 0.0f), vertices);
return vertices;
}

Просмотреть файл

@ -0,0 +1,85 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "RenderableObject.h"
#include <vector>
#include <winrt/Windows.UI.Input.Spatial.h>
using namespace winrt::Windows::Foundation::Numerics;
struct QTransform
{
QTransform() = default;
QTransform(const DirectX::XMVECTOR& position, const DirectX::XMVECTOR& orientation)
: m_position(position)
, m_orientation(orientation)
{
}
QTransform(const float3& position, const quaternion& orientation)
: m_position(DirectX::XMLoadFloat3(&position))
, m_orientation(DirectX::XMLoadQuaternion(&orientation))
{
}
DirectX::XMVECTOR TransformNormal(const DirectX::XMVECTOR& normal) const
{
return DirectX::XMVector3Rotate(normal, m_orientation);
}
DirectX::XMVECTOR TransformPosition(const DirectX::XMVECTOR& position) const
{
DirectX::XMVECTOR rotated = TransformNormal(position);
return DirectX::XMVectorAdd(rotated, m_position);
}
DirectX::XMFLOAT3 TransformPosition(const DirectX::XMFLOAT3& position) const
{
DirectX::XMFLOAT3 result;
XMStoreFloat3(&result, TransformPosition(DirectX::XMLoadFloat3(&position)));
return result;
}
DirectX::XMVECTOR m_position;
DirectX::XMVECTOR m_orientation;
};
class SpatialInputRenderer : public RenderableObject
{
public:
SpatialInputRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources);
void Update(
winrt::Windows::Perception::PerceptionTimestamp timestamp,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem);
private:
struct Joint
{
float3 position;
quaternion orientation;
float length;
float radius;
};
private:
static std::vector<VertexPositionNormalColor>
CalculateJointVisualizationVertices(float3 jointPosition, quaternion jointOrientation, float jointLength, float jointRadius);
void Draw(unsigned int numInstances) override;
winrt::Windows::UI::Input::Spatial::SpatialInteractionManager m_manager{nullptr};
winrt::Windows::Perception::Spatial::SpatialLocatorAttachedFrameOfReference m_referenceFrame{nullptr};
std::vector<QTransform> m_transforms;
std::vector<Joint> m_joints;
};
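QTransform above composes a rotation with a translation: TransformPosition(p) rotates p by m_orientation and then adds m_position. A minimal standalone sketch of that composition using plain DirectXMath (the values and the program itself are illustrative, not part of the sample):

#include <DirectXMath.h>
#include <cstdio>

int main()
{
    using namespace DirectX;
    // A 180 degree turn around +Y followed by a one meter offset along +X,
    // mirroring what QTransform::TransformPosition computes.
    XMVECTOR orientation = XMQuaternionRotationAxis(XMVectorSet(0.0f, 1.0f, 0.0f, 0.0f), XM_PI);
    XMVECTOR position = XMVectorSet(1.0f, 0.0f, 0.0f, 0.0f);
    XMVECTOR p = XMVectorSet(0.0f, 0.0f, -1.0f, 0.0f);
    XMVECTOR transformed = XMVectorAdd(XMVector3Rotate(p, orientation), position);
    XMFLOAT3 result;
    XMStoreFloat3(&result, transformed);
    std::printf("%.2f %.2f %.2f\n", result.x, result.y, result.z); // prints 1.00 0.00 1.00
    return 0;
}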

Просмотреть файл

@ -0,0 +1,395 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "SpatialSurfaceMeshRenderer.h"
#include "Common\DirectXHelper.h"
using namespace winrt::Windows;
using namespace winrt::Windows::Perception::Spatial;
using namespace winrt::Windows::Graphics::DirectX;
using namespace winrt::Windows::Foundation::Numerics;
using namespace Concurrency;
// for debugging -> remove
bool g_freeze = false;
bool g_freezeOnFrame = false;
// Initializes device-dependent resources used for rendering the spatial surface mesh.
SpatialSurfaceMeshRenderer::SpatialSurfaceMeshRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources)
: m_deviceResources(deviceResources)
{
CreateDeviceDependentResources();
}
SpatialSurfaceMeshRenderer::~SpatialSurfaceMeshRenderer()
{
}
std::future<void> SpatialSurfaceMeshRenderer::CreateDeviceDependentResources()
{
auto asyncAccess = Surfaces::SpatialSurfaceObserver::RequestAccessAsync();
asyncAccess.Completed([this](auto handler, auto asyncStatus) {
m_surfaceObserver = Surfaces::SpatialSurfaceObserver();
m_observedSurfaceChangedToken = m_surfaceObserver.ObservedSurfacesChanged(
[this](Surfaces::SpatialSurfaceObserver, winrt::Windows::Foundation::IInspectable const& handler) {
OnObservedSurfaceChanged();
});
});
std::vector<byte> vertexShaderFileData = co_await DXHelper::ReadDataAsync(L"hsa_SRMeshVertexShader.cso");
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateVertexShader(
vertexShaderFileData.data(), vertexShaderFileData.size(), nullptr, m_vertexShader.put()));
constexpr std::array<D3D11_INPUT_ELEMENT_DESC, 1> vertexDesc = {{
{"POSITION", 0, DXGI_FORMAT_R16G16B16A16_SNORM, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
}};
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateInputLayout(
vertexDesc.data(),
static_cast<UINT>(vertexDesc.size()),
vertexShaderFileData.data(),
static_cast<UINT>(vertexShaderFileData.size()),
m_inputLayout.put()));
std::vector<byte> geometryShaderFileData = co_await DXHelper::ReadDataAsync(L"hsa_SRMeshGeometryShader.cso");
// After the pass-through geometry shader file is loaded, create the shader.
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateGeometryShader(
geometryShaderFileData.data(), geometryShaderFileData.size(), nullptr, m_geometryShader.put()));
std::vector<byte> pixelShaderFileData = co_await DXHelper::ReadDataAsync(L"hsa_SRMeshPixelShader.cso");
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreatePixelShader(
pixelShaderFileData.data(), pixelShaderFileData.size(), nullptr, m_pixelShader.put()));
const CD3D11_BUFFER_DESC constantBufferDesc(sizeof(SRMeshConstantBuffer), D3D11_BIND_CONSTANT_BUFFER);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&constantBufferDesc, nullptr, m_modelConstantBuffer.put()));
m_loadingComplete = true;
}
void SpatialSurfaceMeshRenderer::ReleaseDeviceDependentResources()
{
if (m_surfaceObserver)
{
m_surfaceObserver.ObservedSurfacesChanged(m_observedSurfaceChangedToken);
m_surfaceObserver = nullptr;
}
m_loadingComplete = false;
m_inputLayout = nullptr;
m_vertexShader = nullptr;
m_geometryShader = nullptr;
m_pixelShader = nullptr;
m_modelConstantBuffer = nullptr;
}
void SpatialSurfaceMeshRenderer::OnObservedSurfaceChanged()
{
if (g_freeze)
return;
m_surfaceChangedCounter++; // just for debugging purposes
m_surfaceChanged = true;
}
SpatialSurfaceMeshPart* SpatialSurfaceMeshRenderer::GetOrCreateMeshPart(winrt::guid id)
{
GUID key = id;
auto found = m_meshParts.find(key);
if (found == m_meshParts.cend())
{
m_meshParts[id] = std::make_unique<SpatialSurfaceMeshPart>(this);
return m_meshParts[id].get();
}
return found->second.get();
}
void SpatialSurfaceMeshRenderer::Update(winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem)
{
if (m_surfaceObserver == nullptr)
return;
// update bounding volume (every frame)
{
SpatialBoundingBox axisAlignedBoundingBox = {
{-5.0f, -5.0f, -2.5f},
{10.0f, 10.0f, 5.f},
};
SpatialBoundingVolume volume = SpatialBoundingVolume::FromBox(renderingCoordinateSystem, axisAlignedBoundingBox);
m_surfaceObserver.SetBoundingVolume(volume);
}
if (m_surfaceChanged)
{
// first mark all as not used
for (auto& pair : m_meshParts)
{
pair.second->m_inUse = false;
}
auto mapContainingSurfaceCollection = m_surfaceObserver.GetObservedSurfaces();
for (auto pair : mapContainingSurfaceCollection)
{
if (SpatialSurfaceMeshPart* meshPart = GetOrCreateMeshPart(pair.Key()))
{
meshPart->Update(pair.Value());
g_freeze = g_freezeOnFrame;
}
}
// purge the ones not used
for (MeshPartMap::const_iterator itr = m_meshParts.cbegin(); itr != m_meshParts.cend();)
{
itr = itr->second->IsInUse() ? std::next(itr) : m_meshParts.erase(itr);
}
m_surfaceChanged = false;
}
// every frame, bring the model matrix to rendering space
for (auto& pair : m_meshParts)
{
pair.second->UpdateModelMatrix(renderingCoordinateSystem);
}
}
void SpatialSurfaceMeshRenderer::Render(bool isStereo)
{
if (!m_loadingComplete || m_meshParts.empty())
return;
m_deviceResources->UseD3DDeviceContext([&](auto context) {
// Each vertex is one instance of the VertexPositionColorTexture struct.
const uint32_t stride = sizeof(SpatialSurfaceMeshPart::Vertex_t);
const uint32_t offset = 0;
ID3D11Buffer* pBufferToSet;
context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
context->IASetInputLayout(m_inputLayout.get());
// Attach the vertex shader.
context->VSSetShader(m_vertexShader.get(), nullptr, 0);
// Apply the model constant buffer to the vertex shader.
pBufferToSet = m_modelConstantBuffer.get();
context->VSSetConstantBuffers(0, 1, &pBufferToSet);
// geometry shader
context->GSSetShader(m_geometryShader.get(), nullptr, 0);
// pixel shader
context->PSSetShader(m_zfillOnly ? nullptr : m_pixelShader.get(), nullptr, 0);
// Apply the model constant buffer to the pixel shader.
pBufferToSet = m_modelConstantBuffer.get();
context->PSSetConstantBuffers(0, 1, &pBufferToSet);
// render each mesh part
for (auto& pair : m_meshParts)
{
SpatialSurfaceMeshPart* part = pair.second.get();
if (part->m_indexCount == 0)
continue;
if (part->m_needsUpload)
{
part->UploadData();
}
// update part specific model matrix
context->UpdateSubresource(m_modelConstantBuffer.get(), 0, nullptr, &part->m_constantBufferData, 0, 0);
ID3D11Buffer* pBufferToSet2 = part->m_vertexBuffer.get();
context->IASetVertexBuffers(0, 1, &pBufferToSet2, &stride, &offset);
context->IASetIndexBuffer(part->m_indexBuffer.get(), DXGI_FORMAT_R16_UINT, 0);
// draw the mesh
context->DrawIndexedInstanced(part->m_indexCount, isStereo ? 2 : 1, 0, 0, 0);
}
// set geometry shader back
context->GSSetShader(nullptr, nullptr, 0);
});
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////
// SRMeshPart
//////////////////////////////////////////////////////////////////////////////////////////////////////////
SpatialSurfaceMeshPart::SpatialSurfaceMeshPart(SpatialSurfaceMeshRenderer* owner)
: m_owner(owner)
{
auto identity = DirectX::XMMatrixIdentity();
m_constantBufferData.modelMatrix = reinterpret_cast<DirectX::XMFLOAT4X4&>(identity);
m_vertexScale.x = m_vertexScale.y = m_vertexScale.z = 1.0f;
}
void SpatialSurfaceMeshPart::Update(Surfaces::SpatialSurfaceInfo surfaceInfo)
{
m_inUse = true;
m_updateInProgress = true;
const double triangleDensity = 750.0; // maximum triangles per cubic meter
auto asyncOperation = surfaceInfo.TryComputeLatestMeshAsync(triangleDensity);
asyncOperation.Completed([this](winrt::Windows::Foundation::IAsyncOperation<Surfaces::SpatialSurfaceMesh> result, auto asyncStatus) {
Surfaces::SpatialSurfaceMesh mesh = result.GetResults();
UpdateMesh(mesh);
m_updateInProgress = false;
});
}
void SpatialSurfaceMeshPart::UpdateModelMatrix(winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem)
{
if (m_coordinateSystem == nullptr)
return;
auto modelTransform = m_coordinateSystem.TryGetTransformTo(renderingCoordinateSystem);
if (modelTransform)
{
float4x4 matrixWinRt = transpose(modelTransform.Value());
DirectX::XMMATRIX transformMatrix = DirectX::XMLoadFloat4x4(&matrixWinRt);
DirectX::XMMATRIX scaleMatrix = DirectX::XMMatrixScaling(m_vertexScale.x, m_vertexScale.y, m_vertexScale.z);
DirectX::XMMATRIX result = DirectX::XMMatrixMultiply(transformMatrix, scaleMatrix);
DirectX::XMStoreFloat4x4(&m_constantBufferData.modelMatrix, result);
}
}
void SpatialSurfaceMeshPart::UpdateMesh(Surfaces::SpatialSurfaceMesh mesh)
{
m_coordinateSystem = mesh.CoordinateSystem();
Surfaces::SpatialSurfaceMeshBuffer vertexBuffer = mesh.VertexPositions();
Surfaces::SpatialSurfaceMeshBuffer indexBuffer = mesh.TriangleIndices();
DirectXPixelFormat vertexFormat = vertexBuffer.Format();
DirectXPixelFormat indexFormat = indexBuffer.Format();
assert(vertexFormat == DirectXPixelFormat::R16G16B16A16IntNormalized);
assert(indexFormat == DirectXPixelFormat::R16UInt);
uint32_t vertexCount = vertexBuffer.ElementCount();
uint32_t indexCount = indexBuffer.ElementCount();
assert((indexCount % 3) == 0);
if (vertexCount == 0 || indexCount == 0)
{
m_indexCount = 0;
return;
}
// convert vertices:
{
Vertex_t* dest = MapVertices(vertexCount);
winrt::Windows::Storage::Streams::IBuffer vertexData = vertexBuffer.Data();
uint8_t* vertexRaw = vertexData.data();
int vertexStride = vertexData.Length() / vertexCount;
assert(vertexStride == 8); // DirectXPixelFormat::R16G16B16A16IntNormalized
winrt::Windows::Foundation::Numerics::float3 positionScale = mesh.VertexPositionScale();
m_vertexScale.x = positionScale.x;
m_vertexScale.y = positionScale.y;
m_vertexScale.z = positionScale.z;
memcpy(dest, vertexRaw, vertexCount * sizeof(Vertex_t));
UnmapVertices();
}
// convert indices
{
uint16_t* dest = MapIndices(indexCount);
winrt::Windows::Storage::Streams::IBuffer indexData = indexBuffer.Data();
uint16_t* source = (uint16_t*)indexData.data();
for (uint32_t i = 0; i < indexCount; i++)
{
assert(source[i] < vertexCount);
dest[i] = source[i];
}
UnmapIndices();
}
m_needsUpload = true;
}
SpatialSurfaceMeshPart::Vertex_t* SpatialSurfaceMeshPart::MapVertices(uint32_t vertexCount)
{
m_vertexCount = vertexCount;
if (vertexCount > m_vertexData.size())
m_vertexData.resize(vertexCount);
return &m_vertexData[0];
}
void SpatialSurfaceMeshPart::UnmapVertices()
{
}
uint16_t* SpatialSurfaceMeshPart::MapIndices(uint32_t indexCount)
{
m_indexCount = indexCount;
if (indexCount > m_indexData.size())
m_indexData.resize(indexCount);
return &m_indexData[0];
}
void SpatialSurfaceMeshPart::UnmapIndices()
{
}
void SpatialSurfaceMeshPart::UploadData()
{
if (m_vertexCount > m_allocatedVertexCount)
{
m_vertexBuffer = nullptr;
const int alignment = 1024;
m_allocatedVertexCount = ((m_vertexCount + alignment - 1) / alignment) * alignment; // align to reasonable size
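// e.g. a hypothetical m_vertexCount of 2500 with alignment 1024 rounds up to 3072 vertices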
const int elementSize = sizeof(Vertex_t);
const CD3D11_BUFFER_DESC vertexBufferDesc(
m_allocatedVertexCount * elementSize, D3D11_BIND_VERTEX_BUFFER, D3D11_USAGE_DYNAMIC, D3D11_CPU_ACCESS_WRITE);
winrt::check_hresult(m_owner->m_deviceResources->GetD3DDevice()->CreateBuffer(&vertexBufferDesc, nullptr, m_vertexBuffer.put()));
}
if (m_indexCount > m_allocatedIndexCount)
{
m_indexBuffer = nullptr;
const int alignment = 3 * 1024;
m_allocatedIndexCount = ((m_indexCount + alignment - 1) / alignment) * alignment; // align to reasonable size
const int elementSize = sizeof(uint16_t);
const CD3D11_BUFFER_DESC indexBufferDesc(
m_allocatedIndexCount * elementSize, D3D11_BIND_INDEX_BUFFER, D3D11_USAGE_DYNAMIC, D3D11_CPU_ACCESS_WRITE);
winrt::check_hresult(m_owner->m_deviceResources->GetD3DDevice()->CreateBuffer(&indexBufferDesc, nullptr, m_indexBuffer.put()));
}
// upload data
D3D11_MAPPED_SUBRESOURCE resource;
m_owner->m_deviceResources->UseD3DDeviceContext([&](auto context) {
context->Map(m_vertexBuffer.get(), 0, D3D11_MAP_WRITE_DISCARD, 0, &resource);
memcpy(resource.pData, &m_vertexData[0], sizeof(Vertex_t) * m_vertexCount);
context->Unmap(m_vertexBuffer.get(), 0);
context->Map(m_indexBuffer.get(), 0, D3D11_MAP_WRITE_DISCARD, 0, &resource);
memcpy(resource.pData, &m_indexData[0], sizeof(uint16_t) * m_indexCount);
context->Unmap(m_indexBuffer.get(), 0);
});
}
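The packed R16G16B16A16IntNormalized vertices are copied to the GPU untouched; the VertexPositionScale reported by the mesh is instead folded into the model matrix in UpdateModelMatrix. As a rough sketch of what this amounts to per coordinate, assuming the standard SNORM16 decode of value / 32767 clamped to [-1, 1] (the helper below is illustrative, not part of the sample):

#include <algorithm>
#include <cstdint>

// Decode one packed SNORM16 surface-mesh coordinate into meters, mirroring what
// the vertex-scale factor baked into the model matrix achieves on the GPU.
float DecodeSurfaceCoordinate(int16_t packed, float positionScale)
{
    float normalized = std::max(packed / 32767.0f, -1.0f); // SNORM16 -> [-1, 1]
    return normalized * positionScale;                     // scale into meters
}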

Просмотреть файл

@ -0,0 +1,130 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "..\Common\DeviceResources.h"
#include "..\Common\Utils.h"
#include <winrt/windows.perception.spatial.surfaces.h>
#include <future>
#include <string>
// forward
class SpatialSurfaceMeshRenderer;
struct SRMeshConstantBuffer
{
DirectX::XMFLOAT4X4 modelMatrix;
};
// represents a single piece of mesh (SpatialSurfaceMesh)
class SpatialSurfaceMeshPart
{
public:
struct Vertex_t
{
// float pos[4];
int16_t pos[4];
};
SpatialSurfaceMeshPart(SpatialSurfaceMeshRenderer* owner);
void Update(winrt::Windows::Perception::Spatial::Surfaces::SpatialSurfaceInfo surfaceInfo);
void UpdateMesh(winrt::Windows::Perception::Spatial::Surfaces::SpatialSurfaceMesh mesh);
bool IsInUse() const
{
return m_inUse || m_updateInProgress;
}
private:
Vertex_t* MapVertices(uint32_t vertexCount);
void UnmapVertices();
uint16_t* MapIndices(uint32_t indexCount);
void UnmapIndices();
void UploadData();
void UpdateModelMatrix(winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem);
friend class SpatialSurfaceMeshRenderer;
SpatialSurfaceMeshRenderer* m_owner;
bool m_inUse = true;
bool m_needsUpload = false;
bool m_updateInProgress = false;
GUID m_ID;
uint32_t m_allocatedVertexCount = 0;
uint32_t m_allocatedIndexCount = 0;
uint32_t m_vertexCount = 0;
uint32_t m_indexCount = 0;
winrt::com_ptr<ID3D11Buffer> m_vertexBuffer;
winrt::com_ptr<ID3D11Buffer> m_indexBuffer;
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem m_coordinateSystem = nullptr;
// double buffered data:
std::vector<Vertex_t> m_vertexData;
std::vector<uint16_t> m_indexData;
SRMeshConstantBuffer m_constantBufferData;
DirectX::XMFLOAT3 m_vertexScale;
};
// Renders the SR mesh
class SpatialSurfaceMeshRenderer
{
public:
SpatialSurfaceMeshRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources);
virtual ~SpatialSurfaceMeshRenderer();
void Update(winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem);
void Render(bool isStereo);
std::future<void> CreateDeviceDependentResources();
void ReleaseDeviceDependentResources();
private:
void OnObservedSurfaceChanged();
SpatialSurfaceMeshPart* GetOrCreateMeshPart(winrt::guid id);
private:
friend class SpatialSurfaceMeshPart;
// Cached pointer to device resources.
std::shared_ptr<DXHelper::DeviceResources> m_deviceResources;
// Resources related to mesh rendering.
winrt::com_ptr<ID3D11ShaderResourceView> m_shaderResourceView;
winrt::com_ptr<ID3D11SamplerState> m_pointSampler;
winrt::com_ptr<ID3D11RenderTargetView> m_renderTargetView;
// observer:
int m_surfaceChangedCounter = 0;
bool m_surfaceChanged = false;
winrt::Windows::Perception::Spatial::Surfaces::SpatialSurfaceObserver m_surfaceObserver = nullptr;
winrt::event_token m_observedSurfaceChangedToken;
// mesh parts
using MeshPartMap = std::map<GUID, std::unique_ptr<SpatialSurfaceMeshPart>, GUIDComparer>;
MeshPartMap m_meshParts;
// rendering
bool m_zfillOnly = false;
bool m_loadingComplete = false;
winrt::com_ptr<ID3D11InputLayout> m_inputLayout;
winrt::com_ptr<ID3D11VertexShader> m_vertexShader;
winrt::com_ptr<ID3D11GeometryShader> m_geometryShader;
winrt::com_ptr<ID3D11PixelShader> m_pixelShader;
winrt::com_ptr<ID3D11Buffer> m_modelConstantBuffer;
};
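MeshPartMap keys surface mesh parts by GUID, which has no operator<, so the map needs the GUIDComparer declared in Common\Utils.h (not shown in this excerpt). A plausible stand-in is a memcmp-based strict weak ordering along these lines; this is an assumption about that helper, not its actual definition:

#include <cstring>
#include <guiddef.h>

// Hypothetical stand-in for the GUIDComparer from Common\Utils.h: orders GUIDs
// by raw byte comparison so they can be used as std::map keys.
struct GUIDComparer
{
    bool operator()(const GUID& left, const GUID& right) const
    {
        return std::memcmp(&left, &right, sizeof(GUID)) < 0;
    }
};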

Просмотреть файл

@ -0,0 +1,347 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "../Common/DirectXHelper.h"
#include "SpinningCubeRenderer.h"
#include <winrt/Windows.Foundation.Numerics.h>
#include <winrt/Windows.Perception.People.h>
#include <winrt/Windows.Perception.h>
using namespace DirectX;
// Loads vertex and pixel shaders from files and instantiates the cube geometry.
SpinningCubeRenderer::SpinningCubeRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources)
: m_deviceResources(deviceResources)
{
CreateDeviceDependentResources();
}
// This function uses a SpatialPointerPose to position the world-locked hologram
// two meters in front of the user's heading.
void SpinningCubeRenderer::PositionHologram(const winrt::Windows::UI::Input::Spatial::SpatialPointerPose& pointerPose)
{
if (pointerPose != nullptr)
{
// Try to get the gaze from eyes.
winrt::Windows::Perception::People::EyesPose eyesPose = pointerPose.Eyes();
if (eyesPose != nullptr)
{
winrt::Windows::Foundation::IReference<winrt::Windows::Perception::Spatial::SpatialRay> gaze = eyesPose.Gaze();
if (gaze != nullptr)
{
PositionHologram(gaze.Value().Origin, gaze.Value().Direction);
return;
}
}
// Get the gaze direction from head.
const auto headPosition = pointerPose.Head().Position();
const auto headDirection = pointerPose.Head().ForwardDirection();
PositionHologram(headPosition, headDirection);
}
}
void SpinningCubeRenderer::SetColorFilter(DirectX::XMFLOAT4 color)
{
m_filterColorData = color;
}
// This function uses a point and a vector to position the world-locked hologram
// two meters in front of the user's heading.
void SpinningCubeRenderer::PositionHologram(
winrt::Windows::Foundation::Numerics::float3 headPosition, winrt::Windows::Foundation::Numerics::float3 headDirection)
{
// The hologram is positioned two meters along the user's gaze direction.
static const float distanceFromUser = 2.0f; // meters
const auto gazeAtTwoMeters = headPosition + (distanceFromUser * headDirection);
// This will be used as the translation component of the hologram's
// model transform.
SetPosition(gazeAtTwoMeters);
}
// Called once per frame. Rotates the cube, and calculates and sets the model matrix
// relative to the position transform indicated by hologramPositionTransform.
void SpinningCubeRenderer::Update(float totalSeconds)
{
// Rotate the cube.
// Convert degrees to radians, then convert seconds to rotation angle.
const float radiansPerSecond = XMConvertToRadians(m_degreesPerSecond);
const double relativeRotation = totalSeconds * radiansPerSecond;
double totalRotation = m_rotationOffset;
switch (m_pauseState)
{
case PauseState::Unpaused:
totalRotation += relativeRotation;
break;
case PauseState::Pausing:
m_rotationOffset += relativeRotation;
m_pauseState = PauseState::Paused;
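// Intentional fall-through: the freshly banked offset becomes the held rotation below.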
case PauseState::Paused:
totalRotation = m_rotationOffset;
break;
case PauseState::Unpausing:
m_rotationOffset -= relativeRotation;
m_pauseState = PauseState::Unpaused;
break;
}
const float radians = static_cast<float>(fmod(totalRotation, XM_2PI));
const XMMATRIX modelRotation = XMMatrixRotationY(-radians);
// Position the cube.
const XMMATRIX modelTranslation = XMMatrixTranslationFromVector(XMLoadFloat3(&m_position));
// Multiply to get the transform matrix.
// Note that this transform does not enforce a particular coordinate system. The calling
// class is responsible for rendering this content in a consistent manner.
const XMMATRIX modelTransform = XMMatrixMultiply(modelRotation, modelTranslation);
// Store the normal transform.
XMStoreFloat4x4(&m_modelConstantBufferData.normal, XMMatrixTranspose(modelRotation));
// The view and projection matrices are provided by the system; they are associated
// with holographic cameras, and updated on a per-camera basis.
// Here, we provide the model transform for the sample hologram. The model transform
// matrix is transposed to prepare it for the shader.
XMStoreFloat4x4(&m_modelConstantBufferData.model, XMMatrixTranspose(modelTransform));
// Loading is asynchronous. Resources must be created before they can be updated.
if (!m_loadingComplete)
{
return;
}
// Use the D3D device context to update Direct3D device-based resources.
m_deviceResources->UseD3DDeviceContext([&](auto context) {
// Update the model transform buffer for the hologram.
context->UpdateSubresource(m_modelConstantBuffer.get(), 0, nullptr, &m_modelConstantBufferData, 0, 0);
});
}
// Renders one frame using the vertex and pixel shaders.
// On devices that do not support the D3D11_FEATURE_D3D11_OPTIONS3::
// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature,
// a pass-through geometry shader is also used to set the render
// target array index.
void SpinningCubeRenderer::Render(bool isStereo)
{
// Loading is asynchronous. Resources must be created before drawing can occur.
if (!m_loadingComplete)
{
return;
}
m_deviceResources->UseD3DDeviceContext([&](auto context) {
ID3D11Buffer* pBufferToSet = nullptr;
// Each vertex is one instance of the VertexPositionColor struct.
const UINT stride = sizeof(VertexPositionNormalColor);
const UINT offset = 0;
pBufferToSet = m_vertexBuffer.get();
context->IASetVertexBuffers(0, 1, &pBufferToSet, &stride, &offset);
context->IASetIndexBuffer(
m_indexBuffer.get(),
DXGI_FORMAT_R16_UINT, // Each index is one 16-bit unsigned integer (short).
0);
context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
context->IASetInputLayout(m_inputLayout.get());
// Attach the vertex shader.
context->VSSetShader(m_vertexShader.get(), nullptr, 0);
// Apply the model constant buffer to the vertex shader.
pBufferToSet = m_modelConstantBuffer.get();
context->VSSetConstantBuffers(0, 1, &pBufferToSet);
if (!m_usingVprtShaders)
{
// On devices that do not support the D3D11_FEATURE_D3D11_OPTIONS3::
// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature,
// a pass-through geometry shader is used to set the render target
// array index.
context->GSSetShader(m_geometryShader.get(), nullptr, 0);
}
context->UpdateSubresource(m_filterColorBuffer.get(), 0, nullptr, &m_filterColorData, 0, 0);
pBufferToSet = m_filterColorBuffer.get();
context->PSSetConstantBuffers(2, 1, &pBufferToSet);
// Attach the pixel shader.
context->PSSetShader(m_pixelShader.get(), nullptr, 0);
// Draw the objects.
context->DrawIndexedInstanced(
m_indexCount, // Index count per instance.
isStereo ? 2 : 1, // Instance count.
0, // Start index location.
0, // Base vertex location.
0 // Start instance location.
);
});
}
std::future<void> SpinningCubeRenderer::CreateDeviceDependentResources()
{
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
std::wstring fileNamePrefix = L"";
#else
std::wstring fileNamePrefix = L"ms-appx:///";
#endif
m_usingVprtShaders = m_deviceResources->GetDeviceSupportsVprt();
// On devices that do support the D3D11_FEATURE_D3D11_OPTIONS3::
// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature
// we can avoid using a pass-through geometry shader to set the render
// target array index, thus avoiding any overhead that would be
// incurred by setting the geometry shader stage.
std::wstring vertexShaderFileName = m_usingVprtShaders ? L"hsa_VprtVertexShader.cso" : L"hsa_VertexShader.cso";
std::vector<byte> vertexShaderFileData = co_await DXHelper::ReadDataAsync(fileNamePrefix + vertexShaderFileName);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateVertexShader(
vertexShaderFileData.data(), vertexShaderFileData.size(), nullptr, m_vertexShader.put()));
constexpr std::array<D3D11_INPUT_ELEMENT_DESC, 3> vertexDesc = {{
{"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"COLOR", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0},
}};
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateInputLayout(
vertexDesc.data(),
static_cast<UINT>(vertexDesc.size()),
vertexShaderFileData.data(),
static_cast<UINT>(vertexShaderFileData.size()),
m_inputLayout.put()));
std::vector<byte> pixelShaderFileData = co_await DXHelper::ReadDataAsync(fileNamePrefix + L"hsa_PixelShader.cso");
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreatePixelShader(
pixelShaderFileData.data(), pixelShaderFileData.size(), nullptr, m_pixelShader.put()));
const ModelConstantBuffer constantBuffer{
reinterpret_cast<DirectX::XMFLOAT4X4&>(winrt::Windows::Foundation::Numerics::float4x4::identity()),
reinterpret_cast<DirectX::XMFLOAT4X4&>(winrt::Windows::Foundation::Numerics::float4x4::identity()),
};
const CD3D11_BUFFER_DESC constantBufferDesc(sizeof(ModelConstantBuffer), D3D11_BIND_CONSTANT_BUFFER);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&constantBufferDesc, nullptr, m_modelConstantBuffer.put()));
const CD3D11_BUFFER_DESC filterColorBufferDesc(sizeof(XMFLOAT4), D3D11_BIND_CONSTANT_BUFFER);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&filterColorBufferDesc, nullptr, m_filterColorBuffer.put()));
if (!m_usingVprtShaders)
{
// Load the pass-through geometry shader.
std::vector<byte> geometryShaderFileData = co_await DXHelper::ReadDataAsync(fileNamePrefix + L"hsa_GeometryShader.cso");
// After the pass-through geometry shader file is loaded, create the shader.
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateGeometryShader(
geometryShaderFileData.data(), geometryShaderFileData.size(), nullptr, m_geometryShader.put()));
}
// Load mesh vertices. Each vertex has a position and a color.
// Note that the cube size has changed from the default DirectX app
// template. Windows Holographic is scaled in meters, so to draw the
// cube at a comfortable size we made the cube width 0.2 m (20 cm).
static const VertexPositionNormalColor cubeVertices[] = {
{XMFLOAT3(-0.1f, -0.1f, -0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(0.0f, 0.0f, 0.0f)}, // vertex 0 non-debug
{XMFLOAT3(-0.1f, -0.1f, 0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(0.0f, 0.0f, 1.0f)},
{XMFLOAT3(-0.1f, 0.1f, -0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(0.0f, 1.0f, 0.0f)},
{XMFLOAT3(-0.1f, 0.1f, 0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(0.0f, 1.0f, 1.0f)},
{XMFLOAT3(0.1f, -0.1f, -0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(1.0f, 0.0f, 0.0f)},
{XMFLOAT3(0.1f, -0.1f, 0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(1.0f, 0.0f, 1.0f)},
{XMFLOAT3(0.1f, 0.1f, -0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(1.0f, 1.0f, 0.0f)},
{XMFLOAT3(0.1f, 0.1f, 0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(1.0f, 1.0f, 1.0f)},
};
D3D11_SUBRESOURCE_DATA vertexBufferData = {0};
vertexBufferData.pSysMem = cubeVertices;
vertexBufferData.SysMemPitch = 0;
vertexBufferData.SysMemSlicePitch = 0;
const CD3D11_BUFFER_DESC vertexBufferDesc(sizeof(cubeVertices), D3D11_BIND_VERTEX_BUFFER);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&vertexBufferDesc, &vertexBufferData, m_vertexBuffer.put()));
// Load mesh indices. Each trio of indices represents
// a triangle to be rendered on the screen.
// For example: 2,1,0 means that the vertices with indexes
// 2, 1, and 0 from the vertex buffer compose the
// first triangle of this mesh.
// Note that the winding order is clockwise by default.
static const unsigned short cubeIndices[] = {
2, 1, 0, // -x
2, 3, 1,
6, 4, 5, // +x
6, 5, 7,
0, 1, 5, // -y
0, 5, 4,
2, 6, 7, // +y
2, 7, 3,
0, 4, 6, // -z
0, 6, 2,
1, 3, 7, // +z
1, 7, 5,
};
m_indexCount = ARRAYSIZE(cubeIndices);
D3D11_SUBRESOURCE_DATA indexBufferData = {0};
indexBufferData.pSysMem = cubeIndices;
indexBufferData.SysMemPitch = 0;
indexBufferData.SysMemSlicePitch = 0;
const CD3D11_BUFFER_DESC indexBufferDesc(sizeof(cubeIndices), D3D11_BIND_INDEX_BUFFER);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&indexBufferDesc, &indexBufferData, m_indexBuffer.put()));
// Once the cube is loaded, the object is ready to be rendered.
m_loadingComplete = true;
}
void SpinningCubeRenderer::ReleaseDeviceDependentResources()
{
m_loadingComplete = false;
m_usingVprtShaders = false;
m_vertexShader = nullptr;
m_inputLayout = nullptr;
m_pixelShader = nullptr;
m_geometryShader = nullptr;
m_modelConstantBuffer = nullptr;
m_vertexBuffer = nullptr;
m_indexBuffer = nullptr;
m_filterColorBuffer = nullptr;
}
void SpinningCubeRenderer::CreateWindowSizeDependentResources()
{
}
void SpinningCubeRenderer::TogglePauseState()
{
if (m_pauseState == PauseState::Paused)
{
m_pauseState = PauseState::Unpausing;
}
else
{
m_pauseState = PauseState::Pausing;
}
}
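The pause handling above avoids a visual jump when the animation resumes: Pausing banks the rotation reached so far into m_rotationOffset, Paused keeps rendering that banked angle, and Unpausing subtracts the current elapsed-time term so the running sum continues from the banked angle. A compressed, standalone sketch of that bookkeeping (names and the unit rotation rate are illustrative):

#include <cstdio>

// Minimal model of the pause bookkeeping: angle(t) = offset, plus t while unpaused.
struct RotationClock
{
    double offset = 0.0;
    bool paused = false;

    void Pause(double t) { offset += t; paused = true; }    // bank the progress so far
    void Unpause(double t) { offset -= t; paused = false; } // resume from the banked angle
    double Angle(double t) const { return paused ? offset : offset + t; }
};

int main()
{
    RotationClock clock;
    clock.Pause(2.0);                        // pause after 2 seconds of spinning
    std::printf("%.1f\n", clock.Angle(5.0)); // 2.0: angle holds while paused
    clock.Unpause(5.0);                      // resume at t = 5
    std::printf("%.1f\n", clock.Angle(6.0)); // 3.0: one more second of spin
    return 0;
}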

Просмотреть файл

@ -0,0 +1,93 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "..\Common\DeviceResources.h"
#include "ShaderStructures.h"
#include <winrt/Windows.UI.Input.Spatial.h>
// This sample renderer instantiates a basic rendering pipeline.
class SpinningCubeRenderer
{
public:
SpinningCubeRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources);
void CreateWindowSizeDependentResources();
std::future<void> CreateDeviceDependentResources();
void ReleaseDeviceDependentResources();
void Update(float totalSeconds);
void SetColorFilter(DirectX::XMFLOAT4 color);
void Render(bool isStereo);
// Repositions the sample hologram.
void PositionHologram(const winrt::Windows::UI::Input::Spatial::SpatialPointerPose& pointerPose);
// Repositions the sample hologram, using direct measures.
void PositionHologram(winrt::Windows::Foundation::Numerics::float3 pos, winrt::Windows::Foundation::Numerics::float3 dir);
// Property accessors.
void SetPosition(winrt::Windows::Foundation::Numerics::float3 pos)
{
m_position = pos;
}
const winrt::Windows::Foundation::Numerics::float3& GetPosition()
{
return m_position;
}
void Pause()
{
m_pauseState = PauseState::Pausing;
}
void Unpause()
{
m_pauseState = PauseState::Unpausing;
}
void TogglePauseState();
private:
enum class PauseState
{
Unpaused = 0,
Pausing,
Paused,
Unpausing,
};
// Cached pointer to device resources.
std::shared_ptr<DXHelper::DeviceResources> m_deviceResources;
// Direct3D resources for cube geometry.
winrt::com_ptr<ID3D11InputLayout> m_inputLayout;
winrt::com_ptr<ID3D11Buffer> m_vertexBuffer;
winrt::com_ptr<ID3D11Buffer> m_indexBuffer;
winrt::com_ptr<ID3D11VertexShader> m_vertexShader;
winrt::com_ptr<ID3D11GeometryShader> m_geometryShader;
winrt::com_ptr<ID3D11PixelShader> m_pixelShader;
winrt::com_ptr<ID3D11Buffer> m_modelConstantBuffer;
winrt::com_ptr<ID3D11Buffer> m_filterColorBuffer;
// System resources for cube geometry.
ModelConstantBuffer m_modelConstantBufferData;
uint32_t m_indexCount = 0;
DirectX::XMFLOAT4 m_filterColorData = {1, 1, 1, 1};
// Variables used with the rendering loop.
bool m_loadingComplete = false;
float m_degreesPerSecond = 180.0f;
winrt::Windows::Foundation::Numerics::float3 m_position = {0.0f, 0.0f, -2.0f};
PauseState m_pauseState = PauseState::Unpaused;
double m_rotationOffset = 0;
// If the current D3D Device supports VPRT, we can avoid using a geometry
// shader just to set the render target array index.
bool m_usingVprtShaders = false;
};

Просмотреть файл

@ -0,0 +1,44 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
// Per-vertex data from the vertex shader.
struct GeometryShaderInput
{
float4 pos : SV_POSITION;
min16float3 color : COLOR0;
uint instId : TEXCOORD0;
};
// Per-vertex data passed to the rasterizer.
struct GeometryShaderOutput
{
float4 pos : SV_POSITION;
min16float3 color : COLOR0;
uint idx : TEXCOORD0;
uint rtvId : SV_RenderTargetArrayIndex;
};
// This geometry shader is a pass-through that leaves the geometry unmodified
// and sets the render target array index.
[maxvertexcount(3)]
void main(triangle GeometryShaderInput input[3], inout TriangleStream<GeometryShaderOutput> outStream)
{
GeometryShaderOutput output;
[unroll(3)]
for (int i = 0; i < 3; ++i)
{
output.pos = input[i].pos;
output.color = input[i].color;
output.idx = input[i].instId;
output.rtvId = input[i].instId;
outStream.Append(output);
}
}

Просмотреть файл

@ -0,0 +1,42 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
// A constant buffer that stores the model transform.
cbuffer ModelConstantBuffer : register(b0)
{
float4x4 model;
float4x4 normal;
};
// A constant buffer that stores each set of view and projection matrices in column-major format.
cbuffer ViewProjectionConstantBuffer : register(b1)
{
float4x4 viewProjection[2];
};
cbuffer ColorFilterConstantBuffer : register(b2)
{
float4 colorFilter;
};
// Per-pixel color data passed through the pixel shader.
struct PixelShaderInput
{
float4 pos : SV_POSITION;
min16float3 color : COLOR0;
uint idx : TEXCOORD0;
};
// The pixel shader passes the interpolated vertex color through, modulated by the color filter constant.
min16float4 main(PixelShaderInput input) : SV_TARGET
{
return min16float4(input.color, 1.f) * min16float4(colorFilter);
}

Просмотреть файл

@ -0,0 +1,48 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
// Per-vertex data from the vertex shader.
struct GeometryShaderInput
{
float4 pos : SV_POSITION;
uint instId : TEXCOORD1;
};
// Per-vertex data passed to the rasterizer.
struct GeometryShaderOutput
{
float4 pos : SV_POSITION;
float3 barycentricCoords : TEXCOORD0;
uint rtvId : SV_RenderTargetArrayIndex;
};
// This geometry shader is a pass-through that leaves the geometry unmodified
// and sets the render target array index.
[maxvertexcount(3)]
void main(triangle GeometryShaderInput input[3], inout TriangleStream<GeometryShaderOutput> outStream)
{
static const float3 barycentricVectors[3] = {
float3(1.0f, 0.0f, 0.0f),
float3(0.0f, 1.0f, 0.0f),
float3(0.0f, 0.0f, 1.0f)
};
GeometryShaderOutput output;
[unroll(3)]
for (int i = 0; i < 3; ++i)
{
output.pos = input[i].pos;
output.barycentricCoords = barycentricVectors[i];
output.rtvId = input[i].instId;
outStream.Append(output);
}
}

Просмотреть файл

@ -0,0 +1,36 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
// Per-pixel color data passed through the pixel shader.
struct PixelShaderInput
{
float4 pos : SV_POSITION;
float3 barycentricCoords : TEXCOORD0;
};
#define LINE_WIDTH 2.0
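// edgeFactor falls to 0 close to a triangle edge (where one barycentric coordinate
// approaches 0) and rises to 1 in the interior; fwidth() scales the transition so the
// line stays roughly LINE_WIDTH pixels wide regardless of the triangle's screen size.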
float edgeFactor(float3 coords)
{
float3 d = fwidth(coords);
float3 a3 = smoothstep(0.0, d*LINE_WIDTH, coords);
return min(min(a3.x, a3.y), a3.z);
}
// The pixel shader draws the surface mesh as a wireframe: fragments on triangle edges
// keep the base gray while interior fragments fade to black.
float4 main(PixelShaderInput input) : SV_TARGET
{
float lineBrightness = 1.0f - edgeFactor(input.barycentricCoords);
float4 result = float4(0.25, 0.25, 0.25, 1.0);
result.xyz *= lineBrightness;
return result;
}

Просмотреть файл

@ -0,0 +1,64 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
// A constant buffer that stores the model transform.
cbuffer SRMeshConstantBuffer : register(b0)
{
float4x4 model;
};
// A constant buffer that stores each set of view and projection matrices in column-major format.
cbuffer ViewProjectionConstantBuffer : register(b1)
{
float4x4 viewProjection[2];
};
// Per-vertex data used as input to the vertex shader.
struct VertexShaderInput
{
float4 pos : POSITION;
uint instId : SV_InstanceID;
};
// Per-vertex data passed to the geometry shader.
// Note that the render target array index will be set by the geometry shader
// using the value of viewId.
struct VertexShaderOutput
{
float4 pos : SV_POSITION;
uint viewId : TEXCOORD1; // SV_InstanceID % 2
};
// Simple shader to do vertex processing on the GPU.
VertexShaderOutput main(VertexShaderInput input)
{
VertexShaderOutput output;
float4 pos = float4(input.pos.xyz, 1.0f);
// Note which view this vertex has been sent to. Used for matrix lookup.
// Taking the modulo of the instance ID allows geometry instancing to be used
// along with stereo instanced drawing; in that case, two copies of each
// instance would be drawn, one for left and one for right.
int idx = input.instId % 2;
// Transform the vertex position into world space.
pos = mul(pos, model);
// Correct for perspective and project the vertex position onto the screen.
pos = mul(pos, viewProjection[idx]);
output.pos = pos;
// Set the instance ID. The pass-through geometry shader will set the
// render target array index to whatever value is set here.
output.viewId = idx;
return output;
}

Просмотреть файл

@ -0,0 +1,72 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
// A constant buffer that stores the model transform.
cbuffer ModelConstantBuffer : register(b0)
{
float4x4 model;
float4x4 normal;
};
// A constant buffer that stores each set of view and projection matrices in column-major format.
cbuffer ViewProjectionConstantBuffer : register(b1)
{
float4x4 viewProjection[2];
};
// Per-vertex data used as input to the vertex shader.
struct VertexShaderInput
{
float3 pos : POSITION;
min16float3 color : COLOR0;
uint instId : SV_InstanceID;
};
// Per-vertex data passed to the geometry shader.
// Note that the render target array index is set here in the vertex shader.
struct VertexShaderOutput
{
float4 pos : SV_POSITION;
min16float3 color : COLOR0;
uint idx : TEXCOORD0;
uint rtvId : SV_RenderTargetArrayIndex; // SV_InstanceID % 2
};
// Simple shader to do vertex processing on the GPU.
VertexShaderOutput main(VertexShaderInput input)
{
VertexShaderOutput output;
float4 pos = float4(input.pos, 1.0f);
// Note which view this vertex has been sent to. Used for matrix lookup.
// Taking the modulo of the instance ID allows geometry instancing to be used
// along with stereo instanced drawing; in that case, two copies of each
// instance would be drawn, one for left and one for right.
int idx = input.instId % 2;
// Transform the vertex position into world space.
pos = mul(pos, model);
// Correct for perspective and project the vertex position onto the screen.
pos = mul(pos, viewProjection[idx]);
// Write minimum-precision floating-point data.
output.pos = pos;
// Pass the color through without modification.
output.color = input.color;
// Set the render target array index.
output.rtvId = idx;
output.idx = idx;
return output;
}

Просмотреть файл

@ -0,0 +1,72 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
// A constant buffer that stores the model transform.
cbuffer ModelConstantBuffer : register(b0)
{
float4x4 model;
float4x4 normal;
};
// A constant buffer that stores each set of view and projection matrices in column-major format.
cbuffer ViewProjectionConstantBuffer : register(b1)
{
float4x4 viewProjection[2];
};
// Per-vertex data used as input to the vertex shader.
struct VertexShaderInput
{
float3 pos : POSITION;
min16float3 color : COLOR0;
uint instId : SV_InstanceID;
};
// Per-vertex data passed to the geometry shader.
// Note that the render target array index will be set by the geometry shader
// using the value of viewId.
struct VertexShaderOutput
{
float4 pos : SV_POSITION;
min16float3 color : COLOR0;
uint viewId : TEXCOORD0; // SV_InstanceID % 2
};
// Simple shader to do vertex processing on the GPU.
VertexShaderOutput main(VertexShaderInput input)
{
VertexShaderOutput output;
float4 pos = float4(input.pos, 1.0f);
// Note which view this vertex has been sent to. Used for matrix lookup.
// Taking the modulo of the instance ID allows geometry instancing to be used
// along with stereo instanced drawing; in that case, two copies of each
// instance would be drawn, one for left and one for right.
int idx = input.instId % 2;
// Transform the vertex position into world space.
pos = mul(pos, model);
// Correct for perspective and project the vertex position onto the screen.
pos = mul(pos, viewProjection[idx]);
// Write minimum-precision floating-point data.
output.pos = pos;
// Pass the color through without modification.
output.color = input.color;
// Set the instance ID. The pass-through geometry shader will set the
// render target array index to whatever value is set here.
output.viewId = idx;
return output;
}
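The instId % 2 lookup above relies on the host doubling the instance count for stereo output, exactly as SpinningCubeRenderer::Render and SpatialSurfaceMeshRenderer::Render do with DrawIndexedInstanced. A trimmed sketch of the CPU-side half of that contract (DrawStereo is a made-up helper name; instance 0 typically feeds viewProjection[0] and render target slice 0, instance 1 the right-eye counterparts):

#include <d3d11.h>

// Host-side pairing for the shader's instId % 2: stereo rendering just doubles
// the instance count, one instance per eye.
void DrawStereo(ID3D11DeviceContext* context, UINT indexCount, bool isStereo)
{
    const UINT instanceCount = isStereo ? 2 : 1;
    context->DrawIndexedInstanced(indexCount, instanceCount, 0, 0, 0);
}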

Просмотреть файл

@ -0,0 +1,32 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.28307.645
MinimumVisualStudioVersion = 10.0.40219.1
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "HolographicHostSample", "HolographicHostSample.vcxproj", "{59F48F16-7CF3-36AB-AE6D-B56A9DA89747}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|x64 = Debug|x64
Release|x64 = Release|x64
RelWithDebInfo|x64 = RelWithDebInfo|x64
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{59F48F16-7CF3-36AB-AE6D-B56A9DA89747}.Debug|x64.ActiveCfg = Debug|x64
{59F48F16-7CF3-36AB-AE6D-B56A9DA89747}.Debug|x64.Build.0 = Debug|x64
{59F48F16-7CF3-36AB-AE6D-B56A9DA89747}.Debug|x64.Deploy.0 = Debug|x64
{59F48F16-7CF3-36AB-AE6D-B56A9DA89747}.Release|x64.ActiveCfg = Release|x64
{59F48F16-7CF3-36AB-AE6D-B56A9DA89747}.Release|x64.Build.0 = Release|x64
{59F48F16-7CF3-36AB-AE6D-B56A9DA89747}.Release|x64.Deploy.0 = Release|x64
{59F48F16-7CF3-36AB-AE6D-B56A9DA89747}.RelWithDebInfo|x64.ActiveCfg = RelWithDebInfo|x64
{59F48F16-7CF3-36AB-AE6D-B56A9DA89747}.RelWithDebInfo|x64.Build.0 = RelWithDebInfo|x64
{59F48F16-7CF3-36AB-AE6D-B56A9DA89747}.RelWithDebInfo|x64.Deploy.0 = RelWithDebInfo|x64
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {E8A3A1EC-E636-40D6-9E3D-C22347CC8162}
EndGlobalSection
EndGlobal

Просмотреть файл

@ -0,0 +1,313 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="15.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|x64">
<Configuration>Debug</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|x64">
<Configuration>Release</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="RelWithDebInfo|x64">
<Configuration>RelWithDebInfo</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{59F48F16-7CF3-36AB-AE6D-B56A9DA89747}</ProjectGuid>
<WindowsTargetPlatformVersion>10.0.18362.0</WindowsTargetPlatformVersion>
<Keyword>Win32Proj</Keyword>
<Platform>x64</Platform>
<ProjectName>HolographicHostSample</ProjectName>
<VCProjectUpgraderObjectName>NoUpgrade</VCProjectUpgraderObjectName>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
<PlatformToolset>v141</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
<PlatformToolset>v141</PlatformToolset>
<SpectreMitigation>Spectre</SpectreMitigation>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='RelWithDebInfo|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
<PlatformToolset>v141</PlatformToolset>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="packages\Microsoft.Holographic.Remoting.2.0.7\build\native\Microsoft.Holographic.Remoting.targets" Condition="Exists('packages\Microsoft.Holographic.Remoting.2.0.7\build\native\Microsoft.Holographic.Remoting.targets')" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup>
<_ProjectFileVersion>10.0.20506.1</_ProjectFileVersion>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">bin\Debug\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">HolographicHostSample.dir\Debug\</IntDir>
<TargetName Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">HolographicHostSample</TargetName>
<TargetExt Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">.exe</TargetExt>
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</LinkIncremental>
<GenerateManifest Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</GenerateManifest>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">bin\Release\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">HolographicHostSample.dir\Release\</IntDir>
<TargetName Condition="'$(Configuration)|$(Platform)'=='Release|x64'">HolographicHostSample</TargetName>
<TargetExt Condition="'$(Configuration)|$(Platform)'=='Release|x64'">.exe</TargetExt>
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</LinkIncremental>
<GenerateManifest Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</GenerateManifest>
<OutDir Condition="'$(Configuration)|$(Platform)'=='RelWithDebInfo|x64'">bin\RelWithDebInfo\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='RelWithDebInfo|x64'">HolographicHostSample.dir\RelWithDebInfo\</IntDir>
<TargetName Condition="'$(Configuration)|$(Platform)'=='RelWithDebInfo|x64'">HolographicHostSample</TargetName>
<TargetExt Condition="'$(Configuration)|$(Platform)'=='RelWithDebInfo|x64'">.exe</TargetExt>
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='RelWithDebInfo|x64'">true</LinkIncremental>
<GenerateManifest Condition="'$(Configuration)|$(Platform)'=='RelWithDebInfo|x64'">true</GenerateManifest>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<ClCompile>
<AdditionalIncludeDirectories>.;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<AdditionalOptions>%(AdditionalOptions) /await</AdditionalOptions>
<AdditionalUsingDirectories>$(VCIDEInstallDir)vcpackages;$(WindowsSDK_UnionMetadataPath)</AdditionalUsingDirectories>
<AssemblerListingLocation>Debug/</AssemblerListingLocation>
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
<CompileAs>CompileAsCpp</CompileAs>
<CompileAsWinRT>true</CompileAsWinRT>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
<DisableSpecificWarnings>4503</DisableSpecificWarnings>
<ExceptionHandling>Sync</ExceptionHandling>
<FunctionLevelLinking>true</FunctionLevelLinking>
<InlineFunctionExpansion>Disabled</InlineFunctionExpansion>
<LanguageStandard>stdcpp17</LanguageStandard>
<MultiProcessorCompilation>true</MultiProcessorCompilation>
<Optimization>Disabled</Optimization>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
<RuntimeTypeInfo>true</RuntimeTypeInfo>
<SDLCheck>true</SDLCheck>
<UseFullPaths>false</UseFullPaths>
<WarningLevel>Level3</WarningLevel>
<PreprocessorDefinitions>WIN32;_WINDOWS;ALLOW_INSECURE_RANDOM_DEVICE=1;UNICODE;_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING;_SILENCE_CXX17_CODECVT_HEADER_DEPRECATION_WARNING;_SILENCE_CXX17_OLD_ALLOCATOR_MEMBERS_DEPRECATION_WARNING;RDBUILD_PLATFORM_DEFINED;RDBUILD_PLATFORM_WINDOWS;RDBUILD_PLATFORM_WINDOWS_WIN32;RDBUILD_ARCHITECTURE_DEFINED;RDBUILD_ARCH_INTEL;RDBUILD_ARCH_INTEL_X64;RDBUILD_DEFAULT_CALL=;RDBUILD_FEATURE_OPENSSL=0;_SCL_SECURE_NO_WARNINGS;_SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING;WIN32_LEAN_AND_MEAN=1;WINVER=0x0A00;_WIN32_WINNT=0x0A00;NTDDI_VERSION=0x0A000007;CMAKE_INTDIR="Debug";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<ObjectFileName>$(IntDir)</ObjectFileName>
</ClCompile>
<ResourceCompile>
<PreprocessorDefinitions>WIN32;_DEBUG;_WINDOWS;ALLOW_INSECURE_RANDOM_DEVICE=1;UNICODE;_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING;_SILENCE_CXX17_CODECVT_HEADER_DEPRECATION_WARNING;_SILENCE_CXX17_OLD_ALLOCATOR_MEMBERS_DEPRECATION_WARNING;RDBUILD_PLATFORM_DEFINED;RDBUILD_PLATFORM_WINDOWS;RDBUILD_PLATFORM_WINDOWS_WIN32;RDBUILD_ARCHITECTURE_DEFINED;RDBUILD_ARCH_INTEL;RDBUILD_ARCH_INTEL_X64;RDBUILD_DEFAULT_CALL=;RDBUILD_FEATURE_OPENSSL=0;_SCL_SECURE_NO_WARNINGS;_SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING;WIN32_LEAN_AND_MEAN=1;WINVER=0x0A00;_WIN32_WINNT=0x0A00;NTDDI_VERSION=0x0A000007;CMAKE_INTDIR=\"Debug\";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>.;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ResourceCompile>
<Midl>
<AdditionalIncludeDirectories>.;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<OutputDirectory>$(ProjectDir)/$(IntDir)</OutputDirectory>
<HeaderFileName>%(Filename).h</HeaderFileName>
<TypeLibraryName>%(Filename).tlb</TypeLibraryName>
<InterfaceIdentifierFileName>%(Filename)_i.c</InterfaceIdentifierFileName>
<ProxyFileName>%(Filename)_p.c</ProxyFileName>
</Midl>
<Link>
<AdditionalDependencies>d2d1.lib;d3d11.lib;dxgi.lib;dwrite.lib;windowscodecs.lib;pathcch.lib;Mswsock.lib;mfreadwrite.lib;Mfplat.lib;mfuuid.lib;ntdll.lib;Secur32.lib;crypt32.lib;secur32.lib;avrt.lib;WindowsApp.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalLibraryDirectories>%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<AdditionalOptions>%(AdditionalOptions) /machine:x64</AdditionalOptions>
<EnableCOMDATFolding>false</EnableCOMDATFolding>
<GenerateDebugInformation>true</GenerateDebugInformation>
<IgnoreSpecificDefaultLibraries>%(IgnoreSpecificDefaultLibraries)</IgnoreSpecificDefaultLibraries>
<ImportLibrary>lib/Debug/HolographicHostSample.lib</ImportLibrary>
<OptimizeReferences>false</OptimizeReferences>
<ProgramDataBaseFile>bin/Debug/HolographicHostSample.pdb</ProgramDataBaseFile>
<SubSystem>Windows</SubSystem>
</Link>
<ProjectReference>
<LinkLibraryDependencies>false</LinkLibraryDependencies>
</ProjectReference>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<ClCompile>
<AdditionalIncludeDirectories>.;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<AdditionalOptions>%(AdditionalOptions) /await</AdditionalOptions>
<AdditionalUsingDirectories>$(VCIDEInstallDir)vcpackages;$(WindowsSDK_UnionMetadataPath)</AdditionalUsingDirectories>
<AssemblerListingLocation>Release/</AssemblerListingLocation>
<CompileAs>CompileAsCpp</CompileAs>
<CompileAsWinRT>true</CompileAsWinRT>
<ControlFlowGuard>Guard</ControlFlowGuard>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
<DisableSpecificWarnings>4503</DisableSpecificWarnings>
<ExceptionHandling>Sync</ExceptionHandling>
<FunctionLevelLinking>true</FunctionLevelLinking>
<InlineFunctionExpansion>AnySuitable</InlineFunctionExpansion>
<LanguageStandard>stdcpp17</LanguageStandard>
<MultiProcessorCompilation>true</MultiProcessorCompilation>
<Optimization>MaxSpeed</Optimization>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
<RuntimeTypeInfo>true</RuntimeTypeInfo>
<SDLCheck>true</SDLCheck>
<UseFullPaths>false</UseFullPaths>
<WarningLevel>Level3</WarningLevel>
<PreprocessorDefinitions>WIN32;_WINDOWS;NDEBUG;ALLOW_INSECURE_RANDOM_DEVICE=1;UNICODE;_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING;_SILENCE_CXX17_CODECVT_HEADER_DEPRECATION_WARNING;_SILENCE_CXX17_OLD_ALLOCATOR_MEMBERS_DEPRECATION_WARNING;RDBUILD_PLATFORM_DEFINED;RDBUILD_PLATFORM_WINDOWS;RDBUILD_PLATFORM_WINDOWS_WIN32;RDBUILD_ARCHITECTURE_DEFINED;RDBUILD_ARCH_INTEL;RDBUILD_ARCH_INTEL_X64;RDBUILD_DEFAULT_CALL=;RDBUILD_FEATURE_OPENSSL=0;_SCL_SECURE_NO_WARNINGS;_SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING;WIN32_LEAN_AND_MEAN=1;WINVER=0x0A00;_WIN32_WINNT=0x0A00;NTDDI_VERSION=0x0A000007;CMAKE_INTDIR="Release";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<ObjectFileName>$(IntDir)</ObjectFileName>
</ClCompile>
<ResourceCompile>
<PreprocessorDefinitions>WIN32;_WINDOWS;NDEBUG;ALLOW_INSECURE_RANDOM_DEVICE=1;UNICODE;_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING;_SILENCE_CXX17_CODECVT_HEADER_DEPRECATION_WARNING;_SILENCE_CXX17_OLD_ALLOCATOR_MEMBERS_DEPRECATION_WARNING;RDBUILD_PLATFORM_DEFINED;RDBUILD_PLATFORM_WINDOWS;RDBUILD_PLATFORM_WINDOWS_WIN32;RDBUILD_ARCHITECTURE_DEFINED;RDBUILD_ARCH_INTEL;RDBUILD_ARCH_INTEL_X64;RDBUILD_DEFAULT_CALL=;RDBUILD_FEATURE_OPENSSL=0;_SCL_SECURE_NO_WARNINGS;_SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING;WIN32_LEAN_AND_MEAN=1;WINVER=0x0A00;_WIN32_WINNT=0x0A00;NTDDI_VERSION=0x0A000007;CMAKE_INTDIR=\"Release\";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>.;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ResourceCompile>
<Midl>
<AdditionalIncludeDirectories>.;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<OutputDirectory>$(ProjectDir)/$(IntDir)</OutputDirectory>
<HeaderFileName>%(Filename).h</HeaderFileName>
<TypeLibraryName>%(Filename).tlb</TypeLibraryName>
<InterfaceIdentifierFileName>%(Filename)_i.c</InterfaceIdentifierFileName>
<ProxyFileName>%(Filename)_p.c</ProxyFileName>
</Midl>
<Link>
<AdditionalDependencies>d2d1.lib;d3d11.lib;dxgi.lib;dwrite.lib;windowscodecs.lib;pathcch.lib;Mswsock.lib;mfreadwrite.lib;Mfplat.lib;mfuuid.lib;ntdll.lib;Secur32.lib;crypt32.lib;secur32.lib;avrt.lib;WindowsApp.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalLibraryDirectories>%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<AdditionalOptions>%(AdditionalOptions) /machine:x64</AdditionalOptions>
<GenerateDebugInformation>DebugFull</GenerateDebugInformation>
<IgnoreSpecificDefaultLibraries>%(IgnoreSpecificDefaultLibraries)</IgnoreSpecificDefaultLibraries>
<ImportLibrary>lib/Release/HolographicHostSample.lib</ImportLibrary>
<ProgramDataBaseFile>bin/Release/HolographicHostSample.pdb</ProgramDataBaseFile>
<SubSystem>Windows</SubSystem>
</Link>
<ProjectReference>
<LinkLibraryDependencies>false</LinkLibraryDependencies>
</ProjectReference>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='RelWithDebInfo|x64'">
<ClCompile>
<AdditionalIncludeDirectories>.;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<AdditionalOptions>%(AdditionalOptions) /await</AdditionalOptions>
<AdditionalUsingDirectories>$(VCIDEInstallDir)vcpackages;$(WindowsSDK_UnionMetadataPath)</AdditionalUsingDirectories>
<AssemblerListingLocation>RelWithDebInfo/</AssemblerListingLocation>
<CompileAs>CompileAsCpp</CompileAs>
<CompileAsWinRT>true</CompileAsWinRT>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
<DisableSpecificWarnings>4503</DisableSpecificWarnings>
<ExceptionHandling>Sync</ExceptionHandling>
<FunctionLevelLinking>true</FunctionLevelLinking>
<InlineFunctionExpansion>OnlyExplicitInline</InlineFunctionExpansion>
<LanguageStandard>stdcpp17</LanguageStandard>
<MultiProcessorCompilation>true</MultiProcessorCompilation>
<Optimization>MaxSpeed</Optimization>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
<RuntimeTypeInfo>true</RuntimeTypeInfo>
<SDLCheck>true</SDLCheck>
<UseFullPaths>false</UseFullPaths>
<WarningLevel>Level3</WarningLevel>
<PreprocessorDefinitions>WIN32;_WINDOWS;NDEBUG;ALLOW_INSECURE_RANDOM_DEVICE=1;UNICODE;_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING;_SILENCE_CXX17_CODECVT_HEADER_DEPRECATION_WARNING;_SILENCE_CXX17_OLD_ALLOCATOR_MEMBERS_DEPRECATION_WARNING;RDBUILD_PLATFORM_DEFINED;RDBUILD_PLATFORM_WINDOWS;RDBUILD_PLATFORM_WINDOWS_WIN32;RDBUILD_ARCHITECTURE_DEFINED;RDBUILD_ARCH_INTEL;RDBUILD_ARCH_INTEL_X64;RDBUILD_DEFAULT_CALL=;RDBUILD_FEATURE_OPENSSL=0;_SCL_SECURE_NO_WARNINGS;_SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING;WIN32_LEAN_AND_MEAN=1;WINVER=0x0A00;_WIN32_WINNT=0x0A00;NTDDI_VERSION=0x0A000007;CMAKE_INTDIR="RelWithDebInfo";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<ObjectFileName>$(IntDir)</ObjectFileName>
</ClCompile>
<ResourceCompile>
<PreprocessorDefinitions>WIN32;_WINDOWS;NDEBUG;ALLOW_INSECURE_RANDOM_DEVICE=1;UNICODE;_SILENCE_CXX17_ITERATOR_BASE_CLASS_DEPRECATION_WARNING;_SILENCE_CXX17_CODECVT_HEADER_DEPRECATION_WARNING;_SILENCE_CXX17_OLD_ALLOCATOR_MEMBERS_DEPRECATION_WARNING;RDBUILD_PLATFORM_DEFINED;RDBUILD_PLATFORM_WINDOWS;RDBUILD_PLATFORM_WINDOWS_WIN32;RDBUILD_ARCHITECTURE_DEFINED;RDBUILD_ARCH_INTEL;RDBUILD_ARCH_INTEL_X64;RDBUILD_DEFAULT_CALL=;RDBUILD_FEATURE_OPENSSL=0;_SCL_SECURE_NO_WARNINGS;_SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING;WIN32_LEAN_AND_MEAN=1;WINVER=0x0A00;_WIN32_WINNT=0x0A00;NTDDI_VERSION=0x0A000007;CMAKE_INTDIR=\"RelWithDebInfo\";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>.;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ResourceCompile>
<Midl>
<AdditionalIncludeDirectories>.;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<OutputDirectory>$(ProjectDir)/$(IntDir)</OutputDirectory>
<HeaderFileName>%(Filename).h</HeaderFileName>
<TypeLibraryName>%(Filename).tlb</TypeLibraryName>
<InterfaceIdentifierFileName>%(Filename)_i.c</InterfaceIdentifierFileName>
<ProxyFileName>%(Filename)_p.c</ProxyFileName>
</Midl>
<Link>
<AdditionalDependencies>d2d1.lib;d3d11.lib;dxgi.lib;dwrite.lib;windowscodecs.lib;pathcch.lib;Mswsock.lib;mfreadwrite.lib;Mfplat.lib;mfuuid.lib;ntdll.lib;Secur32.lib;crypt32.lib;secur32.lib;avrt.lib;WindowsApp.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalLibraryDirectories>%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<AdditionalOptions>%(AdditionalOptions) /machine:x64</AdditionalOptions>
<EnableCOMDATFolding>false</EnableCOMDATFolding>
<GenerateDebugInformation>true</GenerateDebugInformation>
<IgnoreSpecificDefaultLibraries>%(IgnoreSpecificDefaultLibraries)</IgnoreSpecificDefaultLibraries>
<ImportLibrary>lib/RelWithDebInfo/HolographicHostSample.lib</ImportLibrary>
<OptimizeReferences>false</OptimizeReferences>
<ProgramDataBaseFile>bin/RelWithDebInfo/HolographicHostSample.pdb</ProgramDataBaseFile>
<SubSystem>Windows</SubSystem>
</Link>
<ProjectReference>
<LinkLibraryDependencies>false</LinkLibraryDependencies>
</ProjectReference>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include=".\SampleHostMain.cpp" />
<ClInclude Include=".\SampleHostMain.h" />
<ClInclude Include=".\pch.h" />
<ClCompile Include=".\pch.cpp" />
<ClCompile Include=".\SampleHostWindowWin32.cpp" />
<ClInclude Include=".\SampleHostWindowWin32.h" />
<ClCompile Include=".\Common\CameraResources.cpp" />
<ClInclude Include=".\Common\CameraResources.h" />
<ClInclude Include=".\Common\DbgLog.h" />
<ClCompile Include=".\Common\DeviceResources.cpp" />
<ClInclude Include=".\Common\DeviceResources.h" />
<ClInclude Include=".\Common\DirectXHelper.h" />
<ClCompile Include=".\Common\PerceptionTypes.cpp" />
<ClInclude Include=".\Common\PerceptionTypes.h" />
<ClInclude Include=".\Common\Utils.h" />
<ClCompile Include=".\Common\Speech.cpp" />
<ClInclude Include=".\Common\Speech.h" />
<ClCompile Include=".\Content\PerceptionDeviceHandler.cpp" />
<ClInclude Include=".\Content\PerceptionDeviceHandler.h" />
<ClCompile Include=".\Content\QRCodeRenderer.cpp" />
<ClInclude Include=".\Content\QRCodeRenderer.h" />
<ClCompile Include=".\Content\QRCodeTracker.cpp" />
<ClInclude Include=".\Content\QRCodeTracker.h" />
<ClCompile Include=".\Content\RenderableObject.cpp" />
<ClInclude Include=".\Content\RenderableObject.h" />
<ClInclude Include=".\Content\ShaderStructures.h" />
<ClCompile Include=".\Content\SpatialInputHandler.cpp" />
<ClInclude Include=".\Content\SpatialInputHandler.h" />
<ClCompile Include=".\Content\SpatialInputRenderer.cpp" />
<ClInclude Include=".\Content\SpatialInputRenderer.h" />
<ClCompile Include=".\Content\SpinningCubeRenderer.cpp" />
<ClInclude Include=".\Content\SpinningCubeRenderer.h" />
<ClCompile Include=".\Content\SpatialSurfaceMeshRenderer.cpp" />
<ClInclude Include=".\Content\SpatialSurfaceMeshRenderer.h" />
<Image Include=".\Assets\LockScreenLogo.scale-200.png" />
<Image Include=".\Assets\SplashScreen.scale-200.png" />
<Image Include=".\Assets\Square44x44Logo.scale-200.png" />
<Image Include=".\Assets\Square44x44Logo.targetsize-24_altform-unplated.png" />
<Image Include=".\Assets\Square150x150Logo.scale-200.png" />
<Image Include=".\Assets\StoreLogo.png" />
<Image Include=".\Assets\Wide310x150Logo.scale-200.png" />
<AppxManifest Include=".\Package.appxmanifest" />
<FXCompile Include=".\Content\shaders\hsa_VertexShader.hlsl">
<ShaderType>Vertex</ShaderType>
<EntryPointName>main</EntryPointName>
<ShaderModel>5.0</ShaderModel>
</FXCompile>
<FXCompile Include=".\Content\shaders\hsa_VPRTVertexShader.hlsl">
<ShaderType>Vertex</ShaderType>
<EntryPointName>main</EntryPointName>
<ShaderModel>5.0</ShaderModel>
</FXCompile>
<FXCompile Include=".\Content\shaders\hsa_SRMeshVertexShader.hlsl">
<ShaderType>Vertex</ShaderType>
<EntryPointName>main</EntryPointName>
<ShaderModel>5.0</ShaderModel>
</FXCompile>
<FXCompile Include=".\Content\shaders\hsa_PixelShader.hlsl">
<ShaderType>Pixel</ShaderType>
<EntryPointName>main</EntryPointName>
<ShaderModel>5.0</ShaderModel>
</FXCompile>
<FXCompile Include=".\Content\shaders\hsa_SRMeshPixelShader.hlsl">
<ShaderType>Pixel</ShaderType>
<EntryPointName>main</EntryPointName>
<ShaderModel>5.0</ShaderModel>
</FXCompile>
<FXCompile Include=".\Content\shaders\hsa_GeometryShader.hlsl">
<ShaderType>Geometry</ShaderType>
<EntryPointName>main</EntryPointName>
<ShaderModel>5.0</ShaderModel>
</FXCompile>
<FXCompile Include=".\Content\shaders\hsa_SRMeshGeometryShader.hlsl">
<ShaderType>Geometry</ShaderType>
<EntryPointName>main</EntryPointName>
<ShaderModel>5.0</ShaderModel>
</FXCompile>
<None Include=".\SpeechGrammar.xml">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
</Project>

View file

@ -0,0 +1,32 @@
<?xml version="1.0" encoding="utf-8"?>
<Package xmlns="http://schemas.microsoft.com/appx/manifest/foundation/windows10" xmlns:mp="http://schemas.microsoft.com/appx/2014/phone/manifest" xmlns:uap="http://schemas.microsoft.com/appx/manifest/uap/windows10" xmlns:rescap="http://schemas.microsoft.com/appx/manifest/foundation/windows10/restrictedcapabilities" IgnorableNamespaces="uap mp rescap">
<Identity Name="b0cf2e39-8f6e-4238-b33a-868c9b1122ce" Publisher="CN=Microsoft Corporation, O=Microsoft Corporation, L=Redmond, S=Washington, C=US" Version="2.0.7.0" />
<mp:PhoneIdentity PhoneProductId="b0cf2e39-8f6e-4238-b33a-868c9b1122ce" PhonePublisherId="00000000-0000-0000-0000-000000000000" />
<Properties>
<DisplayName>RemotingHostSample</DisplayName>
<PublisherDisplayName>Microsoft Corporation</PublisherDisplayName>
<Logo>Assets\StoreLogo.png</Logo>
</Properties>
<Dependencies>
<TargetDeviceFamily Name="Windows.Universal" MinVersion="10.0.0.0" MaxVersionTested="10.0.0.0" />
</Dependencies>
<Resources>
<Resource Language="x-generate" />
</Resources>
<Applications>
<Application Id="App" Executable="$targetnametoken$.exe" EntryPoint="SampleHostWindowUWPView">
<uap:VisualElements DisplayName="RemotingHostSampleUWP" Square150x150Logo="Assets\Square150x150Logo.png" Square44x44Logo="Assets\Square44x44Logo.png" Description="RemotingHostSampleUWP" BackgroundColor="transparent">
<uap:DefaultTile Wide310x150Logo="Assets\Wide310x150Logo.png">
</uap:DefaultTile>
<uap:SplashScreen Image="Assets\SplashScreen.png" />
</uap:VisualElements>
</Application>
</Applications>
<Capabilities>
<Capability Name="internetClient" />
<Capability Name="internetClientServer" />
<Capability Name="privateNetworkClientServer" />
<rescap:Capability Name="perceptionMonitoring" />
<DeviceCapability Name="microphone" />
</Capabilities>
</Package>

View file

@ -0,0 +1,888 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "SampleHostMain.h"
#include "Common\DbgLog.h"
#include "Common\DirectXHelper.h"
#include "Common\Speech.h"
#include <DirectXColors.h>
#include <HolographicAppRemoting\Streamer.h>
#include <winrt/Microsoft.Holographic.AppRemoting.h>
#include <winrt/Windows.Perception.People.h>
using namespace concurrency;
using namespace std::chrono_literals;
using namespace winrt::Microsoft::Holographic::AppRemoting;
using namespace winrt::Windows::Foundation::Numerics;
using namespace winrt::Windows::Graphics::Holographic;
using namespace winrt::Windows::Perception::People;
using namespace winrt::Windows::Perception::Spatial;
using namespace winrt::Windows::UI::Input;
namespace
{
const wchar_t* StreamerConnectionStateToString(ConnectionState state)
{
switch (state)
{
case ConnectionState::Disconnected:
return L"Disconnected";
case ConnectionState::Connecting:
return L"Connecting";
case ConnectionState::Connected:
return L"Connected";
}
return L"Unknown";
}
} // namespace
SampleHostMain::SampleHostMain(std::weak_ptr<IWindow> window)
: m_window(window)
{
m_deviceResources = std::make_shared<DXHelper::DeviceResources>();
m_deviceResources->RegisterDeviceNotify(this);
}
SampleHostMain::~SampleHostMain()
{
ShutdownRemoteContext();
m_deviceResources->RegisterDeviceNotify(nullptr);
UnregisterHolographicEventHandlers();
}
HolographicFrame SampleHostMain::Update()
{
auto timeDelta = std::chrono::high_resolution_clock::now() - m_windowTitleUpdateTime;
if (timeDelta >= 1s)
{
WindowUpdateTitle();
m_windowTitleUpdateTime = std::chrono::high_resolution_clock::now();
m_framesPerSecond = 0;
}
if (!m_holographicSpace)
{
return nullptr;
}
// NOTE: DXHelper::DeviceResources::Present does not wait for the frame to finish.
// Instead we wait here, before calling CreateNextFrame on the HolographicSpace.
// Waiting at this point avoids frame delta time spikes caused by PeekMessage: because the wait
// happens after PeekMessage, WaitForNextFrameReady compensates for any time spent in PeekMessage.
m_holographicSpace.WaitForNextFrameReady();
HolographicFrame holographicFrame = m_holographicSpace.CreateNextFrame();
HolographicFramePrediction prediction = holographicFrame.CurrentPrediction();
// Back buffers can change from frame to frame. Validate each buffer, and recreate resource views and depth buffers as needed.
m_deviceResources->EnsureCameraResources(holographicFrame, prediction);
SpatialCoordinateSystem coordinateSystem = nullptr;
{
coordinateSystem = m_referenceFrame.CoordinateSystem();
}
// Check for new input state since the last frame.
Spatial::SpatialTappedEventArgs tapped = m_spatialInputHandler->CheckForTapped();
if (tapped)
{
Spatial::SpatialPointerPose pointerPose = tapped.TryGetPointerPose(coordinateSystem);
// When the Tapped spatial input event is received, the sample hologram will be repositioned two meters in front of the user.
m_spinningCubeRenderer->PositionHologram(pointerPose);
}
else
{
static float3 initialCubePosition = float3::zero();
auto manipulationStarted = m_spatialInputHandler->CheckForManipulationStarted();
if (manipulationStarted)
{
initialCubePosition = m_spinningCubeRenderer->GetPosition();
m_spinningCubeRenderer->Pause();
}
else
{
auto manipulationUpdated = m_spatialInputHandler->CheckForManipulationUpdated();
if (manipulationUpdated)
{
auto delta = manipulationUpdated.TryGetCumulativeDelta(coordinateSystem);
if (delta)
{
m_spinningCubeRenderer->SetPosition(initialCubePosition + delta.Translation());
}
}
else
{
switch (m_spatialInputHandler->CheckForManipulationResult())
{
case SpatialInputHandler::ManipulationResult::Canceled:
m_spinningCubeRenderer->SetPosition(initialCubePosition);
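// Intentional fall-through: a canceled manipulation restores the original cube position and then unpauses.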
case SpatialInputHandler::ManipulationResult::Completed:
m_spinningCubeRenderer->Unpause();
break;
}
}
}
}
std::chrono::duration<float> timeSinceStart = std::chrono::high_resolution_clock::now() - m_startTime;
m_spinningCubeRenderer->Update(timeSinceStart.count());
if (m_spatialSurfaceMeshRenderer != nullptr)
{
m_spatialSurfaceMeshRenderer->Update(coordinateSystem);
}
m_spatialInputRenderer->Update(prediction.Timestamp(), coordinateSystem);
m_qrCodeRenderer->Update(*m_perceptionDeviceHandler.get(), coordinateSystem);
// We complete the frame update by using information about our content positioning to set the focus point.
for (auto cameraPose : prediction.CameraPoses())
{
try
{
HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);
// Set the focus point for image stabilization to the center of the sample hologram.
// NOTE: A focus point can be set for every HolographicFrame. If a focus point is set on a HolographicFrame,
// it will get transmitted to the player and will get set during the PlayerContext::BlitRemoteFrame() call.
renderingParameters.SetFocusPoint(coordinateSystem, m_spinningCubeRenderer->GetPosition());
}
catch (winrt::hresult_error&)
{
}
}
#ifdef ENABLE_CUSTOM_DATA_CHANNEL_SAMPLE
timeDelta = std::chrono::high_resolution_clock::now() - m_customDataChannelSendTime;
if (timeDelta > 5s)
{
m_customDataChannelSendTime = std::chrono::high_resolution_clock::now();
// Send a ping roughly every five seconds if a custom data channel has been established.
std::lock_guard lock(m_customDataChannelLock);
if (m_customDataChannel)
{
uint8_t data = 1;
m_customDataChannel.SendData(
winrt::array_view<const uint8_t>(reinterpret_cast<const uint8_t*>(&data), reinterpret_cast<const uint8_t*>(&data + 1)),
true);
OutputDebugString(TEXT("Request Sent.\n"));
}
}
#endif
return holographicFrame;
}
void SampleHostMain::Render(HolographicFrame holographicFrame)
{
bool atLeastOneCameraRendered = false;
m_deviceResources->UseHolographicCameraResources([this, holographicFrame, &atLeastOneCameraRendered](
std::map<UINT32, std::unique_ptr<DXHelper::CameraResources>>& cameraResourceMap) {
holographicFrame.UpdateCurrentPrediction();
HolographicFramePrediction prediction = holographicFrame.CurrentPrediction();
SpatialCoordinateSystem coordinateSystem = nullptr;
{
coordinateSystem = m_referenceFrame.CoordinateSystem();
}
for (auto cameraPose : prediction.CameraPoses())
{
try
{
DXHelper::CameraResources* pCameraResources = cameraResourceMap[cameraPose.HolographicCamera().Id()].get();
if (pCameraResources == nullptr)
{
continue;
}
m_deviceResources->UseD3DDeviceContext([&](ID3D11DeviceContext3* context) {
// Clear the back buffer view.
context->ClearRenderTargetView(pCameraResources->GetBackBufferRenderTargetView(), DirectX::Colors::Transparent);
context->ClearDepthStencilView(
pCameraResources->GetDepthStencilView(), D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f, 0);
// The view and projection matrices for each holographic camera will change
// every frame. This function refreshes the data in the constant buffer for
// the holographic camera indicated by cameraPose.
pCameraResources->UpdateViewProjectionBuffer(m_deviceResources, cameraPose, coordinateSystem);
// Set up the camera buffer.
bool cameraActive = pCameraResources->AttachViewProjectionBuffer(m_deviceResources);
// Only render world-locked content when positional tracking is active.
if (cameraActive)
{
// Set the render target, and set the depth target drawing buffer.
ID3D11RenderTargetView* const targets[1] = {pCameraResources->GetBackBufferRenderTargetView()};
context->OMSetRenderTargets(1, targets, pCameraResources->GetDepthStencilView());
// Render the scene objects.
m_spinningCubeRenderer->Render(pCameraResources->IsRenderingStereoscopic());
if (m_spatialSurfaceMeshRenderer != nullptr)
{
m_spatialSurfaceMeshRenderer->Render(pCameraResources->IsRenderingStereoscopic());
}
m_spatialInputRenderer->Render(pCameraResources->IsRenderingStereoscopic());
m_qrCodeRenderer->Render(pCameraResources->IsRenderingStereoscopic());
}
});
atLeastOneCameraRendered = true;
}
catch (const winrt::hresult_error&)
{
}
}
});
if (atLeastOneCameraRendered)
{
m_deviceResources->Present(holographicFrame);
}
if (m_swapChain == nullptr && m_isInitialized)
{
// A device lost event has occurred.
// Reconnection is necessary because the holographic streamer uses the D3D device.
// The following resources depend on the D3D device:
// * Holographic streamer
// * Renderer
// * Holographic space
// The InitializeRemoteContext() function will call the functions necessary to recreate these resources.
ShutdownRemoteContext();
InitializeRemoteContextAndConnectOrListen();
}
// Determine whether or not to copy to the preview buffer.
bool copyPreview = m_remoteContext == nullptr || m_remoteContext.ConnectionState() != ConnectionState::Connected;
if (copyPreview && m_isInitialized)
{
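// No player is connected yet, so clear the preview swap chain to a solid color to give visual feedback in the host window.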
winrt::com_ptr<ID3D11Device1> spDevice;
spDevice.copy_from(GetDeviceResources()->GetD3DDevice());
winrt::com_ptr<ID3D11Texture2D> spBackBuffer;
winrt::check_hresult(m_swapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), spBackBuffer.put_void()));
// Create a render target view of the back buffer.
// Creating this resource is inexpensive, and is better than keeping track of
// the back buffers in order to pre-allocate render target views for each one.
winrt::com_ptr<ID3D11RenderTargetView> spRenderTargetView;
winrt::check_hresult(spDevice->CreateRenderTargetView(spBackBuffer.get(), nullptr, spRenderTargetView.put()));
GetDeviceResources()->UseD3DDeviceContext(
[&](auto context) { context->ClearRenderTargetView(spRenderTargetView.get(), DirectX::Colors::CornflowerBlue); });
WindowPresentSwapChain();
}
m_framesPerSecond++;
}
void SampleHostMain::SetHostOptions(bool listen, const std::wstring& hostname, uint32_t port)
{
m_listen = listen;
m_hostname = hostname;
m_port = port;
}
void SampleHostMain::OnKeyPress(char key)
{
switch (key)
{
case ' ':
InitializeRemoteContextAndConnectOrListen();
break;
case 'd':
ShutdownRemoteContext();
break;
case 'p':
m_showPreview = !m_showPreview;
break;
case 'l':
LoadPosition();
break;
case 's':
SavePosition();
break;
case 'c':
m_spinningCubeRenderer->TogglePauseState();
break;
}
WindowUpdateTitle();
}
void SampleHostMain::OnResize(int width, int height)
{
std::lock_guard _lg(m_deviceLock);
if (width != m_width || height != m_height)
{
m_width = width;
m_height = height;
if (m_swapChain)
{
winrt::check_hresult(m_swapChain->ResizeBuffers(2, m_width, m_height, DXGI_FORMAT_B8G8R8A8_UNORM, 0));
}
}
}
void SampleHostMain::OnRecognizedSpeech(const winrt::hstring& recognizedText)
{
bool changedColor = false;
DirectX::XMFLOAT4 color = {1, 1, 1, 1};
if (recognizedText == L"Red")
{
color = {1, 0, 0, 1};
changedColor = true;
}
else if (recognizedText == L"Blue")
{
color = {0, 0, 1, 1};
changedColor = true;
}
else if (recognizedText == L"Green")
{
color = {0, 1, 0, 1};
changedColor = true;
}
else if (recognizedText == L"Default")
{
color = {1, 1, 1, 1};
changedColor = true;
}
else if (recognizedText == L"Aquamarine")
{
color = {0, 1, 1, 1};
changedColor = true;
}
else if (recognizedText == L"Load position")
{
LoadPosition();
}
else if (recognizedText == L"Save position")
{
SavePosition();
}
if (changedColor && m_spinningCubeRenderer)
{
m_spinningCubeRenderer->SetColorFilter(color);
}
}
void SampleHostMain::InitializeRemoteContextAndConnectOrListen()
{
if (!m_remoteContext)
{
// Create the RemoteContext
// IMPORTANT: This must be done before creating the HolographicSpace (or any other call to the Holographic API).
CreateRemoteContext(m_remoteContext, 20000, false, PreferredVideoCodec::Default);
// Create the HolographicSpace
CreateHolographicSpaceAndDeviceResources();
if (auto remoteSpeech = m_remoteContext.GetRemoteSpeech())
{
Speech::InitializeSpeechAsync(remoteSpeech, m_onRecognizedSpeechRevoker, weak_from_this());
}
winrt::com_ptr<ID3D11Device1> device;
device.copy_from(GetDeviceResources()->GetD3DDevice());
WindowCreateSwapChain(device);
DXGI_ADAPTER_DESC2 dxgiAdapterDesc;
if (SUCCEEDED(GetDeviceResources()->GetDXGIAdapter()->GetDesc2(&dxgiAdapterDesc)) &&
(dxgiAdapterDesc.Flags & DXGI_ADAPTER_FLAG_SOFTWARE))
{
DebugLog(L"Software video adapter is not supported for holographic streamer.\n");
m_remoteContext = nullptr;
return;
}
winrt::weak_ref<IRemoteContext> remoteContextWeakRef = m_remoteContext;
m_onConnectedEventRevoker = m_remoteContext.OnConnected(winrt::auto_revoke, [this, remoteContextWeakRef]() {
if (auto remoteContext = remoteContextWeakRef.get())
{
WindowUpdateTitle();
remoteContext.CreateDataChannel(0, DataChannelPriority::Low);
}
});
m_onDisconnectedEventRevoker =
m_remoteContext.OnDisconnected(winrt::auto_revoke, [this, remoteContextWeakRef](ConnectionFailureReason failureReason) {
if (auto remoteContext = remoteContextWeakRef.get())
{
DebugLog(L"Disconnected with reason %d", failureReason);
WindowUpdateTitle();
// Reconnect if this is a transient failure.
if (failureReason == ConnectionFailureReason::HandshakeUnreachable ||
failureReason == ConnectionFailureReason::TransportUnreachable ||
failureReason == ConnectionFailureReason::ConnectionLost)
{
DebugLog(L"Reconnecting...");
ConnectOrListen();
}
// Failure reason None indicates a normal disconnect.
else if (failureReason != ConnectionFailureReason::None)
{
DebugLog(L"Disconnected with unrecoverable error, not attempting to reconnect.");
}
}
});
m_onSendFrameEventRevoker = m_remoteContext.OnSendFrame(
winrt::auto_revoke, [this](const winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DSurface& texture) {
if (m_showPreview)
{
winrt::com_ptr<ID3D11Device1> spDevice;
spDevice.copy_from(GetDeviceResources()->GetD3DDevice());
winrt::com_ptr<ID3D11Texture2D> spBackBuffer;
winrt::check_hresult(m_swapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), spBackBuffer.put_void()));
winrt::com_ptr<ID3D11Texture2D> texturePtr;
{
winrt::com_ptr<ID3D11Resource> resource;
winrt::com_ptr<::IInspectable> inspectable = texture.as<::IInspectable>();
winrt::com_ptr<Windows::Graphics::DirectX::Direct3D11::IDirect3DDxgiInterfaceAccess> dxgiInterfaceAccess;
winrt::check_hresult(inspectable->QueryInterface(__uuidof(dxgiInterfaceAccess), dxgiInterfaceAccess.put_void()));
winrt::check_hresult(dxgiInterfaceAccess->GetInterface(__uuidof(resource), resource.put_void()));
resource.as(texturePtr);
}
GetDeviceResources()->UseD3DDeviceContext([&](auto context) {
context->CopySubresourceRegion(
spBackBuffer.get(), // dest
0, // dest subresource
0,
0,
0, // dest x, y, z
texturePtr.get(), // source
0, // source subresource
nullptr); // source box, null means the entire resource
});
WindowPresentSwapChain();
}
});
#ifdef ENABLE_CUSTOM_DATA_CHANNEL_SAMPLE
m_onDataChannelCreatedEventRevoker =
m_remoteContext.OnDataChannelCreated(winrt::auto_revoke, [this](const IDataChannel& dataChannel, uint8_t channelId) {
std::lock_guard lock(m_customDataChannelLock);
m_customDataChannel = dataChannel;
m_customChannelDataReceivedEventRevoker = m_customDataChannel.OnDataReceived(
winrt::auto_revoke, [this](winrt::array_view<const uint8_t> dataView) { OnCustomDataChannelDataReceived(); });
m_customChannelClosedEventRevoker =
m_customDataChannel.OnClosed(winrt::auto_revoke, [this]() { OnCustomDataChannelClosed(); });
});
#endif
ConnectOrListen();
}
}
void SampleHostMain::CreateHolographicSpaceAndDeviceResources()
{
UnregisterHolographicEventHandlers();
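// With the RemoteContext already created, CreateForCoreWindow(nullptr) yields a HolographicSpace that targets the remote device rather than a local CoreWindow.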
m_holographicSpace = HolographicSpace::CreateForCoreWindow(nullptr);
m_deviceResources->SetHolographicSpace(m_holographicSpace);
m_spinningCubeRenderer = std::make_unique<SpinningCubeRenderer>(m_deviceResources);
// Uncomment the line below to render spatial surfaces
// m_spatialSurfaceMeshRenderer = std::make_unique<SpatialSurfaceMeshRenderer>(m_deviceResources);
m_spatialInputRenderer = std::make_unique<SpatialInputRenderer>(m_deviceResources);
m_spatialInputHandler = std::make_shared<SpatialInputHandler>();
m_qrCodeRenderer = std::make_unique<QRCodeRenderer>(m_deviceResources);
m_perceptionDeviceHandler = std::make_shared<PerceptionDeviceHandler>();
m_perceptionDeviceHandler->Start();
m_locator = SpatialLocator::GetDefault();
// Be able to respond to changes in the positional tracking state.
m_locatabilityChangedToken = m_locator.LocatabilityChanged({this, &SampleHostMain::OnLocatabilityChanged});
m_cameraAddedToken = m_holographicSpace.CameraAdded({this, &SampleHostMain::OnCameraAdded});
m_cameraRemovedToken = m_holographicSpace.CameraRemoved({this, &SampleHostMain::OnCameraRemoved});
{
m_referenceFrame = m_locator.CreateStationaryFrameOfReferenceAtCurrentLocation(float3::zero(), quaternion(0, 0, 0, 1), 0.0);
}
m_isInitialized = true;
}
void SampleHostMain::ConnectOrListen()
{
// Try to establish a connection.
try
{
m_remoteContext.Disconnect();
// Request access to eyes pose data on every connection/listen attempt.
RequestEyesPoseAccess();
if (m_port == 0)
{
m_port = 8265;
}
if (m_listen)
{
if (m_hostname.empty())
{
m_hostname = L"0.0.0.0";
}
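// Listen takes a handshake port and a transport port; here the transport port is the handshake port + 1.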
m_remoteContext.Listen(m_hostname, m_port, m_port + 1);
}
else
{
if (m_hostname.empty())
{
m_hostname = L"127.0.0.1";
}
m_remoteContext.Connect(m_hostname, m_port);
}
}
catch (winrt::hresult_error& e)
{
if (m_listen)
{
DebugLog(L"Listen failed with hr = 0x%08X", e.code());
}
else
{
DebugLog(L"Connect failed with hr = 0x%08X", e.code());
}
}
}
void SampleHostMain::LoadPosition()
{
auto storeRequest = SpatialAnchorManager::RequestStoreAsync();
storeRequest.Completed([this](winrt::Windows::Foundation::IAsyncOperation<SpatialAnchorStore> result, auto asyncStatus) {
if (result.Status() != winrt::Windows::Foundation::AsyncStatus::Completed)
{
return;
}
const SpatialAnchorStore& store = result.GetResults();
if (store)
{
auto anchors = store.GetAllSavedAnchors();
if (anchors.HasKey(L"position"))
{
auto position = anchors.Lookup(L"position");
auto positionToOrigin = position.CoordinateSystem().TryGetTransformTo(m_referenceFrame.CoordinateSystem());
if (positionToOrigin)
{
const float3 res = transform(float3::zero(), positionToOrigin.Value());
m_spinningCubeRenderer->SetPosition(res);
OutputDebugStringW(L"Loaded cube position from SpatialAnchorStore.\n");
}
}
}
});
}
void SampleHostMain::SavePosition()
{
auto position = SpatialAnchor::TryCreateRelativeTo(m_referenceFrame.CoordinateSystem(), m_spinningCubeRenderer->GetPosition());
auto storeRequest = SpatialAnchorManager::RequestStoreAsync();
storeRequest.Completed([position](winrt::Windows::Foundation::IAsyncOperation<SpatialAnchorStore> result, auto asyncStatus) {
if (result.Status() != winrt::Windows::Foundation::AsyncStatus::Completed)
{
return;
}
const SpatialAnchorStore& store = result.GetResults();
if (store)
{
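// Clear the store first so that only a single anchor named "position" is persisted.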
store.Clear();
if (store.TrySave(L"position", position))
{
OutputDebugStringW(L"Saved cube position to SpatialAnchorStore.\n");
}
}
});
}
void SampleHostMain::RequestEyesPoseAccess()
{
try
{
auto asyncOperation = winrt::Windows::Perception::People::EyesPose::RequestAccessAsync();
asyncOperation.Completed(
[this](winrt::Windows::Foundation::IAsyncOperation<winrt::Windows::UI::Input::GazeInputAccessStatus> result, auto asyncStatus) {
winrt::Windows::UI::Input::GazeInputAccessStatus status = result.GetResults();
switch (status)
{
case winrt::Windows::UI::Input::GazeInputAccessStatus::Unspecified:
OutputDebugStringA("RequestEyesPoseAccess: Unspecified\n");
break;
case winrt::Windows::UI::Input::GazeInputAccessStatus::Allowed:
OutputDebugStringA("RequestEyesPoseAccess: Allowed\n");
break;
case winrt::Windows::UI::Input::GazeInputAccessStatus::DeniedByUser:
OutputDebugStringA("RequestEyesPoseAccess: DeniedByUser\n");
break;
case winrt::Windows::UI::Input::GazeInputAccessStatus::DeniedBySystem:
OutputDebugStringA("RequestEyesPoseAccess: DeniedBySystem\n");
break;
default:
break;
}
});
}
catch (winrt::hresult_error&)
{
}
}
void SampleHostMain::UnregisterHolographicEventHandlers()
{
if (m_holographicSpace != nullptr)
{
m_holographicSpace.CameraAdded(m_cameraAddedToken);
m_holographicSpace.CameraRemoved(m_cameraRemovedToken);
}
if (m_locator != nullptr)
{
m_locator.LocatabilityChanged(m_locatabilityChangedToken);
}
}
void SampleHostMain::ShutdownRemoteContext()
{
if (m_remoteContext != nullptr)
{
m_onConnectedEventRevoker.revoke();
m_onDisconnectedEventRevoker.revoke();
m_onSendFrameEventRevoker.revoke();
m_onDataChannelCreatedEventRevoker.revoke();
#ifdef ENABLE_CUSTOM_DATA_CHANNEL_SAMPLE
{
std::lock_guard lock(m_customDataChannelLock);
m_customChannelDataReceivedEventRevoker.revoke();
m_customChannelClosedEventRevoker.revoke();
m_customDataChannel = nullptr;
}
#endif
m_remoteContext.Close();
m_remoteContext = nullptr;
}
}
void SampleHostMain::OnDeviceLost()
{
m_spinningCubeRenderer->ReleaseDeviceDependentResources();
m_spatialInputRenderer->ReleaseDeviceDependentResources();
m_qrCodeRenderer->ReleaseDeviceDependentResources();
if (m_spatialSurfaceMeshRenderer)
{
m_spatialSurfaceMeshRenderer->ReleaseDeviceDependentResources();
}
}
void SampleHostMain::OnDeviceRestored()
{
m_spinningCubeRenderer->CreateDeviceDependentResources();
m_spatialInputRenderer->CreateDeviceDependentResources();
m_qrCodeRenderer->CreateDeviceDependentResources();
if (m_spatialSurfaceMeshRenderer)
{
m_spatialSurfaceMeshRenderer->CreateDeviceDependentResources();
}
}
void SampleHostMain::OnCameraAdded(const HolographicSpace& sender, const HolographicSpaceCameraAddedEventArgs& args)
{
winrt::Windows::Foundation::Deferral deferral = args.GetDeferral();
auto holographicCamera = args.Camera();
create_task([this, deferral, holographicCamera]() {
m_deviceResources->AddHolographicCamera(holographicCamera);
deferral.Complete();
});
}
void SampleHostMain::OnCameraRemoved(const HolographicSpace& sender, const HolographicSpaceCameraRemovedEventArgs& args)
{
m_deviceResources->RemoveHolographicCamera(args.Camera());
}
void SampleHostMain::OnLocatabilityChanged(const SpatialLocator& sender, const winrt::Windows::Foundation::IInspectable& args)
{
const wchar_t* locatability = L"";
switch (sender.Locatability())
{
case SpatialLocatability::Unavailable:
locatability = L"Unavailable";
break;
case SpatialLocatability::PositionalTrackingActivating:
locatability = L"PositionalTrackingActivating";
break;
case SpatialLocatability::OrientationOnly:
locatability = L"OrientationOnly";
break;
case SpatialLocatability::PositionalTrackingInhibited:
locatability = L"PositionalTrackingInhibited";
break;
case SpatialLocatability::PositionalTrackingActive:
locatability = L"PositionalTrackingActive";
break;
}
winrt::hstring message = L"Positional tracking is " + winrt::to_hstring(locatability) + L".\n";
OutputDebugStringW(message.data());
}
void SampleHostMain::WindowCreateSwapChain(const winrt::com_ptr<ID3D11Device1>& device)
{
std::lock_guard _lg(m_deviceLock);
DXGI_SWAP_CHAIN_DESC1 desc = {0};
desc.Width = m_width;
desc.Height = m_height;
desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
desc.Stereo = false;
desc.SampleDesc.Count = 1; // Don't use multi-sampling.
desc.SampleDesc.Quality = 0;
desc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
desc.BufferCount = 2; // Double buffered
desc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL;
desc.Flags = 0;
desc.AlphaMode = DXGI_ALPHA_MODE_IGNORE;
desc.Scaling = DXGI_SCALING_STRETCH;
m_swapChain = nullptr;
if (auto window = m_window.lock())
{
m_swapChain = window->CreateSwapChain(device, &desc);
}
}
void SampleHostMain::WindowPresentSwapChain()
{
HRESULT hr = m_swapChain->Present(0, 0);
if (hr == DXGI_ERROR_DEVICE_REMOVED || hr == DXGI_ERROR_DEVICE_RESET)
{
// The D3D device is lost.
// This should be handled after the frame is complete.
m_swapChain = nullptr;
}
else
{
winrt::check_hresult(hr);
}
}
void SampleHostMain::WindowUpdateTitle()
{
std::wstring title = TITLE_TEXT;
std::wstring separator = TITLE_SEPARATOR;
uint32_t fps = min(120, m_framesPerSecond);
title += separator + std::to_wstring(fps) + L" fps";
// Title | {ip} | {State} [| Press Space to Connect] [| Preview Disabled (p toggles)]
title += separator + m_hostname;
{
if (m_remoteContext)
{
auto connectionState = m_remoteContext.ConnectionState();
title += separator + (m_isInitialized ? StreamerConnectionStateToString(connectionState) : L"Initializing");
title += separator + ((connectionState == ConnectionState::Disconnected) ? TITLE_CONNECT_TEXT : TITLE_DISCONNECT_TEXT);
}
else
{
title += separator + TITLE_CONNECT_TEXT;
}
title += separator + (m_showPreview ? TITLE_DISABLE_PREVIEW_TEXT : TITLE_ENABLE_PREVIEW_TEXT);
}
if (auto window = m_window.lock())
{
window->SetWindowTitle(title);
}
}
#ifdef ENABLE_CUSTOM_DATA_CHANNEL_SAMPLE
void SampleHostMain::OnCustomDataChannelDataReceived()
{
// TODO: React on data received via the custom data channel here.
}
void SampleHostMain::OnCustomDataChannelClosed()
{
std::lock_guard lock(m_customDataChannelLock);
if (m_customDataChannel)
{
m_customChannelDataReceivedEventRevoker.revoke();
m_customChannelClosedEventRevoker.revoke();
m_customDataChannel = nullptr;
}
}
#endif

View file

@ -0,0 +1,210 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "Common/DeviceResources.h"
#include "Content/PerceptionDeviceHandler.h"
#include "Content/QRCodeRenderer.h"
#include "Content/SpatialInputHandler.h"
#include "Content/SpatialInputRenderer.h"
#include "Content/SpatialSurfaceMeshRenderer.h"
#include "Content/SpinningCubeRenderer.h"
#include <memory>
#include <winrt/Microsoft.Holographic.AppRemoting.h>
#define INITIAL_WINDOW_WIDTH 1280
#define INITIAL_WINDOW_HEIGHT 720
#define TITLE_TEXT L"Remoting Host Sample"
#define TITLE_SEPARATOR L" | "
#define TITLE_CONNECT_TEXT L"Press Space To Connect"
#define TITLE_DISCONNECT_TEXT L"Press D to Disconnect"
#define TITLE_ENABLE_PREVIEW_TEXT L"Preview Disabled (press P to enable)"
#define TITLE_DISABLE_PREVIEW_TEXT L"Preview Enabled (press P to disable)"
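// Uncomment the following define to enable the custom data channel code in this sample.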
// #define ENABLE_CUSTOM_DATA_CHANNEL_SAMPLE
class SampleHostMain : public std::enable_shared_from_this<SampleHostMain>, public DXHelper::IDeviceNotify
{
public:
struct IWindow
{
virtual winrt::com_ptr<IDXGISwapChain1>
CreateSwapChain(const winrt::com_ptr<ID3D11Device1>& device, const DXGI_SWAP_CHAIN_DESC1* desc) = 0;
virtual void SetWindowTitle(std::wstring title) = 0;
};
public:
SampleHostMain(std::weak_ptr<IWindow> window);
~SampleHostMain();
// Creates a HolographicFrame and updates the content.
winrt::Windows::Graphics::Holographic::HolographicFrame Update();
// Renders the current frame to each holographic camera and presents it.
void Render(winrt::Windows::Graphics::Holographic::HolographicFrame holographicFrame);
const std::shared_ptr<DXHelper::DeviceResources>& GetDeviceResources()
{
return m_deviceResources;
}
void SetHostOptions(bool listen, const std::wstring& hostname, uint32_t port);
// Responds to key presses.
void OnKeyPress(char key);
// Responds to window changing its size.
void OnResize(int width, int height);
// Responds to speech recognition results.
void OnRecognizedSpeech(const winrt::hstring& recognizedText);
// IDeviceNotify methods
virtual void OnDeviceLost();
virtual void OnDeviceRestored();
private:
// Initializes the RemoteContext and starts connecting or listening to the currently set network address
void InitializeRemoteContextAndConnectOrListen();
// Initializes the HolographicSpace and creates graphics device dependent resources
void CreateHolographicSpaceAndDeviceResources();
// Connects to or listens on the currently set network address
void ConnectOrListen();
// Loads the currently saved position of the spinning cube.
void LoadPosition();
// Saves the position of the spinning cube.
void SavePosition();
// Requests access to eyes pose data.
void RequestEyesPoseAccess();
// Clears event registration state. Used when changing to a new HolographicSpace
// and when tearing down SampleHostMain.
void UnregisterHolographicEventHandlers();
// Shuts down the RemoteContext (which will also disconnect, if currently connected)
void ShutdownRemoteContext();
// Creates a SwapChain for the host window
void WindowCreateSwapChain(const winrt::com_ptr<ID3D11Device1>& device);
// Presents the SwapChain of the host window
void WindowPresentSwapChain();
// Updates the title of the host window
void WindowUpdateTitle();
// Asynchronously creates resources for new holographic cameras.
void OnCameraAdded(
const winrt::Windows::Graphics::Holographic::HolographicSpace& sender,
const winrt::Windows::Graphics::Holographic::HolographicSpaceCameraAddedEventArgs& args);
// Synchronously releases resources for holographic cameras that are no longer
// attached to the system.
void OnCameraRemoved(
const winrt::Windows::Graphics::Holographic::HolographicSpace& sender,
const winrt::Windows::Graphics::Holographic::HolographicSpaceCameraRemovedEventArgs& args);
// Used to notify the app when the positional tracking state changes.
void OnLocatabilityChanged(
const winrt::Windows::Perception::Spatial::SpatialLocator& sender, const winrt::Windows::Foundation::IInspectable& args);
#ifdef ENABLE_CUSTOM_DATA_CHANNEL_SAMPLE
void OnCustomDataChannelDataReceived();
void OnCustomDataChannelClosed();
#endif
private:
bool m_isInitialized = false;
std::chrono::high_resolution_clock::time_point m_startTime = std::chrono::high_resolution_clock::now();
// RemoteContext used to connect with a Holographic Remoting player and display rendered frames
winrt::Microsoft::Holographic::AppRemoting::RemoteContext m_remoteContext = nullptr;
// Represents the holographic space around the user.
winrt::Windows::Graphics::Holographic::HolographicSpace m_holographicSpace = nullptr;
// Cached pointer to device resources.
std::shared_ptr<DXHelper::DeviceResources> m_deviceResources;
// SpatialLocator that is attached to the primary camera.
winrt::Windows::Perception::Spatial::SpatialLocator m_locator = nullptr;
// A reference frame that is positioned in the world.
winrt::Windows::Perception::Spatial::SpatialStationaryFrameOfReference m_referenceFrame = nullptr;
// Renders a colorful holographic cube that's 20 centimeters wide. This sample content
// is used to demonstrate world-locked rendering.
std::unique_ptr<SpinningCubeRenderer> m_spinningCubeRenderer;
// Renders the surfaces observed in the user's surroundings.
std::unique_ptr<SpatialSurfaceMeshRenderer> m_spatialSurfaceMeshRenderer;
// Listens for the Pressed spatial input event.
std::shared_ptr<SpatialInputHandler> m_spatialInputHandler;
std::unique_ptr<SpatialInputRenderer> m_spatialInputRenderer;
// Handles perception root objects and their events/updates
std::shared_ptr<PerceptionDeviceHandler> m_perceptionDeviceHandler;
std::unique_ptr<QRCodeRenderer> m_qrCodeRenderer;
// Event registration tokens.
winrt::event_token m_cameraAddedToken;
winrt::event_token m_cameraRemovedToken;
winrt::event_token m_locatabilityChangedToken;
// Event registration revokers
winrt::Microsoft::Holographic::AppRemoting::IRemoteContext::OnConnected_revoker m_onConnectedEventRevoker;
winrt::Microsoft::Holographic::AppRemoting::IRemoteContext::OnDisconnected_revoker m_onDisconnectedEventRevoker;
winrt::Microsoft::Holographic::AppRemoting::IRemoteContext::OnSendFrame_revoker m_onSendFrameEventRevoker;
winrt::Microsoft::Holographic::AppRemoting::IRemoteContext::OnDataChannelCreated_revoker m_onDataChannelCreatedEventRevoker;
winrt::Microsoft::Holographic::AppRemoting::IRemoteSpeech::OnRecognizedSpeech_revoker m_onRecognizedSpeechRevoker;
// Host options
std::wstring m_hostname;
uint32_t m_port{0};
bool m_showPreview = true;
bool m_listen{false};
// Host window related variables
std::weak_ptr<IWindow> m_window;
int m_width = INITIAL_WINDOW_WIDTH;
int m_height = INITIAL_WINDOW_HEIGHT;
std::chrono::high_resolution_clock::time_point m_windowTitleUpdateTime;
uint32_t m_framesPerSecond = 0;
std::recursive_mutex m_deviceLock;
winrt::com_ptr<IDXGISwapChain1> m_swapChain;
winrt::com_ptr<ID3D11Texture2D> m_spTexture;
#ifdef ENABLE_CUSTOM_DATA_CHANNEL_SAMPLE
std::recursive_mutex m_customDataChannelLock;
winrt::Microsoft::Holographic::AppRemoting::IDataChannel m_customDataChannel = nullptr;
winrt::Microsoft::Holographic::AppRemoting::IDataChannel::OnDataReceived_revoker m_customChannelDataReceivedEventRevoker;
winrt::Microsoft::Holographic::AppRemoting::IDataChannel::OnClosed_revoker m_customChannelClosedEventRevoker;
std::chrono::high_resolution_clock::time_point m_customDataChannelSendTime = std::chrono::high_resolution_clock::now();
#endif
};

View file

@ -0,0 +1,301 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "SampleHostWindowUWP.h"
#include "Common\DbgLog.h"
#include "Common\Speech.h"
#include <iterator>
#include <sstream>
#include <HolographicAppRemoting/Streamer.h>
#include <windows.graphics.directx.direct3d11.interop.h>
#include <winrt/Windows.ApplicationModel.Activation.h>
#include <winrt/Windows.UI.Core.h>
#include <winrt/Windows.UI.ViewManagement.h>
#define INITIAL_WINDOW_WIDTH 1280
#define INITIAL_WINDOW_HEIGHT 720
#define TITLE_SEPARATOR L" | "
using namespace winrt::Windows::ApplicationModel;
using namespace winrt::Windows::ApplicationModel::Activation;
using namespace winrt::Windows::ApplicationModel::Core;
using namespace winrt::Windows::Graphics::Display;
using namespace winrt::Microsoft::Holographic::AppRemoting;
using namespace winrt::Windows::UI::Core;
using namespace winrt::Windows::UI::ViewManagement;
// The main function is only used to initialize our IFrameworkView class.
int __stdcall wWinMain(HINSTANCE, HINSTANCE, PWSTR, int)
{
winrt::init_apartment();
CoreApplication::Run(winrt::make<SampleHostWindowUWPView>());
return 0;
}
SampleHostWindowUWP::SampleHostWindowUWP()
{
ApplicationView::PreferredLaunchViewSize(winrt::Windows::Foundation::Size(INITIAL_WINDOW_WIDTH, INITIAL_WINDOW_HEIGHT));
}
// The first method called when the IFrameworkView is being created.
void SampleHostWindowUWP::Initialize(const CoreApplicationView& applicationView)
{
CoreApplication::Suspending({this, &SampleHostWindowUWP::OnSuspending});
CoreApplication::Resuming({this, &SampleHostWindowUWP::OnResuming});
applicationView.Activated({this, &SampleHostWindowUWP::OnViewActivated});
m_main = std::make_shared<SampleHostMain>(weak_from_this());
}
// Called when the CoreWindow object is created (or re-created).
void SampleHostWindowUWP::SetWindow(const CoreWindow& window)
{
m_window = window;
window.SizeChanged({this, &SampleHostWindowUWP::OnWindowSizeChanged});
window.VisibilityChanged({this, &SampleHostWindowUWP::OnVisibilityChanged});
window.Closed({this, &SampleHostWindowUWP::OnWindowClosed});
window.KeyDown({this, &SampleHostWindowUWP::OnKeyDown});
}
// Initializes scene resources, or loads a previously saved app state.
void SampleHostWindowUWP::Load(const winrt::hstring& entryPoint)
{
}
// This method is called after the window becomes active.
void SampleHostWindowUWP::Run()
{
CoreWindow window = CoreWindow::GetForCurrentThread();
window.Activate();
while (!m_windowClosed)
{
if (m_windowVisible)
{
CoreWindow::GetForCurrentThread().Dispatcher().ProcessEvents(CoreProcessEventsOption::ProcessAllIfPresent);
if (m_main)
{
if (const HolographicFrame& holographicFrame = m_main->Update())
{
m_main->Render(holographicFrame);
}
}
}
else
{
CoreWindow::GetForCurrentThread().Dispatcher().ProcessEvents(CoreProcessEventsOption::ProcessOneAndAllPending);
}
}
}
// Required for IFrameworkView.
// Terminate events do not cause Uninitialize to be called. It will be called if your IFrameworkView
// class is torn down while the app is in the foreground.
void SampleHostWindowUWP::Uninitialize()
{
}
winrt::com_ptr<IDXGISwapChain1>
SampleHostWindowUWP::CreateSwapChain(const winrt::com_ptr<ID3D11Device1>& device, const DXGI_SWAP_CHAIN_DESC1* desc)
{
winrt::com_ptr<IDXGIDevice3> dxgiDevice;
device.as(dxgiDevice);
winrt::com_ptr<IDXGIAdapter> dxgiAdapter;
winrt::check_hresult(dxgiDevice->GetAdapter(dxgiAdapter.put()));
winrt::com_ptr<IDXGIFactory4> dxgiFactory;
winrt::check_hresult(dxgiAdapter->GetParent(__uuidof(dxgiFactory), dxgiFactory.put_void()));
winrt::com_ptr<IDXGISwapChain1> swapChain;
winrt::check_hresult(dxgiFactory->CreateSwapChainForCoreWindow(
device.get(), static_cast<::IUnknown*>(winrt::get_abi(m_window)), desc, nullptr, swapChain.put()));
return swapChain;
}
void SampleHostWindowUWP::SetWindowTitle(std::wstring title)
{
auto dispatcher = winrt::Windows::ApplicationModel::Core::CoreApplication::MainView().CoreWindow().Dispatcher();
auto doSetWindowTitle = [title]() {
try
{
if (auto view = winrt::Windows::UI::ViewManagement::ApplicationView::GetForCurrentView())
{
view.Title(winrt::to_hstring(title.c_str()));
}
}
catch (const winrt::hresult_error&)
{
}
};
if (dispatcher.HasThreadAccess())
{
doSetWindowTitle();
}
else
{
dispatcher.RunAsync(winrt::Windows::UI::Core::CoreDispatcherPriority::Normal, doSetWindowTitle);
}
}
// Application lifecycle event handlers.
void SampleHostWindowUWP::OnSuspending(const winrt::Windows::Foundation::IInspectable& sender, const SuspendingEventArgs& args)
{
}
void SampleHostWindowUWP::OnResuming(
const winrt::Windows::Foundation::IInspectable& sender, const winrt::Windows::Foundation::IInspectable& args)
{
// Restore any data or state that was unloaded on suspend. By default, data
// and state are persisted when resuming from suspend. Note that this event
// does not occur if the app was previously terminated.
// Insert your code here.
}
// Window event handlers.
void SampleHostWindowUWP::OnWindowSizeChanged(
const winrt::Windows::Foundation::IInspectable& sender, const WindowSizeChangedEventArgs& args)
{
winrt::Windows::Foundation::Size size = args.Size();
m_main->OnResize(static_cast<int>(size.Width + 0.5f), static_cast<int>(size.Height + 0.5f));
}
void SampleHostWindowUWP::OnVisibilityChanged(
const winrt::Windows::Foundation::IInspectable& sender, const VisibilityChangedEventArgs& args)
{
m_windowVisible = args.Visible();
}
void SampleHostWindowUWP::OnWindowClosed(const CoreWindow& window, const CoreWindowEventArgs& args)
{
m_windowClosed = true;
}
void SampleHostWindowUWP::OnKeyDown(const CoreWindow& window, const KeyEventArgs& args)
{
int32_t key = static_cast<int32_t>(args.VirtualKey());
if (key >= 0 && key <= 0xFF)
{
m_main->OnKeyPress(static_cast<char>(tolower(key)));
}
}
void SampleHostWindowUWP::OnViewActivated(CoreApplicationView const& sender, IActivatedEventArgs const& activationArgs)
{
using namespace winrt::Windows::ApplicationModel;
using namespace Activation;
using namespace Core;
std::wstring host = L"127.0.0.1";
int32_t port = 8265;
if (activationArgs != nullptr)
{
ActivationKind activationKind = activationArgs.Kind();
if (activationKind == Activation::ActivationKind::Launch)
{
LaunchActivatedEventArgs launchArgs = activationArgs.as<LaunchActivatedEventArgs>();
std::vector<std::wstring> args;
std::wistringstream stream(std::wstring(launchArgs.Arguments()));
std::copy(
std::istream_iterator<std::wstring, wchar_t>(stream),
std::istream_iterator<std::wstring, wchar_t>(),
std::back_inserter(args));
for (const std::wstring& arg : args)
{
if (arg.size() == 0)
continue;
size_t colonPos = arg.find(L':');
if (colonPos != std::wstring::npos)
{
std::wstring portStr = arg.substr(colonPos + 1);
host = arg.substr(0, colonPos);
port = std::wcstol(portStr.c_str(), nullptr, 10);
}
else
{
host = arg.c_str();
}
}
}
}
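// Non-authoritative example of the parsing above: activation arguments "192.168.0.10:8265" yield
// host = L"192.168.0.10" and port = 8265, while a bare "192.168.0.10" keeps the default port 8265.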
// check for invalid port numbers
if (port < 0 || port > 65535)
{
port = 0;
}
m_ipAddress = host;
m_port = port;
m_main->SetHostOptions(false, m_ipAddress, m_port);
// Run() won't start until the CoreWindow is activated.
sender.CoreWindow().Activate();
}
SampleHostWindowUWPView::SampleHostWindowUWPView()
{
m_window = std::make_shared<SampleHostWindowUWP>();
}
winrt::Windows::ApplicationModel::Core::IFrameworkView SampleHostWindowUWPView::CreateView()
{
return *this;
}
void SampleHostWindowUWPView::Initialize(const winrt::Windows::ApplicationModel::Core::CoreApplicationView& applicationView)
{
m_window->Initialize(applicationView);
}
void SampleHostWindowUWPView::SetWindow(const winrt::Windows::UI::Core::CoreWindow& window)
{
m_window->SetWindow(window);
}
void SampleHostWindowUWPView::Load(const winrt::hstring& entryPoint)
{
m_window->Load(entryPoint);
}
void SampleHostWindowUWPView::Run()
{
m_window->Run();
}
void SampleHostWindowUWPView::Uninitialize()
{
m_window->Uninitialize();
}


@ -0,0 +1,89 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "SampleHostMain.h"
#include "Common\DeviceResources.h"
#include <winrt/Microsoft.Holographic.AppRemoting.h>
#include <winrt/Windows.ApplicationModel.Core.h>
// Main entry point for our app. Connects the app with the Windows shell and handles application lifecycle events.
class SampleHostWindowUWP : public std::enable_shared_from_this<SampleHostWindowUWP>, public SampleHostMain::IWindow
{
public:
SampleHostWindowUWP();
// IFrameworkView methods
virtual void Initialize(const winrt::Windows::ApplicationModel::Core::CoreApplicationView& applicationView);
virtual void SetWindow(const winrt::Windows::UI::Core::CoreWindow& window);
virtual void Load(const winrt::hstring& entryPoint);
virtual void Run();
virtual void Uninitialize();
// SampleHostMain::IWindow methods.
virtual winrt::com_ptr<IDXGISwapChain1>
CreateSwapChain(const winrt::com_ptr<ID3D11Device1>& device, const DXGI_SWAP_CHAIN_DESC1* desc) override;
virtual void SetWindowTitle(std::wstring title) override;
protected:
// Application lifecycle event handlers.
void OnSuspending(
const winrt::Windows::Foundation::IInspectable& sender, const winrt::Windows::ApplicationModel::SuspendingEventArgs& args);
void OnResuming(const winrt::Windows::Foundation::IInspectable& sender, const winrt::Windows::Foundation::IInspectable& args);
// Activation handling
void OnViewActivated(
winrt::Windows::ApplicationModel::Core::CoreApplicationView const& sender,
winrt::Windows::ApplicationModel::Activation::IActivatedEventArgs const& args);
// Window event handlers.
void OnWindowSizeChanged(
const winrt::Windows::Foundation::IInspectable& sender, const winrt::Windows::UI::Core::WindowSizeChangedEventArgs& args);
void OnVisibilityChanged(
const winrt::Windows::Foundation::IInspectable& sender, const winrt::Windows::UI::Core::VisibilityChangedEventArgs& args);
void OnWindowClosed(const winrt::Windows::UI::Core::CoreWindow& window, const winrt::Windows::UI::Core::CoreWindowEventArgs& args);
void OnKeyDown(const winrt::Windows::UI::Core::CoreWindow& window, const winrt::Windows::UI::Core::KeyEventArgs& args);
private:
winrt::Windows::UI::Core::CoreWindow m_window = nullptr;
std::shared_ptr<SampleHostMain> m_main;
std::wstring m_ipAddress;
int32_t m_port = 8265;
bool m_windowClosed = false;
bool m_windowVisible = true;
};
class SampleHostWindowUWPView : public winrt::implements<
SampleHostWindowUWPView,
winrt::Windows::ApplicationModel::Core::IFrameworkViewSource,
winrt::Windows::ApplicationModel::Core::IFrameworkView>
{
public:
SampleHostWindowUWPView();
// IFrameworkViewSource methods.
winrt::Windows::ApplicationModel::Core::IFrameworkView CreateView();
// IFrameworkView methods.
virtual void Initialize(const winrt::Windows::ApplicationModel::Core::CoreApplicationView& applicationView);
virtual void SetWindow(const winrt::Windows::UI::Core::CoreWindow& window);
virtual void Load(const winrt::hstring& entryPoint);
virtual void Run();
virtual void Uninitialize();
private:
std::shared_ptr<SampleHostWindowUWP> m_window;
};


@ -0,0 +1,247 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "SampleHostWindowWin32.h"
#include <HolographicAppRemoting\Streamer.h>
#include "Common\DbgLog.h"
#include <DirectXColors.h>
#include <windows.graphics.directx.direct3d11.interop.h>
#define WINDOWCLASSNAME L"SampleHostWindowWin32Class"
LRESULT CALLBACK wndProc(HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam)
{
static SampleHostWindowWin32* s_sampleHostWindow;
LRESULT result = 0;
switch (msg)
{
case WM_CREATE:
{
CREATESTRUCT* cs = reinterpret_cast<CREATESTRUCT*>(lParam);
s_sampleHostWindow = reinterpret_cast<SampleHostWindowWin32*>(cs->lpCreateParams);
RECT clientRect;
GetClientRect(hWnd, &clientRect);
s_sampleHostWindow->OnResize(clientRect.right - clientRect.left, clientRect.bottom - clientRect.top);
result = 0;
}
break;
case WM_WINDOWPOSCHANGED:
{
auto windowPos = reinterpret_cast<WINDOWPOS*>(lParam);
if ((windowPos->flags & SWP_NOSIZE) == 0)
{
RECT clientRect;
GetClientRect(hWnd, &clientRect);
s_sampleHostWindow->OnResize(clientRect.right - clientRect.left, clientRect.bottom - clientRect.top);
}
result = 0;
}
break;
case WM_DESTROY:
{
s_sampleHostWindow = nullptr;
result = 0;
PostQuitMessage(0);
}
break;
case WM_CLOSE:
{
DestroyWindow(hWnd);
result = 0;
}
break;
case WM_CHAR:
{
const int key = tolower(static_cast<int>(wParam));
s_sampleHostWindow->OnKeyPress(static_cast<char>(key));
}
break;
default:
result = DefWindowProc(hWnd, msg, wParam, lParam);
break;
}
return result;
}
void SampleHostWindowWin32::Initialize(bool listen, const std::wstring& hostname, uint32_t port)
{
m_main = std::make_shared<SampleHostMain>(weak_from_this());
m_main->SetHostOptions(listen, hostname, port);
}
void SampleHostWindowWin32::InitializeHwnd(HWND hWnd)
{
m_hWnd = hWnd;
}
void SampleHostWindowWin32::Tick()
{
if (const HolographicFrame& holographicFrame = m_main->Update())
{
m_main->Render(holographicFrame);
}
}
void SampleHostWindowWin32::OnKeyPress(char key)
{
m_main->OnKeyPress(key);
}
void SampleHostWindowWin32::OnResize(int width, int height)
{
m_main->OnResize(width, height);
}
winrt::com_ptr<IDXGISwapChain1>
SampleHostWindowWin32::CreateSwapChain(const winrt::com_ptr<ID3D11Device1>& device, const DXGI_SWAP_CHAIN_DESC1* desc)
{
winrt::com_ptr<IDXGIDevice1> dxgiDevice;
device.as(dxgiDevice);
winrt::com_ptr<IDXGIAdapter> dxgiAdapter;
winrt::check_hresult(dxgiDevice->GetAdapter(dxgiAdapter.put()));
winrt::com_ptr<IDXGIFactory2> dxgiFactory;
winrt::check_hresult(dxgiAdapter->GetParent(__uuidof(IDXGIFactory2), dxgiFactory.put_void()));
winrt::check_hresult(dxgiFactory->MakeWindowAssociation(m_hWnd, DXGI_MWA_NO_ALT_ENTER));
winrt::com_ptr<IDXGISwapChain1> swapChain = nullptr;
winrt::check_hresult(dxgiFactory->CreateSwapChainForHwnd(device.get(), m_hWnd, desc, nullptr, nullptr, swapChain.put()));
return swapChain;
}
void SampleHostWindowWin32::SetWindowTitle(std::wstring title)
{
if (m_hWnd)
{
if (!SetWindowTextW(m_hWnd, title.c_str()))
{
winrt::check_hresult(HRESULT_FROM_WIN32(GetLastError()));
}
}
}
int main(Platform::Array<Platform::String ^> ^ args)
{
winrt::init_apartment();
bool listen{false};
std::wstring host;
uint32_t port{0};
for (unsigned int i = 1; i < args->Length; ++i)
{
if (args[i]->Length() == 0)
continue;
std::wstring arg = args[i]->Data();
if (arg[0] == '-')
{
std::wstring param = arg.substr(1);
std::transform(param.begin(), param.end(), param.begin(), ::tolower);
if (param == L"listen")
{
listen = true;
}
continue;
}
size_t colonPos = arg.find(L':');
if (colonPos != std::wstring::npos)
{
std::wstring portStr = arg.substr(colonPos + 1);
host = arg.substr(0, colonPos);
port = std::wcstol(portStr.c_str(), nullptr, 10);
}
else
{
host = arg.c_str();
port = 0;
}
}
std::shared_ptr<SampleHostWindowWin32> sampleHostWindow = std::make_shared<SampleHostWindowWin32>();
sampleHostWindow->Initialize(listen, host, port);
WNDCLASSEXW wcex = {};
wcex.cbSize = sizeof(wcex);
wcex.style = CS_HREDRAW | CS_VREDRAW;
wcex.lpfnWndProc = wndProc;
wcex.hInstance = 0;
wcex.hIcon = LoadIcon(0, IDI_APPLICATION);
wcex.hCursor = LoadCursor(nullptr, IDC_ARROW);
wcex.hbrBackground = static_cast<HBRUSH>(GetStockObject(NULL_BRUSH));
wcex.lpszClassName = WINDOWCLASSNAME;
RegisterClassExW(&wcex);
RECT rc = {0, 0, INITIAL_WINDOW_WIDTH, INITIAL_WINDOW_HEIGHT};
AdjustWindowRectEx(&rc, WS_OVERLAPPEDWINDOW, FALSE, 0);
std::wstring windowName = TITLE_TEXT;
HWND hWnd = CreateWindowW(
WINDOWCLASSNAME,
windowName.c_str(),
WS_OVERLAPPEDWINDOW,
CW_USEDEFAULT,
CW_USEDEFAULT,
rc.right - rc.left,
rc.bottom - rc.top,
nullptr,
nullptr,
0,
sampleHostWindow.get());
RECT clientRect;
GetClientRect(hWnd, &clientRect);
sampleHostWindow->InitializeHwnd(hWnd);
ShowWindow(hWnd, SW_SHOWNORMAL);
bool quit = false;
while (!quit)
{
MSG msg = {0};
if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
{
if (msg.message == WM_QUIT)
{
quit = true;
}
TranslateMessage(&msg);
DispatchMessage(&msg);
}
else
{
sampleHostWindow->Tick();
}
}
return 0;
}
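For orientation, a sketch of how the argument parsing in main maps command lines onto the SetHostOptions call (the executable name is an assumption, not part of the sample):

```cpp
// SampleHostWin32.exe 192.168.0.10:8265   ->  SetHostOptions(false, L"192.168.0.10", 8265)
// SampleHostWin32.exe 192.168.0.10        ->  SetHostOptions(false, L"192.168.0.10", 0)
// SampleHostWin32.exe -listen             ->  SetHostOptions(true,  L"", 0)
```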


@ -0,0 +1,39 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
//#include <winrt/Windows.Graphics.Holographic.h>
#include "SampleHostMain.h"
class SampleHostWindowWin32 : public std::enable_shared_from_this<SampleHostWindowWin32>, public SampleHostMain::IWindow
{
public:
void Initialize(bool listen, const std::wstring& host, uint32_t port);
void InitializeHwnd(HWND hWnd);
void Tick();
void OnKeyPress(char key);
void OnResize(int width, int height);
virtual winrt::com_ptr<IDXGISwapChain1>
CreateSwapChain(const winrt::com_ptr<ID3D11Device1>& device, const DXGI_SWAP_CHAIN_DESC1* desc) override;
virtual void SetWindowTitle(std::wstring title) override;
private:
HWND m_hWnd = 0;
std::shared_ptr<SampleHostMain> m_main;
};


@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8" ?>
<grammar xmlns="http://www.w3.org/2001/06/grammar" version="1.0" xml:lang="en-US" root="rootRule">
<!-- The first way to load a grammar is by specifying a grammar.xml file that follows the https://www.w3.org/TR/speech-grammar/ standard. -->
<!-- See https://docs.microsoft.com/en-us/previous-versions/office/developer/speech-technologies/hh361658(v=office.14) for an example -->
<rule id="rootRule">
<one-of>
<item>Load position</item>
<item>Save position</item>
</one-of>
</rule>
</grammar>


@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Microsoft.Holographic.Remoting" version="2.0.7" targetFramework="native" />
</packages>


@ -0,0 +1,12 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"


@ -0,0 +1,21 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <windows.h>
#include <Unknwn.h>
#include <robuffer.h>
#include <stdint.h>
#include <winrt/base.h>

Binary files added (contents not shown in this view):
* hostsampleapp/uwp/Assets/LockScreenLogo.scale-200.png (1.4 KiB)
* hostsampleapp/uwp/Assets/SplashScreen.scale-200.png (7.5 KiB)
* hostsampleapp/uwp/Assets/Square150x150Logo.scale-200.png (2.9 KiB)
* hostsampleapp/uwp/Assets/Square44x44Logo.scale-200.png (1.6 KiB)
* (path not shown) (1.2 KiB)
* hostsampleapp/uwp/Assets/StoreLogo.png (1.4 KiB)
* hostsampleapp/uwp/Assets/Wide310x150Logo.scale-200.png (3.1 KiB)
@ -0,0 +1,251 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "CameraResources.h"
#include "DeviceResources.h"
#include "DirectXHelper.h"
#include <windows.graphics.directx.direct3d11.interop.h>
using namespace DirectX;
using namespace winrt::Windows::Graphics::Holographic;
using namespace winrt::Windows::Perception::Spatial;
DXHelper::CameraResources::CameraResources(const HolographicCamera& camera)
: m_holographicCamera(camera)
, m_isStereo(camera.IsStereo())
, m_d3dRenderTargetSize(camera.RenderTargetSize())
{
m_d3dViewport = CD3D11_VIEWPORT(0.f, 0.f, m_d3dRenderTargetSize.Width, m_d3dRenderTargetSize.Height);
};
// Updates resources associated with a holographic camera's swap chain.
// The app does not access the swap chain directly, but it does create
// resource views for the back buffer.
void DXHelper::CameraResources::CreateResourcesForBackBuffer(
DXHelper::DeviceResources* pDeviceResources, const HolographicCameraRenderingParameters& cameraParameters)
{
const auto device = pDeviceResources->GetD3DDevice();
// Get a DXGI interface for the holographic camera's back buffer.
// Holographic cameras do not provide the DXGI swap chain, which is owned
// by the system. The Direct3D back buffer resource is provided using WinRT
// interop APIs.
winrt::com_ptr<ID3D11Resource> resource;
{
winrt::com_ptr<::IInspectable> inspectable = cameraParameters.Direct3D11BackBuffer().as<::IInspectable>();
winrt::com_ptr<Windows::Graphics::DirectX::Direct3D11::IDirect3DDxgiInterfaceAccess> dxgiInterfaceAccess;
HRESULT hr = inspectable->QueryInterface(__uuidof(dxgiInterfaceAccess), dxgiInterfaceAccess.put_void());
if (FAILED(hr))
{
winrt::throw_hresult(hr);
}
hr = dxgiInterfaceAccess->GetInterface(__uuidof(resource), resource.put_void());
if (FAILED(hr))
{
winrt::throw_hresult(hr);
}
}
// Get a Direct3D interface for the holographic camera's back buffer.
winrt::com_ptr<ID3D11Texture2D> cameraBackBuffer;
resource.as(cameraBackBuffer);
// Determine if the back buffer has changed. If so, ensure that the render target view
// is for the current back buffer.
if (m_d3dBackBuffer != cameraBackBuffer)
{
// This can change every frame as the system moves to the next buffer in the
// swap chain. This mode of operation will occur when certain rendering modes
// are activated.
m_d3dBackBuffer = cameraBackBuffer;
// Get the DXGI format for the back buffer.
// This information can be accessed by the app using CameraResources::GetBackBufferDXGIFormat().
D3D11_TEXTURE2D_DESC backBufferDesc;
m_d3dBackBuffer->GetDesc(&backBufferDesc);
m_dxgiFormat = backBufferDesc.Format;
D3D11_RENDER_TARGET_VIEW_DESC viewDesc = {};
viewDesc.ViewDimension = backBufferDesc.ArraySize > 1 ? D3D11_RTV_DIMENSION_TEXTURE2DARRAY : D3D11_RTV_DIMENSION_TEXTURE2D;
viewDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;
if (backBufferDesc.ArraySize > 1)
{
viewDesc.Texture2DArray.ArraySize = backBufferDesc.ArraySize;
}
// Create a render target view of the back buffer.
// Creating this resource is inexpensive, and is better than keeping track of
// the back buffers in order to pre-allocate render target views for each one.
m_d3dRenderTargetView = nullptr;
winrt::check_hresult(device->CreateRenderTargetView(m_d3dBackBuffer.get(), &viewDesc, m_d3dRenderTargetView.put()));
// Check for render target size changes.
winrt::Windows::Foundation::Size currentSize = m_holographicCamera.RenderTargetSize();
if (m_d3dRenderTargetSize != currentSize)
{
// Set render target size.
m_d3dRenderTargetSize = currentSize;
// A new depth stencil view is also needed.
m_d3dDepthStencilView = nullptr;
}
}
// Refresh depth stencil resources, if needed.
if (m_d3dDepthStencilView == nullptr)
{
// Create a depth stencil view for use with 3D rendering if needed.
CD3D11_TEXTURE2D_DESC depthStencilDesc(
DXGI_FORMAT_D16_UNORM,
static_cast<UINT>(m_d3dRenderTargetSize.Width),
static_cast<UINT>(m_d3dRenderTargetSize.Height),
m_isStereo ? 2 : 1, // Create two textures when rendering in stereo.
1, // Use a single mipmap level.
D3D11_BIND_DEPTH_STENCIL);
winrt::com_ptr<ID3D11Texture2D> depthStencil;
winrt::check_hresult(device->CreateTexture2D(&depthStencilDesc, nullptr, depthStencil.put()));
CD3D11_DEPTH_STENCIL_VIEW_DESC depthStencilViewDesc(
m_isStereo ? D3D11_DSV_DIMENSION_TEXTURE2DARRAY : D3D11_DSV_DIMENSION_TEXTURE2D);
winrt::check_hresult(device->CreateDepthStencilView(depthStencil.get(), &depthStencilViewDesc, m_d3dDepthStencilView.put()));
}
// Create the constant buffer, if needed.
if (m_viewProjectionConstantBuffer == nullptr)
{
// Create a constant buffer to store view and projection matrices for the camera.
CD3D11_BUFFER_DESC constantBufferDesc(sizeof(ViewProjectionConstantBuffer), D3D11_BIND_CONSTANT_BUFFER);
winrt::check_hresult(device->CreateBuffer(&constantBufferDesc, nullptr, m_viewProjectionConstantBuffer.put()));
}
}
// Releases resources associated with a back buffer.
void DXHelper::CameraResources::ReleaseResourcesForBackBuffer(DXHelper::DeviceResources* pDeviceResources)
{
// Release camera-specific resources.
m_d3dBackBuffer = nullptr;
m_d3dRenderTargetView = nullptr;
m_d3dDepthStencilView = nullptr;
m_viewProjectionConstantBuffer = nullptr;
// Ensure system references to the back buffer are released by clearing the render
// target from the graphics pipeline state, and then flushing the Direct3D context.
pDeviceResources->UseD3DDeviceContext([](auto context) {
ID3D11RenderTargetView* nullViews[D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT] = {nullptr};
context->OMSetRenderTargets(ARRAYSIZE(nullViews), nullViews, nullptr);
context->Flush();
});
}
// Updates the view/projection constant buffer for a holographic camera.
void DXHelper::CameraResources::UpdateViewProjectionBuffer(
std::shared_ptr<DXHelper::DeviceResources> deviceResources,
const HolographicCameraPose& cameraPose,
const SpatialCoordinateSystem& coordinateSystem)
{
// The system changes the viewport on a per-frame basis for system optimizations.
m_d3dViewport =
CD3D11_VIEWPORT(cameraPose.Viewport().X, cameraPose.Viewport().Y, cameraPose.Viewport().Width, cameraPose.Viewport().Height);
// The projection transform for each frame is provided by the HolographicCameraPose.
auto cameraProjectionTransform = cameraPose.ProjectionTransform();
// Get a container object with the view and projection matrices for the given
// pose in the given coordinate system.
auto viewTransformContainer = cameraPose.TryGetViewTransform(coordinateSystem);
// If TryGetViewTransform returns a null pointer, that means the pose and coordinate
// system cannot be understood relative to one another; content cannot be rendered
// in this coordinate system for the duration of the current frame.
// This usually means that positional tracking is not active for the current frame, in
// which case it is possible to use a SpatialLocatorAttachedFrameOfReference to render
// content that is not world-locked instead.
DXHelper::ViewProjectionConstantBuffer viewProjectionConstantBufferData = {};
bool viewTransformAcquired = viewTransformContainer != nullptr;
if (viewTransformAcquired)
{
// Otherwise, the set of view transforms can be retrieved.
auto viewCoordinateSystemTransform = viewTransformContainer.Value();
// Update the view matrices. Holographic cameras (such as Microsoft HoloLens) are
// constantly moving relative to the world. The view matrices need to be updated
// every frame.
XMStoreFloat4x4(
&viewProjectionConstantBufferData.viewProjection[0],
XMMatrixTranspose(XMLoadFloat4x4(&viewCoordinateSystemTransform.Left) * XMLoadFloat4x4(&cameraProjectionTransform.Left)));
XMStoreFloat4x4(
&viewProjectionConstantBufferData.viewProjection[1],
XMMatrixTranspose(XMLoadFloat4x4(&viewCoordinateSystemTransform.Right) * XMLoadFloat4x4(&cameraProjectionTransform.Right)));
}
// Use the D3D device context to update Direct3D device-based resources.
deviceResources->UseD3DDeviceContext([&](auto context) {
// Loading is asynchronous. Resources must be created before they can be updated.
if (context == nullptr || m_viewProjectionConstantBuffer == nullptr || !viewTransformAcquired)
{
m_framePending = false;
}
else
{
// Update the view and projection matrices.
context->UpdateSubresource(m_viewProjectionConstantBuffer.get(), 0, nullptr, &viewProjectionConstantBufferData, 0, 0);
m_framePending = true;
}
});
}
// Gets the view-projection constant buffer for the HolographicCamera and attaches it
// to the shader pipeline.
bool DXHelper::CameraResources::AttachViewProjectionBuffer(std::shared_ptr<DXHelper::DeviceResources> deviceResources)
{
// This method uses Direct3D device-based resources.
return deviceResources->UseD3DDeviceContext([&](auto context) {
// Loading is asynchronous. Resources must be created before they can be updated.
// Cameras can also be added asynchronously, in which case they must be initialized
// before they can be used.
if (context == nullptr || m_viewProjectionConstantBuffer == nullptr || m_framePending == false)
{
return false;
}
// Set the viewport for this camera.
context->RSSetViewports(1, &m_d3dViewport);
// Send the constant buffer to the vertex shader.
ID3D11Buffer* pBuffer = m_viewProjectionConstantBuffer.get();
context->VSSetConstantBuffers(1, 1, &pBuffer);
// The template includes a pass-through geometry shader that is used by
// default on systems that don't support the D3D11_FEATURE_D3D11_OPTIONS3::
// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer extension. The shader
// will be enabled at run-time on systems that require it.
// If your app will also use the geometry shader for other tasks and those
// tasks require the view/projection matrix, uncomment the following line
// of code to send the constant buffer to the geometry shader as well.
// context->GSSetConstantBuffers(1, 1, &pBuffer);
m_framePending = false;
return true;
});
}
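A condensed sketch of how these two methods are meant to be paired per camera and per frame; the names cameraResources, deviceResources, cameraPose, and coordinateSystem are assumptions, and the real sequencing lives in SampleHostMain, which is not shown here:

```cpp
cameraResources->UpdateViewProjectionBuffer(deviceResources, cameraPose, coordinateSystem);
deviceResources->UseD3DDeviceContext([&](auto context) {
    ID3D11RenderTargetView* targets[1] = {cameraResources->GetBackBufferRenderTargetView()};
    context->OMSetRenderTargets(1, targets, cameraResources->GetDepthStencilView());
});
if (cameraResources->AttachViewProjectionBuffer(deviceResources))
{
    // The viewport and the view/projection constant buffer are bound; issue draw calls here.
}
```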


@ -0,0 +1,115 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <winrt/Windows.Graphics.Holographic.h>
#include <d3d11.h>
using namespace winrt::Windows::Graphics::Holographic;
using namespace winrt::Windows::Perception::Spatial;
namespace DXHelper
{
class DeviceResources;
// Constant buffer used to send the view-projection matrices to the shader pipeline.
struct ViewProjectionConstantBuffer
{
DirectX::XMFLOAT4X4 viewProjection[2];
};
// Assert that the constant buffer remains 16-byte aligned (best practice).
static_assert(
(sizeof(ViewProjectionConstantBuffer) % (sizeof(float) * 4)) == 0,
"ViewProjection constant buffer size must be 16-byte aligned (16 bytes is the length of four floats).");
// Manages DirectX device resources that are specific to a holographic camera, such as the
// back buffer, ViewProjection constant buffer, and viewport.
class CameraResources
{
public:
CameraResources(const HolographicCamera& holographicCamera);
void CreateResourcesForBackBuffer(
DXHelper::DeviceResources* pDeviceResources, const HolographicCameraRenderingParameters& cameraParameters);
void ReleaseResourcesForBackBuffer(DXHelper::DeviceResources* pDeviceResources);
void UpdateViewProjectionBuffer(
std::shared_ptr<DXHelper::DeviceResources> deviceResources,
const HolographicCameraPose& cameraPose,
const SpatialCoordinateSystem& coordinateSystem);
bool AttachViewProjectionBuffer(std::shared_ptr<DXHelper::DeviceResources> deviceResources);
// Direct3D device resources.
ID3D11RenderTargetView* GetBackBufferRenderTargetView() const
{
return m_d3dRenderTargetView.get();
}
ID3D11DepthStencilView* GetDepthStencilView() const
{
return m_d3dDepthStencilView.get();
}
ID3D11Texture2D* GetBackBufferTexture2D() const
{
return m_d3dBackBuffer.get();
}
D3D11_VIEWPORT GetViewport() const
{
return m_d3dViewport;
}
DXGI_FORMAT GetBackBufferDXGIFormat() const
{
return m_dxgiFormat;
}
// Render target properties.
winrt::Windows::Foundation::Size GetRenderTargetSize() const
{
return m_d3dRenderTargetSize;
}
bool IsRenderingStereoscopic() const
{
return m_isStereo;
}
// The holographic camera these resources are for.
const HolographicCamera& GetHolographicCamera() const
{
return m_holographicCamera;
}
private:
// Direct3D rendering objects. Required for 3D.
winrt::com_ptr<ID3D11RenderTargetView> m_d3dRenderTargetView;
winrt::com_ptr<ID3D11DepthStencilView> m_d3dDepthStencilView;
winrt::com_ptr<ID3D11Texture2D> m_d3dBackBuffer;
// Device resource to store view and projection matrices.
winrt::com_ptr<ID3D11Buffer> m_viewProjectionConstantBuffer;
// Direct3D rendering properties.
DXGI_FORMAT m_dxgiFormat;
winrt::Windows::Foundation::Size m_d3dRenderTargetSize;
D3D11_VIEWPORT m_d3dViewport;
// Indicates whether the camera supports stereoscopic rendering.
bool m_isStereo = false;
// Indicates whether this camera has a pending frame.
bool m_framePending = false;
// Pointer to the holographic camera these resources are for.
HolographicCamera m_holographicCamera = nullptr;
};
} // namespace DXHelper


@ -0,0 +1,37 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <strsafe.h>
static void DebugLog(_In_z_ LPCWSTR format, ...)
{
wchar_t buffer[1024];
LPWSTR bufEnd = nullptr;
va_list args;
va_start(args, format);
HRESULT hr =
StringCchVPrintfExW(buffer, _countof(buffer), &bufEnd, nullptr, STRSAFE_FILL_BEHIND_NULL | STRSAFE_FILL_ON_FAILURE, format, args);
if (SUCCEEDED(hr))
{
// bufEnd points at the terminating null, so inspect the last character actually written.
if (bufEnd == buffer || bufEnd[-1] != L'\n')
{
StringCchCatW(buffer, _countof(buffer), L"\r\n");
}
OutputDebugStringW(buffer);
}
va_end(args);
}
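A minimal usage sketch for the helper above; the message text and the host/port variables are illustrative only:

```cpp
DebugLog(L"Remoting host connecting to %s:%d", host.c_str(), port);
```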


@ -0,0 +1,304 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "DeviceResources.h"
#include "DirectXHelper.h"
#include <winrt/Windows.Graphics.Display.h>
using namespace D2D1;
using namespace Microsoft::WRL;
using namespace winrt::Windows::Graphics::DirectX::Direct3D11;
using namespace winrt::Windows::Graphics::Display;
using namespace winrt::Windows::Graphics::Holographic;
// Constructor for DeviceResources.
DXHelper::DeviceResources::DeviceResources()
{
CreateDeviceIndependentResources();
}
DXHelper::DeviceResources::~DeviceResources()
{
if (m_d3dContext)
{
m_d3dContext->Flush();
m_d3dContext->ClearState();
}
}
// Configures resources that don't depend on the Direct3D device.
void DXHelper::DeviceResources::CreateDeviceIndependentResources()
{
// Initialize Direct2D resources.
D2D1_FACTORY_OPTIONS options{};
#if defined(_DEBUG)
// If the project is in a debug build, enable Direct2D debugging via SDK Layers.
options.debugLevel = D2D1_DEBUG_LEVEL_INFORMATION;
#endif
// Initialize the Direct2D Factory.
winrt::check_hresult(D2D1CreateFactory(D2D1_FACTORY_TYPE_SINGLE_THREADED, __uuidof(ID2D1Factory2), &options, m_d2dFactory.put_void()));
// Initialize the DirectWrite Factory.
winrt::check_hresult(
DWriteCreateFactory(DWRITE_FACTORY_TYPE_SHARED, __uuidof(IDWriteFactory2), (::IUnknown**)m_dwriteFactory.put_void()));
// Initialize the Windows Imaging Component (WIC) Factory.
winrt::check_hresult(
CoCreateInstance(CLSID_WICImagingFactory2, nullptr, CLSCTX_INPROC_SERVER, __uuidof(m_wicFactory), m_wicFactory.put_void()));
}
void DXHelper::DeviceResources::SetHolographicSpace(HolographicSpace holographicSpace)
{
// Cache the holographic space. Used to re-initialize during device-lost scenarios.
m_holographicSpace = holographicSpace;
InitializeUsingHolographicSpace();
}
void DXHelper::DeviceResources::InitializeUsingHolographicSpace()
{
// The holographic space might need to determine which adapter supports
// holograms, in which case it will specify a non-zero PrimaryAdapterId.
LUID id = {m_holographicSpace.PrimaryAdapterId().LowPart, m_holographicSpace.PrimaryAdapterId().HighPart};
// When a primary adapter ID is given to the app, the app should find
// the corresponding DXGI adapter and use it to create Direct3D devices
// and device contexts. Otherwise, there is no restriction on the DXGI
// adapter the app can use.
if ((id.HighPart != 0) || (id.LowPart != 0))
{
UINT createFlags = 0;
#ifdef DEBUG
if (DXHelper::SdkLayersAvailable())
{
createFlags |= DXGI_CREATE_FACTORY_DEBUG;
}
#endif
// Create the DXGI factory.
winrt::com_ptr<IDXGIFactory1> dxgiFactory;
winrt::check_hresult(CreateDXGIFactory2(createFlags, __uuidof(dxgiFactory), dxgiFactory.put_void()));
winrt::com_ptr<IDXGIFactory4> dxgiFactory4;
dxgiFactory.as(dxgiFactory4);
// Retrieve the adapter specified by the holographic space.
winrt::check_hresult(dxgiFactory4->EnumAdapterByLuid(id, __uuidof(m_dxgiAdapter), m_dxgiAdapter.put_void()));
}
else
{
m_dxgiAdapter = nullptr;
}
CreateDeviceResources();
m_holographicSpace.SetDirect3D11Device(m_d3dInteropDevice);
}
// Configures the Direct3D device, and stores handles to it and the device context.
void DXHelper::DeviceResources::CreateDeviceResources()
{
// This flag adds support for surfaces with a different color channel ordering
// than the API default. It is required for compatibility with Direct2D.
UINT creationFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
#if defined(_DEBUG)
if (DXHelper::SdkLayersAvailable())
{
// If the project is in a debug build, enable debugging via SDK Layers with this flag.
creationFlags |= D3D11_CREATE_DEVICE_DEBUG;
}
#endif
// This array defines the set of DirectX hardware feature levels this app will support.
// Note the ordering should be preserved.
// Note that HoloLens supports feature level 11.1. The HoloLens emulator is also capable
// of running on graphics cards starting with feature level 10.0.
D3D_FEATURE_LEVEL featureLevels[] = {D3D_FEATURE_LEVEL_12_1,
D3D_FEATURE_LEVEL_12_0,
D3D_FEATURE_LEVEL_11_1,
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0};
// Create the Direct3D 11 API device object and a corresponding context.
winrt::com_ptr<ID3D11Device> device;
winrt::com_ptr<ID3D11DeviceContext> context;
const D3D_DRIVER_TYPE driverType = m_dxgiAdapter == nullptr ? D3D_DRIVER_TYPE_HARDWARE : D3D_DRIVER_TYPE_UNKNOWN;
const HRESULT hr = D3D11CreateDevice(
m_dxgiAdapter.get(), // Either nullptr, or the primary adapter determined by Windows Holographic.
driverType, // Create a device using the hardware graphics driver.
0, // Should be 0 unless the driver is D3D_DRIVER_TYPE_SOFTWARE.
creationFlags, // Set debug and Direct2D compatibility flags.
featureLevels, // List of feature levels this app can support.
ARRAYSIZE(featureLevels), // Size of the list above.
D3D11_SDK_VERSION, // Always set this to D3D11_SDK_VERSION for Windows Runtime apps.
device.put(), // Returns the Direct3D device created.
&m_d3dFeatureLevel, // Returns feature level of device created.
context.put() // Returns the device immediate context.
);
if (FAILED(hr))
{
// If the initialization fails, fall back to the WARP device.
// For more information on WARP, see:
// http://go.microsoft.com/fwlink/?LinkId=286690
winrt::check_hresult(D3D11CreateDevice(
nullptr, // Use the default DXGI adapter for WARP.
D3D_DRIVER_TYPE_WARP, // Create a WARP device instead of a hardware device.
0,
creationFlags,
featureLevels,
ARRAYSIZE(featureLevels),
D3D11_SDK_VERSION,
device.put(),
&m_d3dFeatureLevel,
context.put()));
}
// Store pointers to the Direct3D device and immediate context.
device.as(m_d3dDevice);
context.as(m_d3dContext);
// Acquire the DXGI interface for the Direct3D device.
winrt::com_ptr<IDXGIDevice3> dxgiDevice;
m_d3dDevice.as(dxgiDevice);
// Wrap the native device using a WinRT interop object.
winrt::com_ptr<::IInspectable> object;
winrt::check_hresult(CreateDirect3D11DeviceFromDXGIDevice(dxgiDevice.get(), reinterpret_cast<IInspectable**>(winrt::put_abi(object))));
m_d3dInteropDevice = object.as<IDirect3DDevice>();
// Cache the DXGI adapter.
// This is for the case of no preferred DXGI adapter, or fallback to WARP.
winrt::com_ptr<IDXGIAdapter> dxgiAdapter;
dxgiDevice->GetAdapter(dxgiAdapter.put());
dxgiAdapter.as(m_dxgiAdapter);
// Check for device support for the optional feature that allows setting the render target array index from the vertex shader stage.
D3D11_FEATURE_DATA_D3D11_OPTIONS3 options;
m_d3dDevice->CheckFeatureSupport(D3D11_FEATURE_D3D11_OPTIONS3, &options, sizeof(options));
if (options.VPAndRTArrayIndexFromAnyShaderFeedingRasterizer)
{
m_supportsVprt = true;
}
}
// Validates the back buffer for each HolographicCamera and recreates
// resources for back buffers that have changed.
// Locks the set of holographic camera resources until the function exits.
void DXHelper::DeviceResources::EnsureCameraResources(HolographicFrame frame, HolographicFramePrediction prediction)
{
UseHolographicCameraResources([this, frame, prediction](std::map<UINT32, std::unique_ptr<CameraResources>>& cameraResourceMap) {
for (HolographicCameraPose const& cameraPose : prediction.CameraPoses())
{
try
{
HolographicCameraRenderingParameters renderingParameters = frame.GetRenderingParameters(cameraPose);
CameraResources* pCameraResources = cameraResourceMap[cameraPose.HolographicCamera().Id()].get();
pCameraResources->CreateResourcesForBackBuffer(this, renderingParameters);
}
catch (const winrt::hresult_error&)
{
}
}
});
}
// Prepares to allocate resources and adds resource views for a camera.
// Locks the set of holographic camera resources until the function exits.
void DXHelper::DeviceResources::AddHolographicCamera(HolographicCamera camera)
{
UseHolographicCameraResources([this, camera](std::map<UINT32, std::unique_ptr<CameraResources>>& cameraResourceMap) {
cameraResourceMap[camera.Id()] = std::make_unique<CameraResources>(camera);
});
}
// Deallocates resources for a camera and removes the camera from the set.
// Locks the set of holographic camera resources until the function exits.
void DXHelper::DeviceResources::RemoveHolographicCamera(HolographicCamera camera)
{
UseHolographicCameraResources([this, camera](std::map<UINT32, std::unique_ptr<CameraResources>>& cameraResourceMap) {
CameraResources* pCameraResources = cameraResourceMap[camera.Id()].get();
if (pCameraResources != nullptr)
{
pCameraResources->ReleaseResourcesForBackBuffer(this);
cameraResourceMap.erase(camera.Id());
}
});
}
// Recreate all device resources and set them back to the current state.
// Locks the set of holographic camera resources until the function exits.
void DXHelper::DeviceResources::HandleDeviceLost()
{
if (m_deviceNotify != nullptr)
{
m_deviceNotify->OnDeviceLost();
}
UseHolographicCameraResources([this](std::map<UINT32, std::unique_ptr<CameraResources>>& cameraResourceMap) {
for (auto& pair : cameraResourceMap)
{
CameraResources* pCameraResources = pair.second.get();
pCameraResources->ReleaseResourcesForBackBuffer(this);
}
});
InitializeUsingHolographicSpace();
if (m_deviceNotify != nullptr)
{
m_deviceNotify->OnDeviceRestored();
}
}
// Register our IDeviceNotify to be informed of device-lost and device-restored events.
void DXHelper::DeviceResources::RegisterDeviceNotify(DXHelper::IDeviceNotify* deviceNotify)
{
m_deviceNotify = deviceNotify;
}
// Call this method when the app suspends. It provides a hint to the driver that the app
// is entering an idle state and that temporary buffers can be reclaimed for use by other apps.
void DXHelper::DeviceResources::Trim()
{
m_d3dContext->ClearState();
winrt::com_ptr<IDXGIDevice3> dxgiDevice;
m_d3dDevice.as(dxgiDevice);
dxgiDevice->Trim();
}
// Present the contents of the swap chain to the screen.
// Locks the set of holographic camera resources until the function exits.
void DXHelper::DeviceResources::Present(HolographicFrame frame)
{
// By default, this API waits for the frame to finish before it returns.
// For Holographic Remoting we do not wait here; instead, SampleHostMain::Update waits so that CreateNextFrame
// is called at a steady 16.6 ms interval (for 60 Hz).
HolographicFramePresentResult presentResult =
frame.PresentUsingCurrentPrediction(HolographicFramePresentWaitBehavior::DoNotWaitForFrameToFinish);
// The PresentUsingCurrentPrediction API will detect when the graphics device
// changes or becomes invalid. When this happens, it is considered a Direct3D
// device lost scenario.
if (presentResult == HolographicFramePresentResult::DeviceRemoved)
{
// The Direct3D device, context, and resources should be recreated.
HandleDeviceLost();
}
}
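Roughly, the per-frame flow around these helpers looks like the following sketch; holographicSpace and deviceResources are assumed to exist, and the actual sequencing is implemented in SampleHostMain, which is not part of this file:

```cpp
HolographicFrame frame = holographicSpace.CreateNextFrame();
HolographicFramePrediction prediction = frame.CurrentPrediction();
deviceResources->EnsureCameraResources(frame, prediction);
// ... render into each holographic camera's back buffer ...
deviceResources->Present(frame);
```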


@ -0,0 +1,156 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "CameraResources.h"
#include <d2d1_2.h>
#include <d3d11_4.h>
#include <dwrite_2.h>
#include <wincodec.h>
#include <wrl/client.h>
namespace DXHelper
{
// Provides an interface for an application that owns DeviceResources to be notified of the device being lost or created.
interface IDeviceNotify
{
virtual void OnDeviceLost() = 0;
virtual void OnDeviceRestored() = 0;
};
// Creates and manages a Direct3D device and immediate context, Direct2D device and context (for debug), and the holographic swap chain.
class DeviceResources
{
public:
DeviceResources();
~DeviceResources();
// Public methods related to Direct3D devices.
void HandleDeviceLost();
void RegisterDeviceNotify(IDeviceNotify* deviceNotify);
void Trim();
void Present(winrt::Windows::Graphics::Holographic::HolographicFrame frame);
// Public methods related to holographic devices.
void SetHolographicSpace(winrt::Windows::Graphics::Holographic::HolographicSpace space);
void EnsureCameraResources(
winrt::Windows::Graphics::Holographic::HolographicFrame frame,
winrt::Windows::Graphics::Holographic::HolographicFramePrediction prediction);
void AddHolographicCamera(winrt::Windows::Graphics::Holographic::HolographicCamera camera);
void RemoveHolographicCamera(winrt::Windows::Graphics::Holographic::HolographicCamera camera);
// Holographic accessors.
template <typename LCallback>
void UseHolographicCameraResources(LCallback const& callback);
winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice GetD3DInteropDevice() const
{
return m_d3dInteropDevice;
}
// D3D accessors.
ID3D11Device4* GetD3DDevice() const
{
return m_d3dDevice.get();
}
template <typename F>
auto UseD3DDeviceContext(F func) const
{
std::scoped_lock lock(m_d3dContextMutex);
return func(m_d3dContext.get());
}
D3D_FEATURE_LEVEL GetDeviceFeatureLevel() const
{
return m_d3dFeatureLevel;
}
bool GetDeviceSupportsVprt() const
{
return m_supportsVprt;
}
// DXGI accessors.
IDXGIAdapter3* GetDXGIAdapter() const
{
return m_dxgiAdapter.get();
}
// D2D accessors.
ID2D1Factory2* GetD2DFactory() const
{
return m_d2dFactory.get();
}
IDWriteFactory2* GetDWriteFactory() const
{
return m_dwriteFactory.get();
}
IWICImagingFactory2* GetWicImagingFactory() const
{
return m_wicFactory.get();
}
protected:
void CreateDeviceResources();
private:
// Private methods related to the Direct3D device, and resources based on that device.
void CreateDeviceIndependentResources();
void InitializeUsingHolographicSpace();
protected:
// Direct3D objects.
winrt::com_ptr<ID3D11Device4> m_d3dDevice;
mutable std::recursive_mutex m_d3dContextMutex;
winrt::com_ptr<ID3D11DeviceContext3> m_d3dContext;
winrt::com_ptr<IDXGIAdapter3> m_dxgiAdapter;
// Direct3D interop objects.
winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice m_d3dInteropDevice;
// Direct2D factories.
winrt::com_ptr<ID2D1Factory2> m_d2dFactory;
winrt::com_ptr<IDWriteFactory2> m_dwriteFactory;
winrt::com_ptr<IWICImagingFactory2> m_wicFactory;
// The IDeviceNotify can be held as a raw pointer because its implementation owns this DeviceResources instance and therefore outlives it.
IDeviceNotify* m_deviceNotify = nullptr;
// Whether or not the current Direct3D device supports the optional feature
// for setting the render target array index from the vertex shader stage.
bool m_supportsVprt = false;
private:
// The holographic space provides a preferred DXGI adapter ID.
winrt::Windows::Graphics::Holographic::HolographicSpace m_holographicSpace = nullptr;
// Properties of the Direct3D device currently in use.
D3D_FEATURE_LEVEL m_d3dFeatureLevel = D3D_FEATURE_LEVEL_10_0;
// Back buffer resources, etc. for attached holographic cameras.
std::map<UINT32, std::unique_ptr<CameraResources>> m_cameraResources;
std::mutex m_cameraResourcesLock;
};
} // namespace DXHelper
// Device-based resources for holographic cameras are stored in a std::map. Access this list by providing a
// callback to this function, and the std::map will be guarded from add and remove
// events until the callback returns. The callback is processed immediately and must
// not contain any nested calls to UseHolographicCameraResources.
// The callback takes a parameter of type std::map<UINT32, std::unique_ptr<DXHelper::CameraResources>>&
// through which the list of cameras will be accessed.
template <typename LCallback>
void DXHelper::DeviceResources::UseHolographicCameraResources(LCallback const& callback)
{
std::lock_guard<std::mutex> guard(m_cameraResourcesLock);
callback(m_cameraResources);
}
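A minimal usage sketch, assuming a std::shared_ptr<DXHelper::DeviceResources> named deviceResources:

```cpp
deviceResources->UseHolographicCameraResources(
    [](std::map<UINT32, std::unique_ptr<DXHelper::CameraResources>>& cameraResourceMap) {
        for (auto& cameraPair : cameraResourceMap)
        {
            DXHelper::CameraResources* cameraResources = cameraPair.second.get();
            // Per-camera work goes here; the map stays locked until the callback returns.
        }
    });
```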


@ -0,0 +1,87 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <wrl.h>
#include <future>
#include <winrt/Windows.Storage.Streams.h>
#include <winrt/Windows.Storage.h>
#include <winrt/Windows.Graphics.DirectX.Direct3D11.h>
#include <windows.graphics.directx.direct3d11.interop.h>
#include <d3d11.h>
#include <dxgi1_2.h>
#include <filesystem>
namespace DXHelper
{
// Function that reads from a binary file asynchronously.
inline std::future<std::vector<byte>> ReadDataAsync(const std::wstring_view& filename)
{
using namespace winrt::Windows::Storage;
using namespace winrt::Windows::Storage::Streams;
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
wchar_t moduleFullyQualifiedFilename[MAX_PATH] = {};
uint32_t moduleFileNameLength = GetModuleFileNameW(NULL, moduleFullyQualifiedFilename, _countof(moduleFullyQualifiedFilename));
moduleFullyQualifiedFilename[moduleFileNameLength] = L'\0';
std::filesystem::path modulePath = moduleFullyQualifiedFilename;
// winrt::hstring moduleFilename = modulePath.filename().c_str();
modulePath.replace_filename(filename);
winrt::hstring absoluteFilename = modulePath.c_str();
IBuffer fileBuffer = co_await PathIO::ReadBufferAsync(absoluteFilename);
#else
IBuffer fileBuffer = co_await PathIO::ReadBufferAsync(filename);
#endif
std::vector<byte> returnBuffer;
returnBuffer.resize(fileBuffer.Length());
DataReader::FromBuffer(fileBuffer).ReadBytes(winrt::array_view<uint8_t>(returnBuffer));
return returnBuffer;
}
// Converts a length in device-independent pixels (DIPs) to a length in physical pixels.
inline float ConvertDipsToPixels(float dips, float dpi)
{
static const float dipsPerInch = 96.0f;
return floorf(dips * dpi / dipsPerInch + 0.5f); // Round to nearest integer.
}
#if defined(_DEBUG)
// Check for SDK Layer support.
inline bool SdkLayersAvailable()
{
HRESULT hr = D3D11CreateDevice(
nullptr,
D3D_DRIVER_TYPE_NULL, // There is no need to create a real hardware device.
0,
D3D11_CREATE_DEVICE_DEBUG, // Check for the SDK layers.
nullptr, // Any feature level will do.
0,
D3D11_SDK_VERSION, // Always set this to D3D11_SDK_VERSION for Windows Runtime apps.
nullptr, // No need to keep the D3D device reference.
nullptr, // No need to know the feature level.
nullptr // No need to keep the D3D device context reference.
);
return SUCCEEDED(hr);
}
#endif
} // namespace DXHelper
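A small usage sketch for ReadDataAsync (the shader file name is an assumption). The function is a coroutine returning std::future, so a caller can either co_await it or block on get():

```cpp
std::vector<byte> vertexShaderBytes = DXHelper::ReadDataAsync(L"VertexShader.cso").get();
```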


@ -0,0 +1,18 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "../pch.h"
// -- important: preserve order of these two includes (initguid before PerceptionTypes)!
#include <initguid.h>
#include "PerceptionTypes.h"
// --


@ -0,0 +1,33 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <guiddef.h>
DEFINE_GUID(SPATIALPROP_QRTrackerObjectId, 0xf2e86326, 0xfbd8, 0x4088, 0xb5, 0xfb, 0xac, 0x61, 0xc, 0x3, 0x99, 0x7f);
DEFINE_GUID(SPATIALPROP_QRTracker_TrackingStatus, 0x270f00dc, 0xc0d9, 0x442c, 0x87, 0x25, 0x27, 0xb6, 0xbb, 0xd9, 0x43, 0xd);
DEFINE_GUID(SPATIALPROP_QRTracker_QRCodesList, 0x338b32f8, 0x1ce0, 0x4e75, 0x8d, 0xf4, 0x56, 0xd4, 0x2f, 0x73, 0x96, 0x74);
DEFINE_GUID(SPATIALPROP_QRCode_PhysicalSize, 0xcfb07ae5, 0x456a, 0x4aaf, 0x9d, 0x6a, 0xa2, 0x9d, 0x3, 0x9b, 0xc0, 0x56);
DEFINE_GUID(SPATIALPROP_QRCode_LastSeenTime, 0xb2b08c2d, 0xb531, 0x4f18, 0x87, 0x84, 0x44, 0x84, 0x46, 0x3d, 0x88, 0x53);
DEFINE_GUID(SPATIALPROP_QRCode_StreamInfo, 0x609143ea, 0x4ec5, 0x4b0e, 0xba, 0xef, 0x52, 0xa5, 0x5c, 0xfc, 0x23, 0x58);
#pragma pack(push, 1)
typedef struct SPATIAL_GRAPH_QR_CODE_STREAM_INFO
{
UINT32 Version;
ULONG StreamSize;
_Field_size_(StreamSize) BYTE StreamData[ANYSIZE_ARRAY];
} SPATIAL_GRAPH_QR_CODE_STREAM_INFO, *PSPATIAL_GRAPH_QR_CODE_STREAM_INFO;
#pragma pack(pop)


@ -0,0 +1,70 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "SampleHostMain.h"
#include "Speech.h"
#include <windows.h>
#include <winrt/Windows.ApplicationModel.h>
#include <winrt/Windows.Storage.h>
#include <filesystem>
namespace
{
winrt::Windows::Foundation::IAsyncOperation<winrt::Windows::Storage::StorageFile> LoadGrammarFileAsync()
{
const wchar_t* speechGrammarFile = L"SpeechGrammar.xml";
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
wchar_t executablePath[MAX_PATH];
if (GetModuleFileNameW(NULL, executablePath, ARRAYSIZE(executablePath)) == 0)
{
winrt::throw_last_error();
}
std::filesystem::path executableFolder(executablePath);
executableFolder.remove_filename();
auto rootFolder = co_await winrt::Windows::Storage::StorageFolder::GetFolderFromPathAsync(executableFolder.c_str());
auto file = co_await rootFolder.GetFileAsync(speechGrammarFile);
co_return file;
#else
auto rootFolder = winrt::Windows::ApplicationModel::Package::Current().InstalledLocation();
return rootFolder.GetFileAsync(speechGrammarFile);
#endif
}
} // namespace
winrt::fire_and_forget Speech::InitializeSpeechAsync(
winrt::Microsoft::Holographic::AppRemoting::IRemoteSpeech remoteSpeech,
winrt::Microsoft::Holographic::AppRemoting::IRemoteSpeech::OnRecognizedSpeech_revoker& onRecognizedSpeechRevoker,
std::weak_ptr<SampleHostMain> sampleHostMainWeak)
{
onRecognizedSpeechRevoker = remoteSpeech.OnRecognizedSpeech(
winrt::auto_revoke, [sampleHostMainWeak](const winrt::Microsoft::Holographic::AppRemoting::RecognizedSpeech& recognizedSpeech) {
if (auto sampleHostMain = sampleHostMainWeak.lock())
{
sampleHostMain->OnRecognizedSpeech(recognizedSpeech.RecognizedText);
}
});
auto grammarFile = co_await LoadGrammarFileAsync();
std::vector<winrt::hstring> dictionary;
dictionary.push_back(L"Red");
dictionary.push_back(L"Blue");
dictionary.push_back(L"Green");
dictionary.push_back(L"Default");
dictionary.push_back(L"Aquamarine");
remoteSpeech.ApplyParameters(L"en-US", grammarFile, dictionary);
}


@ -0,0 +1,28 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <memory>
#include <winrt/Microsoft.Holographic.AppRemoting.h>
#include <winrt/Windows.Foundation.h>
class SampleHostMain;
namespace Speech
{
winrt::fire_and_forget InitializeSpeechAsync(
winrt::Microsoft::Holographic::AppRemoting::IRemoteSpeech remoteSpeech,
winrt::Microsoft::Holographic::AppRemoting::IRemoteSpeech::OnRecognizedSpeech_revoker& onRecognizedSpeechRevoker,
std::weak_ptr<SampleHostMain> sampleHostMainWeak);
}


@ -0,0 +1,31 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
// Comparison functor that allows GUID to be used as the key of an ordered map (e.g. std::map).
struct GUIDComparer
{
inline static int compare(const GUID& Left, const GUID& Right)
{
return memcmp(&Left, &Right, sizeof(GUID));
}
inline static bool equals(const GUID& Left, const GUID& Right)
{
return memcmp(&Left, &Right, sizeof(GUID)) == 0;
}
bool operator()(const GUID& Left, const GUID& Right) const
{
return compare(Left, Right) < 0;
}
};
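For illustration, the comparer is intended as the ordering predicate of an associative container, for example:

```cpp
std::map<GUID, std::wstring, GUIDComparer> namesByGuid;
```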


@ -0,0 +1,218 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "../pch.h"
#include "../Common/PerceptionTypes.h"
#include "../Common/Utils.h"
#include "PerceptionDeviceHandler.h"
#include "QRCodeTracker.h"
PerceptionRootObject::~PerceptionRootObject()
{
}
const GUID& PerceptionRootObject::GetPropertyId() const
{
return m_typeId;
}
const GUID& PerceptionRootObject::GetObjectId() const
{
return m_objectId;
}
PerceptionRootObject::PerceptionRootObject(IPerceptionDevice* device, const GUID& typeId, const GUID& objectId)
: m_typeId(typeId)
, m_objectId(objectId)
{
m_device.copy_from(device);
}
PerceptionDeviceHandler::PerceptionDeviceHandler()
{
}
PerceptionDeviceHandler::~PerceptionDeviceHandler()
{
Stop();
}
void PerceptionDeviceHandler::Start()
{
std::lock_guard stateLock(m_stateProtect);
if (m_running)
{
return;
}
HRESULT hr = PerceptionDeviceCreateFactory(IID_PPV_ARGS(m_perceptionDeviceFactory.put()));
if (FAILED(hr))
{
Stop();
return;
}
m_rootObjectChangeHandler = winrt::make_self<RootObjectChangeHandler>(*this);
std::array<const GUID, 1> rootObjectIds{QRCodeTracker::GetStaticPropertyId()};
for (size_t i = 0; i < rootObjectIds.size(); ++i)
{
winrt::com_ptr<IPerceptionDeviceRootObjectWatcher> watcher;
hr = m_perceptionDeviceFactory->CreateRootObjectWatcher(1, &rootObjectIds[i], PerceptionDeviceOptions::None, watcher.put());
if (FAILED(hr))
{
Stop();
return;
}
hr = watcher->SetAddedHandler(m_rootObjectChangeHandler.get());
if (FAILED(hr))
{
Stop();
return;
}
hr = watcher->SetRemovedHandler(m_rootObjectChangeHandler.get());
if (FAILED(hr))
{
Stop();
return;
}
m_rootObjectWatchers.emplace_back(std::move(watcher));
}
m_running = true;
for (auto& watcher : m_rootObjectWatchers)
{
hr = watcher->Start();
if (FAILED(hr))
{
Stop();
return;
}
}
}
void PerceptionDeviceHandler::Stop()
{
std::lock_guard stateLock(m_stateProtect);
m_running = false;
for (auto& watcher : m_rootObjectWatchers)
{
watcher->Stop();
}
m_rootObjectWatchers.clear();
m_rootObjectChangeHandler = nullptr;
m_perceptionDeviceFactory = nullptr;
}
HRESULT PerceptionDeviceHandler::HandleRootObjectAdded(IPerceptionDeviceRootObjectAddedEventArgs* args)
{
std::lock_guard stateLock(m_stateProtect);
if (!m_running)
{
return S_OK;
}
RootObjectKey key{args->GetPropertyId(), args->GetObjectId()};
if (m_rootObjects.find(key) != m_rootObjects.end())
{
return S_FALSE; // Already have that root object; don't add it twice
}
if (GUIDComparer::equals(key.propertyId, QRCodeTracker::GetStaticPropertyId()))
{
winrt::com_ptr<IPerceptionDevice> device;
args->GetDevice(device.put());
m_rootObjects.emplace(key, std::make_shared<QRCodeTracker>(device.get(), key.propertyId, key.objectId));
}
return S_OK;
}
HRESULT PerceptionDeviceHandler::HandleRootObjectRemoved(IPerceptionDeviceRootObjectRemovedEventArgs* args)
{
std::lock_guard stateLock(m_stateProtect);
if (!m_running)
{
return S_OK;
}
RootObjectKey key{args->GetPropertyId(), args->GetObjectId()};
auto it = m_rootObjects.find(key);
if (it != m_rootObjects.end())
{
m_rootObjects.erase(key);
}
return S_OK;
}
PerceptionDeviceHandler::RootObjectChangeHandler::RootObjectChangeHandler(PerceptionDeviceHandler& owner)
: m_weakOwner(owner.weak_from_this())
{
}
STDMETHODIMP PerceptionDeviceHandler::RootObjectChangeHandler::Invoke(
_In_ IPerceptionDeviceRootObjectWatcher* sender, _In_ IPerceptionDeviceRootObjectAddedEventArgs* args)
{
auto owner{m_weakOwner.lock()};
if (owner)
{
return owner->HandleRootObjectAdded(args);
}
return S_OK;
}
STDMETHODIMP PerceptionDeviceHandler::RootObjectChangeHandler::Invoke(
_In_ IPerceptionDeviceRootObjectWatcher* sender, _In_ IPerceptionDeviceRootObjectRemovedEventArgs* args)
{
auto owner{m_weakOwner.lock()};
if (owner)
{
return owner->HandleRootObjectRemoved(args);
}
return S_OK;
}
bool PerceptionDeviceHandler::RootObjectKey::operator<(const RootObjectKey& other) const
{
const auto typeIdRes = GUIDComparer::compare(propertyId, other.propertyId);
if (typeIdRes < 0)
{
return true;
}
else if (typeIdRes > 0)
{
return false;
}
else
{
return GUIDComparer::compare(objectId, other.objectId) < 0;
}
}

View file

@ -0,0 +1,121 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <map>
#include <memory>
#include <mutex>
#include <vector>
#include <winrt/Windows.UI.Input.Spatial.h>
#include <PerceptionDevice.h>
// Base class for perception root objects managed by the PerceptionDeviceHandler
class PerceptionRootObject
{
public:
virtual ~PerceptionRootObject();
const GUID& GetPropertyId() const;
const GUID& GetObjectId() const;
protected:
PerceptionRootObject(IPerceptionDevice* device, const GUID& typeId, const GUID& objectId);
protected:
winrt::com_ptr<IPerceptionDevice> m_device;
GUID m_typeId;
GUID m_objectId;
};
// Sample perception device handler. Listens to the availability of perception devices (more accurately:
// perception root objects of known types), and retrieves data from these root objects.
class PerceptionDeviceHandler : public std::enable_shared_from_this<PerceptionDeviceHandler>
{
public:
PerceptionDeviceHandler();
~PerceptionDeviceHandler();
// Starts monitoring for perception root object changes
void Start();
// Stops monitoring perception root object changes
void Stop();
// Iterates over all perception root objects currently known
template <typename Func>
void ForEachRootObject(Func& func)
{
std::lock_guard stateLock(m_stateProtect);
for (auto& rootObjectEntry : m_rootObjects)
{
func(*rootObjectEntry.second.get());
}
}
// Iterates over all root objects of a certain type
template <typename RootObjectType, typename Func>
void ForEachRootObjectOfType(Func& func)
{
std::lock_guard stateLock(m_stateProtect);
for (auto& rootObjectEntry : m_rootObjects)
{
PerceptionRootObject& rootObject = *rootObjectEntry.second.get();
if (GUIDComparer::equals(rootObject.GetPropertyId(), RootObjectType::GetStaticPropertyId()))
{
func(static_cast<RootObjectType&>(rootObject));
}
}
}
private:
struct RootObjectChangeHandler
: winrt::implements<RootObjectChangeHandler, IPerceptionDeviceRootObjectAddedHandler, IPerceptionDeviceRootObjectRemovedHandler>
{
RootObjectChangeHandler(PerceptionDeviceHandler& owner);
IFACEMETHOD(Invoke)
(_In_ IPerceptionDeviceRootObjectWatcher* sender, _In_ IPerceptionDeviceRootObjectAddedEventArgs* args) override;
IFACEMETHOD(Invoke)
(_In_ IPerceptionDeviceRootObjectWatcher* sender, _In_ IPerceptionDeviceRootObjectRemovedEventArgs* args) override;
private:
std::weak_ptr<PerceptionDeviceHandler> m_weakOwner;
};
friend RootObjectChangeHandler;
struct RootObjectKey
{
GUID propertyId;
GUID objectId;
bool operator<(const RootObjectKey& other) const;
};
using RootObjectMap = std::map<RootObjectKey, std::shared_ptr<PerceptionRootObject>>;
private:
HRESULT HandleRootObjectAdded(IPerceptionDeviceRootObjectAddedEventArgs* args);
HRESULT HandleRootObjectRemoved(IPerceptionDeviceRootObjectRemovedEventArgs* args);
private:
std::recursive_mutex m_stateProtect;
bool m_running{false};
winrt::com_ptr<IPerceptionDeviceFactory> m_perceptionDeviceFactory;
std::vector<winrt::com_ptr<IPerceptionDeviceRootObjectWatcher>> m_rootObjectWatchers;
winrt::com_ptr<RootObjectChangeHandler> m_rootObjectChangeHandler;
RootObjectMap m_rootObjects;
};
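// Illustrative usage sketch (not part of the original sample): the handler must be owned by a
// std::shared_ptr (it derives from enable_shared_from_this), is started once, and can then be asked
// for root objects of a concrete type, e.g. once per frame. QRCodeTracker is the only root object
// type this sample registers; the function name below is hypothetical.
#if 0 // example only, not compiled
inline void PerceptionDeviceHandlerUsageSketch()
{
    auto handler = std::make_shared<PerceptionDeviceHandler>();
    handler->Start(); // begins watching for perception root objects

    auto visitTracker = [](QRCodeTracker& tracker) {
        // e.g. tracker.ForEachQRCode(...) as QRCodeRenderer::Update does
    };
    handler->ForEachRootObjectOfType<QRCodeTracker>(visitTracker);

    handler->Stop(); // stops the watchers and releases all cached root objects
}
#endif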

View file

@ -0,0 +1,107 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "../Common/DirectXHelper.h"
#include "../Common/PerceptionTypes.h"
#include "PerceptionDeviceHandler.h"
#include "QRCodeRenderer.h"
#include "QRCodeTracker.h"
namespace
{
using namespace DirectX;
void AppendColoredTriangle(
winrt::Windows::Foundation::Numerics::float3 p0,
winrt::Windows::Foundation::Numerics::float3 p1,
winrt::Windows::Foundation::Numerics::float3 p2,
winrt::Windows::Foundation::Numerics::float3 color,
std::vector<VertexPositionNormalColor>& vertices)
{
VertexPositionNormalColor vertex;
vertex.color = XMFLOAT3(&color.x);
vertex.normal = XMFLOAT3(0.0f, 0.0f, 0.0f);
vertex.pos = XMFLOAT3(&p0.x);
vertices.push_back(vertex);
vertex.pos = XMFLOAT3(&p1.x);
vertices.push_back(vertex);
vertex.pos = XMFLOAT3(&p2.x);
vertices.push_back(vertex);
}
} // namespace
QRCodeRenderer::QRCodeRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources)
: RenderableObject(deviceResources)
{
}
void QRCodeRenderer::Update(
PerceptionDeviceHandler& perceptionDeviceHandler,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem)
{
auto processQRCode = [this, renderingCoordinateSystem](QRCode& code) {
auto codeCS = code.GetCoordinateSystem();
float size = code.GetPhysicalSize();
auto codeToRendering = codeCS.TryGetTransformTo(renderingCoordinateSystem);
if (!codeToRendering)
{
return;
}
auto codeToRenderingV = codeToRendering.Value();
winrt::Windows::Foundation::Numerics::float3 positions[4] = {
{0.0f, 0.0f, 0.0f}, {0.0f, size, 0.0f}, {size, size, 0.0f}, {size, 0.0f, 0.0f}};
for (int i = 0; i < 4; ++i)
{
positions[i] = winrt::Windows::Foundation::Numerics::transform(positions[i], codeToRenderingV);
}
winrt::Windows::Foundation::Numerics::float3 col{1.0f, 1.0f, 0.0f};
AppendColoredTriangle(positions[0], positions[2], positions[1], col, m_vertices);
AppendColoredTriangle(positions[0], positions[3], positions[2], col, m_vertices);
};
auto processQRCodeTracker = [this, processQRCode](QRCodeTracker& tracker) { tracker.ForEachQRCode(processQRCode); };
m_vertices.clear();
perceptionDeviceHandler.ForEachRootObjectOfType<QRCodeTracker>(processQRCodeTracker);
auto modelTransform = winrt::Windows::Foundation::Numerics::float4x4::identity();
UpdateModelConstantBuffer(modelTransform);
}
void QRCodeRenderer::Draw(unsigned int numInstances)
{
if (m_vertices.empty())
{
return;
}
const UINT stride = sizeof(m_vertices[0]);
const UINT offset = 0;
D3D11_SUBRESOURCE_DATA vertexBufferData = {0};
vertexBufferData.pSysMem = m_vertices.data();
const CD3D11_BUFFER_DESC vertexBufferDesc(static_cast<UINT>(m_vertices.size() * stride), D3D11_BIND_VERTEX_BUFFER);
winrt::com_ptr<ID3D11Buffer> vertexBuffer;
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&vertexBufferDesc, &vertexBufferData, vertexBuffer.put()));
m_deviceResources->UseD3DDeviceContext([&](auto context) {
context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
ID3D11Buffer* pBuffer = vertexBuffer.get();
context->IASetVertexBuffers(0, 1, &pBuffer, &stride, &offset);
context->DrawInstanced(static_cast<UINT>(m_vertices.size()), numInstances, offset, 0);
});
}

View file

@ -0,0 +1,35 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "RenderableObject.h"
#include <vector>
#include <winrt/Windows.UI.Input.Spatial.h>
class PerceptionDeviceHandler;
class QRCodeRenderer : public RenderableObject
{
public:
QRCodeRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources);
void Update(
PerceptionDeviceHandler& perceptionDeviceHandler,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem);
private:
void Draw(unsigned int numInstances) override;
private:
std::vector<VertexPositionNormalColor> m_vertices;
};

View file

@ -0,0 +1,337 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "..\pch.h"
#include <winrt/Windows.Perception.Spatial.Preview.h>
#include "QRCodeTracker.h"
#include <set>
QRCode::QRCode(
const GUID& id,
PSPATIAL_GRAPH_QR_CODE_STREAM_INFO streamInfo,
const winrt::Windows::Perception::Spatial::SpatialCoordinateSystem& coordinateSystem)
: m_id(id)
, m_streamInfo(streamInfo)
, m_coordinateSystem(coordinateSystem)
{
}
QRCode::~QRCode()
{
if (m_streamInfo)
{
CoTaskMemFree(m_streamInfo);
}
}
const GUID& QRCode::GetId() const
{
return m_id;
}
float QRCode::GetPhysicalSize() const
{
return m_physicalSizeInMeters;
}
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem QRCode::GetCoordinateSystem() const
{
return m_coordinateSystem;
}
QRCodeTracker::QRCodeTracker(IPerceptionDevice* device, const GUID& typeId, const GUID& objectId)
: PerceptionRootObject(device, typeId, objectId)
{
Start();
}
QRCodeTracker::~QRCodeTracker()
{
Stop();
}
const GUID& QRCodeTracker::GetStaticPropertyId()
{
return SPATIALPROP_QRTrackerObjectId;
}
void QRCodeTracker::Start()
{
std::lock_guard stateLock(m_stateProtect);
if (m_running)
{
return;
}
HRESULT hr = m_device->CreateObjectSubscription(m_objectId, UINT(1), m_qrTrackerSubscription.put());
if (FAILED(hr))
{
Stop();
return;
}
hr = m_device->CreatePropertyListener(GetObjectId(), SPATIALPROP_QRTracker_QRCodesList, m_qrListChangeListener.put());
if (FAILED(hr))
{
Stop();
return;
}
m_propertyChangeHandler = winrt::make_self<PropertyChangeHandler>(*this);
hr = m_qrListChangeListener->SetPropertyChangedHandler(m_propertyChangeHandler.get());
if (FAILED(hr))
{
Stop();
return;
}
hr = m_qrListChangeListener->Start();
if (FAILED(hr))
{
Stop();
return;
}
m_running = true;
}
void QRCodeTracker::Stop()
{
std::lock_guard stateLock(m_stateProtect);
m_running = false;
if (m_qrListChangeListener)
{
m_qrListChangeListener->Stop();
}
for (auto& qrByPointer : m_qrCodesByPointer)
{
QRCode* qrCode = qrByPointer.second.get();
if (qrCode->m_propertyChangedListener)
{
qrCode->m_propertyChangedListener->Stop();
qrCode->m_propertyChangedListener = nullptr;
}
}
if (m_propertyChangeHandler)
{
m_propertyChangeHandler->Dispose();
m_propertyChangeHandler = nullptr;
}
}
HRESULT QRCodeTracker::HandlePropertyChange(IPerceptionDevicePropertyListener* sender, IPerceptionDevicePropertyChangedEventArgs* args)
{
// Change event for QR code list?
if (sender == m_qrListChangeListener.get())
{
const GUID* guids = static_cast<const GUID*>(args->GetValue());
UINT numGuids = args->GetValueSize() / sizeof(GUID);
return HandleQRCodeListChange(guids, numGuids);
}
// Change event for single QR code?
{
std::lock_guard<std::recursive_mutex> stateLock(m_stateProtect);
auto byListenerPos = m_qrCodesByListener.find(sender);
if (byListenerPos != m_qrCodesByListener.end())
{
QRCode* qrCode = byListenerPos->second;
return UpdateQRCode(*qrCode);
}
}
return S_OK;
}
HRESULT QRCodeTracker::HandleQRCodeListChange(const GUID* guids, UINT numGuids)
{
std::lock_guard<std::recursive_mutex> stateLock(m_stateProtect);
if (!m_running)
{
return S_FALSE;
}
// Duplicate the list of known QR code IDs. We'll remove all entries from it that we see in
// the incoming list, and thus will end up with a list of the IDs of all removed QR codes.
std::set<GUID, GUIDComparer> codesNotInList;
for (auto& kv : m_qrCodesByGUID)
{
codesNotInList.insert(kv.first);
}
// Check each QR code on the incoming list, and update the local cache
// with new codes.
for (size_t qrIndex = 0; qrIndex < numGuids; ++qrIndex)
{
const GUID& qrCodeId = guids[qrIndex];
auto it = m_qrCodesByGUID.find(qrCodeId);
if (it != m_qrCodesByGUID.end())
{
// Code is already known.
codesNotInList.erase(qrCodeId);
continue;
}
// Code is new. Read initial state, and add to collections.
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem coordinateSystem{nullptr};
try
{
coordinateSystem =
winrt::Windows::Perception::Spatial::Preview::SpatialGraphInteropPreview::CreateCoordinateSystemForNode(qrCodeId);
}
catch (winrt::hresult_error const& ex)
{
return ex.to_abi();
}
if (coordinateSystem == nullptr)
{
return E_FAIL;
}
void* streamData{nullptr};
UINT streamDataSize{0};
HRESULT hr = m_device->ReadVariableSizeProperty(qrCodeId, SPATIALPROP_QRCode_StreamInfo, &streamDataSize, &streamData, nullptr);
if (FAILED(hr))
{
return hr;
}
if (streamDataSize == 0)
{
CoTaskMemFree(streamData);
return E_FAIL;
}
auto newCode =
std::make_unique<QRCode>(qrCodeId, reinterpret_cast<PSPATIAL_GRAPH_QR_CODE_STREAM_INFO>(streamData), coordinateSystem);
QRCode* qrCode = newCode.get();
m_qrCodesByPointer.emplace(qrCode, std::move(newCode));
m_qrCodesByGUID.emplace(qrCodeId, qrCode);
hr = UpdateQRCode(*qrCode);
if (FAILED(hr))
{
return hr;
}
hr = m_device->CreatePropertyListener(qrCodeId, SPATIALPROP_QRCode_LastSeenTime, qrCode->m_propertyChangedListener.put());
if (FAILED(hr))
{
return hr;
}
if (!m_propertyChangeHandler)
{
return E_UNEXPECTED;
}
hr = qrCode->m_propertyChangedListener->SetPropertyChangedHandler(
m_propertyChangeHandler.as<IPerceptionDevicePropertyChangedHandler>().get());
if (FAILED(hr))
{
return hr;
}
hr = qrCode->m_propertyChangedListener->Start();
if (FAILED(hr))
{
return hr;
}
m_qrCodesByListener.emplace(qrCode->m_propertyChangedListener.get(), qrCode);
}
// Remove all QR codes that have not been seen in this update
for (auto& qrCodeId : codesNotInList)
{
auto byCodeIdPos = m_qrCodesByGUID.find(qrCodeId);
if (byCodeIdPos == m_qrCodesByGUID.end())
{
// Not found (this should never happen)
continue;
}
QRCode* qrCode = byCodeIdPos->second;
m_qrCodesByGUID.erase(byCodeIdPos);
if (qrCode->m_propertyChangedListener)
{
qrCode->m_propertyChangedListener->Stop();
m_qrCodesByListener.erase(qrCode->m_propertyChangedListener.get());
qrCode->m_propertyChangedListener = nullptr;
}
m_qrCodesByPointer.erase(qrCode);
}
return S_OK;
}
HRESULT QRCodeTracker::UpdateQRCode(QRCode& qrCode)
{
float physicalSizeInMeters{0};
HRESULT hr =
m_device->ReadProperty(qrCode.m_id, SPATIALPROP_QRCode_PhysicalSize, sizeof(physicalSizeInMeters), &physicalSizeInMeters, nullptr);
if (FAILED(hr))
{
return hr;
}
qrCode.m_physicalSizeInMeters = physicalSizeInMeters;
LONGLONG lastSeenTime{0};
hr = m_device->ReadProperty(qrCode.m_id, SPATIALPROP_QRCode_LastSeenTime, sizeof(lastSeenTime), &lastSeenTime, nullptr);
if (FAILED(hr))
{
return hr;
}
qrCode.m_lastSeenTime = lastSeenTime;
return S_OK;
}
QRCodeTracker::PropertyChangeHandler::PropertyChangeHandler(QRCodeTracker& owner)
: m_owner(&owner)
{
}
void QRCodeTracker::PropertyChangeHandler::Dispose()
{
m_owner = nullptr;
}
STDMETHODIMP QRCodeTracker::PropertyChangeHandler::Invoke(
_In_ IPerceptionDevicePropertyListener* sender, _In_ IPerceptionDevicePropertyChangedEventArgs* eventArgs)
{
auto owner = m_owner;
if (owner)
{
return owner->HandlePropertyChange(sender, eventArgs);
}
return S_OK;
}

View file

@ -0,0 +1,108 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "../Common/PerceptionTypes.h"
#include "../Common/Utils.h"
#include "PerceptionDeviceHandler.h"
// Represents a single tracked QR code with position, size and last seen time.
class QRCode
{
public:
QRCode(
const GUID& id,
PSPATIAL_GRAPH_QR_CODE_STREAM_INFO streamInfo,
const winrt::Windows::Perception::Spatial::SpatialCoordinateSystem& coordinateSystem);
~QRCode();
const GUID& GetId() const;
float GetPhysicalSize() const;
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem GetCoordinateSystem() const;
private:
friend class QRCodeTracker;
private:
GUID m_id;
PSPATIAL_GRAPH_QR_CODE_STREAM_INFO m_streamInfo;
__int64 m_lastSeenTime{0};
float m_physicalSizeInMeters{0};
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem m_coordinateSystem{nullptr};
winrt::com_ptr<IPerceptionDevicePropertyListener> m_propertyChangedListener;
};
// Manages all active QR codes.
// Listens for events from the perception device to add, remove or update QR codes.
class QRCodeTracker : public PerceptionRootObject
{
public:
QRCodeTracker(IPerceptionDevice* device, const GUID& typeId, const GUID& objectId);
~QRCodeTracker();
// Helper function to iterate all QR codes in a thread-safe manner.
template <typename Func>
void ForEachQRCode(Func& func)
{
std::lock_guard stateLock(m_stateProtect);
for (auto& qrCodeEntry : m_qrCodesByPointer)
{
func(*qrCodeEntry.second.get());
}
}
public:
static const GUID& GetStaticPropertyId();
private:
// Implementation of the IPerceptionDevicePropertyChangedHandler interface.
// Events from the perception device are propagated through an instance of this class.
struct PropertyChangeHandler : winrt::implements<PropertyChangeHandler, IPerceptionDevicePropertyChangedHandler>
{
PropertyChangeHandler(QRCodeTracker& owner);
void Dispose();
STDMETHOD(Invoke)
(_In_ IPerceptionDevicePropertyListener* sender, _In_ IPerceptionDevicePropertyChangedEventArgs* eventArgs) override;
private:
QRCodeTracker* m_owner;
};
friend PropertyChangeHandler;
using QRCodesByPointerMap = std::map<const QRCode*, std::unique_ptr<QRCode>>;
using QRCodesByGUIDMap = std::map<GUID, QRCode*, GUIDComparer>;
using QRCodesByListenerMap = std::map<IPerceptionDevicePropertyListener*, QRCode*>;
private:
void Start();
void Stop();
HRESULT HandlePropertyChange(IPerceptionDevicePropertyListener* sender, IPerceptionDevicePropertyChangedEventArgs* args);
HRESULT HandleQRCodeListChange(const GUID* guids, UINT numGuids);
HRESULT UpdateQRCode(QRCode& qrCode);
private:
std::recursive_mutex m_stateProtect;
bool m_running{false};
winrt::com_ptr<IPerceptionDeviceObjectSubscription> m_qrTrackerSubscription;
winrt::com_ptr<IPerceptionDevicePropertyListener> m_qrListChangeListener;
winrt::com_ptr<PropertyChangeHandler> m_propertyChangeHandler;
QRCodesByPointerMap m_qrCodesByPointer;
QRCodesByGUIDMap m_qrCodesByGUID;
QRCodesByListenerMap m_qrCodesByListener;
};
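// Illustrative usage sketch (not part of the original sample): QRCodeRenderer::Update follows this
// pattern. The callback receives each tracked QRCode by reference and runs while the tracker's
// internal lock is held, so it should do little work and return quickly. The function name below is
// hypothetical.
#if 0 // example only, not compiled
inline void QRCodeTrackerUsageSketch(QRCodeTracker& tracker)
{
    auto visitCode = [](QRCode& code) {
        const float sideLengthInMeters = code.GetPhysicalSize();
        auto coordinateSystem = code.GetCoordinateSystem(); // anchor for placing geometry
        (void)sideLengthInMeters;
        (void)coordinateSystem;
    };
    tracker.ForEachQRCode(visitCode);
}
#endif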

View file

@ -0,0 +1,148 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "RenderableObject.h"
#include "../Common/DirectXHelper.h"
RenderableObject::RenderableObject(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources)
: m_deviceResources(deviceResources)
{
CreateDeviceDependentResources();
}
void RenderableObject::UpdateModelConstantBuffer(const winrt::Windows::Foundation::Numerics::float4x4& modelTransform)
{
winrt::Windows::Foundation::Numerics::float4x4 normalTransform = modelTransform;
normalTransform.m41 = normalTransform.m42 = normalTransform.m43 = 0;
UpdateModelConstantBuffer(modelTransform, normalTransform);
}
void RenderableObject::UpdateModelConstantBuffer(
const winrt::Windows::Foundation::Numerics::float4x4& modelTransform,
const winrt::Windows::Foundation::Numerics::float4x4& normalTransform)
{
if (m_loadingComplete)
{
m_modelConstantBufferData.model = reinterpret_cast<DirectX::XMFLOAT4X4&>(transpose(modelTransform));
m_modelConstantBufferData.normal = reinterpret_cast<DirectX::XMFLOAT4X4&>(transpose(normalTransform));
// Update the model transform buffer for the hologram.
m_deviceResources->UseD3DDeviceContext(
[&](auto context) { context->UpdateSubresource(m_modelConstantBuffer.get(), 0, nullptr, &m_modelConstantBufferData, 0, 0); });
}
}
void RenderableObject::Render(bool isStereo)
{
if (!m_loadingComplete)
{
return;
}
// Use the D3D device context to update Direct3D device-based resources.
m_deviceResources->UseD3DDeviceContext([&](auto context) {
context->IASetInputLayout(m_inputLayout.get());
context->PSSetShader(m_pixelShader.get(), nullptr, 0);
// Attach the vertex shader.
context->VSSetShader(m_vertexShader.get(), nullptr, 0);
// Apply the model constant buffer to the vertex shader.
ID3D11Buffer* pBuffer = m_modelConstantBuffer.get();
context->VSSetConstantBuffers(0, 1, &pBuffer);
if (!m_usingVprtShaders)
{
// On devices that do not support the D3D11_FEATURE_D3D11_OPTIONS3::
// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature,
// a pass-through geometry shader is used to set the render target
// array index.
context->GSSetShader(m_geometryShader.get(), nullptr, 0);
}
Draw(isStereo ? 2 : 1);
});
}
std::future<void> RenderableObject::CreateDeviceDependentResources()
{
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
std::wstring fileNamePrefix = L"";
#else
std::wstring fileNamePrefix = L"ms-appx:///";
#endif
m_usingVprtShaders = m_deviceResources->GetDeviceSupportsVprt();
// On devices that do support the D3D11_FEATURE_D3D11_OPTIONS3::
// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature
// we can avoid using a pass-through geometry shader to set the render
// target array index, thus avoiding any overhead that would be
// incurred by setting the geometry shader stage.
std::wstring vertexShaderFileName = m_usingVprtShaders ? L"hsa_VprtVertexShader.cso" : L"hsa_VertexShader.cso";
// Load shaders asynchronously.
std::vector<byte> vertexShaderFileData = co_await DXHelper::ReadDataAsync(fileNamePrefix + vertexShaderFileName);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateVertexShader(
vertexShaderFileData.data(), vertexShaderFileData.size(), nullptr, m_vertexShader.put()));
constexpr std::array<D3D11_INPUT_ELEMENT_DESC, 3> vertexDesc = {{
{"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"COLOR", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0},
}};
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateInputLayout(
vertexDesc.data(),
static_cast<UINT>(vertexDesc.size()),
vertexShaderFileData.data(),
static_cast<UINT>(vertexShaderFileData.size()),
m_inputLayout.put()));
std::vector<byte> pixelShaderFileData = co_await DXHelper::ReadDataAsync(fileNamePrefix + L"hsa_PixelShader.cso");
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreatePixelShader(
pixelShaderFileData.data(), pixelShaderFileData.size(), nullptr, m_pixelShader.put()));
const ModelConstantBuffer constantBuffer{
reinterpret_cast<DirectX::XMFLOAT4X4&>(winrt::Windows::Foundation::Numerics::float4x4::identity()),
reinterpret_cast<DirectX::XMFLOAT4X4&>(winrt::Windows::Foundation::Numerics::float4x4::identity()),
};
const CD3D11_BUFFER_DESC constantBufferDesc(sizeof(ModelConstantBuffer), D3D11_BIND_CONSTANT_BUFFER);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&constantBufferDesc, nullptr, m_modelConstantBuffer.put()));
if (!m_usingVprtShaders)
{
// Load the pass-through geometry shader.
std::vector<byte> geometryShaderFileData = co_await DXHelper::ReadDataAsync(fileNamePrefix + L"hsa_GeometryShader.cso");
// After the pass-through geometry shader file is loaded, create the shader.
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateGeometryShader(
geometryShaderFileData.data(), geometryShaderFileData.size(), nullptr, m_geometryShader.put()));
}
m_loadingComplete = true;
}
void RenderableObject::ReleaseDeviceDependentResources()
{
m_loadingComplete = false;
m_usingVprtShaders = false;
m_vertexShader = nullptr;
m_inputLayout = nullptr;
m_pixelShader = nullptr;
m_geometryShader = nullptr;
m_modelConstantBuffer = nullptr;
}

View file

@ -0,0 +1,61 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "..\Common\DeviceResources.h"
#include "ShaderStructures.h"
#include <future>
class RenderableObject
{
public:
RenderableObject(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources);
virtual ~RenderableObject()
{
}
virtual std::future<void> CreateDeviceDependentResources();
virtual void ReleaseDeviceDependentResources();
void Render(bool isStereo);
protected:
void UpdateModelConstantBuffer(const winrt::Windows::Foundation::Numerics::float4x4& modelTransform);
void UpdateModelConstantBuffer(
const winrt::Windows::Foundation::Numerics::float4x4& modelTransform,
const winrt::Windows::Foundation::Numerics::float4x4& normalTransform);
virtual void Draw(unsigned int numInstances) = 0;
// Cached pointer to device resources.
std::shared_ptr<DXHelper::DeviceResources> m_deviceResources;
private:
// Direct3D resources for geometry.
winrt::com_ptr<ID3D11InputLayout> m_inputLayout;
winrt::com_ptr<ID3D11VertexShader> m_vertexShader;
winrt::com_ptr<ID3D11GeometryShader> m_geometryShader;
winrt::com_ptr<ID3D11PixelShader> m_pixelShader;
winrt::com_ptr<ID3D11Buffer> m_modelConstantBuffer;
// System resources for geometry.
ModelConstantBuffer m_modelConstantBufferData;
uint32_t m_indexCount = 0;
// Variables used with the rendering loop.
bool m_loadingComplete = false;
// If the current D3D Device supports VPRT, we can avoid using a geometry
// shader just to set the render target array index.
bool m_usingVprtShaders = false;
};
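// Illustrative sketch (not part of the original sample): a concrete renderer only needs to supply
// geometry and implement Draw(); QRCodeRenderer and SceneUnderstandingRenderer in this sample follow
// exactly this pattern. The class name below is hypothetical.
#if 0 // example only, not compiled
class MinimalRenderer : public RenderableObject
{
public:
    using RenderableObject::RenderableObject; // reuse the device-resources constructor

private:
    void Draw(unsigned int numInstances) override
    {
        // Bind a vertex buffer and issue DrawInstanced(vertexCount, numInstances, 0, 0) through
        // m_deviceResources->UseD3DDeviceContext(...), as QRCodeRenderer::Draw does.
    }
};
#endif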

View file

@ -0,0 +1,576 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "../Common/DbgLog.h"
#include "../Common/DirectXHelper.h"
#include "SceneUnderstandingRenderer.h"
#include <winrt/Windows.Perception.Spatial.Preview.h>
using namespace RemotingHostSample;
namespace
{
using namespace DirectX;
void AppendColoredTriangle(
winrt::Windows::Foundation::Numerics::float3 p0,
winrt::Windows::Foundation::Numerics::float3 p1,
winrt::Windows::Foundation::Numerics::float3 p2,
winrt::Windows::Foundation::Numerics::float3 color,
std::vector<VertexPositionNormalColor>& vertices)
{
VertexPositionNormalColor vertex;
vertex.color = XMFLOAT3(&color.x);
vertex.normal = XMFLOAT3(0.0f, 0.0f, 0.0f);
vertex.pos = XMFLOAT3(&p0.x);
vertices.push_back(vertex);
vertex.pos = XMFLOAT3(&p1.x);
vertices.push_back(vertex);
vertex.pos = XMFLOAT3(&p2.x);
vertices.push_back(vertex);
}
} // namespace
// Struct to hold one entity label type entry
struct Label
{
const wchar_t* const Name;
uint32_t Index;
uint8_t RGB[3];
};
// Entity label types
static const Label LabelStorage[] = {
{L"Background", 0, {243, 121, 223}},
{L"Ignore", 255, {255, 255, 255}},
{L"Wall", 1, {243, 126, 121}},
{L"Floor", 2, {187, 243, 121}},
{L"Ceiling", 3, {121, 152, 243}},
{L"Table", 4, {121, 243, 227}},
{L"Chair", 5, {243, 191, 121}},
{L"Window", 6, {121, 243, 146}},
{L"Door", 7, {156, 121, 243}},
{L"Monitor", 8, {2, 159, 253}},
{L"Pillar", 10, {253, 106, 2}},
{L"Couch", 11, {72, 197, 126}},
{L"Whiteboard", 12, {137, 159, 2}},
{L"Beanbag", 13, {206, 112, 74}},
{L"Cabinet", 14, {36, 43, 138}},
{L"Nightstands", 15, {78, 231, 210}},
{L"TVStands", 16, {26, 71, 66}},
{L"Countertops", 17, {13, 60, 55}},
{L"Dressers", 18, {29, 58, 55}},
{L"Bench", 19, {105, 54, 136}},
{L"Ottoman", 20, {99, 9, 44}},
{L"Stool", 21, {255, 204, 153}},
{L"GTEquipment", 22, {206, 199, 74}},
{L"Telephone", 23, {243, 217, 121}},
{L"Bookshelf", 24, {37, 117, 164}},
{L"Laptop", 25, {96, 147, 234}},
{L"Stanchion", 26, {29, 117, 40}},
{L"Markers", 27, {111, 93, 167}},
{L"Controller", 28, {230, 254, 251}},
{L"Stairs", 9, {43, 174, 100}},
{L"Empty", 254, {0, 0, 0}},
{L"Appliances-CeilingLight", 30, {250, 24, 180}},
{L"Appliances-DishWasher", 32, {38, 204, 168}},
{L"Appliances-FloorLamp", 34, {106, 134, 187}},
{L"Appliances-Lighting", 36, {156, 162, 56}},
{L"Appliances-Microwave", 37, {6, 44, 91}},
{L"Appliances-NotSpecified", 38, {35, 188, 199}},
{L"Appliances-Oven", 39, {153, 60, 52}},
{L"Appliances-SmallAppliances", 40, {255, 83, 112}},
{L"Appliances-Stove", 41, {76, 175, 147}},
{L"Appliances-Toaster", 42, {145, 58, 23}},
{L"Appliances-WashingMachine", 44, {46, 66, 12}},
{L"Appliances-DeskLamp", 45, {128, 86, 177}},
{L"Appliances-Dryer", 46, {239, 162, 164}},
{L"Appliances-Fridge", 47, {87, 243, 139}},
{L"Appliances-WallLight", 50, {222, 49, 1}},
{L"Bed-BunkBed", 51, {97, 174, 71}},
{L"Bed-DoubleBed", 52, {85, 195, 111}},
{L"Bed-NotSpecified", 53, {212, 26, 75}},
{L"Bed-SingleBed", 54, {200, 219, 241}},
{L"Ceiling-Unassigned", 55, {48, 120, 115}},
{L"Ceiling-NotSpecified", 56, {205, 144, 139}},
{L"Chair-Beanbag", 57, {136, 175, 192}},
{L"Chair-Bench", 58, {89, 41, 203}},
{L"Chair-ArmChair", 59, {192, 1, 27}},
{L"Chair-ArmOfAChair", 60, {194, 241, 101}},
{L"Chair-BarStool", 61, {146, 21, 8}},
{L"Chair-ChaiseLounge", 62, {178, 31, 121}},
{L"Chair-DiningChair", 63, {76, 10, 219}},
{L"Chair-LoungeChair", 64, {174, 165, 77}},
{L"Chair-NotSpecified", 65, {186, 217, 58}},
{L"Chair-OfficeChair", 66, {177, 29, 181}},
{L"Chair-Unknown", 67, {155, 128, 196}},
{L"Chair-Ottoman", 68, {28, 75, 247}},
{L"Chair-Stool", 69, {60, 243, 241}},
{L"Door-DoubleDoors", 70, {220, 101, 83}},
{L"Door-NotSpecified", 71, {219, 20, 187}},
{L"Door-Revolving", 72, {211, 229, 158}},
{L"Door-SingleDoor", 73, {10, 100, 12}},
{L"Door-Sliding", 74, {73, 197, 108}},
{L"Electronics-Desktop", 75, {181, 22, 191}},
{L"Electronics-DVDPlayer", 76, {5, 131, 13}},
{L"Electronics-Headphones", 77, {169, 60, 180}},
{L"Electronics-Keyboard", 78, {6, 92, 79}},
{L"Electronics-Laptop", 79, {252, 108, 50}},
{L"Electronics-Mobile", 80, {35, 73, 64}},
{L"Electronics-Mouse", 81, {3, 112, 214}},
{L"Electronics-Mousepad", 82, {106, 70, 62}},
{L"Electronics-NotSpecified", 83, {63, 100, 209}},
{L"Electronics-Phone", 84, {64, 32, 142}},
{L"Electronics-Printer", 85, {70, 188, 0}},
{L"Electronics-Projector", 86, {72, 100, 38}},
{L"Electronics-Speakers", 87, {202, 60, 135}},
{L"Electronics-Tablet", 88, {126, 2, 49}},
{L"Electronics-TVMonitor", 89, {188, 184, 46}},
{L"Electronics-Xbox", 90, {6, 218, 26}},
{L"Electronics-Monitor", 91, {179, 160, 177}},
{L"Floor-Unassigned", 92, {9, 42, 145}},
{L"Human-Female", 93, {52, 156, 230}},
{L"Human-Male", 94, {231, 88, 138}},
{L"Human-Other", 95, {0, 0, 255}},
{L"NotSpecified-Ax", 96, {230, 228, 24}},
{L"NotSpecified-Backpack", 97, {228, 104, 245}},
{L"NotSpecified-Bag", 98, {215, 41, 202}},
{L"NotSpecified-Barbell", 99, {100, 125, 112}},
{L"NotSpecified-BlackBoard", 100, {65, 166, 116}},
{L"NotSpecified-Bottle", 101, {140, 68, 191}},
{L"NotSpecified-box", 102, {145, 146, 89}},
{L"NotSpecified-Cable", 103, {170, 1, 118}},
{L"NotSpecified-Can", 104, {205, 195, 201}},
{L"NotSpecified-Cart", 105, {156, 159, 0}},
{L"NotSpecified-case", 106, {208, 70, 137}},
{L"NotSpecified-CeilingFan", 107, {9, 227, 245}},
{L"NotSpecified-Clothes", 108, {181, 123, 192}},
{L"NotSpecified-Coat", 109, {189, 249, 62}},
{L"NotSpecified-Coatrack", 110, {136, 15, 19}},
{L"NotSpecified-CorkBoard", 111, {167, 98, 139}},
{L"NotSpecified-CounterTop", 112, {6, 14, 93}},
{L"NotSpecified-Drawers", 113, {216, 156, 242}},
{L"NotSpecified-Drinkcontainer", 114, {238, 153, 75}},
{L"NotSpecified-Dumbbell", 115, {183, 111, 41}},
{L"NotSpecified-ElectricalOutlet", 116, {191, 199, 36}},
{L"NotSpecified-ElectricalSwitch", 117, {31, 81, 127}},
{L"NotSpecified-Elliptical", 118, {244, 92, 59}},
{L"NotSpecified-Food", 119, {221, 210, 211}},
{L"NotSpecified-Footwear", 120, {163, 245, 159}},
{L"NotSpecified-Hammer", 121, {118, 176, 85}},
{L"NotSpecified-LaptopBag", 122, {225, 32, 60}},
{L"NotSpecified-LIDAR", 123, {26, 105, 172}},
{L"NotSpecified-Mannequin", 124, {131, 135, 194}},
{L"NotSpecified-Markers", 125, {124, 23, 155}},
{L"NotSpecified-Microscope", 126, {128, 143, 248}},
{L"NotSpecified-NDI", 127, {220, 39, 237}},
{L"NotSpecified-Pinwheel", 128, {155, 24, 46}},
{L"NotSpecified-PunchingBag", 129, {152, 215, 122}},
{L"NotSpecified-Shower", 130, {78, 243, 86}},
{L"NotSpecified-Sign", 131, {29, 159, 136}},
{L"NotSpecified-Sink", 132, {209, 19, 236}},
{L"NotSpecified-Sissors", 133, {31, 229, 162}},
{L"NotSpecified-Sphere", 134, {151, 86, 155}},
{L"NotSpecified-StairClimber", 135, {52, 236, 130}},
{L"NotSpecified-stanchion", 136, {6, 76, 221}},
{L"NotSpecified-Stand", 137, {2, 12, 172}},
{L"NotSpecified-StationaryBike", 138, {69, 190, 196}},
{L"NotSpecified-Tape", 139, {176, 3, 131}},
{L"NotSpecified-Thermostat", 140, {33, 22, 47}},
{L"NotSpecified-Toilet", 141, {107, 45, 152}},
{L"NotSpecified-TrashCan", 142, {128, 72, 143}},
{L"NotSpecified-Tripod", 143, {225, 31, 162}},
{L"NotSpecified-Tub", 144, {110, 147, 77}},
{L"NotSpecified-Vent", 145, {137, 170, 110}},
{L"NotSpecified-WeightBench", 146, {183, 79, 90}},
{L"NotSpecified-Wire", 147, {0, 255, 38}},
{L"NotSpecified-Wrench", 148, {116, 3, 22}},
{L"NotSpecified-Pillar", 149, {128, 184, 144}},
{L"NotSpecified-Whiteboard", 150, {94, 240, 206}},
{L"Plant-Fake", 151, {216, 230, 169}},
{L"Plant-NotSpecified", 152, {182, 43, 63}},
{L"Plant-Organic", 153, {197, 86, 148}},
{L"Props-Book", 154, {247, 3, 157}},
{L"Props-Cushion", 155, {13, 94, 49}},
{L"Props-FloorVase", 156, {55, 213, 231}},
{L"Props-FlowerPot", 157, {239, 172, 43}},
{L"Props-Magazine", 158, {138, 164, 178}},
{L"Props-Mirror", 159, {116, 236, 157}},
{L"Props-NewsPaper", 160, {62, 80, 43}},
{L"Props-NotSpecified", 161, {9, 106, 45}},
{L"Props-Paintings", 162, {164, 117, 118}},
{L"Props-PaperSheet", 163, {85, 190, 229}},
{L"Props-PhotoFrame", 164, {18, 95, 80}},
{L"Props-Rug", 165, {192, 82, 167}},
{L"Props-Sculpture", 166, {130, 15, 64}},
{L"Props-Toys", 167, {136, 130, 225}},
{L"Sofa-ChaiseLounge", 168, {241, 154, 12}},
{L"Sofa-NotSpecified", 169, {113, 197, 139}},
{L"Sofa-Sectional", 170, {24, 132, 64}},
{L"Sofa-Straight", 171, {248, 137, 194}},
{L"Storage-Bookshelf", 172, {4, 69, 174}},
{L"Storage-ChinaCabinet", 173, {216, 165, 83}},
{L"Storage-Dresser", 174, {156, 24, 110}},
{L"Storage-FileCabinet", 175, {78, 78, 12}},
{L"Storage-MediaCabinet", 176, {168, 234, 45}},
{L"Storage-NotSpecified", 177, {29, 232, 238}},
{L"Storage-Rack", 178, {161, 36, 92}},
{L"Storage-Shelf", 179, {57, 187, 87}},
{L"Storage-Cabinet", 180, {164, 23, 45}},
{L"Storage-Stairs", 181, {10, 13, 61}},
{L"Table-CoffeeTable", 182, {178, 214, 30}},
{L"Table-ConferenceTable", 183, {25, 153, 182}},
{L"Table-Desk", 184, {171, 128, 231}},
{L"Table-DiningTable", 185, {12, 169, 156}},
{L"Table-Nightstand", 186, {247, 131, 122}},
{L"Table-NotSpecified", 187, {227, 214, 90}},
{L"Table-OfficeDesk", 188, {122, 253, 7}},
{L"Table-OfficeTable", 189, {6, 20, 5}},
{L"Table-SideTable", 190, {230, 211, 253}},
{L"Unassigned-Unassigned", 191, {141, 204, 180}},
{L"Utensils-Bowl", 192, {108, 89, 46}},
{L"Utensils-Cups", 193, {90, 250, 131}},
{L"Utensils-Knife", 194, {28, 67, 176}},
{L"Utensils-Mug", 195, {152, 218, 150}},
{L"Utensils-NotSpecified", 196, {211, 96, 157}},
{L"Utensils-Pans", 197, {73, 159, 109}},
{L"Utensils-Pots", 198, {7, 193, 112}},
{L"Utensils-Tray", 199, {60, 152, 1}},
{L"Vehicle-Car", 200, {189, 149, 61}},
{L"Vehicle-MotorCycle", 201, {2, 164, 102}},
{L"Vehicle-Segway", 202, {198, 165, 85}},
{L"Vehicle-Truck", 203, {134, 46, 106}},
{L"Wall-Blinds", 204, {9, 13, 13}},
{L"Wall-Curtain", 205, {52, 74, 241}},
{L"Wall-Unassigned", 206, {83, 158, 59}},
{L"Wall-Window", 207, {117, 162, 84}},
{L"Storage-BathroomVanity", 208, {127, 151, 35}},
{L"NotSpecified-Unassigned", 209, {143, 133, 123}},
{L"Storage-Nightstand", 210, {181, 112, 177}},
{L"Storage-Unassigned", 211, {73, 125, 140}},
{L"Props-Unassigned", 212, {156, 127, 134}},
{L"Storage-ArmChair", 213, {102, 111, 19}},
{L"NotSpecified-LaundryBasket", 214, {106, 168, 192}},
{L"Props-Decorations", 215, {49, 242, 177}},
{L"NotSpecified-Fireplace", 216, {96, 128, 236}},
{L"NotSpecified-Drinkware", 217, {6, 247, 22}},
{L"Sofa-LoungeChair", 218, {167, 92, 66}},
{L"NotSpecified-NotSpecified", 219, {174, 127, 40}},
{L"Mouse", 220, {65, 33, 210}},
{L"Bag", 221, {168, 71, 185}},
{L"Fridge", 222, {255, 127, 94}},
{L"Stand", 223, {246, 160, 193}},
{L"Sign", 224, {143, 221, 54}},
{L"Sphere", 225, {255, 207, 172}},
{L"Tripod", 227, {255, 235, 46}},
{L"PinWheel", 228, {13, 92, 139}},
{L"Kart", 229, {49, 3, 27}},
{L"Box", 230, {134, 215, 144}},
{L"Light", 231, {140, 3, 56}},
{L"Keyboard ", 232, {7, 66, 58}},
{L"Scupture", 233, {240, 191, 82}},
{L"Lamp", 234, {189, 8, 78}},
{L"Microscope ", 235, {255, 211, 112}},
{L"Case ", 236, {59, 155, 70}},
{L"Ax", 237, {157, 117, 29}},
{L"Manikin_Parts ", 238, {67, 141, 186}},
{L"Clothing ", 239, {4, 122, 55}},
{L"CoatRack", 240, {211, 52, 114}},
{L"DrinkContainer ", 241, {35, 23, 0}},
{L"MousePad", 242, {68, 28, 0}},
{L"Tape", 243, {107, 173, 211}},
{L"Sissors ", 245, {53, 24, 143}},
{L"Headphones ", 246, {45, 212, 189}},
};
constexpr auto NumLabels = sizeof(LabelStorage) / sizeof(Label);
// Dictionary to quickly access labels by numeric label ID
using LabelDictionary = std::map<uint32_t, const Label*>;
static LabelDictionary Labels;
SceneUnderstandingRenderer::SceneUnderstandingRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources)
: RenderableObject(deviceResources)
{
// If the label dictionary is still empty, build it from static data
if (Labels.empty())
{
for (size_t i = 0; i < NumLabels; ++i)
{
Labels[LabelStorage[i].Index] = &LabelStorage[i];
}
}
}
void SceneUnderstandingRenderer::Update(
winrt::SceneUnderstanding::SceneProcessor& sceneProcessor,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem,
winrt::Windows::Perception::Spatial::SpatialStationaryFrameOfReference lastUpdateLocation)
{
m_vertices.clear();
// Calculate the head position at the time of the last SU update in render space. This information can be
// used for debug rendering.
winrt::Windows::Foundation::IReference<winrt::Windows::Foundation::Numerics::float3> lastUpdatePosInRenderSpace;
if (lastUpdateLocation)
{
auto lastUpdateCS = lastUpdateLocation.CoordinateSystem();
auto lastUpdateLocationToRender = lastUpdateCS.TryGetTransformTo(renderingCoordinateSystem);
if (lastUpdateLocationToRender)
{
lastUpdatePosInRenderSpace = winrt::Windows::Foundation::Numerics::transform({0, 0, 0}, lastUpdateLocationToRender.Value());
}
}
// Lambda to execute for each quad returned by SU. Adds the quad to the vertex buffer for rendering, using
// the color indicated by the label dictionary for the quad's owner entity's type. Optionally draws debug
// rays from the last update position to each quad.
auto processQuadForRendering = [this, &renderingCoordinateSystem, &lastUpdatePosInRenderSpace](
const winrt::SceneUnderstanding::Entity& entity,
const winrt::SceneUnderstanding::Quad& quad,
const winrt::Windows::Foundation::Numerics::float4x4& entityToAnchorTransform,
const winrt::Windows::Perception::Spatial::SpatialCoordinateSystem& entityAnchorCS) {
// Determine the transform to go from entity space to rendering space
winrt::Windows::Foundation::IReference<winrt::Windows::Foundation::Numerics::float4x4> anchorToRenderingRef =
entityAnchorCS.TryGetTransformTo(renderingCoordinateSystem);
if (!anchorToRenderingRef)
{
return;
}
winrt::Windows::Foundation::Numerics::float4x4 anchorToRenderingTransform = anchorToRenderingRef.Value();
winrt::Windows::Foundation::Numerics::float4x4 entityToRenderingTransform = entityToAnchorTransform * anchorToRenderingTransform;
// Create the quad's corner points in entity space and transform them to rendering space
const float width = quad.WidthInMeters();
const float height = quad.HeightInMeters();
winrt::Windows::Foundation::Numerics::float3 positions[4] = {
{-width / 2, -height / 2, 0.0f}, {width / 2, -height / 2, 0.0f}, {-width / 2, height / 2, 0.0f}, {width / 2, height / 2, 0.0f}};
for (int i = 0; i < 4; ++i)
{
positions[i] = winrt::Windows::Foundation::Numerics::transform(positions[i], entityToRenderingTransform);
}
// Determine the color with which to draw the quad
winrt::Windows::Foundation::Numerics::float3 color{1.0f, 1.0f, 0.0f};
auto labelPos = Labels.find(static_cast<uint32_t>(entity.Label()));
if (labelPos != Labels.end())
{
const Label& label = *labelPos->second;
color = {label.RGB[0] / 255.0f, label.RGB[1] / 255.0f, label.RGB[2] / 255.0f};
}
// Add triangles to render the quad (both winding orders to guarantee double-sided rendering)
AppendColoredTriangle(positions[0], positions[3], positions[1], color, m_vertices);
AppendColoredTriangle(positions[0], positions[2], positions[3], color, m_vertices);
AppendColoredTriangle(positions[1], positions[3], positions[0], color, m_vertices);
AppendColoredTriangle(positions[3], positions[2], positions[0], color, m_vertices);
/** /
// Debug code: draw a ray from the last update position to the center of each visible quad
if (lastUpdatePosInRenderSpace)
{
float rayEndRadius = 0.05f;
winrt::Windows::Foundation::Numerics::float3 rayEndPositions[4] = {
{-rayEndRadius, 0.0f, 0.0f},
{rayEndRadius, 0.0f, 0.0f},
{0.0f, -rayEndRadius, 0.0f},
{0.0f, rayEndRadius, 0.0f},
};
for (int i = 0; i < 4; ++i)
{
rayEndPositions[i] = winrt::Windows::Foundation::Numerics::transform(rayEndPositions[i], entityToRenderingTransform);
}
AppendColoredTriangle(lastUpdatePosInRenderSpace.Value(), rayEndPositions[0], rayEndPositions[1], color, m_vertices);
AppendColoredTriangle(lastUpdatePosInRenderSpace.Value(), rayEndPositions[1], rayEndPositions[0], color, m_vertices);
AppendColoredTriangle(lastUpdatePosInRenderSpace.Value(), rayEndPositions[2], rayEndPositions[3], color, m_vertices);
AppendColoredTriangle(lastUpdatePosInRenderSpace.Value(), rayEndPositions[3], rayEndPositions[2], color, m_vertices);
}
/**/
};
// Execute the above lambda for each quad known by the SceneProcessor
ForEachQuad(sceneProcessor, processQuadForRendering);
// The geometry we added is already in rendering space, so the model transform must be identity.
auto modelTransform = winrt::Windows::Foundation::Numerics::float4x4::identity();
UpdateModelConstantBuffer(modelTransform);
}
void SceneUnderstandingRenderer::DebugLogState(
winrt::SceneUnderstanding::SceneProcessor& sceneProcessor,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem,
winrt::Windows::Perception::Spatial::SpatialStationaryFrameOfReference lastUpdateLocation)
{
// Calculate the head position at the time of the last SU update in render space. This information can be
// used for debug rendering.
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem lastUpdateCS{nullptr};
winrt::Windows::Foundation::IReference<winrt::Windows::Foundation::Numerics::float3> lastUpdatePosInRenderSpaceRef;
if (lastUpdateLocation)
{
lastUpdateCS = lastUpdateLocation.CoordinateSystem();
auto lastUpdateLocationToRender = lastUpdateCS.TryGetTransformTo(renderingCoordinateSystem);
if (lastUpdateLocationToRender)
{
lastUpdatePosInRenderSpaceRef = winrt::Windows::Foundation::Numerics::transform({0, 0, 0}, lastUpdateLocationToRender.Value());
}
}
if (!lastUpdatePosInRenderSpaceRef)
{
return;
}
winrt::Windows::Foundation::Numerics::float3 lastUpdatePosInRenderSpace = lastUpdatePosInRenderSpaceRef.Value();
auto logQuad = [this, &lastUpdateCS](
const winrt::SceneUnderstanding::Entity& entity,
const winrt::SceneUnderstanding::Quad& quad,
const winrt::Windows::Foundation::Numerics::float4x4& entityToAnchorTransform,
const winrt::Windows::Perception::Spatial::SpatialCoordinateSystem& entityAnchorCS) {
// Determine transform from entity space to last update pose space
winrt::Windows::Foundation::IReference<winrt::Windows::Foundation::Numerics::float4x4> anchorToLastUpdateRef =
entityAnchorCS.TryGetTransformTo(lastUpdateCS);
if (!anchorToLastUpdateRef)
{
return;
}
winrt::Windows::Foundation::Numerics::float4x4 anchorToLastUpdateTransform = anchorToLastUpdateRef.Value();
winrt::Windows::Foundation::Numerics::float4x4 entityToLastUpdateTransform = entityToAnchorTransform * anchorToLastUpdateTransform;
// Determine various sizes, position, and distance from head
const float width = quad.WidthInMeters();
const float height = quad.HeightInMeters();
const float radius = sqrtf(width * width + height * height) / 2;
const winrt::Windows::Foundation::Numerics::float3 position = winrt::Windows::Foundation::Numerics::transform(
winrt::Windows::Foundation::Numerics::float3::zero(), entityToLastUpdateTransform);
const float distance = winrt::Windows::Foundation::Numerics::length(position);
const wchar_t* labelName = L"<unknown>";
auto labelPos = Labels.find(static_cast<uint32_t>(entity.Label()));
if (labelPos != Labels.end())
{
const Label& label = *labelPos->second;
labelName = label.Name;
}
DebugLog(
L" %s (%.2f x %.2f m, radius: %.2f m) at %.2f;%.2f;%.2f (distance: %.2f m)",
labelName,
width,
height,
radius,
position.x,
position.y,
position.z,
distance);
};
DebugLog(L"--- SU Update ---");
DebugLog(
L" Update position (in root space): (%.2f; %.2f; %.2f)",
lastUpdatePosInRenderSpace.x,
lastUpdatePosInRenderSpace.y,
lastUpdatePosInRenderSpace.z);
DebugLog(L" Quads (in head pose space):");
ForEachQuad(sceneProcessor, logQuad);
}
void SceneUnderstandingRenderer::Draw(unsigned int numInstances)
{
if (m_vertices.empty())
{
return;
}
const UINT stride = sizeof(m_vertices[0]);
const UINT offset = 0;
D3D11_SUBRESOURCE_DATA vertexBufferData = {0};
vertexBufferData.pSysMem = m_vertices.data();
const CD3D11_BUFFER_DESC vertexBufferDesc(static_cast<UINT>(m_vertices.size() * stride), D3D11_BIND_VERTEX_BUFFER);
winrt::com_ptr<ID3D11Buffer> vertexBuffer;
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&vertexBufferDesc, &vertexBufferData, vertexBuffer.put()));
auto context = m_deviceResources->GetD3DDeviceContext();
context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
ID3D11Buffer* pBuffer = vertexBuffer.get();
context->IASetVertexBuffers(0, 1, &pBuffer, &stride, &offset);
context->DrawInstanced(static_cast<UINT>(m_vertices.size()), numInstances, offset, 0);
}
template <typename Func>
void SceneUnderstandingRenderer::ForEachQuad(winrt::SceneUnderstanding::SceneProcessor& sceneProcessor, Func f)
{
// Collect all components, then iterate to find quad entities
winrt::com_array<winrt::SceneUnderstanding::Component> components;
sceneProcessor.GetAllComponents(components);
for (auto& component : components)
{
winrt::SceneUnderstanding::Entity entity = component.try_as<winrt::SceneUnderstanding::Entity>();
if (!entity)
{
continue;
}
winrt::SceneUnderstanding::Quad quad{nullptr};
winrt::SceneUnderstanding::Transform transform{nullptr};
winrt::SceneUnderstanding::SpatialCoordinateSystem spatialCS{nullptr};
winrt::com_array<winrt::SceneUnderstanding::Id> associatedComponentIds;
entity.GetAllAssociatedComponentIds(associatedComponentIds);
for (auto& id : associatedComponentIds)
{
winrt::SceneUnderstanding::Component ac = sceneProcessor.GetComponent(id);
if (auto q = ac.try_as<winrt::SceneUnderstanding::Quad>())
{
quad = q;
continue;
}
if (auto t = ac.try_as<winrt::SceneUnderstanding::Transform>())
{
transform = t;
continue;
}
if (auto s = ac.try_as<winrt::SceneUnderstanding::SpatialCoordinateSystem>())
{
spatialCS = s;
continue;
}
}
// Don't proceed if any essential bit of data is missing
if (!quad || !transform || !spatialCS)
{
continue;
}
// Determine the transform from the entity to its anchor
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem entityAnchorCS{nullptr};
try
{
entityAnchorCS = winrt::Windows::Perception::Spatial::Preview::SpatialGraphInteropPreview::CreateCoordinateSystemForNode(
spatialCS.SpatialCoordinateGuid());
}
catch (const winrt::hresult_error&)
{
continue;
}
winrt::Windows::Foundation::Numerics::float4x4 entityToAnchorTransform = transform.TransformationMatrix();
f(entity, quad, entityToAnchorTransform, entityAnchorCS);
}
}

View file

@ -0,0 +1,47 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "RenderableObject.h"
#include <vector>
#include <winrt/SceneUnderstanding.h>
#include <winrt/Windows.UI.Input.Spatial.h>
namespace RemotingHostSample
{
class SceneUnderstandingRenderer : public RenderableObject
{
public:
SceneUnderstandingRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources);
void Update(
winrt::SceneUnderstanding::SceneProcessor& sceneProcessor,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem,
winrt::Windows::Perception::Spatial::SpatialStationaryFrameOfReference lastUpdateLocation);
void DebugLogState(
winrt::SceneUnderstanding::SceneProcessor& sceneProcessor,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem,
winrt::Windows::Perception::Spatial::SpatialStationaryFrameOfReference lastUpdateLocation);
private:
void Draw(unsigned int numInstances) override;
template <typename Func>
void ForEachQuad(winrt::SceneUnderstanding::SceneProcessor& sceneProcessor, Func f);
private:
std::vector<VertexPositionNormalColor> m_vertices;
};
} // namespace RemotingHostSample

View file

@ -0,0 +1,33 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
// Constant buffer used to send hologram position transform to the shader pipeline.
struct ModelConstantBuffer
{
DirectX::XMFLOAT4X4 model;
DirectX::XMFLOAT4X4 normal; // Normal transform matrix.
};
// Assert that the constant buffer remains 16-byte aligned (best practice).
static_assert(
(sizeof(ModelConstantBuffer) % (sizeof(float) * 4)) == 0,
"Model constant buffer size must be 16-byte aligned (16 bytes is the length of four floats).");
// Used to send per-vertex data to the vertex shader.
struct VertexPositionNormalColor
{
DirectX::XMFLOAT3 pos;
DirectX::XMFLOAT3 normal; // Normal.
DirectX::XMFLOAT3 color;
};
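// Illustrative sketch (not part of the original sample): the input layout created in
// RenderableObject::CreateDeviceDependentResources hard-codes the byte offsets 0, 12 and 24 for
// POSITION, NORMAL and COLOR. These asserts spell out that coupling with the struct above
// (DirectX::XMFLOAT3 is 12 bytes).
#if 0 // example only, not compiled
static_assert(offsetof(VertexPositionNormalColor, pos) == 0, "POSITION is read from byte offset 0");
static_assert(offsetof(VertexPositionNormalColor, normal) == 12, "NORMAL is read from byte offset 12");
static_assert(offsetof(VertexPositionNormalColor, color) == 24, "COLOR is read from byte offset 24");
static_assert(sizeof(VertexPositionNormalColor) == 36, "vertex stride used by the renderers");
#endif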

View file

@ -0,0 +1,193 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "SpatialInputHandler.h"
#include <winrt/Windows.UI.Input.Spatial.h>
// Creates and initializes a GestureRecognizer that listens for spatial input gestures.
SpatialInputHandler::SpatialInputHandler()
{
// The interaction manager provides an event that informs the app when spatial interactions are detected.
m_interactionManager = winrt::Windows::UI::Input::Spatial::SpatialInteractionManager::GetForCurrentView();
m_gestureRecognizer = winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer(
winrt::Windows::UI::Input::Spatial::SpatialGestureSettings::Tap |
winrt::Windows::UI::Input::Spatial::SpatialGestureSettings::ManipulationTranslate);
m_interactionDetectedEventToken =
m_interactionManager.InteractionDetected(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialInteractionManager,
winrt::Windows::UI::Input::Spatial::SpatialInteractionDetectedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialInteractionManager,
winrt::Windows::UI::Input::Spatial::SpatialInteractionDetectedEventArgs args) {
m_gestureRecognizer.CaptureInteraction(args.Interaction());
}));
m_tappedEventToken = m_gestureRecognizer.Tapped(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialTappedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer, winrt::Windows::UI::Input::Spatial::SpatialTappedEventArgs args) {
std::lock_guard _lg(m_manipulationStateLock);
m_tapped = args;
}));
m_manipulationStartedEventToken =
m_gestureRecognizer.ManipulationStarted(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationStartedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationStartedEventArgs args) {
std::lock_guard _lg(m_manipulationStateLock);
m_manipulationStarted = args;
}));
m_manipulationUpdatedEventToken =
m_gestureRecognizer.ManipulationUpdated(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationUpdatedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationUpdatedEventArgs args) {
std::lock_guard _lg(m_manipulationStateLock);
m_manipulationUpdated = args;
}));
m_manipulationCompletedEventToken =
m_gestureRecognizer.ManipulationCompleted(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationCompletedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationCompletedEventArgs) {
std::lock_guard _lg(m_manipulationStateLock);
m_manipulationResult = ManipulationResult::Completed;
}));
m_manipulationCanceledEventToken =
m_gestureRecognizer.ManipulationCanceled(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationCanceledEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialManipulationCanceledEventArgs) {
std::lock_guard _lg(m_manipulationStateLock);
m_manipulationResult = ManipulationResult::Canceled;
}));
m_navigationStartedEventToken =
m_gestureRecognizer.NavigationStarted(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationStartedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationStartedEventArgs args) {
char buf[128];
sprintf_s(
buf,
"NS: %d %d %d\n",
static_cast<int>(args.IsNavigatingX()),
static_cast<int>(args.IsNavigatingY()),
static_cast<int>(args.IsNavigatingZ()));
OutputDebugStringA(buf);
}));
m_navigationUpdatedEventToken =
m_gestureRecognizer.NavigationUpdated(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationUpdatedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationUpdatedEventArgs args) {
winrt::Windows::Foundation::Numerics::float3 offset = args.NormalizedOffset();
char buf[128];
sprintf_s(buf, "NU: %f %f %f\n", offset.x, offset.y, offset.z);
OutputDebugStringA(buf);
}));
m_navigationCompletedEventToken =
m_gestureRecognizer.NavigationCompleted(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationCompletedEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationCompletedEventArgs args) {
winrt::Windows::Foundation::Numerics::float3 offset = args.NormalizedOffset();
char buf[128];
sprintf_s(buf, "NC: %f %f %f\n", offset.x, offset.y, offset.z);
OutputDebugStringA(buf);
}));
m_navigationCanceledEventToken =
m_gestureRecognizer.NavigationCanceled(winrt::Windows::Foundation::TypedEventHandler<
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationCanceledEventArgs>(
[this](
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer,
winrt::Windows::UI::Input::Spatial::SpatialNavigationCanceledEventArgs args) {
char buf[128];
sprintf_s(buf, "N: canceled\n");
OutputDebugStringA(buf);
}));
}
SpatialInputHandler::~SpatialInputHandler()
{
    // Unregister our handlers for the interaction and gesture events.
    m_interactionManager.InteractionDetected(m_interactionDetectedEventToken);
    m_gestureRecognizer.Tapped(m_tappedEventToken);
    m_gestureRecognizer.ManipulationStarted(m_manipulationStartedEventToken);
    m_gestureRecognizer.ManipulationUpdated(m_manipulationUpdatedEventToken);
    m_gestureRecognizer.ManipulationCompleted(m_manipulationCompletedEventToken);
    m_gestureRecognizer.ManipulationCanceled(m_manipulationCanceledEventToken);
    m_gestureRecognizer.NavigationStarted(m_navigationStartedEventToken);
    m_gestureRecognizer.NavigationUpdated(m_navigationUpdatedEventToken);
    m_gestureRecognizer.NavigationCompleted(m_navigationCompletedEventToken);
    m_gestureRecognizer.NavigationCanceled(m_navigationCanceledEventToken);
}
// Checks if the user performed an input gesture since the last call to this method.
// Allows the main update loop to check for asynchronous changes to the user
// input state.
winrt::Windows::UI::Input::Spatial::SpatialTappedEventArgs SpatialInputHandler::CheckForTapped()
{
std::lock_guard _lg(m_manipulationStateLock);
auto tapped = m_tapped;
m_tapped = nullptr;
return tapped;
}
winrt::Windows::UI::Input::Spatial::SpatialManipulationStartedEventArgs SpatialInputHandler::CheckForManipulationStarted()
{
std::lock_guard _lg(m_manipulationStateLock);
auto manipulationStarted = m_manipulationStarted;
m_manipulationStarted = nullptr;
return manipulationStarted;
}
winrt::Windows::UI::Input::Spatial::SpatialManipulationUpdatedEventArgs SpatialInputHandler::CheckForManipulationUpdated()
{
std::lock_guard _lg(m_manipulationStateLock);
auto manipulationUpdated = m_manipulationUpdated;
m_manipulationUpdated = nullptr;
return manipulationUpdated;
}
SpatialInputHandler::ManipulationResult SpatialInputHandler::CheckForManipulationResult()
{
std::lock_guard _lg(m_manipulationStateLock);
auto manipulationResult = m_manipulationResult;
m_manipulationResult = ManipulationResult::Unknown;
return manipulationResult;
}
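The CheckFor* methods above implement a simple poll-and-clear pattern: each call returns the most recently cached event arguments (or nullptr) and resets them under the lock. A hedged usage sketch, assuming a per-frame polling function on the caller's side; the function name and the reactions in the comments are illustrative, not part of the sample:
#include "SpatialInputHandler.h"
void PollSpatialInput(SpatialInputHandler& input) // hypothetical caller, e.g. invoked once per frame
{
    if (auto tapped = input.CheckForTapped())
    {
        // A tap arrived since the last frame; e.g. reposition a hologram using
        // tapped.TryGetPointerPose(...) in the desired coordinate system.
    }
    if (auto updated = input.CheckForManipulationUpdated())
    {
        // Apply updated.TryGetCumulativeDelta(...) to the object being dragged.
    }
    if (input.CheckForManipulationResult() == SpatialInputHandler::ManipulationResult::Canceled)
    {
        // Restore the object to its position from before the manipulation started.
    }
}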


@ -0,0 +1,62 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include <vector>
#include <winrt/Windows.UI.Input.Spatial.h>
// Sample gesture handler.
// Hooks up events to recognize tap, manipulation, and navigation gestures, and caches
// the most recent event arguments so the main update loop can poll for them.
class SpatialInputHandler
{
public:
SpatialInputHandler();
~SpatialInputHandler();
enum class ManipulationResult
{
Unknown = 0,
Completed,
Canceled,
};
winrt::Windows::UI::Input::Spatial::SpatialTappedEventArgs CheckForTapped();
winrt::Windows::UI::Input::Spatial::SpatialManipulationStartedEventArgs CheckForManipulationStarted();
winrt::Windows::UI::Input::Spatial::SpatialManipulationUpdatedEventArgs CheckForManipulationUpdated();
ManipulationResult CheckForManipulationResult();
private:
// API objects used to process gesture input, and generate gesture events.
winrt::Windows::UI::Input::Spatial::SpatialInteractionManager m_interactionManager = nullptr;
winrt::Windows::UI::Input::Spatial::SpatialGestureRecognizer m_gestureRecognizer = nullptr;
// Event registration tokens.
winrt::event_token m_interactionDetectedEventToken;
winrt::event_token m_tappedEventToken;
winrt::event_token m_manipulationStartedEventToken;
winrt::event_token m_manipulationUpdatedEventToken;
winrt::event_token m_manipulationCompletedEventToken;
winrt::event_token m_manipulationCanceledEventToken;
winrt::event_token m_navigationStartedEventToken;
winrt::event_token m_navigationUpdatedEventToken;
winrt::event_token m_navigationCompletedEventToken;
winrt::event_token m_navigationCanceledEventToken;
// Cached state of the most recent gesture events, guarded by the lock below.
std::recursive_mutex m_manipulationStateLock;
winrt::Windows::UI::Input::Spatial::SpatialTappedEventArgs m_tapped = nullptr;
winrt::Windows::UI::Input::Spatial::SpatialManipulationStartedEventArgs m_manipulationStarted = nullptr;
winrt::Windows::UI::Input::Spatial::SpatialManipulationUpdatedEventArgs m_manipulationUpdated = nullptr;
ManipulationResult m_manipulationResult = ManipulationResult::Unknown;
};


@ -0,0 +1,238 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "SpatialInputRenderer.h"
#include <winrt/Windows.Foundation.Numerics.h>
#include <winrt/Windows.Perception.People.h>
#include <winrt/Windows.Perception.Spatial.h>
#include "../Common/DirectXHelper.h"
#include <algorithm>
namespace
{
using namespace DirectX;
void AppendColoredTriangle(XMFLOAT3 p0, XMFLOAT3 p1, XMFLOAT3 p2, XMFLOAT3 color, std::vector<VertexPositionNormalColor>& vertices)
{
VertexPositionNormalColor vertex;
vertex.color = color;
vertex.normal = XMFLOAT3(0.0f, 0.0f, 0.0f);
vertex.pos = p0;
vertices.push_back(vertex);
vertex.pos = p1;
vertices.push_back(vertex);
vertex.pos = p2;
vertices.push_back(vertex);
}
} // namespace
SpatialInputRenderer::SpatialInputRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources)
: RenderableObject(deviceResources)
{
m_manager = winrt::Windows::UI::Input::Spatial::SpatialInteractionManager::GetForCurrentView();
m_referenceFrame = winrt::Windows::Perception::Spatial::SpatialLocator::GetDefault().CreateAttachedFrameOfReferenceAtCurrentHeading();
}
void SpatialInputRenderer::Update(
winrt::Windows::Perception::PerceptionTimestamp timestamp,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem)
{
m_transforms.clear();
m_joints.clear();
auto headingAdjustment = m_referenceFrame.TryGetRelativeHeadingAtTimestamp(timestamp);
if (headingAdjustment)
{
// keep coordinate systems facing user
m_referenceFrame.AdjustHeading(-headingAdjustment.Value());
auto coordinateSystem = m_referenceFrame.GetStationaryCoordinateSystemAtTimestamp(timestamp);
auto spatialPointerPose = winrt::Windows::UI::Input::Spatial::SpatialPointerPose::TryGetAtTimestamp(coordinateSystem, timestamp);
if (spatialPointerPose)
{
if (auto eyesPose = spatialPointerPose.Eyes())
{
if (auto gaze = eyesPose.Gaze())
{
auto position = gaze.Value().Origin + gaze.Value().Direction;
quaternion orientation = quaternion::identity();
m_transforms.emplace_back(QTransform(position, orientation));
}
}
}
auto states = m_manager.GetDetectedSourcesAtTimestamp(timestamp);
m_transforms.reserve(states.Size());
for (const auto& state : states)
{
auto location = state.Properties().TryGetLocation(coordinateSystem);
if (location)
{
if (location.Position())
{
using namespace winrt::Windows::Foundation::Numerics;
auto position = location.Position().Value();
quaternion orientation = quaternion::identity();
if (location.Orientation())
{
orientation = location.Orientation().Value();
}
DirectX::XMVECTOR xmPosition = DirectX::XMLoadFloat3(&position);
DirectX::XMVECTOR xmOrientation = DirectX::XMLoadQuaternion(&orientation);
m_transforms.emplace_back(QTransform(xmPosition, xmOrientation));
}
if (auto sourcePose = location.SourcePointerPose())
{
m_joints.push_back({sourcePose.Position(), sourcePose.Orientation(), 1.0f, 0.01f});
}
}
auto handPose = state.TryGetHandPose();
if (handPose)
{
constexpr const winrt::Windows::Perception::People::HandJointKind jointKinds[] = {
winrt::Windows::Perception::People::HandJointKind::Palm,
winrt::Windows::Perception::People::HandJointKind::Wrist,
winrt::Windows::Perception::People::HandJointKind::ThumbMetacarpal,
winrt::Windows::Perception::People::HandJointKind::ThumbProximal,
winrt::Windows::Perception::People::HandJointKind::ThumbDistal,
winrt::Windows::Perception::People::HandJointKind::ThumbTip,
winrt::Windows::Perception::People::HandJointKind::IndexMetacarpal,
winrt::Windows::Perception::People::HandJointKind::IndexProximal,
winrt::Windows::Perception::People::HandJointKind::IndexIntermediate,
winrt::Windows::Perception::People::HandJointKind::IndexDistal,
winrt::Windows::Perception::People::HandJointKind::IndexTip,
winrt::Windows::Perception::People::HandJointKind::MiddleMetacarpal,
winrt::Windows::Perception::People::HandJointKind::MiddleProximal,
winrt::Windows::Perception::People::HandJointKind::MiddleIntermediate,
winrt::Windows::Perception::People::HandJointKind::MiddleDistal,
winrt::Windows::Perception::People::HandJointKind::MiddleTip,
winrt::Windows::Perception::People::HandJointKind::RingMetacarpal,
winrt::Windows::Perception::People::HandJointKind::RingProximal,
winrt::Windows::Perception::People::HandJointKind::RingIntermediate,
winrt::Windows::Perception::People::HandJointKind::RingDistal,
winrt::Windows::Perception::People::HandJointKind::RingTip,
winrt::Windows::Perception::People::HandJointKind::LittleMetacarpal,
winrt::Windows::Perception::People::HandJointKind::LittleProximal,
winrt::Windows::Perception::People::HandJointKind::LittleIntermediate,
winrt::Windows::Perception::People::HandJointKind::LittleDistal,
winrt::Windows::Perception::People::HandJointKind::LittleTip};
constexpr const size_t jointCount = _countof(jointKinds);
winrt::Windows::Perception::People::JointPose jointPoses[jointCount] = {};
if (handPose.TryGetJoints(coordinateSystem, jointKinds, jointPoses))
{
for (size_t jointIndex = 0; jointIndex < jointCount; ++jointIndex)
{
m_joints.push_back({jointPoses[jointIndex].Position,
jointPoses[jointIndex].Orientation,
jointPoses[jointIndex].Radius * 3,
jointPoses[jointIndex].Radius});
}
}
}
}
auto modelTransform = coordinateSystem.TryGetTransformTo(renderingCoordinateSystem);
if (modelTransform)
{
UpdateModelConstantBuffer(modelTransform.Value());
}
}
}
void SpatialInputRenderer::Draw(unsigned int numInstances)
{
if (!m_transforms.empty())
{
std::vector<VertexPositionNormalColor> vertices;
for (const auto& transform : m_transforms)
{
DirectX::XMFLOAT3 trianglePositions[3] = {
DirectX::XMFLOAT3(0.0f, 0.03f, 0.0f), DirectX::XMFLOAT3(0.01f, 0.0f, 0.0f), DirectX::XMFLOAT3(-0.01f, 0.0f, 0.0f)};
AppendColoredTriangle(
transform.TransformPosition(trianglePositions[0]),
transform.TransformPosition(trianglePositions[1]),
transform.TransformPosition(trianglePositions[2]),
DirectX::XMFLOAT3{0, 0, 1},
vertices);
}
for (const auto& joint : m_joints)
{
auto jointVertices = CalculateJointVisualizationVertices(joint.position, joint.orientation, joint.length, joint.radius);
vertices.insert(vertices.end(), jointVertices.begin(), jointVertices.end());
}
const UINT stride = sizeof(vertices[0]);
const UINT offset = 0;
D3D11_SUBRESOURCE_DATA vertexBufferData = {0};
vertexBufferData.pSysMem = vertices.data();
const CD3D11_BUFFER_DESC vertexBufferDesc(static_cast<UINT>(vertices.size() * stride), D3D11_BIND_VERTEX_BUFFER);
winrt::com_ptr<ID3D11Buffer> vertexBuffer;
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&vertexBufferDesc, &vertexBufferData, vertexBuffer.put()));
m_deviceResources->UseD3DDeviceContext([&](auto context) {
context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
ID3D11Buffer* pBuffer = vertexBuffer.get();
context->IASetVertexBuffers(0, 1, &pBuffer, &stride, &offset);
context->DrawInstanced(static_cast<UINT>(vertices.size()), numInstances, offset, 0);
});
}
}
std::vector<VertexPositionNormalColor> SpatialInputRenderer::CalculateJointVisualizationVertices(
float3 jointPosition, quaternion jointOrientation, float jointLength, float jointRadius)
{
using namespace DirectX;
constexpr const size_t verticesCount = 2 * 4 * 3;
std::vector<VertexPositionNormalColor> vertices;
vertices.reserve(verticesCount);
float centerHeight = std::min<float>(jointRadius, 0.5f * jointLength);
float centerXandY = jointRadius / sqrtf(2.0f);
QTransform jointTransform = QTransform(jointPosition, jointOrientation);
XMFLOAT3 baseVertexPosition = jointTransform.TransformPosition(XMFLOAT3(0.0f, 0.0f, 0.0f));
XMFLOAT3 centerVertexPositions[4] = {
jointTransform.TransformPosition(XMFLOAT3(-centerXandY, -centerXandY, -centerHeight)),
jointTransform.TransformPosition(XMFLOAT3(-centerXandY, +centerXandY, -centerHeight)),
jointTransform.TransformPosition(XMFLOAT3(+centerXandY, +centerXandY, -centerHeight)),
jointTransform.TransformPosition(XMFLOAT3(+centerXandY, -centerXandY, -centerHeight)),
};
XMFLOAT3 topVertexPosition = jointTransform.TransformPosition(XMFLOAT3(0.0f, 0.0f, -jointLength));
AppendColoredTriangle(baseVertexPosition, centerVertexPositions[0], centerVertexPositions[1], XMFLOAT3(0.0f, 0.0f, 0.4f), vertices);
AppendColoredTriangle(baseVertexPosition, centerVertexPositions[1], centerVertexPositions[2], XMFLOAT3(0.0f, 0.4f, 0.0f), vertices);
AppendColoredTriangle(baseVertexPosition, centerVertexPositions[2], centerVertexPositions[3], XMFLOAT3(0.4f, 0.0f, 0.0f), vertices);
AppendColoredTriangle(baseVertexPosition, centerVertexPositions[3], centerVertexPositions[0], XMFLOAT3(0.4f, 0.4f, 0.0f), vertices);
AppendColoredTriangle(topVertexPosition, centerVertexPositions[1], centerVertexPositions[0], XMFLOAT3(0.0f, 0.0f, 0.6f), vertices);
AppendColoredTriangle(topVertexPosition, centerVertexPositions[2], centerVertexPositions[1], XMFLOAT3(0.0f, 0.6f, 0.0f), vertices);
AppendColoredTriangle(topVertexPosition, centerVertexPositions[3], centerVertexPositions[2], XMFLOAT3(0.6f, 0.0f, 0.0f), vertices);
AppendColoredTriangle(topVertexPosition, centerVertexPositions[0], centerVertexPositions[3], XMFLOAT3(0.6f, 0.6f, 0.0f), vertices);
return vertices;
}


@ -0,0 +1,85 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "RenderableObject.h"
#include <vector>
#include <winrt/Windows.UI.Input.Spatial.h>
using namespace winrt::Windows::Foundation::Numerics;
struct QTransform
{
QTransform() = default;
QTransform(const DirectX::XMVECTOR& position, const DirectX::XMVECTOR& orientation)
: m_position(position)
, m_orientation(orientation)
{
}
QTransform(const float3& position, const quaternion& orientation)
: m_position(DirectX::XMLoadFloat3(&position))
, m_orientation(DirectX::XMLoadQuaternion(&orientation))
{
}
DirectX::XMVECTOR TransformNormal(const DirectX::XMVECTOR& normal) const
{
return DirectX::XMVector3Rotate(normal, m_orientation);
}
DirectX::XMVECTOR TransformPosition(const DirectX::XMVECTOR& position) const
{
DirectX::XMVECTOR rotated = TransformNormal(position);
return DirectX::XMVectorAdd(rotated, m_position);
}
DirectX::XMFLOAT3 TransformPosition(const DirectX::XMFLOAT3& position) const
{
DirectX::XMFLOAT3 result;
XMStoreFloat3(&result, TransformPosition(DirectX::XMLoadFloat3(&position)));
return result;
}
DirectX::XMVECTOR m_position;
DirectX::XMVECTOR m_orientation;
};
class SpatialInputRenderer : public RenderableObject
{
public:
SpatialInputRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources);
void Update(
winrt::Windows::Perception::PerceptionTimestamp timestamp,
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem);
private:
struct Joint
{
float3 position;
quaternion orientation;
float length;
float radius;
};
private:
static std::vector<VertexPositionNormalColor>
CalculateJointVisualizationVertices(float3 jointPosition, quaternion jointOrientation, float jointLength, float jointRadius);
void Draw(unsigned int numInstances) override;
winrt::Windows::UI::Input::Spatial::SpatialInteractionManager m_manager{nullptr};
winrt::Windows::Perception::Spatial::SpatialLocatorAttachedFrameOfReference m_referenceFrame{nullptr};
std::vector<QTransform> m_transforms;
std::vector<Joint> m_joints;
};
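QTransform above applies its rotation before its translation, so local-space offsets are interpreted relative to the stored pose. A small sketch under that assumption; the helper below is illustrative, not part of the sample, and assumes the sample's convention that -Z is "forward":
#include <DirectXMath.h>
inline DirectX::XMFLOAT3 PointInFrontOfPose(const QTransform& pose, float distance) // hypothetical helper
{
    // Rotate the local-space forward offset into world space, then add the pose position.
    return pose.TransformPosition(DirectX::XMFLOAT3(0.0f, 0.0f, -distance));
}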


@ -0,0 +1,395 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "SpatialSurfaceMeshRenderer.h"
#include "Common\DirectXHelper.h"
using namespace winrt::Windows;
using namespace winrt::Windows::Perception::Spatial;
using namespace winrt::Windows::Graphics::DirectX;
using namespace winrt::Windows::Foundation::Numerics;
using namespace Concurrency;
// For debugging only; remove before shipping.
bool g_freeze = false;
bool g_freezeOnFrame = false;
// Initializes the surface mesh renderer and creates device-dependent resources.
SpatialSurfaceMeshRenderer::SpatialSurfaceMeshRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources)
: m_deviceResources(deviceResources)
{
CreateDeviceDependentResources();
}
SpatialSurfaceMeshRenderer::~SpatialSurfaceMeshRenderer()
{
}
std::future<void> SpatialSurfaceMeshRenderer::CreateDeviceDependentResources()
{
auto asyncAccess = Surfaces::SpatialSurfaceObserver::RequestAccessAsync();
asyncAccess.Completed([this](auto handler, auto asyncStatus) {
m_surfaceObserver = Surfaces::SpatialSurfaceObserver();
m_observedSurfaceChangedToken = m_surfaceObserver.ObservedSurfacesChanged(
[this](Surfaces::SpatialSurfaceObserver, winrt::Windows::Foundation::IInspectable const& handler) {
OnObservedSurfaceChanged();
});
});
std::vector<byte> vertexShaderFileData = co_await DXHelper::ReadDataAsync(L"hsa_SRMeshVertexShader.cso");
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateVertexShader(
vertexShaderFileData.data(), vertexShaderFileData.size(), nullptr, m_vertexShader.put()));
constexpr std::array<D3D11_INPUT_ELEMENT_DESC, 1> vertexDesc = {{
{"POSITION", 0, DXGI_FORMAT_R16G16B16A16_SNORM, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
}};
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateInputLayout(
vertexDesc.data(),
static_cast<UINT>(vertexDesc.size()),
vertexShaderFileData.data(),
static_cast<UINT>(vertexShaderFileData.size()),
m_inputLayout.put()));
std::vector<byte> geometryShaderFileData = co_await DXHelper::ReadDataAsync(L"hsa_SRMeshGeometryShader.cso");
// After the pass-through geometry shader file is loaded, create the shader.
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateGeometryShader(
geometryShaderFileData.data(), geometryShaderFileData.size(), nullptr, m_geometryShader.put()));
std::vector<byte> pixelShaderFileData = co_await DXHelper::ReadDataAsync(L"hsa_SRMeshPixelShader.cso");
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreatePixelShader(
pixelShaderFileData.data(), pixelShaderFileData.size(), nullptr, m_pixelShader.put()));
const CD3D11_BUFFER_DESC constantBufferDesc(sizeof(SRMeshConstantBuffer), D3D11_BIND_CONSTANT_BUFFER);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&constantBufferDesc, nullptr, m_modelConstantBuffer.put()));
m_loadingComplete = true;
}
void SpatialSurfaceMeshRenderer::ReleaseDeviceDependentResources()
{
if (m_surfaceObserver)
{
m_surfaceObserver.ObservedSurfacesChanged(m_observedSurfaceChangedToken);
m_surfaceObserver = nullptr;
}
m_loadingComplete = false;
m_inputLayout = nullptr;
m_vertexShader = nullptr;
m_geometryShader = nullptr;
m_pixelShader = nullptr;
m_modelConstantBuffer = nullptr;
}
void SpatialSurfaceMeshRenderer::OnObservedSurfaceChanged()
{
if (g_freeze)
return;
m_surfaceChangedCounter++; // just for debugging purposes
m_surfaceChanged = true;
}
SpatialSurfaceMeshPart* SpatialSurfaceMeshRenderer::GetOrCreateMeshPart(winrt::guid id)
{
GUID key = id;
auto found = m_meshParts.find(key);
if (found == m_meshParts.cend())
{
m_meshParts[id] = std::make_unique<SpatialSurfaceMeshPart>(this);
return m_meshParts[id].get();
}
return found->second.get();
}
void SpatialSurfaceMeshRenderer::Update(winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem)
{
if (m_surfaceObserver == nullptr)
return;
// update bounding volume (every frame)
{
SpatialBoundingBox axisAlignedBoundingBox = {
{-5.0f, -5.0f, -2.5f},
{10.0f, 10.0f, 5.f},
};
SpatialBoundingVolume volume = SpatialBoundingVolume::FromBox(renderingCoordinateSystem, axisAlignedBoundingBox);
m_surfaceObserver.SetBoundingVolume(volume);
}
if (m_surfaceChanged)
{
// first mark all as not used
for (auto& pair : m_meshParts)
{
pair.second->m_inUse = false;
}
auto mapContainingSurfaceCollection = m_surfaceObserver.GetObservedSurfaces();
for (auto pair : mapContainingSurfaceCollection)
{
if (SpatialSurfaceMeshPart* meshPart = GetOrCreateMeshPart(pair.Key()))
{
meshPart->Update(pair.Value());
g_freeze = g_freezeOnFrame;
}
}
// purge the ones not used
for (MeshPartMap::const_iterator itr = m_meshParts.cbegin(); itr != m_meshParts.cend();)
{
itr = itr->second->IsInUse() ? std::next(itr) : m_meshParts.erase(itr);
}
m_surfaceChanged = false;
}
// every frame, bring the model matrix to rendering space
for (auto& pair : m_meshParts)
{
pair.second->UpdateModelMatrix(renderingCoordinateSystem);
}
}
void SpatialSurfaceMeshRenderer::Render(bool isStereo)
{
if (!m_loadingComplete || m_meshParts.empty())
return;
m_deviceResources->UseD3DDeviceContext([&](auto context) {
// Each vertex is one instance of the SpatialSurfaceMeshPart::Vertex_t struct.
const uint32_t stride = sizeof(SpatialSurfaceMeshPart::Vertex_t);
const uint32_t offset = 0;
ID3D11Buffer* pBufferToSet;
context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
context->IASetInputLayout(m_inputLayout.get());
// Attach the vertex shader.
context->VSSetShader(m_vertexShader.get(), nullptr, 0);
// Apply the model constant buffer to the vertex shader.
pBufferToSet = m_modelConstantBuffer.get();
context->VSSetConstantBuffers(0, 1, &pBufferToSet);
// geometry shader
context->GSSetShader(m_geometryShader.get(), nullptr, 0);
// pixel shader
context->PSSetShader(m_zfillOnly ? nullptr : m_pixelShader.get(), nullptr, 0);
// Apply the model constant buffer to the pixel shader.
pBufferToSet = m_modelConstantBuffer.get();
context->PSSetConstantBuffers(0, 1, &pBufferToSet);
// render each mesh part
for (auto& pair : m_meshParts)
{
SpatialSurfaceMeshPart* part = pair.second.get();
if (part->m_indexCount == 0)
continue;
if (part->m_needsUpload)
{
part->UploadData();
}
// update part specific model matrix
context->UpdateSubresource(m_modelConstantBuffer.get(), 0, nullptr, &part->m_constantBufferData, 0, 0);
ID3D11Buffer* pBufferToSet2 = part->m_vertexBuffer.get();
context->IASetVertexBuffers(0, 1, &pBufferToSet2, &stride, &offset);
context->IASetIndexBuffer(part->m_indexBuffer.get(), DXGI_FORMAT_R16_UINT, 0);
// draw the mesh
context->DrawIndexedInstanced(part->m_indexCount, isStereo ? 2 : 1, 0, 0, 0);
}
// set geometry shader back
context->GSSetShader(nullptr, nullptr, 0);
});
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////
// SRMeshPart
//////////////////////////////////////////////////////////////////////////////////////////////////////////
SpatialSurfaceMeshPart::SpatialSurfaceMeshPart(SpatialSurfaceMeshRenderer* owner)
: m_owner(owner)
{
auto identity = DirectX::XMMatrixIdentity();
m_constantBufferData.modelMatrix = reinterpret_cast<DirectX::XMFLOAT4X4&>(identity);
m_vertexScale.x = m_vertexScale.y = m_vertexScale.z = 1.0f;
}
void SpatialSurfaceMeshPart::Update(Surfaces::SpatialSurfaceInfo surfaceInfo)
{
m_inUse = true;
m_updateInProgress = true;
    const double maxTrianglesPerCubicMeter = 750.0; // density value taken from Hydrogen
    auto asyncOperation = surfaceInfo.TryComputeLatestMeshAsync(maxTrianglesPerCubicMeter);
    asyncOperation.Completed([this](winrt::Windows::Foundation::IAsyncOperation<Surfaces::SpatialSurfaceMesh> result, auto asyncStatus) {
Surfaces::SpatialSurfaceMesh mesh = result.GetResults();
UpdateMesh(mesh);
m_updateInProgress = false;
});
}
void SpatialSurfaceMeshPart::UpdateModelMatrix(winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem)
{
if (m_coordinateSystem == nullptr)
return;
auto modelTransform = m_coordinateSystem.TryGetTransformTo(renderingCoordinateSystem);
if (modelTransform)
{
float4x4 matrixWinRt = transpose(modelTransform.Value());
DirectX::XMMATRIX transformMatrix = DirectX::XMLoadFloat4x4(&matrixWinRt);
DirectX::XMMATRIX scaleMatrix = DirectX::XMMatrixScaling(m_vertexScale.x, m_vertexScale.y, m_vertexScale.z);
DirectX::XMMATRIX result = DirectX::XMMatrixMultiply(transformMatrix, scaleMatrix);
DirectX::XMStoreFloat4x4(&m_constantBufferData.modelMatrix, result);
}
}
void SpatialSurfaceMeshPart::UpdateMesh(Surfaces::SpatialSurfaceMesh mesh)
{
m_coordinateSystem = mesh.CoordinateSystem();
Surfaces::SpatialSurfaceMeshBuffer vertexBuffer = mesh.VertexPositions();
Surfaces::SpatialSurfaceMeshBuffer indexBuffer = mesh.TriangleIndices();
DirectXPixelFormat vertexFormat = vertexBuffer.Format();
DirectXPixelFormat indexFormat = indexBuffer.Format();
assert(vertexFormat == DirectXPixelFormat::R16G16B16A16IntNormalized);
assert(indexFormat == DirectXPixelFormat::R16UInt);
uint32_t vertexCount = vertexBuffer.ElementCount();
uint32_t indexCount = indexBuffer.ElementCount();
assert((indexCount % 3) == 0);
if (vertexCount == 0 || indexCount == 0)
{
m_indexCount = 0;
return;
}
// convert vertices:
{
Vertex_t* dest = MapVertices(vertexCount);
winrt::Windows::Storage::Streams::IBuffer vertexData = vertexBuffer.Data();
uint8_t* vertexRaw = vertexData.data();
int vertexStride = vertexData.Length() / vertexCount;
assert(vertexStride == 8); // DirectXPixelFormat::R16G16B16A16IntNormalized
winrt::Windows::Foundation::Numerics::float3 positionScale = mesh.VertexPositionScale();
m_vertexScale.x = positionScale.x;
m_vertexScale.y = positionScale.y;
m_vertexScale.z = positionScale.z;
memcpy(dest, vertexRaw, vertexCount * sizeof(Vertex_t));
UnmapVertices();
}
// convert indices
{
uint16_t* dest = MapIndices(indexCount);
winrt::Windows::Storage::Streams::IBuffer indexData = indexBuffer.Data();
uint16_t* source = (uint16_t*)indexData.data();
for (uint32_t i = 0; i < indexCount; i++)
{
assert(source[i] < vertexCount);
dest[i] = source[i];
}
UnmapIndices();
}
m_needsUpload = true;
}
SpatialSurfaceMeshPart::Vertex_t* SpatialSurfaceMeshPart::MapVertices(uint32_t vertexCount)
{
m_vertexCount = vertexCount;
if (vertexCount > m_vertexData.size())
m_vertexData.resize(vertexCount);
return &m_vertexData[0];
}
void SpatialSurfaceMeshPart::UnmapVertices()
{
}
uint16_t* SpatialSurfaceMeshPart::MapIndices(uint32_t indexCount)
{
m_indexCount = indexCount;
if (indexCount > m_indexData.size())
m_indexData.resize(indexCount);
return &m_indexData[0];
}
void SpatialSurfaceMeshPart::UnmapIndices()
{
}
void SpatialSurfaceMeshPart::UploadData()
{
if (m_vertexCount > m_allocatedVertexCount)
{
m_vertexBuffer = nullptr;
const int alignment = 1024;
m_allocatedVertexCount = ((m_vertexCount + alignment - 1) / alignment) * alignment; // align to reasonable size
const int elementSize = sizeof(Vertex_t);
const CD3D11_BUFFER_DESC vertexBufferDesc(
m_allocatedVertexCount * elementSize, D3D11_BIND_VERTEX_BUFFER, D3D11_USAGE_DYNAMIC, D3D11_CPU_ACCESS_WRITE);
winrt::check_hresult(m_owner->m_deviceResources->GetD3DDevice()->CreateBuffer(&vertexBufferDesc, nullptr, m_vertexBuffer.put()));
}
if (m_indexCount > m_allocatedIndexCount)
{
m_indexBuffer = nullptr;
const int alignment = 3 * 1024;
m_allocatedIndexCount = ((m_indexCount + alignment - 1) / alignment) * alignment; // align to reasonable size
const int elementSize = sizeof(uint16_t);
const CD3D11_BUFFER_DESC indexBufferDesc(
m_allocatedIndexCount * elementSize, D3D11_BIND_INDEX_BUFFER, D3D11_USAGE_DYNAMIC, D3D11_CPU_ACCESS_WRITE);
winrt::check_hresult(m_owner->m_deviceResources->GetD3DDevice()->CreateBuffer(&indexBufferDesc, nullptr, m_indexBuffer.put()));
}
// upload data
D3D11_MAPPED_SUBRESOURCE resource;
m_owner->m_deviceResources->UseD3DDeviceContext([&](auto context) {
context->Map(m_vertexBuffer.get(), 0, D3D11_MAP_WRITE_DISCARD, 0, &resource);
memcpy(resource.pData, &m_vertexData[0], sizeof(Vertex_t) * m_vertexCount);
context->Unmap(m_vertexBuffer.get(), 0);
context->Map(m_indexBuffer.get(), 0, D3D11_MAP_WRITE_DISCARD, 0, &resource);
memcpy(resource.pData, &m_indexData[0], sizeof(uint16_t) * m_indexCount);
context->Unmap(m_indexBuffer.get(), 0);
});
}
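UploadData above grows the GPU buffers in coarse steps so that small fluctuations in mesh size reuse the existing allocation instead of recreating it. A worked example of the round-up arithmetic; the helper below is illustrative, not part of the sample:
#include <cstdint>
constexpr uint32_t RoundUpToMultiple(uint32_t count, uint32_t alignment) // hypothetical helper
{
    return ((count + alignment - 1) / alignment) * alignment;
}
// With the vertex alignment of 1024 used above, a mesh of 2500 vertices allocates
// RoundUpToMultiple(2500, 1024) == 3072 vertices, so growing to, say, 2800 vertices
// does not trigger a buffer re-creation.
static_assert(RoundUpToMultiple(2500, 1024) == 3072, "round-up example");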


@ -0,0 +1,130 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "..\Common\DeviceResources.h"
#include "..\Common\Utils.h"
#include <winrt/windows.perception.spatial.surfaces.h>
#include <future>
#include <string>
// Forward declaration.
class SpatialSurfaceMeshRenderer;
struct SRMeshConstantBuffer
{
DirectX::XMFLOAT4X4 modelMatrix;
};
// represents a single piece of mesh (SpatialSurfaceMesh)
class SpatialSurfaceMeshPart
{
public:
struct Vertex_t
{
// float pos[4];
int16_t pos[4];
};
SpatialSurfaceMeshPart(SpatialSurfaceMeshRenderer* owner);
void Update(winrt::Windows::Perception::Spatial::Surfaces::SpatialSurfaceInfo surfaceInfo);
void UpdateMesh(winrt::Windows::Perception::Spatial::Surfaces::SpatialSurfaceMesh mesh);
bool IsInUse() const
{
return m_inUse || m_updateInProgress;
}
private:
Vertex_t* MapVertices(uint32_t vertexCount);
void UnmapVertices();
uint16_t* MapIndices(uint32_t indexCount);
void UnmapIndices();
void UploadData();
void UpdateModelMatrix(winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem);
friend class SpatialSurfaceMeshRenderer;
SpatialSurfaceMeshRenderer* m_owner;
bool m_inUse = true;
bool m_needsUpload = false;
bool m_updateInProgress = false;
GUID m_ID;
uint32_t m_allocatedVertexCount = 0;
uint32_t m_allocatedIndexCount = 0;
uint32_t m_vertexCount = 0;
uint32_t m_indexCount = 0;
winrt::com_ptr<ID3D11Buffer> m_vertexBuffer;
winrt::com_ptr<ID3D11Buffer> m_indexBuffer;
winrt::Windows::Perception::Spatial::SpatialCoordinateSystem m_coordinateSystem = nullptr;
// CPU-side staging copies of the mesh data:
std::vector<Vertex_t> m_vertexData;
std::vector<uint16_t> m_indexData;
SRMeshConstantBuffer m_constantBufferData;
DirectX::XMFLOAT3 m_vertexScale;
};
// Renders the SR mesh
class SpatialSurfaceMeshRenderer
{
public:
SpatialSurfaceMeshRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources);
virtual ~SpatialSurfaceMeshRenderer();
void Update(winrt::Windows::Perception::Spatial::SpatialCoordinateSystem renderingCoordinateSystem);
void Render(bool isStereo);
std::future<void> CreateDeviceDependentResources();
void ReleaseDeviceDependentResources();
private:
void OnObservedSurfaceChanged();
SpatialSurfaceMeshPart* GetOrCreateMeshPart(winrt::guid id);
private:
friend class SpatialSurfaceMeshPart;
// Cached pointer to device resources.
std::shared_ptr<DXHelper::DeviceResources> m_deviceResources;
// Resources related to mesh rendering.
winrt::com_ptr<ID3D11ShaderResourceView> m_shaderResourceView;
winrt::com_ptr<ID3D11SamplerState> m_pointSampler;
winrt::com_ptr<ID3D11RenderTargetView> m_renderTargetView;
// observer:
int m_surfaceChangedCounter = 0;
bool m_surfaceChanged = false;
winrt::Windows::Perception::Spatial::Surfaces::SpatialSurfaceObserver m_surfaceObserver = nullptr;
winrt::event_token m_observedSurfaceChangedToken;
// mesh parts
using MeshPartMap = std::map<GUID, std::unique_ptr<SpatialSurfaceMeshPart>, GUIDComparer>;
MeshPartMap m_meshParts;
// rendering
bool m_zfillOnly = false;
bool m_loadingComplete = false;
winrt::com_ptr<ID3D11InputLayout> m_inputLayout;
winrt::com_ptr<ID3D11VertexShader> m_vertexShader;
winrt::com_ptr<ID3D11GeometryShader> m_geometryShader;
winrt::com_ptr<ID3D11PixelShader> m_pixelShader;
winrt::com_ptr<ID3D11Buffer> m_modelConstantBuffer;
};


@ -0,0 +1,347 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#include "pch.h"
#include "../Common/DirectXHelper.h"
#include "SpinningCubeRenderer.h"
#include <winrt/Windows.Foundation.Numerics.h>
#include <winrt/Windows.Perception.People.h>
#include <winrt/Windows.Perception.h>
using namespace DirectX;
// Loads vertex and pixel shaders from files and instantiates the cube geometry.
SpinningCubeRenderer::SpinningCubeRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources)
: m_deviceResources(deviceResources)
{
CreateDeviceDependentResources();
}
// This function uses a SpatialPointerPose to position the world-locked hologram
// two meters along the user's gaze (eye gaze if available, otherwise head gaze).
void SpinningCubeRenderer::PositionHologram(const winrt::Windows::UI::Input::Spatial::SpatialPointerPose& pointerPose)
{
if (pointerPose != nullptr)
{
// Try to get the gaze from eyes.
winrt::Windows::Perception::People::EyesPose eyesPose = pointerPose.Eyes();
if (eyesPose != nullptr)
{
winrt::Windows::Foundation::IReference<winrt::Windows::Perception::Spatial::SpatialRay> gaze = eyesPose.Gaze();
if (gaze != nullptr)
{
PositionHologram(gaze.Value().Origin, gaze.Value().Direction);
return;
}
}
// Get the gaze direction from head.
const auto headPosition = pointerPose.Head().Position();
const auto headDirection = pointerPose.Head().ForwardDirection();
PositionHologram(headPosition, headDirection);
}
}
void SpinningCubeRenderer::SetColorFilter(DirectX::XMFLOAT4 color)
{
m_filterColorData = color;
}
// This function uses a point and a vector to position the world-locked hologram
// two meters in front of the user's heading.
void SpinningCubeRenderer::PositionHologram(
winrt::Windows::Foundation::Numerics::float3 headPosition, winrt::Windows::Foundation::Numerics::float3 headDirection)
{
// The hologram is positioned two meters along the user's gaze direction.
static const float distanceFromUser = 2.0f; // meters
const auto gazeAtTwoMeters = headPosition + (distanceFromUser * headDirection);
// This will be used as the translation component of the hologram's
// model transform.
SetPosition(gazeAtTwoMeters);
}
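// Worked example (illustrative, not part of the sample): with the head at the origin
// looking straight down -Z, headPosition = (0, 0, 0) and headDirection = (0, 0, -1),
// so the hologram is placed at (0, 0, -2), which matches the default m_position
// declared in SpinningCubeRenderer.h.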
// Called once per frame. Rotates the cube, and calculates and sets the model matrix
// from the cube's current rotation and position.
void SpinningCubeRenderer::Update(float totalSeconds)
{
// Rotate the cube.
// Convert degrees to radians, then convert seconds to rotation angle.
const float radiansPerSecond = XMConvertToRadians(m_degreesPerSecond);
const double relativeRotation = totalSeconds * radiansPerSecond;
double totalRotation = m_rotationOffset;
switch (m_pauseState)
{
case PauseState::Unpaused:
totalRotation += relativeRotation;
break;
    case PauseState::Pausing:
        m_rotationOffset += relativeRotation;
        m_pauseState = PauseState::Paused;
        // Intentional fall-through: the freshly paused cube renders at the frozen rotation offset.
    case PauseState::Paused:
totalRotation = m_rotationOffset;
break;
case PauseState::Unpausing:
m_rotationOffset -= relativeRotation;
m_pauseState = PauseState::Unpaused;
break;
}
const float radians = static_cast<float>(fmod(totalRotation, XM_2PI));
const XMMATRIX modelRotation = XMMatrixRotationY(-radians);
// Position the cube.
const XMMATRIX modelTranslation = XMMatrixTranslationFromVector(XMLoadFloat3(&m_position));
// Multiply to get the transform matrix.
// Note that this transform does not enforce a particular coordinate system. The calling
// class is responsible for rendering this content in a consistent manner.
const XMMATRIX modelTransform = XMMatrixMultiply(modelRotation, modelTranslation);
// Store the normal transform.
XMStoreFloat4x4(&m_modelConstantBufferData.normal, XMMatrixTranspose(modelRotation));
// The view and projection matrices are provided by the system; they are associated
// with holographic cameras, and updated on a per-camera basis.
// Here, we provide the model transform for the sample hologram. The model transform
// matrix is transposed to prepare it for the shader.
XMStoreFloat4x4(&m_modelConstantBufferData.model, XMMatrixTranspose(modelTransform));
// Loading is asynchronous. Resources must be created before they can be updated.
if (!m_loadingComplete)
{
return;
}
// Use the D3D device context to update Direct3D device-based resources.
m_deviceResources->UseD3DDeviceContext([&](auto context) {
// Update the model transform buffer for the hologram.
context->UpdateSubresource(m_modelConstantBuffer.get(), 0, nullptr, &m_modelConstantBufferData, 0, 0);
});
}
// Renders one frame using the vertex and pixel shaders.
// On devices that do not support the D3D11_FEATURE_D3D11_OPTIONS3::
// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature,
// a pass-through geometry shader is also used to set the render
// target array index.
void SpinningCubeRenderer::Render(bool isStereo)
{
// Loading is asynchronous. Resources must be created before drawing can occur.
if (!m_loadingComplete)
{
return;
}
m_deviceResources->UseD3DDeviceContext([&](auto context) {
ID3D11Buffer* pBufferToSet = nullptr;
// Each vertex is one instance of the VertexPositionNormalColor struct.
const UINT stride = sizeof(VertexPositionNormalColor);
const UINT offset = 0;
pBufferToSet = m_vertexBuffer.get();
context->IASetVertexBuffers(0, 1, &pBufferToSet, &stride, &offset);
context->IASetIndexBuffer(
m_indexBuffer.get(),
DXGI_FORMAT_R16_UINT, // Each index is one 16-bit unsigned integer (short).
0);
context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
context->IASetInputLayout(m_inputLayout.get());
// Attach the vertex shader.
context->VSSetShader(m_vertexShader.get(), nullptr, 0);
// Apply the model constant buffer to the vertex shader.
pBufferToSet = m_modelConstantBuffer.get();
context->VSSetConstantBuffers(0, 1, &pBufferToSet);
if (!m_usingVprtShaders)
{
// On devices that do not support the D3D11_FEATURE_D3D11_OPTIONS3::
// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature,
// a pass-through geometry shader is used to set the render target
// array index.
context->GSSetShader(m_geometryShader.get(), nullptr, 0);
}
context->UpdateSubresource(m_filterColorBuffer.get(), 0, nullptr, &m_filterColorData, 0, 0);
pBufferToSet = m_filterColorBuffer.get();
context->PSSetConstantBuffers(2, 1, &pBufferToSet);
// Attach the pixel shader.
context->PSSetShader(m_pixelShader.get(), nullptr, 0);
// Draw the objects.
context->DrawIndexedInstanced(
m_indexCount, // Index count per instance.
isStereo ? 2 : 1, // Instance count.
0, // Start index location.
0, // Base vertex location.
0 // Start instance location.
);
});
}
std::future<void> SpinningCubeRenderer::CreateDeviceDependentResources()
{
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
std::wstring fileNamePrefix = L"";
#else
std::wstring fileNamePrefix = L"ms-appx:///";
#endif
m_usingVprtShaders = m_deviceResources->GetDeviceSupportsVprt();
// On devices that do support the D3D11_FEATURE_D3D11_OPTIONS3::
// VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature
// we can avoid using a pass-through geometry shader to set the render
// target array index, thus avoiding any overhead that would be
// incurred by setting the geometry shader stage.
std::wstring vertexShaderFileName = m_usingVprtShaders ? L"hsa_VprtVertexShader.cso" : L"hsa_VertexShader.cso";
std::vector<byte> vertexShaderFileData = co_await DXHelper::ReadDataAsync(fileNamePrefix + vertexShaderFileName);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateVertexShader(
vertexShaderFileData.data(), vertexShaderFileData.size(), nullptr, m_vertexShader.put()));
constexpr std::array<D3D11_INPUT_ELEMENT_DESC, 3> vertexDesc = {{
{"POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
{"COLOR", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0},
}};
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateInputLayout(
vertexDesc.data(),
static_cast<UINT>(vertexDesc.size()),
vertexShaderFileData.data(),
static_cast<UINT>(vertexShaderFileData.size()),
m_inputLayout.put()));
std::vector<byte> pixelShaderFileData = co_await DXHelper::ReadDataAsync(fileNamePrefix + L"hsa_PixelShader.cso");
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreatePixelShader(
pixelShaderFileData.data(), pixelShaderFileData.size(), nullptr, m_pixelShader.put()));
const ModelConstantBuffer constantBuffer{
reinterpret_cast<DirectX::XMFLOAT4X4&>(winrt::Windows::Foundation::Numerics::float4x4::identity()),
reinterpret_cast<DirectX::XMFLOAT4X4&>(winrt::Windows::Foundation::Numerics::float4x4::identity()),
};
const CD3D11_BUFFER_DESC constantBufferDesc(sizeof(ModelConstantBuffer), D3D11_BIND_CONSTANT_BUFFER);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&constantBufferDesc, nullptr, m_modelConstantBuffer.put()));
const CD3D11_BUFFER_DESC filterColorBufferDesc(sizeof(XMFLOAT4), D3D11_BIND_CONSTANT_BUFFER);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&filterColorBufferDesc, nullptr, m_filterColorBuffer.put()));
if (!m_usingVprtShaders)
{
// Load the pass-through geometry shader.
std::vector<byte> geometryShaderFileData = co_await DXHelper::ReadDataAsync(fileNamePrefix + L"hsa_GeometryShader.cso");
// After the pass-through geometry shader file is loaded, create the shader.
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateGeometryShader(
geometryShaderFileData.data(), geometryShaderFileData.size(), nullptr, m_geometryShader.put()));
}
// Load mesh vertices. Each vertex has a position, a normal, and a color.
// Note that the cube size has changed from the default DirectX app
// template. Windows Holographic is scaled in meters, so to draw the
// cube at a comfortable size we made the cube width 0.2 m (20 cm).
static const VertexPositionNormalColor cubeVertices[] = {
{XMFLOAT3(-0.1f, -0.1f, -0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(0.0f, 0.0f, 0.0f)}, // vertex 0 non-debug
{XMFLOAT3(-0.1f, -0.1f, 0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(0.0f, 0.0f, 1.0f)},
{XMFLOAT3(-0.1f, 0.1f, -0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(0.0f, 1.0f, 0.0f)},
{XMFLOAT3(-0.1f, 0.1f, 0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(0.0f, 1.0f, 1.0f)},
{XMFLOAT3(0.1f, -0.1f, -0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(1.0f, 0.0f, 0.0f)},
{XMFLOAT3(0.1f, -0.1f, 0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(1.0f, 0.0f, 1.0f)},
{XMFLOAT3(0.1f, 0.1f, -0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(1.0f, 1.0f, 0.0f)},
{XMFLOAT3(0.1f, 0.1f, 0.1f), XMFLOAT3(0.0f, 0.0f, 0.0f), XMFLOAT3(1.0f, 1.0f, 1.0f)},
};
D3D11_SUBRESOURCE_DATA vertexBufferData = {0};
vertexBufferData.pSysMem = cubeVertices;
vertexBufferData.SysMemPitch = 0;
vertexBufferData.SysMemSlicePitch = 0;
const CD3D11_BUFFER_DESC vertexBufferDesc(sizeof(cubeVertices), D3D11_BIND_VERTEX_BUFFER);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&vertexBufferDesc, &vertexBufferData, m_vertexBuffer.put()));
// Load mesh indices. Each trio of indices represents
// a triangle to be rendered on the screen.
// For example: 2,1,0 means that the vertices with indexes
// 2, 1, and 0 from the vertex buffer compose the
// first triangle of this mesh.
// Note that the winding order is clockwise by default.
static const unsigned short cubeIndices[] = {
2, 1, 0, // -x
2, 3, 1,
6, 4, 5, // +x
6, 5, 7,
0, 1, 5, // -y
0, 5, 4,
2, 6, 7, // +y
2, 7, 3,
0, 4, 6, // -z
0, 6, 2,
1, 3, 7, // +z
1, 7, 5,
};
m_indexCount = ARRAYSIZE(cubeIndices);
D3D11_SUBRESOURCE_DATA indexBufferData = {0};
indexBufferData.pSysMem = cubeIndices;
indexBufferData.SysMemPitch = 0;
indexBufferData.SysMemSlicePitch = 0;
const CD3D11_BUFFER_DESC indexBufferDesc(sizeof(cubeIndices), D3D11_BIND_INDEX_BUFFER);
winrt::check_hresult(m_deviceResources->GetD3DDevice()->CreateBuffer(&indexBufferDesc, &indexBufferData, m_indexBuffer.put()));
// Once the cube is loaded, the object is ready to be rendered.
m_loadingComplete = true;
}
void SpinningCubeRenderer::ReleaseDeviceDependentResources()
{
m_loadingComplete = false;
m_usingVprtShaders = false;
m_vertexShader = nullptr;
m_inputLayout = nullptr;
m_pixelShader = nullptr;
m_geometryShader = nullptr;
m_modelConstantBuffer = nullptr;
m_vertexBuffer = nullptr;
m_indexBuffer = nullptr;
m_filterColorBuffer = nullptr;
}
void SpinningCubeRenderer::CreateWindowSizeDependentResources()
{
}
void SpinningCubeRenderer::TogglePauseState()
{
if (m_pauseState == PauseState::Paused)
{
m_pauseState = PauseState::Unpausing;
}
else
{
m_pauseState = PauseState::Pausing;
}
}
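CreateDeviceDependentResources above branches on m_usingVprtShaders, which the sample's DeviceResources reports based on whether the device can set the render target array index directly from the vertex shader. A hedged sketch of how such a capability query is typically performed with CheckFeatureSupport; the helper name is hypothetical and the sample's actual DeviceResources implementation may differ:
#include <d3d11_3.h>
bool QuerySupportsVprt(ID3D11Device* device) // hypothetical helper
{
    D3D11_FEATURE_DATA_D3D11_OPTIONS3 options = {};
    if (SUCCEEDED(device->CheckFeatureSupport(D3D11_FEATURE_D3D11_OPTIONS3, &options, sizeof(options))))
    {
        // TRUE when the vertex shader may write SV_RenderTargetArrayIndex,
        // making the pass-through geometry shader unnecessary.
        return options.VPAndRTArrayIndexFromAnyShaderFeedingRasterizer == TRUE;
    }
    return false;
}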


@ -0,0 +1,93 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
#pragma once
#include "..\Common\DeviceResources.h"
#include "ShaderStructures.h"
#include <winrt/Windows.UI.Input.Spatial.h>
// This sample renderer instantiates a basic rendering pipeline.
class SpinningCubeRenderer
{
public:
SpinningCubeRenderer(const std::shared_ptr<DXHelper::DeviceResources>& deviceResources);
void CreateWindowSizeDependentResources();
std::future<void> CreateDeviceDependentResources();
void ReleaseDeviceDependentResources();
void Update(float totalSeconds);
void SetColorFilter(DirectX::XMFLOAT4 color);
void Render(bool isStereo);
// Repositions the sample hologram.
void PositionHologram(const winrt::Windows::UI::Input::Spatial::SpatialPointerPose& pointerPose);
// Repositions the sample hologram, using direct measures.
void PositionHologram(winrt::Windows::Foundation::Numerics::float3 pos, winrt::Windows::Foundation::Numerics::float3 dir);
// Property accessors.
void SetPosition(winrt::Windows::Foundation::Numerics::float3 pos)
{
m_position = pos;
}
const winrt::Windows::Foundation::Numerics::float3& GetPosition()
{
return m_position;
}
void Pause()
{
m_pauseState = PauseState::Pausing;
}
void Unpause()
{
m_pauseState = PauseState::Unpausing;
}
void TogglePauseState();
private:
enum class PauseState
{
Unpaused = 0,
Pausing,
Paused,
Unpausing,
};
// Cached pointer to device resources.
std::shared_ptr<DXHelper::DeviceResources> m_deviceResources;
// Direct3D resources for cube geometry.
winrt::com_ptr<ID3D11InputLayout> m_inputLayout;
winrt::com_ptr<ID3D11Buffer> m_vertexBuffer;
winrt::com_ptr<ID3D11Buffer> m_indexBuffer;
winrt::com_ptr<ID3D11VertexShader> m_vertexShader;
winrt::com_ptr<ID3D11GeometryShader> m_geometryShader;
winrt::com_ptr<ID3D11PixelShader> m_pixelShader;
winrt::com_ptr<ID3D11Buffer> m_modelConstantBuffer;
winrt::com_ptr<ID3D11Buffer> m_filterColorBuffer;
// System resources for cube geometry.
ModelConstantBuffer m_modelConstantBufferData;
uint32_t m_indexCount = 0;
DirectX::XMFLOAT4 m_filterColorData = {1, 1, 1, 1};
// Variables used with the rendering loop.
bool m_loadingComplete = false;
float m_degreesPerSecond = 180.0f;
winrt::Windows::Foundation::Numerics::float3 m_position = {0.0f, 0.0f, -2.0f};
PauseState m_pauseState = PauseState::Unpaused;
double m_rotationOffset = 0;
// If the current D3D Device supports VPRT, we can avoid using a geometry
// shader just to set the render target array index.
bool m_usingVprtShaders = false;
};


@ -0,0 +1,44 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
// Per-vertex data from the vertex shader.
struct GeometryShaderInput
{
float4 pos : SV_POSITION;
min16float3 color : COLOR0;
uint instId : TEXCOORD0;
};
// Per-vertex data passed to the rasterizer.
struct GeometryShaderOutput
{
float4 pos : SV_POSITION;
min16float3 color : COLOR0;
uint idx : TEXCOORD0;
uint rtvId : SV_RenderTargetArrayIndex;
};
// This geometry shader is a pass-through that leaves the geometry unmodified
// and sets the render target array index.
[maxvertexcount(3)]
void main(triangle GeometryShaderInput input[3], inout TriangleStream<GeometryShaderOutput> outStream)
{
GeometryShaderOutput output;
[unroll(3)]
for (int i = 0; i < 3; ++i)
{
output.pos = input[i].pos;
output.color = input[i].color;
output.idx = input[i].instId;
output.rtvId = input[i].instId;
outStream.Append(output);
}
}


@ -0,0 +1,42 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
// A constant buffer that stores the model transform.
cbuffer ModelConstantBuffer : register(b0)
{
float4x4 model;
float4x4 normal;
};
// A constant buffer that stores each set of view and projection matrices in column-major format.
cbuffer ViewProjectionConstantBuffer : register(b1)
{
float4x4 viewProjection[2];
};
cbuffer ColorFilterConstantBuffer : register(b2)
{
float4 colorFilter;
};
// Per-pixel color data passed through the pixel shader.
struct PixelShaderInput
{
float4 pos : SV_POSITION;
min16float3 color : COLOR0;
uint idx : TEXCOORD0;
};
// The pixel shader passes the interpolated vertex color through, modulated by the color filter.
min16float4 main(PixelShaderInput input) : SV_TARGET
{
return min16float4(input.color, 1.f) * min16float4(colorFilter);
}


@ -0,0 +1,48 @@
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
// Per-vertex data from the vertex shader.
struct GeometryShaderInput
{
float4 pos : SV_POSITION;
uint instId : TEXCOORD1;
};
// Per-vertex data passed to the rasterizer.
struct GeometryShaderOutput
{
float4 pos : SV_POSITION;
float3 barycentricCoords : TEXCOORD0;
uint rtvId : SV_RenderTargetArrayIndex;
};
// This geometry shader is a pass-through that leaves the geometry unmodified,
// assigns per-vertex barycentric coordinates for use in the pixel shader, and
// sets the render target array index.
[maxvertexcount(3)]
void main(triangle GeometryShaderInput input[3], inout TriangleStream<GeometryShaderOutput> outStream)
{
static const float3 barycentricVectors[3] = {
float3(1.0f, 0.0f, 0.0f),
float3(0.0f, 1.0f, 0.0f),
float3(0.0f, 0.0f, 1.0f)
};
GeometryShaderOutput output;
[unroll(3)]
for (int i = 0; i < 3; ++i)
{
output.pos = input[i].pos;
output.barycentricCoords = barycentricVectors[i];
output.rtvId = input[i].instId;
outStream.Append(output);
}
}

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше