Bug 1667303 - Scale video with the VideoProcessor for overlay video when possible r=gfx-reviewers,jrmuizel

Video scaling by the VideoProcessor is expected to reduce GPU usage.

Differential Revision: https://phabricator.services.mozilla.com/D91392
This commit is contained in:
sotaro 2022-03-18 14:46:05 +00:00
Родитель 7bfa026475
Коммит 56ef111fbd
3 изменённых файла: 104 добавления и 29 удалений

Просмотреть файл

@ -550,6 +550,13 @@ void DCLayerTree::AddSurface(wr::NativeSurfaceId aId,
gfx::Matrix transform(aTransform.m11, aTransform.m12, aTransform.m21,
aTransform.m22, aTransform.m41, aTransform.m42);
auto* surfaceVideo = surface->AsDCSurfaceVideo();
if (surfaceVideo) {
transform = surfaceVideo->CalculateSwapChainSize(transform);
surfaceVideo->PresentVideo();
}
transform.PreTranslate(-virtualOffset.x, -virtualOffset.y);
// The DirectComposition API applies clipping *before* any transforms/offset,
@ -629,14 +636,16 @@ GLuint DCLayerTree::GetOrCreateFbo(int aWidth, int aHeight) {
return fboId;
}
bool DCLayerTree::EnsureVideoProcessor(const gfx::IntSize& aVideoSize) {
bool DCLayerTree::EnsureVideoProcessor(const gfx::IntSize& aInputSize,
const gfx::IntSize& aOutputSize) {
HRESULT hr;
if (!mVideoDevice || !mVideoContext) {
return false;
}
if (mVideoProcessor && aVideoSize == mVideoSize) {
if (mVideoProcessor && (aInputSize <= mVideoInputSize) &&
(aOutputSize <= mVideoOutputSize)) {
return true;
}
@ -647,12 +656,12 @@ bool DCLayerTree::EnsureVideoProcessor(const gfx::IntSize& aVideoSize) {
desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
desc.InputFrameRate.Numerator = 60;
desc.InputFrameRate.Denominator = 1;
desc.InputWidth = aVideoSize.width;
desc.InputHeight = aVideoSize.height;
desc.InputWidth = aInputSize.width;
desc.InputHeight = aInputSize.height;
desc.OutputFrameRate.Numerator = 60;
desc.OutputFrameRate.Denominator = 1;
desc.OutputWidth = aVideoSize.width;
desc.OutputHeight = aVideoSize.height;
desc.OutputWidth = aOutputSize.width;
desc.OutputHeight = aOutputSize.height;
desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;
hr = mVideoDevice->CreateVideoProcessorEnumerator(
@ -677,7 +686,9 @@ bool DCLayerTree::EnsureVideoProcessor(const gfx::IntSize& aVideoSize) {
mVideoContext->VideoProcessorSetStreamAutoProcessingMode(mVideoProcessor, 0,
FALSE);
mVideoSize = aVideoSize;
mVideoInputSize = aInputSize;
mVideoOutputSize = aOutputSize;
return true;
}
@ -810,14 +821,58 @@ void DCSurfaceVideo::AttachExternalImage(wr::ExternalImageId aExternalImage) {
return;
}
gfx::IntSize size = texture->AsRenderDXGITextureHost()->GetSize(0);
if (!mVideoSwapChain || mSwapChainSize != size) {
mRenderTextureHost = texture;
}
gfx::Matrix DCSurfaceVideo::CalculateSwapChainSize(
const gfx::Matrix& aTransform) {
if (!mRenderTextureHost) {
MOZ_ASSERT_UNREACHABLE("unexpected to be called");
return aTransform;
}
mVideoSize = mRenderTextureHost->AsRenderDXGITextureHost()->GetSize(0);
gfx::IntSize swapChainSize = mVideoSize;
gfx::Matrix transform = aTransform;
// When video is rendered to axis aligned integer rectangle, video scaling
// could be done by VideoProcessor
bool scaleVideoAtVideoProcessor = false;
if (StaticPrefs::gfx_webrender_dcomp_video_vp_scaling_win_AtStartup() &&
aTransform.PreservesAxisAlignedRectangles()) {
gfx::Size scaledSize = gfx::Size(mVideoSize) * aTransform.ScaleFactors();
gfx::IntSize size(int32_t(std::round(scaledSize.width)),
int32_t(std::round(scaledSize.height)));
if (gfx::FuzzyEqual(scaledSize.width, size.width, 0.1f) &&
gfx::FuzzyEqual(scaledSize.height, size.height, 0.1f)) {
scaleVideoAtVideoProcessor = true;
swapChainSize = size;
}
}
if (scaleVideoAtVideoProcessor) {
// 4:2:2 subsampled formats like YUY2 must have an even width, and 4:2:0
// subsampled formats like NV12 must have an even width and height.
if (swapChainSize.width % 2 == 1) {
swapChainSize.width += 1;
}
if (swapChainSize.height % 2 == 1) {
swapChainSize.height += 1;
}
transform = gfx::Matrix::Translation(aTransform.GetTranslation());
}
if (!mVideoSwapChain || mSwapChainSize != swapChainSize) {
ReleaseDecodeSwapChainResources();
// Update mSwapChainSize before creating SwapChain
mSwapChainSize = swapChainSize;
auto swapChainFormat = GetSwapChainFormat();
bool useYUVSwapChain = IsYUVSwapChainFormat(swapChainFormat);
if (useYUVSwapChain) {
// Tries to create YUV SwapChain
CreateVideoSwapChain(texture);
CreateVideoSwapChain();
if (!mVideoSwapChain) {
mFailedYuvSwapChain = true;
ReleaseDecodeSwapChainResources();
@ -827,10 +882,18 @@ void DCSurfaceVideo::AttachExternalImage(wr::ExternalImageId aExternalImage) {
}
// Tries to create RGB SwapChain
if (!mVideoSwapChain) {
CreateVideoSwapChain(texture);
CreateVideoSwapChain();
}
}
return transform;
}
void DCSurfaceVideo::PresentVideo() {
if (!mRenderTextureHost) {
return;
}
if (!mVideoSwapChain) {
gfxCriticalNote << "Failed to create VideoSwapChain";
RenderThread::Get()->NotifyWebRenderError(
@ -840,7 +903,7 @@ void DCSurfaceVideo::AttachExternalImage(wr::ExternalImageId aExternalImage) {
mVisual->SetContent(mVideoSwapChain);
if (!CallVideoProcessorBlt(texture)) {
if (!CallVideoProcessorBlt()) {
auto swapChainFormat = GetSwapChainFormat();
bool useYUVSwapChain = IsYUVSwapChainFormat(swapChainFormat);
if (useYUVSwapChain) {
@ -859,7 +922,7 @@ void DCSurfaceVideo::AttachExternalImage(wr::ExternalImageId aExternalImage) {
gfxCriticalNoteOnce << "video Present failed: " << gfx::hexa(hr);
}
mPrevTexture = texture;
mPrevTexture = mRenderTextureHost;
}
DXGI_FORMAT DCSurfaceVideo::GetSwapChainFormat() {
@ -869,7 +932,9 @@ DXGI_FORMAT DCSurfaceVideo::GetSwapChainFormat() {
return mDCLayerTree->GetOverlayFormatForSDR();
}
bool DCSurfaceVideo::CreateVideoSwapChain(RenderTextureHost* aTexture) {
bool DCSurfaceVideo::CreateVideoSwapChain() {
MOZ_ASSERT(mRenderTextureHost);
const auto device = mDCLayerTree->GetDevice();
RefPtr<IDXGIDevice> dxgiDevice;
@ -889,12 +954,11 @@ bool DCSurfaceVideo::CreateVideoSwapChain(RenderTextureHost* aTexture) {
return false;
}
gfx::IntSize size = aTexture->AsRenderDXGITextureHost()->GetSize(0);
auto swapChainFormat = GetSwapChainFormat();
DXGI_SWAP_CHAIN_DESC1 desc = {};
desc.Width = size.width;
desc.Height = size.height;
desc.Width = mSwapChainSize.width;
desc.Height = mSwapChainSize.height;
desc.Format = swapChainFormat;
desc.Stereo = FALSE;
desc.SampleDesc.Count = 1;
@ -914,11 +978,11 @@ bool DCSurfaceVideo::CreateVideoSwapChain(RenderTextureHost* aTexture) {
getter_AddRefs(mVideoSwapChain));
if (FAILED(hr)) {
gfxCriticalNote << "Failed to create video SwapChain: " << gfx::hexa(hr);
gfxCriticalNote << "Failed to create video SwapChain: " << gfx::hexa(hr)
<< " " << mSwapChainSize;
return false;
}
mSwapChainSize = size;
mSwapChainFormat = swapChainFormat;
return true;
}
@ -958,11 +1022,13 @@ static Maybe<DXGI_COLOR_SPACE_TYPE> GetSourceDXGIColorSpace(
return GetSourceDXGIColorSpace(info.space, info.range);
}
bool DCSurfaceVideo::CallVideoProcessorBlt(RenderTextureHost* aTexture) {
bool DCSurfaceVideo::CallVideoProcessorBlt() {
MOZ_ASSERT(mRenderTextureHost);
HRESULT hr;
const auto videoDevice = mDCLayerTree->GetVideoDevice();
const auto videoContext = mDCLayerTree->GetVideoContext();
const auto texture = aTexture->AsRenderDXGITextureHost();
const auto texture = mRenderTextureHost->AsRenderDXGITextureHost();
Maybe<DXGI_COLOR_SPACE_TYPE> sourceColorSpace =
GetSourceDXGIColorSpace(texture->GetYUVColorSpace());
@ -981,7 +1047,7 @@ bool DCSurfaceVideo::CallVideoProcessorBlt(RenderTextureHost* aTexture) {
return false;
}
if (!mDCLayerTree->EnsureVideoProcessor(mSwapChainSize)) {
if (!mDCLayerTree->EnsureVideoProcessor(mVideoSize, mSwapChainSize)) {
gfxCriticalNote << "EnsureVideoProcessor Failed";
return false;
}
@ -1057,8 +1123,8 @@ bool DCSurfaceVideo::CallVideoProcessorBlt(RenderTextureHost* aTexture) {
RECT sourceRect;
sourceRect.left = 0;
sourceRect.top = 0;
sourceRect.right = mSwapChainSize.width;
sourceRect.bottom = mSwapChainSize.height;
sourceRect.right = mVideoSize.width;
sourceRect.bottom = mVideoSize.height;
videoContext->VideoProcessorSetStreamSourceRect(videoProcessor, 0, TRUE,
&sourceRect);
@ -1100,7 +1166,6 @@ void DCSurfaceVideo::ReleaseDecodeSwapChainResources() {
::CloseHandle(mSwapChainSurfaceHandle);
mSwapChainSurfaceHandle = 0;
}
mSwapChainSize = gfx::IntSize();
}
DCTile::DCTile(DCLayerTree* aDCLayerTree) : mDCLayerTree(aDCLayerTree) {}

Просмотреть файл

@ -117,7 +117,8 @@ class DCLayerTree {
ID3D11VideoProcessorEnumerator* GetVideoProcessorEnumerator() const {
return mVideoProcessorEnumerator;
}
bool EnsureVideoProcessor(const gfx::IntSize& aVideoSize);
bool EnsureVideoProcessor(const gfx::IntSize& aInputSize,
const gfx::IntSize& aOutputSize);
DCSurface* GetSurface(wr::NativeSurfaceId aId) const;
@ -154,7 +155,8 @@ class DCLayerTree {
RefPtr<ID3D11VideoContext> mVideoContext;
RefPtr<ID3D11VideoProcessor> mVideoProcessor;
RefPtr<ID3D11VideoProcessorEnumerator> mVideoProcessorEnumerator;
gfx::IntSize mVideoSize;
gfx::IntSize mVideoInputSize;
gfx::IntSize mVideoOutputSize;
bool mDebugCounter;
bool mDebugVisualRedrawRegions;
@ -272,13 +274,15 @@ class DCSurfaceVideo : public DCSurface {
DCSurfaceVideo(bool aIsOpaque, DCLayerTree* aDCLayerTree);
void AttachExternalImage(wr::ExternalImageId aExternalImage);
gfx::Matrix CalculateSwapChainSize(const gfx::Matrix& aTransform);
void PresentVideo();
DCSurfaceVideo* AsDCSurfaceVideo() override { return this; }
protected:
DXGI_FORMAT GetSwapChainFormat();
bool CreateVideoSwapChain(RenderTextureHost* aTexture);
bool CallVideoProcessorBlt(RenderTextureHost* aTexture);
bool CreateVideoSwapChain();
bool CallVideoProcessorBlt();
void ReleaseDecodeSwapChainResources();
RefPtr<ID3D11VideoProcessorOutputView> mOutputView;
@ -286,9 +290,11 @@ class DCSurfaceVideo : public DCSurface {
RefPtr<IDXGISwapChain1> mVideoSwapChain;
RefPtr<IDXGIDecodeSwapChain> mDecodeSwapChain;
HANDLE mSwapChainSurfaceHandle;
gfx::IntSize mVideoSize;
gfx::IntSize mSwapChainSize;
DXGI_FORMAT mSwapChainFormat = DXGI_FORMAT_B8G8R8A8_UNORM;
bool mFailedYuvSwapChain = false;
RefPtr<RenderTextureHost> mRenderTextureHost;
RefPtr<RenderTextureHost> mPrevTexture;
};

Просмотреть файл

@ -5840,6 +5840,10 @@
type: bool
value: @IS_NIGHTLY_BUILD@
mirror: once
- name: gfx.webrender.dcomp-video-vp-scaling-win
type: bool
value: @IS_NIGHTLY_BUILD@
mirror: once
#endif
# Whether or not fallback to Software WebRender requires the GPU process.