Bug 1629788 [Wayland] Implement SW decoding to WaylandDMABufSurface, r=jya

- Use WaylandDMABufSurface for SW decoded frames when they can be converted to NV12 format.
- Rename VAAPIFrameHolder to DMABufSurface and use it as general placeholder for WaylandDMABufSurface surface.
  It's used for VA-API/SW video playback and holds decoded video images until they are used by the gecko rendering engine.
- Implement a linked list of DMABufSurface where recently used frames are stored. The frames are recycled by ffmpeg
  decoder for VA-API and SW video playback.

Differential Revision: https://phabricator.services.mozilla.com/D78292
This commit is contained in:
Martin Stransky 2020-06-13 18:38:38 +00:00
Родитель 864e28453a
Коммит 4dbb2503ff
2 изменённых файлов: 196 добавлений и 76 удалений

Просмотреть файл

@ -122,28 +122,43 @@ static AVPixelFormat ChooseVAAPIPixelFormat(AVCodecContext* aCodecContext,
return AV_PIX_FMT_NONE;
}
VAAPIFrameHolder::VAAPIFrameHolder(FFmpegLibWrapper* aLib,
WaylandDMABufSurface* aSurface,
AVCodecContext* aAVCodecContext,
AVFrame* aAVFrame)
: mLib(aLib),
mSurface(aSurface),
mAVHWFramesContext(mLib->av_buffer_ref(aAVCodecContext->hw_frames_ctx)),
mHWAVBuffer(mLib->av_buffer_ref(aAVFrame->buf[0])) {
FFMPEG_LOG("VAAPIFrameHolder is adding dmabuf surface UID = %d",
mSurface->GetUID());
DMABufSurface::DMABufSurface(WaylandDMABufSurface* aSurface,
FFmpegLibWrapper* aLib)
: mSurface(aSurface),
mLib(aLib),
mAVHWFramesContext(nullptr),
mHWAVBuffer(nullptr) {
// Create global refcount object to track mSurface usage over
// gecko rendering engine. We can't release it until it's used
// by GL compositor / WebRender.
mSurface->GlobalRefCountCreate();
FFMPEG_LOG("DMABufSurface: creating surface UID = %d", mSurface->GetUID());
}
VAAPIFrameHolder::~VAAPIFrameHolder() {
FFMPEG_LOG("VAAPIFrameHolder is releasing dmabuf surface UID = %d",
void DMABufSurface::LockVAAPIData(AVCodecContext* aAVCodecContext,
AVFrame* aAVFrame) {
FFMPEG_LOG("DMABufSurface: VAAPI locking dmabuf surface UID = %d",
mSurface->GetUID());
mLib->av_buffer_unref(&mHWAVBuffer);
mLib->av_buffer_unref(&mAVHWFramesContext);
if (aAVCodecContext && aAVFrame) {
mAVHWFramesContext = mLib->av_buffer_ref(aAVCodecContext->hw_frames_ctx);
mHWAVBuffer = mLib->av_buffer_ref(aAVFrame->buf[0]);
}
}
void DMABufSurface::ReleaseVAAPIData() {
FFMPEG_LOG("DMABufSurface: VAAPI releasing dmabuf surface UID = %d",
mSurface->GetUID());
if (mHWAVBuffer && mAVHWFramesContext) {
mLib->av_buffer_unref(&mHWAVBuffer);
mLib->av_buffer_unref(&mAVHWFramesContext);
}
mSurface->ReleaseSurface();
}
DMABufSurface::~DMABufSurface() {
FFMPEG_LOG("DMABufSurface: deleting dmabuf surface UID = %d",
mSurface->GetUID());
ReleaseVAAPIData();
}
AVCodec* FFmpegVideoDecoder<LIBAV_VER>::FindVAAPICodec() {
@ -225,14 +240,6 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::InitVAAPIDecoder() {
return NS_ERROR_NOT_AVAILABLE;
}
auto layersBackend = mImageAllocator
? mImageAllocator->GetCompositorBackendType()
: layers::LayersBackend::LAYERS_BASIC;
if (layersBackend != layers::LayersBackend::LAYERS_WR) {
FFMPEG_LOG("VA-API works with WebRender only!");
return NS_ERROR_NOT_AVAILABLE;
}
AVCodec* codec = FindVAAPICodec();
if (!codec) {
FFMPEG_LOG("Couldn't find ffmpeg VA-API decoder");
@ -318,6 +325,7 @@ FFmpegVideoDecoder<LIBAV_VER>::FFmpegVideoDecoder(
mVAAPIDeviceContext(nullptr),
mDisableHardwareDecoding(aDisableHardwareDecoding),
mDisplay(nullptr),
mUseDMABufSurfaces(false),
#endif
mImageAllocator(aAllocator),
mImageContainer(aImageContainer),
@ -327,13 +335,25 @@ FFmpegVideoDecoder<LIBAV_VER>::FFmpegVideoDecoder(
// initialization.
mExtraData = new MediaByteBuffer;
mExtraData->AppendElements(*aConfig.mExtraData);
#ifdef MOZ_WAYLAND_USE_VAAPI
mUseDMABufSurfaces =
gfxPlatformGtk::GetPlatform()->UseWaylandDMABufVideoTextures() &&
mImageAllocator &&
(mImageAllocator->GetCompositorBackendType() ==
layers::LayersBackend::LAYERS_WR);
if (!mUseDMABufSurfaces) {
FFMPEG_LOG("DMA-BUF/VA-API can't be used, WebRender/Wayland is disabled");
}
#endif
}
RefPtr<MediaDataDecoder::InitPromise> FFmpegVideoDecoder<LIBAV_VER>::Init() {
MediaResult rv;
#ifdef MOZ_WAYLAND_USE_VAAPI
if (!mDisableHardwareDecoding) {
if (mUseDMABufSurfaces && !mDisableHardwareDecoding) {
rv = InitVAAPIDecoder();
if (NS_SUCCEEDED(rv)) {
return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
@ -433,6 +453,8 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::DoDecode(
}
# ifdef MOZ_WAYLAND_USE_VAAPI
// Release unused VA-API surfaces before avcodec_receive_frame() as
// ffmpeg recycles VASurface for HW decoding.
if (mVAAPIDeviceContext) {
ReleaseUnusedVAAPIFrames();
}
@ -453,10 +475,19 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::DoDecode(
MediaResult rv;
# ifdef MOZ_WAYLAND_USE_VAAPI
if (mVAAPIDeviceContext) {
MOZ_ASSERT(mFrame->format == AV_PIX_FMT_VAAPI_VLD);
rv = CreateImageVAAPI(mFrame->pkt_pos, mFrame->pkt_pts,
mFrame->pkt_duration, aResults);
if (mVAAPIDeviceContext || mUseDMABufSurfaces) {
rv = CreateImageDMABuf(mFrame->pkt_pos, mFrame->pkt_pts,
mFrame->pkt_duration, aResults);
// If VA-API playback failed, just quit. Decoder is going to be restarted
// without VA-API.
// If VA-API is already off, disable DMABufSurfaces and fallback to
// default.
if (NS_FAILED(rv) && !mVAAPIDeviceContext) {
mUseDMABufSurfaces = false;
rv = CreateImage(mFrame->pkt_pos, mFrame->pkt_pts, mFrame->pkt_duration,
aResults);
}
} else
# endif
{
@ -645,69 +676,122 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImage(
#ifdef MOZ_WAYLAND_USE_VAAPI
void FFmpegVideoDecoder<LIBAV_VER>::ReleaseUnusedVAAPIFrames() {
std::list<UniquePtr<VAAPIFrameHolder>>::iterator holder =
mFrameHolders.begin();
while (holder != mFrameHolders.end()) {
if (!(*holder)->IsUsed()) {
holder = mFrameHolders.erase(holder);
} else {
holder++;
int len = mDMABufSurfaces.Length();
for (int i = 0; i < len; i++) {
if (!mDMABufSurfaces[i].IsUsed()) {
mDMABufSurfaces[i].ReleaseVAAPIData();
}
}
}
void FFmpegVideoDecoder<LIBAV_VER>::ReleaseAllVAAPIFrames() {
mFrameHolders.clear();
DMABufSurface* FFmpegVideoDecoder<LIBAV_VER>::GetUnusedDMABufSurface() {
int len = mDMABufSurfaces.Length();
for (int i = 0; i < len; i++) {
if (!mDMABufSurfaces[i].IsUsed()) {
return &(mDMABufSurfaces[i]);
}
}
return nullptr;
}
MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageVAAPI(
int64_t aOffset, int64_t aPts, int64_t aDuration,
MediaDataDecoder::DecodedData& aResults) {
FFMPEG_LOG("Got one VAAPI frame output with pts=%" PRId64 " dts=%" PRId64
" duration=%" PRId64 " opaque=%" PRId64,
aPts, mFrame->pkt_dts, aDuration, mCodecContext->reordered_opaque);
void FFmpegVideoDecoder<LIBAV_VER>::ReleaseDMABufSurfaces() {
mDMABufSurfaces.Clear();
}
VADRMPRIMESurfaceDescriptor va_desc;
bool FFmpegVideoDecoder<LIBAV_VER>::GetVAAPISurfaceDescriptor(
VADRMPRIMESurfaceDescriptor& aVaDesc) {
VASurfaceID surface_id = (VASurfaceID)(uintptr_t)mFrame->data[3];
VAStatus vas = mLib->vaExportSurfaceHandle(
mDisplay, surface_id, VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
VA_EXPORT_SURFACE_READ_ONLY | VA_EXPORT_SURFACE_SEPARATE_LAYERS,
&va_desc);
&aVaDesc);
if (vas != VA_STATUS_SUCCESS) {
return MediaResult(
NS_ERROR_OUT_OF_MEMORY,
RESULT_DETAIL("Unable to get frame by vaExportSurfaceHandle()"));
return false;
}
vas = mLib->vaSyncSurface(mDisplay, surface_id);
if (vas != VA_STATUS_SUCCESS) {
NS_WARNING("vaSyncSurface() failed.");
}
va_desc.width = mFrame->width;
va_desc.height = mFrame->height;
aVaDesc.width = mFrame->width;
aVaDesc.height = mFrame->height;
RefPtr<WaylandDMABufSurfaceNV12> surface =
WaylandDMABufSurfaceNV12::CreateNV12Surface(va_desc);
if (!surface) {
return true;
}
MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageDMABuf(
int64_t aOffset, int64_t aPts, int64_t aDuration,
MediaDataDecoder::DecodedData& aResults) {
FFMPEG_LOG("DMABUF/VA-API Got one frame output with pts=%" PRId64
"dts=%" PRId64 " duration=%" PRId64 " opaque=%" PRId64,
aPts, mFrame->pkt_dts, aDuration, mCodecContext->reordered_opaque);
// With SW decode we support only YUV420P format with DMABuf surfaces.
if (!mVAAPIDeviceContext && mCodecContext->pix_fmt != AV_PIX_FMT_YUV420P) {
return MediaResult(
NS_ERROR_OUT_OF_MEMORY,
RESULT_DETAIL("Unable to allocate WaylandDMABufSurfaceNV12."));
NS_ERROR_NOT_IMPLEMENTED,
RESULT_DETAIL("DMA-BUF textures supports YUV420P format only"));
}
VADRMPRIMESurfaceDescriptor vaDesc;
if (mVAAPIDeviceContext && !GetVAAPISurfaceDescriptor(vaDesc)) {
return MediaResult(
NS_ERROR_OUT_OF_MEMORY,
RESULT_DETAIL("Unable to get frame by vaExportSurfaceHandle()"));
}
RefPtr<WaylandDMABufSurfaceNV12> waylandSurface;
DMABufSurface* surface = GetUnusedDMABufSurface();
if (!surface) {
if (mVAAPIDeviceContext) {
waylandSurface = WaylandDMABufSurfaceNV12::CreateNV12Surface(vaDesc);
} else {
waylandSurface = WaylandDMABufSurfaceNV12::CreateNV12Surface(
mFrame->width, mFrame->height, (void**)mFrame->data,
mFrame->linesize);
}
if (!waylandSurface) {
return MediaResult(
NS_ERROR_OUT_OF_MEMORY,
RESULT_DETAIL("Unable to get WaylandDMABufSurfaceNV12"));
}
# ifdef MOZ_LOGGING
static int uid = 0;
surface->SetUID(++uid);
FFMPEG_LOG("Created dmabuf UID = %d HW surface %x", uid, surface_id);
static int uid = 0;
waylandSurface->SetUID(++uid);
FFMPEG_LOG("Created new WaylandDMABufSurface UID = %d", uid);
# endif
mDMABufSurfaces.AppendElement(DMABufSurface(waylandSurface, mLib));
surface = &(mDMABufSurfaces[mDMABufSurfaces.Length() - 1]);
} else {
waylandSurface = surface->GetWaylandDMABufSurface();
bool ret;
surface->SetYUVColorSpace(GetFrameColorSpace());
if (mVAAPIDeviceContext) {
ret = waylandSurface->UpdateNV12Data(vaDesc);
} else {
ret = waylandSurface->UpdateNV12Data((void**)mFrame->data,
mFrame->linesize);
}
// Store reference to the decoded HW buffer, see VAAPIFrameHolder struct.
auto holder =
MakeUnique<VAAPIFrameHolder>(mLib, surface, mCodecContext, mFrame);
mFrameHolders.push_back(std::move(holder));
if (!ret) {
return MediaResult(
NS_ERROR_OUT_OF_MEMORY,
RESULT_DETAIL("Unable to upload data to WaylandDMABufSurfaceNV12"));
}
FFMPEG_LOG("Reusing WaylandDMABufSurface UID = %d",
waylandSurface->GetUID());
}
RefPtr<layers::Image> im = new layers::WaylandDMABUFSurfaceImage(surface);
if (mVAAPIDeviceContext) {
surface->LockVAAPIData(mCodecContext, mFrame);
}
waylandSurface->SetYUVColorSpace(GetFrameColorSpace());
RefPtr<layers::Image> im =
new layers::WaylandDMABUFSurfaceImage(waylandSurface);
RefPtr<VideoData> vp = VideoData::CreateFromImage(
mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
@ -758,8 +842,8 @@ AVCodecID FFmpegVideoDecoder<LIBAV_VER>::GetCodecId(
void FFmpegVideoDecoder<LIBAV_VER>::ProcessShutdown() {
#ifdef MOZ_WAYLAND_USE_VAAPI
ReleaseDMABufSurfaces();
if (mVAAPIDeviceContext) {
ReleaseAllVAAPIFrames();
mLib->av_buffer_unref(&mVAAPIDeviceContext);
}
#endif

Просмотреть файл

@ -12,12 +12,26 @@
#include "SimpleMap.h"
#ifdef MOZ_WAYLAND_USE_VAAPI
# include "mozilla/widget/WaylandDMABufSurface.h"
# include <list>
# include "mozilla/LinkedList.h"
#endif
namespace mozilla {
#ifdef MOZ_WAYLAND_USE_VAAPI
// DMABufSurface holds a reference to GPU data with a video frame.
//
// Actual GPU pixel data are stored at WaylandDMABufSurface and
// a WaylandDMABufSurface is passed to the gecko GL rendering pipeline via
// WaylandDMABUFSurfaceImage.
//
// DMABufSurface can optionally hold VA-API ffmpeg related data to keep
// GPU data locked until we need them.
//
// DMABufSurface is used for both HW accelerated video decoding (VA-API)
// and ffmpeg SW decoding.
//
// VA-API scenario
//
// When VA-API decoding is running, ffmpeg allocates AVHWFramesContext - a pool
// of "hardware" frames. Every "hardware" frame (VASurface) is backed
// by actual piece of GPU memory which holds the decoded image data.
@ -33,20 +47,37 @@ namespace mozilla {
// Unfortunately there isn't any obvious way how to mark particular VASurface
// as used. The best we can do is to hold a reference to particular AVBuffer
// from decoded AVFrame and AVHWFramesContext which owns the AVBuffer.
class VAAPIFrameHolder final {
//
// FFmpeg SW decoding scenario
//
// When SW ffmpeg decoding is running, DMABufSurface contains only
// a WaylandDMABufSurface reference and VA-API related members are null.
// We own the WaylandDMABufSurface underlying GPU data and we use it for
// repeated rendering of video frames.
//
class DMABufSurface final {
public:
VAAPIFrameHolder(FFmpegLibWrapper* aLib, WaylandDMABufSurface* aSurface,
AVCodecContext* aAVCodecContext, AVFrame* aAVFrame);
~VAAPIFrameHolder();
DMABufSurface(WaylandDMABufSurface* aSurface, FFmpegLibWrapper* aLib);
~DMABufSurface();
// Lock VAAPI related data
void LockVAAPIData(AVCodecContext* aAVCodecContext, AVFrame* aAVFrame);
// Release VAAPI related data, DMABufSurface can be reused
// for another frame.
void ReleaseVAAPIData();
// Check if WaylandDMABufSurface is used by any gecko rendering process
// (WebRender or GL compositor) or by WaylandDMABUFSurfaceImage/VideoData.
bool IsUsed() const { return mSurface->IsGlobalRefSet(); }
const RefPtr<WaylandDMABufSurfaceNV12> GetWaylandDMABufSurface() {
return mSurface->GetAsWaylandDMABufSurfaceNV12();
}
private:
const FFmpegLibWrapper* mLib;
const RefPtr<WaylandDMABufSurface> mSurface;
const FFmpegLibWrapper* mLib;
AVBufferRef* mAVHWFramesContext;
AVBufferRef* mHWAVBuffer;
};
@ -118,11 +149,15 @@ class FFmpegVideoDecoder<LIBAV_VER>
void InitVAAPICodecContext();
AVCodec* FindVAAPICodec();
bool IsHardwareAccelerated(nsACString& aFailureReason) const override;
bool GetVAAPISurfaceDescriptor(VADRMPRIMESurfaceDescriptor& aVaDesc);
MediaResult CreateImageDMABuf(int64_t aOffset, int64_t aPts,
int64_t aDuration,
MediaDataDecoder::DecodedData& aResults);
MediaResult CreateImageVAAPI(int64_t aOffset, int64_t aPts, int64_t aDuration,
MediaDataDecoder::DecodedData& aResults);
void ReleaseUnusedVAAPIFrames();
void ReleaseAllVAAPIFrames();
DMABufSurface* GetUnusedDMABufSurface();
void ReleaseDMABufSurfaces();
#endif
/**
@ -138,7 +173,8 @@ class FFmpegVideoDecoder<LIBAV_VER>
AVBufferRef* mVAAPIDeviceContext;
const bool mDisableHardwareDecoding;
VADisplay mDisplay;
std::list<UniquePtr<VAAPIFrameHolder>> mFrameHolders;
bool mUseDMABufSurfaces;
nsTArray<DMABufSurface> mDMABufSurfaces;
#endif
RefPtr<KnowsCompositor> mImageAllocator;
RefPtr<ImageContainer> mImageContainer;