Bug 1629788 [Wayland] Implement SW decoding to WaylandDMABufSurface, r=jya

- Use WaylandDMABufSurface for SW-decoded frames when they can be converted to NV12 format.
- Rename VAAPIFrameHolder to DMABufSurface and use it as a general holder of a WaylandDMABufSurface surface.
  It's used for VA-API/SW video playback and holds decoded video images until they are used by the gecko rendering engine.
- Implement a linked list of DMABufSurface objects where recently used frames are stored. The frames are recycled by the ffmpeg
  decoder for VA-API and SW video playback.

Differential Revision: https://phabricator.services.mozilla.com/D78292
Martin Stransky 2020-06-13 18:38:38 +00:00
Parent 864e28453a
Commit 4dbb2503ff
2 changed files: 196 additions and 76 deletions
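
The recycling scheme the commit message describes can be sketched in a few lines. This is a minimal, hypothetical model — RecycledSurface, SurfacePool and mGlobalRefs are illustrative names, not the Gecko types; the real implementation is the DMABufSurface class and the nsTArray member in the diff below:

#include <vector>

// One decoded frame parked in the pool.
struct RecycledSurface {
  // True while WebRender / the GL compositor still holds a global reference.
  bool IsUsed() const { return mGlobalRefs > 0; }
  // Drop the ffmpeg buffer references so the decoder may recycle the
  // underlying VASurface (ReleaseVAAPIData() in the real code).
  void ReleaseDecoderData() { /* av_buffer_unref() the held references */ }
  int mGlobalRefs = 0;
};

struct SurfacePool {
  // Called before each avcodec_receive_frame(): frames the compositor has
  // finished with hand their VASurface back to ffmpeg.
  void ReleaseUnused() {
    for (auto& s : mSurfaces) {
      if (!s.IsUsed()) s.ReleaseDecoderData();
    }
  }
  // Reuse an idle surface for the next decoded frame, if any.
  RecycledSurface* GetUnused() {
    for (auto& s : mSurfaces) {
      if (!s.IsUsed()) return &s;
    }
    return nullptr;  // caller allocates a new surface and appends it
  }
  std::vector<RecycledSurface> mSurfaces;
};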

View file

@@ -122,29 +122,44 @@ static AVPixelFormat ChooseVAAPIPixelFormat(AVCodecContext* aCodecContext,
   return AV_PIX_FMT_NONE;
 }
 
-VAAPIFrameHolder::VAAPIFrameHolder(FFmpegLibWrapper* aLib,
-                                   WaylandDMABufSurface* aSurface,
-                                   AVCodecContext* aAVCodecContext,
-                                   AVFrame* aAVFrame)
-    : mLib(aLib),
-      mSurface(aSurface),
-      mAVHWFramesContext(mLib->av_buffer_ref(aAVCodecContext->hw_frames_ctx)),
-      mHWAVBuffer(mLib->av_buffer_ref(aAVFrame->buf[0])) {
-  FFMPEG_LOG("VAAPIFrameHolder is adding dmabuf surface UID = %d",
-             mSurface->GetUID());
+DMABufSurface::DMABufSurface(WaylandDMABufSurface* aSurface,
+                             FFmpegLibWrapper* aLib)
+    : mSurface(aSurface),
+      mLib(aLib),
+      mAVHWFramesContext(nullptr),
+      mHWAVBuffer(nullptr) {
   // Create global refcount object to track mSurface usage over
   // the gecko rendering engine. We can't release it until it's used
   // by GL compositor / WebRender.
   mSurface->GlobalRefCountCreate();
+  FFMPEG_LOG("DMABufSurface: creating surface UID = %d", mSurface->GetUID());
 }
 
-VAAPIFrameHolder::~VAAPIFrameHolder() {
-  FFMPEG_LOG("VAAPIFrameHolder is releasing dmabuf surface UID = %d",
-             mSurface->GetUID());
-  mLib->av_buffer_unref(&mHWAVBuffer);
-  mLib->av_buffer_unref(&mAVHWFramesContext);
+void DMABufSurface::LockVAAPIData(AVCodecContext* aAVCodecContext,
+                                  AVFrame* aAVFrame) {
+  FFMPEG_LOG("DMABufSurface: VAAPI locking dmabuf surface UID = %d",
+             mSurface->GetUID());
+  if (aAVCodecContext && aAVFrame) {
+    mAVHWFramesContext = mLib->av_buffer_ref(aAVCodecContext->hw_frames_ctx);
+    mHWAVBuffer = mLib->av_buffer_ref(aAVFrame->buf[0]);
+  }
+}
+
+void DMABufSurface::ReleaseVAAPIData() {
+  FFMPEG_LOG("DMABufSurface: VAAPI releasing dmabuf surface UID = %d",
+             mSurface->GetUID());
+  if (mHWAVBuffer && mAVHWFramesContext) {
+    mLib->av_buffer_unref(&mHWAVBuffer);
+    mLib->av_buffer_unref(&mAVHWFramesContext);
+  }
+  mSurface->ReleaseSurface();
+}
+
+DMABufSurface::~DMABufSurface() {
+  FFMPEG_LOG("DMABufSurface: deleting dmabuf surface UID = %d",
+             mSurface->GetUID());
+  ReleaseVAAPIData();
 }
 
 AVCodec* FFmpegVideoDecoder<LIBAV_VER>::FindVAAPICodec() {
   AVCodec* decoder = mLib->avcodec_find_decoder(mCodecID);
@@ -225,14 +240,6 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::InitVAAPIDecoder() {
     return NS_ERROR_NOT_AVAILABLE;
   }
 
-  auto layersBackend = mImageAllocator
-                           ? mImageAllocator->GetCompositorBackendType()
-                           : layers::LayersBackend::LAYERS_BASIC;
-  if (layersBackend != layers::LayersBackend::LAYERS_WR) {
-    FFMPEG_LOG("VA-API works with WebRender only!");
-    return NS_ERROR_NOT_AVAILABLE;
-  }
-
   AVCodec* codec = FindVAAPICodec();
   if (!codec) {
     FFMPEG_LOG("Couldn't find ffmpeg VA-API decoder");
@@ -318,6 +325,7 @@ FFmpegVideoDecoder<LIBAV_VER>::FFmpegVideoDecoder(
       mVAAPIDeviceContext(nullptr),
       mDisableHardwareDecoding(aDisableHardwareDecoding),
       mDisplay(nullptr),
+      mUseDMABufSurfaces(false),
 #endif
       mImageAllocator(aAllocator),
       mImageContainer(aImageContainer),
@@ -327,13 +335,25 @@ FFmpegVideoDecoder<LIBAV_VER>::FFmpegVideoDecoder(
   // initialization.
   mExtraData = new MediaByteBuffer;
   mExtraData->AppendElements(*aConfig.mExtraData);
+
+#ifdef MOZ_WAYLAND_USE_VAAPI
+  mUseDMABufSurfaces =
+      gfxPlatformGtk::GetPlatform()->UseWaylandDMABufVideoTextures() &&
+      mImageAllocator &&
+      (mImageAllocator->GetCompositorBackendType() ==
+       layers::LayersBackend::LAYERS_WR);
+
+  if (!mUseDMABufSurfaces) {
+    FFMPEG_LOG("DMA-BUF/VA-API can't be used, WebRender/Wayland is disabled");
+  }
+#endif
 }
 
 RefPtr<MediaDataDecoder::InitPromise> FFmpegVideoDecoder<LIBAV_VER>::Init() {
   MediaResult rv;
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
-  if (!mDisableHardwareDecoding) {
+  if (mUseDMABufSurfaces && !mDisableHardwareDecoding) {
     rv = InitVAAPIDecoder();
     if (NS_SUCCEEDED(rv)) {
       return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
@@ -433,6 +453,8 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::DoDecode(
   }
 
 # ifdef MOZ_WAYLAND_USE_VAAPI
+  // Release unused VA-API surfaces before avcodec_receive_frame() as
+  // ffmpeg recycles VASurfaces for HW decoding.
   if (mVAAPIDeviceContext) {
     ReleaseUnusedVAAPIFrames();
   }
@@ -453,10 +475,19 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::DoDecode(
   MediaResult rv;
 
 # ifdef MOZ_WAYLAND_USE_VAAPI
-  if (mVAAPIDeviceContext) {
-    MOZ_ASSERT(mFrame->format == AV_PIX_FMT_VAAPI_VLD);
-    rv = CreateImageVAAPI(mFrame->pkt_pos, mFrame->pkt_pts,
-                          mFrame->pkt_duration, aResults);
+  if (mVAAPIDeviceContext || mUseDMABufSurfaces) {
+    rv = CreateImageDMABuf(mFrame->pkt_pos, mFrame->pkt_pts,
+                           mFrame->pkt_duration, aResults);
+    // If VA-API playback failed, just quit. The decoder is going to be
+    // restarted without VA-API.
+    // If VA-API is already off, disable DMABufSurfaces and fall back to
+    // the default image path.
+    if (NS_FAILED(rv) && !mVAAPIDeviceContext) {
+      mUseDMABufSurfaces = false;
+      rv = CreateImage(mFrame->pkt_pos, mFrame->pkt_pts, mFrame->pkt_duration,
+                       aResults);
+    }
   } else
 # endif
   {
@@ -645,69 +676,122 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImage(
 #ifdef MOZ_WAYLAND_USE_VAAPI
 void FFmpegVideoDecoder<LIBAV_VER>::ReleaseUnusedVAAPIFrames() {
-  std::list<UniquePtr<VAAPIFrameHolder>>::iterator holder =
-      mFrameHolders.begin();
-  while (holder != mFrameHolders.end()) {
-    if (!(*holder)->IsUsed()) {
-      holder = mFrameHolders.erase(holder);
-    } else {
-      holder++;
+  int len = mDMABufSurfaces.Length();
+  for (int i = 0; i < len; i++) {
+    if (!mDMABufSurfaces[i].IsUsed()) {
+      mDMABufSurfaces[i].ReleaseVAAPIData();
     }
   }
 }
 
-void FFmpegVideoDecoder<LIBAV_VER>::ReleaseAllVAAPIFrames() {
-  mFrameHolders.clear();
+DMABufSurface* FFmpegVideoDecoder<LIBAV_VER>::GetUnusedDMABufSurface() {
+  int len = mDMABufSurfaces.Length();
+  for (int i = 0; i < len; i++) {
+    if (!mDMABufSurfaces[i].IsUsed()) {
+      return &(mDMABufSurfaces[i]);
+    }
+  }
+  return nullptr;
 }
 
-MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageVAAPI(
-    int64_t aOffset, int64_t aPts, int64_t aDuration,
-    MediaDataDecoder::DecodedData& aResults) {
-  FFMPEG_LOG("Got one VAAPI frame output with pts=%" PRId64 " dts=%" PRId64
-             " duration=%" PRId64 " opaque=%" PRId64,
-             aPts, mFrame->pkt_dts, aDuration, mCodecContext->reordered_opaque);
+void FFmpegVideoDecoder<LIBAV_VER>::ReleaseDMABufSurfaces() {
+  mDMABufSurfaces.Clear();
+}
 
-  VADRMPRIMESurfaceDescriptor va_desc;
+bool FFmpegVideoDecoder<LIBAV_VER>::GetVAAPISurfaceDescriptor(
+    VADRMPRIMESurfaceDescriptor& aVaDesc) {
   VASurfaceID surface_id = (VASurfaceID)(uintptr_t)mFrame->data[3];
   VAStatus vas = mLib->vaExportSurfaceHandle(
       mDisplay, surface_id, VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
       VA_EXPORT_SURFACE_READ_ONLY | VA_EXPORT_SURFACE_SEPARATE_LAYERS,
-      &va_desc);
+      &aVaDesc);
   if (vas != VA_STATUS_SUCCESS) {
-    return MediaResult(
-        NS_ERROR_OUT_OF_MEMORY,
-        RESULT_DETAIL("Unable to get frame by vaExportSurfaceHandle()"));
+    return false;
   }
   vas = mLib->vaSyncSurface(mDisplay, surface_id);
   if (vas != VA_STATUS_SUCCESS) {
     NS_WARNING("vaSyncSurface() failed.");
   }
 
-  va_desc.width = mFrame->width;
-  va_desc.height = mFrame->height;
+  aVaDesc.width = mFrame->width;
+  aVaDesc.height = mFrame->height;
 
-  RefPtr<WaylandDMABufSurfaceNV12> surface =
-      WaylandDMABufSurfaceNV12::CreateNV12Surface(va_desc);
-  if (!surface) {
-    return MediaResult(
-        NS_ERROR_OUT_OF_MEMORY,
-        RESULT_DETAIL("Unable to allocate WaylandDMABufSurfaceNV12."));
-  }
+  return true;
+}
 
+MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageDMABuf(
+    int64_t aOffset, int64_t aPts, int64_t aDuration,
+    MediaDataDecoder::DecodedData& aResults) {
+  FFMPEG_LOG("DMABUF/VA-API Got one frame output with pts=%" PRId64
+             " dts=%" PRId64 " duration=%" PRId64 " opaque=%" PRId64,
+             aPts, mFrame->pkt_dts, aDuration, mCodecContext->reordered_opaque);
+
+  // With SW decode we support only the YUV420P format with DMABuf surfaces.
+  if (!mVAAPIDeviceContext && mCodecContext->pix_fmt != AV_PIX_FMT_YUV420P) {
+    return MediaResult(
+        NS_ERROR_NOT_IMPLEMENTED,
+        RESULT_DETAIL("DMA-BUF textures support the YUV420P format only"));
+  }
+
+  VADRMPRIMESurfaceDescriptor vaDesc;
+  if (mVAAPIDeviceContext && !GetVAAPISurfaceDescriptor(vaDesc)) {
+    return MediaResult(
+        NS_ERROR_OUT_OF_MEMORY,
+        RESULT_DETAIL("Unable to get frame by vaExportSurfaceHandle()"));
+  }
+
+  RefPtr<WaylandDMABufSurfaceNV12> waylandSurface;
+
+  DMABufSurface* surface = GetUnusedDMABufSurface();
+  if (!surface) {
+    if (mVAAPIDeviceContext) {
+      waylandSurface = WaylandDMABufSurfaceNV12::CreateNV12Surface(vaDesc);
+    } else {
+      waylandSurface = WaylandDMABufSurfaceNV12::CreateNV12Surface(
+          mFrame->width, mFrame->height, (void**)mFrame->data,
+          mFrame->linesize);
+    }
+    if (!waylandSurface) {
+      return MediaResult(
+          NS_ERROR_OUT_OF_MEMORY,
+          RESULT_DETAIL("Unable to get WaylandDMABufSurfaceNV12"));
+    }
 # ifdef MOZ_LOGGING
-  static int uid = 0;
-  surface->SetUID(++uid);
-  FFMPEG_LOG("Created dmabuf UID = %d HW surface %x", uid, surface_id);
+    static int uid = 0;
+    waylandSurface->SetUID(++uid);
+    FFMPEG_LOG("Created new WaylandDMABufSurface UID = %d", uid);
 # endif
+    mDMABufSurfaces.AppendElement(DMABufSurface(waylandSurface, mLib));
+    surface = &(mDMABufSurfaces[mDMABufSurfaces.Length() - 1]);
+  } else {
+    waylandSurface = surface->GetWaylandDMABufSurface();
+    bool ret;
+    if (mVAAPIDeviceContext) {
+      ret = waylandSurface->UpdateNV12Data(vaDesc);
+    } else {
+      ret = waylandSurface->UpdateNV12Data((void**)mFrame->data,
+                                           mFrame->linesize);
+    }
+    if (!ret) {
+      return MediaResult(
+          NS_ERROR_OUT_OF_MEMORY,
+          RESULT_DETAIL("Unable to upload data to WaylandDMABufSurfaceNV12"));
+    }
+    FFMPEG_LOG("Reusing WaylandDMABufSurface UID = %d",
+               waylandSurface->GetUID());
+  }
 
-  surface->SetYUVColorSpace(GetFrameColorSpace());
-
-  // Store reference to the decoded HW buffer, see VAAPIFrameHolder struct.
-  auto holder =
-      MakeUnique<VAAPIFrameHolder>(mLib, surface, mCodecContext, mFrame);
-  mFrameHolders.push_back(std::move(holder));
-
-  RefPtr<layers::Image> im = new layers::WaylandDMABUFSurfaceImage(surface);
+  if (mVAAPIDeviceContext) {
+    surface->LockVAAPIData(mCodecContext, mFrame);
+  }
+
+  waylandSurface->SetYUVColorSpace(GetFrameColorSpace());
+  RefPtr<layers::Image> im =
+      new layers::WaylandDMABUFSurfaceImage(waylandSurface);
 
   RefPtr<VideoData> vp = VideoData::CreateFromImage(
       mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
@@ -758,8 +842,8 @@ AVCodecID FFmpegVideoDecoder<LIBAV_VER>::GetCodecId(
 
 void FFmpegVideoDecoder<LIBAV_VER>::ProcessShutdown() {
 #ifdef MOZ_WAYLAND_USE_VAAPI
+  ReleaseDMABufSurfaces();
   if (mVAAPIDeviceContext) {
-    ReleaseAllVAAPIFrames();
     mLib->av_buffer_unref(&mVAAPIDeviceContext);
   }
 #endif

View file

@@ -12,12 +12,26 @@
 #include "SimpleMap.h"
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
 # include "mozilla/widget/WaylandDMABufSurface.h"
-# include <list>
+# include "mozilla/LinkedList.h"
 #endif
 
 namespace mozilla {
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
+// DMABufSurface holds a reference to the GPU data of a video frame.
+//
+// The actual GPU pixel data is stored in a WaylandDMABufSurface, which is
+// passed to the gecko GL rendering pipeline via WaylandDMABUFSurfaceImage.
+//
+// DMABufSurface can optionally hold VA-API ffmpeg related data to keep
+// the GPU data locked until we need it.
+//
+// DMABufSurface is used for both HW accelerated video decoding (VA-API)
+// and ffmpeg SW decoding.
+//
+// VA-API scenario
+//
 // When VA-API decoding is running, ffmpeg allocates AVHWFramesContext - a pool
 // of "hardware" frames. Every "hardware" frame (VASurface) is backed
 // by actual piece of GPU memory which holds the decoded image data.
@@ -33,20 +47,37 @@ namespace mozilla {
 // Unfortunately there isn't any obvious way to mark a particular VASurface
 // as used. The best we can do is to hold a reference to particular AVBuffer
 // from decoded AVFrame and AVHWFramesContext which owns the AVBuffer.
+//
+// FFmpeg SW decoding scenario
+//
+// When SW ffmpeg decoding is running, DMABufSurface contains only
+// a WaylandDMABufSurface reference and the VA-API related members are null.
+// We own the WaylandDMABufSurface underlying GPU data and we use it for
+// repeated rendering of video frames.
 
-class VAAPIFrameHolder final {
+class DMABufSurface final {
  public:
-  VAAPIFrameHolder(FFmpegLibWrapper* aLib, WaylandDMABufSurface* aSurface,
-                   AVCodecContext* aAVCodecContext, AVFrame* aAVFrame);
-  ~VAAPIFrameHolder();
+  DMABufSurface(WaylandDMABufSurface* aSurface, FFmpegLibWrapper* aLib);
+  ~DMABufSurface();
+
+  // Lock VA-API related data.
+  void LockVAAPIData(AVCodecContext* aAVCodecContext, AVFrame* aAVFrame);
+  // Release VA-API related data; DMABufSurface can then be reused
+  // for another frame.
+  void ReleaseVAAPIData();
 
   // Check if WaylandDMABufSurface is used by any gecko rendering process
   // (WebRender or GL compositor) or by WaylandDMABUFSurfaceImage/VideoData.
   bool IsUsed() const { return mSurface->IsGlobalRefSet(); }
 
+  const RefPtr<WaylandDMABufSurfaceNV12> GetWaylandDMABufSurface() {
+    return mSurface->GetAsWaylandDMABufSurfaceNV12();
+  }
+
  private:
-  const FFmpegLibWrapper* mLib;
   const RefPtr<WaylandDMABufSurface> mSurface;
+  const FFmpegLibWrapper* mLib;
   AVBufferRef* mAVHWFramesContext;
   AVBufferRef* mHWAVBuffer;
 };
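
The locking that LockVAAPIData()/ReleaseVAAPIData() declare above boils down to holding a pair of AVBufferRef references. A hedged sketch against plain FFmpeg (linking libavcodec/libavutil directly, without Gecko's FFmpegLibWrapper indirection; PinnedVAAPIFrame is an illustrative name, not a real type):

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/buffer.h>
}

// Referencing the frame's buffer pins the decoded VASurface; referencing
// the hw frames context keeps the whole surface pool alive with it.
struct PinnedVAAPIFrame {
  AVBufferRef* mFramesCtx = nullptr;
  AVBufferRef* mBuffer = nullptr;

  void Lock(AVCodecContext* aCtx, AVFrame* aFrame) {
    mFramesCtx = av_buffer_ref(aCtx->hw_frames_ctx);
    mBuffer = av_buffer_ref(aFrame->buf[0]);
  }
  void Release() {
    // av_buffer_unref() is a no-op on null, so Release() is idempotent.
    av_buffer_unref(&mBuffer);
    av_buffer_unref(&mFramesCtx);
  }
};

Once Release() runs, ffmpeg is free to hand the same VASurface to a later avcodec_receive_frame() call, which is why the decoder releases unused frames right before receiving a new one.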
@@ -118,11 +149,15 @@ class FFmpegVideoDecoder<LIBAV_VER>
   void InitVAAPICodecContext();
   AVCodec* FindVAAPICodec();
   bool IsHardwareAccelerated(nsACString& aFailureReason) const override;
+  bool GetVAAPISurfaceDescriptor(VADRMPRIMESurfaceDescriptor& aVaDesc);
 
-  MediaResult CreateImageVAAPI(int64_t aOffset, int64_t aPts, int64_t aDuration,
-                               MediaDataDecoder::DecodedData& aResults);
+  MediaResult CreateImageDMABuf(int64_t aOffset, int64_t aPts,
+                                int64_t aDuration,
+                                MediaDataDecoder::DecodedData& aResults);
   void ReleaseUnusedVAAPIFrames();
-  void ReleaseAllVAAPIFrames();
+  DMABufSurface* GetUnusedDMABufSurface();
+  void ReleaseDMABufSurfaces();
 #endif
 
   /**
@@ -138,7 +173,8 @@ class FFmpegVideoDecoder<LIBAV_VER>
   AVBufferRef* mVAAPIDeviceContext;
   const bool mDisableHardwareDecoding;
   VADisplay mDisplay;
-  std::list<UniquePtr<VAAPIFrameHolder>> mFrameHolders;
+  bool mUseDMABufSurfaces;
+  nsTArray<DMABufSurface> mDMABufSurfaces;
 #endif
 
   RefPtr<KnowsCompositor> mImageAllocator;
   RefPtr<ImageContainer> mImageContainer;