Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1619882 [Wayland] Use VAAPIFrameHolder array at FFmpegVideoDecoder to hold active vaapi hw video surfaces, r=jya
VA-API surfaces are owned by the ffmpeg library and we need to keep them locked as long as any part of the rendering pipeline uses them. We use the global lock provided by WaylandDMABUFSurface for that, keep an array of active surfaces in FFmpegVideoDecoder, and release them only when all references are cleared or the FFmpegVideoDecoder is deleted.
Differential Revision: https://phabricator.services.mozilla.com/D76690
Parent: ec6a3bec56
Commit: 09e34aad82
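The patch below implements the scheme from the commit message: every decoded VA-API frame gets a VAAPIFrameHolder that pins the underlying ffmpeg buffers, the holders are collected in a list owned by the decoder, and a holder is dropped only once the compositor has released the surface. The following is a minimal, self-contained C++ sketch of that lifetime pattern; the names (SurfaceUse, FrameHolder, Decoder, PruneUnusedFrames) are illustrative stand-ins, not the actual gecko or FFmpeg types.

// Illustrative sketch only (hypothetical names): models the ownership flow of
// the patch, not the real gecko/FFmpeg API.
#include <atomic>
#include <list>
#include <memory>
#include <utility>

// Stands in for the per-surface global refcount: set while the frame is in
// the rendering pipeline, cleared by the compositor when the texture is done.
struct SurfaceUse {
  std::atomic<bool> mUsedByCompositor{true};
  bool IsUsed() const { return mUsedByCompositor.load(); }
  void ReleaseByCompositor() { mUsedByCompositor.store(false); }
};

// Pins the decoded hardware frame (in the real patch: AVBufferRef references
// plus the WaylandDMABufSurface) until the rendering pipeline lets go of it.
class FrameHolder {
 public:
  explicit FrameHolder(std::shared_ptr<SurfaceUse> aUse)
      : mUse(std::move(aUse)) {}
  bool IsUsed() const { return mUse->IsUsed(); }

 private:
  std::shared_ptr<SurfaceUse> mUse;
};

class Decoder {
 public:
  // Called for every decoded frame: first drop holders whose surfaces the
  // compositor has already released, then keep a holder for the new frame.
  void OnFrameDecoded(std::shared_ptr<SurfaceUse> aUse) {
    PruneUnusedFrames();
    mHolders.push_back(std::make_unique<FrameHolder>(std::move(aUse)));
  }

  // Mirrors the "release everything on shutdown" path.
  void Shutdown() { mHolders.clear(); }

 private:
  void PruneUnusedFrames() {
    for (auto it = mHolders.begin(); it != mHolders.end();) {
      if ((*it)->IsUsed()) {
        ++it;
      } else {
        it = mHolders.erase(it);
      }
    }
  }

  std::list<std::unique_ptr<FrameHolder>> mHolders;
};

In the actual patch the "still in use" test is WaylandDMABufSurface::IsGlobalRefSet() and the pinned resources are AVBufferRef references held by VAAPIFrameHolder; the sketch only models the ownership flow.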
@@ -111,7 +111,7 @@ static AVPixelFormat ChooseVAAPIPixelFormat(AVCodecContext* aCodecContext,
   for (; *aFormats > -1; aFormats++) {
     switch (*aFormats) {
       case AV_PIX_FMT_VAAPI_VLD:
-        FFMPEG_LOG("Requesting pixel format VAAPI_VLD\n");
+        FFMPEG_LOG("Requesting pixel format VAAPI_VLD");
         return AV_PIX_FMT_VAAPI_VLD;
       default:
         break;
@@ -123,18 +123,27 @@
 }
 
 VAAPIFrameHolder::VAAPIFrameHolder(FFmpegLibWrapper* aLib,
-                                   AVBufferRef* aVAAPIDeviceContext,
-                                   AVBufferRef* aAVHWFramesContext,
-                                   AVBufferRef* aHWFrame)
+                                   WaylandDMABufSurface* aSurface,
+                                   AVCodecContext* aAVCodecContext,
+                                   AVFrame* aAVFrame)
     : mLib(aLib),
-      mVAAPIDeviceContext(mLib->av_buffer_ref(aVAAPIDeviceContext)),
-      mAVHWFramesContext(mLib->av_buffer_ref(aAVHWFramesContext)),
-      mHWFrame(mLib->av_buffer_ref(aHWFrame)){};
+      mSurface(aSurface),
+      mAVHWFramesContext(mLib->av_buffer_ref(aAVCodecContext->hw_frames_ctx)),
+      mHWAVBuffer(mLib->av_buffer_ref(aAVFrame->buf[0])) {
+  FFMPEG_LOG("VAAPIFrameHolder is adding dmabuf surface UID = %d",
+             mSurface->GetUID());
+
+  // Create a global refcount object to track mSurface usage across the
+  // gecko rendering engine. We can't release the surface while it's still
+  // used by the GL compositor / WebRender.
+  mSurface->GlobalRefCountCreate();
+}
 
 VAAPIFrameHolder::~VAAPIFrameHolder() {
-  mLib->av_buffer_unref(&mHWFrame);
+  FFMPEG_LOG("VAAPIFrameHolder is releasing dmabuf surface UID = %d",
+             mSurface->GetUID());
+  mLib->av_buffer_unref(&mHWAVBuffer);
   mLib->av_buffer_unref(&mAVHWFramesContext);
-  mLib->av_buffer_unref(&mVAAPIDeviceContext);
 }
 
 AVCodec* FFmpegVideoDecoder<LIBAV_VER>::FindVAAPICodec() {
@@ -422,6 +431,13 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::DoDecode(
     NS_WARNING("FFmpeg h264 decoder failed to allocate frame.");
     return MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__);
   }
+
+#  ifdef MOZ_WAYLAND_USE_VAAPI
+  if (mVAAPIDeviceContext) {
+    ReleaseUnusedVAAPIFrames();
+  }
+#  endif
+
   res = mLib->avcodec_receive_frame(mCodecContext, mFrame);
   if (res == int(AVERROR_EOF)) {
     return NS_ERROR_DOM_MEDIA_END_OF_STREAM;
@@ -628,9 +644,20 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImage(
 }
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
-static void VAAPIFrameReleaseCallback(VAAPIFrameHolder* aVAAPIFrameHolder) {
-  auto frameHolder = static_cast<VAAPIFrameHolder*>(aVAAPIFrameHolder);
-  delete frameHolder;
+void FFmpegVideoDecoder<LIBAV_VER>::ReleaseUnusedVAAPIFrames() {
+  std::list<UniquePtr<VAAPIFrameHolder>>::iterator holder =
+      mFrameHolders.begin();
+  while (holder != mFrameHolders.end()) {
+    if (!(*holder)->IsUsed()) {
+      holder = mFrameHolders.erase(holder);
+    } else {
+      holder++;
+    }
+  }
+}
+
+void FFmpegVideoDecoder<LIBAV_VER>::ReleaseAllVAAPIFrames() {
+  mFrameHolders.clear();
 }
 
 MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageVAAPI(
@@ -667,20 +694,20 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageVAAPI(
         RESULT_DETAIL("Unable to allocate WaylandDMABufSurfaceNV12."));
   }
 
+#  ifdef MOZ_LOGGING
+  static int uid = 0;
+  surface->SetUID(++uid);
+  FFMPEG_LOG("Created dmabuf UID = %d HW surface %x", uid, surface_id);
+#  endif
+
   surface->SetYUVColorSpace(GetFrameColorSpace());
 
-  // mFrame->buf[0] is a reference to H264 VASurface for this mFrame.
-  // We need create WaylandDMABUFSurfaceImage on top of it,
-  // create EGLImage/Texture on top of it and render it by GL.
+  // Store a reference to the decoded HW buffer, see the VAAPIFrameHolder class.
+  auto holder =
+      MakeUnique<VAAPIFrameHolder>(mLib, surface, mCodecContext, mFrame);
+  mFrameHolders.push_back(std::move(holder));
 
-  // FFmpeg tends to reuse the particular VASurface for another frame
-  // even when the mFrame is not released. To keep VASurface as is
-  // we explicitly reference it and keep until WaylandDMABUFSurfaceImage
-  // is live.
-  RefPtr<layers::Image> im = new layers::WaylandDMABUFSurfaceImage(
-      surface, VAAPIFrameReleaseCallback,
-      new VAAPIFrameHolder(mLib, mVAAPIDeviceContext,
-                           mCodecContext->hw_frames_ctx, mFrame->buf[0]));
+  RefPtr<layers::Image> im = new layers::WaylandDMABUFSurfaceImage(surface);
 
   RefPtr<VideoData> vp = VideoData::CreateFromImage(
       mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
@@ -732,6 +759,7 @@ AVCodecID FFmpegVideoDecoder<LIBAV_VER>::GetCodecId(
 void FFmpegVideoDecoder<LIBAV_VER>::ProcessShutdown() {
 #ifdef MOZ_WAYLAND_USE_VAAPI
   if (mVAAPIDeviceContext) {
+    ReleaseAllVAAPIFrames();
     mLib->av_buffer_unref(&mVAAPIDeviceContext);
   }
 #endif

@@ -10,21 +10,45 @@
 #include "FFmpegLibWrapper.h"
 #include "FFmpegDataDecoder.h"
 #include "SimpleMap.h"
+#ifdef MOZ_WAYLAND_USE_VAAPI
+#  include "mozilla/widget/WaylandDMABufSurface.h"
+#  include <list>
+#endif
 
 namespace mozilla {
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
-class VAAPIFrameHolder {
+// When VA-API decoding is running, ffmpeg allocates AVHWFramesContext - a pool
+// of "hardware" frames. Every "hardware" frame (VASurface) is backed by an
+// actual piece of GPU memory which holds the decoded image data.
+//
+// The VASurface is wrapped by WaylandDMABufSurface and transferred to the
+// rendering queue by WaylandDMABUFSurfaceImage, where a TextureClient is
+// created and the VASurface is used as a texture there.
+//
+// As there's a limited number of VASurfaces, ffmpeg reuses them to decode
+// the next frames ASAP even if they are still attached to WaylandDMABufSurface
+// and used as a texture in our rendering engine.
+//
+// Unfortunately there isn't any obvious way to mark a particular VASurface
+// as used. The best we can do is to hold a reference to the particular AVBuffer
+// from the decoded AVFrame and to the AVHWFramesContext which owns the AVBuffer.
+
+class VAAPIFrameHolder final {
  public:
-  VAAPIFrameHolder(FFmpegLibWrapper* aLib, AVBufferRef* aVAAPIDeviceContext,
-                   AVBufferRef* aAVHWFramesContext, AVBufferRef* aHWFrame);
+  VAAPIFrameHolder(FFmpegLibWrapper* aLib, WaylandDMABufSurface* aSurface,
+                   AVCodecContext* aAVCodecContext, AVFrame* aAVFrame);
   ~VAAPIFrameHolder();
 
+  // Check if the WaylandDMABufSurface is used by any gecko rendering process
+  // (WebRender or GL compositor) or by WaylandDMABUFSurfaceImage / VideoData.
+  bool IsUsed() const { return mSurface->IsGlobalRefSet(); }
+
  private:
-  FFmpegLibWrapper* mLib;
-  AVBufferRef* mVAAPIDeviceContext;
+  const FFmpegLibWrapper* mLib;
+  const RefPtr<WaylandDMABufSurface> mSurface;
   AVBufferRef* mAVHWFramesContext;
-  AVBufferRef* mHWFrame;
+  AVBufferRef* mHWAVBuffer;
 };
 #endif
 
@@ -97,6 +121,8 @@ class FFmpegVideoDecoder<LIBAV_VER>
 
   MediaResult CreateImageVAAPI(int64_t aOffset, int64_t aPts, int64_t aDuration,
                                MediaDataDecoder::DecodedData& aResults);
+  void ReleaseUnusedVAAPIFrames();
+  void ReleaseAllVAAPIFrames();
 #endif
 
   /**
@@ -112,6 +138,7 @@ class FFmpegVideoDecoder<LIBAV_VER>
   AVBufferRef* mVAAPIDeviceContext;
   const bool mDisableHardwareDecoding;
   VADisplay mDisplay;
+  std::list<UniquePtr<VAAPIFrameHolder>> mFrameHolders;
 #endif
   RefPtr<KnowsCompositor> mImageAllocator;
   RefPtr<ImageContainer> mImageContainer;