Bug 1752097 [Linux] Remove SW decoding to dmabuf textures r=alwu,media-playback-reviewers

Removing SW decoding to dmabuf surfaces in favour of decoding to shm surfaces (Bug 1713276).

Differential Revision: https://phabricator.services.mozilla.com/D137032
stransky 2022-01-29 19:50:24 +00:00
Parent 8a4f73389e
Commit 6162647fa5
7 changed files with 35 additions and 189 deletions
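For orientation before the per-file hunks: after this patch the decoder keeps a dmabuf VideoFramePool only when VA-API hardware decoding is active, and software-decoded frames always take the ordinary shm-backed CreateImage() path. Below is a minimal, compilable sketch of that selection logic; HypotheticalDecoder and its members are simplified stand-ins for illustration, not the real FFmpegVideoDecoder code.

    #include <cstdio>
    #include <memory>

    // Simplified stand-ins; the real types live in FFmpegVideoDecoder.cpp.
    struct VideoFramePool {};  // dmabuf surface pool, VA-API-only after this patch

    struct HypotheticalDecoder {
      bool mEnableHardwareDecoding = false;            // VA-API usable and enabled
      std::unique_ptr<VideoFramePool> mVideoFramePool;

      void CreateImageVAAPI() { std::puts("VA-API dmabuf frame"); }
      void CreateImage() { std::puts("shm-backed frame"); }

      void OutputFrame() {
        // Pool is created lazily and only for hardware decoding; the old
        // mUseDMABufSurfaces SW-to-dmabuf branch no longer exists.
        if (!mVideoFramePool && mEnableHardwareDecoding) {
          mVideoFramePool = std::make_unique<VideoFramePool>();
        }
        if (mEnableHardwareDecoding) {
          CreateImageVAAPI();  // frame exported as a dmabuf surface
        } else {
          CreateImage();       // SW decode now always produces shm images
        }
      }
    };

    int main() {
      HypotheticalDecoder sw;
      sw.OutputFrame();  // prints "shm-backed frame"

      HypotheticalDecoder hw;
      hw.mEnableHardwareDecoding = true;
      hw.OutputFrame();  // prints "VA-API dmabuf frame"
      return 0;
    }

Running it prints "shm-backed frame" for the software decoder and "VA-API dmabuf frame" for the hardware one, mirroring the DoDecode() branch shown in the hunks below.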

View file

@@ -360,13 +360,6 @@ void FFmpegVideoDecoder<LIBAV_VER>::InitHWDecodingPrefs() {
mEnableHardwareDecoding = false;
FFMPEG_LOG("VA-API is disabled by pref.");
}
if (mEnableHardwareDecoding) {
mUseDMABufSurfaces = widget::GetDMABufDevice()->IsDMABufVideoEnabled();
if (!mUseDMABufSurfaces) {
FFMPEG_LOG("SW encoding to DMABuf textures is disabled by system/pref.");
}
}
}
#endif
@@ -379,7 +372,6 @@ FFmpegVideoDecoder<LIBAV_VER>::FFmpegVideoDecoder(
mVAAPIDeviceContext(nullptr),
mEnableHardwareDecoding(!aDisableHardwareDecoding),
mDisplay(nullptr),
mUseDMABufSurfaces(false),
#endif
mImageAllocator(aAllocator),
mImageContainer(aImageContainer),
@@ -623,14 +615,6 @@ int FFmpegVideoDecoder<LIBAV_VER>::GetVideoBuffer(
return AVERROR(EINVAL);
}
# ifdef MOZ_WAYLAND_USE_VAAPI
// For SW decoding + DMABuf case, it's the opposite from the above case, we
// don't want to access GPU data too frequently from CPU.
if (mUseDMABufSurfaces) {
return AVERROR(EINVAL);
}
# endif
if (!IsColorFormatSupportedForUsingCustomizedBuffer(aCodecContext->pix_fmt)) {
FFMPEG_LOG("Not support color format %d", aCodecContext->pix_fmt);
return AVERROR(EINVAL);
@@ -817,8 +801,8 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::DoDecode(
# ifdef MOZ_WAYLAND_USE_VAAPI
// Create VideoFramePool in case we need it.
if (!mVideoFramePool && (mUseDMABufSurfaces || mEnableHardwareDecoding)) {
mVideoFramePool = MakeUnique<VideoFramePool>(mEnableHardwareDecoding);
if (!mVideoFramePool && mEnableHardwareDecoding) {
mVideoFramePool = MakeUnique<VideoFramePool>();
}
// Release unused VA-API surfaces before avcodec_receive_frame() as
@@ -855,14 +839,6 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::DoDecode(
mVideoFramePool = nullptr;
return rv;
}
} else if (mUseDMABufSurfaces) {
rv = CreateImageDMABuf(mFrame->pkt_pos, mFrame->pkt_pts,
mFrame->pkt_duration, aResults);
if (NS_FAILED(rv)) {
mUseDMABufSurfaces = false;
rv = CreateImage(mFrame->pkt_pos, mFrame->pkt_pts, mFrame->pkt_duration,
aResults);
}
} else
# endif
{
@@ -1138,43 +1114,6 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageVAAPI(
aResults.AppendElement(std::move(vp));
return NS_OK;
}
MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageDMABuf(
int64_t aOffset, int64_t aPts, int64_t aDuration,
MediaDataDecoder::DecodedData& aResults) {
FFMPEG_LOG("DMABuf Got one frame output with pts=%" PRId64 "dts=%" PRId64
" duration=%" PRId64 " opaque=%" PRId64,
aPts, mFrame->pkt_dts, aDuration, mCodecContext->reordered_opaque);
MOZ_ASSERT(mTaskQueue->IsOnCurrentThread());
auto surface =
mVideoFramePool->GetVideoFrameSurface(mCodecContext->pix_fmt, mFrame);
if (!surface) {
return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
RESULT_DETAIL("dmabuf allocation error"));
}
surface->SetYUVColorSpace(GetFrameColorSpace());
if (mLib->av_frame_get_color_range) {
auto range = mLib->av_frame_get_color_range(mFrame);
surface->SetColorRange(range == AVCOL_RANGE_JPEG
? gfx::ColorRange::FULL
: gfx::ColorRange::LIMITED);
}
RefPtr<VideoData> vp = VideoData::CreateFromImage(
mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
TimeUnit::FromMicroseconds(aDuration), surface->GetAsImage(),
!!mFrame->key_frame, TimeUnit::FromMicroseconds(-1));
if (!vp) {
return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
RESULT_DETAIL("image allocation error"));
}
aResults.AppendElement(std::move(vp));
return NS_OK;
}
#endif
RefPtr<MediaDataDecoder::FlushPromise>

View file

@@ -132,16 +132,12 @@ class FFmpegVideoDecoder<LIBAV_VER>
MediaResult CreateImageVAAPI(int64_t aOffset, int64_t aPts, int64_t aDuration,
MediaDataDecoder::DecodedData& aResults);
MediaResult CreateImageDMABuf(int64_t aOffset, int64_t aPts,
int64_t aDuration,
MediaDataDecoder::DecodedData& aResults);
#endif
#ifdef MOZ_WAYLAND_USE_VAAPI
AVBufferRef* mVAAPIDeviceContext;
bool mEnableHardwareDecoding;
VADisplay mDisplay;
bool mUseDMABufSurfaces;
UniquePtr<VideoFramePool> mVideoFramePool;
static nsTArray<AVCodecID> mAcceleratedFormats;
#endif

View file

@@ -15,11 +15,11 @@
namespace mozilla {
RefPtr<layers::Image> VideoFrameSurfaceDMABuf::GetAsImage() {
RefPtr<layers::Image> VideoFrameSurfaceVAAPI::GetAsImage() {
return new layers::DMABUFSurfaceImage(mSurface);
}
VideoFrameSurfaceDMABuf::VideoFrameSurfaceDMABuf(DMABufSurface* aSurface)
VideoFrameSurfaceVAAPI::VideoFrameSurfaceVAAPI(DMABufSurface* aSurface)
: mSurface(aSurface) {
// Create global refcount object to track mSurface usage over
// gecko rendering engine. We can't release it until it's used
@@ -28,13 +28,10 @@ VideoFrameSurfaceDMABuf::VideoFrameSurfaceDMABuf(DMABufSurface* aSurface)
MOZ_RELEASE_ASSERT(mSurface->GetAsDMABufSurfaceYUV());
mSurface->GlobalRefCountCreate();
mSurface->GlobalRefAdd();
FFMPEG_LOG("VideoFrameSurfaceDMABuf: creating surface UID = %d",
FFMPEG_LOG("VideoFrameSurfaceVAAPI: creating surface UID = %d",
mSurface->GetUID());
}
VideoFrameSurfaceVAAPI::VideoFrameSurfaceVAAPI(DMABufSurface* aSurface)
: VideoFrameSurfaceDMABuf(aSurface) {}
void VideoFrameSurfaceVAAPI::LockVAAPIData(AVCodecContext* aAVCodecContext,
AVFrame* aAVFrame,
FFmpegLibWrapper* aLib) {
@@ -73,8 +70,7 @@ VideoFrameSurfaceVAAPI::~VideoFrameSurfaceVAAPI() {
ReleaseVAAPIData(/* aForFrameRecycle */ false);
}
VideoFramePool::VideoFramePool(bool aUseVAAPI)
: mUseVAAPI(aUseVAAPI), mSurfaceLock("VideoFramePoolSurfaceLock") {}
VideoFramePool::VideoFramePool() : mSurfaceLock("VideoFramePoolSurfaceLock") {}
VideoFramePool::~VideoFramePool() {
MutexAutoLock lock(mSurfaceLock);
@@ -82,14 +78,11 @@ VideoFramePool::~VideoFramePool() {
}
void VideoFramePool::ReleaseUnusedVAAPIFrames() {
if (!mUseVAAPI) {
return;
}
MutexAutoLock lock(mSurfaceLock);
for (const auto& surface : mDMABufSurfaces) {
auto* dmabufSurface = surface->AsVideoFrameSurfaceVAAPI();
if (!dmabufSurface->IsUsed()) {
dmabufSurface->ReleaseVAAPIData();
auto* vaapiSurface = surface->AsVideoFrameSurfaceVAAPI();
if (!vaapiSurface->IsUsed()) {
vaapiSurface->ReleaseVAAPIData();
}
}
}
@@ -99,10 +92,9 @@ RefPtr<VideoFrameSurface> VideoFramePool::GetFreeVideoFrameSurface() {
if (surface->IsUsed()) {
continue;
}
if (auto* vaapiSurface = surface->AsVideoFrameSurfaceVAAPI()) {
vaapiSurface->ReleaseVAAPIData();
}
surface->MarkAsUsed();
auto* vaapiSurface = surface->AsVideoFrameSurfaceVAAPI();
vaapiSurface->ReleaseVAAPIData();
vaapiSurface->MarkAsUsed();
return surface;
}
return nullptr;
@@ -111,9 +103,6 @@ RefPtr<VideoFrameSurface> VideoFramePool::GetFreeVideoFrameSurface() {
RefPtr<VideoFrameSurface> VideoFramePool::GetVideoFrameSurface(
VADRMPRIMESurfaceDescriptor& aVaDesc, AVCodecContext* aAVCodecContext,
AVFrame* aAVFrame, FFmpegLibWrapper* aLib) {
// VADRMPRIMESurfaceDescriptor can be used with VA-API only.
MOZ_ASSERT(mUseVAAPI);
if (aVaDesc.fourcc != VA_FOURCC_NV12 && aVaDesc.fourcc != VA_FOURCC_YV12 &&
aVaDesc.fourcc != VA_FOURCC_P010) {
FFMPEG_LOG("Unsupported VA-API surface format %d", aVaDesc.fourcc);
@@ -129,7 +118,7 @@ RefPtr<VideoFrameSurface> VideoFramePool::GetVideoFrameSurface(
return nullptr;
}
FFMPEG_LOG("Created new VA-API DMABufSurface UID = %d", surface->GetUID());
RefPtr<VideoFrameSurfaceDMABuf> surf = new VideoFrameSurfaceVAAPI(surface);
RefPtr<VideoFrameSurfaceVAAPI> surf = new VideoFrameSurfaceVAAPI(surface);
if (!mTextureCreationWorks) {
mTextureCreationWorks = Some(surface->VerifyTextureCreation());
}
@@ -153,42 +142,4 @@ RefPtr<VideoFrameSurface> VideoFramePool::GetVideoFrameSurface(
return videoSurface;
}
RefPtr<VideoFrameSurface> VideoFramePool::GetVideoFrameSurface(
AVPixelFormat aPixelFormat, AVFrame* aFrame) {
// We should not use SW surfaces when VA-API is enabled.
MOZ_ASSERT(!mUseVAAPI);
MOZ_ASSERT(aFrame);
// With SW decode we support only YUV420P format with DMABuf surfaces.
if (aPixelFormat != AV_PIX_FMT_YUV420P) {
return nullptr;
}
MutexAutoLock lock(mSurfaceLock);
if (RefPtr<VideoFrameSurface> videoSurface = GetFreeVideoFrameSurface()) {
RefPtr<DMABufSurfaceYUV> surface = videoSurface->GetDMABufSurface();
if (!surface->UpdateYUVData((void**)aFrame->data, aFrame->linesize)) {
return nullptr;
}
FFMPEG_LOG("Reusing SW DMABufSurface UID = %d", surface->GetUID());
return videoSurface;
}
RefPtr<DMABufSurfaceYUV> surface = DMABufSurfaceYUV::CreateYUVSurface(
aFrame->width, aFrame->height, (void**)aFrame->data, aFrame->linesize);
if (!surface) {
return nullptr;
}
RefPtr<VideoFrameSurfaceDMABuf> surf = new VideoFrameSurfaceDMABuf(surface);
if (!mTextureCreationWorks) {
mTextureCreationWorks = Some(surface->VerifyTextureCreation());
}
if (!*mTextureCreationWorks) {
FFMPEG_LOG(" failed to create texture over DMABuf memory!");
return nullptr;
}
FFMPEG_LOG("Created new SW DMABufSurface UID = %d", surface->GetUID());
mDMABufSurfaces.AppendElement(surf);
return surf;
}
} // namespace mozilla
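The pool changes above reduce to a refcount-gated recycle: GetFreeVideoFrameSurface() hands a pooled surface back only when no consumer (WebRender, the GL compositor, or a live VideoData) still holds its global reference, and it drops the surface's VA-API buffers before reuse. The same gate lets ReleaseUnusedVAAPIFrames() trim FFmpeg-side resources between avcodec_receive_frame() calls without touching surfaces that are still displayed. A compilable sketch of the idea with hypothetical simplified types (not Mozilla's DMABufSurface/VideoFrameSurfaceVAAPI classes):

    #include <memory>
    #include <vector>

    // Hypothetical stand-in for a pooled dmabuf surface.
    struct PooledSurface {
      int globalRefs = 0;         // usage by compositor / WebRender / VideoData
      bool vaapiDataHeld = true;  // AVBuffer refs keeping the VASurface alive

      bool IsUsed() const { return globalRefs > 0; }
      void ReleaseVAAPIData() { vaapiDataHeld = false; }
      void MarkAsUsed() { ++globalRefs; }
    };

    // Mirror of the recycle loop: reuse only surfaces nobody references anymore.
    PooledSurface* GetFreeSurface(std::vector<std::unique_ptr<PooledSurface>>& aPool) {
      for (auto& surface : aPool) {
        if (surface->IsUsed()) {
          continue;  // still displayed somewhere, must not be overwritten
        }
        surface->ReleaseVAAPIData();  // let FFmpeg recycle the VASurface first
        surface->MarkAsUsed();
        return surface.get();
      }
      return nullptr;  // caller allocates a fresh dmabuf surface instead
    }

    int main() {
      std::vector<std::unique_ptr<PooledSurface>> pool;
      pool.push_back(std::make_unique<PooledSurface>());
      PooledSurface* first = GetFreeSurface(pool);   // reuses the idle surface
      PooledSurface* second = GetFreeSurface(pool);  // nullptr: surface now in use
      return (first != nullptr && second == nullptr) ? 0 : 1;
    }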

View file

@@ -17,7 +17,6 @@
namespace mozilla {
class VideoFramePool;
class VideoFrameSurfaceDMABuf;
class VideoFrameSurfaceVAAPI;
class VideoFrameSurface {
@@ -26,9 +25,6 @@ class VideoFrameSurface {
VideoFrameSurface() = default;
virtual VideoFrameSurfaceDMABuf* AsVideoFrameSurfaceDMABuf() {
return nullptr;
}
virtual VideoFrameSurfaceVAAPI* AsVideoFrameSurfaceVAAPI() { return nullptr; }
virtual void SetYUVColorSpace(gfx::YUVColorSpace aColorSpace) = 0;
@@ -47,41 +43,6 @@ class VideoFrameSurface {
virtual ~VideoFrameSurface(){};
};
// VideoFrameSurfaceDMABuf is YUV dmabuf surface used for SW video decoding.
// Stores decoded video data in GPU memory.
class VideoFrameSurfaceDMABuf : public VideoFrameSurface {
friend class VideoFramePool;
public:
explicit VideoFrameSurfaceDMABuf(DMABufSurface* aSurface);
VideoFrameSurfaceDMABuf* AsVideoFrameSurfaceDMABuf() final { return this; }
void SetYUVColorSpace(gfx::YUVColorSpace aColorSpace) final {
mSurface->GetAsDMABufSurfaceYUV()->SetYUVColorSpace(aColorSpace);
}
void SetColorRange(gfx::ColorRange aColorRange) final {
mSurface->GetAsDMABufSurfaceYUV()->SetColorRange(aColorRange);
}
RefPtr<DMABufSurfaceYUV> GetDMABufSurface() final {
return mSurface->GetAsDMABufSurfaceYUV();
};
RefPtr<layers::Image> GetAsImage() final;
protected:
// Check if DMABufSurface is used by any gecko rendering process
// (WebRender or GL compositor) or by DMABUFSurfaceImage/VideoData.
bool IsUsed() const { return mSurface->IsGlobalRefSet(); }
void MarkAsUsed() { mSurface->GlobalRefAdd(); }
protected:
const RefPtr<DMABufSurface> mSurface;
virtual ~VideoFrameSurfaceDMABuf() = default;
};
// VideoFrameSurfaceVAAPI holds a reference to GPU data with a video frame.
//
// Actual GPU pixel data are stored at DMABufSurface and
@@ -111,7 +72,7 @@ class VideoFrameSurfaceDMABuf : public VideoFrameSurface {
// Unfortunately there isn't any obvious way how to mark particular VASurface
// as used. The best we can do is to hold a reference to particular AVBuffer
// from decoded AVFrame and AVHWFramesContext which owns the AVBuffer.
class VideoFrameSurfaceVAAPI final : public VideoFrameSurfaceDMABuf {
class VideoFrameSurfaceVAAPI final : public VideoFrameSurface {
friend class VideoFramePool;
public:
@@ -119,6 +80,19 @@ class VideoFrameSurfaceVAAPI final : public VideoFrameSurfaceDMABuf {
VideoFrameSurfaceVAAPI* AsVideoFrameSurfaceVAAPI() final { return this; }
void SetYUVColorSpace(mozilla::gfx::YUVColorSpace aColorSpace) {
mSurface->GetAsDMABufSurfaceYUV()->SetYUVColorSpace(aColorSpace);
}
void SetColorRange(mozilla::gfx::ColorRange aColorRange) {
mSurface->GetAsDMABufSurfaceYUV()->SetColorRange(aColorRange);
}
RefPtr<DMABufSurfaceYUV> GetDMABufSurface() {
return mSurface->GetAsDMABufSurfaceYUV();
};
RefPtr<layers::Image> GetAsImage();
protected:
// Lock VAAPI related data
void LockVAAPIData(AVCodecContext* aAVCodecContext, AVFrame* aAVFrame,
@@ -127,9 +101,15 @@ class VideoFrameSurfaceVAAPI final : public VideoFrameSurfaceDMABuf {
// for another frame.
void ReleaseVAAPIData(bool aForFrameRecycle = true);
// Check if DMABufSurface is used by any gecko rendering process
// (WebRender or GL compositor) or by DMABUFSurfaceImage/VideoData.
bool IsUsed() const { return mSurface->IsGlobalRefSet(); }
void MarkAsUsed() { mSurface->GlobalRefAdd(); }
private:
virtual ~VideoFrameSurfaceVAAPI();
const RefPtr<DMABufSurface> mSurface;
const FFmpegLibWrapper* mLib;
AVBufferRef* mAVHWFramesContext;
AVBufferRef* mHWAVBuffer;
@@ -138,24 +118,21 @@
// VideoFramePool class is thread-safe.
class VideoFramePool final {
public:
explicit VideoFramePool(bool aUseVAAPI);
VideoFramePool();
~VideoFramePool();
RefPtr<VideoFrameSurface> GetVideoFrameSurface(
VADRMPRIMESurfaceDescriptor& aVaDesc, AVCodecContext* aAVCodecContext,
AVFrame* aAVFrame, FFmpegLibWrapper* aLib);
RefPtr<VideoFrameSurface> GetVideoFrameSurface(AVPixelFormat aPixelFormat,
AVFrame* aFrame);
void ReleaseUnusedVAAPIFrames();
private:
RefPtr<VideoFrameSurface> GetFreeVideoFrameSurface();
private:
const bool mUseVAAPI;
// Protect mDMABufSurfaces pool access
Mutex mSurfaceLock;
nsTArray<RefPtr<VideoFrameSurfaceDMABuf>> mDMABufSurfaces;
nsTArray<RefPtr<VideoFrameSurfaceVAAPI>> mDMABufSurfaces;
// We may fail to create texture over DMABuf memory due to driver bugs so
// check that before we export first DMABuf video frame.
Maybe<bool> mTextureCreationWorks;
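The class comment above points out that there is no direct way to mark a VASurface as in use, so VideoFrameSurfaceVAAPI pins it indirectly: LockVAAPIData() holds references to the decoded frame's AVBuffer and to the AVHWFramesContext that owns it, and ReleaseVAAPIData() drops them once the frame can be recycled. A rough sketch of that technique using FFmpeg's AVBuffer API directly — the real code routes these calls through FFmpegLibWrapper, and the struct below is illustrative, not Mozilla's class:

    extern "C" {
    #include <libavcodec/avcodec.h>
    #include <libavutil/buffer.h>
    }

    // Keeps the VASurface behind a decoded frame alive while its exported dmabuf
    // is still being composited, by holding extra AVBuffer references.
    struct VAAPIFrameHold {
      AVBufferRef* mHWAVBuffer = nullptr;
      AVBufferRef* mAVHWFramesContext = nullptr;

      void Lock(AVCodecContext* aCodecContext, AVFrame* aFrame) {
        // Ref the frame's backing buffer and the hw frames context; FFmpeg will
        // not recycle the underlying VASurface until both refs are dropped.
        mHWAVBuffer = av_buffer_ref(aFrame->buf[0]);
        mAVHWFramesContext = av_buffer_ref(aCodecContext->hw_frames_ctx);
      }

      void Release() {
        // av_buffer_unref() is a no-op on null, so Release() is safe to repeat.
        av_buffer_unref(&mHWAVBuffer);
        av_buffer_unref(&mAVHWFramesContext);
      }
    };

It builds against FFmpeg's development headers; since av_buffer_unref() null-checks its argument, Release() can be called more than once without harm.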

View file

@@ -8758,12 +8758,6 @@
mirror: always
#ifdef MOZ_WAYLAND
# Disable DMABuf for ffmpeg video textures on Linux
- name: media.ffmpeg.dmabuf-textures.disabled
type: RelaxedAtomicBool
value: false
mirror: always
# Use VA-API for ffmpeg video playback on Linux
- name: media.ffmpeg.vaapi.enabled
type: RelaxedAtomicBool

View file

@@ -272,15 +272,6 @@ bool nsDMABufDevice::IsDMABufTexturesEnabled() {
#else
bool nsDMABufDevice::IsDMABufTexturesEnabled() { return false; }
#endif
bool nsDMABufDevice::IsDMABufVideoEnabled() {
LOGDMABUF(
("nsDMABufDevice::IsDMABufVideoEnabled: EGL %d DMABufEnabled %d "
"!media_ffmpeg_dmabuf_textures_disabled %d\n",
gfx::gfxVars::UseEGL(), IsDMABufEnabled(),
!StaticPrefs::media_ffmpeg_dmabuf_textures_disabled()));
return !StaticPrefs::media_ffmpeg_dmabuf_textures_disabled() &&
gfx::gfxVars::UseDMABuf() && IsDMABufEnabled();
}
bool nsDMABufDevice::IsDMABufVAAPIEnabled() {
LOGDMABUF(
("nsDMABufDevice::IsDMABufVAAPIEnabled: EGL %d "

View file

@@ -174,8 +174,6 @@ class nsDMABufDevice {
// Use dmabuf for WebRender general web content
bool IsDMABufTexturesEnabled();
// Use dmabuf for video playback
bool IsDMABufVideoEnabled();
// Use dmabuf for VA-API video playback
bool IsDMABufVAAPIEnabled();
// Use dmabuf for WebGL content