Backed out 3 changesets (bug 1660336) for bustages on wcontext_vaapi.h.

Backed out changeset bc879f517930 (bug 1660336)
Backed out changeset a1fce14ac3a8 (bug 1660336)
Backed out changeset 1d4f4423422a (bug 1660336)
This commit is contained in:
Cosmin Sabou 2020-11-05 17:30:26 +02:00
Родитель 4dd521634d
Коммит e280184fdb
27 изменённых файлов: 63 добавлений и 3803 удалений

Просмотреть файл

@ -11,9 +11,6 @@
#include "mozilla/Types.h" #include "mozilla/Types.h"
#include "PlatformDecoderModule.h" #include "PlatformDecoderModule.h"
#include "prlink.h" #include "prlink.h"
#ifdef MOZ_WAYLAND
# include "gfxPlatformGtk.h"
#endif
#define AV_LOG_DEBUG 48 #define AV_LOG_DEBUG 48
#define AV_LOG_INFO 32 #define AV_LOG_INFO 32
@ -256,52 +253,6 @@ void FFmpegLibWrapper::Unlink() {
PodZero(this); PodZero(this);
} }
#ifdef MOZ_WAYLAND
void FFmpegLibWrapper::LinkVAAPILibs() {
if (gfxPlatformGtk::GetPlatform()->UseHardwareVideoDecoding()) {
PRLibSpec lspec;
lspec.type = PR_LibSpec_Pathname;
if (gfxPlatformGtk::GetPlatform()->UseDRMVAAPIDisplay()) {
const char* libDrm = "libva-drm.so.2";
lspec.value.pathname = libDrm;
mVALibDrm = PR_LoadLibraryWithFlags(lspec, PR_LD_NOW | PR_LD_LOCAL);
if (!mVALibDrm) {
FFMPEG_LOG("VA-API support: Missing or old %s library.\n", libDrm);
}
} else {
if (gfxPlatformGtk::GetPlatform()->IsWaylandDisplay()) {
const char* libWayland = "libva-wayland.so.2";
lspec.value.pathname = libWayland;
mVALibWayland = PR_LoadLibraryWithFlags(lspec, PR_LD_NOW | PR_LD_LOCAL);
if (!mVALibWayland) {
FFMPEG_LOG("VA-API support: Missing or old %s library.\n",
libWayland);
}
} else {
FFMPEG_LOG("VA-API X11 display is not implemented.\n");
}
}
if (mVALibWayland || mVALibDrm) {
const char* lib = "libva.so.2";
lspec.value.pathname = lib;
mVALib = PR_LoadLibraryWithFlags(lspec, PR_LD_NOW | PR_LD_LOCAL);
// Don't use libva when it's missing vaExportSurfaceHandle.
if (mVALib && !PR_FindSymbol(mVALib, "vaExportSurfaceHandle")) {
PR_UnloadLibrary(mVALib);
mVALib = nullptr;
}
if (!mVALib) {
FFMPEG_LOG("VA-API support: Missing or old %s library.\n", lib);
}
}
} else {
FFMPEG_LOG("VA-API FFmpeg is disabled by platform");
}
}
#endif
#ifdef MOZ_WAYLAND #ifdef MOZ_WAYLAND
bool FFmpegLibWrapper::IsVAAPIAvailable() { bool FFmpegLibWrapper::IsVAAPIAvailable() {
# define VA_FUNC_LOADED(func) (func != nullptr) # define VA_FUNC_LOADED(func) (func != nullptr)

Просмотреть файл

@ -56,7 +56,6 @@ struct MOZ_ONLY_USED_TO_AVOID_STATIC_CONSTRUCTORS FFmpegLibWrapper {
#ifdef MOZ_WAYLAND #ifdef MOZ_WAYLAND
// Check if mVALib are available and we can use HW decode. // Check if mVALib are available and we can use HW decode.
bool IsVAAPIAvailable(); bool IsVAAPIAvailable();
void LinkVAAPILibs();
#endif #endif
// indicate the version of libavcodec linked to. // indicate the version of libavcodec linked to.

Просмотреть файл

@ -57,7 +57,50 @@ bool FFmpegRuntimeLinker::Init() {
} }
#ifdef MOZ_WAYLAND #ifdef MOZ_WAYLAND
sLibAV.LinkVAAPILibs(); if (gfxPlatformGtk::GetPlatform()->UseHardwareVideoDecoding()) {
PRLibSpec lspec;
lspec.type = PR_LibSpec_Pathname;
if (gfxPlatformGtk::GetPlatform()->UseDRMVAAPIDisplay()) {
const char* libDrm = "libva-drm.so.2";
lspec.value.pathname = libDrm;
sLibAV.mVALibDrm =
PR_LoadLibraryWithFlags(lspec, PR_LD_NOW | PR_LD_LOCAL);
if (!sLibAV.mVALibDrm) {
FFMPEG_LOG("VA-API support: Missing or old %s library.\n", libDrm);
}
} else {
if (gfxPlatformGtk::GetPlatform()->IsWaylandDisplay()) {
const char* libWayland = "libva-wayland.so.2";
lspec.value.pathname = libWayland;
sLibAV.mVALibWayland =
PR_LoadLibraryWithFlags(lspec, PR_LD_NOW | PR_LD_LOCAL);
if (!sLibAV.mVALibWayland) {
FFMPEG_LOG("VA-API support: Missing or old %s library.\n",
libWayland);
}
} else {
FFMPEG_LOG("VA-API X11 display is not implemented.\n");
}
}
if (sLibAV.mVALibWayland || sLibAV.mVALibDrm) {
const char* lib = "libva.so.2";
lspec.value.pathname = lib;
sLibAV.mVALib = PR_LoadLibraryWithFlags(lspec, PR_LD_NOW | PR_LD_LOCAL);
// Don't use libva when it's missing vaExportSurfaceHandle.
if (sLibAV.mVALib &&
!PR_FindSymbol(sLibAV.mVALib, "vaExportSurfaceHandle")) {
PR_UnloadLibrary(sLibAV.mVALib);
sLibAV.mVALib = nullptr;
}
if (!sLibAV.mVALib) {
FFMPEG_LOG("VA-API support: Missing or old %s library.\n", lib);
}
}
} else {
FFMPEG_LOG("VA-API FFmpeg is disabled by platform");
}
#endif #endif
// While going through all possible libs, this status will be updated with a // While going through all possible libs, this status will be updated with a

Просмотреть файл

@ -125,8 +125,8 @@ static AVPixelFormat ChooseVAAPIPixelFormat(AVCodecContext* aCodecContext,
return AV_PIX_FMT_NONE; return AV_PIX_FMT_NONE;
} }
DMABufSurfaceWrapper<LIBAV_VER>::DMABufSurfaceWrapper(DMABufSurface* aSurface, DMABufSurfaceWrapper::DMABufSurfaceWrapper(DMABufSurface* aSurface,
FFmpegLibWrapper* aLib) FFmpegLibWrapper* aLib)
: mSurface(aSurface), : mSurface(aSurface),
mLib(aLib), mLib(aLib),
mAVHWFramesContext(nullptr), mAVHWFramesContext(nullptr),
@ -139,8 +139,8 @@ DMABufSurfaceWrapper<LIBAV_VER>::DMABufSurfaceWrapper(DMABufSurface* aSurface,
mSurface->GetUID()); mSurface->GetUID());
} }
void DMABufSurfaceWrapper<LIBAV_VER>::LockVAAPIData( void DMABufSurfaceWrapper::LockVAAPIData(AVCodecContext* aAVCodecContext,
AVCodecContext* aAVCodecContext, AVFrame* aAVFrame) { AVFrame* aAVFrame) {
FFMPEG_LOG("DMABufSurfaceWrapper: VAAPI locking dmabuf surface UID = %d", FFMPEG_LOG("DMABufSurfaceWrapper: VAAPI locking dmabuf surface UID = %d",
mSurface->GetUID()); mSurface->GetUID());
if (aAVCodecContext && aAVFrame) { if (aAVCodecContext && aAVFrame) {
@ -149,7 +149,7 @@ void DMABufSurfaceWrapper<LIBAV_VER>::LockVAAPIData(
} }
} }
void DMABufSurfaceWrapper<LIBAV_VER>::ReleaseVAAPIData() { void DMABufSurfaceWrapper::ReleaseVAAPIData() {
FFMPEG_LOG("DMABufSurfaceWrapper: VAAPI releasing dmabuf surface UID = %d", FFMPEG_LOG("DMABufSurfaceWrapper: VAAPI releasing dmabuf surface UID = %d",
mSurface->GetUID()); mSurface->GetUID());
if (mHWAVBuffer && mAVHWFramesContext) { if (mHWAVBuffer && mAVHWFramesContext) {
@ -159,7 +159,7 @@ void DMABufSurfaceWrapper<LIBAV_VER>::ReleaseVAAPIData() {
mSurface->ReleaseSurface(); mSurface->ReleaseSurface();
} }
DMABufSurfaceWrapper<LIBAV_VER>::~DMABufSurfaceWrapper() { DMABufSurfaceWrapper::~DMABufSurfaceWrapper() {
FFMPEG_LOG("DMABufSurfaceWrapper: deleting dmabuf surface UID = %d", FFMPEG_LOG("DMABufSurfaceWrapper: deleting dmabuf surface UID = %d",
mSurface->GetUID()); mSurface->GetUID());
ReleaseVAAPIData(); ReleaseVAAPIData();
@ -182,14 +182,7 @@ AVCodec* FFmpegVideoDecoder<LIBAV_VER>::FindVAAPICodec() {
return nullptr; return nullptr;
} }
template <int V> class VAAPIDisplayHolder {
class VAAPIDisplayHolder {};
template <>
class VAAPIDisplayHolder<LIBAV_VER>;
template <>
class VAAPIDisplayHolder<LIBAV_VER> {
public: public:
VAAPIDisplayHolder(FFmpegLibWrapper* aLib, VADisplay aDisplay) VAAPIDisplayHolder(FFmpegLibWrapper* aLib, VADisplay aDisplay)
: mLib(aLib), mDisplay(aDisplay){}; : mLib(aLib), mDisplay(aDisplay){};
@ -201,8 +194,7 @@ class VAAPIDisplayHolder<LIBAV_VER> {
}; };
static void VAAPIDisplayReleaseCallback(struct AVHWDeviceContext* hwctx) { static void VAAPIDisplayReleaseCallback(struct AVHWDeviceContext* hwctx) {
auto displayHolder = auto displayHolder = static_cast<VAAPIDisplayHolder*>(hwctx->user_opaque);
static_cast<VAAPIDisplayHolder<LIBAV_VER>*>(hwctx->user_opaque);
delete displayHolder; delete displayHolder;
} }
@ -243,7 +235,7 @@ bool FFmpegVideoDecoder<LIBAV_VER>::CreateVAAPIDeviceContext() {
} }
} }
hwctx->user_opaque = new VAAPIDisplayHolder<LIBAV_VER>(mLib, mDisplay); hwctx->user_opaque = new VAAPIDisplayHolder(mLib, mDisplay);
hwctx->free = VAAPIDisplayReleaseCallback; hwctx->free = VAAPIDisplayReleaseCallback;
int major, minor; int major, minor;
@ -718,7 +710,7 @@ void FFmpegVideoDecoder<LIBAV_VER>::ReleaseUnusedVAAPIFrames() {
} }
} }
DMABufSurfaceWrapper<LIBAV_VER>* DMABufSurfaceWrapper*
FFmpegVideoDecoder<LIBAV_VER>::GetUnusedDMABufSurfaceWrapper() { FFmpegVideoDecoder<LIBAV_VER>::GetUnusedDMABufSurfaceWrapper() {
int len = mDMABufSurfaces.Length(); int len = mDMABufSurfaces.Length();
for (int i = 0; i < len; i++) { for (int i = 0; i < len; i++) {
@ -777,8 +769,7 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageDMABuf(
RefPtr<DMABufSurfaceYUV> surface; RefPtr<DMABufSurfaceYUV> surface;
DMABufSurfaceWrapper<LIBAV_VER>* surfaceWrapper = DMABufSurfaceWrapper* surfaceWrapper = GetUnusedDMABufSurfaceWrapper();
GetUnusedDMABufSurfaceWrapper();
if (!surfaceWrapper) { if (!surfaceWrapper) {
if (mVAAPIDeviceContext) { if (mVAAPIDeviceContext) {
surface = DMABufSurfaceYUV::CreateYUVSurface(vaDesc); surface = DMABufSurfaceYUV::CreateYUVSurface(vaDesc);

Просмотреть файл

@ -55,14 +55,7 @@ namespace mozilla {
// We own the DMABufSurface underlying GPU data and we use it for // We own the DMABufSurface underlying GPU data and we use it for
// repeated rendering of video frames. // repeated rendering of video frames.
// //
template <int V> class DMABufSurfaceWrapper final {
class DMABufSurfaceWrapper {};
template <>
class DMABufSurfaceWrapper<LIBAV_VER>;
template <>
class DMABufSurfaceWrapper<LIBAV_VER> final {
public: public:
DMABufSurfaceWrapper(DMABufSurface* aSurface, FFmpegLibWrapper* aLib); DMABufSurfaceWrapper(DMABufSurface* aSurface, FFmpegLibWrapper* aLib);
~DMABufSurfaceWrapper(); ~DMABufSurfaceWrapper();
@ -169,7 +162,7 @@ class FFmpegVideoDecoder<LIBAV_VER>
MediaDataDecoder::DecodedData& aResults); MediaDataDecoder::DecodedData& aResults);
void ReleaseUnusedVAAPIFrames(); void ReleaseUnusedVAAPIFrames();
DMABufSurfaceWrapper<LIBAV_VER>* GetUnusedDMABufSurfaceWrapper(); DMABufSurfaceWrapper* GetUnusedDMABufSurfaceWrapper();
void ReleaseDMABufSurfaces(); void ReleaseDMABufSurfaces();
#endif #endif
@ -187,7 +180,7 @@ class FFmpegVideoDecoder<LIBAV_VER>
const bool mDisableHardwareDecoding; const bool mDisableHardwareDecoding;
VADisplay mDisplay; VADisplay mDisplay;
bool mUseDMABufSurfaces; bool mUseDMABufSurfaces;
nsTArray<DMABufSurfaceWrapper<LIBAV_VER>> mDMABufSurfaces; nsTArray<DMABufSurfaceWrapper> mDMABufSurfaces;
#endif #endif
RefPtr<KnowsCompositor> mImageAllocator; RefPtr<KnowsCompositor> mImageAllocator;
RefPtr<ImageContainer> mImageContainer; RefPtr<ImageContainer> mImageContainer;

Просмотреть файл

@ -64,10 +64,6 @@ bool FFVPXRuntimeLinker::Init() {
MOZ_ASSERT(NS_IsMainThread()); MOZ_ASSERT(NS_IsMainThread());
sLinkStatus = LinkStatus_FAILED; sLinkStatus = LinkStatus_FAILED;
#ifdef MOZ_WAYLAND
sFFVPXLib.LinkVAAPILibs();
#endif
// We retrieve the path of the lgpllibs library as this is where mozavcodec // We retrieve the path of the lgpllibs library as this is where mozavcodec
// and mozavutil libs are located. // and mozavutil libs are located.
PathString lgpllibsname = GetLibraryName(nullptr, "lgpllibs"); PathString lgpllibsname = GetLibraryName(nullptr, "lgpllibs");

Просмотреть файл

@ -131,11 +131,6 @@
./libavcodec/thread.h ./libavcodec/thread.h
./libavcodec/unary.h ./libavcodec/unary.h
./libavcodec/utils.c ./libavcodec/utils.c
./libavcodec/vaapi.h
./libavcodec/vaapi_decode.h
./libavcodec/vaapi_decode.c
./libavcodec/vaapi_vp8.c
./libavcodec/vaapi_vp9.c
./libavcodec/version.h ./libavcodec/version.h
./libavcodec/videodsp.c ./libavcodec/videodsp.c
./libavcodec/videodsp.h ./libavcodec/videodsp.h
@ -268,11 +263,7 @@
./libavutil/frame.h ./libavutil/frame.h
./libavutil/hwcontext.c ./libavutil/hwcontext.c
./libavutil/hwcontext.h ./libavutil/hwcontext.h
./libavutil/hwcontext_drm.h
./libavutil/hwcontext_drm.c
./libavutil/hwcontext_internal.h ./libavutil/hwcontext_internal.h
./libavutil/hwcontext_vaapi.h
./libavutil/hwcontext_vaapi.c
./libavutil/imgutils.c ./libavutil/imgutils.c
./libavutil/imgutils.h ./libavutil/imgutils.h
./libavutil/imgutils_internal.h ./libavutil/imgutils_internal.h

Просмотреть файл

@ -56,5 +56,3 @@ $ for i in `cat $PATH_CENTRAL/media/ffvpx/FILES`; do git diff $REV_LASTSYNC HEAD
Then apply patch.diff on the ffvpx tree. Then apply patch.diff on the ffvpx tree.
Compilation will reveal if any files are missing. Compilation will reveal if any files are missing.
Apply linux-vaapi-build.patch patch to enable build VA-API support for Linux.

Просмотреть файл

@ -18,14 +18,4 @@
#define CONFIG_RDFT 1 #define CONFIG_RDFT 1
#endif #endif
#ifdef MOZ_WAYLAND
#define CONFIG_VAAPI 1
#define CONFIG_VP8_VAAPI_HWACCEL 1
#define CONFIG_VP9_VAAPI_HWACCEL 1
#else
#define CONFIG_VAAPI 0
#define CONFIG_VP8_VAAPI_HWACCEL 0
#define CONFIG_VP9_VAAPI_HWACCEL 0
#endif
#endif #endif

Просмотреть файл

@ -524,6 +524,7 @@
#define CONFIG_FFNVCODEC 0 #define CONFIG_FFNVCODEC 0
#define CONFIG_NVDEC 0 #define CONFIG_NVDEC 0
#define CONFIG_NVENC 0 #define CONFIG_NVENC 0
#define CONFIG_VAAPI 0
#define CONFIG_VDPAU 0 #define CONFIG_VDPAU 0
#define CONFIG_VIDEOTOOLBOX 0 #define CONFIG_VIDEOTOOLBOX 0
#define CONFIG_V4L2_M2M 0 #define CONFIG_V4L2_M2M 0

Просмотреть файл

@ -524,6 +524,7 @@
#define CONFIG_FFNVCODEC 0 #define CONFIG_FFNVCODEC 0
#define CONFIG_NVDEC 0 #define CONFIG_NVDEC 0
#define CONFIG_NVENC 0 #define CONFIG_NVENC 0
#define CONFIG_VAAPI 0
#define CONFIG_VDPAU 0 #define CONFIG_VDPAU 0
#define CONFIG_VIDEOTOOLBOX 0 #define CONFIG_VIDEOTOOLBOX 0
#define CONFIG_V4L2_M2M 1 #define CONFIG_V4L2_M2M 1

Просмотреть файл

@ -1706,6 +1706,7 @@
#define CONFIG_VP8_V4L2M2M_DECODER 0 #define CONFIG_VP8_V4L2M2M_DECODER 0
#define CONFIG_VP8_V4L2M2M_ENCODER 0 #define CONFIG_VP8_V4L2M2M_ENCODER 0
#define CONFIG_VP8_VAAPI_ENCODER 0 #define CONFIG_VP8_VAAPI_ENCODER 0
#define CONFIG_VP8_VAAPI_HWACCEL 0
#define CONFIG_VP9_CUVID_DECODER 0 #define CONFIG_VP9_CUVID_DECODER 0
#define CONFIG_VP9_D3D11VA2_HWACCEL 0 #define CONFIG_VP9_D3D11VA2_HWACCEL 0
#define CONFIG_VP9_D3D11VA_HWACCEL 0 #define CONFIG_VP9_D3D11VA_HWACCEL 0
@ -1718,6 +1719,7 @@
#define CONFIG_VP9_SUPERFRAME_BSF 0 #define CONFIG_VP9_SUPERFRAME_BSF 0
#define CONFIG_VP9_V4L2M2M_DECODER 0 #define CONFIG_VP9_V4L2M2M_DECODER 0
#define CONFIG_VP9_VAAPI_ENCODER 0 #define CONFIG_VP9_VAAPI_ENCODER 0
#define CONFIG_VP9_VAAPI_HWACCEL 0
#define CONFIG_VPK_DEMUXER 0 #define CONFIG_VPK_DEMUXER 0
#define CONFIG_VPLAYER_DECODER 0 #define CONFIG_VPLAYER_DECODER 0
#define CONFIG_VPLAYER_DEMUXER 0 #define CONFIG_VPLAYER_DEMUXER 0

Просмотреть файл

@ -71,9 +71,5 @@ elif not CONFIG['RELEASE_OR_BETA']:
# Enable fast assertions in opt builds of Nightly and Aurora. # Enable fast assertions in opt builds of Nightly and Aurora.
DEFINES['ASSERT_LEVEL'] = 1 DEFINES['ASSERT_LEVEL'] = 1
if CONFIG['MOZ_WAYLAND']:
CFLAGS += CONFIG['MOZ_WAYLAND_CFLAGS']
CXXFLAGS += CONFIG['MOZ_WAYLAND_CFLAGS']
# Add libFuzzer configuration directives # Add libFuzzer configuration directives
include('/tools/fuzzing/libfuzzer-config.mozbuild') include('/tools/fuzzing/libfuzzer-config.mozbuild')

Просмотреть файл

@ -28,11 +28,6 @@ av_get_pcm_codec
av_get_profile_name av_get_profile_name
av_grow_packet av_grow_packet
av_hwaccel_next av_hwaccel_next
av_hwdevice_ctx_init
av_hwdevice_ctx_alloc
av_hwdevice_ctx_create_derived
av_hwframe_transfer_get_formats
av_hwframe_ctx_alloc
av_init_packet av_init_packet
av_lockmgr_register av_lockmgr_register
av_new_packet av_new_packet
@ -98,7 +93,6 @@ avcodec_free_context
avcodec_get_class avcodec_get_class
avcodec_get_context_defaults3 avcodec_get_context_defaults3
avcodec_get_frame_class avcodec_get_frame_class
avcodec_get_hw_config
avcodec_get_name avcodec_get_name
avcodec_get_subtitle_rect_class avcodec_get_subtitle_rect_class
avcodec_get_type avcodec_get_type

Просмотреть файл

@ -96,12 +96,6 @@ if not CONFIG['MOZ_FFVPX_AUDIOONLY']:
'vp9prob.c', 'vp9prob.c',
'vp9recon.c' 'vp9recon.c'
] ]
if CONFIG['MOZ_WAYLAND']:
SOURCES += [
'vaapi_decode.c',
'vaapi_vp8.c',
'vaapi_vp9.c',
]
if CONFIG['MOZ_LIBAV_FFT']: if CONFIG['MOZ_LIBAV_FFT']:
SOURCES += [ SOURCES += [

Просмотреть файл

@ -1,86 +0,0 @@
/*
* Video Acceleration API (shared data between FFmpeg and the video player)
* HW decode acceleration for MPEG-2, MPEG-4, H.264 and VC-1
*
* Copyright (C) 2008-2009 Splitted-Desktop Systems
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VAAPI_H
#define AVCODEC_VAAPI_H
/**
* @file
* @ingroup lavc_codec_hwaccel_vaapi
* Public libavcodec VA API header.
*/
#include <stdint.h>
#include "libavutil/attributes.h"
#include "version.h"
#if FF_API_STRUCT_VAAPI_CONTEXT
/**
* @defgroup lavc_codec_hwaccel_vaapi VA API Decoding
* @ingroup lavc_codec_hwaccel
* @{
*/
/**
* This structure is used to share data between the FFmpeg library and
* the client video application.
* This shall be zero-allocated and available as
* AVCodecContext.hwaccel_context. All user members can be set once
* during initialization or through each AVCodecContext.get_buffer()
* function call. In any case, they must be valid prior to calling
* decoding functions.
*
* Deprecated: use AVCodecContext.hw_frames_ctx instead.
*/
struct attribute_deprecated vaapi_context {
/**
* Window system dependent data
*
* - encoding: unused
* - decoding: Set by user
*/
void *display;
/**
* Configuration ID
*
* - encoding: unused
* - decoding: Set by user
*/
uint32_t config_id;
/**
* Context ID (video decode pipeline)
*
* - encoding: unused
* - decoding: Set by user
*/
uint32_t context_id;
};
/* @} */
#endif /* FF_API_STRUCT_VAAPI_CONTEXT */
#endif /* AVCODEC_VAAPI_H */

Просмотреть файл

@ -1,732 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/pixdesc.h"
#include "avcodec.h"
#include "decode.h"
#include "internal.h"
#include "vaapi_decode.h"
int ff_vaapi_decode_make_param_buffer(AVCodecContext *avctx,
VAAPIDecodePicture *pic,
int type,
const void *data,
size_t size)
{
VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
VAStatus vas;
VABufferID buffer;
av_assert0(pic->nb_param_buffers + 1 <= MAX_PARAM_BUFFERS);
vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
type, size, 1, (void*)data, &buffer);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to create parameter "
"buffer (type %d): %d (%s).\n",
type, vas, vaErrorStr(vas));
return AVERROR(EIO);
}
pic->param_buffers[pic->nb_param_buffers++] = buffer;
av_log(avctx, AV_LOG_DEBUG, "Param buffer (type %d, %zu bytes) "
"is %#x.\n", type, size, buffer);
return 0;
}
int ff_vaapi_decode_make_slice_buffer(AVCodecContext *avctx,
VAAPIDecodePicture *pic,
const void *params_data,
size_t params_size,
const void *slice_data,
size_t slice_size)
{
VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
VAStatus vas;
int index;
av_assert0(pic->nb_slices <= pic->slices_allocated);
if (pic->nb_slices == pic->slices_allocated) {
if (pic->slices_allocated > 0)
pic->slices_allocated *= 2;
else
pic->slices_allocated = 64;
pic->slice_buffers =
av_realloc_array(pic->slice_buffers,
pic->slices_allocated,
2 * sizeof(*pic->slice_buffers));
if (!pic->slice_buffers)
return AVERROR(ENOMEM);
}
av_assert0(pic->nb_slices + 1 <= pic->slices_allocated);
index = 2 * pic->nb_slices;
vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
VASliceParameterBufferType,
params_size, 1, (void*)params_data,
&pic->slice_buffers[index]);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to create slice "
"parameter buffer: %d (%s).\n", vas, vaErrorStr(vas));
return AVERROR(EIO);
}
av_log(avctx, AV_LOG_DEBUG, "Slice %d param buffer (%zu bytes) "
"is %#x.\n", pic->nb_slices, params_size,
pic->slice_buffers[index]);
vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
VASliceDataBufferType,
slice_size, 1, (void*)slice_data,
&pic->slice_buffers[index + 1]);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to create slice "
"data buffer (size %zu): %d (%s).\n",
slice_size, vas, vaErrorStr(vas));
vaDestroyBuffer(ctx->hwctx->display,
pic->slice_buffers[index]);
return AVERROR(EIO);
}
av_log(avctx, AV_LOG_DEBUG, "Slice %d data buffer (%zu bytes) "
"is %#x.\n", pic->nb_slices, slice_size,
pic->slice_buffers[index + 1]);
++pic->nb_slices;
return 0;
}
static void ff_vaapi_decode_destroy_buffers(AVCodecContext *avctx,
VAAPIDecodePicture *pic)
{
VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
VAStatus vas;
int i;
for (i = 0; i < pic->nb_param_buffers; i++) {
vas = vaDestroyBuffer(ctx->hwctx->display,
pic->param_buffers[i]);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to destroy "
"parameter buffer %#x: %d (%s).\n",
pic->param_buffers[i], vas, vaErrorStr(vas));
}
}
for (i = 0; i < 2 * pic->nb_slices; i++) {
vas = vaDestroyBuffer(ctx->hwctx->display,
pic->slice_buffers[i]);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to destroy slice "
"slice buffer %#x: %d (%s).\n",
pic->slice_buffers[i], vas, vaErrorStr(vas));
}
}
}
int ff_vaapi_decode_issue(AVCodecContext *avctx,
VAAPIDecodePicture *pic)
{
VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
VAStatus vas;
int err;
av_log(avctx, AV_LOG_DEBUG, "Decode to surface %#x.\n",
pic->output_surface);
vas = vaBeginPicture(ctx->hwctx->display, ctx->va_context,
pic->output_surface);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to begin picture decode "
"issue: %d (%s).\n", vas, vaErrorStr(vas));
err = AVERROR(EIO);
goto fail_with_picture;
}
vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
pic->param_buffers, pic->nb_param_buffers);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to upload decode "
"parameters: %d (%s).\n", vas, vaErrorStr(vas));
err = AVERROR(EIO);
goto fail_with_picture;
}
vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
pic->slice_buffers, 2 * pic->nb_slices);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to upload slices: "
"%d (%s).\n", vas, vaErrorStr(vas));
err = AVERROR(EIO);
goto fail_with_picture;
}
vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to end picture decode "
"issue: %d (%s).\n", vas, vaErrorStr(vas));
err = AVERROR(EIO);
if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS)
goto fail;
else
goto fail_at_end;
}
if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS)
ff_vaapi_decode_destroy_buffers(avctx, pic);
err = 0;
goto exit;
fail_with_picture:
vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to end picture decode "
"after error: %d (%s).\n", vas, vaErrorStr(vas));
}
fail:
ff_vaapi_decode_destroy_buffers(avctx, pic);
fail_at_end:
exit:
pic->nb_param_buffers = 0;
pic->nb_slices = 0;
pic->slices_allocated = 0;
av_freep(&pic->slice_buffers);
return err;
}
int ff_vaapi_decode_cancel(AVCodecContext *avctx,
VAAPIDecodePicture *pic)
{
ff_vaapi_decode_destroy_buffers(avctx, pic);
pic->nb_param_buffers = 0;
pic->nb_slices = 0;
pic->slices_allocated = 0;
av_freep(&pic->slice_buffers);
return 0;
}
static const struct {
uint32_t fourcc;
enum AVPixelFormat pix_fmt;
} vaapi_format_map[] = {
#define MAP(va, av) { VA_FOURCC_ ## va, AV_PIX_FMT_ ## av }
// 4:0:0
MAP(Y800, GRAY8),
// 4:2:0
MAP(NV12, NV12),
MAP(YV12, YUV420P),
MAP(IYUV, YUV420P),
#ifdef VA_FOURCC_I420
MAP(I420, YUV420P),
#endif
MAP(IMC3, YUV420P),
// 4:1:1
MAP(411P, YUV411P),
// 4:2:2
MAP(422H, YUV422P),
#ifdef VA_FOURCC_YV16
MAP(YV16, YUV422P),
#endif
// 4:4:0
MAP(422V, YUV440P),
// 4:4:4
MAP(444P, YUV444P),
// 4:2:0 10-bit
#ifdef VA_FOURCC_P010
MAP(P010, P010),
#endif
#ifdef VA_FOURCC_I010
MAP(I010, YUV420P10),
#endif
#undef MAP
};
static int vaapi_decode_find_best_format(AVCodecContext *avctx,
AVHWDeviceContext *device,
VAConfigID config_id,
AVHWFramesContext *frames)
{
AVVAAPIDeviceContext *hwctx = device->hwctx;
VAStatus vas;
VASurfaceAttrib *attr;
enum AVPixelFormat source_format, best_format, format;
uint32_t best_fourcc, fourcc;
int i, j, nb_attr;
source_format = avctx->sw_pix_fmt;
av_assert0(source_format != AV_PIX_FMT_NONE);
vas = vaQuerySurfaceAttributes(hwctx->display, config_id,
NULL, &nb_attr);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to query surface attributes: "
"%d (%s).\n", vas, vaErrorStr(vas));
return AVERROR(ENOSYS);
}
attr = av_malloc_array(nb_attr, sizeof(*attr));
if (!attr)
return AVERROR(ENOMEM);
vas = vaQuerySurfaceAttributes(hwctx->display, config_id,
attr, &nb_attr);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to query surface attributes: "
"%d (%s).\n", vas, vaErrorStr(vas));
av_freep(&attr);
return AVERROR(ENOSYS);
}
best_format = AV_PIX_FMT_NONE;
for (i = 0; i < nb_attr; i++) {
if (attr[i].type != VASurfaceAttribPixelFormat)
continue;
fourcc = attr[i].value.value.i;
for (j = 0; j < FF_ARRAY_ELEMS(vaapi_format_map); j++) {
if (fourcc == vaapi_format_map[j].fourcc)
break;
}
if (j >= FF_ARRAY_ELEMS(vaapi_format_map)) {
av_log(avctx, AV_LOG_DEBUG, "Ignoring unknown format %#x.\n",
fourcc);
continue;
}
format = vaapi_format_map[j].pix_fmt;
av_log(avctx, AV_LOG_DEBUG, "Considering format %#x -> %s.\n",
fourcc, av_get_pix_fmt_name(format));
best_format = av_find_best_pix_fmt_of_2(format, best_format,
source_format, 0, NULL);
if (format == best_format)
best_fourcc = fourcc;
}
av_freep(&attr);
if (best_format == AV_PIX_FMT_NONE) {
av_log(avctx, AV_LOG_ERROR, "No usable formats for decoding!\n");
return AVERROR(EINVAL);
}
av_log(avctx, AV_LOG_DEBUG, "Picked %s (%#x) as best match for %s.\n",
av_get_pix_fmt_name(best_format), best_fourcc,
av_get_pix_fmt_name(source_format));
frames->sw_format = best_format;
if (avctx->internal->hwaccel_priv_data) {
VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
AVVAAPIFramesContext *avfc = frames->hwctx;
ctx->pixel_format_attribute = (VASurfaceAttrib) {
.type = VASurfaceAttribPixelFormat,
.value.value.i = best_fourcc,
};
avfc->attributes = &ctx->pixel_format_attribute;
avfc->nb_attributes = 1;
}
return 0;
}
static const struct {
enum AVCodecID codec_id;
int codec_profile;
VAProfile va_profile;
} vaapi_profile_map[] = {
#define MAP(c, p, v) { AV_CODEC_ID_ ## c, FF_PROFILE_ ## p, VAProfile ## v }
MAP(MPEG2VIDEO, MPEG2_SIMPLE, MPEG2Simple ),
MAP(MPEG2VIDEO, MPEG2_MAIN, MPEG2Main ),
MAP(H263, UNKNOWN, H263Baseline),
MAP(MPEG4, MPEG4_SIMPLE, MPEG4Simple ),
MAP(MPEG4, MPEG4_ADVANCED_SIMPLE,
MPEG4AdvancedSimple),
MAP(MPEG4, MPEG4_MAIN, MPEG4Main ),
MAP(H264, H264_CONSTRAINED_BASELINE,
H264ConstrainedBaseline),
MAP(H264, H264_MAIN, H264Main ),
MAP(H264, H264_HIGH, H264High ),
#if VA_CHECK_VERSION(0, 37, 0)
MAP(HEVC, HEVC_MAIN, HEVCMain ),
MAP(HEVC, HEVC_MAIN_10, HEVCMain10 ),
#endif
MAP(MJPEG, MJPEG_HUFFMAN_BASELINE_DCT,
JPEGBaseline),
MAP(WMV3, VC1_SIMPLE, VC1Simple ),
MAP(WMV3, VC1_MAIN, VC1Main ),
MAP(WMV3, VC1_COMPLEX, VC1Advanced ),
MAP(WMV3, VC1_ADVANCED, VC1Advanced ),
MAP(VC1, VC1_SIMPLE, VC1Simple ),
MAP(VC1, VC1_MAIN, VC1Main ),
MAP(VC1, VC1_COMPLEX, VC1Advanced ),
MAP(VC1, VC1_ADVANCED, VC1Advanced ),
MAP(VP8, UNKNOWN, VP8Version0_3 ),
#if VA_CHECK_VERSION(0, 38, 0)
MAP(VP9, VP9_0, VP9Profile0 ),
#endif
#if VA_CHECK_VERSION(0, 39, 0)
MAP(VP9, VP9_2, VP9Profile2 ),
#endif
#undef MAP
};
/*
* Set *va_config and the frames_ref fields from the current codec parameters
* in avctx.
*/
static int vaapi_decode_make_config(AVCodecContext *avctx,
AVBufferRef *device_ref,
VAConfigID *va_config,
AVBufferRef *frames_ref)
{
AVVAAPIHWConfig *hwconfig = NULL;
AVHWFramesConstraints *constraints = NULL;
VAStatus vas;
int err, i, j;
const AVCodecDescriptor *codec_desc;
VAProfile *profile_list = NULL, matched_va_profile;
int profile_count, exact_match, matched_ff_profile;
AVHWDeviceContext *device = (AVHWDeviceContext*)device_ref->data;
AVVAAPIDeviceContext *hwctx = device->hwctx;
codec_desc = avcodec_descriptor_get(avctx->codec_id);
if (!codec_desc) {
err = AVERROR(EINVAL);
goto fail;
}
profile_count = vaMaxNumProfiles(hwctx->display);
profile_list = av_malloc_array(profile_count,
sizeof(VAProfile));
if (!profile_list) {
err = AVERROR(ENOMEM);
goto fail;
}
vas = vaQueryConfigProfiles(hwctx->display,
profile_list, &profile_count);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to query profiles: "
"%d (%s).\n", vas, vaErrorStr(vas));
err = AVERROR(ENOSYS);
goto fail;
}
matched_va_profile = VAProfileNone;
exact_match = 0;
for (i = 0; i < FF_ARRAY_ELEMS(vaapi_profile_map); i++) {
int profile_match = 0;
if (avctx->codec_id != vaapi_profile_map[i].codec_id)
continue;
if (avctx->profile == vaapi_profile_map[i].codec_profile ||
vaapi_profile_map[i].codec_profile == FF_PROFILE_UNKNOWN)
profile_match = 1;
for (j = 0; j < profile_count; j++) {
if (vaapi_profile_map[i].va_profile == profile_list[j]) {
exact_match = profile_match;
break;
}
}
if (j < profile_count) {
matched_va_profile = vaapi_profile_map[i].va_profile;
matched_ff_profile = vaapi_profile_map[i].codec_profile;
if (exact_match)
break;
}
}
av_freep(&profile_list);
if (matched_va_profile == VAProfileNone) {
av_log(avctx, AV_LOG_ERROR, "No support for codec %s "
"profile %d.\n", codec_desc->name, avctx->profile);
err = AVERROR(ENOSYS);
goto fail;
}
if (!exact_match) {
if (avctx->hwaccel_flags &
AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH) {
av_log(avctx, AV_LOG_VERBOSE, "Codec %s profile %d not "
"supported for hardware decode.\n",
codec_desc->name, avctx->profile);
av_log(avctx, AV_LOG_WARNING, "Using possibly-"
"incompatible profile %d instead.\n",
matched_ff_profile);
} else {
av_log(avctx, AV_LOG_VERBOSE, "Codec %s profile %d not "
"supported for hardware decode.\n",
codec_desc->name, avctx->profile);
err = AVERROR(EINVAL);
goto fail;
}
}
vas = vaCreateConfig(hwctx->display, matched_va_profile,
VAEntrypointVLD, NULL, 0,
va_config);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to create decode "
"configuration: %d (%s).\n", vas, vaErrorStr(vas));
err = AVERROR(EIO);
goto fail;
}
hwconfig = av_hwdevice_hwconfig_alloc(device_ref);
if (!hwconfig) {
err = AVERROR(ENOMEM);
goto fail;
}
hwconfig->config_id = *va_config;
constraints =
av_hwdevice_get_hwframe_constraints(device_ref, hwconfig);
if (!constraints) {
err = AVERROR(ENOMEM);
goto fail;
}
if (avctx->coded_width < constraints->min_width ||
avctx->coded_height < constraints->min_height ||
avctx->coded_width > constraints->max_width ||
avctx->coded_height > constraints->max_height) {
av_log(avctx, AV_LOG_ERROR, "Hardware does not support image "
"size %dx%d (constraints: width %d-%d height %d-%d).\n",
avctx->coded_width, avctx->coded_height,
constraints->min_width, constraints->max_width,
constraints->min_height, constraints->max_height);
err = AVERROR(EINVAL);
goto fail;
}
if (!constraints->valid_sw_formats ||
constraints->valid_sw_formats[0] == AV_PIX_FMT_NONE) {
av_log(avctx, AV_LOG_ERROR, "Hardware does not offer any "
"usable surface formats.\n");
err = AVERROR(EINVAL);
goto fail;
}
if (frames_ref) {
AVHWFramesContext *frames = (AVHWFramesContext *)frames_ref->data;
frames->format = AV_PIX_FMT_VAAPI;
frames->width = avctx->coded_width;
frames->height = avctx->coded_height;
err = vaapi_decode_find_best_format(avctx, device,
*va_config, frames);
if (err < 0)
goto fail;
frames->initial_pool_size = 1;
// Add per-codec number of surfaces used for storing reference frames.
switch (avctx->codec_id) {
case AV_CODEC_ID_H264:
case AV_CODEC_ID_HEVC:
frames->initial_pool_size += 16;
break;
case AV_CODEC_ID_VP9:
frames->initial_pool_size += 8;
break;
case AV_CODEC_ID_VP8:
frames->initial_pool_size += 3;
break;
default:
frames->initial_pool_size += 2;
}
}
av_hwframe_constraints_free(&constraints);
av_freep(&hwconfig);
return 0;
fail:
av_hwframe_constraints_free(&constraints);
av_freep(&hwconfig);
if (*va_config != VA_INVALID_ID) {
vaDestroyConfig(hwctx->display, *va_config);
*va_config = VA_INVALID_ID;
}
av_freep(&profile_list);
return err;
}
/**
 * Fill the common frame parameters of a VAAPI hwframes context.
 *
 * A temporary decode configuration is built (which, as a side effect of
 * vaapi_decode_make_config(), fills in the frame-pool parameters of
 * hw_frames_ctx) and then immediately destroyed again.
 *
 * @param avctx         codec context being configured
 * @param hw_frames_ctx frames context to fill; must be backed by a
 *                      VAAPI device
 * @return 0 on success, a negative AVERROR code on failure.
 */
int ff_vaapi_common_frame_params(AVCodecContext *avctx,
                                 AVBufferRef *hw_frames_ctx)
{
    AVHWFramesContext *frames = (AVHWFramesContext *)hw_frames_ctx->data;
    AVHWDeviceContext *device = frames->device_ctx;
    VAConfigID config = VA_INVALID_ID;
    int ret;

    // Only a VAAPI device can back a VAAPI frames context.
    if (device->type != AV_HWDEVICE_TYPE_VAAPI)
        return AVERROR(EINVAL);

    ret = vaapi_decode_make_config(avctx, frames->device_ref, &config,
                                   hw_frames_ctx);
    if (ret)
        return ret;

    // The configuration was only needed to derive the pool parameters.
    if (config != VA_INVALID_ID) {
        AVVAAPIDeviceContext *vaapi = device->hwctx;
        vaDestroyConfig(vaapi->display, config);
    }

    return 0;
}
/**
 * Initialise VAAPI decoding for the given codec context.
 *
 * Creates the VA configuration and the VA decode context bound to the
 * frame pool's surfaces.  When FF_API_STRUCT_VAAPI_CONTEXT is enabled
 * and the user supplied a legacy struct vaapi_context in
 * avctx->hwaccel_context, its config/context IDs are adopted instead.
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
int ff_vaapi_decode_init(AVCodecContext *avctx)
{
    VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
    VAStatus vas;
    int err;

    // Start from "nothing created" so uninit on the fail path is safe.
    ctx->va_config  = VA_INVALID_ID;
    ctx->va_context = VA_INVALID_ID;

#if FF_API_STRUCT_VAAPI_CONTEXT
    if (avctx->hwaccel_context) {
        av_log(avctx, AV_LOG_WARNING, "Using deprecated struct "
               "vaapi_context in decode.\n");

        ctx->have_old_context = 1;
        ctx->old_context = avctx->hwaccel_context;

        // Really we only want the VAAPI device context, but this
        // allocates a whole generic device context because we don't
        // have any other way to determine how big it should be.
        ctx->device_ref =
            av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VAAPI);
        if (!ctx->device_ref) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        ctx->device = (AVHWDeviceContext*)ctx->device_ref->data;
        ctx->hwctx  = ctx->device->hwctx;
        ctx->hwctx->display = ctx->old_context->display;

        // The old VAAPI decode setup assumed this quirk was always
        // present, so set it here to avoid the behaviour changing.
        ctx->hwctx->driver_quirks =
            AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS;
    }
#endif

#if FF_API_STRUCT_VAAPI_CONTEXT
    if (ctx->have_old_context) {
        // Adopt the user-created config/context; we do not own them.
        ctx->va_config  = ctx->old_context->config_id;
        ctx->va_context = ctx->old_context->context_id;

        av_log(avctx, AV_LOG_DEBUG, "Using user-supplied decoder "
               "context: %#x/%#x.\n", ctx->va_config, ctx->va_context);
    } else {
#endif

    err = ff_decode_get_hw_frames_ctx(avctx, AV_HWDEVICE_TYPE_VAAPI);
    if (err < 0)
        goto fail;

    ctx->frames = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
    ctx->hwfc   = ctx->frames->hwctx;
    ctx->device = ctx->frames->device_ctx;
    ctx->hwctx  = ctx->device->hwctx;

    err = vaapi_decode_make_config(avctx, ctx->frames->device_ref,
                                   &ctx->va_config, avctx->hw_frames_ctx);
    if (err)
        goto fail;

    // The whole fixed-size frame pool becomes the set of render targets
    // of the decode context.
    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          avctx->coded_width, avctx->coded_height,
                          VA_PROGRESSIVE,
                          ctx->hwfc->surface_ids,
                          ctx->hwfc->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create decode "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    av_log(avctx, AV_LOG_DEBUG, "Decode context initialised: "
           "%#x/%#x.\n", ctx->va_config, ctx->va_context);
#if FF_API_STRUCT_VAAPI_CONTEXT
    }
#endif

    return 0;

fail:
    ff_vaapi_decode_uninit(avctx);
    return err;
}
/**
 * Tear down the VA context/config created by ff_vaapi_decode_init().
 *
 * Safe on a partially-initialised context: both IDs are checked against
 * VA_INVALID_ID before destruction.  With a user-supplied legacy struct
 * vaapi_context the IDs belong to the user, so only the temporary
 * device reference is released.
 */
int ff_vaapi_decode_uninit(AVCodecContext *avctx)
{
    VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
    VAStatus vas;

#if FF_API_STRUCT_VAAPI_CONTEXT
    if (ctx->have_old_context) {
        av_buffer_unref(&ctx->device_ref);
    } else {
#endif

    if (ctx->va_context != VA_INVALID_ID) {
        vas = vaDestroyContext(ctx->hwctx->display, ctx->va_context);
        if (vas != VA_STATUS_SUCCESS) {
            // Destruction failures are logged but not propagated.
            av_log(avctx, AV_LOG_ERROR, "Failed to destroy decode "
                   "context %#x: %d (%s).\n",
                   ctx->va_context, vas, vaErrorStr(vas));
        }
    }
    if (ctx->va_config != VA_INVALID_ID) {
        vas = vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to destroy decode "
                   "configuration %#x: %d (%s).\n",
                   ctx->va_config, vas, vaErrorStr(vas));
        }
    }

#if FF_API_STRUCT_VAAPI_CONTEXT
    }
#endif

    return 0;
}

Просмотреть файл

@ -1,105 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VAAPI_DECODE_H
#define AVCODEC_VAAPI_DECODE_H
#include <va/va.h>
#include <va/va_dec_vp9.h>
#include "libavutil/frame.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_vaapi.h"
#include "avcodec.h"
#include "version.h"
#if FF_API_STRUCT_VAAPI_CONTEXT
#include "vaapi.h"
#endif
/**
 * Retrieve the VASurfaceID backing a VAAPI-format frame.
 *
 * For AV_PIX_FMT_VAAPI frames the surface handle is stored in data[3].
 */
static inline VASurfaceID ff_vaapi_get_surface_id(AVFrame *pic)
{
    uintptr_t handle = (uintptr_t)pic->data[3];
    return (VASurfaceID)handle;
}
enum {
    // Maximum number of parameter buffers that may be queued for one
    // picture before it is issued to the hardware.
    MAX_PARAM_BUFFERS = 16,
};

/**
 * Per-picture decode state, stored in hwaccel_picture_private.
 */
typedef struct VAAPIDecodePicture {
    // Surface the hardware will decode this picture into.
    VASurfaceID           output_surface;

    // Parameter buffers queued for this picture.
    int                nb_param_buffers;
    VABufferID            param_buffers[MAX_PARAM_BUFFERS];

    // Slice buffers; the array grows on demand, slices_allocated
    // tracks its current capacity.
    int                nb_slices;
    VABufferID           *slice_buffers;
    int                   slices_allocated;
} VAAPIDecodePicture;

/**
 * Per-codec-context decode state, stored in hwaccel_priv_data.
 */
typedef struct VAAPIDecodeContext {
    VAConfigID            va_config;
    VAContextID           va_context;

#if FF_API_STRUCT_VAAPI_CONTEXT
FF_DISABLE_DEPRECATION_WARNINGS
    // Set when a deprecated user-supplied struct vaapi_context is used;
    // in that case config/context IDs are owned by the user.
    int                   have_old_context;
    struct vaapi_context *old_context;
    AVBufferRef          *device_ref;
FF_ENABLE_DEPRECATION_WARNINGS
#endif

    AVHWDeviceContext    *device;
    AVVAAPIDeviceContext *hwctx;

    AVHWFramesContext    *frames;
    AVVAAPIFramesContext *hwfc;

    enum AVPixelFormat    surface_format;
    int                   surface_count;

    VASurfaceAttrib       pixel_format_attribute;
} VAAPIDecodeContext;

// Queue one parameter buffer of the given VA buffer type for pic.
int ff_vaapi_decode_make_param_buffer(AVCodecContext *avctx,
                                      VAAPIDecodePicture *pic,
                                      int type,
                                      const void *data,
                                      size_t size);

// Queue one slice (slice-parameter buffer plus slice data) for pic.
int ff_vaapi_decode_make_slice_buffer(AVCodecContext *avctx,
                                      VAAPIDecodePicture *pic,
                                      const void *params_data,
                                      size_t params_size,
                                      const void *slice_data,
                                      size_t slice_size);

// Submit all queued buffers of pic to the hardware.
int ff_vaapi_decode_issue(AVCodecContext *avctx,
                          VAAPIDecodePicture *pic);
// Abandon pic, releasing queued buffers without decoding (error paths).
int ff_vaapi_decode_cancel(AVCodecContext *avctx,
                           VAAPIDecodePicture *pic);

int ff_vaapi_decode_init(AVCodecContext *avctx);
int ff_vaapi_decode_uninit(AVCodecContext *avctx);

int ff_vaapi_common_frame_params(AVCodecContext *avctx,
                                 AVBufferRef *hw_frames_ctx);

#endif /* AVCODEC_VAAPI_DECODE_H */

Просмотреть файл

@ -1,237 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <va/va.h>
#include <va/va_dec_vp8.h>
#include "hwaccel.h"
#include "vaapi_decode.h"
#include "vp8.h"
// Map a VP8 frame slot to its VAAPI surface; an absent reference is
// reported as VA_INVALID_SURFACE.
static VASurfaceID vaapi_vp8_surface_id(VP8Frame *vf)
{
    return vf ? ff_vaapi_get_surface_id(vf->tf.f) : VA_INVALID_SURFACE;
}
/**
 * Begin decoding a VP8 frame: translate the parsed headers into the
 * picture-parameter, probability and IQ-matrix buffers and queue them.
 *
 * The bitstream itself is passed later through decode_slice, so the
 * buffer/size arguments are unused here.
 *
 * @return 0 on success, a negative AVERROR code on failure (the
 *         picture is cancelled on failure).
 */
static int vaapi_vp8_start_frame(AVCodecContext *avctx,
                                 av_unused const uint8_t *buffer,
                                 av_unused uint32_t size)
{
    const VP8Context *s = avctx->priv_data;
    VAAPIDecodePicture *pic = s->framep[VP56_FRAME_CURRENT]->hwaccel_picture_private;
    VAPictureParameterBufferVP8 pp;
    VAProbabilityDataBufferVP8 prob;
    VAIQMatrixBufferVP8 quant;
    int err, i, j, k;

    pic->output_surface = vaapi_vp8_surface_id(s->framep[VP56_FRAME_CURRENT]);

    pp = (VAPictureParameterBufferVP8) {
        .frame_width                     = avctx->width,
        .frame_height                    = avctx->height,

        // Reference surfaces; VA_INVALID_SURFACE where no reference exists.
        .last_ref_frame                  = vaapi_vp8_surface_id(s->framep[VP56_FRAME_PREVIOUS]),
        .golden_ref_frame                = vaapi_vp8_surface_id(s->framep[VP56_FRAME_GOLDEN]),
        .alt_ref_frame                   = vaapi_vp8_surface_id(s->framep[VP56_FRAME_GOLDEN2]),
        .out_of_loop_frame               = VA_INVALID_SURFACE,

        .pic_fields.bits = {
            // Deliberately inverted: libva defines 0 as "key frame".
            .key_frame                   = !s->keyframe,
            .version                     = s->profile,

            .segmentation_enabled        = s->segmentation.enabled,
            .update_mb_segmentation_map  = s->segmentation.update_map,
            .update_segment_feature_data = s->segmentation.update_feature_data,

            .filter_type                 = s->filter.simple,
            .sharpness_level             = s->filter.sharpness,

            .loop_filter_adj_enable      = s->lf_delta.enabled,
            .mode_ref_lf_delta_update    = s->lf_delta.update,

            .sign_bias_golden            = s->sign_bias[VP56_FRAME_GOLDEN],
            .sign_bias_alternate         = s->sign_bias[VP56_FRAME_GOLDEN2],

            .mb_no_coeff_skip            = s->mbskip_enabled,
            .loop_filter_disable         = s->filter.level == 0,
        },

        .prob_skip_false                 = s->prob->mbskip,
        .prob_intra                      = s->prob->intra,
        .prob_last                       = s->prob->last,
        .prob_gf                         = s->prob->golden,
    };

    for (i = 0; i < 3; i++)
        pp.mb_segment_tree_probs[i] = s->prob->segmentid[i];

    // Per-segment loop-filter levels: delta-coded segment values are
    // added to the frame level; without segmentation the frame level is
    // replicated.  Clipped to the 6-bit field range.
    for (i = 0; i < 4; i++) {
        if (s->segmentation.enabled) {
            pp.loop_filter_level[i] = s->segmentation.filter_level[i];
            if (!s->segmentation.absolute_vals)
                pp.loop_filter_level[i] += s->filter.level;
        } else {
            pp.loop_filter_level[i] = s->filter.level;
        }
        pp.loop_filter_level[i] = av_clip_uintp2(pp.loop_filter_level[i], 6);
    }

    for (i = 0; i < 4; i++) {
        pp.loop_filter_deltas_ref_frame[i] = s->lf_delta.ref[i];
        pp.loop_filter_deltas_mode[i] = s->lf_delta.mode[i + 4];
    }

    if (s->keyframe) {
        // Key frames use the fixed mode probabilities; presumably the
        // constants from the VP8 spec — values match FFmpeg's tables.
        static const uint8_t keyframe_y_mode_probs[4] = {
            145, 156, 163, 128
        };
        static const uint8_t keyframe_uv_mode_probs[3] = {
            142, 114, 183
        };
        memcpy(pp.y_mode_probs,  keyframe_y_mode_probs,  4);
        memcpy(pp.uv_mode_probs, keyframe_uv_mode_probs, 3);
    } else {
        for (i = 0; i < 4; i++)
            pp.y_mode_probs[i]  = s->prob->pred16x16[i];
        for (i = 0; i < 3; i++)
            pp.uv_mode_probs[i] = s->prob->pred8x8c[i];
    }

    for (i = 0; i < 2; i++)
        for (j = 0; j < 19; j++)
            pp.mv_probs[i][j] = s->prob->mvc[i][j];

    // Boolean-coder state at the end of the uncompressed header, so the
    // hardware can resume entropy decoding where the parser stopped.
    pp.bool_coder_ctx.range = s->coder_state_at_header_end.range;
    pp.bool_coder_ctx.value = s->coder_state_at_header_end.value;
    pp.bool_coder_ctx.count = s->coder_state_at_header_end.bit_count;

    err = ff_vaapi_decode_make_param_buffer(avctx, pic,
                                            VAPictureParameterBufferType,
                                            &pp, sizeof(pp));
    if (err < 0)
        goto fail;

    // DCT coefficient probabilities: remap bands via coeff_bands_inverse
    // (the API's band order apparently differs from FFmpeg's token
    // table layout — confirm against libva va_dec_vp8.h).
    for (i = 0; i < 4; i++) {
        for (j = 0; j < 8; j++) {
            static const int coeff_bands_inverse[8] = {
                0, 1, 2, 3, 5, 6, 4, 15
            };
            int coeff_pos = coeff_bands_inverse[j];

            for (k = 0; k < 3; k++) {
                memcpy(prob.dct_coeff_probs[i][j][k],
                       s->prob->token[i][coeff_pos][k], 11);
            }
        }
    }

    err = ff_vaapi_decode_make_param_buffer(avctx, pic,
                                            VAProbabilityBufferType,
                                            &prob, sizeof(prob));
    if (err < 0)
        goto fail;

    // Quantiser indices per segment, with per-plane deltas applied and
    // clipped to the 7-bit field range.
    for (i = 0; i < 4; i++) {
        int base_qi = s->segmentation.base_quant[i];
        if (!s->segmentation.absolute_vals)
            base_qi += s->quant.yac_qi;

        quant.quantization_index[i][0] = av_clip_uintp2(base_qi, 7);
        quant.quantization_index[i][1] = av_clip_uintp2(base_qi + s->quant.ydc_delta, 7);
        quant.quantization_index[i][2] = av_clip_uintp2(base_qi + s->quant.y2dc_delta, 7);
        quant.quantization_index[i][3] = av_clip_uintp2(base_qi + s->quant.y2ac_delta, 7);
        quant.quantization_index[i][4] = av_clip_uintp2(base_qi + s->quant.uvdc_delta, 7);
        quant.quantization_index[i][5] = av_clip_uintp2(base_qi + s->quant.uvac_delta, 7);
    }

    err = ff_vaapi_decode_make_param_buffer(avctx, pic,
                                            VAIQMatrixBufferType,
                                            &quant, sizeof(quant));
    if (err < 0)
        goto fail;

    return 0;

fail:
    // Release any buffers queued so far for this picture.
    ff_vaapi_decode_cancel(avctx, pic);
    return err;
}
// Submit everything queued for the current frame to the hardware.
static int vaapi_vp8_end_frame(AVCodecContext *avctx)
{
    const VP8Context *ctx = avctx->priv_data;
    VP8Frame *cur = ctx->framep[VP56_FRAME_CURRENT];

    return ff_vaapi_decode_issue(avctx, cur->hwaccel_picture_private);
}
/**
 * Queue the frame's data partitions as a single slice.
 *
 * buffer/size cover the whole frame as handed to the decoder; the
 * uncompressed data chunk at the start is skipped here and described
 * to the hardware via offsets instead.
 */
static int vaapi_vp8_decode_slice(AVCodecContext *avctx,
                                  const uint8_t *buffer,
                                  uint32_t size)
{
    const VP8Context *s = avctx->priv_data;
    VAAPIDecodePicture *pic = s->framep[VP56_FRAME_CURRENT]->hwaccel_picture_private;
    VASliceParameterBufferVP8 sp;
    int err, i;

    // Uncompressed data chunk: 3 bytes always, plus a further 7 on key
    // frames (start code and dimensions).
    unsigned int header_size = 3 + 7 * s->keyframe;
    const uint8_t *data = buffer + header_size;
    unsigned int data_size = size - header_size;

    sp = (VASliceParameterBufferVP8) {
        .slice_data_size   = data_size,
        .slice_data_offset = 0,
        .slice_data_flag   = VA_SLICE_DATA_FLAG_ALL,

        // Bit offset into the first partition where macroblock data
        // begins, computed from how far the parser's bool coder got.
        .macroblock_offset = (8 * (s->coder_state_at_header_end.input - data) -
                              s->coder_state_at_header_end.bit_count - 8),

        .num_of_partitions = s->num_coeff_partitions + 1,
    };

    // Partition 0 is what remains of the first (header) partition after
    // the already-consumed header bits.
    sp.partition_size[0] = s->header_partition_size - ((sp.macroblock_offset + 7) / 8);
    for (i = 0; i < 8; i++)
        sp.partition_size[i+1] = s->coeff_partition_size[i];

    err = ff_vaapi_decode_make_slice_buffer(avctx, pic, &sp, sizeof(sp), data, data_size);
    if (err)
        goto fail;

    return 0;

fail:
    ff_vaapi_decode_cancel(avctx, pic);
    return err;
}
// Hardware-acceleration descriptor hooking VP8 decoding up to VAAPI.
const AVHWAccel ff_vp8_vaapi_hwaccel = {
    .name                 = "vp8_vaapi",
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = AV_CODEC_ID_VP8,
    .pix_fmt              = AV_PIX_FMT_VAAPI,
    .start_frame          = &vaapi_vp8_start_frame,
    .end_frame            = &vaapi_vp8_end_frame,
    .decode_slice         = &vaapi_vp8_decode_slice,
    .frame_priv_data_size = sizeof(VAAPIDecodePicture),
    .init                 = &ff_vaapi_decode_init,
    .uninit               = &ff_vaapi_decode_uninit,
    .frame_params         = &ff_vaapi_common_frame_params,
    .priv_data_size       = sizeof(VAAPIDecodeContext),
    .caps_internal        = HWACCEL_CAP_ASYNC_SAFE,
};

Просмотреть файл

@ -1,185 +0,0 @@
/*
* VP9 HW decode acceleration through VA API
*
* Copyright (C) 2015 Timo Rothenpieler <timo@rothenpieler.org>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/pixdesc.h"
#include "hwaccel.h"
#include "vaapi_decode.h"
#include "vp9shared.h"
// Map a VP9 frame slot to its VAAPI surface; an absent reference is
// reported as VA_INVALID_SURFACE.
static VASurfaceID vaapi_vp9_surface_id(const VP9Frame *vf)
{
    return vf ? ff_vaapi_get_surface_id(vf->tf.f) : VA_INVALID_SURFACE;
}
/**
 * Begin decoding a VP9 frame: translate the parsed frame header into a
 * VADecPictureParameterBufferVP9 and queue it.
 *
 * Slice data follows separately through decode_slice, so buffer/size
 * are unused here.
 */
static int vaapi_vp9_start_frame(AVCodecContext *avctx,
                                 av_unused const uint8_t *buffer,
                                 av_unused uint32_t size)
{
    const VP9SharedContext *h = avctx->priv_data;
    VAAPIDecodePicture *pic = h->frames[CUR_FRAME].hwaccel_picture_private;
    VADecPictureParameterBufferVP9 pic_param;
    // Chroma subsampling comes from the negotiated software format.
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
    int err, i;

    pic->output_surface = vaapi_vp9_surface_id(&h->frames[CUR_FRAME]);

    pic_param = (VADecPictureParameterBufferVP9) {
        .frame_width                      = avctx->width,
        .frame_height                     = avctx->height,

        .pic_fields.bits = {
            .subsampling_x                = pixdesc->log2_chroma_w,
            .subsampling_y                = pixdesc->log2_chroma_h,
            // Deliberately inverted relative to h->h.keyframe —
            // presumably 0 means key frame in libva; confirm.
            .frame_type                   = !h->h.keyframe,
            .show_frame                   = !h->h.invisible,
            .error_resilient_mode         = h->h.errorres,
            .intra_only                   = h->h.intraonly,
            .allow_high_precision_mv      = h->h.keyframe ? 0 : h->h.highprecisionmvs,
            .mcomp_filter_type            = h->h.filtermode ^ (h->h.filtermode <= 1),
            .frame_parallel_decoding_mode = h->h.parallelmode,
            .reset_frame_context          = h->h.resetctx,
            .refresh_frame_context        = h->h.refreshctx,
            .frame_context_idx            = h->h.framectxid,

            .segmentation_enabled         = h->h.segmentation.enabled,
            .segmentation_temporal_update = h->h.segmentation.temporal,
            .segmentation_update_map      = h->h.segmentation.update_map,

            // The three active reference slots and their sign biases.
            .last_ref_frame               = h->h.refidx[0],
            .last_ref_frame_sign_bias     = h->h.signbias[0],
            .golden_ref_frame             = h->h.refidx[1],
            .golden_ref_frame_sign_bias   = h->h.signbias[1],
            .alt_ref_frame                = h->h.refidx[2],
            .alt_ref_frame_sign_bias      = h->h.signbias[2],
            .lossless_flag                = h->h.lossless,
        },

        .filter_level                     = h->h.filter.level,
        .sharpness_level                  = h->h.filter.sharpness,
        .log2_tile_rows                   = h->h.tiling.log2_tile_rows,
        .log2_tile_columns                = h->h.tiling.log2_tile_cols,

        .frame_header_length_in_bytes     = h->h.uncompressed_header_size,
        .first_partition_size             = h->h.compressed_header_size,

        .profile                          = h->h.profile,
        .bit_depth                        = h->h.bpp,
    };

    for (i = 0; i < 7; i++)
        pic_param.mb_segment_tree_probs[i] = h->h.segmentation.prob[i];

    if (h->h.segmentation.temporal) {
        for (i = 0; i < 3; i++)
            pic_param.segment_pred_probs[i] = h->h.segmentation.pred_prob[i];
    } else {
        // Fixed probability 255 when temporal prediction is unused.
        memset(pic_param.segment_pred_probs, 255, sizeof(pic_param.segment_pred_probs));
    }

    // All eight reference slots, valid or not.
    for (i = 0; i < 8; i++) {
        if (h->refs[i].f->buf[0])
            pic_param.reference_frames[i] = ff_vaapi_get_surface_id(h->refs[i].f);
        else
            pic_param.reference_frames[i] = VA_INVALID_ID;
    }

    err = ff_vaapi_decode_make_param_buffer(avctx, pic,
                                            VAPictureParameterBufferType,
                                            &pic_param, sizeof(pic_param));
    if (err < 0) {
        ff_vaapi_decode_cancel(avctx, pic);
        return err;
    }

    return 0;
}
// Submit everything queued for the current frame to the hardware.
static int vaapi_vp9_end_frame(AVCodecContext *avctx)
{
    const VP9SharedContext *shared = avctx->priv_data;

    return ff_vaapi_decode_issue(avctx,
                                 shared->frames[CUR_FRAME].hwaccel_picture_private);
}
/**
 * Queue the compressed frame data plus the per-segment decode
 * parameters as a single slice.
 */
static int vaapi_vp9_decode_slice(AVCodecContext *avctx,
                                  const uint8_t *buffer,
                                  uint32_t size)
{
    const VP9SharedContext *h = avctx->priv_data;
    VAAPIDecodePicture *pic = h->frames[CUR_FRAME].hwaccel_picture_private;
    VASliceParameterBufferVP9 slice_param;
    int err, i;

    slice_param = (VASliceParameterBufferVP9) {
        .slice_data_size   = size,
        .slice_data_offset = 0,
        .slice_data_flag   = VA_SLICE_DATA_FLAG_ALL,
    };

    // Per-segment parameters: reference selection flags, quantiser
    // scales per plane/band, and loop-filter levels.
    for (i = 0; i < 8; i++) {
        slice_param.seg_param[i] = (VASegmentParameterVP9) {
            .segment_flags.fields = {
                .segment_reference_enabled = h->h.segmentation.feat[i].ref_enabled,
                .segment_reference         = h->h.segmentation.feat[i].ref_val,
                .segment_reference_skipped = h->h.segmentation.feat[i].skip_enabled,
            },

            .luma_dc_quant_scale   = h->h.segmentation.feat[i].qmul[0][0],
            .luma_ac_quant_scale   = h->h.segmentation.feat[i].qmul[0][1],
            .chroma_dc_quant_scale = h->h.segmentation.feat[i].qmul[1][0],
            .chroma_ac_quant_scale = h->h.segmentation.feat[i].qmul[1][1],
        };

        memcpy(slice_param.seg_param[i].filter_level, h->h.segmentation.feat[i].lflvl, sizeof(slice_param.seg_param[i].filter_level));
    }

    err = ff_vaapi_decode_make_slice_buffer(avctx, pic,
                                            &slice_param, sizeof(slice_param),
                                            buffer, size);
    if (err) {
        ff_vaapi_decode_cancel(avctx, pic);
        return err;
    }

    return 0;
}
// Hardware-acceleration descriptor hooking VP9 decoding up to VAAPI.
const AVHWAccel ff_vp9_vaapi_hwaccel = {
    .name                 = "vp9_vaapi",
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = AV_CODEC_ID_VP9,
    .pix_fmt              = AV_PIX_FMT_VAAPI,
    .start_frame          = vaapi_vp9_start_frame,
    .end_frame            = vaapi_vp9_end_frame,
    .decode_slice         = vaapi_vp9_decode_slice,
    .frame_priv_data_size = sizeof(VAAPIDecodePicture),
    .init                 = ff_vaapi_decode_init,
    .uninit               = ff_vaapi_decode_uninit,
    .frame_params         = ff_vaapi_common_frame_params,
    .priv_data_size       = sizeof(VAAPIDecodeContext),
    .caps_internal        = HWACCEL_CAP_ASYNC_SAFE,
};

Просмотреть файл

@ -158,9 +158,6 @@ av_get_token
av_gettime av_gettime
av_gettime_relative av_gettime_relative
av_gettime_relative_is_monotonic av_gettime_relative_is_monotonic
av_hwdevice_get_hwframe_constraints
av_hwdevice_hwconfig_alloc
av_hwframe_constraints_free
av_hwframe_get_buffer av_hwframe_get_buffer
av_image_alloc av_image_alloc
av_image_check_sar av_image_check_sar
@ -323,9 +320,5 @@ avpriv_slicethread_free
av_hwdevice_get_type_name av_hwdevice_get_type_name
av_hwframe_ctx_alloc av_hwframe_ctx_alloc
av_hwframe_ctx_init av_hwframe_ctx_init
av_hwdevice_ctx_alloc
av_hwdevice_ctx_init
av_hwframe_transfer_get_formats
av_hwdevice_ctx_create_derived
av_malloc_array av_malloc_array
av_mallocz_array av_mallocz_array

Просмотреть файл

@ -1,289 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <fcntl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <drm.h>
#include <xf86drm.h>
#include "avassert.h"
#include "hwcontext.h"
#include "hwcontext_drm.h"
#include "hwcontext_internal.h"
#include "imgutils.h"
/**
 * AVHWDeviceContext.free callback: close the DRM device fd opened in
 * drm_device_create().
 */
static void drm_device_free(AVHWDeviceContext *hwdev)
{
    AVDRMDeviceContext *hwctx = hwdev->hwctx;

    close(hwctx->fd);
}
/**
 * Create a DRM device context by opening the given device node
 * (e.g. /dev/dri/renderD128) and verifying via drmGetVersion() that it
 * really is a DRM device.
 *
 * @param hwdev  device context to initialise; hwctx->fd receives the fd
 *               and hwdev->free is set to close it again.
 * @param device path to the DRM device node; must not be NULL (there is
 *               no default device to fall back on).
 * @param opts   unused.
 * @param flags  unused.
 * @return 0 on success, a negative AVERROR code on failure.
 */
static int drm_device_create(AVHWDeviceContext *hwdev, const char *device,
                             AVDictionary *opts, int flags)
{
    AVDRMDeviceContext *hwctx = hwdev->hwctx;
    drmVersionPtr version;

    // Guard against a NULL path reaching open().
    if (!device)
        return AVERROR(EINVAL);

    // O_CLOEXEC so the device fd is not leaked into exec'd children.
    hwctx->fd = open(device, O_RDWR | O_CLOEXEC);
    if (hwctx->fd < 0)
        return AVERROR(errno);

    version = drmGetVersion(hwctx->fd);
    if (!version) {
        av_log(hwdev, AV_LOG_ERROR, "Failed to get version information "
               "from %s: probably not a DRM device?\n", device);
        close(hwctx->fd);
        return AVERROR(EINVAL);
    }

    av_log(hwdev, AV_LOG_VERBOSE, "Opened DRM device %s: driver %s "
           "version %d.%d.%d.\n", device, version->name,
           version->version_major, version->version_minor,
           version->version_patchlevel);

    drmFreeVersion(version);
    hwdev->free = &drm_device_free;
    return 0;
}
// Hand out a frame from the context's buffer pool; the pool entry's
// data pointer becomes data[0] of the DRM-PRIME frame.
static int drm_get_buffer(AVHWFramesContext *hwfc, AVFrame *frame)
{
    AVBufferRef *buf = av_buffer_pool_get(hwfc->pool);

    if (!buf)
        return AVERROR(ENOMEM);

    frame->buf[0]  = buf;
    frame->data[0] = (uint8_t*)buf->data;
    frame->format  = AV_PIX_FMT_DRM_PRIME;
    frame->width   = hwfc->width;
    frame->height  = hwfc->height;

    return 0;
}
/**
 * Private state of a mapped DRM frame: one mmap()ed region per DRM
 * object, torn down again in drm_unmap_frame().
 */
typedef struct DRMMapping {
    // Address and length of each mmap()ed region.
    int nb_regions;
    void *address[AV_DRM_MAX_PLANES];
    size_t length[AV_DRM_MAX_PLANES];
} DRMMapping;
// Unmap callback registered by drm_map_frame(): release every mmap()ed
// region and the mapping descriptor itself.
static void drm_unmap_frame(AVHWFramesContext *hwfc,
                            HWMapDescriptor *hwmap)
{
    DRMMapping *mapping = hwmap->priv;
    int region;

    for (region = 0; region < mapping->nb_regions; region++)
        munmap(mapping->address[region], mapping->length[region]);

    av_free(mapping);
}
/**
 * mmap() every DRM object of a DRM-PRIME frame and expose the planes of
 * each layer as ordinary data/linesize pointers in dst.
 *
 * @param flags AV_HWFRAME_MAP_READ / AV_HWFRAME_MAP_WRITE select the
 *              mmap protection bits.
 * @return 0 on success, a negative AVERROR code on failure.
 */
static int drm_map_frame(AVHWFramesContext *hwfc,
                         AVFrame *dst, const AVFrame *src, int flags)
{
    const AVDRMFrameDescriptor *desc = (AVDRMFrameDescriptor*)src->data[0];
    DRMMapping *map;
    int err, i, p, plane;
    int mmap_prot;
    void *addr;

    // Zero-allocated so the failure path can tell mapped regions
    // (non-NULL address) from never-mapped ones.
    map = av_mallocz(sizeof(*map));
    if (!map)
        return AVERROR(ENOMEM);

    mmap_prot = 0;
    if (flags & AV_HWFRAME_MAP_READ)
        mmap_prot |= PROT_READ;
    if (flags & AV_HWFRAME_MAP_WRITE)
        mmap_prot |= PROT_WRITE;

    av_assert0(desc->nb_objects <= AV_DRM_MAX_PLANES);
    for (i = 0; i < desc->nb_objects; i++) {
        addr = mmap(NULL, desc->objects[i].size, mmap_prot, MAP_SHARED,
                    desc->objects[i].fd, 0);
        if (addr == MAP_FAILED) {
            err = AVERROR(errno);
            av_log(hwfc, AV_LOG_ERROR, "Failed to map DRM object %d to "
                   "memory: %d.\n", desc->objects[i].fd, errno);
            goto fail;
        }

        map->address[i] = addr;
        map->length[i]  = desc->objects[i].size;
    }
    map->nb_regions = i;

    // Walk layers and their planes in order, pointing dst's plane
    // pointers into the mapped regions.
    plane = 0;
    for (i = 0; i < desc->nb_layers; i++) {
        const AVDRMLayerDescriptor *layer = &desc->layers[i];
        for (p = 0; p < layer->nb_planes; p++) {
            dst->data[plane] =
                (uint8_t*)map->address[layer->planes[p].object_index] +
                layer->planes[p].offset;
            dst->linesize[plane] = layer->planes[p].pitch;
            ++plane;
        }
    }
    av_assert0(plane <= AV_DRM_MAX_PLANES);

    dst->width  = src->width;
    dst->height = src->height;

    // Register drm_unmap_frame() as the unmap callback; on success,
    // ownership of map passes to the map descriptor.
    err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src,
                                &drm_unmap_frame, map);
    if (err < 0)
        goto fail;

    return 0;

fail:
    for (i = 0; i < desc->nb_objects; i++) {
        if (map->address[i])
            munmap(map->address[i], map->length[i]);
    }
    av_free(map);
    return err;
}
// Software transfer is only possible in the frame pool's own software
// format, so report exactly that one format for either direction.
static int drm_transfer_get_formats(AVHWFramesContext *ctx,
                                    enum AVHWFrameTransferDirection dir,
                                    enum AVPixelFormat **formats)
{
    enum AVPixelFormat *list = av_malloc_array(2, sizeof(*list));

    if (!list)
        return AVERROR(ENOMEM);

    list[0] = ctx->sw_format;
    list[1] = AV_PIX_FMT_NONE;  // terminator

    *formats = list;
    return 0;
}
/**
 * Download: copy a DRM-PRIME frame into a software frame by temporarily
 * mapping it for reading.
 */
static int drm_transfer_data_from(AVHWFramesContext *hwfc,
                                  AVFrame *dst, const AVFrame *src)
{
    AVFrame *map;
    int err;

    if (dst->width > hwfc->width || dst->height > hwfc->height)
        return AVERROR(EINVAL);

    map = av_frame_alloc();
    if (!map)
        return AVERROR(ENOMEM);
    map->format = dst->format;

    err = drm_map_frame(hwfc, map, src, AV_HWFRAME_MAP_READ);
    if (err)
        goto fail;

    // Crop the mapped view to the destination size before copying.
    map->width  = dst->width;
    map->height = dst->height;

    err = av_frame_copy(dst, map);
    if (err)
        goto fail;

    err = 0;
fail:
    // Frees the mapped frame, unmapping via the callback registered in
    // drm_map_frame().
    av_frame_free(&map);
    return err;
}
/**
 * Upload: copy a software frame into a DRM-PRIME frame by temporarily
 * mapping the destination for writing.
 */
static int drm_transfer_data_to(AVHWFramesContext *hwfc,
                                AVFrame *dst, const AVFrame *src)
{
    AVFrame *map;
    int err;

    if (src->width > hwfc->width || src->height > hwfc->height)
        return AVERROR(EINVAL);

    map = av_frame_alloc();
    if (!map)
        return AVERROR(ENOMEM);
    map->format = src->format;

    // OVERWRITE: the previous contents need not be preserved.
    err = drm_map_frame(hwfc, map, dst, AV_HWFRAME_MAP_WRITE |
                                        AV_HWFRAME_MAP_OVERWRITE);
    if (err)
        goto fail;

    // Crop the mapped view to the source size before copying.
    map->width  = src->width;
    map->height = src->height;

    err = av_frame_copy(map, src);
    if (err)
        goto fail;

    err = 0;
fail:
    // Frees the mapped frame, unmapping via the callback registered in
    // drm_map_frame().
    av_frame_free(&map);
    return err;
}
// map_from callback: expose a DRM-PRIME frame as a software frame in
// the pool's software format, copying frame properties across.
static int drm_map_from(AVHWFramesContext *hwfc, AVFrame *dst,
                        const AVFrame *src, int flags)
{
    int ret;

    // Only mapping to the pool's own software format is implemented.
    if (dst->format != hwfc->sw_format)
        return AVERROR(ENOSYS);

    ret = drm_map_frame(hwfc, dst, src, flags);
    if (!ret)
        ret = av_frame_copy_props(dst, src);

    return ret;
}
// Hardware-context implementation table for AV_HWDEVICE_TYPE_DRM.
const HWContextType ff_hwcontext_type_drm = {
    .type                   = AV_HWDEVICE_TYPE_DRM,
    .name                   = "DRM",

    .device_hwctx_size      = sizeof(AVDRMDeviceContext),

    .device_create          = &drm_device_create,

    .frames_get_buffer      = &drm_get_buffer,

    .transfer_get_formats   = &drm_transfer_get_formats,
    .transfer_data_to       = &drm_transfer_data_to,
    .transfer_data_from     = &drm_transfer_data_from,
    .map_from               = &drm_map_from,

    .pix_fmts = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_DRM_PRIME,
        AV_PIX_FMT_NONE
    },
};

Просмотреть файл

@ -1,169 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_HWCONTEXT_DRM_H
#define AVUTIL_HWCONTEXT_DRM_H
#include <stddef.h>
#include <stdint.h>
/**
* @file
* API-specific header for AV_HWDEVICE_TYPE_DRM.
*
* Internal frame allocation is not currently supported - all frames
* must be allocated by the user. Thus AVHWFramesContext is always
* NULL, though this may change if support for frame allocation is
* added in future.
*/
enum {
    /**
     * The maximum number of layers/planes in a DRM frame.
     */
    AV_DRM_MAX_PLANES = 4
};

/**
 * DRM object descriptor.
 *
 * Describes a single DRM object, addressing it as a PRIME file
 * descriptor.
 */
typedef struct AVDRMObjectDescriptor {
    /**
     * DRM PRIME fd for the object.
     */
    int fd;
    /**
     * Total size of the object.
     *
     * (This includes any parts which do not contain image data.)
     */
    size_t size;
    /**
     * Format modifier applied to the object (DRM_FORMAT_MOD_*).
     *
     * If the format modifier is unknown then this should be set to
     * DRM_FORMAT_MOD_INVALID.
     */
    uint64_t format_modifier;
} AVDRMObjectDescriptor;

/**
 * DRM plane descriptor.
 *
 * Describes a single plane of a layer, which is contained within
 * a single object.
 */
typedef struct AVDRMPlaneDescriptor {
    /**
     * Index of the object containing this plane in the objects
     * array of the enclosing frame descriptor.
     */
    int object_index;
    /**
     * Offset within that object of this plane.
     */
    ptrdiff_t offset;
    /**
     * Pitch (linesize) of this plane.
     */
    ptrdiff_t pitch;
} AVDRMPlaneDescriptor;

/**
 * DRM layer descriptor.
 *
 * Describes a single layer within a frame. This has the structure
 * defined by its format, and will contain one or more planes.
 */
typedef struct AVDRMLayerDescriptor {
    /**
     * Format of the layer (DRM_FORMAT_*).
     */
    uint32_t format;
    /**
     * Number of planes in the layer.
     *
     * This must match the number of planes required by format.
     */
    int nb_planes;
    /**
     * Array of planes in this layer.
     */
    AVDRMPlaneDescriptor planes[AV_DRM_MAX_PLANES];
} AVDRMLayerDescriptor;

/**
 * DRM frame descriptor.
 *
 * This is used as the data pointer for AV_PIX_FMT_DRM_PRIME frames.
 * It is also used by user-allocated frame pools - allocating in
 * AVHWFramesContext.pool must return AVBufferRefs which contain
 * an object of this type.
 *
 * The fields of this structure should be set such it can be
 * imported directly by EGL using the EGL_EXT_image_dma_buf_import
 * and EGL_EXT_image_dma_buf_import_modifiers extensions.
 * (Note that the exact layout of a particular format may vary between
 * platforms - we only specify that the same platform should be able
 * to import it.)
 *
 * The total number of planes must not exceed AV_DRM_MAX_PLANES, and
 * the order of the planes by increasing layer index followed by
 * increasing plane index must be the same as the order which would
 * be used for the data pointers in the equivalent software format.
 */
typedef struct AVDRMFrameDescriptor {
    /**
     * Number of DRM objects making up this frame.
     */
    int nb_objects;
    /**
     * Array of objects making up the frame.
     */
    AVDRMObjectDescriptor objects[AV_DRM_MAX_PLANES];
    /**
     * Number of layers in the frame.
     */
    int nb_layers;
    /**
     * Array of layers in the frame.
     */
    AVDRMLayerDescriptor layers[AV_DRM_MAX_PLANES];
} AVDRMFrameDescriptor;

/**
 * DRM device.
 *
 * Allocated as AVHWDeviceContext.hwctx.
 */
typedef struct AVDRMDeviceContext {
    /**
     * File descriptor of DRM device.
     *
     * This is used as the device to create frames on, and may also be
     * used in some derivation and mapping operations.
     *
     * If no device is required, set to -1.
     */
    int fd;
} AVDRMDeviceContext;

#endif /* AVUTIL_HWCONTEXT_DRM_H */

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -1,117 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_HWCONTEXT_VAAPI_H
#define AVUTIL_HWCONTEXT_VAAPI_H
#include <va/va.h>
/**
* @file
* API-specific header for AV_HWDEVICE_TYPE_VAAPI.
*
* Dynamic frame pools are supported, but note that any pool used as a render
* target is required to be of fixed size in order to be be usable as an
* argument to vaCreateContext().
*
* For user-allocated pools, AVHWFramesContext.pool must return AVBufferRefs
* with the data pointer set to a VASurfaceID.
*/
/* Driver-quirk bit flags; combined in AVVAAPIDeviceContext.driver_quirks. */
enum {
    /**
     * The quirks field has been set by the user and should not be detected
     * automatically by av_hwdevice_ctx_init().
     */
    AV_VAAPI_DRIVER_QUIRK_USER_SET = (1 << 0),
    /**
     * The driver does not destroy parameter buffers when they are used by
     * vaRenderPicture(). Additional code will be required to destroy them
     * separately afterwards.
     */
    AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS = (1 << 1),
    /**
     * The driver does not support the VASurfaceAttribMemoryType attribute,
     * so the surface allocation code will not try to use it.
     */
    AV_VAAPI_DRIVER_QUIRK_ATTRIB_MEMTYPE = (1 << 2),
    /**
     * The driver does not support surface attributes at all.
     * The surface allocation code will never pass them to surface allocation,
     * and the results of the vaQuerySurfaceAttributes() call will be faked.
     */
    AV_VAAPI_DRIVER_QUIRK_SURFACE_ATTRIBUTES = (1 << 3),
};
/**
 * VAAPI connection details.
 *
 * Allocated as AVHWDeviceContext.hwctx.
 */
typedef struct AVVAAPIDeviceContext {
    /**
     * The VADisplay handle, to be filled by the user.
     */
    VADisplay display;
    /**
     * Driver quirks to apply - this is filled by av_hwdevice_ctx_init(),
     * with reference to a table of known drivers, unless the
     * AV_VAAPI_DRIVER_QUIRK_USER_SET bit is already present. The user
     * may need to refer to this field when performing any later
     * operations using VAAPI with the same VADisplay.
     */
    unsigned int driver_quirks;
} AVVAAPIDeviceContext;
/**
 * VAAPI-specific data associated with a frame pool.
 *
 * Allocated as AVHWFramesContext.hwctx.
 */
typedef struct AVVAAPIFramesContext {
    /**
     * Set by the user to apply surface attributes to all surfaces in
     * the frame pool. If null, default settings are used.
     */
    VASurfaceAttrib *attributes;
    /* Number of entries in the attributes array above. */
    int nb_attributes;
    /**
     * The surface IDs of all surfaces in the pool after creation.
     * Only valid if AVHWFramesContext.initial_pool_size was positive.
     * These are intended to be used as the render_targets arguments to
     * vaCreateContext().
     */
    VASurfaceID *surface_ids;
    /* Number of entries in the surface_ids array above. */
    int nb_surfaces;
} AVVAAPIFramesContext;
/**
 * VAAPI hardware pipeline configuration details.
 *
 * Allocated with av_hwdevice_hwconfig_alloc().
 */
typedef struct AVVAAPIHWConfig {
    /**
     * ID of a VAAPI pipeline configuration.
     */
    VAConfigID config_id;
} AVVAAPIHWConfig;
#endif /* AVUTIL_HWCONTEXT_VAAPI_H */

Просмотреть файл

@ -29,6 +29,7 @@ SOURCES += [
'fixed_dsp.c', 'fixed_dsp.c',
'float_dsp.c', 'float_dsp.c',
'frame.c', 'frame.c',
'hwcontext.c',
'imgutils.c', 'imgutils.c',
'log.c', 'log.c',
'log2_tab.c', 'log2_tab.c',
@ -57,12 +58,6 @@ if not CONFIG['MOZ_FFVPX_AUDIOONLY']:
'threadmessage.c', 'threadmessage.c',
'timecode.c' 'timecode.c'
] ]
if CONFIG['MOZ_WAYLAND']:
SOURCES += [
'hwcontext.c',
'hwcontext_drm.c',
'hwcontext_vaapi.c',
]
SYMBOLS_FILE = 'avutil.symbols' SYMBOLS_FILE = 'avutil.symbols'
NoVisibilityFlags() NoVisibilityFlags()

Просмотреть файл

@ -1,59 +0,0 @@
diff --git a/media/ffvpx/config_common.h b/media/ffvpx/config_common.h
--- a/media/ffvpx/config_common.h
+++ b/media/ffvpx/config_common.h
@@ -18,4 +18,14 @@
#define CONFIG_RDFT 1
#endif
+#ifdef MOZ_WAYLAND
+#define CONFIG_VAAPI 1
+#define CONFIG_VP8_VAAPI_HWACCEL 1
+#define CONFIG_VP9_VAAPI_HWACCEL 1
+#else
+#define CONFIG_VAAPI 0
+#define CONFIG_VP8_VAAPI_HWACCEL 0
+#define CONFIG_VP9_VAAPI_HWACCEL 0
#endif
+
+#endif
diff --git a/media/ffvpx/config_unix32.h b/media/ffvpx/config_unix32.h
--- a/media/ffvpx/config_unix32.h
+++ b/media/ffvpx/config_unix32.h
@@ -524,7 +524,6 @@
#define CONFIG_FFNVCODEC 0
#define CONFIG_NVDEC 0
#define CONFIG_NVENC 0
-#define CONFIG_VAAPI 0
#define CONFIG_VDPAU 0
#define CONFIG_VIDEOTOOLBOX 0
#define CONFIG_V4L2_M2M 0
diff --git a/media/ffvpx/config_unix64.h b/media/ffvpx/config_unix64.h
--- a/media/ffvpx/config_unix64.h
+++ b/media/ffvpx/config_unix64.h
@@ -524,7 +524,6 @@
#define CONFIG_FFNVCODEC 0
#define CONFIG_NVDEC 0
#define CONFIG_NVENC 0
-#define CONFIG_VAAPI 0
#define CONFIG_VDPAU 0
#define CONFIG_VIDEOTOOLBOX 0
#define CONFIG_V4L2_M2M 1
diff --git a/media/ffvpx/defaults_disabled.h b/media/ffvpx/defaults_disabled.h
--- a/media/ffvpx/defaults_disabled.h
+++ b/media/ffvpx/defaults_disabled.h
@@ -1706,7 +1706,6 @@
#define CONFIG_VP8_V4L2M2M_DECODER 0
#define CONFIG_VP8_V4L2M2M_ENCODER 0
#define CONFIG_VP8_VAAPI_ENCODER 0
-#define CONFIG_VP8_VAAPI_HWACCEL 0
#define CONFIG_VP9_CUVID_DECODER 0
#define CONFIG_VP9_D3D11VA2_HWACCEL 0
#define CONFIG_VP9_D3D11VA_HWACCEL 0
@@ -1719,7 +1718,6 @@
#define CONFIG_VP9_SUPERFRAME_BSF 0
#define CONFIG_VP9_V4L2M2M_DECODER 0
#define CONFIG_VP9_VAAPI_ENCODER 0
-#define CONFIG_VP9_VAAPI_HWACCEL 0
#define CONFIG_VPK_DEMUXER 0
#define CONFIG_VPLAYER_DECODER 0
#define CONFIG_VPLAYER_DEMUXER 0