Backed out 6 changesets (bug 1660336) for causing media-related failures. CLOSED TREE

Backed out changeset 1aa6c9d65403 (bug 1660336)
Backed out changeset f1dadb052d3a (bug 1660336)
Backed out changeset bdf9d27bd4d2 (bug 1660336)
Backed out changeset 373a658bb281 (bug 1660336)
Backed out changeset 5ccda5ab6563 (bug 1660336)
Backed out changeset e6f396b25887 (bug 1660336)
Csoregi Natalia 2020-11-27 05:08:40 +02:00
Parent e863ff22f1
Commit 3bbe4dde1f
41 changed files with 84 additions and 9221 deletions

View file

@@ -11,10 +11,6 @@
#include "mozilla/Types.h"
#include "PlatformDecoderModule.h"
#include "prlink.h"
#ifdef MOZ_WAYLAND
# include "mozilla/widget/DMABufLibWrapper.h"
# include "mozilla/StaticPrefs_media.h"
#endif
#define AV_LOG_DEBUG 48
#define AV_LOG_INFO 32
@@ -257,46 +253,6 @@ void FFmpegLibWrapper::Unlink() {
PodZero(this);
}
#ifdef MOZ_WAYLAND
void FFmpegLibWrapper::LinkVAAPILibs() {
if (widget::GetDMABufDevice()->IsDMABufVAAPIEnabled()) {
PRLibSpec lspec;
lspec.type = PR_LibSpec_Pathname;
const char* libDrm = "libva-drm.so.2";
lspec.value.pathname = libDrm;
mVALibDrm = PR_LoadLibraryWithFlags(lspec, PR_LD_NOW | PR_LD_LOCAL);
if (!mVALibDrm) {
FFMPEG_LOG("VA-API support: Missing or old %s library.\n", libDrm);
}
if (!StaticPrefs::media_ffmpeg_vaapi_drm_display_enabled()) {
const char* libWayland = "libva-wayland.so.2";
lspec.value.pathname = libWayland;
mVALibWayland = PR_LoadLibraryWithFlags(lspec, PR_LD_NOW | PR_LD_LOCAL);
if (!mVALibWayland) {
FFMPEG_LOG("VA-API support: Missing or old %s library.\n", libWayland);
}
}
if (mVALibWayland || mVALibDrm) {
const char* lib = "libva.so.2";
lspec.value.pathname = lib;
mVALib = PR_LoadLibraryWithFlags(lspec, PR_LD_NOW | PR_LD_LOCAL);
// Don't use libva when it's missing vaExportSurfaceHandle.
if (mVALib && !PR_FindSymbol(mVALib, "vaExportSurfaceHandle")) {
PR_UnloadLibrary(mVALib);
mVALib = nullptr;
}
if (!mVALib) {
FFMPEG_LOG("VA-API support: Missing or old %s library.\n", lib);
}
}
} else {
FFMPEG_LOG("VA-API FFmpeg is disabled by platform");
}
}
#endif
#ifdef MOZ_WAYLAND
bool FFmpegLibWrapper::IsVAAPIAvailable() {
# define VA_FUNC_LOADED(func) (func != nullptr)

View file

@@ -56,7 +56,6 @@ struct MOZ_ONLY_USED_TO_AVOID_STATIC_CONSTRUCTORS FFmpegLibWrapper {
#ifdef MOZ_WAYLAND
// Check if mVALib are available and we can use HW decode.
bool IsVAAPIAvailable();
void LinkVAAPILibs();
#endif
// indicate the version of libavcodec linked to.

View file

@@ -9,6 +9,10 @@
#include "mozilla/ArrayUtils.h"
#include "FFmpegLog.h"
#include "prlink.h"
#ifdef MOZ_WAYLAND
# include "mozilla/widget/DMABufLibWrapper.h"
# include "mozilla/StaticPrefs_media.h"
#endif
namespace mozilla {
@@ -54,7 +58,43 @@ bool FFmpegRuntimeLinker::Init() {
}
#ifdef MOZ_WAYLAND
sLibAV.LinkVAAPILibs();
if (widget::GetDMABufDevice()->IsDMABufVAAPIEnabled()) {
PRLibSpec lspec;
lspec.type = PR_LibSpec_Pathname;
const char* libDrm = "libva-drm.so.2";
lspec.value.pathname = libDrm;
sLibAV.mVALibDrm = PR_LoadLibraryWithFlags(lspec, PR_LD_NOW | PR_LD_LOCAL);
if (!sLibAV.mVALibDrm) {
FFMPEG_LOG("VA-API support: Missing or old %s library.\n", libDrm);
}
if (!StaticPrefs::media_ffmpeg_vaapi_drm_display_enabled()) {
const char* libWayland = "libva-wayland.so.2";
lspec.value.pathname = libWayland;
sLibAV.mVALibWayland =
PR_LoadLibraryWithFlags(lspec, PR_LD_NOW | PR_LD_LOCAL);
if (!sLibAV.mVALibWayland) {
FFMPEG_LOG("VA-API support: Missing or old %s library.\n", libWayland);
}
}
if (sLibAV.mVALibWayland || sLibAV.mVALibDrm) {
const char* lib = "libva.so.2";
lspec.value.pathname = lib;
sLibAV.mVALib = PR_LoadLibraryWithFlags(lspec, PR_LD_NOW | PR_LD_LOCAL);
// Don't use libva when it's missing vaExportSurfaceHandle.
if (sLibAV.mVALib &&
!PR_FindSymbol(sLibAV.mVALib, "vaExportSurfaceHandle")) {
PR_UnloadLibrary(sLibAV.mVALib);
sLibAV.mVALib = nullptr;
}
if (!sLibAV.mVALib) {
FFMPEG_LOG("VA-API support: Missing or old %s library.\n", lib);
}
}
} else {
FFMPEG_LOG("VA-API FFmpeg is disabled by platform");
}
#endif
// While going through all possible libs, this status will be updated with a

View file

@@ -126,7 +126,7 @@ static AVPixelFormat ChooseVAAPIPixelFormat(AVCodecContext* aCodecContext,
return AV_PIX_FMT_NONE;
}
DMABufSurfaceWrapper<LIBAV_VER>::DMABufSurfaceWrapper(DMABufSurface* aSurface,
DMABufSurfaceWrapper::DMABufSurfaceWrapper(DMABufSurface* aSurface,
FFmpegLibWrapper* aLib)
: mSurface(aSurface),
mLib(aLib),
@@ -140,8 +140,8 @@ DMABufSurfaceWrapper<LIBAV_VER>::DMABufSurfaceWrapper(DMABufSurface* aSurface,
mSurface->GetUID());
}
void DMABufSurfaceWrapper<LIBAV_VER>::LockVAAPIData(
AVCodecContext* aAVCodecContext, AVFrame* aAVFrame) {
void DMABufSurfaceWrapper::LockVAAPIData(AVCodecContext* aAVCodecContext,
AVFrame* aAVFrame) {
FFMPEG_LOG("DMABufSurfaceWrapper: VAAPI locking dmabuf surface UID = %d",
mSurface->GetUID());
if (aAVCodecContext && aAVFrame) {
@@ -150,7 +150,7 @@ void DMABufSurfaceWrapper<LIBAV_VER>::LockVAAPIData(
}
}
void DMABufSurfaceWrapper<LIBAV_VER>::ReleaseVAAPIData() {
void DMABufSurfaceWrapper::ReleaseVAAPIData() {
FFMPEG_LOG("DMABufSurfaceWrapper: VAAPI releasing dmabuf surface UID = %d",
mSurface->GetUID());
if (mHWAVBuffer && mAVHWFramesContext) {
@@ -160,7 +160,7 @@ void DMABufSurfaceWrapper<LIBAV_VER>::ReleaseVAAPIData() {
mSurface->ReleaseSurface();
}
DMABufSurfaceWrapper<LIBAV_VER>::~DMABufSurfaceWrapper() {
DMABufSurfaceWrapper::~DMABufSurfaceWrapper() {
FFMPEG_LOG("DMABufSurfaceWrapper: deleting dmabuf surface UID = %d",
mSurface->GetUID());
ReleaseVAAPIData();
@@ -183,14 +183,7 @@ AVCodec* FFmpegVideoDecoder<LIBAV_VER>::FindVAAPICodec() {
return nullptr;
}
template <int V>
class VAAPIDisplayHolder {};
template <>
class VAAPIDisplayHolder<LIBAV_VER>;
template <>
class VAAPIDisplayHolder<LIBAV_VER> {
class VAAPIDisplayHolder {
public:
VAAPIDisplayHolder(FFmpegLibWrapper* aLib, VADisplay aDisplay)
: mLib(aLib), mDisplay(aDisplay){};
@@ -202,8 +195,7 @@ class VAAPIDisplayHolder<LIBAV_VER> {
};
static void VAAPIDisplayReleaseCallback(struct AVHWDeviceContext* hwctx) {
auto displayHolder =
static_cast<VAAPIDisplayHolder<LIBAV_VER>*>(hwctx->user_opaque);
auto displayHolder = static_cast<VAAPIDisplayHolder*>(hwctx->user_opaque);
delete displayHolder;
}
@@ -239,7 +231,7 @@ bool FFmpegVideoDecoder<LIBAV_VER>::CreateVAAPIDeviceContext() {
}
}
hwctx->user_opaque = new VAAPIDisplayHolder<LIBAV_VER>(mLib, mDisplay);
hwctx->user_opaque = new VAAPIDisplayHolder(mLib, mDisplay);
hwctx->free = VAAPIDisplayReleaseCallback;
int major, minor;
@@ -713,7 +705,7 @@ void FFmpegVideoDecoder<LIBAV_VER>::ReleaseUnusedVAAPIFrames() {
}
}
DMABufSurfaceWrapper<LIBAV_VER>*
DMABufSurfaceWrapper*
FFmpegVideoDecoder<LIBAV_VER>::GetUnusedDMABufSurfaceWrapper() {
int len = mDMABufSurfaces.Length();
for (int i = 0; i < len; i++) {
@@ -779,8 +771,7 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageDMABuf(
RefPtr<DMABufSurfaceYUV> surface;
DMABufSurfaceWrapper<LIBAV_VER>* surfaceWrapper =
GetUnusedDMABufSurfaceWrapper();
DMABufSurfaceWrapper* surfaceWrapper = GetUnusedDMABufSurfaceWrapper();
if (!surfaceWrapper) {
if (mVAAPIDeviceContext) {
surface = DMABufSurfaceYUV::CreateYUVSurface(vaDesc);

View file

@@ -55,14 +55,7 @@ namespace mozilla {
// We own the DMABufSurface underlying GPU data and we use it for
// repeated rendering of video frames.
//
template <int V>
class DMABufSurfaceWrapper {};
template <>
class DMABufSurfaceWrapper<LIBAV_VER>;
template <>
class DMABufSurfaceWrapper<LIBAV_VER> final {
class DMABufSurfaceWrapper final {
public:
DMABufSurfaceWrapper(DMABufSurface* aSurface, FFmpegLibWrapper* aLib);
~DMABufSurfaceWrapper();
@@ -169,7 +162,7 @@ class FFmpegVideoDecoder<LIBAV_VER>
MediaDataDecoder::DecodedData& aResults);
void ReleaseUnusedVAAPIFrames();
DMABufSurfaceWrapper<LIBAV_VER>* GetUnusedDMABufSurfaceWrapper();
DMABufSurfaceWrapper* GetUnusedDMABufSurfaceWrapper();
void ReleaseDMABufSurfaces();
#endif
@@ -187,7 +180,7 @@ class FFmpegVideoDecoder<LIBAV_VER>
const bool mDisableHardwareDecoding;
VADisplay mDisplay;
bool mUseDMABufSurfaces;
nsTArray<DMABufSurfaceWrapper<LIBAV_VER>> mDMABufSurfaces;
nsTArray<DMABufSurfaceWrapper> mDMABufSurfaces;
#endif
RefPtr<KnowsCompositor> mImageAllocator;
RefPtr<ImageContainer> mImageContainer;

View file

@@ -27,6 +27,8 @@ if CONFIG['CC_TYPE'] == 'gcc':
]
if CONFIG['MOZ_WAYLAND']:
CXXFLAGS += CONFIG['TK_CFLAGS']
CXXFLAGS += CONFIG['MOZ_WAYLAND_CFLAGS']
DEFINES['MOZ_WAYLAND_USE_VAAPI'] = 1
include('/ipc/chromium/chromium-config.mozbuild')
FINAL_LIBRARY = 'xul'

View file

@@ -64,10 +64,6 @@ bool FFVPXRuntimeLinker::Init() {
MOZ_ASSERT(NS_IsMainThread());
sLinkStatus = LinkStatus_FAILED;
#ifdef MOZ_WAYLAND
sFFVPXLib.LinkVAAPILibs();
#endif
// We retrieve the path of the lgpllibs library as this is where mozavcodec
// and mozavutil libs are located.
PathString lgpllibsname = GetLibraryName(nullptr, "lgpllibs");

View file

@@ -36,8 +36,4 @@ if CONFIG["CC_TYPE"] == "gcc":
DEFINES["FFVPX_VERSION"] = 46465650
DEFINES["USING_MOZFFVPX"] = True
if CONFIG["MOZ_WAYLAND"]:
CXXFLAGS += CONFIG["TK_CFLAGS"]
DEFINES["MOZ_WAYLAND_USE_VAAPI"] = 1
FINAL_LIBRARY = "xul"

View file

@@ -5,23 +5,17 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "DMABUFSurfaceImage.h"
#include "mozilla/widget/DMABufSurface.h"
#include "gfxPlatform.h"
#include "mozilla/layers/CompositableClient.h"
#include "mozilla/layers/CompositableForwarder.h"
#include "mozilla/layers/DMABUFTextureClientOGL.h"
#include "mozilla/layers/TextureForwarder.h"
#include "mozilla/UniquePtr.h"
using namespace mozilla;
using namespace mozilla::layers;
using namespace mozilla::gfx;
DMABUFSurfaceImage::DMABUFSurfaceImage(DMABufSurface* aSurface)
: Image(nullptr, ImageFormat::DMABUF), mSurface(aSurface) {
mSurface->GlobalRefAdd();
}
DMABUFSurfaceImage::~DMABUFSurfaceImage() { mSurface->GlobalRefRelease(); }
TextureClient* DMABUFSurfaceImage::GetTextureClient(
KnowsCompositor* aKnowsCompositor) {
if (!mTextureClient) {
@@ -32,7 +26,3 @@ TextureClient* DMABUFSurfaceImage::GetTextureClient(
}
return mTextureClient;
}
gfx::IntSize DMABUFSurfaceImage::GetSize() const {
return gfx::IntSize::Truncate(mSurface->GetWidth(), mSurface->GetHeight());
}

View file

@@ -8,24 +8,32 @@
#define SURFACE_DMABUF_H
#include "ImageContainer.h"
class DMABufSurface;
#include "mozilla/widget/DMABufSurface.h"
#include "mozilla/gfx/Point.h"
#include "mozilla/layers/TextureClient.h"
namespace mozilla {
namespace layers {
class TextureClient;
class DMABUFSurfaceImage : public Image {
public:
explicit DMABUFSurfaceImage(DMABufSurface* aSurface);
~DMABUFSurfaceImage();
explicit DMABUFSurfaceImage(DMABufSurface* aSurface)
: Image(nullptr, ImageFormat::DMABUF), mSurface(aSurface) {
mSurface->GlobalRefAdd();
}
~DMABUFSurfaceImage() { mSurface->GlobalRefRelease(); }
DMABufSurface* GetSurface() { return mSurface; }
gfx::IntSize GetSize() const override;
gfx::IntSize GetSize() const override {
return gfx::IntSize::Truncate(mSurface->GetWidth(), mSurface->GetHeight());
}
already_AddRefed<gfx::SourceSurface> GetAsSourceSurface() override {
return nullptr;
}
TextureClient* GetTextureClient(KnowsCompositor* aKnowsCompositor) override;
private:

View file

@@ -131,11 +131,6 @@
./libavcodec/thread.h
./libavcodec/unary.h
./libavcodec/utils.c
./libavcodec/vaapi.h
./libavcodec/vaapi_decode.h
./libavcodec/vaapi_decode.c
./libavcodec/vaapi_vp8.c
./libavcodec/vaapi_vp9.c
./libavcodec/version.h
./libavcodec/videodsp.c
./libavcodec/videodsp.h
@@ -269,8 +264,6 @@
./libavutil/hwcontext.c
./libavutil/hwcontext.h
./libavutil/hwcontext_internal.h
./libavutil/hwcontext_vaapi.h
./libavutil/hwcontext_vaapi.c
./libavutil/imgutils.c
./libavutil/imgutils.h
./libavutil/imgutils_internal.h

View file

@@ -56,5 +56,3 @@ $ for i in `cat $PATH_CENTRAL/media/ffvpx/FILES`; do git diff $REV_LASTSYNC HEAD
Then apply patch.diff on the ffvpx tree.
Compilation will reveal if any files are missing.
Apply linux-vaapi-build.patch patch to enable build VA-API support for Linux.

View file

@@ -18,14 +18,4 @@
#define CONFIG_RDFT 1
#endif
#ifdef MOZ_WAYLAND
#define CONFIG_VAAPI 1
#define CONFIG_VP8_VAAPI_HWACCEL 1
#define CONFIG_VP9_VAAPI_HWACCEL 1
#else
#define CONFIG_VAAPI 0
#define CONFIG_VP8_VAAPI_HWACCEL 0
#define CONFIG_VP9_VAAPI_HWACCEL 0
#endif
#endif

View file

@@ -524,6 +524,7 @@
#define CONFIG_FFNVCODEC 0
#define CONFIG_NVDEC 0
#define CONFIG_NVENC 0
#define CONFIG_VAAPI 0
#define CONFIG_VDPAU 0
#define CONFIG_VIDEOTOOLBOX 0
#define CONFIG_V4L2_M2M 0

View file

@@ -524,6 +524,7 @@
#define CONFIG_FFNVCODEC 0
#define CONFIG_NVDEC 0
#define CONFIG_NVENC 0
#define CONFIG_VAAPI 0
#define CONFIG_VDPAU 0
#define CONFIG_VIDEOTOOLBOX 0
#define CONFIG_V4L2_M2M 1

View file

@@ -1706,6 +1706,7 @@
#define CONFIG_VP8_V4L2M2M_DECODER 0
#define CONFIG_VP8_V4L2M2M_ENCODER 0
#define CONFIG_VP8_VAAPI_ENCODER 0
#define CONFIG_VP8_VAAPI_HWACCEL 0
#define CONFIG_VP9_CUVID_DECODER 0
#define CONFIG_VP9_D3D11VA2_HWACCEL 0
#define CONFIG_VP9_D3D11VA_HWACCEL 0
@@ -1718,6 +1719,7 @@
#define CONFIG_VP9_SUPERFRAME_BSF 0
#define CONFIG_VP9_V4L2M2M_DECODER 0
#define CONFIG_VP9_VAAPI_ENCODER 0
#define CONFIG_VP9_VAAPI_HWACCEL 0
#define CONFIG_VPK_DEMUXER 0
#define CONFIG_VPLAYER_DECODER 0
#define CONFIG_VPLAYER_DEMUXER 0

View file

@@ -71,9 +71,5 @@ elif not CONFIG['RELEASE_OR_BETA']:
# Enable fast assertions in opt builds of Nightly and Aurora.
DEFINES['ASSERT_LEVEL'] = 1
if CONFIG['MOZ_WAYLAND']:
CFLAGS += CONFIG['MOZ_WAYLAND_CFLAGS']
CXXFLAGS += CONFIG['MOZ_WAYLAND_CFLAGS']
# Add libFuzzer configuration directives
include('/tools/fuzzing/libfuzzer-config.mozbuild')

View file

@@ -28,11 +28,6 @@ av_get_pcm_codec
av_get_profile_name
av_grow_packet
av_hwaccel_next
av_hwdevice_ctx_init
av_hwdevice_ctx_alloc
av_hwdevice_ctx_create_derived
av_hwframe_transfer_get_formats
av_hwframe_ctx_alloc
av_init_packet
av_lockmgr_register
av_new_packet
@@ -98,7 +93,6 @@ avcodec_free_context
avcodec_get_class
avcodec_get_context_defaults3
avcodec_get_frame_class
avcodec_get_hw_config
avcodec_get_name
avcodec_get_subtitle_rect_class
avcodec_get_type

View file

@@ -96,13 +96,6 @@ if not CONFIG['MOZ_FFVPX_AUDIOONLY']:
'vp9prob.c',
'vp9recon.c'
]
if CONFIG['MOZ_WAYLAND']:
SOURCES += [
'vaapi_decode.c',
'vaapi_vp8.c',
'vaapi_vp9.c',
]
USE_LIBS += ['mozva']
if CONFIG['MOZ_LIBAV_FFT']:
SOURCES += [

View file

@@ -1,86 +0,0 @@
/*
* Video Acceleration API (shared data between FFmpeg and the video player)
* HW decode acceleration for MPEG-2, MPEG-4, H.264 and VC-1
*
* Copyright (C) 2008-2009 Splitted-Desktop Systems
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VAAPI_H
#define AVCODEC_VAAPI_H
/**
* @file
* @ingroup lavc_codec_hwaccel_vaapi
* Public libavcodec VA API header.
*/
#include <stdint.h>
#include "libavutil/attributes.h"
#include "version.h"
#if FF_API_STRUCT_VAAPI_CONTEXT
/**
* @defgroup lavc_codec_hwaccel_vaapi VA API Decoding
* @ingroup lavc_codec_hwaccel
* @{
*/
/**
* This structure is used to share data between the FFmpeg library and
* the client video application.
* This shall be zero-allocated and available as
* AVCodecContext.hwaccel_context. All user members can be set once
* during initialization or through each AVCodecContext.get_buffer()
* function call. In any case, they must be valid prior to calling
* decoding functions.
*
* Deprecated: use AVCodecContext.hw_frames_ctx instead.
*/
struct attribute_deprecated vaapi_context {
/**
* Window system dependent data
*
* - encoding: unused
* - decoding: Set by user
*/
void *display;
/**
* Configuration ID
*
* - encoding: unused
* - decoding: Set by user
*/
uint32_t config_id;
/**
* Context ID (video decode pipeline)
*
* - encoding: unused
* - decoding: Set by user
*/
uint32_t context_id;
};
/* @} */
#endif /* FF_API_STRUCT_VAAPI_CONTEXT */
#endif /* AVCODEC_VAAPI_H */
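The removed header above documents the legacy decode path: the application zero-allocates a vaapi_context and exposes it through AVCodecContext.hwaccel_context before decoding starts. A minimal illustrative sketch of that (deprecated) wiring, assuming a VADisplay plus IDs obtained elsewhere via vaCreateConfig()/vaCreateContext(); the helper name is hypothetical:

#include <va/va.h>
#include <libavcodec/avcodec.h>
#include <libavcodec/vaapi.h>
#include <libavutil/error.h>
#include <libavutil/mem.h>

/* Hypothetical sketch of the deprecated hwaccel_context setup described above. */
static int attach_vaapi_context(AVCodecContext *avctx, VADisplay display,
                                VAConfigID config_id, VAContextID context_id)
{
    struct vaapi_context *va = av_mallocz(sizeof(*va)); /* must be zero-allocated */
    if (!va)
        return AVERROR(ENOMEM);
    va->display    = display;     /* window-system handle, set by the user */
    va->config_id  = config_id;   /* from vaCreateConfig() */
    va->context_id = context_id;  /* from vaCreateContext() */
    avctx->hwaccel_context = va;  /* must stay valid while decoding runs */
    return 0;
}

As the header itself notes, this struct is deprecated in favour of AVCodecContext.hw_frames_ctx.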

View file

@@ -1,732 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/pixdesc.h"
#include "avcodec.h"
#include "decode.h"
#include "internal.h"
#include "vaapi_decode.h"
int ff_vaapi_decode_make_param_buffer(AVCodecContext *avctx,
VAAPIDecodePicture *pic,
int type,
const void *data,
size_t size)
{
VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
VAStatus vas;
VABufferID buffer;
av_assert0(pic->nb_param_buffers + 1 <= MAX_PARAM_BUFFERS);
vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
type, size, 1, (void*)data, &buffer);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to create parameter "
"buffer (type %d): %d (%s).\n",
type, vas, vaErrorStr(vas));
return AVERROR(EIO);
}
pic->param_buffers[pic->nb_param_buffers++] = buffer;
av_log(avctx, AV_LOG_DEBUG, "Param buffer (type %d, %zu bytes) "
"is %#x.\n", type, size, buffer);
return 0;
}
int ff_vaapi_decode_make_slice_buffer(AVCodecContext *avctx,
VAAPIDecodePicture *pic,
const void *params_data,
size_t params_size,
const void *slice_data,
size_t slice_size)
{
VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
VAStatus vas;
int index;
av_assert0(pic->nb_slices <= pic->slices_allocated);
if (pic->nb_slices == pic->slices_allocated) {
if (pic->slices_allocated > 0)
pic->slices_allocated *= 2;
else
pic->slices_allocated = 64;
pic->slice_buffers =
av_realloc_array(pic->slice_buffers,
pic->slices_allocated,
2 * sizeof(*pic->slice_buffers));
if (!pic->slice_buffers)
return AVERROR(ENOMEM);
}
av_assert0(pic->nb_slices + 1 <= pic->slices_allocated);
index = 2 * pic->nb_slices;
vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
VASliceParameterBufferType,
params_size, 1, (void*)params_data,
&pic->slice_buffers[index]);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to create slice "
"parameter buffer: %d (%s).\n", vas, vaErrorStr(vas));
return AVERROR(EIO);
}
av_log(avctx, AV_LOG_DEBUG, "Slice %d param buffer (%zu bytes) "
"is %#x.\n", pic->nb_slices, params_size,
pic->slice_buffers[index]);
vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
VASliceDataBufferType,
slice_size, 1, (void*)slice_data,
&pic->slice_buffers[index + 1]);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to create slice "
"data buffer (size %zu): %d (%s).\n",
slice_size, vas, vaErrorStr(vas));
vaDestroyBuffer(ctx->hwctx->display,
pic->slice_buffers[index]);
return AVERROR(EIO);
}
av_log(avctx, AV_LOG_DEBUG, "Slice %d data buffer (%zu bytes) "
"is %#x.\n", pic->nb_slices, slice_size,
pic->slice_buffers[index + 1]);
++pic->nb_slices;
return 0;
}
static void ff_vaapi_decode_destroy_buffers(AVCodecContext *avctx,
VAAPIDecodePicture *pic)
{
VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
VAStatus vas;
int i;
for (i = 0; i < pic->nb_param_buffers; i++) {
vas = vaDestroyBuffer(ctx->hwctx->display,
pic->param_buffers[i]);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to destroy "
"parameter buffer %#x: %d (%s).\n",
pic->param_buffers[i], vas, vaErrorStr(vas));
}
}
for (i = 0; i < 2 * pic->nb_slices; i++) {
vas = vaDestroyBuffer(ctx->hwctx->display,
pic->slice_buffers[i]);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to destroy slice "
"slice buffer %#x: %d (%s).\n",
pic->slice_buffers[i], vas, vaErrorStr(vas));
}
}
}
int ff_vaapi_decode_issue(AVCodecContext *avctx,
VAAPIDecodePicture *pic)
{
VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
VAStatus vas;
int err;
av_log(avctx, AV_LOG_DEBUG, "Decode to surface %#x.\n",
pic->output_surface);
vas = vaBeginPicture(ctx->hwctx->display, ctx->va_context,
pic->output_surface);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to begin picture decode "
"issue: %d (%s).\n", vas, vaErrorStr(vas));
err = AVERROR(EIO);
goto fail_with_picture;
}
vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
pic->param_buffers, pic->nb_param_buffers);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to upload decode "
"parameters: %d (%s).\n", vas, vaErrorStr(vas));
err = AVERROR(EIO);
goto fail_with_picture;
}
vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
pic->slice_buffers, 2 * pic->nb_slices);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to upload slices: "
"%d (%s).\n", vas, vaErrorStr(vas));
err = AVERROR(EIO);
goto fail_with_picture;
}
vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to end picture decode "
"issue: %d (%s).\n", vas, vaErrorStr(vas));
err = AVERROR(EIO);
if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS)
goto fail;
else
goto fail_at_end;
}
if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS)
ff_vaapi_decode_destroy_buffers(avctx, pic);
err = 0;
goto exit;
fail_with_picture:
vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to end picture decode "
"after error: %d (%s).\n", vas, vaErrorStr(vas));
}
fail:
ff_vaapi_decode_destroy_buffers(avctx, pic);
fail_at_end:
exit:
pic->nb_param_buffers = 0;
pic->nb_slices = 0;
pic->slices_allocated = 0;
av_freep(&pic->slice_buffers);
return err;
}
int ff_vaapi_decode_cancel(AVCodecContext *avctx,
VAAPIDecodePicture *pic)
{
ff_vaapi_decode_destroy_buffers(avctx, pic);
pic->nb_param_buffers = 0;
pic->nb_slices = 0;
pic->slices_allocated = 0;
av_freep(&pic->slice_buffers);
return 0;
}
static const struct {
uint32_t fourcc;
enum AVPixelFormat pix_fmt;
} vaapi_format_map[] = {
#define MAP(va, av) { VA_FOURCC_ ## va, AV_PIX_FMT_ ## av }
// 4:0:0
MAP(Y800, GRAY8),
// 4:2:0
MAP(NV12, NV12),
MAP(YV12, YUV420P),
MAP(IYUV, YUV420P),
#ifdef VA_FOURCC_I420
MAP(I420, YUV420P),
#endif
MAP(IMC3, YUV420P),
// 4:1:1
MAP(411P, YUV411P),
// 4:2:2
MAP(422H, YUV422P),
#ifdef VA_FOURCC_YV16
MAP(YV16, YUV422P),
#endif
// 4:4:0
MAP(422V, YUV440P),
// 4:4:4
MAP(444P, YUV444P),
// 4:2:0 10-bit
#ifdef VA_FOURCC_P010
MAP(P010, P010),
#endif
#ifdef VA_FOURCC_I010
MAP(I010, YUV420P10),
#endif
#undef MAP
};
static int vaapi_decode_find_best_format(AVCodecContext *avctx,
AVHWDeviceContext *device,
VAConfigID config_id,
AVHWFramesContext *frames)
{
AVVAAPIDeviceContext *hwctx = device->hwctx;
VAStatus vas;
VASurfaceAttrib *attr;
enum AVPixelFormat source_format, best_format, format;
uint32_t best_fourcc, fourcc;
int i, j, nb_attr;
source_format = avctx->sw_pix_fmt;
av_assert0(source_format != AV_PIX_FMT_NONE);
vas = vaQuerySurfaceAttributes(hwctx->display, config_id,
NULL, &nb_attr);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to query surface attributes: "
"%d (%s).\n", vas, vaErrorStr(vas));
return AVERROR(ENOSYS);
}
attr = av_malloc_array(nb_attr, sizeof(*attr));
if (!attr)
return AVERROR(ENOMEM);
vas = vaQuerySurfaceAttributes(hwctx->display, config_id,
attr, &nb_attr);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to query surface attributes: "
"%d (%s).\n", vas, vaErrorStr(vas));
av_freep(&attr);
return AVERROR(ENOSYS);
}
best_format = AV_PIX_FMT_NONE;
for (i = 0; i < nb_attr; i++) {
if (attr[i].type != VASurfaceAttribPixelFormat)
continue;
fourcc = attr[i].value.value.i;
for (j = 0; j < FF_ARRAY_ELEMS(vaapi_format_map); j++) {
if (fourcc == vaapi_format_map[j].fourcc)
break;
}
if (j >= FF_ARRAY_ELEMS(vaapi_format_map)) {
av_log(avctx, AV_LOG_DEBUG, "Ignoring unknown format %#x.\n",
fourcc);
continue;
}
format = vaapi_format_map[j].pix_fmt;
av_log(avctx, AV_LOG_DEBUG, "Considering format %#x -> %s.\n",
fourcc, av_get_pix_fmt_name(format));
best_format = av_find_best_pix_fmt_of_2(format, best_format,
source_format, 0, NULL);
if (format == best_format)
best_fourcc = fourcc;
}
av_freep(&attr);
if (best_format == AV_PIX_FMT_NONE) {
av_log(avctx, AV_LOG_ERROR, "No usable formats for decoding!\n");
return AVERROR(EINVAL);
}
av_log(avctx, AV_LOG_DEBUG, "Picked %s (%#x) as best match for %s.\n",
av_get_pix_fmt_name(best_format), best_fourcc,
av_get_pix_fmt_name(source_format));
frames->sw_format = best_format;
if (avctx->internal->hwaccel_priv_data) {
VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
AVVAAPIFramesContext *avfc = frames->hwctx;
ctx->pixel_format_attribute = (VASurfaceAttrib) {
.type = VASurfaceAttribPixelFormat,
.value.value.i = best_fourcc,
};
avfc->attributes = &ctx->pixel_format_attribute;
avfc->nb_attributes = 1;
}
return 0;
}
static const struct {
enum AVCodecID codec_id;
int codec_profile;
VAProfile va_profile;
} vaapi_profile_map[] = {
#define MAP(c, p, v) { AV_CODEC_ID_ ## c, FF_PROFILE_ ## p, VAProfile ## v }
MAP(MPEG2VIDEO, MPEG2_SIMPLE, MPEG2Simple ),
MAP(MPEG2VIDEO, MPEG2_MAIN, MPEG2Main ),
MAP(H263, UNKNOWN, H263Baseline),
MAP(MPEG4, MPEG4_SIMPLE, MPEG4Simple ),
MAP(MPEG4, MPEG4_ADVANCED_SIMPLE,
MPEG4AdvancedSimple),
MAP(MPEG4, MPEG4_MAIN, MPEG4Main ),
MAP(H264, H264_CONSTRAINED_BASELINE,
H264ConstrainedBaseline),
MAP(H264, H264_MAIN, H264Main ),
MAP(H264, H264_HIGH, H264High ),
#if VA_CHECK_VERSION(0, 37, 0)
MAP(HEVC, HEVC_MAIN, HEVCMain ),
MAP(HEVC, HEVC_MAIN_10, HEVCMain10 ),
#endif
MAP(MJPEG, MJPEG_HUFFMAN_BASELINE_DCT,
JPEGBaseline),
MAP(WMV3, VC1_SIMPLE, VC1Simple ),
MAP(WMV3, VC1_MAIN, VC1Main ),
MAP(WMV3, VC1_COMPLEX, VC1Advanced ),
MAP(WMV3, VC1_ADVANCED, VC1Advanced ),
MAP(VC1, VC1_SIMPLE, VC1Simple ),
MAP(VC1, VC1_MAIN, VC1Main ),
MAP(VC1, VC1_COMPLEX, VC1Advanced ),
MAP(VC1, VC1_ADVANCED, VC1Advanced ),
MAP(VP8, UNKNOWN, VP8Version0_3 ),
#if VA_CHECK_VERSION(0, 38, 0)
MAP(VP9, VP9_0, VP9Profile0 ),
#endif
#if VA_CHECK_VERSION(0, 39, 0)
MAP(VP9, VP9_2, VP9Profile2 ),
#endif
#undef MAP
};
/*
* Set *va_config and the frames_ref fields from the current codec parameters
* in avctx.
*/
static int vaapi_decode_make_config(AVCodecContext *avctx,
AVBufferRef *device_ref,
VAConfigID *va_config,
AVBufferRef *frames_ref)
{
AVVAAPIHWConfig *hwconfig = NULL;
AVHWFramesConstraints *constraints = NULL;
VAStatus vas;
int err, i, j;
const AVCodecDescriptor *codec_desc;
VAProfile *profile_list = NULL, matched_va_profile;
int profile_count, exact_match, matched_ff_profile;
AVHWDeviceContext *device = (AVHWDeviceContext*)device_ref->data;
AVVAAPIDeviceContext *hwctx = device->hwctx;
codec_desc = avcodec_descriptor_get(avctx->codec_id);
if (!codec_desc) {
err = AVERROR(EINVAL);
goto fail;
}
profile_count = vaMaxNumProfiles(hwctx->display);
profile_list = av_malloc_array(profile_count,
sizeof(VAProfile));
if (!profile_list) {
err = AVERROR(ENOMEM);
goto fail;
}
vas = vaQueryConfigProfiles(hwctx->display,
profile_list, &profile_count);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to query profiles: "
"%d (%s).\n", vas, vaErrorStr(vas));
err = AVERROR(ENOSYS);
goto fail;
}
matched_va_profile = VAProfileNone;
exact_match = 0;
for (i = 0; i < FF_ARRAY_ELEMS(vaapi_profile_map); i++) {
int profile_match = 0;
if (avctx->codec_id != vaapi_profile_map[i].codec_id)
continue;
if (avctx->profile == vaapi_profile_map[i].codec_profile ||
vaapi_profile_map[i].codec_profile == FF_PROFILE_UNKNOWN)
profile_match = 1;
for (j = 0; j < profile_count; j++) {
if (vaapi_profile_map[i].va_profile == profile_list[j]) {
exact_match = profile_match;
break;
}
}
if (j < profile_count) {
matched_va_profile = vaapi_profile_map[i].va_profile;
matched_ff_profile = vaapi_profile_map[i].codec_profile;
if (exact_match)
break;
}
}
av_freep(&profile_list);
if (matched_va_profile == VAProfileNone) {
av_log(avctx, AV_LOG_ERROR, "No support for codec %s "
"profile %d.\n", codec_desc->name, avctx->profile);
err = AVERROR(ENOSYS);
goto fail;
}
if (!exact_match) {
if (avctx->hwaccel_flags &
AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH) {
av_log(avctx, AV_LOG_VERBOSE, "Codec %s profile %d not "
"supported for hardware decode.\n",
codec_desc->name, avctx->profile);
av_log(avctx, AV_LOG_WARNING, "Using possibly-"
"incompatible profile %d instead.\n",
matched_ff_profile);
} else {
av_log(avctx, AV_LOG_VERBOSE, "Codec %s profile %d not "
"supported for hardware decode.\n",
codec_desc->name, avctx->profile);
err = AVERROR(EINVAL);
goto fail;
}
}
vas = vaCreateConfig(hwctx->display, matched_va_profile,
VAEntrypointVLD, NULL, 0,
va_config);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to create decode "
"configuration: %d (%s).\n", vas, vaErrorStr(vas));
err = AVERROR(EIO);
goto fail;
}
hwconfig = av_hwdevice_hwconfig_alloc(device_ref);
if (!hwconfig) {
err = AVERROR(ENOMEM);
goto fail;
}
hwconfig->config_id = *va_config;
constraints =
av_hwdevice_get_hwframe_constraints(device_ref, hwconfig);
if (!constraints) {
err = AVERROR(ENOMEM);
goto fail;
}
if (avctx->coded_width < constraints->min_width ||
avctx->coded_height < constraints->min_height ||
avctx->coded_width > constraints->max_width ||
avctx->coded_height > constraints->max_height) {
av_log(avctx, AV_LOG_ERROR, "Hardware does not support image "
"size %dx%d (constraints: width %d-%d height %d-%d).\n",
avctx->coded_width, avctx->coded_height,
constraints->min_width, constraints->max_width,
constraints->min_height, constraints->max_height);
err = AVERROR(EINVAL);
goto fail;
}
if (!constraints->valid_sw_formats ||
constraints->valid_sw_formats[0] == AV_PIX_FMT_NONE) {
av_log(avctx, AV_LOG_ERROR, "Hardware does not offer any "
"usable surface formats.\n");
err = AVERROR(EINVAL);
goto fail;
}
if (frames_ref) {
AVHWFramesContext *frames = (AVHWFramesContext *)frames_ref->data;
frames->format = AV_PIX_FMT_VAAPI;
frames->width = avctx->coded_width;
frames->height = avctx->coded_height;
err = vaapi_decode_find_best_format(avctx, device,
*va_config, frames);
if (err < 0)
goto fail;
frames->initial_pool_size = 1;
// Add per-codec number of surfaces used for storing reference frames.
switch (avctx->codec_id) {
case AV_CODEC_ID_H264:
case AV_CODEC_ID_HEVC:
frames->initial_pool_size += 16;
break;
case AV_CODEC_ID_VP9:
frames->initial_pool_size += 8;
break;
case AV_CODEC_ID_VP8:
frames->initial_pool_size += 3;
break;
default:
frames->initial_pool_size += 2;
}
}
av_hwframe_constraints_free(&constraints);
av_freep(&hwconfig);
return 0;
fail:
av_hwframe_constraints_free(&constraints);
av_freep(&hwconfig);
if (*va_config != VA_INVALID_ID) {
vaDestroyConfig(hwctx->display, *va_config);
*va_config = VA_INVALID_ID;
}
av_freep(&profile_list);
return err;
}
int ff_vaapi_common_frame_params(AVCodecContext *avctx,
AVBufferRef *hw_frames_ctx)
{
AVHWFramesContext *hw_frames = (AVHWFramesContext *)hw_frames_ctx->data;
AVHWDeviceContext *device_ctx = hw_frames->device_ctx;
AVVAAPIDeviceContext *hwctx;
VAConfigID va_config = VA_INVALID_ID;
int err;
if (device_ctx->type != AV_HWDEVICE_TYPE_VAAPI)
return AVERROR(EINVAL);
hwctx = device_ctx->hwctx;
err = vaapi_decode_make_config(avctx, hw_frames->device_ref, &va_config,
hw_frames_ctx);
if (err)
return err;
if (va_config != VA_INVALID_ID)
vaDestroyConfig(hwctx->display, va_config);
return 0;
}
int ff_vaapi_decode_init(AVCodecContext *avctx)
{
VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
VAStatus vas;
int err;
ctx->va_config = VA_INVALID_ID;
ctx->va_context = VA_INVALID_ID;
#if FF_API_STRUCT_VAAPI_CONTEXT
if (avctx->hwaccel_context) {
av_log(avctx, AV_LOG_WARNING, "Using deprecated struct "
"vaapi_context in decode.\n");
ctx->have_old_context = 1;
ctx->old_context = avctx->hwaccel_context;
// Really we only want the VAAPI device context, but this
// allocates a whole generic device context because we don't
// have any other way to determine how big it should be.
ctx->device_ref =
av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VAAPI);
if (!ctx->device_ref) {
err = AVERROR(ENOMEM);
goto fail;
}
ctx->device = (AVHWDeviceContext*)ctx->device_ref->data;
ctx->hwctx = ctx->device->hwctx;
ctx->hwctx->display = ctx->old_context->display;
// The old VAAPI decode setup assumed this quirk was always
// present, so set it here to avoid the behaviour changing.
ctx->hwctx->driver_quirks =
AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS;
}
#endif
#if FF_API_STRUCT_VAAPI_CONTEXT
if (ctx->have_old_context) {
ctx->va_config = ctx->old_context->config_id;
ctx->va_context = ctx->old_context->context_id;
av_log(avctx, AV_LOG_DEBUG, "Using user-supplied decoder "
"context: %#x/%#x.\n", ctx->va_config, ctx->va_context);
} else {
#endif
err = ff_decode_get_hw_frames_ctx(avctx, AV_HWDEVICE_TYPE_VAAPI);
if (err < 0)
goto fail;
ctx->frames = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
ctx->hwfc = ctx->frames->hwctx;
ctx->device = ctx->frames->device_ctx;
ctx->hwctx = ctx->device->hwctx;
err = vaapi_decode_make_config(avctx, ctx->frames->device_ref,
&ctx->va_config, avctx->hw_frames_ctx);
if (err)
goto fail;
vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
avctx->coded_width, avctx->coded_height,
VA_PROGRESSIVE,
ctx->hwfc->surface_ids,
ctx->hwfc->nb_surfaces,
&ctx->va_context);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to create decode "
"context: %d (%s).\n", vas, vaErrorStr(vas));
err = AVERROR(EIO);
goto fail;
}
av_log(avctx, AV_LOG_DEBUG, "Decode context initialised: "
"%#x/%#x.\n", ctx->va_config, ctx->va_context);
#if FF_API_STRUCT_VAAPI_CONTEXT
}
#endif
return 0;
fail:
ff_vaapi_decode_uninit(avctx);
return err;
}
int ff_vaapi_decode_uninit(AVCodecContext *avctx)
{
VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data;
VAStatus vas;
#if FF_API_STRUCT_VAAPI_CONTEXT
if (ctx->have_old_context) {
av_buffer_unref(&ctx->device_ref);
} else {
#endif
if (ctx->va_context != VA_INVALID_ID) {
vas = vaDestroyContext(ctx->hwctx->display, ctx->va_context);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to destroy decode "
"context %#x: %d (%s).\n",
ctx->va_context, vas, vaErrorStr(vas));
}
}
if (ctx->va_config != VA_INVALID_ID) {
vas = vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
if (vas != VA_STATUS_SUCCESS) {
av_log(avctx, AV_LOG_ERROR, "Failed to destroy decode "
"configuration %#x: %d (%s).\n",
ctx->va_config, vas, vaErrorStr(vas));
}
}
#if FF_API_STRUCT_VAAPI_CONTEXT
}
#endif
return 0;
}

View file

@@ -1,105 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VAAPI_DECODE_H
#define AVCODEC_VAAPI_DECODE_H
#include <va/va.h>
#include <va/va_dec_vp9.h>
#include "libavutil/frame.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_vaapi.h"
#include "avcodec.h"
#include "version.h"
#if FF_API_STRUCT_VAAPI_CONTEXT
#include "vaapi.h"
#endif
static inline VASurfaceID ff_vaapi_get_surface_id(AVFrame *pic)
{
return (uintptr_t)pic->data[3];
}
enum {
MAX_PARAM_BUFFERS = 16,
};
typedef struct VAAPIDecodePicture {
VASurfaceID output_surface;
int nb_param_buffers;
VABufferID param_buffers[MAX_PARAM_BUFFERS];
int nb_slices;
VABufferID *slice_buffers;
int slices_allocated;
} VAAPIDecodePicture;
typedef struct VAAPIDecodeContext {
VAConfigID va_config;
VAContextID va_context;
#if FF_API_STRUCT_VAAPI_CONTEXT
FF_DISABLE_DEPRECATION_WARNINGS
int have_old_context;
struct vaapi_context *old_context;
AVBufferRef *device_ref;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
AVHWDeviceContext *device;
AVVAAPIDeviceContext *hwctx;
AVHWFramesContext *frames;
AVVAAPIFramesContext *hwfc;
enum AVPixelFormat surface_format;
int surface_count;
VASurfaceAttrib pixel_format_attribute;
} VAAPIDecodeContext;
int ff_vaapi_decode_make_param_buffer(AVCodecContext *avctx,
VAAPIDecodePicture *pic,
int type,
const void *data,
size_t size);
int ff_vaapi_decode_make_slice_buffer(AVCodecContext *avctx,
VAAPIDecodePicture *pic,
const void *params_data,
size_t params_size,
const void *slice_data,
size_t slice_size);
int ff_vaapi_decode_issue(AVCodecContext *avctx,
VAAPIDecodePicture *pic);
int ff_vaapi_decode_cancel(AVCodecContext *avctx,
VAAPIDecodePicture *pic);
int ff_vaapi_decode_init(AVCodecContext *avctx);
int ff_vaapi_decode_uninit(AVCodecContext *avctx);
int ff_vaapi_common_frame_params(AVCodecContext *avctx,
AVBufferRef *hw_frames_ctx);
#endif /* AVCODEC_VAAPI_DECODE_H */

View file

@@ -1,237 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <va/va.h>
#include <va/va_dec_vp8.h>
#include "hwaccel.h"
#include "vaapi_decode.h"
#include "vp8.h"
static VASurfaceID vaapi_vp8_surface_id(VP8Frame *vf)
{
if (vf)
return ff_vaapi_get_surface_id(vf->tf.f);
else
return VA_INVALID_SURFACE;
}
static int vaapi_vp8_start_frame(AVCodecContext *avctx,
av_unused const uint8_t *buffer,
av_unused uint32_t size)
{
const VP8Context *s = avctx->priv_data;
VAAPIDecodePicture *pic = s->framep[VP56_FRAME_CURRENT]->hwaccel_picture_private;
VAPictureParameterBufferVP8 pp;
VAProbabilityDataBufferVP8 prob;
VAIQMatrixBufferVP8 quant;
int err, i, j, k;
pic->output_surface = vaapi_vp8_surface_id(s->framep[VP56_FRAME_CURRENT]);
pp = (VAPictureParameterBufferVP8) {
.frame_width = avctx->width,
.frame_height = avctx->height,
.last_ref_frame = vaapi_vp8_surface_id(s->framep[VP56_FRAME_PREVIOUS]),
.golden_ref_frame = vaapi_vp8_surface_id(s->framep[VP56_FRAME_GOLDEN]),
.alt_ref_frame = vaapi_vp8_surface_id(s->framep[VP56_FRAME_GOLDEN2]),
.out_of_loop_frame = VA_INVALID_SURFACE,
.pic_fields.bits = {
.key_frame = !s->keyframe,
.version = s->profile,
.segmentation_enabled = s->segmentation.enabled,
.update_mb_segmentation_map = s->segmentation.update_map,
.update_segment_feature_data = s->segmentation.update_feature_data,
.filter_type = s->filter.simple,
.sharpness_level = s->filter.sharpness,
.loop_filter_adj_enable = s->lf_delta.enabled,
.mode_ref_lf_delta_update = s->lf_delta.update,
.sign_bias_golden = s->sign_bias[VP56_FRAME_GOLDEN],
.sign_bias_alternate = s->sign_bias[VP56_FRAME_GOLDEN2],
.mb_no_coeff_skip = s->mbskip_enabled,
.loop_filter_disable = s->filter.level == 0,
},
.prob_skip_false = s->prob->mbskip,
.prob_intra = s->prob->intra,
.prob_last = s->prob->last,
.prob_gf = s->prob->golden,
};
for (i = 0; i < 3; i++)
pp.mb_segment_tree_probs[i] = s->prob->segmentid[i];
for (i = 0; i < 4; i++) {
if (s->segmentation.enabled) {
pp.loop_filter_level[i] = s->segmentation.filter_level[i];
if (!s->segmentation.absolute_vals)
pp.loop_filter_level[i] += s->filter.level;
} else {
pp.loop_filter_level[i] = s->filter.level;
}
pp.loop_filter_level[i] = av_clip_uintp2(pp.loop_filter_level[i], 6);
}
for (i = 0; i < 4; i++) {
pp.loop_filter_deltas_ref_frame[i] = s->lf_delta.ref[i];
pp.loop_filter_deltas_mode[i] = s->lf_delta.mode[i + 4];
}
if (s->keyframe) {
static const uint8_t keyframe_y_mode_probs[4] = {
145, 156, 163, 128
};
static const uint8_t keyframe_uv_mode_probs[3] = {
142, 114, 183
};
memcpy(pp.y_mode_probs, keyframe_y_mode_probs, 4);
memcpy(pp.uv_mode_probs, keyframe_uv_mode_probs, 3);
} else {
for (i = 0; i < 4; i++)
pp.y_mode_probs[i] = s->prob->pred16x16[i];
for (i = 0; i < 3; i++)
pp.uv_mode_probs[i] = s->prob->pred8x8c[i];
}
for (i = 0; i < 2; i++)
for (j = 0; j < 19; j++)
pp.mv_probs[i][j] = s->prob->mvc[i][j];
pp.bool_coder_ctx.range = s->coder_state_at_header_end.range;
pp.bool_coder_ctx.value = s->coder_state_at_header_end.value;
pp.bool_coder_ctx.count = s->coder_state_at_header_end.bit_count;
err = ff_vaapi_decode_make_param_buffer(avctx, pic,
VAPictureParameterBufferType,
&pp, sizeof(pp));
if (err < 0)
goto fail;
for (i = 0; i < 4; i++) {
for (j = 0; j < 8; j++) {
static const int coeff_bands_inverse[8] = {
0, 1, 2, 3, 5, 6, 4, 15
};
int coeff_pos = coeff_bands_inverse[j];
for (k = 0; k < 3; k++) {
memcpy(prob.dct_coeff_probs[i][j][k],
s->prob->token[i][coeff_pos][k], 11);
}
}
}
err = ff_vaapi_decode_make_param_buffer(avctx, pic,
VAProbabilityBufferType,
&prob, sizeof(prob));
if (err < 0)
goto fail;
for (i = 0; i < 4; i++) {
int base_qi = s->segmentation.base_quant[i];
if (!s->segmentation.absolute_vals)
base_qi += s->quant.yac_qi;
quant.quantization_index[i][0] = av_clip_uintp2(base_qi, 7);
quant.quantization_index[i][1] = av_clip_uintp2(base_qi + s->quant.ydc_delta, 7);
quant.quantization_index[i][2] = av_clip_uintp2(base_qi + s->quant.y2dc_delta, 7);
quant.quantization_index[i][3] = av_clip_uintp2(base_qi + s->quant.y2ac_delta, 7);
quant.quantization_index[i][4] = av_clip_uintp2(base_qi + s->quant.uvdc_delta, 7);
quant.quantization_index[i][5] = av_clip_uintp2(base_qi + s->quant.uvac_delta, 7);
}
err = ff_vaapi_decode_make_param_buffer(avctx, pic,
VAIQMatrixBufferType,
&quant, sizeof(quant));
if (err < 0)
goto fail;
return 0;
fail:
ff_vaapi_decode_cancel(avctx, pic);
return err;
}
static int vaapi_vp8_end_frame(AVCodecContext *avctx)
{
const VP8Context *s = avctx->priv_data;
VAAPIDecodePicture *pic = s->framep[VP56_FRAME_CURRENT]->hwaccel_picture_private;
return ff_vaapi_decode_issue(avctx, pic);
}
static int vaapi_vp8_decode_slice(AVCodecContext *avctx,
const uint8_t *buffer,
uint32_t size)
{
const VP8Context *s = avctx->priv_data;
VAAPIDecodePicture *pic = s->framep[VP56_FRAME_CURRENT]->hwaccel_picture_private;
VASliceParameterBufferVP8 sp;
int err, i;
unsigned int header_size = 3 + 7 * s->keyframe;
const uint8_t *data = buffer + header_size;
unsigned int data_size = size - header_size;
sp = (VASliceParameterBufferVP8) {
.slice_data_size = data_size,
.slice_data_offset = 0,
.slice_data_flag = VA_SLICE_DATA_FLAG_ALL,
.macroblock_offset = (8 * (s->coder_state_at_header_end.input - data) -
s->coder_state_at_header_end.bit_count - 8),
.num_of_partitions = s->num_coeff_partitions + 1,
};
sp.partition_size[0] = s->header_partition_size - ((sp.macroblock_offset + 7) / 8);
for (i = 0; i < 8; i++)
sp.partition_size[i+1] = s->coeff_partition_size[i];
err = ff_vaapi_decode_make_slice_buffer(avctx, pic, &sp, sizeof(sp), data, data_size);
if (err)
goto fail;
return 0;
fail:
ff_vaapi_decode_cancel(avctx, pic);
return err;
}
const AVHWAccel ff_vp8_vaapi_hwaccel = {
.name = "vp8_vaapi",
.type = AVMEDIA_TYPE_VIDEO,
.id = AV_CODEC_ID_VP8,
.pix_fmt = AV_PIX_FMT_VAAPI,
.start_frame = &vaapi_vp8_start_frame,
.end_frame = &vaapi_vp8_end_frame,
.decode_slice = &vaapi_vp8_decode_slice,
.frame_priv_data_size = sizeof(VAAPIDecodePicture),
.init = &ff_vaapi_decode_init,
.uninit = &ff_vaapi_decode_uninit,
.frame_params = &ff_vaapi_common_frame_params,
.priv_data_size = sizeof(VAAPIDecodeContext),
.caps_internal = HWACCEL_CAP_ASYNC_SAFE,
};

View file

@@ -1,185 +0,0 @@
/*
* VP9 HW decode acceleration through VA API
*
* Copyright (C) 2015 Timo Rothenpieler <timo@rothenpieler.org>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/pixdesc.h"
#include "hwaccel.h"
#include "vaapi_decode.h"
#include "vp9shared.h"
static VASurfaceID vaapi_vp9_surface_id(const VP9Frame *vf)
{
if (vf)
return ff_vaapi_get_surface_id(vf->tf.f);
else
return VA_INVALID_SURFACE;
}
static int vaapi_vp9_start_frame(AVCodecContext *avctx,
av_unused const uint8_t *buffer,
av_unused uint32_t size)
{
const VP9SharedContext *h = avctx->priv_data;
VAAPIDecodePicture *pic = h->frames[CUR_FRAME].hwaccel_picture_private;
VADecPictureParameterBufferVP9 pic_param;
const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
int err, i;
pic->output_surface = vaapi_vp9_surface_id(&h->frames[CUR_FRAME]);
pic_param = (VADecPictureParameterBufferVP9) {
.frame_width = avctx->width,
.frame_height = avctx->height,
.pic_fields.bits = {
.subsampling_x = pixdesc->log2_chroma_w,
.subsampling_y = pixdesc->log2_chroma_h,
.frame_type = !h->h.keyframe,
.show_frame = !h->h.invisible,
.error_resilient_mode = h->h.errorres,
.intra_only = h->h.intraonly,
.allow_high_precision_mv = h->h.keyframe ? 0 : h->h.highprecisionmvs,
.mcomp_filter_type = h->h.filtermode ^ (h->h.filtermode <= 1),
.frame_parallel_decoding_mode = h->h.parallelmode,
.reset_frame_context = h->h.resetctx,
.refresh_frame_context = h->h.refreshctx,
.frame_context_idx = h->h.framectxid,
.segmentation_enabled = h->h.segmentation.enabled,
.segmentation_temporal_update = h->h.segmentation.temporal,
.segmentation_update_map = h->h.segmentation.update_map,
.last_ref_frame = h->h.refidx[0],
.last_ref_frame_sign_bias = h->h.signbias[0],
.golden_ref_frame = h->h.refidx[1],
.golden_ref_frame_sign_bias = h->h.signbias[1],
.alt_ref_frame = h->h.refidx[2],
.alt_ref_frame_sign_bias = h->h.signbias[2],
.lossless_flag = h->h.lossless,
},
.filter_level = h->h.filter.level,
.sharpness_level = h->h.filter.sharpness,
.log2_tile_rows = h->h.tiling.log2_tile_rows,
.log2_tile_columns = h->h.tiling.log2_tile_cols,
.frame_header_length_in_bytes = h->h.uncompressed_header_size,
.first_partition_size = h->h.compressed_header_size,
.profile = h->h.profile,
.bit_depth = h->h.bpp,
};
for (i = 0; i < 7; i++)
pic_param.mb_segment_tree_probs[i] = h->h.segmentation.prob[i];
if (h->h.segmentation.temporal) {
for (i = 0; i < 3; i++)
pic_param.segment_pred_probs[i] = h->h.segmentation.pred_prob[i];
} else {
memset(pic_param.segment_pred_probs, 255, sizeof(pic_param.segment_pred_probs));
}
for (i = 0; i < 8; i++) {
if (h->refs[i].f->buf[0])
pic_param.reference_frames[i] = ff_vaapi_get_surface_id(h->refs[i].f);
else
pic_param.reference_frames[i] = VA_INVALID_ID;
}
err = ff_vaapi_decode_make_param_buffer(avctx, pic,
VAPictureParameterBufferType,
&pic_param, sizeof(pic_param));
if (err < 0) {
ff_vaapi_decode_cancel(avctx, pic);
return err;
}
return 0;
}
static int vaapi_vp9_end_frame(AVCodecContext *avctx)
{
const VP9SharedContext *h = avctx->priv_data;
VAAPIDecodePicture *pic = h->frames[CUR_FRAME].hwaccel_picture_private;
return ff_vaapi_decode_issue(avctx, pic);
}
static int vaapi_vp9_decode_slice(AVCodecContext *avctx,
const uint8_t *buffer,
uint32_t size)
{
const VP9SharedContext *h = avctx->priv_data;
VAAPIDecodePicture *pic = h->frames[CUR_FRAME].hwaccel_picture_private;
VASliceParameterBufferVP9 slice_param;
int err, i;
slice_param = (VASliceParameterBufferVP9) {
.slice_data_size = size,
.slice_data_offset = 0,
.slice_data_flag = VA_SLICE_DATA_FLAG_ALL,
};
for (i = 0; i < 8; i++) {
slice_param.seg_param[i] = (VASegmentParameterVP9) {
.segment_flags.fields = {
.segment_reference_enabled = h->h.segmentation.feat[i].ref_enabled,
.segment_reference = h->h.segmentation.feat[i].ref_val,
.segment_reference_skipped = h->h.segmentation.feat[i].skip_enabled,
},
.luma_dc_quant_scale = h->h.segmentation.feat[i].qmul[0][0],
.luma_ac_quant_scale = h->h.segmentation.feat[i].qmul[0][1],
.chroma_dc_quant_scale = h->h.segmentation.feat[i].qmul[1][0],
.chroma_ac_quant_scale = h->h.segmentation.feat[i].qmul[1][1],
};
memcpy(slice_param.seg_param[i].filter_level, h->h.segmentation.feat[i].lflvl, sizeof(slice_param.seg_param[i].filter_level));
}
err = ff_vaapi_decode_make_slice_buffer(avctx, pic,
&slice_param, sizeof(slice_param),
buffer, size);
if (err) {
ff_vaapi_decode_cancel(avctx, pic);
return err;
}
return 0;
}
const AVHWAccel ff_vp9_vaapi_hwaccel = {
.name = "vp9_vaapi",
.type = AVMEDIA_TYPE_VIDEO,
.id = AV_CODEC_ID_VP9,
.pix_fmt = AV_PIX_FMT_VAAPI,
.start_frame = vaapi_vp9_start_frame,
.end_frame = vaapi_vp9_end_frame,
.decode_slice = vaapi_vp9_decode_slice,
.frame_priv_data_size = sizeof(VAAPIDecodePicture),
.init = ff_vaapi_decode_init,
.uninit = ff_vaapi_decode_uninit,
.frame_params = ff_vaapi_common_frame_params,
.priv_data_size = sizeof(VAAPIDecodeContext),
.caps_internal = HWACCEL_CAP_ASYNC_SAFE,
};

View file

@@ -158,9 +158,6 @@ av_get_token
av_gettime
av_gettime_relative
av_gettime_relative_is_monotonic
av_hwdevice_get_hwframe_constraints
av_hwdevice_hwconfig_alloc
av_hwframe_constraints_free
av_hwframe_get_buffer
av_image_alloc
av_image_check_sar
@@ -323,9 +320,5 @@ avpriv_slicethread_free
av_hwdevice_get_type_name
av_hwframe_ctx_alloc
av_hwframe_ctx_init
av_hwdevice_ctx_alloc
av_hwdevice_ctx_init
av_hwframe_transfer_get_formats
av_hwdevice_ctx_create_derived
av_malloc_array
av_mallocz_array

File diff not shown because it is too large

View file

@@ -1,117 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_HWCONTEXT_VAAPI_H
#define AVUTIL_HWCONTEXT_VAAPI_H
#include <va/va.h>
/**
* @file
* API-specific header for AV_HWDEVICE_TYPE_VAAPI.
*
* Dynamic frame pools are supported, but note that any pool used as a render
* target is required to be of fixed size in order to be be usable as an
* argument to vaCreateContext().
*
* For user-allocated pools, AVHWFramesContext.pool must return AVBufferRefs
* with the data pointer set to a VASurfaceID.
*/
enum {
/**
* The quirks field has been set by the user and should not be detected
* automatically by av_hwdevice_ctx_init().
*/
AV_VAAPI_DRIVER_QUIRK_USER_SET = (1 << 0),
/**
* The driver does not destroy parameter buffers when they are used by
* vaRenderPicture(). Additional code will be required to destroy them
* separately afterwards.
*/
AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS = (1 << 1),
/**
* The driver does not support the VASurfaceAttribMemoryType attribute,
* so the surface allocation code will not try to use it.
*/
AV_VAAPI_DRIVER_QUIRK_ATTRIB_MEMTYPE = (1 << 2),
/**
* The driver does not support surface attributes at all.
* The surface allocation code will never pass them to surface allocation,
* and the results of the vaQuerySurfaceAttributes() call will be faked.
*/
AV_VAAPI_DRIVER_QUIRK_SURFACE_ATTRIBUTES = (1 << 3),
};
/**
* VAAPI connection details.
*
* Allocated as AVHWDeviceContext.hwctx
*/
typedef struct AVVAAPIDeviceContext {
/**
* The VADisplay handle, to be filled by the user.
*/
VADisplay display;
/**
* Driver quirks to apply - this is filled by av_hwdevice_ctx_init(),
* with reference to a table of known drivers, unless the
* AV_VAAPI_DRIVER_QUIRK_USER_SET bit is already present. The user
* may need to refer to this field when performing any later
* operations using VAAPI with the same VADisplay.
*/
unsigned int driver_quirks;
} AVVAAPIDeviceContext;
/**
* VAAPI-specific data associated with a frame pool.
*
* Allocated as AVHWFramesContext.hwctx.
*/
typedef struct AVVAAPIFramesContext {
/**
* Set by the user to apply surface attributes to all surfaces in
* the frame pool. If null, default settings are used.
*/
VASurfaceAttrib *attributes;
int nb_attributes;
/**
* The surfaces IDs of all surfaces in the pool after creation.
* Only valid if AVHWFramesContext.initial_pool_size was positive.
* These are intended to be used as the render_targets arguments to
* vaCreateContext().
*/
VASurfaceID *surface_ids;
int nb_surfaces;
} AVVAAPIFramesContext;
/**
* VAAPI hardware pipeline configuration details.
*
* Allocated with av_hwdevice_hwconfig_alloc().
*/
typedef struct AVVAAPIHWConfig {
/**
* ID of a VAAPI pipeline configuration.
*/
VAConfigID config_id;
} AVVAAPIHWConfig;
#endif /* AVUTIL_HWCONTEXT_VAAPI_H */
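The comments in the removed header above spell out the modern wiring: AVHWDeviceContext.hwctx is an AVVAAPIDeviceContext whose display member the caller fills in before av_hwdevice_ctx_init() detects the driver quirks. A minimal illustrative sketch of that flow, assuming the VADisplay comes from the caller (the helper name is hypothetical):

#include <va/va.h>
#include <libavutil/buffer.h>
#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_vaapi.h>

/* Hypothetical sketch: wrap an existing VADisplay in an FFmpeg VAAPI device context. */
static AVBufferRef *wrap_va_display(VADisplay display)
{
    AVBufferRef *device_ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VAAPI);
    if (!device_ref)
        return NULL;
    AVHWDeviceContext *device   = (AVHWDeviceContext *)device_ref->data;
    AVVAAPIDeviceContext *hwctx = device->hwctx; /* allocated as AVHWDeviceContext.hwctx */
    hwctx->display = display;                    /* the VADisplay handle, filled by the user */
    if (av_hwdevice_ctx_init(device_ref) < 0) {  /* fills driver_quirks from the known-driver table */
        av_buffer_unref(&device_ref);
        return NULL;
    }
    return device_ref;
}

This is the same kind of device context that FFmpegVideoDecoder<LIBAV_VER>::CreateVAAPIDeviceContext() manipulates in the hunks above.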

View file

@@ -58,11 +58,6 @@ if not CONFIG['MOZ_FFVPX_AUDIOONLY']:
'threadmessage.c',
'timecode.c'
]
if CONFIG['MOZ_WAYLAND']:
SOURCES += [
'hwcontext_vaapi.c',
]
USE_LIBS += ['mozva']
SYMBOLS_FILE = 'avutil.symbols'
NoVisibilityFlags()

View file

@@ -1,83 +0,0 @@
diff --git a/media/ffvpx/config_common.h b/media/ffvpx/config_common.h
--- a/media/ffvpx/config_common.h
+++ b/media/ffvpx/config_common.h
@@ -18,4 +18,14 @@
#define CONFIG_RDFT 1
#endif
+#ifdef MOZ_WAYLAND
+#define CONFIG_VAAPI 1
+#define CONFIG_VP8_VAAPI_HWACCEL 1
+#define CONFIG_VP9_VAAPI_HWACCEL 1
+#else
+#define CONFIG_VAAPI 0
+#define CONFIG_VP8_VAAPI_HWACCEL 0
+#define CONFIG_VP9_VAAPI_HWACCEL 0
#endif
+
+#endif
diff --git a/media/ffvpx/config_unix32.h b/media/ffvpx/config_unix32.h
--- a/media/ffvpx/config_unix32.h
+++ b/media/ffvpx/config_unix32.h
@@ -524,7 +524,6 @@
#define CONFIG_FFNVCODEC 0
#define CONFIG_NVDEC 0
#define CONFIG_NVENC 0
-#define CONFIG_VAAPI 0
#define CONFIG_VDPAU 0
#define CONFIG_VIDEOTOOLBOX 0
#define CONFIG_V4L2_M2M 0
diff --git a/media/ffvpx/config_unix64.h b/media/ffvpx/config_unix64.h
--- a/media/ffvpx/config_unix64.h
+++ b/media/ffvpx/config_unix64.h
@@ -524,7 +524,6 @@
#define CONFIG_FFNVCODEC 0
#define CONFIG_NVDEC 0
#define CONFIG_NVENC 0
-#define CONFIG_VAAPI 0
#define CONFIG_VDPAU 0
#define CONFIG_VIDEOTOOLBOX 0
#define CONFIG_V4L2_M2M 1
diff --git a/media/ffvpx/defaults_disabled.h b/media/ffvpx/defaults_disabled.h
--- a/media/ffvpx/defaults_disabled.h
+++ b/media/ffvpx/defaults_disabled.h
@@ -1706,7 +1706,6 @@
#define CONFIG_VP8_V4L2M2M_DECODER 0
#define CONFIG_VP8_V4L2M2M_ENCODER 0
#define CONFIG_VP8_VAAPI_ENCODER 0
-#define CONFIG_VP8_VAAPI_HWACCEL 0
#define CONFIG_VP9_CUVID_DECODER 0
#define CONFIG_VP9_D3D11VA2_HWACCEL 0
#define CONFIG_VP9_D3D11VA_HWACCEL 0
@@ -1719,7 +1718,6 @@
#define CONFIG_VP9_SUPERFRAME_BSF 0
#define CONFIG_VP9_V4L2M2M_DECODER 0
#define CONFIG_VP9_VAAPI_ENCODER 0
-#define CONFIG_VP9_VAAPI_HWACCEL 0
#define CONFIG_VPK_DEMUXER 0
#define CONFIG_VPLAYER_DECODER 0
#define CONFIG_VPLAYER_DEMUXER 0
diff --git a/media/ffvpx/libavutil/hwcontext_vaapi.c b/media/ffvpx/libavutil/hwcontext_vaapi.c
--- a/media/ffvpx/libavutil/hwcontext_vaapi.c
+++ b/media/ffvpx/libavutil/hwcontext_vaapi.c
@@ -39,17 +39,19 @@
# include <unistd.h>
#endif
#include "avassert.h"
#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
+#if CONFIG_LIBDRM
#include "hwcontext_drm.h"
+#endif
#include "hwcontext_internal.h"
#include "hwcontext_vaapi.h"
#include "mem.h"
#include "pixdesc.h"
#include "pixfmt.h"
typedef struct VAAPIDevicePriv {


@ -11,8 +11,3 @@ DIRS += [
'libavutil',
'libavcodec'
]
if CONFIG['MOZ_WAYLAND']:
DIRS += [
'mozva',
]


@ -1,13 +0,0 @@
# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
# vim: set filetype=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
SOURCES += [
'mozva.cpp',
]
LOCAL_INCLUDES += ['/media/ffvpx']
SharedLibrary('mozva')


@ -1,477 +0,0 @@
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:expandtab:shiftwidth=4:tabstop=4:
*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#pragma GCC visibility push(default)
#include <va/va.h>
#pragma GCC visibility pop
#include "mozilla/Types.h"
#include "mozilla/ScopeExit.h"
#include <dlfcn.h>
#include <pthread.h>
#include <stdlib.h>
#define GET_FUNC(func, lib) \
func##Fn = \
(decltype(func##Fn))dlsym(lib, #func) \
#define IS_FUNC_LOADED(func) \
(func##Fn != nullptr) \
static VAStatus (*vaDestroyBufferFn)(VADisplay dpy, VABufferID buffer_id);
static VAStatus (*vaBeginPictureFn)(VADisplay dpy, VAContextID context,
VASurfaceID render_target);
static VAStatus (*vaEndPictureFn)(VADisplay dpy, VAContextID context);
static VAStatus (*vaRenderPictureFn)(VADisplay dpy, VAContextID context,
VABufferID *buffers, int num_buffers);
static int (*vaMaxNumProfilesFn)(VADisplay dpy);
static VAStatus (*vaCreateContextFn)(VADisplay dpy, VAConfigID config_id,
int picture_width, int picture_height,
int flag, VASurfaceID *render_targets,
int num_render_targets,
VAContextID *context /* out */);
static VAStatus (*vaDestroyContextFn)( VADisplay dpy, VAContextID context);
static VAStatus (*vaCreateBufferFn)(VADisplay dpy, VAContextID context,
VABufferType type, /* in */
unsigned int size, /* in */
unsigned int num_elements, /* in */
void *data, /* in */
VABufferID *buf_id /* out */);
static VAStatus (*vaQuerySurfaceAttributesFn)(VADisplay dpy,
VAConfigID config,
VASurfaceAttrib *attrib_list,
unsigned int *num_attribs);
static VAStatus (*vaQueryConfigProfilesFn)(VADisplay dpy,
VAProfile *profile_list, /* out */
int *num_profiles /* out */);
static const char* (*vaErrorStrFn)(VAStatus error_status);
static VAStatus (*vaCreateConfigFn)(VADisplay dpy, VAProfile profile,
VAEntrypoint entrypoint,
VAConfigAttrib *attrib_list,
int num_attribs,
VAConfigID *config_id /* out */);
static VAStatus (*vaDestroyConfigFn)(VADisplay dpy, VAConfigID config_id);
static int (*vaMaxNumImageFormatsFn)(VADisplay dpy);
static VAStatus (*vaQueryImageFormatsFn)(VADisplay dpy,
VAImageFormat *format_list, /* out */
int *num_formats /* out */);
static const char * (*vaQueryVendorStringFn)(VADisplay dpy);
static VAStatus (*vaDestroySurfacesFn)(VADisplay dpy, VASurfaceID *surfaces,
int num_surfaces);
static VAStatus (*vaCreateSurfacesFn)(VADisplay dpy,
unsigned int format,
unsigned int width,
unsigned int height,
VASurfaceID *surfaces,
unsigned int num_surfaces,
VASurfaceAttrib *attrib_list,
unsigned int num_attribs);
static VAStatus (*vaDeriveImageFn)(VADisplay dpy, VASurfaceID surface,
VAImage *image /* out */);
static VAStatus (*vaDestroyImageFn)(VADisplay dpy, VAImageID image);
static VAStatus (*vaPutImageFn)(VADisplay dpy,
VASurfaceID surface,
VAImageID image,
int src_x,
int src_y,
unsigned int src_width,
unsigned int src_height,
int dest_x,
int dest_y,
unsigned int dest_width,
unsigned int dest_height);
static VAStatus (*vaSyncSurfaceFn)(VADisplay dpy, VASurfaceID render_target);
static VAStatus (*vaCreateImageFn)(VADisplay dpy,
VAImageFormat *format,
int width,
int height,
VAImage *image /* out */);
static VAStatus (*vaGetImageFn)(VADisplay dpy,
VASurfaceID surface,
int x, /* coordinates of the upper left source pixel */
int y,
unsigned int width, /* width and height of the region */
unsigned int height,
VAImageID image);
static VAStatus (*vaMapBufferFn)(VADisplay dpy,
VABufferID buf_id, /* in */
void **pbuf /* out */);
static VAStatus (*vaUnmapBufferFn)(VADisplay dpy,
VABufferID buf_id /* in */);
static VAStatus (*vaTerminateFn)(VADisplay dpy);
static VAStatus (*vaInitializeFn)(VADisplay dpy,
int *major_version, /* out */
int *minor_version /* out */);
static VAStatus (*vaSetDriverNameFn)(VADisplay dpy, char *driver_name);
bool LoadVALibrary() {
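  // The function-local static below is initialized by an immediately invoked
  // lambda, so dlopen() and the dlsym() lookups run exactly once (thread-safe
  // since C++11); later calls simply return the cached result.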
static bool vaLibraryLoaded = []() {
static void* sVALib = dlopen("libva.so.2", RTLD_LAZY);
if (!sVALib) {
return false;
}
GET_FUNC(vaDestroyBuffer, sVALib);
GET_FUNC(vaBeginPicture, sVALib);
GET_FUNC(vaEndPicture, sVALib);
GET_FUNC(vaRenderPicture, sVALib);
GET_FUNC(vaMaxNumProfiles, sVALib);
GET_FUNC(vaCreateContext, sVALib);
GET_FUNC(vaDestroyContext, sVALib);
GET_FUNC(vaCreateBuffer, sVALib);
GET_FUNC(vaQuerySurfaceAttributes, sVALib);
GET_FUNC(vaQueryConfigProfiles, sVALib);
GET_FUNC(vaErrorStr, sVALib);
GET_FUNC(vaCreateConfig, sVALib);
GET_FUNC(vaDestroyConfig, sVALib);
GET_FUNC(vaMaxNumImageFormats, sVALib);
GET_FUNC(vaQueryImageFormats, sVALib);
GET_FUNC(vaQueryVendorString, sVALib);
GET_FUNC(vaDestroySurfaces, sVALib);
GET_FUNC(vaCreateSurfaces, sVALib);
GET_FUNC(vaDeriveImage, sVALib);
GET_FUNC(vaDestroyImage, sVALib);
GET_FUNC(vaPutImage, sVALib);
GET_FUNC(vaSyncSurface, sVALib);
GET_FUNC(vaCreateImage, sVALib);
GET_FUNC(vaGetImage, sVALib);
GET_FUNC(vaMapBuffer, sVALib);
GET_FUNC(vaUnmapBuffer, sVALib);
GET_FUNC(vaTerminate, sVALib);
GET_FUNC(vaInitialize, sVALib);
GET_FUNC(vaSetDriverName, sVALib);
return (IS_FUNC_LOADED(vaDestroyBuffer) &&
IS_FUNC_LOADED(vaBeginPicture) &&
IS_FUNC_LOADED(vaEndPicture) &&
IS_FUNC_LOADED(vaRenderPicture) &&
IS_FUNC_LOADED(vaMaxNumProfiles) &&
IS_FUNC_LOADED(vaCreateContext) &&
IS_FUNC_LOADED(vaDestroyContext) &&
IS_FUNC_LOADED(vaCreateBuffer) &&
IS_FUNC_LOADED(vaQuerySurfaceAttributes) &&
IS_FUNC_LOADED(vaQueryConfigProfiles) &&
IS_FUNC_LOADED(vaErrorStr) &&
IS_FUNC_LOADED(vaCreateConfig) &&
IS_FUNC_LOADED(vaDestroyConfig) &&
IS_FUNC_LOADED(vaMaxNumImageFormats) &&
IS_FUNC_LOADED(vaQueryImageFormats) &&
IS_FUNC_LOADED(vaQueryVendorString) &&
IS_FUNC_LOADED(vaDestroySurfaces) &&
IS_FUNC_LOADED(vaCreateSurfaces) &&
IS_FUNC_LOADED(vaDeriveImage) &&
IS_FUNC_LOADED(vaDestroyImage) &&
IS_FUNC_LOADED(vaPutImage) &&
IS_FUNC_LOADED(vaSyncSurface) &&
IS_FUNC_LOADED(vaCreateImage) &&
IS_FUNC_LOADED(vaGetImage) &&
IS_FUNC_LOADED(vaMapBuffer) &&
IS_FUNC_LOADED(vaUnmapBuffer) &&
IS_FUNC_LOADED(vaTerminate) &&
IS_FUNC_LOADED(vaInitialize) &&
IS_FUNC_LOADED(vaSetDriverName));
}();
return vaLibraryLoaded;
}
#pragma GCC visibility push(default)
VAStatus vaDestroyBuffer(VADisplay dpy, VABufferID buffer_id)
{
if (LoadVALibrary()) {
return vaDestroyBufferFn(dpy, buffer_id);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaBeginPicture(VADisplay dpy, VAContextID context,
VASurfaceID render_target)
{
if (LoadVALibrary()) {
return vaBeginPictureFn(dpy, context, render_target);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaEndPicture(VADisplay dpy, VAContextID context)
{
if (LoadVALibrary()) {
return vaEndPictureFn(dpy, context);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaRenderPicture(VADisplay dpy, VAContextID context,
VABufferID *buffers, int num_buffers)
{
if (LoadVALibrary()) {
return vaRenderPictureFn(dpy, context, buffers, num_buffers);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
int vaMaxNumProfiles(VADisplay dpy)
{
if (LoadVALibrary()) {
return vaMaxNumProfilesFn(dpy);
}
return 0;
}
VAStatus vaCreateContext(VADisplay dpy, VAConfigID config_id,
int picture_width, int picture_height,
int flag, VASurfaceID *render_targets,
int num_render_targets,
VAContextID *context /* out */)
{
if (LoadVALibrary()) {
return vaCreateContextFn(dpy, config_id, picture_width, picture_height,
flag, render_targets, num_render_targets,
context);
}
*context = 0;
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaDestroyContext( VADisplay dpy, VAContextID context)
{
if (LoadVALibrary()) {
return vaDestroyContextFn(dpy, context);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaCreateBuffer(VADisplay dpy, VAContextID context,
VABufferType type, /* in */
unsigned int size, /* in */
unsigned int num_elements, /* in */
void *data, /* in */
VABufferID *buf_id /* out */)
{
if (LoadVALibrary()) {
return vaCreateBufferFn(dpy, context, type, size, num_elements,
data, buf_id);
}
*buf_id = 0;
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaQuerySurfaceAttributes(VADisplay dpy,
VAConfigID config,
VASurfaceAttrib *attrib_list,
unsigned int *num_attribs)
{
if (LoadVALibrary()) {
return vaQuerySurfaceAttributesFn(dpy, config, attrib_list, num_attribs);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaQueryConfigProfiles(VADisplay dpy,
VAProfile *profile_list, /* out */
int *num_profiles /* out */)
{
if (LoadVALibrary()) {
return vaQueryConfigProfilesFn(dpy, profile_list, num_profiles);
}
*num_profiles = 0;
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
const char *vaErrorStr(VAStatus error_status)
{
if (LoadVALibrary()) {
return vaErrorStrFn(error_status);
}
static char tmp[] = "Unimplemented";
return tmp;
}
VAStatus vaCreateConfig(VADisplay dpy, VAProfile profile,
VAEntrypoint entrypoint,
VAConfigAttrib *attrib_list,
int num_attribs,
VAConfigID *config_id /* out */)
{
if (LoadVALibrary()) {
return vaCreateConfigFn(dpy, profile, entrypoint, attrib_list, num_attribs,
config_id);
}
*config_id = 0;
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaDestroyConfig(VADisplay dpy, VAConfigID config_id)
{
if (LoadVALibrary()) {
return vaDestroyConfigFn(dpy, config_id);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
int vaMaxNumImageFormats(VADisplay dpy)
{
if (LoadVALibrary()) {
return vaMaxNumImageFormatsFn(dpy);
}
return 0;
}
VAStatus vaQueryImageFormats(VADisplay dpy,
VAImageFormat *format_list,
int *num_formats)
{
if (LoadVALibrary()) {
return vaQueryImageFormatsFn(dpy, format_list, num_formats);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
const char * vaQueryVendorString(VADisplay dpy)
{
if (LoadVALibrary()) {
return vaQueryVendorStringFn(dpy);
}
return nullptr;
}
VAStatus vaDestroySurfaces(VADisplay dpy, VASurfaceID *surfaces,
int num_surfaces)
{
if (LoadVALibrary()) {
return vaDestroySurfacesFn(dpy, surfaces, num_surfaces);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaCreateSurfaces(VADisplay dpy,
unsigned int format,
unsigned int width,
unsigned int height,
VASurfaceID *surfaces,
unsigned int num_surfaces,
VASurfaceAttrib *attrib_list,
unsigned int num_attribs)
{
if (LoadVALibrary()) {
return vaCreateSurfacesFn(dpy, format, width, height,
surfaces, num_surfaces, attrib_list, num_attribs);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaDeriveImage(VADisplay dpy, VASurfaceID surface,
VAImage *image /* out */)
{
if (LoadVALibrary()) {
return vaDeriveImageFn(dpy, surface, image);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaDestroyImage(VADisplay dpy, VAImageID image)
{
if (LoadVALibrary()) {
return vaDestroyImageFn(dpy, image);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaPutImage(VADisplay dpy,
VASurfaceID surface,
VAImageID image,
int src_x,
int src_y,
unsigned int src_width,
unsigned int src_height,
int dest_x,
int dest_y,
unsigned int dest_width,
unsigned int dest_height)
{
if (LoadVALibrary()) {
return vaPutImageFn(dpy, surface, image, src_x, src_y, src_width, src_height,
dest_x, dest_y, dest_width, dest_height);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaSyncSurface(VADisplay dpy, VASurfaceID render_target)
{
if (LoadVALibrary()) {
return vaSyncSurfaceFn(dpy, render_target);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaCreateImage(VADisplay dpy, VAImageFormat *format,
int width, int height, VAImage *image /* out */)
{
if (LoadVALibrary()) {
return vaCreateImageFn(dpy, format, width, height, image);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaGetImage(VADisplay dpy,
VASurfaceID surface,
int x, /* coordinates of the upper left source pixel */
int y,
unsigned int width, /* width and height of the region */
unsigned int height,
VAImageID image)
{
if (LoadVALibrary()) {
return vaGetImageFn(dpy, surface, x, y, width, height, image);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaMapBuffer(VADisplay dpy,
VABufferID buf_id, /* in */
void **pbuf /* out */)
{
if (LoadVALibrary()) {
return vaMapBufferFn(dpy, buf_id, pbuf);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaUnmapBuffer(VADisplay dpy, VABufferID buf_id /* in */)
{
if (LoadVALibrary()) {
return vaUnmapBufferFn(dpy, buf_id);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaTerminate(VADisplay dpy)
{
if (LoadVALibrary()) {
return vaTerminateFn(dpy);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaInitialize(VADisplay dpy,
int *major_version, /* out */
int *minor_version /* out */)
{
if (LoadVALibrary()) {
return vaInitializeFn(dpy, major_version, minor_version);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
VAStatus vaSetDriverName(VADisplay dpy, char *driver_name)
{
if (LoadVALibrary()) {
return vaSetDriverNameFn(dpy, driver_name);
}
return VA_STATUS_ERROR_UNIMPLEMENTED;
}
#pragma GCC visibility pop
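A hypothetical caller sketch (InitVADisplay and its logging are assumptions, not part of mozva): because every exported stub degrades to VA_STATUS_ERROR_UNIMPLEMENTED when libva.so.2 cannot be loaded, code linked against mozva only needs to check the returned status.

#include <va/va.h>
#include <cstdio>

// Hypothetical caller: behaves the same whether or not the real libva.so.2 is present.
static bool InitVADisplay(VADisplay aDisplay) {
  int major = 0, minor = 0;
  VAStatus status = vaInitialize(aDisplay, &major, &minor);
  if (status != VA_STATUS_SUCCESS) {
    // With the stubs above this prints "Unimplemented" when libva is missing.
    fprintf(stderr, "VA-API init failed: %s\n", vaErrorStr(status));
    return false;
  }
  fprintf(stderr, "VA-API %d.%d initialized\n", major, minor);
  return true;
}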


@ -1,2 +0,0 @@
These are libva headers from libva 1.7.0, used to build VA-API support for the in-tree ffvpx.
Apply va.patch when updating them to a new version.

File diff not shown because of its large size.


@ -1,33 +0,0 @@
--- va.h.old 2020-10-22 10:41:57.805112031 +0200
+++ va.h 2020-10-22 10:37:22.597088670 +0200
@@ -4613,18 +4613,18 @@
#define VA_PICTURE_HEVC_RPS_LT_CURR 0x00000040
-#include <va/va_dec_hevc.h>
-#include <va/va_dec_jpeg.h>
+//#include <va/va_dec_hevc.h>
+//#include <va/va_dec_jpeg.h>
#include "va_dec_vp8.h"
#include "va_dec_vp9.h"
-#include <va/va_enc_hevc.h>
-#include <va/va_fei_hevc.h>
-#include <va/va_enc_h264.h>
-#include <va/va_enc_jpeg.h>
-#include <va/va_enc_mpeg2.h>
-#include <va/va_enc_vp8.h>
-#include <va/va_enc_vp9.h>
-#include <va/va_fei.h>
-#include <va/va_fei_h264.h>
-#include <va/va_vpp.h>
+//#include <va/va_enc_hevc.h>
+//#include <va/va_fei_hevc.h>
+//#include <va/va_enc_h264.h>
+//#include <va/va_enc_jpeg.h>
+//#include <va/va_enc_mpeg2.h>
+//#include <va/va_enc_vp8.h>
+//#include <va/va_enc_vp9.h>
+//#include <va/va_fei.h>
+//#include <va/va_fei_h264.h>
+//#include <va/va_vpp.h>
/**@}*/


@ -1,254 +0,0 @@
/*
* Copyright (c) 2007-2012 Intel Corporation. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL INTEL AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/**
 * \file va_dec_vp8.h
* \brief VP8 decoding API
*
* This file contains the \ref api_dec_vp8 "VP8 decoding API".
*/
#ifndef VA_DEC_VP8_H
#define VA_DEC_VP8_H
#ifdef __cplusplus
extern "C" {
#endif
/**
* \defgroup api_dec_vp8 VP8 decoding API
*
* @{
*/
/**
* \brief VPX Bool Coder Context structure
*
* This common structure is defined for potential sharing by other VP formats
*
*/
typedef struct _VABoolCoderContextVPX
{
/* partition 0 "range" */
uint8_t range;
/* partition 0 "value" */
uint8_t value;
/*
* 'partition 0 number of shifts before an output byte is available'
* it is the number of remaining bits in 'value' for decoding, range [0, 7].
*/
uint8_t count;
} VABoolCoderContextVPX;
/**
* \brief VP8 Decoding Picture Parameter Buffer Structure
*
* This structure conveys frame level parameters and should be sent once
* per frame.
*
*/
typedef struct _VAPictureParameterBufferVP8
{
/* frame width in pixels */
uint32_t frame_width;
/* frame height in pixels */
uint32_t frame_height;
/* specifies the "last" reference frame */
VASurfaceID last_ref_frame;
/* specifies the "golden" reference frame */
VASurfaceID golden_ref_frame;
/* specifies the "alternate" referrence frame */
VASurfaceID alt_ref_frame;
/* specifies the out-of-loop deblocked frame, not used currently */
VASurfaceID out_of_loop_frame;
union {
struct {
/* same as key_frame in bitstream syntax, 0 means a key frame */
uint32_t key_frame : 1;
/* same as version in bitstream syntax */
uint32_t version : 3;
/* same as segmentation_enabled in bitstream syntax */
uint32_t segmentation_enabled : 1;
/* same as update_mb_segmentation_map in bitstream syntax */
uint32_t update_mb_segmentation_map : 1;
/* same as update_segment_feature_data in bitstream syntax */
uint32_t update_segment_feature_data : 1;
/* same as filter_type in bitstream syntax */
uint32_t filter_type : 1;
/* same as sharpness_level in bitstream syntax */
uint32_t sharpness_level : 3;
/* same as loop_filter_adj_enable in bitstream syntax */
uint32_t loop_filter_adj_enable : 1;
/* same as mode_ref_lf_delta_update in bitstream syntax */
uint32_t mode_ref_lf_delta_update : 1;
/* same as sign_bias_golden in bitstream syntax */
uint32_t sign_bias_golden : 1;
/* same as sign_bias_alternate in bitstream syntax */
uint32_t sign_bias_alternate : 1;
/* same as mb_no_coeff_skip in bitstream syntax */
uint32_t mb_no_coeff_skip : 1;
/* flag to indicate that loop filter should be disabled */
uint32_t loop_filter_disable : 1;
} bits;
uint32_t value;
} pic_fields;
/*
* probabilities of the segment_id decoding tree and same as
* mb_segment_tree_probs in the spec.
*/
uint8_t mb_segment_tree_probs[3];
/* Post-adjustment loop filter levels for the 4 segments */
uint8_t loop_filter_level[4];
/* loop filter deltas for reference frame based MB level adjustment */
int8_t loop_filter_deltas_ref_frame[4];
/* loop filter deltas for coding mode based MB level adjustment */
int8_t loop_filter_deltas_mode[4];
/* same as prob_skip_false in bitstream syntax */
uint8_t prob_skip_false;
/* same as prob_intra in bitstream syntax */
uint8_t prob_intra;
/* same as prob_last in bitstream syntax */
uint8_t prob_last;
/* same as prob_gf in bitstream syntax */
uint8_t prob_gf;
/*
* list of 4 probabilities of the luma intra prediction mode decoding
* tree and same as y_mode_probs in frame header
*/
uint8_t y_mode_probs[4];
/*
* list of 3 probabilities of the chroma intra prediction mode decoding
* tree and same as uv_mode_probs in frame header
*/
uint8_t uv_mode_probs[3];
/*
* updated mv decoding probabilities and same as mv_probs in
* frame header
*/
uint8_t mv_probs[2][19];
VABoolCoderContextVPX bool_coder_ctx;
/** \brief Reserved bytes for future use, must be zero */
uint32_t va_reserved[VA_PADDING_LOW];
} VAPictureParameterBufferVP8;
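A minimal sketch of how a decoder might hand this per-frame structure to the driver; the helper name is an assumption, the VADisplay and VAContextID are assumed to exist already, and whether the buffer may be destroyed right after vaRenderPicture() or only after vaEndPicture() is driver-dependent.

/* Sketch: submit the per-frame VP8 picture parameters. */
static VAStatus SubmitVP8PicParams(VADisplay dpy, VAContextID ctx,
                                   const VAPictureParameterBufferVP8* pp) {
  VABufferID buf;
  VAStatus status = vaCreateBuffer(dpy, ctx, VAPictureParameterBufferType,
                                   sizeof(*pp), 1,
                                   const_cast<VAPictureParameterBufferVP8*>(pp),
                                   &buf);
  if (status != VA_STATUS_SUCCESS) {
    return status;
  }
  status = vaRenderPicture(dpy, ctx, &buf, 1);
  vaDestroyBuffer(dpy, buf);  // conservative: some drivers want this deferred
  return status;
}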
/**
* \brief VP8 Slice Parameter Buffer Structure
*
* This structure conveys parameters related to data partitions and should be
* sent once per frame. Slice data buffer of VASliceDataBufferType is used
* to send the partition data.
*
*/
typedef struct _VASliceParameterBufferVP8
{
/*
* number of bytes in the slice data buffer for the partitions
*/
uint32_t slice_data_size;
/*
* offset to the first byte of partition data (control partition)
*/
uint32_t slice_data_offset;
/*
* see VA_SLICE_DATA_FLAG_XXX definitions
*/
uint32_t slice_data_flag;
/*
 * offset to the first bit of MB from the first byte of partition data (slice_data_offset)
*/
uint32_t macroblock_offset;
/*
* Partitions
* (1<<log2_nbr_of_dct_partitions)+1, count both control partition (frame header) and toke partition
*/
uint8_t num_of_partitions;
/*
 * partition_size[0] is the number of bytes remaining in the control partition after it has been
 * parsed by the application, excluding the current byte that holds the remaining bits in
 * bool_coder_ctx and excluding the uncompressed data chunk (first_part_size already excludes the
 * uncompressed data chunk).
*/
uint32_t partition_size[9];
/** \brief Reserved bytes for future use, must be zero */
uint32_t va_reserved[VA_PADDING_LOW];
} VASliceParameterBufferVP8;
/**
* \brief VP8 Coefficient Probability Data Buffer Structure
*
* Contains the contents of the token probability table, which may be
* incrementally modified in the frame header. There are four dimensions to
* the token probability array. The outermost dimension is indexed by the
* type of plane being decoded; the next dimension is selected by the
 * position of the coefficient being decoded; the third dimension, roughly
* speaking, measures the "local complexity" or extent to which nearby
* coefficients are non-zero; the fourth, and final, dimension of the token
* probability array is indexed by the position in the token tree structure,
* as are all tree probability arrays. This structure is sent once per frame.
*
*/
typedef struct _VAProbabilityDataBufferVP8
{
uint8_t dct_coeff_probs[4][8][3][11];
/** \brief Reserved bytes for future use, must be zero */
uint32_t va_reserved[VA_PADDING_LOW];
} VAProbabilityDataBufferVP8;
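A small illustration of the indexing order described above; the helper and the source table are assumptions. The four dimensions map to [plane type][coefficient position][local complexity][token tree position]:

/* Sketch: copy parsed token probabilities into the VA buffer layout. */
static void FillVP8Probs(VAProbabilityDataBufferVP8* dst,
                         const uint8_t src[4][8][3][11]) {
  for (int plane = 0; plane < 4; plane++)              // plane type
    for (int band = 0; band < 8; band++)               // coefficient position
      for (int ctx = 0; ctx < 3; ctx++)                // local complexity
        for (int node = 0; node < 11; node++)          // token tree position
          dst->dct_coeff_probs[plane][band][ctx][node] =
              src[plane][band][ctx][node];
}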
/**
* \brief VP8 Inverse Quantization Matrix Buffer Structure
*
* Contains quantization indices for yac(0),ydc(1),y2dc(2),y2ac(3),uvdc(4),
* uvac(5) for each segment (0-3). When segmentation is disabled, only
* quantization_index[0][] will be used. This structure is sent once per frame.
*/
typedef struct _VAIQMatrixBufferVP8
{
/*
 * The first dimension of the array is the segment and the second is the Q index;
 * all Q indexes should be clipped to the range [0, 127].
*/
uint16_t quantization_index[4][6];
/** \brief Reserved bytes for future use, must be zero */
uint32_t va_reserved[VA_PADDING_LOW];
} VAIQMatrixBufferVP8;
/**@}*/
#ifdef __cplusplus
}
#endif
#endif /* VA_DEC_VP8_H */


@ -1,319 +0,0 @@
/*
* Copyright (c) 2014 Intel Corporation. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL INTEL AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/**
* \file va_dec_vp9.h
* \brief The VP9 decoding API
*
* This file contains the \ref api_dec_vp9 "VP9 decoding API".
*/
#ifndef VA_DEC_VP9_H
#define VA_DEC_VP9_H
#ifdef __cplusplus
extern "C" {
#endif
/**
* \defgroup api_dec_vp9 VP9 decoding API
*
* This VP9 decoding API supports 8-bit 420 format only.
*
* @{
*/
/**
* \brief VP9 Decoding Picture Parameter Buffer Structure
*
* This structure conveys picture level parameters.
* App should send a surface with this data structure down to VAAPI once
* per frame.
*
*/
typedef struct _VADecPictureParameterBufferVP9
{
/** \brief picture width
 * Picture original resolution. The value may not be a multiple of 8.
*/
uint16_t frame_width;
/** \brief picture height
 * Picture original resolution. The value may not be a multiple of 8.
*/
uint16_t frame_height;
/** \brief Surface indices of reference frames in DPB.
*
* Each entry of the list specifies the surface index of the picture
* that is referred by current picture or will be referred by any future
* picture.
 * The application that calls this API should update this list based on the
 * reference refresh information in the VP9 bitstream.
*/
VASurfaceID reference_frames[8];
union
{
struct
{
/** \brief flags for current picture
* same syntax and semantic as those in VP9 code
*/
uint32_t subsampling_x : 1;
uint32_t subsampling_y : 1;
uint32_t frame_type : 1;
uint32_t show_frame : 1;
uint32_t error_resilient_mode : 1;
uint32_t intra_only : 1;
uint32_t allow_high_precision_mv : 1;
uint32_t mcomp_filter_type : 3;
uint32_t frame_parallel_decoding_mode : 1;
uint32_t reset_frame_context : 2;
uint32_t refresh_frame_context : 1;
uint32_t frame_context_idx : 2;
uint32_t segmentation_enabled : 1;
/** \brief corresponds to variable temporal_update in VP9 code.
*/
uint32_t segmentation_temporal_update : 1;
/** \brief corresponds to variable update_mb_segmentation_map
* in VP9 code.
*/
uint32_t segmentation_update_map : 1;
/** \brief Index of reference_frames[] and points to the
* LAST reference frame.
* It corresponds to active_ref_idx[0] in VP9 code.
*/
uint32_t last_ref_frame : 3;
/** \brief Sign Bias of the LAST reference frame.
* It corresponds to ref_frame_sign_bias[LAST_FRAME] in VP9 code.
*/
uint32_t last_ref_frame_sign_bias : 1;
/** \brief Index of reference_frames[] and points to the
 * GOLDEN reference frame.
* It corresponds to active_ref_idx[1] in VP9 code.
*/
uint32_t golden_ref_frame : 3;
 /** \brief Sign Bias of the GOLDEN reference frame.
 * Corresponds to ref_frame_sign_bias[GOLDEN_FRAME] in VP9 code.
*/
uint32_t golden_ref_frame_sign_bias : 1;
/** \brief Index of reference_frames[] and points to the
* ALTERNATE reference frame.
* Corresponds to active_ref_idx[2] in VP9 code.
*/
uint32_t alt_ref_frame : 3;
/** \brief Sign Bias of the ALTERNATE reference frame.
* Corresponds to ref_frame_sign_bias[ALTREF_FRAME] in VP9 code.
*/
uint32_t alt_ref_frame_sign_bias : 1;
/** \brief Lossless Mode
* LosslessFlag = base_qindex == 0 &&
* y_dc_delta_q == 0 &&
* uv_dc_delta_q == 0 &&
* uv_ac_delta_q == 0;
* Where base_qindex, y_dc_delta_q, uv_dc_delta_q and uv_ac_delta_q
* are all variables in VP9 code.
*/
uint32_t lossless_flag : 1;
} bits;
uint32_t value;
} pic_fields;
/* following parameters have same syntax with those in VP9 code */
uint8_t filter_level;
uint8_t sharpness_level;
/** \brief number of tile rows specified by (1 << log2_tile_rows).
 * It corresponds to the variable of the same name in VP9 code.
*/
uint8_t log2_tile_rows;
/** \brief number of tile columns specified by (1 << log2_tile_columns).
 * It corresponds to the variable of the same name in VP9 code.
*/
uint8_t log2_tile_columns;
/** \brief Number of bytes taken up by the uncompressed frame header,
* which corresponds to byte length of function
* read_uncompressed_header() in VP9 code.
* Specifically, it is the byte count from bit stream buffer start to
* the last byte of uncompressed frame header.
* If there are other meta data in the buffer before uncompressed header,
* its size should be also included here.
*/
uint8_t frame_header_length_in_bytes;
 /** \brief The byte count of the compressed header in the bitstream buffer,
* which corresponds to syntax first_partition_size in code.
*/
uint16_t first_partition_size;
/** These values are segment probabilities with same names in VP9
* function setup_segmentation(). They should be parsed directly from
* bitstream by application.
*/
uint8_t mb_segment_tree_probs[7];
uint8_t segment_pred_probs[3];
/** \brief VP9 Profile definition
* value range [0..3].
*/
uint8_t profile;
/** \brief VP9 bit depth per sample
* same for both luma and chroma samples.
*/
uint8_t bit_depth;
/** \brief Reserved bytes for future use, must be zero */
uint32_t va_reserved[VA_PADDING_MEDIUM];
} VADecPictureParameterBufferVP9;
/**
* \brief VP9 Segmentation Parameter Data Structure
*
* This structure conveys per segment parameters.
 * Eight instances of this data structure are included in VASliceParameterBufferVP9
 * and sent to the API in a single buffer.
*
*/
typedef struct _VASegmentParameterVP9
{
union
{
struct
{
/** \brief Indicates if per segment reference frame indicator
* is enabled.
* Corresponding to variable feature_enabled when
 * j == SEG_LVL_REF_FRAME in function setup_segmentation() in VP9 code.
*/
uint16_t segment_reference_enabled : 1;
/** \brief Specifies per segment reference indication.
* 0: reserved
* 1: Last ref
* 2: golden
* 3: altref
* Value can be derived from variable data when
 * j == SEG_LVL_REF_FRAME in function setup_segmentation() in VP9 code.
*/
uint16_t segment_reference : 2;
/** \brief Indicates if per segment skip feature is enabled.
* Corresponding to variable feature_enabled when
 * j == SEG_LVL_SKIP in function setup_segmentation() in VP9 code.
*/
uint16_t segment_reference_skipped : 1;
} fields;
uint16_t value;
} segment_flags;
/** \brief Specifies the filter level information per segment.
* The value corresponds to variable lfi->lvl[seg][ref][mode] in VP9 code,
* where m is [ref], and n is [mode] in FilterLevel[m][n].
*/
uint8_t filter_level[4][2];
/** \brief Specifies per segment Luma AC quantization scale.
* Corresponding to y_dequant[qindex][1] in vp9_mb_init_quantizer()
* function of VP9 code.
*/
int16_t luma_ac_quant_scale;
/** \brief Specifies per segment Luma DC quantization scale.
* Corresponding to y_dequant[qindex][0] in vp9_mb_init_quantizer()
* function of VP9 code.
*/
int16_t luma_dc_quant_scale;
/** \brief Specifies per segment Chroma AC quantization scale.
* Corresponding to uv_dequant[qindex][1] in vp9_mb_init_quantizer()
* function of VP9 code.
*/
int16_t chroma_ac_quant_scale;
/** \brief Specifies per segment Chroma DC quantization scale.
* Corresponding to uv_dequant[qindex][0] in vp9_mb_init_quantizer()
* function of VP9 code.
*/
int16_t chroma_dc_quant_scale;
/** \brief Reserved bytes for future use, must be zero */
uint32_t va_reserved[VA_PADDING_LOW];
} VASegmentParameterVP9;
/**
* \brief VP9 Slice Parameter Buffer Structure
*
* This structure conveys parameters related to segmentation data and should be
* sent once per frame.
*
 * When segmentation is disabled, only seg_param[0] has valid values;
 * all other entries should be populated with 0.
* Otherwise, all eight entries should be valid.
*
* Slice data buffer of VASliceDataBufferType is used
* to send the bitstream which should include whole or part of partition 0
* (at least compressed header) to the end of frame.
*
*/
typedef struct _VASliceParameterBufferVP9
{
 /** \brief The byte count of the current frame in the bitstream buffer,
 * starting from the first byte of the buffer.
 * It uses the name slice_data_size to be consistent with other codecs,
 * but actually means frame_data_size.
*/
uint32_t slice_data_size;
/**
* offset to the first byte of partition data (control partition)
*/
uint32_t slice_data_offset;
/**
* see VA_SLICE_DATA_FLAG_XXX definitions
*/
uint32_t slice_data_flag;
/**
* \brief per segment information
*/
VASegmentParameterVP9 seg_param[8];
/** \brief Reserved bytes for future use, must be zero */
uint32_t va_reserved[VA_PADDING_LOW];
} VASliceParameterBufferVP9;
/**@}*/
#ifdef __cplusplus
}
#endif
#endif /* VA_DEC_VP9_H */
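A short sketch of the segmentation-disabled case described above for VASliceParameterBufferVP9: only seg_param[0] carries meaningful values and the remaining entries stay zeroed. The helper name and the quantization arguments are placeholders for values derived from the frame header.

#include <cstring>
#include <cstdint>
#include <va/va.h>

/* Sketch: build the slice parameters when segmentation is disabled. */
static void FillVP9SliceParamsNoSeg(VASliceParameterBufferVP9* sp,
                                    uint32_t frameSize,
                                    int16_t lumaAc, int16_t lumaDc,
                                    int16_t chromaAc, int16_t chromaDc) {
  memset(sp, 0, sizeof(*sp));           // zeroes seg_param[1..7] too
  sp->slice_data_size = frameSize;      // whole-frame byte count
  sp->slice_data_offset = 0;
  sp->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
  sp->seg_param[0].luma_ac_quant_scale = lumaAc;
  sp->seg_param[0].luma_dc_quant_scale = lumaDc;
  sp->seg_param[0].chroma_ac_quant_scale = chromaAc;
  sp->seg_param[0].chroma_dc_quant_scale = chromaDc;
}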


@ -1,87 +0,0 @@
/*
* Copyright (C) 2009 Splitted-Desktop Systems. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#ifndef VA_VERSION_H
#define VA_VERSION_H
/**
* VA_MAJOR_VERSION:
*
* The major version of VA-API (1, if %VA_VERSION is 1.2.3)
*/
#define VA_MAJOR_VERSION 1
/**
* VA_MINOR_VERSION:
*
* The minor version of VA-API (2, if %VA_VERSION is 1.2.3)
*/
#define VA_MINOR_VERSION 7
/**
* VA_MICRO_VERSION:
*
* The micro version of VA-API (3, if %VA_VERSION is 1.2.3)
*/
#define VA_MICRO_VERSION 0
/**
* VA_VERSION:
*
* The full version of VA-API, like 1.2.3
*/
#define VA_VERSION 1.7.0
/**
* VA_VERSION_S:
*
* The full version of VA-API, in string form (suited for string
* concatenation)
*/
#define VA_VERSION_S "1.7.0"
/**
* VA_VERSION_HEX:
*
* Numerically encoded version of VA-API, like 0x010203
*/
#define VA_VERSION_HEX ((VA_MAJOR_VERSION << 24) | \
(VA_MINOR_VERSION << 16) | \
(VA_MICRO_VERSION << 8))
/**
* VA_CHECK_VERSION:
* @major: major version, like 1 in 1.2.3
* @minor: minor version, like 2 in 1.2.3
* @micro: micro version, like 3 in 1.2.3
*
 * Evaluates to %TRUE if the version of VA-API is greater than or
 * equal to @major.@minor.@micro
*/
#define VA_CHECK_VERSION(major,minor,micro) \
(VA_MAJOR_VERSION > (major) || \
(VA_MAJOR_VERSION == (major) && VA_MINOR_VERSION > (minor)) || \
(VA_MAJOR_VERSION == (major) && VA_MINOR_VERSION == (minor) && VA_MICRO_VERSION >= (micro)))
#endif /* VA_VERSION_H */
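A brief usage sketch of VA_CHECK_VERSION(); the MOZ_HAVE_* macro name and the guarded feature are assumptions (vaExportSurfaceHandle() is assumed here to have appeared with VA-API 1.1):

#include <va/va.h>

/* Compile-time guard: only build the surface-export path when the headers
 * are new enough for vaExportSurfaceHandle() (assumed VA-API >= 1.1). */
#if VA_CHECK_VERSION(1, 1, 0)
#  define MOZ_HAVE_VA_EXPORT_SURFACE_HANDLE 1
#else
#  define MOZ_HAVE_VA_EXPORT_SURFACE_HANDLE 0
#endif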


@ -8,9 +8,10 @@
#define DMABufSurface_h__
#include <stdint.h>
#include "GLContext.h"
#include "GLContextTypes.h"
#include "mozilla/widget/nsWaylandDisplay.h"
#include "mozilla/widget/va_drmcommon.h"
#include "GLTypes.h"
typedef void* EGLImageKHR;
typedef void* EGLSyncKHR;
@ -22,9 +23,6 @@ namespace layers {
class SurfaceDescriptor;
class SurfaceDescriptorDMABuf;
} // namespace layers
namespace gl {
class GLContext;
}
} // namespace mozilla
typedef enum {


@ -6,10 +6,6 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "nsWaylandDisplay.h"
#include "base/message_loop.h" // for MessageLoop
#include "base/task.h" // for NewRunnableMethod, etc
#include "mozilla/StaticMutex.h"
#include "mozilla/StaticPrefs_widget.h"
namespace mozilla {


@ -10,6 +10,10 @@
#include "DMABufLibWrapper.h"
#include "base/message_loop.h" // for MessageLoop
#include "base/task.h" // for NewRunnableMethod, etc
#include "mozilla/StaticMutex.h"
#include "mozilla/widget/mozwayland.h"
#include "mozilla/widget/gbm.h"
#include "mozilla/widget/gtk-primary-selection-client-protocol.h"