Bug 922314 - Add support for decoding VPx via MediaFoundation on Windows. r=cpearce,kinetik

Refactor WebMReader's VPx decoding out to SoftwareWebMVideoDecoder and introduce a new IntelWebMVideoDecoder that uses the MediaFoundation/MFT backend to decode VPx when the requisite hardware and software is available.
This commit is contained in:
Joe Olivas 2014-11-11 16:30:52 +13:00
Parent d437286ca8
Commit ff0f55d7cb
28 changed files: 1202 additions and 273 deletions

Просмотреть файл

@ -213,11 +213,11 @@ public:
// Decode thread. // Decode thread.
virtual already_AddRefed<MediaDataDecoder> virtual already_AddRefed<MediaDataDecoder>
CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig, CreateVideoDecoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend, layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer, layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue, MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE { MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE {
BlankVideoDataCreator* creator = new BlankVideoDataCreator( BlankVideoDataCreator* creator = new BlankVideoDataCreator(
aConfig.display_width, aConfig.display_height, aImageContainer); aConfig.display_width, aConfig.display_height, aImageContainer);
nsRefPtr<MediaDataDecoder> decoder = nsRefPtr<MediaDataDecoder> decoder =

Просмотреть файл

@ -311,6 +311,14 @@ MP4Reader::IsSupportedAudioMimeType(const char* aMimeType)
mPlatform->SupportsAudioMimeType(aMimeType); mPlatform->SupportsAudioMimeType(aMimeType);
} }
bool
MP4Reader::IsSupportedVideoMimeType(const char* aMimeType)
{
  // Only the H.264-family MP4 types are candidates here, and the
  // platform decoder module must also report support for the type.
  const bool isH264Family = !strcmp(aMimeType, "video/mp4") ||
                            !strcmp(aMimeType, "video/avc");
  return isH264Family && mPlatform->SupportsVideoMimeType(aMimeType);
}
nsresult nsresult
MP4Reader::ReadMetadata(MediaInfo* aInfo, MP4Reader::ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags) MetadataTags** aTags)
@ -413,22 +421,25 @@ MP4Reader::ReadMetadata(MediaInfo* aInfo,
if (HasVideo()) { if (HasVideo()) {
const VideoDecoderConfig& video = mDemuxer->VideoConfig(); const VideoDecoderConfig& video = mDemuxer->VideoConfig();
if (mInfo.mVideo.mHasVideo && !IsSupportedVideoMimeType(video.mime_type)) {
return NS_ERROR_FAILURE;
}
mInfo.mVideo.mDisplay = mInfo.mVideo.mDisplay =
nsIntSize(video.display_width, video.display_height); nsIntSize(video.display_width, video.display_height);
mVideo.mCallback = new DecoderCallback(this, kVideo); mVideo.mCallback = new DecoderCallback(this, kVideo);
if (mSharedDecoderManager) { if (mSharedDecoderManager) {
mVideo.mDecoder = mVideo.mDecoder =
mSharedDecoderManager->CreateH264Decoder(video, mSharedDecoderManager->CreateVideoDecoder(video,
mLayersBackendType, mLayersBackendType,
mDecoder->GetImageContainer(), mDecoder->GetImageContainer(),
mVideo.mTaskQueue, mVideo.mTaskQueue,
mVideo.mCallback); mVideo.mCallback);
} else { } else {
mVideo.mDecoder = mPlatform->CreateH264Decoder(video, mVideo.mDecoder = mPlatform->CreateVideoDecoder(video,
mLayersBackendType, mLayersBackendType,
mDecoder->GetImageContainer(), mDecoder->GetImageContainer(),
mVideo.mTaskQueue, mVideo.mTaskQueue,
mVideo.mCallback); mVideo.mCallback);
} }
NS_ENSURE_TRUE(mVideo.mDecoder != nullptr, NS_ERROR_FAILURE); NS_ENSURE_TRUE(mVideo.mDecoder != nullptr, NS_ERROR_FAILURE);
nsresult rv = mVideo.mDecoder->Init(); nsresult rv = mVideo.mDecoder->Init();

Просмотреть файл

@ -109,6 +109,7 @@ private:
void DrainComplete(mp4_demuxer::TrackType aTrack); void DrainComplete(mp4_demuxer::TrackType aTrack);
void UpdateIndex(); void UpdateIndex();
bool IsSupportedAudioMimeType(const char* aMimeType); bool IsSupportedAudioMimeType(const char* aMimeType);
bool IsSupportedVideoMimeType(const char* aMimeType);
void NotifyResourcesStatusChanged(); void NotifyResourcesStatusChanged();
bool IsWaitingOnCodecResource(); bool IsWaitingOnCodecResource();
virtual bool IsWaitingOnCDMResource() MOZ_OVERRIDE; virtual bool IsWaitingOnCDMResource() MOZ_OVERRIDE;

Просмотреть файл

@ -182,4 +182,10 @@ PlatformDecoderModule::SupportsAudioMimeType(const char* aMimeType)
return !strcmp(aMimeType, "audio/mp4a-latm"); return !strcmp(aMimeType, "audio/mp4a-latm");
} }
bool
PlatformDecoderModule::SupportsVideoMimeType(const char* aMimeType)
{
  // Base implementation: platform modules decode H.264-family MP4
  // video unless a subclass overrides this to widen support.
  if (!strcmp(aMimeType, "video/mp4")) {
    return true;
  }
  return !strcmp(aMimeType, "video/avc");
}
} // namespace mozilla } // namespace mozilla

Просмотреть файл

@ -97,7 +97,7 @@ public:
// It is safe to store a reference to aConfig. // It is safe to store a reference to aConfig.
// This is called on the decode task queue. // This is called on the decode task queue.
virtual already_AddRefed<MediaDataDecoder> virtual already_AddRefed<MediaDataDecoder>
CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig, CreateVideoDecoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend, layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer, layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue, MediaTaskQueue* aVideoTaskQueue,
@ -122,6 +122,7 @@ public:
// If more audio codec is to be supported, SupportsAudioMimeType will have // If more audio codec is to be supported, SupportsAudioMimeType will have
// to be extended // to be extended
virtual bool SupportsAudioMimeType(const char* aMimeType); virtual bool SupportsAudioMimeType(const char* aMimeType);
virtual bool SupportsVideoMimeType(const char* aMimeType);
virtual ~PlatformDecoderModule() {} virtual ~PlatformDecoderModule() {}

Просмотреть файл

@ -66,14 +66,14 @@ SharedDecoderManager::SharedDecoderManager()
SharedDecoderManager::~SharedDecoderManager() {} SharedDecoderManager::~SharedDecoderManager() {}
already_AddRefed<MediaDataDecoder> already_AddRefed<MediaDataDecoder>
SharedDecoderManager::CreateH264Decoder( SharedDecoderManager::CreateVideoDecoder(
const mp4_demuxer::VideoDecoderConfig& aConfig, const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend, layers::ImageContainer* aImageContainer, layers::LayersBackend aLayersBackend, layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue, MediaDataDecoderCallback* aCallback) MediaTaskQueue* aVideoTaskQueue, MediaDataDecoderCallback* aCallback)
{ {
if (!mDecoder) { if (!mDecoder) {
nsAutoPtr<PlatformDecoderModule> platform(PlatformDecoderModule::Create()); nsAutoPtr<PlatformDecoderModule> platform(PlatformDecoderModule::Create());
mDecoder = platform->CreateH264Decoder( mDecoder = platform->CreateVideoDecoder(
aConfig, aLayersBackend, aImageContainer, aVideoTaskQueue, mCallback); aConfig, aLayersBackend, aImageContainer, aVideoTaskQueue, mCallback);
if (!mDecoder) { if (!mDecoder) {
return nullptr; return nullptr;

Просмотреть файл

@ -24,7 +24,7 @@ public:
SharedDecoderManager(); SharedDecoderManager();
already_AddRefed<MediaDataDecoder> CreateH264Decoder( already_AddRefed<MediaDataDecoder> CreateVideoDecoder(
const mp4_demuxer::VideoDecoderConfig& aConfig, const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend, layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer, MediaTaskQueue* aVideoTaskQueue, layers::ImageContainer* aImageContainer, MediaTaskQueue* aVideoTaskQueue,

Просмотреть файл

@ -135,7 +135,7 @@ bool AndroidDecoderModule::SupportsAudioMimeType(const char* aMimeType) {
} }
already_AddRefed<MediaDataDecoder> already_AddRefed<MediaDataDecoder>
AndroidDecoderModule::CreateH264Decoder( AndroidDecoderModule::CreateVideoDecoder(
const mp4_demuxer::VideoDecoderConfig& aConfig, const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend, layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer, layers::ImageContainer* aImageContainer,

Просмотреть файл

@ -34,11 +34,11 @@ public:
virtual nsresult Shutdown() MOZ_OVERRIDE; virtual nsresult Shutdown() MOZ_OVERRIDE;
virtual already_AddRefed<MediaDataDecoder> virtual already_AddRefed<MediaDataDecoder>
CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig, CreateVideoDecoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend, layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer, layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue, MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE; MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE;
virtual already_AddRefed<MediaDataDecoder> virtual already_AddRefed<MediaDataDecoder>
CreateAudioDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig, CreateAudioDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig,

Просмотреть файл

@ -144,11 +144,11 @@ AppleDecoderModule::Shutdown()
} }
already_AddRefed<MediaDataDecoder> already_AddRefed<MediaDataDecoder>
AppleDecoderModule::CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig, AppleDecoderModule::CreateVideoDecoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend, layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer, layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue, MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) MediaDataDecoderCallback* aCallback)
{ {
nsRefPtr<MediaDataDecoder> decoder; nsRefPtr<MediaDataDecoder> decoder;

Просмотреть файл

@ -26,11 +26,11 @@ public:
// Decode thread. // Decode thread.
virtual already_AddRefed<MediaDataDecoder> virtual already_AddRefed<MediaDataDecoder>
CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig, CreateVideoDecoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend, layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer, layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue, MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE; MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE;
// Decode thread. // Decode thread.
virtual already_AddRefed<MediaDataDecoder> virtual already_AddRefed<MediaDataDecoder>

Просмотреть файл

@ -194,11 +194,11 @@ EMEDecoderModule::Shutdown()
} }
already_AddRefed<MediaDataDecoder> already_AddRefed<MediaDataDecoder>
EMEDecoderModule::CreateH264Decoder(const VideoDecoderConfig& aConfig, EMEDecoderModule::CreateVideoDecoder(const VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend, layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer, layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue, MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) MediaDataDecoderCallback* aCallback)
{ {
if (mCDMDecodesVideo && aConfig.crypto.valid) { if (mCDMDecodesVideo && aConfig.crypto.valid) {
nsRefPtr<MediaDataDecoder> decoder(new EMEH264Decoder(mProxy, nsRefPtr<MediaDataDecoder> decoder(new EMEH264Decoder(mProxy,
@ -210,11 +210,11 @@ EMEDecoderModule::CreateH264Decoder(const VideoDecoderConfig& aConfig,
return decoder.forget(); return decoder.forget();
} }
nsRefPtr<MediaDataDecoder> decoder(mPDM->CreateH264Decoder(aConfig, nsRefPtr<MediaDataDecoder> decoder(mPDM->CreateVideoDecoder(aConfig,
aLayersBackend, aLayersBackend,
aImageContainer, aImageContainer,
aVideoTaskQueue, aVideoTaskQueue,
aCallback)); aCallback));
if (!decoder) { if (!decoder) {
return nullptr; return nullptr;
} }

Просмотреть файл

@ -34,7 +34,7 @@ public:
// Decode thread. // Decode thread.
virtual already_AddRefed<MediaDataDecoder> virtual already_AddRefed<MediaDataDecoder>
CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig, CreateVideoDecoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend, layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer, layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue, MediaTaskQueue* aVideoTaskQueue,

Просмотреть файл

@ -26,11 +26,11 @@ public:
virtual nsresult Shutdown() MOZ_OVERRIDE { return NS_OK; } virtual nsresult Shutdown() MOZ_OVERRIDE { return NS_OK; }
virtual already_AddRefed<MediaDataDecoder> virtual already_AddRefed<MediaDataDecoder>
CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig, CreateVideoDecoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend, layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer, layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue, MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE
{ {
nsRefPtr<MediaDataDecoder> decoder = nsRefPtr<MediaDataDecoder> decoder =
new FFmpegH264Decoder<V>(aVideoTaskQueue, aCallback, aConfig, new FFmpegH264Decoder<V>(aVideoTaskQueue, aCallback, aConfig,

Просмотреть файл

@ -35,11 +35,11 @@ GonkDecoderModule::Shutdown()
} }
already_AddRefed<MediaDataDecoder> already_AddRefed<MediaDataDecoder>
GonkDecoderModule::CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig, GonkDecoderModule::CreateVideoDecoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
mozilla::layers::LayersBackend aLayersBackend, mozilla::layers::LayersBackend aLayersBackend,
mozilla::layers::ImageContainer* aImageContainer, mozilla::layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue, MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) MediaDataDecoderCallback* aCallback)
{ {
nsRefPtr<MediaDataDecoder> decoder = nsRefPtr<MediaDataDecoder> decoder =
new GonkMediaDataDecoder(new GonkVideoDecoderManager(aImageContainer,aConfig), new GonkMediaDataDecoder(new GonkVideoDecoderManager(aImageContainer,aConfig),

Просмотреть файл

@ -21,11 +21,11 @@ public:
// Decode thread. // Decode thread.
virtual already_AddRefed<MediaDataDecoder> virtual already_AddRefed<MediaDataDecoder>
CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig, CreateVideoDecoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
mozilla::layers::LayersBackend aLayersBackend, mozilla::layers::LayersBackend aLayersBackend,
mozilla::layers::ImageContainer* aImageContainer, mozilla::layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue, MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE; MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE;
// Decode thread. // Decode thread.
virtual already_AddRefed<MediaDataDecoder> virtual already_AddRefed<MediaDataDecoder>

Просмотреть файл

@ -59,16 +59,15 @@ WMFDecoderModule::Shutdown()
{ {
DebugOnly<HRESULT> hr = wmf::MFShutdown(); DebugOnly<HRESULT> hr = wmf::MFShutdown();
NS_ASSERTION(SUCCEEDED(hr), "MFShutdown failed"); NS_ASSERTION(SUCCEEDED(hr), "MFShutdown failed");
return NS_OK; return NS_OK;
} }
already_AddRefed<MediaDataDecoder> already_AddRefed<MediaDataDecoder>
WMFDecoderModule::CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig, WMFDecoderModule::CreateVideoDecoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend, layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer, layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue, MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) MediaDataDecoderCallback* aCallback)
{ {
nsRefPtr<MediaDataDecoder> decoder = nsRefPtr<MediaDataDecoder> decoder =
new WMFMediaDataDecoder(new WMFVideoMFTManager(aConfig, new WMFMediaDataDecoder(new WMFVideoMFTManager(aConfig,
@ -92,6 +91,15 @@ WMFDecoderModule::CreateAudioDecoder(const mp4_demuxer::AudioDecoderConfig& aCon
return decoder.forget(); return decoder.forget();
} }
bool
WMFDecoderModule::SupportsVideoMimeType(const char* aMimeType)
{
  // H.264 in MP4 is decoded by the system MFT.
  if (!strcmp(aMimeType, "video/mp4") || !strcmp(aMimeType, "video/avc")) {
    return true;
  }
  // VP8/VP9 in WebM is decoded via the WebM Media Foundation transforms.
  return !strcmp(aMimeType, "video/webm; codecs=vp8") ||
         !strcmp(aMimeType, "video/webm; codecs=vp9");
}
bool bool
WMFDecoderModule::SupportsAudioMimeType(const char* aMimeType) WMFDecoderModule::SupportsAudioMimeType(const char* aMimeType)
{ {

Просмотреть файл

@ -23,17 +23,18 @@ public:
virtual nsresult Shutdown() MOZ_OVERRIDE; virtual nsresult Shutdown() MOZ_OVERRIDE;
virtual already_AddRefed<MediaDataDecoder> virtual already_AddRefed<MediaDataDecoder>
CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig, CreateVideoDecoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend, layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer, layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue, MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE; MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE;
virtual already_AddRefed<MediaDataDecoder> virtual already_AddRefed<MediaDataDecoder>
CreateAudioDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig, CreateAudioDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig,
MediaTaskQueue* aAudioTaskQueue, MediaTaskQueue* aAudioTaskQueue,
MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE; MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE;
bool SupportsVideoMimeType(const char* aMimeType) MOZ_OVERRIDE;
bool SupportsAudioMimeType(const char* aMimeType) MOZ_OVERRIDE; bool SupportsAudioMimeType(const char* aMimeType) MOZ_OVERRIDE;
// Called on main thread. // Called on main thread.

Просмотреть файл

@ -30,6 +30,38 @@ using mozilla::layers::Image;
using mozilla::layers::LayerManager; using mozilla::layers::LayerManager;
using mozilla::layers::LayersBackend; using mozilla::layers::LayersBackend;
// Media subtype for VP8: Data1 0x30385056 is the FOURCC 'VP80'
// ('V'=0x56, 'P'=0x50, '8'=0x38, '0'=0x30); the remaining fields appear
// to follow the standard MF video-subtype base GUID pattern.
const GUID MFVideoFormat_VP80 =
{
  0x30385056,
  0x0000,
  0x0010,
  {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}
};

// Media subtype for VP9: Data1 0x30395056 is the FOURCC 'VP90'.
const GUID MFVideoFormat_VP90 =
{
  0x30395056,
  0x0000,
  0x0010,
  {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}
};

// Class id used to instantiate the WebM VP8 decoder MFT.
const CLSID CLSID_WebmMfVp8Dec =
{
  0x451e3cb7,
  0x2622,
  0x4ba5,
  {0x8e, 0x1d, 0x44, 0xb3, 0xc4, 0x1d, 0x09, 0x24}
};

// Class id used to instantiate the WebM VP9 decoder MFT.
const CLSID CLSID_WebmMfVp9Dec =
{
  0x7ab4bd2,
  0x1979,
  0x4fcd,
  {0xa6, 0x97, 0xdf, 0x9a, 0xd1, 0x5b, 0x34, 0xfe}
};
namespace mozilla { namespace mozilla {
WMFVideoMFTManager::WMFVideoMFTManager( WMFVideoMFTManager::WMFVideoMFTManager(
@ -48,6 +80,18 @@ WMFVideoMFTManager::WMFVideoMFTManager(
NS_ASSERTION(!NS_IsMainThread(), "Should not be on main thread."); NS_ASSERTION(!NS_IsMainThread(), "Should not be on main thread.");
MOZ_ASSERT(mImageContainer); MOZ_ASSERT(mImageContainer);
MOZ_COUNT_CTOR(WMFVideoMFTManager); MOZ_COUNT_CTOR(WMFVideoMFTManager);
// Need additional checks/params to check vp8/vp9
if (!strcmp(aConfig.mime_type, "video/mp4") ||
!strcmp(aConfig.mime_type, "video/avc")) {
mStreamType = H264;
} else if (!strcmp(aConfig.mime_type, "video/webm; codecs=vp8")) {
mStreamType = VP8;
} else if (!strcmp(aConfig.mime_type, "video/webm; codecs=vp9")) {
mStreamType = VP9;
} else {
mStreamType = Unknown;
}
} }
WMFVideoMFTManager::~WMFVideoMFTManager() WMFVideoMFTManager::~WMFVideoMFTManager()
@ -57,6 +101,30 @@ WMFVideoMFTManager::~WMFVideoMFTManager()
DeleteOnMainThread(mDXVA2Manager); DeleteOnMainThread(mDXVA2Manager);
} }
// Returns the class id of the decoder MFT matching the stream's codec.
const GUID&
WMFVideoMFTManager::GetMFTGUID()
{
  MOZ_ASSERT(mStreamType != Unknown);
  if (mStreamType == H264) {
    return CLSID_CMSH264DecoderMFT;
  }
  if (mStreamType == VP8) {
    return CLSID_WebmMfVp8Dec;
  }
  if (mStreamType == VP9) {
    return CLSID_WebmMfVp9Dec;
  }
  return GUID_NULL;
}
// Returns the MF media subtype GUID matching the stream's codec.
const GUID&
WMFVideoMFTManager::GetMediaSubtypeGUID()
{
  MOZ_ASSERT(mStreamType != Unknown);
  if (mStreamType == H264) {
    return MFVideoFormat_H264;
  }
  if (mStreamType == VP8) {
    return MFVideoFormat_VP80;
  }
  if (mStreamType == VP9) {
    return MFVideoFormat_VP90;
  }
  return GUID_NULL;
}
class CreateDXVAManagerEvent : public nsRunnable { class CreateDXVAManagerEvent : public nsRunnable {
public: public:
NS_IMETHOD Run() { NS_IMETHOD Run() {
@ -95,7 +163,7 @@ WMFVideoMFTManager::Init()
RefPtr<MFTDecoder> decoder(new MFTDecoder()); RefPtr<MFTDecoder> decoder(new MFTDecoder());
HRESULT hr = decoder->Create(CLSID_CMSH264DecoderMFT); HRESULT hr = decoder->Create(GetMFTGUID());
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
if (useDxva) { if (useDxva) {
@ -126,7 +194,7 @@ WMFVideoMFTManager::Init()
hr = type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); hr = type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
hr = type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); hr = type->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID());
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
hr = type->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive); hr = type->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive);
@ -145,8 +213,10 @@ WMFVideoMFTManager::Init()
HRESULT HRESULT
WMFVideoMFTManager::Input(mp4_demuxer::MP4Sample* aSample) WMFVideoMFTManager::Input(mp4_demuxer::MP4Sample* aSample)
{ {
// We must prepare samples in AVC Annex B. if (mStreamType != VP8 && mStreamType != VP9) {
mp4_demuxer::AnnexB::ConvertSample(aSample); // We must prepare samples in AVC Annex B.
mp4_demuxer::AnnexB::ConvertSample(aSample);
}
// Forward sample data to the decoder. // Forward sample data to the decoder.
const uint8_t* data = reinterpret_cast<const uint8_t*>(aSample->data); const uint8_t* data = reinterpret_cast<const uint8_t*>(aSample->data);
uint32_t length = aSample->size; uint32_t length = aSample->size;

Просмотреть файл

@ -65,6 +65,18 @@ private:
const bool mDXVAEnabled; const bool mDXVAEnabled;
const layers::LayersBackend mLayersBackend; const layers::LayersBackend mLayersBackend;
bool mUseHwAccel; bool mUseHwAccel;
enum StreamType {
Unknown,
H264,
VP8,
VP9
};
StreamType mStreamType;
const GUID& GetMFTGUID();
const GUID& GetMediaSubtypeGUID();
}; };
} // namespace mozilla } // namespace mozilla

Просмотреть файл

@ -0,0 +1,464 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "IntelWebMVideoDecoder.h"
#include "gfx2DGlue.h"
#include "Layers.h"
#include "MediaResource.h"
#include "MediaTaskQueue.h"
#include "mozilla/dom/HTMLMediaElement.h"
#include "nsError.h"
#include "SharedThreadPool.h"
#include "WebMReader.h"
#include "VorbisUtils.h"
#include "nestegg/nestegg.h"
#define VPX_DONT_DEFINE_STDINT_TYPES
#include "vpx/vp8dx.h"
#include "vpx/vpx_decoder.h"
#undef LOG
#ifdef PR_LOGGING
PRLogModuleInfo* GetDemuxerLog();
#define LOG(...) PR_LOG(GetDemuxerLog(), PR_LOG_DEBUG, (__VA_ARGS__))
#else
#define LOG(...)
#endif
using namespace mp4_demuxer;
namespace mozilla {
using layers::Image;
using layers::LayerManager;
using layers::LayersBackend;
// A compressed VP8/VP9 frame wrapped in the MP4Sample interface so it
// can be fed to a PlatformDecoderModule. Takes a private copy of the
// frame data.
class VP8Sample : public MP4Sample
{
public:
  VP8Sample(int64_t aTimestamp,
            int64_t aDuration,
            int64_t aByteOffset,
            uint8_t* aData,
            size_t aSize,
            bool aSyncPoint)
  {
    // WebM packets carry no separate decode timestamp.
    decode_timestamp = -1;
    composition_timestamp = aTimestamp;
    duration = aDuration;
    byte_offset = aByteOffset;
    is_sync_point = aSyncPoint;
    data = new uint8_t[aSize];
    size = aSize;
    // The source buffer belongs to the demuxer and never overlaps our
    // fresh allocation, so memcpy suffices.
    memcpy(data, aData, size);
  }

  ~VP8Sample()
  {
    // |data| was allocated with new[], so it must be released with
    // delete[]; plain delete on an array is undefined behavior.
    // NOTE(review): confirm ~MP4Sample() does not also free |data|,
    // otherwise this would double-free.
    delete [] data;
  }
};
// Constructs the decoder in its idle state; all decode-progress
// counters and state flags start cleared. Init() must be called before
// any frames are decoded.
IntelWebMVideoDecoder::IntelWebMVideoDecoder(WebMReader* aReader)
  : WebMVideoDecoder()
  , mReader(aReader)
  , mMonitor("IntelWebMVideoDecoder")
  , mNumSamplesInput(0)
  , mNumSamplesOutput(0)
  , mDecodeAhead(2)
  , mInputExhausted(false)
  , mDrainComplete(false)
  , mError(false)
  , mEOS(false)
  , mIsFlushing(false)
{
  MOZ_COUNT_CTOR(IntelWebMVideoDecoder);
  // DecodeVideoFrame() computes a delta against this counter; it must
  // start at zero or the first report reads an indeterminate value.
  mLastReportedNumDecodedFrames = 0;
}
IntelWebMVideoDecoder::~IntelWebMVideoDecoder()
{
  MOZ_COUNT_DTOR(IntelWebMVideoDecoder);
  // Shutdown() is safe to call even if it has already been invoked
  // externally: it null-checks mMediaDataDecoder before touching it.
  Shutdown();
}
// Tears down the platform decoder and releases all references this
// object holds. Idempotent: a second call is a no-op for the decoder
// and merely re-nulls the remaining members.
void
IntelWebMVideoDecoder::Shutdown()
{
  if (mMediaDataDecoder) {
    // Flush first so no in-flight output lands after Shutdown().
    Flush();
    mMediaDataDecoder->Shutdown();
    mMediaDataDecoder = nullptr;
  }
  mTaskQueue = nullptr;
  mQueuedVideoSample = nullptr;
  // Drop the back-reference to the reader to break the ownership cycle.
  mReader = nullptr;
}
// Factory: builds an IntelWebMVideoDecoder bound to aReader's task
// queue. Returns nullptr (with a warning) if the reader has no queue.
/* static */
WebMVideoDecoder*
IntelWebMVideoDecoder::Create(WebMReader* aReader)
{
  nsAutoPtr<IntelWebMVideoDecoder> result(new IntelWebMVideoDecoder(aReader));
  result->mTaskQueue = aReader->GetTaskQueue();
  NS_ENSURE_TRUE(result->mTaskQueue, nullptr);
  return result.forget();
}
bool
IntelWebMVideoDecoder::IsSupportedVideoMimeType(const char* aMimeType)
{
  // Only the WebM VP8/VP9 types are candidates, and the platform
  // decoder module must also report support for the type.
  const bool isWebMVPx =
    !strcmp(aMimeType, "video/webm; codecs=vp8") ||
    !strcmp(aMimeType, "video/webm; codecs=vp9");
  return isWebMVPx && mPlatform->SupportsVideoMimeType(aMimeType);
}
// Creates the platform decoder module and a video decoder configured
// for the reader's codec (VP8 or VP9) at the given display size.
// Returns NS_ERROR_FAILURE if no module, codec, or decoder is available.
nsresult
IntelWebMVideoDecoder::Init(unsigned int aWidth, unsigned int aHeight)
{
  mPlatform = PlatformDecoderModule::Create();
  if (!mPlatform) {
    return NS_ERROR_FAILURE;
  }

  mDecoderConfig = new VideoDecoderConfig();
  mDecoderConfig->duration = 0;
  mDecoderConfig->display_width = aWidth;
  mDecoderConfig->display_height = aHeight;

  // Map the nestegg codec id onto the MIME type the module understands.
  const int codec = mReader->GetVideoCodec();
  if (codec == NESTEGG_CODEC_VP8) {
    mDecoderConfig->mime_type = "video/webm; codecs=vp8";
  } else if (codec == NESTEGG_CODEC_VP9) {
    mDecoderConfig->mime_type = "video/webm; codecs=vp9";
  } else {
    return NS_ERROR_FAILURE;
  }

  const VideoDecoderConfig& config = *mDecoderConfig;
  if (!IsSupportedVideoMimeType(config.mime_type)) {
    return NS_ERROR_FAILURE;
  }

  // This object receives decoder callbacks (Output/InputExhausted/...).
  mMediaDataDecoder =
    mPlatform->CreateVideoDecoder(config,
                                  mReader->GetLayersBackendType(),
                                  mReader->GetDecoder()->GetImageContainer(),
                                  mTaskQueue,
                                  this);
  if (!mMediaDataDecoder) {
    return NS_ERROR_FAILURE;
  }
  nsresult rv = mMediaDataDecoder->Init();
  NS_ENSURE_SUCCESS(rv, rv);
  return NS_OK;
}
// Pulls the next video packet from the WebM demuxer and wraps it in a
// VP8Sample. Returns false on end of stream or demuxer error.
// NOTE(review): *aEOS is never written by this function, so callers
// cannot distinguish EOS from error via that flag — confirm intended.
bool
IntelWebMVideoDecoder::Demux(nsAutoPtr<VP8Sample>& aSample, bool* aEOS)
{
  nsAutoRef<NesteggPacketHolder> holder(mReader->NextPacket(WebMReader::VIDEO));
  if (!holder) {
    return false;
  }

  nestegg_packet* packet = holder->mPacket;
  unsigned int track = 0;
  int r = nestegg_packet_track(packet, &track);
  if (r == -1) {
    return false;
  }

  unsigned int count = 0;
  r = nestegg_packet_count(packet, &count);
  if (r == -1) {
    return false;
  }

  uint64_t tstamp = 0;
  r = nestegg_packet_tstamp(packet, &tstamp);
  if (r == -1) {
    return false;
  }

  // The end time of this frame is the start time of the next frame.  Fetch
  // the timestamp of the next packet for this track.  If we've reached the
  // end of the resource, use the file's duration as the end time of this
  // video frame.
  uint64_t next_tstamp = 0;
  nsAutoRef<NesteggPacketHolder> next_holder(mReader->NextPacket(WebMReader::VIDEO));
  if (next_holder) {
    r = nestegg_packet_tstamp(next_holder->mPacket, &next_tstamp);
    if (r == -1) {
      return false;
    }
    // Put the peeked packet back so the next Demux() call sees it.
    mReader->PushVideoPacket(next_holder.disown());
  } else {
    // No next packet: extrapolate a duration from the previous frame gap.
    next_tstamp = tstamp;
    next_tstamp += tstamp - mReader->GetLastVideoFrameTime();
  }
  mReader->SetLastVideoFrameTime(tstamp);

  int64_t tstamp_usecs = tstamp / NS_PER_USEC;
  // NOTE(review): if a packet holds more than one data fragment, each
  // iteration overwrites aSample, so only the LAST fragment survives —
  // confirm video packets always have count == 1 here.
  for (uint32_t i = 0; i < count; ++i) {
    unsigned char* data;
    size_t length;
    r = nestegg_packet_data(packet, i, &data, &length);
    if (r == -1) {
      return false;
    }

    // Peek the bitstream header so we can flag keyframes (si.is_kf).
    vpx_codec_stream_info_t si;
    memset(&si, 0, sizeof(si));
    si.sz = sizeof(si);
    if (mReader->GetVideoCodec() == NESTEGG_CODEC_VP8) {
      vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), data, length, &si);
    } else if (mReader->GetVideoCodec() == NESTEGG_CODEC_VP9) {
      vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), data, length, &si);
    }

    MOZ_ASSERT(mPlatform && mMediaDataDecoder);

    // VP8Sample copies |data|; the packet buffer stays with nestegg.
    aSample = new VP8Sample(tstamp_usecs,
                            (next_tstamp/NS_PER_USEC) - tstamp_usecs,
                            0,
                            data,
                            length,
                            si.is_kf);
  }

  return true;
}
// Feeds compressed samples to the platform decoder until at least one
// new decoded frame is produced (Output() bumps mNumSamplesOutput), EOS
// is reached, or an error occurs. Returns true if more frames may come.
// Locking is done manually (not RAII) because the monitor must be
// released around PopSample() and decoder calls, which can block or
// re-enter via the callback methods.
bool
IntelWebMVideoDecoder::Decode()
{
  MOZ_ASSERT(mMediaDataDecoder);

  mMonitor.Lock();
  uint64_t prevNumFramesOutput = mNumSamplesOutput;
  while (prevNumFramesOutput == mNumSamplesOutput) {
    mMonitor.AssertCurrentThreadOwns();
    if (mError) {
      // Decode error!
      mMonitor.Unlock();
      return false;
    }
    // Keep the decoder supplied: push input while it is starved
    // (mInputExhausted) or we are under the look-ahead budget.
    while (prevNumFramesOutput == mNumSamplesOutput &&
           (mInputExhausted ||
            (mNumSamplesInput - mNumSamplesOutput) < mDecodeAhead) &&
           !mEOS) {
      mMonitor.AssertCurrentThreadOwns();
      // Drop the monitor: PopSample()/Demux() must not run under it.
      mMonitor.Unlock();
      nsAutoPtr<VP8Sample> compressed(PopSample());
      if (!compressed) {
        // EOS, or error. Let the state machine know there are no more
        // frames coming.
        LOG("Draining Video");
        mMonitor.Lock();
        MOZ_ASSERT(!mEOS);
        mEOS = true;
        MOZ_ASSERT(!mDrainComplete);
        mDrainComplete = false;
        mMonitor.Unlock();
        // Ask the decoder to emit whatever it has buffered; completion
        // is signalled asynchronously via DrainComplete().
        mMediaDataDecoder->Drain();
      } else {
#ifdef LOG_SAMPLE_DECODE
        // NOTE(review): aTrack is not defined in this scope; this line
        // would not compile with LOG_SAMPLE_DECODE enabled — verify.
        LOG("PopSample %s time=%lld dur=%lld", TrackTypeToStr(aTrack),
            compressed->composition_timestamp, compressed->duration);
#endif
        mMonitor.Lock();
        mDrainComplete = false;
        mInputExhausted = false;
        mNumSamplesInput++;
        mMonitor.Unlock();
        if (NS_FAILED(mMediaDataDecoder->Input(compressed))) {
          return false;
        }
        // If Input() failed, we let the auto pointer delete |compressed|.
        // Otherwise, we assume the decoder will delete it when it's finished
        // with it.
        compressed.forget();
      }
      mMonitor.Lock();
    }
    mMonitor.AssertCurrentThreadOwns();
    // Block until a callback (Output/InputExhausted/DrainComplete/Error)
    // changes state.
    while (!mError &&
           prevNumFramesOutput == mNumSamplesOutput &&
           (!mInputExhausted || mEOS) &&
           !mDrainComplete) {
      mMonitor.Wait();
    }
    if (mError ||
        (mEOS && mDrainComplete)) {
      break;
    }
  }
  mMonitor.AssertCurrentThreadOwns();
  bool rv = !(mEOS || mError);
  mMonitor.Unlock();
  return rv;
}
// Discards demuxed frames until the first keyframe at or past
// aTimeThreshold, which is queued for the regular decode path. aParsed
// counts every frame examined. Returns false on EOS or demux error.
bool
IntelWebMVideoDecoder::SkipVideoDemuxToNextKeyFrame(int64_t aTimeThreshold, uint32_t& aParsed)
{
  MOZ_ASSERT(mReader->GetDecoder());

  Flush();

  for (;;) {
    nsAutoPtr<VP8Sample> sample(PopSample());
    if (!sample) {
      // EOS, or error. Let the state machine know.
      return false;
    }
    aParsed++;
    if (sample->is_sync_point &&
        sample->composition_timestamp >= aTimeThreshold) {
      // Found it: hand this keyframe back via the queued-sample slot.
      mQueuedVideoSample = sample;
      return true;
    }
  }
}
bool
IntelWebMVideoDecoder::DecodeVideoFrame(bool& aKeyframeSkip,
int64_t aTimeThreshold)
{
uint32_t parsed = 0, decoded = 0;
AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mReader->GetDecoder(), parsed, decoded);
MOZ_ASSERT(mPlatform && mReader->GetDecoder());
if (aKeyframeSkip) {
bool ok = SkipVideoDemuxToNextKeyFrame(aTimeThreshold, parsed);
if (!ok) {
NS_WARNING("Failed to skip demux up to next keyframe");
return false;
}
aKeyframeSkip = false;
nsresult rv = mMediaDataDecoder->Flush();
NS_ENSURE_SUCCESS(rv, false);
}
NS_ASSERTION(mReader->GetDecoder()->OnDecodeThread(), "Should be on decode thread.");
bool rv = Decode();
{
// Report the number of "decoded" frames as the difference in the
// mNumSamplesOutput field since the last time we were called.
MonitorAutoLock mon(mMonitor);
uint64_t delta = mNumSamplesOutput - mLastReportedNumDecodedFrames;
decoded = static_cast<uint32_t>(delta);
mLastReportedNumDecodedFrames = mNumSamplesOutput;
}
return rv;
}
// Returns the next compressed sample, transferring ownership to the
// caller, or nullptr on EOS/error. A sample queued by
// SkipVideoDemuxToNextKeyFrame() takes priority over fresh demuxing.
VP8Sample*
IntelWebMVideoDecoder::PopSample()
{
  if (mQueuedVideoSample) {
    return mQueuedVideoSample.forget();
  }

  // Demux until the queue has at least one sample. |demuxed| is named
  // distinctly from the result below to avoid variable shadowing.
  while (mSampleQueue.empty()) {
    nsAutoPtr<VP8Sample> demuxed;
    bool eos = false;
    bool ok = Demux(demuxed, &eos);
    if (!ok || eos) {
      MOZ_ASSERT(!demuxed);
      return nullptr;
    }
    MOZ_ASSERT(demuxed);
    mSampleQueue.push_back(demuxed.forget());
  }

  MOZ_ASSERT(!mSampleQueue.empty());
  VP8Sample* sample = mSampleQueue.front();
  mSampleQueue.pop_front();
  return sample;
}
// MediaDataDecoderCallback: a decoded frame is ready. Pushes it onto
// the reader's video queue and wakes Decode(), unless a flush is in
// progress, in which case the frame is discarded.
void
IntelWebMVideoDecoder::Output(MediaData* aSample)
{
#ifdef LOG_SAMPLE_DECODE
  LOG("Decoded video sample time=%lld dur=%lld",
      aSample->mTime, aSample->mDuration);
#endif

  MonitorAutoLock lock(mMonitor);
  if (mIsFlushing) {
    // Don't accept output while we're flushing, but still wake any
    // waiter so it can re-examine state.
    lock.NotifyAll();
    return;
  }

  MOZ_ASSERT(aSample->mType == MediaData::VIDEO_DATA);
  mReader->VideoQueue().Push(static_cast<VideoData*>(aSample));
  mNumSamplesOutput++;
  lock.NotifyAll();
}
void
IntelWebMVideoDecoder::DrainComplete()
{
MonitorAutoLock mon(mMonitor);
mDrainComplete = true;
mon.NotifyAll();
}
void
IntelWebMVideoDecoder::InputExhausted()
{
MonitorAutoLock mon(mMonitor);
mInputExhausted = true;
mon.NotifyAll();
}
void
IntelWebMVideoDecoder::Error()
{
MonitorAutoLock mon(mMonitor);
mError = true;
mon.NotifyAll();
}
// Purges the platform decoder's buffered state. While the flush runs,
// mIsFlushing makes Output() drop any in-flight decoded frames.
nsresult
IntelWebMVideoDecoder::Flush()
{
  if (!mReader->GetDecoder()) {
    return NS_ERROR_FAILURE;
  }

  {
    // Raise the flag (and reset drain/EOS state) before touching the
    // decoder so no stale output slips through.
    MonitorAutoLock lock(mMonitor);
    mIsFlushing = true;
    mDrainComplete = false;
    mEOS = false;
  }
  mMediaDataDecoder->Flush();
  {
    MonitorAutoLock lock(mMonitor);
    mIsFlushing = false;
  }
  return NS_OK;
}
} // namespace mozilla

Просмотреть файл

@ -0,0 +1,91 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(IntelWebMVideoDecoder_h_)
#define IntelWebMVideoDecoder_h_
#include <stdint.h>
#include "WebMReader.h"
#include "nsAutoPtr.h"
#include "PlatformDecoderModule.h"
#include "mozilla/Monitor.h"
#include "mp4_demuxer/mp4_demuxer.h"
#include "mp4_demuxer/DecoderData.h"
class MediaTaskQueue;
namespace mozilla {
class VP8Sample;
typedef std::deque<VP8Sample*> VP8SampleQueue;
// WebM video decoder that forwards VP8 samples to a platform decoder
// (PlatformDecoderModule/MFT backend) instead of decoding in software.
// Also serves as the callback target for the asynchronous platform decoder.
class IntelWebMVideoDecoder : public WebMVideoDecoder, public MediaDataDecoderCallback
{
public:
  // Factory; ownership of the returned decoder passes to the caller.
  static WebMVideoDecoder* Create(WebMReader* aReader);

  // WebMVideoDecoder implementation.
  virtual nsresult Init(unsigned int aWidth, unsigned int aHeight) MOZ_OVERRIDE;
  virtual nsresult Flush() MOZ_OVERRIDE;
  virtual void Shutdown() MOZ_OVERRIDE;
  virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                int64_t aTimeThreshold) MOZ_OVERRIDE;

  // MediaDataDecoderCallback implementation; invoked by the platform
  // decoder, potentially from another thread.
  virtual void Output(MediaData* aSample) MOZ_OVERRIDE;
  virtual void DrainComplete() MOZ_OVERRIDE;
  virtual void InputExhausted() MOZ_OVERRIDE;
  virtual void Error() MOZ_OVERRIDE;

  IntelWebMVideoDecoder(WebMReader* aReader);
  ~IntelWebMVideoDecoder();

private:
  void InitLayersBackendType();

  bool Decode();
  bool Demux(nsAutoPtr<VP8Sample>& aSample, bool* aEOS);
  bool SkipVideoDemuxToNextKeyFrame(int64_t aTimeThreshold, uint32_t& parsed);
  bool IsSupportedVideoMimeType(const char* aMimeType);
  // Returns the next demuxed sample, or nullptr on EOS/error; caller owns it.
  VP8Sample* PopSample();

  nsRefPtr<WebMReader> mReader;
  nsAutoPtr<PlatformDecoderModule> mPlatform;
  nsRefPtr<MediaDataDecoder> mMediaDataDecoder;

  // TaskQueue on which decoder can choose to decode.
  // Only non-null up until the decoder is created.
  nsRefPtr<MediaTaskQueue> mTaskQueue;

  // Monitor that protects all non-threadsafe state; the primitives
  // that follow.
  Monitor mMonitor;
  nsAutoPtr<mp4_demuxer::VideoDecoderConfig> mDecoderConfig;
  // Samples demuxed ahead of the decoder; drained by PopSample().
  VP8SampleQueue mSampleQueue;
  // A sample set aside (e.g. while skipping to a keyframe) to be returned
  // by the next PopSample() call.
  nsAutoPtr<VP8Sample> mQueuedVideoSample;
  // Counters used to report the number of decoded frames to the reader.
  uint64_t mNumSamplesInput;
  uint64_t mNumSamplesOutput;
  uint64_t mLastReportedNumDecodedFrames;
  uint32_t mDecodeAhead;

  // State signalled by the decoder callbacks above; protected by mMonitor.
  bool mInputExhausted;
  bool mDrainComplete;
  bool mError;
  bool mEOS;
  bool mIsFlushing;
};
} // namespace mozilla
#endif

Просмотреть файл

@ -0,0 +1,236 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "SoftwareWebMVideoDecoder.h"
#include "AbstractMediaDecoder.h"
#include "gfx2DGlue.h"
#include "MediaDecoderStateMachine.h"
#include "MediaResource.h"
#include "mozilla/dom/TimeRanges.h"
#include "nsError.h"
#include "OggReader.h"
#include "VorbisUtils.h"
#include "WebMBufferedParser.h"
#include "WebMReader.h"
#include <algorithm>
#define VPX_DONT_DEFINE_STDINT_TYPES
#include "vpx/vp8dx.h"
#include "vpx/vpx_decoder.h"
static const unsigned NS_PER_USEC = 1000;
static const unsigned NS_PER_S = 1e9;
namespace mozilla {
using namespace gfx;
using namespace layers;
SoftwareWebMVideoDecoder::SoftwareWebMVideoDecoder(WebMReader* aReader)
  : WebMVideoDecoder(),
    mReader(aReader)
{
  MOZ_COUNT_CTOR(SoftwareWebMVideoDecoder);
  // Zero the libvpx context so vpx_codec_destroy() in Shutdown() is safe
  // even if Init() was never called.
  memset(&mVPX, 0, sizeof(vpx_codec_ctx_t));
}
SoftwareWebMVideoDecoder::~SoftwareWebMVideoDecoder()
{
  // NOTE(review): the vpx context is released in Shutdown(), not here —
  // confirm Shutdown() is always called before destruction.
  MOZ_COUNT_DTOR(SoftwareWebMVideoDecoder);
}
// Releases the libvpx decoder context and drops the reference to the reader.
void
SoftwareWebMVideoDecoder::Shutdown()
{
  vpx_codec_destroy(&mVPX);
  mReader = nullptr;
}
/* static */
WebMVideoDecoder*
SoftwareWebMVideoDecoder::Create(WebMReader* aReader)
{
  // Factory entry point for the libvpx-based software decode path;
  // ownership of the returned decoder passes to the caller.
  SoftwareWebMVideoDecoder* decoder = new SoftwareWebMVideoDecoder(aReader);
  return decoder;
}
// Initializes the libvpx decoder context for the track's codec (VP8 or
// VP9). The width/height arguments are unused by the software path.
// Returns NS_ERROR_FAILURE for an unknown codec or if libvpx init fails.
nsresult
SoftwareWebMVideoDecoder::Init(unsigned int aWidth, unsigned int aHeight)
{
  const int codec = mReader->GetVideoCodec();
  vpx_codec_iface_t* iface = nullptr;
  if (codec == NESTEGG_CODEC_VP8) {
    iface = vpx_codec_vp8_dx();
  } else if (codec == NESTEGG_CODEC_VP9) {
    iface = vpx_codec_vp9_dx();
  }

  if (!iface || vpx_codec_dec_init(&mVPX, iface, nullptr, 0)) {
    return NS_ERROR_FAILURE;
  }
  return NS_OK;
}
// Decodes the next WebM video packet with libvpx and pushes the resulting
// frame(s) onto the reader's video queue. Returns false on demux/decode
// failure or end of stream, true otherwise. When aKeyframeSkip is set,
// non-keyframe chunks before aTimeThreshold (usecs) are discarded; the flag
// is cleared once a keyframe is seen.
bool
SoftwareWebMVideoDecoder::DecodeVideoFrame(bool &aKeyframeSkip,
                                           int64_t aTimeThreshold)
{
  NS_ASSERTION(mReader->GetDecoder()->OnDecodeThread(),
               "Should be on decode thread.");

  // Record number of frames decoded and parsed. Automatically update the
  // stats counters using the AutoNotifyDecoded stack-based class.
  uint32_t parsed = 0, decoded = 0;
  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mReader->GetDecoder(),
                                                     parsed, decoded);

  nsAutoRef<NesteggPacketHolder> holder(mReader->NextPacket(WebMReader::VIDEO));
  if (!holder) {
    return false;
  }

  nestegg_packet* packet = holder->mPacket;
  unsigned int track = 0;
  int r = nestegg_packet_track(packet, &track);
  if (r == -1) {
    return false;
  }

  unsigned int count = 0;
  r = nestegg_packet_count(packet, &count);
  if (r == -1) {
    return false;
  }

  uint64_t tstamp = 0;
  r = nestegg_packet_tstamp(packet, &tstamp);
  if (r == -1) {
    return false;
  }

  // The end time of this frame is the start time of the next frame. Fetch
  // the timestamp of the next packet for this track. If we've reached the
  // end of the resource, use the file's duration as the end time of this
  // video frame.
  uint64_t next_tstamp = 0;
  nsAutoRef<NesteggPacketHolder> next_holder(mReader->NextPacket(WebMReader::VIDEO));
  if (next_holder) {
    r = nestegg_packet_tstamp(next_holder->mPacket, &next_tstamp);
    if (r == -1) {
      return false;
    }
    // Only peeked at for its timestamp; push it back so the next call
    // decodes it.
    mReader->PushVideoPacket(next_holder.disown());
  } else {
    // No next packet: extrapolate this frame's duration from the previous
    // inter-frame gap.
    next_tstamp = tstamp;
    next_tstamp += tstamp - mReader->GetLastVideoFrameTime();
  }
  mReader->SetLastVideoFrameTime(tstamp);

  int64_t tstamp_usecs = tstamp / NS_PER_USEC;
  for (uint32_t i = 0; i < count; ++i) {
    unsigned char* data;
    size_t length;
    r = nestegg_packet_data(packet, i, &data, &length);
    if (r == -1) {
      return false;
    }

    // Peek at the chunk so we know whether it is a keyframe before
    // spending time decoding it.
    vpx_codec_stream_info_t si;
    memset(&si, 0, sizeof(si));
    si.sz = sizeof(si);
    if (mReader->GetVideoCodec() == NESTEGG_CODEC_VP8) {
      vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), data, length, &si);
    } else if (mReader->GetVideoCodec() == NESTEGG_CODEC_VP9) {
      vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), data, length, &si);
    }
    if (aKeyframeSkip && (!si.is_kf || tstamp_usecs < aTimeThreshold)) {
      // Skipping to next keyframe...
      parsed++; // Assume 1 frame per chunk.
      continue;
    }

    if (aKeyframeSkip && si.is_kf) {
      aKeyframeSkip = false;
    }

    if (vpx_codec_decode(&mVPX, data, length, nullptr, 0)) {
      return false;
    }

    // If the timestamp of the video frame is less than
    // the time threshold required then it is not added
    // to the video queue and won't be displayed.
    if (tstamp_usecs < aTimeThreshold) {
      parsed++; // Assume 1 frame per chunk.
      continue;
    }

    vpx_codec_iter_t iter = nullptr;
    vpx_image_t *img;

    while ((img = vpx_codec_get_frame(&mVPX, &iter))) {
      NS_ASSERTION(img->fmt == VPX_IMG_FMT_I420, "WebM image format not I420");

      // Chroma shifts are rounded down as per the decoding examples in the SDK
      VideoData::YCbCrBuffer b;
      b.mPlanes[0].mData = img->planes[0];
      b.mPlanes[0].mStride = img->stride[0];
      b.mPlanes[0].mHeight = img->d_h;
      b.mPlanes[0].mWidth = img->d_w;
      b.mPlanes[0].mOffset = b.mPlanes[0].mSkip = 0;

      b.mPlanes[1].mData = img->planes[1];
      b.mPlanes[1].mStride = img->stride[1];
      b.mPlanes[1].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
      b.mPlanes[1].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
      b.mPlanes[1].mOffset = b.mPlanes[1].mSkip = 0;

      b.mPlanes[2].mData = img->planes[2];
      b.mPlanes[2].mStride = img->stride[2];
      b.mPlanes[2].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
      b.mPlanes[2].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
      b.mPlanes[2].mOffset = b.mPlanes[2].mSkip = 0;

      nsIntRect pictureRect = mReader->GetPicture();
      IntRect picture = ToIntRect(pictureRect);
      nsIntSize initFrame = mReader->GetInitialFrame();
      if (img->d_w != static_cast<uint32_t>(initFrame.width) ||
          img->d_h != static_cast<uint32_t>(initFrame.height)) {
        // Frame size is different from what the container reports. This is
        // legal in WebM, and we will preserve the ratio of the crop rectangle
        // as it was reported relative to the picture size reported by the
        // container.
        picture.x = (pictureRect.x * img->d_w) / initFrame.width;
        picture.y = (pictureRect.y * img->d_h) / initFrame.height;
        picture.width = (img->d_w * pictureRect.width) / initFrame.width;
        picture.height = (img->d_h * pictureRect.height) / initFrame.height;
      }

      VideoInfo videoInfo = mReader->GetMediaInfo().mVideo;
      nsRefPtr<VideoData> v = VideoData::Create(videoInfo,
                                                mReader->GetDecoder()->GetImageContainer(),
                                                holder->mOffset,
                                                tstamp_usecs,
                                                (next_tstamp / NS_PER_USEC) - tstamp_usecs,
                                                b,
                                                si.is_kf,
                                                -1,
                                                picture);
      if (!v) {
        return false;
      }
      parsed++;
      decoded++;
      NS_ASSERTION(decoded <= parsed,
                   "Expect only 1 frame per chunk per packet in WebM...");
      mReader->VideoQueue().Push(v);
    }
  }

  return true;
}
} // namespace mozilla

Просмотреть файл

@ -0,0 +1,39 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#if !defined(SoftwareWebMVideoDecoder_h_)
#define SoftwareWebMVideoDecoder_h_
#include <stdint.h>
#include "WebMReader.h"
namespace mozilla {
// WebM video decoder that decodes VP8/VP9 in software using libvpx.
class SoftwareWebMVideoDecoder : public WebMVideoDecoder
{
public:
  // Factory; ownership of the returned decoder passes to the caller.
  static WebMVideoDecoder* Create(WebMReader* aReader);

  virtual nsresult Init(unsigned int aWidth, unsigned int aHeight) MOZ_OVERRIDE;

  virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                int64_t aTimeThreshold) MOZ_OVERRIDE;

  virtual void Shutdown() MOZ_OVERRIDE;

  SoftwareWebMVideoDecoder(WebMReader* aReader);
  ~SoftwareWebMVideoDecoder();

private:
  // Reader that supplies demuxed packets; released in Shutdown().
  nsRefPtr<WebMReader> mReader;

  // VPx decoder state
  vpx_codec_ctx_t mVPX;
};
} // namespace mozilla
#endif

Просмотреть файл

@ -7,11 +7,15 @@
#include "MediaDecoderStateMachine.h" #include "MediaDecoderStateMachine.h"
#include "AbstractMediaDecoder.h" #include "AbstractMediaDecoder.h"
#include "MediaResource.h" #include "MediaResource.h"
#include "SoftwareWebMVideoDecoder.h"
#include "WebMReader.h" #include "WebMReader.h"
#include "WebMBufferedParser.h" #include "WebMBufferedParser.h"
#include "mozilla/dom/TimeRanges.h" #include "mozilla/dom/TimeRanges.h"
#include "VorbisUtils.h" #include "VorbisUtils.h"
#include "gfx2DGlue.h" #include "gfx2DGlue.h"
#include "Layers.h"
#include "mozilla/Preferences.h"
#include "SharedThreadPool.h"
#include <algorithm> #include <algorithm>
@ -21,19 +25,17 @@
#include "OggReader.h" #include "OggReader.h"
using mozilla::NesteggPacketHolder; // IntelWebMVideoDecoder uses the WMF backend, which is Windows Vista+ only.
#if defined(MOZ_FMP4) && defined(MOZ_WMF)
template <> #include "IntelWebMVideoDecoder.h"
class nsAutoRefTraits<NesteggPacketHolder> : #define MOZ_PDM_VPX 1
public nsPointerRefTraits<NesteggPacketHolder> #endif
{
public:
static void Release(NesteggPacketHolder* aHolder) { delete aHolder; }
};
// Un-comment to enable logging of seek bisections. // Un-comment to enable logging of seek bisections.
//#define SEEK_LOGGING //#define SEEK_LOGGING
#undef LOG
#ifdef PR_LOGGING #ifdef PR_LOGGING
#include "prprf.h" #include "prprf.h"
#define LOG(type, msg) PR_LOG(gMediaDecoderLog, type, msg) #define LOG(type, msg) PR_LOG(gMediaDecoderLog, type, msg)
@ -55,9 +57,6 @@ using namespace layers;
extern PRLogModuleInfo* gMediaDecoderLog; extern PRLogModuleInfo* gMediaDecoderLog;
PRLogModuleInfo* gNesteggLog; PRLogModuleInfo* gNesteggLog;
static const unsigned NS_PER_USEC = 1000;
static const double NS_PER_S = 1e9;
// Functions for reading and seeking using MediaResource required for // Functions for reading and seeking using MediaResource required for
// nestegg_io. The 'user data' passed to these functions is the // nestegg_io. The 'user data' passed to these functions is the
// decoder from which the media resource is obtained. // decoder from which the media resource is obtained.
@ -148,9 +147,9 @@ static void webm_log(nestegg * context,
#endif #endif
} }
ogg_packet ogg_packet InitOggPacket(const unsigned char* aData, size_t aLength,
InitOggPacket(const unsigned char* aData, size_t aLength, bool aBOS, bool aEOS, bool aBOS, bool aEOS,
int64_t aGranulepos, int64_t aPacketNo) int64_t aGranulepos, int64_t aPacketNo)
{ {
ogg_packet packet; ogg_packet packet;
packet.packet = const_cast<unsigned char*>(aData); packet.packet = const_cast<unsigned char*>(aData);
@ -162,6 +161,10 @@ InitOggPacket(const unsigned char* aData, size_t aLength, bool aBOS, bool aEOS,
return packet; return packet;
} }
#if defined(MOZ_PDM_VPX)
static bool sIsIntelDecoderEnabled = false;
#endif
WebMReader::WebMReader(AbstractMediaDecoder* aDecoder) WebMReader::WebMReader(AbstractMediaDecoder* aDecoder)
: MediaDecoderReader(aDecoder) : MediaDecoderReader(aDecoder)
, mContext(nullptr) , mContext(nullptr)
@ -178,6 +181,7 @@ WebMReader::WebMReader(AbstractMediaDecoder* aDecoder)
, mLastVideoFrameTime(0) , mLastVideoFrameTime(0)
, mAudioCodec(-1) , mAudioCodec(-1)
, mVideoCodec(-1) , mVideoCodec(-1)
, mLayersBackendType(layers::LayersBackend::LAYERS_NONE)
, mHasVideo(false) , mHasVideo(false)
, mHasAudio(false) , mHasAudio(false)
#ifdef MOZ_OPUS #ifdef MOZ_OPUS
@ -192,43 +196,66 @@ WebMReader::WebMReader(AbstractMediaDecoder* aDecoder)
#endif #endif
// Zero these member vars to avoid crashes in VP8 destroy and Vorbis clear // Zero these member vars to avoid crashes in VP8 destroy and Vorbis clear
// functions when destructor is called before |Init|. // functions when destructor is called before |Init|.
memset(&mVPX, 0, sizeof(vpx_codec_ctx_t));
memset(&mVorbisBlock, 0, sizeof(vorbis_block)); memset(&mVorbisBlock, 0, sizeof(vorbis_block));
memset(&mVorbisDsp, 0, sizeof(vorbis_dsp_state)); memset(&mVorbisDsp, 0, sizeof(vorbis_dsp_state));
memset(&mVorbisInfo, 0, sizeof(vorbis_info)); memset(&mVorbisInfo, 0, sizeof(vorbis_info));
memset(&mVorbisComment, 0, sizeof(vorbis_comment)); memset(&mVorbisComment, 0, sizeof(vorbis_comment));
#if defined(MOZ_PDM_VPX)
sIsIntelDecoderEnabled = Preferences::GetBool("media.webm.intel_decoder.enabled", false);
#endif
} }
WebMReader::~WebMReader() WebMReader::~WebMReader()
{ {
Cleanup(); Cleanup();
mVideoPackets.Reset(); mVideoPackets.Reset();
mAudioPackets.Reset(); mAudioPackets.Reset();
vpx_codec_destroy(&mVPX);
vorbis_block_clear(&mVorbisBlock); vorbis_block_clear(&mVorbisBlock);
vorbis_dsp_clear(&mVorbisDsp); vorbis_dsp_clear(&mVorbisDsp);
vorbis_info_clear(&mVorbisInfo); vorbis_info_clear(&mVorbisInfo);
vorbis_comment_clear(&mVorbisComment); vorbis_comment_clear(&mVorbisComment);
if (mOpusDecoder) { if (mOpusDecoder) {
opus_multistream_decoder_destroy(mOpusDecoder); opus_multistream_decoder_destroy(mOpusDecoder);
mOpusDecoder = nullptr; mOpusDecoder = nullptr;
} }
MOZ_ASSERT(!mVideoDecoder);
MOZ_COUNT_DTOR(WebMReader); MOZ_COUNT_DTOR(WebMReader);
} }
void WebMReader::Shutdown()
{
#if defined(MOZ_PDM_VPX)
if (mTaskQueue) {
mTaskQueue->Shutdown();
}
#endif
if (mVideoDecoder) {
mVideoDecoder->Shutdown();
mVideoDecoder = nullptr;
}
}
nsresult WebMReader::Init(MediaDecoderReader* aCloneDonor) nsresult WebMReader::Init(MediaDecoderReader* aCloneDonor)
{ {
vorbis_info_init(&mVorbisInfo); vorbis_info_init(&mVorbisInfo);
vorbis_comment_init(&mVorbisComment); vorbis_comment_init(&mVorbisComment);
memset(&mVorbisDsp, 0, sizeof(vorbis_dsp_state)); memset(&mVorbisDsp, 0, sizeof(vorbis_dsp_state));
memset(&mVorbisBlock, 0, sizeof(vorbis_block)); memset(&mVorbisBlock, 0, sizeof(vorbis_block));
#if defined(MOZ_PDM_VPX)
if (sIsIntelDecoderEnabled) {
PlatformDecoderModule::Init();
InitLayersBackendType();
mTaskQueue = new MediaTaskQueue(
SharedThreadPool::Get(NS_LITERAL_CSTRING("IntelVP8 Video Decode")));
NS_ENSURE_TRUE(mTaskQueue, NS_ERROR_FAILURE);
}
#endif
if (aCloneDonor) { if (aCloneDonor) {
mBufferedState = static_cast<WebMReader*>(aCloneDonor)->mBufferedState; mBufferedState = static_cast<WebMReader*>(aCloneDonor)->mBufferedState;
} else { } else {
@ -238,6 +265,31 @@ nsresult WebMReader::Init(MediaDecoderReader* aCloneDonor)
return NS_OK; return NS_OK;
} }
void WebMReader::InitLayersBackendType()
{
if (!IsVideoContentType(GetDecoder()->GetResource()->GetContentType())) {
// Not playing video, we don't care about the layers backend type.
return;
}
// Extract the layer manager backend type so that platform decoders
// can determine whether it's worthwhile using hardware accelerated
// video decoding.
MediaDecoderOwner* owner = mDecoder->GetOwner();
if (!owner) {
NS_WARNING("WebMReader without a decoder owner, can't get HWAccel");
return;
}
dom::HTMLMediaElement* element = owner->GetMediaElement();
NS_ENSURE_TRUE_VOID(element);
nsRefPtr<LayerManager> layerManager =
nsContentUtils::LayerManagerForDocument(element->OwnerDoc());
NS_ENSURE_TRUE_VOID(layerManager);
mLayersBackendType = layerManager->GetCompositorBackendType();
}
nsresult WebMReader::ResetDecode() nsresult WebMReader::ResetDecode()
{ {
mAudioFrames = 0; mAudioFrames = 0;
@ -315,7 +367,8 @@ nsresult WebMReader::ReadMetadata(MediaInfo* aInfo,
return NS_ERROR_FAILURE; return NS_ERROR_FAILURE;
} }
int type = nestegg_track_type(mContext, track); int type = nestegg_track_type(mContext, track);
if (!mHasVideo && type == NESTEGG_TRACK_VIDEO) { if (!mHasVideo && type == NESTEGG_TRACK_VIDEO &&
mDecoder->GetImageContainer()) {
nestegg_video_params params; nestegg_video_params params;
r = nestegg_track_video_params(mContext, track, &params); r = nestegg_track_video_params(mContext, track, &params);
if (r == -1) { if (r == -1) {
@ -323,14 +376,28 @@ nsresult WebMReader::ReadMetadata(MediaInfo* aInfo,
return NS_ERROR_FAILURE; return NS_ERROR_FAILURE;
} }
vpx_codec_iface_t* dx = nullptr;
mVideoCodec = nestegg_track_codec_id(mContext, track); mVideoCodec = nestegg_track_codec_id(mContext, track);
if (mVideoCodec == NESTEGG_CODEC_VP8) {
dx = vpx_codec_vp8_dx(); #if defined(MOZ_PDM_VPX)
} else if (mVideoCodec == NESTEGG_CODEC_VP9) { if (sIsIntelDecoderEnabled) {
dx = vpx_codec_vp9_dx(); mVideoDecoder = IntelWebMVideoDecoder::Create(this);
if (mVideoDecoder &&
NS_FAILED(mVideoDecoder->Init(params.display_width, params.display_height))) {
mVideoDecoder = nullptr;
}
} }
if (!dx || vpx_codec_dec_init(&mVPX, dx, nullptr, 0)) { #endif
// If there's no decoder yet (e.g. HW decoder not available), use the software decoder.
if (!mVideoDecoder) {
mVideoDecoder = SoftwareWebMVideoDecoder::Create(this);
if (mVideoDecoder &&
NS_FAILED(mVideoDecoder->Init(params.display_width, params.display_height))) {
mVideoDecoder = nullptr;
}
}
if (!mVideoDecoder) {
Cleanup(); Cleanup();
return NS_ERROR_FAILURE; return NS_ERROR_FAILURE;
} }
@ -348,8 +415,7 @@ nsresult WebMReader::ReadMetadata(MediaInfo* aInfo,
if (pictureRect.width <= 0 || if (pictureRect.width <= 0 ||
pictureRect.height <= 0 || pictureRect.height <= 0 ||
pictureRect.x < 0 || pictureRect.x < 0 ||
pictureRect.y < 0) pictureRect.y < 0) {
{
pictureRect.x = 0; pictureRect.x = 0;
pictureRect.y = 0; pictureRect.y = 0;
pictureRect.width = params.width; pictureRect.width = params.width;
@ -390,8 +456,7 @@ nsresult WebMReader::ReadMetadata(MediaInfo* aInfo,
mInfo.mVideo.mStereoMode = StereoMode::RIGHT_LEFT; mInfo.mVideo.mStereoMode = StereoMode::RIGHT_LEFT;
break; break;
} }
} } else if (!mHasAudio && type == NESTEGG_TRACK_AUDIO) {
else if (!mHasAudio && type == NESTEGG_TRACK_AUDIO) {
nestegg_audio_params params; nestegg_audio_params params;
r = nestegg_track_audio_params(mContext, track, &params); r = nestegg_track_audio_params(mContext, track, &params);
if (r == -1) { if (r == -1) {
@ -888,161 +953,12 @@ bool WebMReader::DecodeAudioData()
return DecodeAudioPacket(holder->mPacket, holder->mOffset); return DecodeAudioPacket(holder->mPacket, holder->mOffset);
} }
bool WebMReader::DecodeVideoFrame(bool &aKeyframeSkip, bool WebMReader::DecodeVideoFrame(bool &aKeyframeSkip, int64_t aTimeThreshold)
int64_t aTimeThreshold)
{ {
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread."); return mVideoDecoder->DecodeVideoFrame(aKeyframeSkip, aTimeThreshold);
// Record number of frames decoded and parsed. Automatically update the
// stats counters using the AutoNotifyDecoded stack-based class.
uint32_t parsed = 0, decoded = 0;
AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);
nsAutoRef<NesteggPacketHolder> holder(NextPacket(VIDEO));
if (!holder) {
return false;
}
nestegg_packet* packet = holder->mPacket;
unsigned int track = 0;
int r = nestegg_packet_track(packet, &track);
if (r == -1) {
return false;
}
unsigned int count = 0;
r = nestegg_packet_count(packet, &count);
if (r == -1) {
return false;
}
uint64_t tstamp = 0;
r = nestegg_packet_tstamp(packet, &tstamp);
if (r == -1) {
return false;
}
// The end time of this frame is the start time of the next frame. Fetch
// the timestamp of the next packet for this track. If we've reached the
// end of the resource, use the file's duration as the end time of this
// video frame.
uint64_t next_tstamp = 0;
nsAutoRef<NesteggPacketHolder> next_holder(NextPacket(VIDEO));
if (next_holder) {
r = nestegg_packet_tstamp(next_holder->mPacket, &next_tstamp);
if (r == -1) {
return false;
}
PushVideoPacket(next_holder.disown());
} else {
next_tstamp = tstamp;
next_tstamp += tstamp - mLastVideoFrameTime;
}
mLastVideoFrameTime = tstamp;
int64_t tstamp_usecs = tstamp / NS_PER_USEC;
for (uint32_t i = 0; i < count; ++i) {
unsigned char* data;
size_t length;
r = nestegg_packet_data(packet, i, &data, &length);
if (r == -1) {
return false;
}
vpx_codec_stream_info_t si;
memset(&si, 0, sizeof(si));
si.sz = sizeof(si);
if (mVideoCodec == NESTEGG_CODEC_VP8) {
vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), data, length, &si);
} else if (mVideoCodec == NESTEGG_CODEC_VP9) {
vpx_codec_peek_stream_info(vpx_codec_vp9_dx(), data, length, &si);
}
if (aKeyframeSkip && (!si.is_kf || tstamp_usecs < aTimeThreshold)) {
// Skipping to next keyframe...
parsed++; // Assume 1 frame per chunk.
continue;
}
if (aKeyframeSkip && si.is_kf) {
aKeyframeSkip = false;
}
if (vpx_codec_decode(&mVPX, data, length, nullptr, 0)) {
return false;
}
// If the timestamp of the video frame is less than
// the time threshold required then it is not added
// to the video queue and won't be displayed.
if (tstamp_usecs < aTimeThreshold) {
parsed++; // Assume 1 frame per chunk.
continue;
}
vpx_codec_iter_t iter = nullptr;
vpx_image_t *img;
while ((img = vpx_codec_get_frame(&mVPX, &iter))) {
NS_ASSERTION(img->fmt == VPX_IMG_FMT_I420, "WebM image format not I420");
// Chroma shifts are rounded down as per the decoding examples in the SDK
VideoData::YCbCrBuffer b;
b.mPlanes[0].mData = img->planes[0];
b.mPlanes[0].mStride = img->stride[0];
b.mPlanes[0].mHeight = img->d_h;
b.mPlanes[0].mWidth = img->d_w;
b.mPlanes[0].mOffset = b.mPlanes[0].mSkip = 0;
b.mPlanes[1].mData = img->planes[1];
b.mPlanes[1].mStride = img->stride[1];
b.mPlanes[1].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
b.mPlanes[1].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
b.mPlanes[1].mOffset = b.mPlanes[1].mSkip = 0;
b.mPlanes[2].mData = img->planes[2];
b.mPlanes[2].mStride = img->stride[2];
b.mPlanes[2].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
b.mPlanes[2].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
b.mPlanes[2].mOffset = b.mPlanes[2].mSkip = 0;
IntRect picture = ToIntRect(mPicture);
if (img->d_w != static_cast<uint32_t>(mInitialFrame.width) ||
img->d_h != static_cast<uint32_t>(mInitialFrame.height)) {
// Frame size is different from what the container reports. This is
// legal in WebM, and we will preserve the ratio of the crop rectangle
// as it was reported relative to the picture size reported by the
// container.
picture.x = (mPicture.x * img->d_w) / mInitialFrame.width;
picture.y = (mPicture.y * img->d_h) / mInitialFrame.height;
picture.width = (img->d_w * mPicture.width) / mInitialFrame.width;
picture.height = (img->d_h * mPicture.height) / mInitialFrame.height;
}
nsRefPtr<VideoData> v = VideoData::Create(mInfo.mVideo,
mDecoder->GetImageContainer(),
holder->mOffset,
tstamp_usecs,
(next_tstamp / NS_PER_USEC)-tstamp_usecs,
b,
si.is_kf,
-1,
picture);
if (!v) {
return false;
}
parsed++;
decoded++;
NS_ASSERTION(decoded <= parsed,
"Expect only 1 frame per chunk per packet in WebM...");
VideoQueue().Push(v);
}
}
return true;
} }
void void WebMReader::PushVideoPacket(NesteggPacketHolder* aItem)
WebMReader::PushVideoPacket(NesteggPacketHolder* aItem)
{ {
mVideoPackets.PushFront(aItem); mVideoPackets.PushFront(aItem);
} }
@ -1057,6 +973,8 @@ void WebMReader::Seek(int64_t aTarget, int64_t aStartTime, int64_t aEndTime,
nsresult WebMReader::SeekInternal(int64_t aTarget, int64_t aStartTime) nsresult WebMReader::SeekInternal(int64_t aTarget, int64_t aStartTime)
{ {
NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread."); NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
nsresult rv = mVideoDecoder->Flush();
NS_ENSURE_SUCCESS(rv, rv);
LOG(PR_LOG_DEBUG, ("Reader [%p] for Decoder [%p]: About to seek to %fs", LOG(PR_LOG_DEBUG, ("Reader [%p] for Decoder [%p]: About to seek to %fs",
this, mDecoder, double(aTarget) / USECS_PER_S)); this, mDecoder, double(aTarget) / USECS_PER_S));
@ -1163,4 +1081,29 @@ int64_t WebMReader::GetEvictionOffset(double aTime)
return offset; return offset;
} }
int WebMReader::GetVideoCodec()
{
return mVideoCodec;
}
nsIntRect WebMReader::GetPicture()
{
return mPicture;
}
nsIntSize WebMReader::GetInitialFrame()
{
return mInitialFrame;
}
uint64_t WebMReader::GetLastVideoFrameTime()
{
return mLastVideoFrameTime;
}
void WebMReader::SetLastVideoFrameTime(uint64_t aFrameTime)
{
mLastVideoFrameTime = aFrameTime;
}
} // namespace mozilla } // namespace mozilla

Просмотреть файл

@ -16,6 +16,8 @@
#define VPX_DONT_DEFINE_STDINT_TYPES #define VPX_DONT_DEFINE_STDINT_TYPES
#include "vpx/vpx_codec.h" #include "vpx/vpx_codec.h"
#include "mozilla/layers/LayersTypes.h"
#ifdef MOZ_TREMOR #ifdef MOZ_TREMOR
#include "tremor/ivorbiscodec.h" #include "tremor/ivorbiscodec.h"
#else #else
@ -26,10 +28,6 @@
#include "OpusParser.h" #include "OpusParser.h"
#endif #endif
namespace mozilla {
class WebMBufferedState;
// Holds a nestegg_packet, and its file offset. This is needed so we // Holds a nestegg_packet, and its file offset. This is needed so we
// know the offset in the file we've played up to, in order to calculate // know the offset in the file we've played up to, in order to calculate
// whether it's likely we can play through to the end without needing // whether it's likely we can play through to the end without needing
@ -55,6 +53,18 @@ private:
NesteggPacketHolder& operator= (NesteggPacketHolder const& aOther); NesteggPacketHolder& operator= (NesteggPacketHolder const& aOther);
}; };
template <>
class nsAutoRefTraits<NesteggPacketHolder> : public nsPointerRefTraits<NesteggPacketHolder>
{
public:
static void Release(NesteggPacketHolder* aHolder) { delete aHolder; }
};
namespace mozilla {
class WebMBufferedState;
static const unsigned NS_PER_USEC = 1000;
static const double NS_PER_S = 1e9;
// Thread and type safe wrapper around nsDeque. // Thread and type safe wrapper around nsDeque.
class PacketQueueDeallocator : public nsDequeFunctor { class PacketQueueDeallocator : public nsDequeFunctor {
virtual void* operator() (void* aObject) { virtual void* operator() (void* aObject) {
@ -101,6 +111,21 @@ class WebMPacketQueue : private nsDeque {
} }
}; };
class WebMReader;
// Class to handle various video decode paths
class WebMVideoDecoder
{
public:
virtual nsresult Init(unsigned int aWidth = 0, unsigned int aHeight = 0) = 0;
virtual nsresult Flush() { return NS_OK; }
virtual void Shutdown() = 0;
virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
int64_t aTimeThreshold) = 0;
WebMVideoDecoder() {}
virtual ~WebMVideoDecoder() {}
};
class WebMReader : public MediaDecoderReader class WebMReader : public MediaDecoderReader
{ {
public: public:
@ -110,15 +135,13 @@ protected:
~WebMReader(); ~WebMReader();
public: public:
virtual void Shutdown() MOZ_OVERRIDE;
virtual nsresult Init(MediaDecoderReader* aCloneDonor); virtual nsresult Init(MediaDecoderReader* aCloneDonor);
virtual nsresult ResetDecode(); virtual nsresult ResetDecode();
virtual bool DecodeAudioData(); virtual bool DecodeAudioData();
// If the Theora granulepos has not been captured, it may read several packets
// until one with a granulepos has been captured, to ensure that all packets
// read have valid time info.
virtual bool DecodeVideoFrame(bool &aKeyframeSkip, virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
int64_t aTimeThreshold); int64_t aTimeThreshold);
virtual bool HasAudio() virtual bool HasAudio()
{ {
@ -143,7 +166,6 @@ public:
virtual bool IsMediaSeekable() MOZ_OVERRIDE; virtual bool IsMediaSeekable() MOZ_OVERRIDE;
protected:
// Value passed to NextPacket to determine if we are reading a video or an // Value passed to NextPacket to determine if we are reading a video or an
// audio packet. // audio packet.
enum TrackType { enum TrackType {
@ -159,6 +181,15 @@ protected:
// Pushes a packet to the front of the video packet queue. // Pushes a packet to the front of the video packet queue.
virtual void PushVideoPacket(NesteggPacketHolder* aItem); virtual void PushVideoPacket(NesteggPacketHolder* aItem);
int GetVideoCodec();
nsIntRect GetPicture();
nsIntSize GetInitialFrame();
uint64_t GetLastVideoFrameTime();
void SetLastVideoFrameTime(uint64_t aFrameTime);
layers::LayersBackend GetLayersBackendType() { return mLayersBackendType; }
MediaTaskQueue* GetTaskQueue() { return mTaskQueue; }
protected:
#ifdef MOZ_OPUS #ifdef MOZ_OPUS
// Setup opus decoder // Setup opus decoder
bool InitOpusDecoder(); bool InitOpusDecoder();
@ -186,13 +217,16 @@ protected:
virtual nsresult SeekInternal(int64_t aTime, int64_t aStartTime); virtual nsresult SeekInternal(int64_t aTime, int64_t aStartTime);
// Initializes mLayersBackendType if possible.
void InitLayersBackendType();
private: private:
// libnestegg context for webm container. Access on state machine thread // libnestegg context for webm container. Access on state machine thread
// or decoder thread only. // or decoder thread only.
nestegg* mContext; nestegg* mContext;
// VP8 decoder state // The video decoder
vpx_codec_ctx_t mVPX; nsAutoPtr<WebMVideoDecoder> mVideoDecoder;
// Vorbis decoder state // Vorbis decoder state
vorbis_info mVorbisInfo; vorbis_info mVorbisInfo;
@ -247,6 +281,9 @@ private:
// Codec ID of video track // Codec ID of video track
int mVideoCodec; int mVideoCodec;
layers::LayersBackend mLayersBackendType;
nsRefPtr<MediaTaskQueue> mTaskQueue;
// Booleans to indicate if we have audio and/or video data // Booleans to indicate if we have audio and/or video data
bool mHasVideo; bool mHasVideo;
bool mHasAudio; bool mHasAudio;

Просмотреть файл

@ -5,17 +5,23 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/. # file, You can obtain one at http://mozilla.org/MPL/2.0/.
EXPORTS += [ EXPORTS += [
'IntelWebMVideoDecoder.h',
'SoftwareWebMVideoDecoder.h',
'WebMBufferedParser.h', 'WebMBufferedParser.h',
'WebMDecoder.h', 'WebMDecoder.h',
'WebMReader.h', 'WebMReader.h',
] ]
UNIFIED_SOURCES += [ UNIFIED_SOURCES += [
'SoftwareWebMVideoDecoder.cpp',
'WebMBufferedParser.cpp', 'WebMBufferedParser.cpp',
'WebMDecoder.cpp', 'WebMDecoder.cpp',
'WebMReader.cpp', 'WebMReader.cpp',
] ]
if CONFIG['MOZ_FMP4']:
UNIFIED_SOURCES += ['IntelWebMVideoDecoder.cpp']
if CONFIG['MOZ_WEBM_ENCODER']: if CONFIG['MOZ_WEBM_ENCODER']:
EXPORTS += ['WebMWriter.h'] EXPORTS += ['WebMWriter.h']
UNIFIED_SOURCES += ['EbmlComposer.cpp', UNIFIED_SOURCES += ['EbmlComposer.cpp',

Просмотреть файл

@ -284,6 +284,9 @@ pref("media.wave.enabled", true);
#endif #endif
#ifdef MOZ_WEBM #ifdef MOZ_WEBM
pref("media.webm.enabled", true); pref("media.webm.enabled", true);
#if defined(MOZ_FMP4) && defined(MOZ_WMF)
pref("media.webm.intel_decoder.enabled", false);
#endif
#endif #endif
#ifdef MOZ_GSTREAMER #ifdef MOZ_GSTREAMER
pref("media.gstreamer.enabled", true); pref("media.gstreamer.enabled", true);