2014-02-04 05:49:21 +04:00
|
|
|
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
|
|
|
|
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
|
|
|
|
/* This Source Code Form is subject to the terms of the Mozilla Public
|
|
|
|
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
|
|
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
|
|
|
#include "MediaData.h"
|
|
|
|
#include "MediaInfo.h"
|
|
|
|
#ifdef MOZ_OMX_DECODER
|
|
|
|
#include "GrallocImages.h"
|
2014-03-31 19:24:28 +04:00
|
|
|
#include "mozilla/layers/TextureClient.h"
|
2014-02-04 05:49:21 +04:00
|
|
|
#endif
|
|
|
|
#include "VideoUtils.h"
|
|
|
|
#include "ImageContainer.h"
|
|
|
|
|
2014-03-31 19:24:28 +04:00
|
|
|
#ifdef MOZ_WIDGET_GONK
|
|
|
|
#include <cutils/properties.h>
|
|
|
|
#endif
|
|
|
|
|
2014-02-04 05:49:21 +04:00
|
|
|
namespace mozilla {
|
|
|
|
|
|
|
|
using namespace mozilla::gfx;
|
|
|
|
using layers::ImageContainer;
|
|
|
|
using layers::PlanarYCbCrImage;
|
|
|
|
using layers::PlanarYCbCrData;
|
|
|
|
|
|
|
|
void
|
|
|
|
AudioData::EnsureAudioBuffer()
|
|
|
|
{
|
|
|
|
if (mAudioBuffer)
|
|
|
|
return;
|
|
|
|
mAudioBuffer = SharedBuffer::Create(mFrames*mChannels*sizeof(AudioDataValue));
|
|
|
|
|
|
|
|
AudioDataValue* data = static_cast<AudioDataValue*>(mAudioBuffer->Data());
|
|
|
|
for (uint32_t i = 0; i < mFrames; ++i) {
|
|
|
|
for (uint32_t j = 0; j < mChannels; ++j) {
|
|
|
|
data[j*mFrames + i] = mAudioData[i*mChannels + j];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-05-13 21:38:00 +04:00
|
|
|
// Memory reporter hook: accounts for this object, the interleaved sample
// storage, and (if EnsureAudioBuffer() has run) the planar shared buffer.
size_t
AudioData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
{
  size_t total = aMallocSizeOf(this) + aMallocSizeOf(mAudioData);
  // mAudioBuffer is only non-null after EnsureAudioBuffer() was called.
  return mAudioBuffer
       ? total + mAudioBuffer->SizeOfIncludingThis(aMallocSizeOf)
       : total;
}
|
|
|
|
|
2015-02-09 15:28:59 +03:00
|
|
|
/* static */
// Builds a new AudioData identical to aOther but with the supplied timestamp
// and duration. The sample buffer is transferred: aOther->mAudioData is
// nulled out afterwards (via forget()), so aOther must no longer be used to
// access audio samples. Returns nullptr when aOther is null.
already_AddRefed<AudioData>
AudioData::TransferAndUpdateTimestampAndDuration(AudioData* aOther,
                                                 int64_t aTimestamp,
                                                 int64_t aDuration)
{
  NS_ENSURE_TRUE(aOther, nullptr);
  nsRefPtr<AudioData> v = new AudioData(aOther->mOffset,
                                        aTimestamp,
                                        aDuration,
                                        aOther->mFrames,
                                        aOther->mAudioData,
                                        aOther->mChannels,
                                        aOther->mRate);
  v->mDiscontinuity = aOther->mDiscontinuity;
  // Remove aOther's AudioData as it can't be shared across two targets.
  aOther->mAudioData.forget();

  return v.forget();
}
|
|
|
|
|
2014-02-04 05:49:21 +04:00
|
|
|
static bool
|
|
|
|
ValidatePlane(const VideoData::YCbCrBuffer::Plane& aPlane)
|
|
|
|
{
|
|
|
|
return aPlane.mWidth <= PlanarYCbCrImage::MAX_DIMENSION &&
|
|
|
|
aPlane.mHeight <= PlanarYCbCrImage::MAX_DIMENSION &&
|
|
|
|
aPlane.mWidth * aPlane.mHeight < MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
|
|
|
|
aPlane.mStride > 0;
|
|
|
|
}
|
|
|
|
|
2014-02-18 18:50:20 +04:00
|
|
|
#ifdef MOZ_WIDGET_GONK
|
2014-02-04 05:49:21 +04:00
|
|
|
static bool
|
|
|
|
IsYV12Format(const VideoData::YCbCrBuffer::Plane& aYPlane,
|
|
|
|
const VideoData::YCbCrBuffer::Plane& aCbPlane,
|
|
|
|
const VideoData::YCbCrBuffer::Plane& aCrPlane)
|
|
|
|
{
|
|
|
|
return
|
|
|
|
aYPlane.mWidth % 2 == 0 &&
|
|
|
|
aYPlane.mHeight % 2 == 0 &&
|
|
|
|
aYPlane.mWidth / 2 == aCbPlane.mWidth &&
|
|
|
|
aYPlane.mHeight / 2 == aCbPlane.mHeight &&
|
|
|
|
aCbPlane.mWidth == aCrPlane.mWidth &&
|
|
|
|
aCbPlane.mHeight == aCrPlane.mHeight;
|
|
|
|
}
|
2014-03-31 19:24:28 +04:00
|
|
|
|
|
|
|
static bool
|
|
|
|
IsInEmulator()
|
|
|
|
{
|
|
|
|
char propQemu[PROPERTY_VALUE_MAX];
|
|
|
|
property_get("ro.kernel.qemu", propQemu, "");
|
|
|
|
return !strncmp(propQemu, "1", 1);
|
|
|
|
}
|
|
|
|
|
2014-02-04 05:49:21 +04:00
|
|
|
#endif
|
|
|
|
|
|
|
|
// Constructs a duplicate-marker frame: mDuplicate is set to true, mKeyframe
// to false, and no image or display size is attached (mDisplay stays
// default-constructed).
VideoData::VideoData(int64_t aOffset, int64_t aTime, int64_t aDuration, int64_t aTimecode)
  : MediaData(VIDEO_DATA, aOffset, aTime, aDuration),
    mTimecode(aTimecode),
    mDuplicate(true),
    mKeyframe(false)
{
  NS_ASSERTION(mDuration >= 0, "Frame must have non-negative duration.");
}
|
|
|
|
|
|
|
|
// Constructs a regular (non-duplicate) frame with the given timing, keyframe
// flag and display size. The image itself is attached separately by the
// caller (see the VideoData::Create overloads below).
VideoData::VideoData(int64_t aOffset,
                     int64_t aTime,
                     int64_t aDuration,
                     bool aKeyframe,
                     int64_t aTimecode,
                     IntSize aDisplay)
  : MediaData(VIDEO_DATA, aOffset, aTime, aDuration),
    mDisplay(aDisplay),
    mTimecode(aTimecode),
    mDuplicate(false),
    mKeyframe(aKeyframe)
{
  NS_ASSERTION(mDuration >= 0, "Frame must have non-negative duration.");
}
|
|
|
|
|
|
|
|
// Out-of-line empty destructor; members release their own resources.
VideoData::~VideoData()
{
}
|
|
|
|
|
2014-03-20 01:33:12 +04:00
|
|
|
// Memory reporter hook: accounts for this object plus its image, where the
// image's size can be determined.
size_t
VideoData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
{
  size_t total = aMallocSizeOf(this);

  // Only PLANAR_YCBCR images expose a well-defined size query; frames
  // carrying any other image format report just the object itself.
  if (mImage && mImage->GetFormat() == ImageFormat::PLANAR_YCBCR) {
    const mozilla::layers::PlanarYCbCrImage* ycbcr =
      static_cast<const mozilla::layers::PlanarYCbCrImage*>(mImage.get());
    total += ycbcr->SizeOfIncludingThis(aMallocSizeOf);
  }

  return total;
}
|
|
|
|
|
2014-02-04 05:49:21 +04:00
|
|
|
/* static */
|
2014-11-20 00:01:10 +03:00
|
|
|
already_AddRefed<VideoData>
|
|
|
|
VideoData::ShallowCopyUpdateDuration(VideoData* aOther,
|
|
|
|
int64_t aDuration)
|
2014-02-04 05:49:21 +04:00
|
|
|
{
|
2014-11-20 00:01:10 +03:00
|
|
|
nsRefPtr<VideoData> v = new VideoData(aOther->mOffset,
|
|
|
|
aOther->mTime,
|
|
|
|
aDuration,
|
|
|
|
aOther->mKeyframe,
|
|
|
|
aOther->mTimecode,
|
|
|
|
aOther->mDisplay);
|
2015-02-09 15:29:00 +03:00
|
|
|
v->mDiscontinuity = aOther->mDiscontinuity;
|
2014-04-01 07:39:04 +04:00
|
|
|
v->mImage = aOther->mImage;
|
2014-11-20 00:01:10 +03:00
|
|
|
return v.forget();
|
2014-04-01 07:39:04 +04:00
|
|
|
}
|
|
|
|
|
|
|
|
/* static */
|
2014-11-20 00:01:10 +03:00
|
|
|
already_AddRefed<VideoData>
|
|
|
|
VideoData::ShallowCopyUpdateTimestamp(VideoData* aOther,
|
|
|
|
int64_t aTimestamp)
|
2014-04-01 07:39:04 +04:00
|
|
|
{
|
|
|
|
NS_ENSURE_TRUE(aOther, nullptr);
|
2014-11-20 00:01:10 +03:00
|
|
|
nsRefPtr<VideoData> v = new VideoData(aOther->mOffset,
|
|
|
|
aTimestamp,
|
|
|
|
aOther->GetEndTime() - aTimestamp,
|
|
|
|
aOther->mKeyframe,
|
|
|
|
aOther->mTimecode,
|
|
|
|
aOther->mDisplay);
|
2015-02-09 15:29:00 +03:00
|
|
|
v->mDiscontinuity = aOther->mDiscontinuity;
|
2014-02-04 05:49:21 +04:00
|
|
|
v->mImage = aOther->mImage;
|
2014-11-20 00:01:10 +03:00
|
|
|
return v.forget();
|
2014-08-07 04:02:56 +04:00
|
|
|
}
|
|
|
|
|
|
|
|
/* static */
|
2014-11-20 00:01:10 +03:00
|
|
|
already_AddRefed<VideoData>
|
|
|
|
VideoData::ShallowCopyUpdateTimestampAndDuration(VideoData* aOther,
|
|
|
|
int64_t aTimestamp,
|
|
|
|
int64_t aDuration)
|
2014-08-07 04:02:56 +04:00
|
|
|
{
|
|
|
|
NS_ENSURE_TRUE(aOther, nullptr);
|
2014-11-20 00:01:10 +03:00
|
|
|
nsRefPtr<VideoData> v = new VideoData(aOther->mOffset,
|
|
|
|
aTimestamp,
|
|
|
|
aDuration,
|
|
|
|
aOther->mKeyframe,
|
|
|
|
aOther->mTimecode,
|
|
|
|
aOther->mDisplay);
|
2015-02-09 15:29:00 +03:00
|
|
|
v->mDiscontinuity = aOther->mDiscontinuity;
|
2014-08-07 04:02:56 +04:00
|
|
|
v->mImage = aOther->mImage;
|
2014-11-20 00:01:10 +03:00
|
|
|
return v.forget();
|
2014-02-04 05:49:21 +04:00
|
|
|
}
|
|
|
|
|
2014-02-18 18:50:20 +04:00
|
|
|
/* static */
// Translates our YCbCrBuffer plane description (plus picture rect and stereo
// mode from aInfo) into the layers PlanarYCbCrData form and hands it to
// aVideoImage. When aCopyData is true the pixels are copied into the image
// (SetData); otherwise the image keeps pointers to the caller's buffers
// (SetDataNoCopy) — NOTE(review): in that case the buffers presumably must
// outlive the image's use of them; confirm against the callers.
// No-op when aVideoImage is null.
void VideoData::SetVideoDataToImage(PlanarYCbCrImage* aVideoImage,
                                    VideoInfo& aInfo,
                                    const YCbCrBuffer &aBuffer,
                                    const IntRect& aPicture,
                                    bool aCopyData)
{
  if (!aVideoImage) {
    return;
  }
  const YCbCrBuffer::Plane &Y = aBuffer.mPlanes[0];
  const YCbCrBuffer::Plane &Cb = aBuffer.mPlanes[1];
  const YCbCrBuffer::Plane &Cr = aBuffer.mPlanes[2];

  PlanarYCbCrData data;
  // mOffset shifts each channel pointer to the first sample of its plane.
  data.mYChannel = Y.mData + Y.mOffset;
  data.mYSize = IntSize(Y.mWidth, Y.mHeight);
  data.mYStride = Y.mStride;
  data.mYSkip = Y.mSkip;
  data.mCbChannel = Cb.mData + Cb.mOffset;
  data.mCrChannel = Cr.mData + Cr.mOffset;
  // A single size/stride covers both chroma planes; VideoData::Create has
  // already rejected buffers where Cb and Cr dimensions differ.
  data.mCbCrSize = IntSize(Cb.mWidth, Cb.mHeight);
  data.mCbCrStride = Cb.mStride;
  data.mCbSkip = Cb.mSkip;
  data.mCrSkip = Cr.mSkip;
  data.mPicX = aPicture.x;
  data.mPicY = aPicture.y;
  data.mPicSize = aPicture.Size();
  data.mStereoMode = aInfo.mStereoMode;

  aVideoImage->SetDelayedConversion(true);
  if (aCopyData) {
    aVideoImage->SetData(data);
  } else {
    aVideoImage->SetDataNoCopy(data);
  }
}
|
|
|
|
|
|
|
|
/* static */
// Creates a video frame from a raw YCbCr buffer, or wraps aImage when one is
// supplied. When both aImage and aContainer are null, returns an image-less
// placeholder frame. Returns nullptr on invalid input (mismatched chroma
// planes, empty or overflowing picture rect, oversized planes) or when image
// allocation fails.
already_AddRefed<VideoData>
VideoData::Create(VideoInfo& aInfo,
                  ImageContainer* aContainer,
                  Image* aImage,
                  int64_t aOffset,
                  int64_t aTime,
                  int64_t aDuration,
                  const YCbCrBuffer& aBuffer,
                  bool aKeyframe,
                  int64_t aTimecode,
                  const IntRect& aPicture)
{
  if (!aImage && !aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    nsRefPtr<VideoData> v(new VideoData(aOffset,
                                        aTime,
                                        aDuration,
                                        aKeyframe,
                                        aTimecode,
                                        aInfo.mDisplay));
    return v.forget();
  }

  // The following situation should never happen unless there is a bug
  // in the decoder
  if (aBuffer.mPlanes[1].mWidth != aBuffer.mPlanes[2].mWidth ||
      aBuffer.mPlanes[1].mHeight != aBuffer.mPlanes[2].mHeight) {
    NS_ERROR("C planes with different sizes");
    return nullptr;
  }

  // The following situations could be triggered by invalid input
  if (aPicture.width <= 0 || aPicture.height <= 0) {
    // In debug mode, makes the error more noticeable
    MOZ_ASSERT(false, "Empty picture rect");
    return nullptr;
  }
  if (!ValidatePlane(aBuffer.mPlanes[0]) || !ValidatePlane(aBuffer.mPlanes[1]) ||
      !ValidatePlane(aBuffer.mPlanes[2])) {
    NS_WARNING("Invalid plane size");
    return nullptr;
  }

  // Ensure the picture size specified in the headers can be extracted out of
  // the frame we've been supplied without indexing out of bounds.
  // CheckedUint32 catches both negative coordinates and arithmetic overflow.
  CheckedUint32 xLimit = aPicture.x + CheckedUint32(aPicture.width);
  CheckedUint32 yLimit = aPicture.y + CheckedUint32(aPicture.height);
  if (!xLimit.isValid() || xLimit.value() > aBuffer.mPlanes[0].mStride ||
      !yLimit.isValid() || yLimit.value() > aBuffer.mPlanes[0].mHeight)
  {
    // The specified picture dimensions can't be contained inside the video
    // frame, we'll stomp memory if we try to copy it. Fail.
    NS_WARNING("Overflowing picture rect");
    return nullptr;
  }

  nsRefPtr<VideoData> v(new VideoData(aOffset,
                                      aTime,
                                      aDuration,
                                      aKeyframe,
                                      aTimecode,
                                      aInfo.mDisplay));
#ifdef MOZ_WIDGET_GONK
  const YCbCrBuffer::Plane &Y = aBuffer.mPlanes[0];
  const YCbCrBuffer::Plane &Cb = aBuffer.mPlanes[1];
  const YCbCrBuffer::Plane &Cr = aBuffer.mPlanes[2];
#endif

  if (!aImage) {
    // Currently our decoder only knows how to output to ImageFormat::PLANAR_YCBCR
    // format.
#ifdef MOZ_WIDGET_GONK
    // On Gonk, prefer a gralloc-backed image for YV12 data — but not in the
    // emulator (see IsInEmulator).
    if (IsYV12Format(Y, Cb, Cr) && !IsInEmulator()) {
      v->mImage = aContainer->CreateImage(ImageFormat::GRALLOC_PLANAR_YCBCR);
    }
#endif
    if (!v->mImage) {
      v->mImage = aContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
    }
  } else {
    v->mImage = aImage;
  }

  if (!v->mImage) {
    return nullptr;
  }
  NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::PLANAR_YCBCR ||
               v->mImage->GetFormat() == ImageFormat::GRALLOC_PLANAR_YCBCR,
               "Wrong format?");
  PlanarYCbCrImage* videoImage = static_cast<PlanarYCbCrImage*>(v->mImage.get());

  // Frames backed by our own freshly-allocated image copy the pixel data in;
  // a caller-supplied image keeps referencing its existing buffers.
  if (!aImage) {
    VideoData::SetVideoDataToImage(videoImage, aInfo, aBuffer, aPicture,
                                   true /* aCopyData */);
  } else {
    VideoData::SetVideoDataToImage(videoImage, aInfo, aBuffer, aPicture,
                                   false /* aCopyData */);
  }

#ifdef MOZ_WIDGET_GONK
  if (!videoImage->IsValid() && !aImage && IsYV12Format(Y, Cb, Cr)) {
    // Failed to allocate gralloc. Try fallback.
    v->mImage = aContainer->CreateImage(ImageFormat::PLANAR_YCBCR);
    if (!v->mImage) {
      return nullptr;
    }
    videoImage = static_cast<PlanarYCbCrImage*>(v->mImage.get());
    VideoData::SetVideoDataToImage(videoImage, aInfo, aBuffer, aPicture,
                                   true /* aCopyData */);
  }
#endif
  return v.forget();
}
|
|
|
|
|
2014-02-18 18:50:20 +04:00
|
|
|
/* static */
// Convenience overload: creates a frame from a YCbCr buffer, allocating the
// image from aContainer (no pre-existing Image is supplied).
already_AddRefed<VideoData>
VideoData::Create(VideoInfo& aInfo,
                  ImageContainer* aContainer,
                  int64_t aOffset,
                  int64_t aTime,
                  int64_t aDuration,
                  const YCbCrBuffer& aBuffer,
                  bool aKeyframe,
                  int64_t aTimecode,
                  const IntRect& aPicture)
{
  return Create(aInfo, aContainer, nullptr, aOffset, aTime, aDuration, aBuffer,
                aKeyframe, aTimecode, aPicture);
}
|
|
|
|
|
2014-02-18 18:50:20 +04:00
|
|
|
/* static */
// Convenience overload: creates a frame around an existing Image (no
// ImageContainer is supplied, so no new image is allocated).
already_AddRefed<VideoData>
VideoData::Create(VideoInfo& aInfo,
                  Image* aImage,
                  int64_t aOffset,
                  int64_t aTime,
                  int64_t aDuration,
                  const YCbCrBuffer& aBuffer,
                  bool aKeyframe,
                  int64_t aTimecode,
                  const IntRect& aPicture)
{
  return Create(aInfo, nullptr, aImage, aOffset, aTime, aDuration, aBuffer,
                aKeyframe, aTimecode, aPicture);
}
|
|
|
|
|
2014-02-18 18:50:20 +04:00
|
|
|
/* static */
|
2014-11-20 00:01:10 +03:00
|
|
|
already_AddRefed<VideoData>
|
|
|
|
VideoData::CreateFromImage(VideoInfo& aInfo,
|
|
|
|
ImageContainer* aContainer,
|
|
|
|
int64_t aOffset,
|
|
|
|
int64_t aTime,
|
|
|
|
int64_t aDuration,
|
|
|
|
const nsRefPtr<Image>& aImage,
|
|
|
|
bool aKeyframe,
|
|
|
|
int64_t aTimecode,
|
|
|
|
const IntRect& aPicture)
|
2014-02-04 05:49:21 +04:00
|
|
|
{
|
2014-11-20 00:01:10 +03:00
|
|
|
nsRefPtr<VideoData> v(new VideoData(aOffset,
|
|
|
|
aTime,
|
|
|
|
aDuration,
|
|
|
|
aKeyframe,
|
|
|
|
aTimecode,
|
2015-03-29 17:59:08 +03:00
|
|
|
aInfo.mDisplay));
|
2014-02-04 05:49:21 +04:00
|
|
|
v->mImage = aImage;
|
|
|
|
return v.forget();
|
|
|
|
}
|
|
|
|
|
|
|
|
#ifdef MOZ_OMX_DECODER
|
2014-02-18 18:50:20 +04:00
|
|
|
/* static */
// OMX/B2G path: creates a frame whose GrallocImage wraps the decoder's
// TextureClient buffer directly. Returns an image-less placeholder frame when
// aContainer is null, and nullptr for invalid picture rects or when image
// allocation fails.
already_AddRefed<VideoData>
VideoData::Create(VideoInfo& aInfo,
                  ImageContainer* aContainer,
                  int64_t aOffset,
                  int64_t aTime,
                  int64_t aDuration,
                  mozilla::layers::TextureClient* aBuffer,
                  bool aKeyframe,
                  int64_t aTimecode,
                  const IntRect& aPicture)
{
  if (!aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    nsRefPtr<VideoData> v(new VideoData(aOffset,
                                        aTime,
                                        aDuration,
                                        aKeyframe,
                                        aTimecode,
                                        aInfo.mDisplay));
    return v.forget();
  }

  // The following situations could be triggered by invalid input
  if (aPicture.width <= 0 || aPicture.height <= 0) {
    NS_WARNING("Empty picture rect");
    return nullptr;
  }

  // Ensure the picture size specified in the headers can be extracted out of
  // the frame we've been supplied without indexing out of bounds.
  // Unlike the YCbCr path we have no plane dimensions to compare against, so
  // only arithmetic overflow / negative coordinates are detectable here.
  CheckedUint32 xLimit = aPicture.x + CheckedUint32(aPicture.width);
  CheckedUint32 yLimit = aPicture.y + CheckedUint32(aPicture.height);
  if (!xLimit.isValid() || !yLimit.isValid())
  {
    // The specified picture dimensions can't be contained inside the video
    // frame, we'll stomp memory if we try to copy it. Fail.
    NS_WARNING("Overflowing picture rect");
    return nullptr;
  }

  nsRefPtr<VideoData> v(new VideoData(aOffset,
                                      aTime,
                                      aDuration,
                                      aKeyframe,
                                      aTimecode,
                                      aInfo.mDisplay));

  v->mImage = aContainer->CreateImage(ImageFormat::GRALLOC_PLANAR_YCBCR);
  if (!v->mImage) {
    return nullptr;
  }
  NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::GRALLOC_PLANAR_YCBCR,
               "Wrong format?");
  typedef mozilla::layers::GrallocImage GrallocImage;
  GrallocImage* videoImage = static_cast<GrallocImage*>(v->mImage.get());
  GrallocImage::GrallocData data;

  // Attach the caller's gralloc buffer to the image. NOTE(review): this
  // appears to reference the buffer rather than copy the planes — confirm
  // against GrallocImage::SetData.
  data.mPicSize = aPicture.Size();
  data.mGraphicBuffer = aBuffer;

  videoImage->SetData(data);

  return v.forget();
}
|
|
|
|
#endif // MOZ_OMX_DECODER
|
|
|
|
|
|
|
|
} // namespace mozilla
|