Bug 1199809 - Refactor: use RAII to help manage output buffer lifecycle. r=sotaro

This commit is contained in:
John Lin 2015-10-29 21:14:00 +01:00
Родитель c405c5b4c1
Коммит 7c5a9241cb
4 изменённых файлов: 51 добавлений и 50 удалений

Просмотреть файл

@ -155,25 +155,6 @@ GonkAudioDecoderManager::CreateAudioData(MediaBuffer* aBuffer, int64_t aStreamOf
return NS_OK;
}
// RAII guard that returns a decoder output MediaBuffer to its
// MediaCodecProxy when the guard goes out of scope, so early-return
// error paths cannot leak the buffer.
class AutoReleaseAudioBuffer
{
public:
  // aBuffer may be null (e.g. the decoder produced no output yet).
  // Holding an sp<> keeps the codec proxy alive for the guard's lifetime.
  AutoReleaseAudioBuffer(MediaBuffer* aBuffer, MediaCodecProxy* aCodecProxy)
    : mAudioBuffer(aBuffer)
    , mCodecProxy(aCodecProxy)
  {}
  ~AutoReleaseAudioBuffer()
  {
    if (mAudioBuffer) {
      mCodecProxy->ReleaseMediaBuffer(mAudioBuffer);
    }
  }
private:
  // Non-copyable: a copy would release the same buffer twice.
  AutoReleaseAudioBuffer(const AutoReleaseAudioBuffer&) = delete;
  AutoReleaseAudioBuffer& operator=(const AutoReleaseAudioBuffer&) = delete;

  MediaBuffer* mAudioBuffer;       // Owned output buffer; may be null.
  sp<MediaCodecProxy> mCodecProxy; // Decoder the buffer is returned to.
};
nsresult
GonkAudioDecoderManager::Output(int64_t aStreamOffset,
RefPtr<MediaData>& aOutData)
@ -187,7 +168,7 @@ GonkAudioDecoderManager::Output(int64_t aStreamOffset,
status_t err;
MediaBuffer* audioBuffer = nullptr;
err = mDecoder->Output(&audioBuffer, READ_OUTPUT_BUFFER_TIMEOUT_US);
AutoReleaseAudioBuffer a(audioBuffer, mDecoder.get());
AutoReleaseMediaBuffer a(audioBuffer, mDecoder.get());
switch (err) {
case OK:

Просмотреть файл

@ -11,6 +11,7 @@
namespace android {
struct ALooper;
class MediaBuffer;
class MediaCodecProxy;
} // namespace android
@ -121,6 +122,34 @@ protected:
MediaDataDecoderCallback* mDecodeCallback; // Reports decoder output or error.
};
// RAII guard that returns a MediaCodec output buffer to its owning
// MediaCodecProxy when the guard goes out of scope, unless ownership
// has been transferred away with forget().
class AutoReleaseMediaBuffer
{
public:
  // aBuffer may be null (e.g. the decoder produced no output yet).
  // Holding an sp<> keeps the codec proxy alive for the guard's lifetime.
  AutoReleaseMediaBuffer(android::MediaBuffer* aBuffer, android::MediaCodecProxy* aCodec)
    : mBuffer(aBuffer)
    , mCodec(aCodec)
  {}
  ~AutoReleaseMediaBuffer()
  {
    MOZ_ASSERT(mCodec.get());
    if (mBuffer) {
      mCodec->ReleaseMediaBuffer(mBuffer);
    }
  }
  // Relinquish ownership: the guard will no longer release the buffer,
  // and the caller becomes responsible for returning it to the decoder.
  android::MediaBuffer* forget()
  {
    android::MediaBuffer* tmp = mBuffer;
    mBuffer = nullptr;
    return tmp;
  }
private:
  // Non-copyable: a copy would release the same buffer twice.
  AutoReleaseMediaBuffer(const AutoReleaseMediaBuffer&) = delete;
  AutoReleaseMediaBuffer& operator=(const AutoReleaseMediaBuffer&) = delete;

  android::MediaBuffer* mBuffer;                // Owned output buffer; may be null.
  android::sp<android::MediaCodecProxy> mCodec; // Decoder the buffer is returned to.
};
// Samples are decoded using the GonkDecoder (MediaCodec)
// created by the GonkDecoderManager. This class implements
// the higher-level logic that drives mapping the Gonk to the async

Просмотреть файл

@ -137,39 +137,40 @@ GonkVideoDecoderManager::Init()
}
nsresult
GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
GonkVideoDecoderManager::CreateVideoData(MediaBuffer* aBuffer,
int64_t aStreamOffset,
VideoData **v)
{
*v = nullptr;
RefPtr<VideoData> data;
int64_t timeUs;
int32_t keyFrame;
if (mVideoBuffer == nullptr) {
if (aBuffer == nullptr) {
GVDM_LOG("Video Buffer is not valid!");
return NS_ERROR_UNEXPECTED;
}
if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
ReleaseVideoBuffer();
AutoReleaseMediaBuffer autoRelease(aBuffer, mDecoder.get());
if (!aBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
GVDM_LOG("Decoder did not return frame time");
return NS_ERROR_UNEXPECTED;
}
if (mLastTime > timeUs) {
ReleaseVideoBuffer();
GVDM_LOG("Output decoded sample time is revert. time=%lld", timeUs);
return NS_ERROR_NOT_AVAILABLE;
}
mLastTime = timeUs;
if (mVideoBuffer->range_length() == 0) {
if (aBuffer->range_length() == 0) {
// Some decoders may return spurious empty buffers that we just want to ignore
// quoted from Android's AwesomePlayer.cpp
ReleaseVideoBuffer();
return NS_ERROR_NOT_AVAILABLE;
}
if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
if (!aBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
keyFrame = 0;
}
@ -188,14 +189,15 @@ GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
RefPtr<mozilla::layers::TextureClient> textureClient;
if ((mVideoBuffer->graphicBuffer().get())) {
textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
if ((aBuffer->graphicBuffer().get())) {
textureClient = mNativeWindow->getTextureClientFromBuffer(aBuffer->graphicBuffer().get());
}
if (textureClient) {
GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
grallocClient->SetMediaBuffer(mVideoBuffer);
grallocClient->SetMediaBuffer(aBuffer);
textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);
autoRelease.forget(); // RecycleCallback will return it back to decoder.
data = VideoData::Create(mInfo.mVideo,
mImageContainer,
@ -208,11 +210,11 @@ GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
-1,
picture);
} else {
if (!mVideoBuffer->data()) {
if (!aBuffer->data()) {
GVDM_LOG("No data in Video Buffer!");
return NS_ERROR_UNEXPECTED;
}
uint8_t *yuv420p_buffer = (uint8_t *)mVideoBuffer->data();
uint8_t *yuv420p_buffer = (uint8_t *)aBuffer->data();
int32_t stride = mFrameInfo.mStride;
int32_t slice_height = mFrameInfo.mSliceHeight;
@ -224,9 +226,8 @@ GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
crop.left = 0;
crop.right = mFrameInfo.mWidth;
yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
if (mColorConverter.convertDecoderOutputToI420(mVideoBuffer->data(),
if (mColorConverter.convertDecoderOutputToI420(aBuffer->data(),
mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
ReleaseVideoBuffer();
GVDM_LOG("Color conversion failed!");
return NS_ERROR_UNEXPECTED;
}
@ -275,7 +276,6 @@ GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
keyFrame,
-1,
picture);
ReleaseVideoBuffer();
}
data.forget(v);
@ -336,13 +336,14 @@ GonkVideoDecoderManager::Output(int64_t aStreamOffset,
GVDM_LOG("Decoder is not inited");
return NS_ERROR_UNEXPECTED;
}
err = mDecoder->Output(&mVideoBuffer, READ_OUTPUT_BUFFER_TIMEOUT_US);
MediaBuffer* outputBuffer;
err = mDecoder->Output(&outputBuffer, READ_OUTPUT_BUFFER_TIMEOUT_US);
switch (err) {
case OK:
{
RefPtr<VideoData> data;
nsresult rv = CreateVideoData(aStreamOffset, getter_AddRefs(data));
nsresult rv = CreateVideoData(outputBuffer, aStreamOffset, getter_AddRefs(data));
if (rv == NS_ERROR_NOT_AVAILABLE) {
// Decoder outputs a empty video buffer, try again
return NS_ERROR_NOT_AVAILABLE;
@ -379,7 +380,7 @@ GonkVideoDecoderManager::Output(int64_t aStreamOffset,
{
GVDM_LOG("Got the EOS frame!");
RefPtr<VideoData> data;
nsresult rv = CreateVideoData(aStreamOffset, getter_AddRefs(data));
nsresult rv = CreateVideoData(outputBuffer, aStreamOffset, getter_AddRefs(data));
if (rv == NS_ERROR_NOT_AVAILABLE) {
// For EOS, no need to do any thing.
return NS_ERROR_ABORT;
@ -406,13 +407,6 @@ GonkVideoDecoderManager::Output(int64_t aStreamOffset,
return NS_OK;
}
void GonkVideoDecoderManager::ReleaseVideoBuffer() {
if (mVideoBuffer) {
mDecoder->ReleaseMediaBuffer(mVideoBuffer);
mVideoBuffer = nullptr;
}
}
void
GonkVideoDecoderManager::codecReserved()
{

Просмотреть файл

@ -93,8 +93,7 @@ private:
bool SetVideoFormat();
nsresult CreateVideoData(int64_t aStreamOffset, VideoData** aOutData);
void ReleaseVideoBuffer();
nsresult CreateVideoData(MediaBuffer* aBuffer, int64_t aStreamOffset, VideoData** aOutData);
uint8_t* GetColorConverterBuffer(int32_t aWidth, int32_t aHeight);
// For codec resource management
@ -114,8 +113,6 @@ private:
RefPtr<layers::ImageContainer> mImageContainer;
android::MediaBuffer* mVideoBuffer;
MediaInfo mInfo;
android::sp<VideoResourceListener> mVideoListener;
MozPromiseRequestHolder<MediaResourcePromise> mVideoCodecRequest;