Bug 1199809 - Refactor: use RAII to help manage output buffer lifecycle. r=sotaro

John Lin 2015-10-29 21:14:00 +01:00
Parent c405c5b4c1
Commit 7c5a9241cb
4 changed files with 51 additions and 50 deletions
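
Note on the pattern: the refactor replaces manual ReleaseVideoBuffer()/ReleaseMediaBuffer() calls on every early-return path with a scope guard that is armed as soon as the decoder hands out a buffer, plus a forget() escape hatch for the one path where ownership genuinely moves elsewhere. Below is a minimal, self-contained sketch of the idea; the Buffer, Decoder, and AutoReleaseBuffer names are illustrative stand-ins, not the real android::MediaBuffer / MediaCodecProxy / AutoReleaseMediaBuffer types from this patch.

// Minimal sketch of an RAII release guard (stand-in types, not the real API).
#include <cstdio>

struct Buffer { int id; };

struct Decoder {
  Buffer* Output() { return new Buffer{1}; }
  void ReleaseMediaBuffer(Buffer* aBuffer)
  {
    std::printf("released buffer %d\n", aBuffer->id);
    delete aBuffer;
  }
};

class AutoReleaseBuffer
{
public:
  AutoReleaseBuffer(Buffer* aBuffer, Decoder* aDecoder)
    : mBuffer(aBuffer), mDecoder(aDecoder) {}
  ~AutoReleaseBuffer()
  {
    if (mBuffer) {                // still owned: hand it back to the decoder
      mDecoder->ReleaseMediaBuffer(mBuffer);
    }
  }
  Buffer* forget()                // ownership moves elsewhere; skip the release
  {
    Buffer* tmp = mBuffer;
    mBuffer = nullptr;
    return tmp;
  }
private:
  Buffer* mBuffer;
  Decoder* mDecoder;
};

int main()
{
  Decoder decoder;
  {
    AutoReleaseBuffer guard(decoder.Output(), &decoder);
    // Any early return or fall-through here releases the buffer automatically.
  }
  {
    AutoReleaseBuffer guard(decoder.Output(), &decoder);
    Buffer* handedOff = guard.forget();    // e.g. a recycle callback now owns it
    decoder.ReleaseMediaBuffer(handedOff); // released by the new owner instead
  }
  return 0;
}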

View file

@@ -155,25 +155,6 @@ GonkAudioDecoderManager::CreateAudioData(MediaBuffer* aBuffer, int64_t aStreamOf
   return NS_OK;
 }
 
-class AutoReleaseAudioBuffer
-{
-public:
-  AutoReleaseAudioBuffer(MediaBuffer* aBuffer, MediaCodecProxy* aCodecProxy)
-    : mAudioBuffer(aBuffer)
-    , mCodecProxy(aCodecProxy)
-  {}
-  ~AutoReleaseAudioBuffer()
-  {
-    if (mAudioBuffer) {
-      mCodecProxy->ReleaseMediaBuffer(mAudioBuffer);
-    }
-  }
-private:
-  MediaBuffer* mAudioBuffer;
-  sp<MediaCodecProxy> mCodecProxy;
-};
-
 nsresult
 GonkAudioDecoderManager::Output(int64_t aStreamOffset,
                                 RefPtr<MediaData>& aOutData)
@@ -187,7 +168,7 @@ GonkAudioDecoderManager::Output(int64_t aStreamOffset,
   status_t err;
   MediaBuffer* audioBuffer = nullptr;
   err = mDecoder->Output(&audioBuffer, READ_OUTPUT_BUFFER_TIMEOUT_US);
-  AutoReleaseAudioBuffer a(audioBuffer, mDecoder.get());
+  AutoReleaseMediaBuffer a(audioBuffer, mDecoder.get());
   switch (err) {
     case OK:

View file

@@ -11,6 +11,7 @@
 namespace android {
 struct ALooper;
+class MediaBuffer;
 class MediaCodecProxy;
 } // namespace android
@@ -121,6 +122,34 @@ protected:
   MediaDataDecoderCallback* mDecodeCallback; // Reports decoder output or error.
 };
 
+class AutoReleaseMediaBuffer
+{
+public:
+  AutoReleaseMediaBuffer(android::MediaBuffer* aBuffer, android::MediaCodecProxy* aCodec)
+    : mBuffer(aBuffer)
+    , mCodec(aCodec)
+  {}
+  ~AutoReleaseMediaBuffer()
+  {
+    MOZ_ASSERT(mCodec.get());
+    if (mBuffer) {
+      mCodec->ReleaseMediaBuffer(mBuffer);
+    }
+  }
+  android::MediaBuffer* forget()
+  {
+    android::MediaBuffer* tmp = mBuffer;
+    mBuffer = nullptr;
+    return tmp;
+  }
+private:
+  android::MediaBuffer* mBuffer;
+  android::sp<android::MediaCodecProxy> mCodec;
+};
+
 // Samples are decoded using the GonkDecoder (MediaCodec)
 // created by the GonkDecoderManager. This class implements
 // the higher-level logic that drives mapping the Gonk to the async
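
Usage note: both call sites in this patch follow the same shape. GonkAudioDecoderManager::Output() constructs the guard right after mDecoder->Output() and lets it release the MediaBuffer on every exit path, while GonkVideoDecoderManager::CreateVideoData() additionally calls forget() when a gralloc TextureClient takes the buffer, because RecycleCallback returns it to the decoder later.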

View file

@@ -137,39 +137,40 @@ GonkVideoDecoderManager::Init()
 }
 
 nsresult
-GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
+GonkVideoDecoderManager::CreateVideoData(MediaBuffer* aBuffer,
+                                         int64_t aStreamOffset,
+                                         VideoData **v)
 {
   *v = nullptr;
   RefPtr<VideoData> data;
   int64_t timeUs;
   int32_t keyFrame;
 
-  if (mVideoBuffer == nullptr) {
+  if (aBuffer == nullptr) {
     GVDM_LOG("Video Buffer is not valid!");
     return NS_ERROR_UNEXPECTED;
   }
 
-  if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
-    ReleaseVideoBuffer();
+  AutoReleaseMediaBuffer autoRelease(aBuffer, mDecoder.get());
+
+  if (!aBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
     GVDM_LOG("Decoder did not return frame time");
     return NS_ERROR_UNEXPECTED;
   }
 
   if (mLastTime > timeUs) {
-    ReleaseVideoBuffer();
     GVDM_LOG("Output decoded sample time is revert. time=%lld", timeUs);
     return NS_ERROR_NOT_AVAILABLE;
   }
   mLastTime = timeUs;
 
-  if (mVideoBuffer->range_length() == 0) {
+  if (aBuffer->range_length() == 0) {
     // Some decoders may return spurious empty buffers that we just want to ignore
     // quoted from Android's AwesomePlayer.cpp
-    ReleaseVideoBuffer();
     return NS_ERROR_NOT_AVAILABLE;
   }
 
-  if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
+  if (!aBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
     keyFrame = 0;
   }
@@ -188,14 +189,15 @@ GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
   RefPtr<mozilla::layers::TextureClient> textureClient;
 
-  if ((mVideoBuffer->graphicBuffer().get())) {
-    textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
+  if ((aBuffer->graphicBuffer().get())) {
+    textureClient = mNativeWindow->getTextureClientFromBuffer(aBuffer->graphicBuffer().get());
   }
 
   if (textureClient) {
     GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
-    grallocClient->SetMediaBuffer(mVideoBuffer);
+    grallocClient->SetMediaBuffer(aBuffer);
     textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);
+    autoRelease.forget(); // RecycleCallback will return it back to decoder.
 
     data = VideoData::Create(mInfo.mVideo,
                              mImageContainer,
@@ -208,11 +210,11 @@ GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
                              -1,
                              picture);
   } else {
-    if (!mVideoBuffer->data()) {
+    if (!aBuffer->data()) {
       GVDM_LOG("No data in Video Buffer!");
       return NS_ERROR_UNEXPECTED;
     }
-    uint8_t *yuv420p_buffer = (uint8_t *)mVideoBuffer->data();
+    uint8_t *yuv420p_buffer = (uint8_t *)aBuffer->data();
     int32_t stride = mFrameInfo.mStride;
     int32_t slice_height = mFrameInfo.mSliceHeight;
@@ -224,9 +226,8 @@ GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
       crop.left = 0;
       crop.right = mFrameInfo.mWidth;
       yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
-      if (mColorConverter.convertDecoderOutputToI420(mVideoBuffer->data(),
+      if (mColorConverter.convertDecoderOutputToI420(aBuffer->data(),
           mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
-        ReleaseVideoBuffer();
         GVDM_LOG("Color conversion failed!");
         return NS_ERROR_UNEXPECTED;
       }
@@ -275,7 +276,6 @@ GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
                              keyFrame,
                              -1,
                              picture);
-    ReleaseVideoBuffer();
   }
 
   data.forget(v);
@@ -336,13 +336,14 @@ GonkVideoDecoderManager::Output(int64_t aStreamOffset,
     GVDM_LOG("Decoder is not inited");
     return NS_ERROR_UNEXPECTED;
   }
-  err = mDecoder->Output(&mVideoBuffer, READ_OUTPUT_BUFFER_TIMEOUT_US);
+  MediaBuffer* outputBuffer;
+  err = mDecoder->Output(&outputBuffer, READ_OUTPUT_BUFFER_TIMEOUT_US);
 
   switch (err) {
     case OK:
     {
       RefPtr<VideoData> data;
-      nsresult rv = CreateVideoData(aStreamOffset, getter_AddRefs(data));
+      nsresult rv = CreateVideoData(outputBuffer, aStreamOffset, getter_AddRefs(data));
       if (rv == NS_ERROR_NOT_AVAILABLE) {
         // Decoder outputs a empty video buffer, try again
         return NS_ERROR_NOT_AVAILABLE;
@@ -379,7 +380,7 @@ GonkVideoDecoderManager::Output(int64_t aStreamOffset,
     {
       GVDM_LOG("Got the EOS frame!");
       RefPtr<VideoData> data;
-      nsresult rv = CreateVideoData(aStreamOffset, getter_AddRefs(data));
+      nsresult rv = CreateVideoData(outputBuffer, aStreamOffset, getter_AddRefs(data));
       if (rv == NS_ERROR_NOT_AVAILABLE) {
         // For EOS, no need to do any thing.
         return NS_ERROR_ABORT;
@@ -406,13 +407,6 @@ GonkVideoDecoderManager::Output(int64_t aStreamOffset,
   return NS_OK;
 }
 
-void GonkVideoDecoderManager::ReleaseVideoBuffer() {
-  if (mVideoBuffer) {
-    mDecoder->ReleaseMediaBuffer(mVideoBuffer);
-    mVideoBuffer = nullptr;
-  }
-}
-
 void
 GonkVideoDecoderManager::codecReserved()
 {

View file

@@ -93,8 +93,7 @@ private:
   bool SetVideoFormat();
 
-  nsresult CreateVideoData(int64_t aStreamOffset, VideoData** aOutData);
-  void ReleaseVideoBuffer();
+  nsresult CreateVideoData(MediaBuffer* aBuffer, int64_t aStreamOffset, VideoData** aOutData);
 
   uint8_t* GetColorConverterBuffer(int32_t aWidth, int32_t aHeight);
 
   // For codec resource management
@@ -114,8 +113,6 @@ private:
   RefPtr<layers::ImageContainer> mImageContainer;
 
-  android::MediaBuffer* mVideoBuffer;
-
   MediaInfo mInfo;
   android::sp<VideoResourceListener> mVideoListener;
   MozPromiseRequestHolder<MediaResourcePromise> mVideoCodecRequest;