Bug 639721 - Fix data race on nsBuiltinDecoderReader::mInfo - r=kinetik

--HG--
extra : rebase_source : 5e58af70c75f563ae7dd7d62e3be897a3c244e85
Chris Double 2011-03-24 16:53:03 +13:00
Parent 44d9284430
Commit 1c2d3ceecf
9 changed files with 86 additions and 57 deletions
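The race being fixed: the state machine and audio threads read the reader's mInfo through GetInfo() while the decode thread may still be writing it. The patch applies one discipline throughout the hunks below: the state machine keeps its own monitor-protected nsVideoInfo copy, snapshots the fields it needs while the decoder monitor is held, and only then releases the monitor for blocking work. A minimal standalone sketch of that discipline, using std::mutex in place of mozilla::Monitor and invented names (VideoInfo, StateMachine, OpenAudioStream), not the actual Gecko code:

#include <mutex>

// Illustrative stand-ins only; not the real Mozilla types.
struct VideoInfo {
  int mAudioChannels = 0;
  int mAudioRate = 0;
};

class StateMachine {
public:
  void StartAudio() {
    int channels, rate;
    {
      // Snapshot the fields we need while the monitor is held.
      std::lock_guard<std::mutex> lock(mMonitor);
      channels = mInfo.mAudioChannels;
      rate = mInfo.mAudioRate;
    }
    // The blocking call runs with the monitor released, using the copies.
    OpenAudioStream(channels, rate);
  }

private:
  void OpenAudioStream(int /*channels*/, int /*rate*/) {}

  std::mutex mMonitor;  // plays the role of the decoder monitor
  VideoInfo mInfo;      // this object's own copy; monitor must be held to touch it
};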

nsBuiltinDecoderReader.h

@@ -414,6 +414,7 @@ private:
class nsBuiltinDecoderReader : public nsRunnable {
public:
typedef mozilla::Monitor Monitor;
typedef mozilla::MonitorAutoEnter MonitorAutoEnter;
nsBuiltinDecoderReader(nsBuiltinDecoder* aDecoder);
~nsBuiltinDecoderReader();
@@ -443,7 +444,7 @@ public:
// Read header data for all bitstreams in the file. Fills mInfo with
// the data required to present the media. Returns NS_OK on success,
// or NS_ERROR_FAILURE on failure.
virtual nsresult ReadMetadata() = 0;
virtual nsresult ReadMetadata(nsVideoInfo* aInfo) = 0;
// Stores the presentation time of the first frame/sample we'd be
// able to play if we started playback at aOffset, and returns the
@@ -463,11 +464,6 @@ public:
PRInt64 aEndTime,
PRInt64 aCurrentTime) = 0;
// Gets presentation info required for playback.
const nsVideoInfo& GetInfo() {
return mInfo;
}
// Queue of audio samples. This queue is threadsafe.
MediaQueue<SoundData> mAudioQueue;
@@ -520,7 +516,8 @@ protected:
// Used to seek to media start time.
PRInt64 mDataOffset;
// Stores presentation info required for playback.
// Stores presentation info required for playback. The reader's monitor
// must be held when accessing this.
nsVideoInfo mInfo;
};

nsBuiltinDecoderStateMachine.cpp

@@ -417,8 +417,8 @@ void nsBuiltinDecoderStateMachine::AudioLoop()
MonitorAutoEnter mon(mDecoder->GetMonitor());
mAudioCompleted = PR_FALSE;
audioStartTime = mAudioStartTime;
channels = mReader->GetInfo().mAudioChannels;
rate = mReader->GetInfo().mAudioRate;
channels = mInfo.mAudioChannels;
rate = mInfo.mAudioRate;
NS_ASSERTION(audioStartTime != -1, "Should have audio start time by now");
}
while (1) {
@@ -712,20 +712,22 @@ void nsBuiltinDecoderStateMachine::StartPlayback()
LOG(PR_LOG_DEBUG, ("%p StartPlayback", mDecoder));
mDecoder->mPlaybackStatistics.Start(TimeStamp::Now());
if (HasAudio()) {
MonitorAutoExit exitMon(mDecoder->GetMonitor());
MonitorAutoEnter audioMon(mAudioMonitor);
if (mAudioStream) {
// We have an audiostream, so it must have been paused the last time
// StopPlayback() was called.
mAudioStream->Resume();
} else {
// No audiostream, create one.
const nsVideoInfo& info = mReader->GetInfo();
mAudioStream = nsAudioStream::AllocateStream();
mAudioStream->Init(info.mAudioChannels,
info.mAudioRate,
MOZ_SOUND_DATA_FORMAT);
mAudioStream->SetVolume(mVolume);
PRInt32 rate = mInfo.mAudioRate;
PRInt32 channels = mInfo.mAudioChannels;
{
MonitorAutoExit exitMon(mDecoder->GetMonitor());
MonitorAutoEnter audioMon(mAudioMonitor);
if (mAudioStream) {
// We have an audiostream, so it must have been paused the last time
// StopPlayback() was called.
mAudioStream->Resume();
} else {
// No audiostream, create one.
mAudioStream = nsAudioStream::AllocateStream();
mAudioStream->Init(channels, rate, MOZ_SOUND_DATA_FORMAT);
mAudioStream->SetVolume(mVolume);
}
}
}
mPlayStartTime = TimeStamp::Now();
@@ -1057,8 +1059,12 @@ nsresult nsBuiltinDecoderStateMachine::Run()
VideoData* videoData = FindStartTime();
if (videoData) {
MonitorAutoExit exitMon(mDecoder->GetMonitor());
RenderVideoFrame(videoData, TimeStamp::Now());
nsIntSize display = mInfo.mDisplay;
float aspect = mInfo.mPixelAspectRatio;
{
MonitorAutoExit exitMon(mDecoder->GetMonitor());
RenderVideoFrame(videoData, TimeStamp::Now(), display, aspect);
}
}
// Start the decode threads, so that we can pre buffer the streams.
@@ -1082,14 +1088,13 @@ nsresult nsBuiltinDecoderStateMachine::Run()
// setting the default framebuffer size for audioavailable events. Also,
// if there is audio, let the MozAudioAvailable event manager know about
// the metadata.
const nsVideoInfo& info = mReader->GetInfo();
PRUint32 frameBufferLength = info.mAudioChannels * FRAMEBUFFER_LENGTH_PER_CHANNEL;
PRUint32 frameBufferLength = mInfo.mAudioChannels * FRAMEBUFFER_LENGTH_PER_CHANNEL;
nsCOMPtr<nsIRunnable> metadataLoadedEvent =
new nsAudioMetadataEventRunner(mDecoder, info.mAudioChannels,
info.mAudioRate, frameBufferLength);
new nsAudioMetadataEventRunner(mDecoder, mInfo.mAudioChannels,
mInfo.mAudioRate, frameBufferLength);
NS_DispatchToMainThread(metadataLoadedEvent, NS_DISPATCH_NORMAL);
if (HasAudio()) {
mEventManager.Init(info.mAudioChannels, info.mAudioRate);
mEventManager.Init(mInfo.mAudioChannels, mInfo.mAudioRate);
mDecoder->RequestFrameBufferLength(frameBufferLength);
}
@@ -1180,7 +1185,12 @@ nsresult nsBuiltinDecoderStateMachine::Run()
if (video) {
NS_ASSERTION(video->mTime <= seekTime && seekTime <= video->mEndTime,
"Seek target should lie inside the first frame after seek");
RenderVideoFrame(video, TimeStamp::Now());
nsIntSize display = mInfo.mDisplay;
float aspect = mInfo.mPixelAspectRatio;
{
MonitorAutoExit exitMon(mDecoder->GetMonitor());
RenderVideoFrame(video, TimeStamp::Now(), display, aspect);
}
mReader->mVideoQueue.PopFront();
nsCOMPtr<nsIRunnable> event =
NS_NewRunnableMethod(mDecoder, &nsBuiltinDecoder::Invalidate);
@@ -1336,9 +1346,12 @@ nsresult nsBuiltinDecoderStateMachine::Run()
}
void nsBuiltinDecoderStateMachine::RenderVideoFrame(VideoData* aData,
TimeStamp aTarget)
TimeStamp aTarget,
nsIntSize aDisplaySize,
float aAspectRatio)
{
NS_ASSERTION(IsCurrentThread(mDecoder->mStateMachineThread), "Should be on state machine thread.");
mDecoder->GetMonitor().AssertNotCurrentThreadIn();
if (aData->mDuplicate) {
return;
@@ -1346,11 +1359,8 @@ void nsBuiltinDecoderStateMachine::RenderVideoFrame(VideoData* aData,
nsRefPtr<Image> image = aData->mImage;
if (image) {
const nsVideoInfo& info = mReader->GetInfo();
mDecoder->SetVideoData(gfxIntSize(info.mDisplay.width, info.mDisplay.height),
info.mPixelAspectRatio,
image,
aTarget);
mDecoder->SetVideoData(gfxIntSize(aDisplaySize.width, aDisplaySize.height),
aAspectRatio, image, aTarget);
}
}
@@ -1460,10 +1470,14 @@ void nsBuiltinDecoderStateMachine::AdvanceFrame()
MsToDuration(currentFrame->mTime - mStartTime);
NS_ASSERTION(currentFrame->mTime >= mStartTime, "Should have positive frame time");
{
MonitorAutoExit exitMon(mDecoder->GetMonitor());
// If we have video, we want to increment the clock in steps of the frame
// duration.
RenderVideoFrame(currentFrame, presTime);
nsIntSize display = mInfo.mDisplay;
float aspect = mInfo.mPixelAspectRatio;
{
MonitorAutoExit exitMon(mDecoder->GetMonitor());
// If we have video, we want to increment the clock in steps of the frame
// duration.
RenderVideoFrame(currentFrame, presTime, display, aspect);
}
}
mDecoder->GetFrameStatistics().NotifyPresentedFrame();
PRInt64 now = DurationToMs(TimeStamp::Now() - mPlayStartTime + mPlayDuration);
@@ -1537,9 +1551,10 @@ VideoData* nsBuiltinDecoderStateMachine::FindStartTime()
PRInt64 startTime = 0;
mStartTime = 0;
VideoData* v = nsnull;
PRInt64 dataOffset = mInfo.mDataOffset;
{
MonitorAutoExit exitMon(mDecoder->GetMonitor());
v = mReader->FindStartTime(mReader->GetInfo().mDataOffset, startTime);
v = mReader->FindStartTime(dataOffset, startTime);
}
if (startTime != 0) {
mStartTime = startTime;
@@ -1613,11 +1628,12 @@ void nsBuiltinDecoderStateMachine::LoadMetadata()
LOG(PR_LOG_DEBUG, ("Loading Media Headers"));
nsresult res;
nsVideoInfo info;
{
MonitorAutoExit exitMon(mDecoder->GetMonitor());
res = mReader->ReadMetadata();
res = mReader->ReadMetadata(&info);
}
const nsVideoInfo& info = mReader->GetInfo();
mInfo = info;
if (NS_FAILED(res) || (!info.mHasVideo && !info.mHasAudio)) {
mState = DECODER_STATE_SHUTDOWN;
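The LoadMetadata hunk above is the handoff point: the reader fills a stack-local nsVideoInfo with the decoder monitor released (ReadMetadata may block on I/O), and the copy into the state machine's mInfo is the only step performed with the monitor held. A sketch of the same shape, with std::mutex standing in for the decoder monitor and BlockingReadMetadata a made-up placeholder for mReader->ReadMetadata():

#include <mutex>

struct VideoInfo {
  bool mHasAudio = false;
  bool mHasVideo = false;
};

// Hypothetical stand-in for the reader call, which may block on I/O.
static bool BlockingReadMetadata(VideoInfo* aInfo) {
  aInfo->mHasAudio = true;
  return true;
}

class StateMachine {
public:
  bool LoadMetadata() {
    std::unique_lock<std::mutex> lock(mMonitor);  // held on entry, as in the patch
    VideoInfo info;
    lock.unlock();                        // MonitorAutoExit in the patch
    bool ok = BlockingReadMetadata(&info);
    lock.lock();                          // re-acquire before publishing
    mInfo = info;                         // the only write to the shared copy
    return ok && (info.mHasAudio || info.mHasVideo);
  }

private:
  std::mutex mMonitor;
  VideoInfo mInfo;
};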

nsBuiltinDecoderStateMachine.h

@@ -184,14 +184,14 @@ public:
// The decoder monitor must be obtained before calling this.
PRBool HasAudio() const {
mDecoder->GetMonitor().AssertCurrentThreadIn();
return mReader->GetInfo().mHasAudio;
return mInfo.mHasAudio;
}
// This is called on the state machine thread and audio thread.
// The decoder monitor must be obtained before calling this.
PRBool HasVideo() const {
mDecoder->GetMonitor().AssertCurrentThreadIn();
return mReader->GetInfo().mHasVideo;
return mInfo.mHasVideo;
}
// Should be called by main thread.
@@ -311,10 +311,11 @@ protected:
void UpdatePlaybackPositionInternal(PRInt64 aTime);
// Performs YCbCr to RGB conversion, and pushes the image down the
// rendering pipeline. Called on the state machine thread.
void RenderVideoFrame(VideoData* aData,
TimeStamp aTarget);
// rendering pipeline. Called on the state machine thread. The decoder
// monitor must not be held when calling this.
void RenderVideoFrame(VideoData* aData, TimeStamp aTarget,
nsIntSize aDisplaySize, float aAspectRatio);
// If we have video, display a video frame if its display time has arrived,
// otherwise sleep until it's time for the next sample. Update
// the current frame time as appropriate, and trigger ready state update.
@@ -524,6 +525,10 @@ private:
// event manager is accessed from the state machine and audio threads,
// and takes care of synchronizing access to its internal queue.
nsAudioAvailableEventManager mEventManager;
// Stores presentation info required for playback. The decoder monitor
// must be held when accessing this.
nsVideoInfo mInfo;
};
#endif
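The comments in this header encode two opposite preconditions: HasAudio()/HasVideo() assert that the decoder monitor is held, while RenderVideoFrame() now asserts that it is not, so frame pushing never happens with the monitor taken. mozilla::Monitor provides these assertions; as a rough illustration of how such debug checks can be built, here is a hypothetical wrapper over std::mutex that tracks the owning thread (an approximation, not the Mozilla implementation):

#include <cassert>
#include <mutex>
#include <thread>

// Hypothetical debug-only wrapper; mozilla::Monitor ships equivalent assertions.
class DebugMonitor {
public:
  void Enter() {
    mMutex.lock();
    mOwner = std::this_thread::get_id();
  }
  void Exit() {
    mOwner = std::thread::id();  // reset to "no owner" before unlocking
    mMutex.unlock();
  }
  // Call only from code that claims to hold the monitor.
  void AssertCurrentThreadIn() const {
    assert(mOwner == std::this_thread::get_id());
  }
  // Call from code that must not hold the monitor (e.g. before blocking work).
  // Reading mOwner unlocked is a best-effort debug check, not a guarantee.
  void AssertNotCurrentThreadIn() const {
    assert(mOwner != std::this_thread::get_id());
  }

private:
  std::mutex mMutex;
  std::thread::id mOwner;  // meaningful only while mMutex is held
};

In the patch, the RenderVideoFrame call sites copy mInfo.mDisplay and mInfo.mPixelAspectRatio while the monitor is held and exit it only afterwards, which is what keeps the AssertNotCurrentThreadIn() at the top of RenderVideoFrame satisfied.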

nsOggReader.cpp

@@ -169,7 +169,7 @@ static PRBool DoneReadingHeaders(nsTArray<nsOggCodecState*>& aBitstreams) {
return PR_TRUE;
}
nsresult nsOggReader::ReadMetadata()
nsresult nsOggReader::ReadMetadata(nsVideoInfo* aInfo)
{
NS_ASSERTION(mDecoder->OnStateMachineThread(), "Should be on play state machine thread.");
MonitorAutoEnter mon(mMonitor);
@@ -353,6 +353,8 @@ nsresult nsOggReader::ReadMetadata()
mTheoraSerial = mTheoraState->mSerial;
}
*aInfo = mInfo;
LOG(PR_LOG_DEBUG, ("Done loading headers, data offset %lld", mDataOffset));
return NS_OK;
@@ -561,6 +563,10 @@ nsresult nsOggReader::DecodeTheora(nsTArray<nsAutoPtr<VideoData> >& aFrames,
b.mPlanes[i].mWidth = buffer[i].width;
b.mPlanes[i].mStride = buffer[i].stride;
}
// Need the monitor to be held to be able to use mInfo. This
// is held by our caller.
mMonitor.AssertCurrentThreadIn();
VideoData *v = VideoData::Create(mInfo,
mDecoder->GetImageContainer(),
mPageOffset,

nsOggReader.h

@@ -90,7 +90,7 @@ public:
return mTheoraState != 0 && mTheoraState->mActive;
}
virtual nsresult ReadMetadata();
virtual nsresult ReadMetadata(nsVideoInfo* aInfo);
virtual nsresult Seek(PRInt64 aTime, PRInt64 aStartTime, PRInt64 aEndTime, PRInt64 aCurrentTime);
virtual nsresult GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime);
@@ -194,7 +194,8 @@ private:
nsresult DecodeVorbis(nsTArray<nsAutoPtr<SoundData> >& aChunks,
ogg_packet* aPacket);
// May return NS_ERROR_OUT_OF_MEMORY.
// May return NS_ERROR_OUT_OF_MEMORY. Caller must have obtained the
// reader's monitor.
nsresult DecodeTheora(nsTArray<nsAutoPtr<VideoData> >& aFrames,
ogg_packet* aPacket);
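On the reader side the pattern is symmetric: each ReadMetadata(nsVideoInfo*) implementation takes the reader's own monitor, fills its private mInfo, and copies it out through aInfo before returning, while decode helpers such as DecodeTheora only assert that the caller already holds that monitor. A schematic sketch with generic names (Reader, DecodeFrame), not the actual Ogg/WebM/raw readers:

#include <mutex>

struct VideoInfo {
  int mAudioRate = 0;
};

class Reader {
public:
  // Fills the caller's copy; the reader's own monitor is held for the duration,
  // matching the *aInfo = mInfo added to each reader in the patch.
  bool ReadMetadata(VideoInfo* aInfo) {
    std::lock_guard<std::mutex> lock(mMonitor);
    mInfo.mAudioRate = 44100;  // stands in for parsing real stream headers
    *aInfo = mInfo;            // caller gets an independent snapshot
    return true;
  }

  // Decode helpers rely on the caller already holding the monitor
  // (mMonitor.AssertCurrentThreadIn() in the real DecodeTheora); plain
  // std::mutex has no ownership assert, so here it is only a documented precondition.
  void DecodeFrame() {
    (void)mInfo;  // safe only because the caller holds mMonitor
  }

private:
  std::mutex mMonitor;
  VideoInfo mInfo;  // reader-private copy; mMonitor must be held to touch it
};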

nsRawReader.cpp

@@ -68,7 +68,7 @@ nsresult nsRawReader::ResetDecode()
return nsBuiltinDecoderReader::ResetDecode();
}
nsresult nsRawReader::ReadMetadata()
nsresult nsRawReader::ReadMetadata(nsVideoInfo* aInfo)
{
NS_ASSERTION(mDecoder->OnStateMachineThread(),
"Should be on state machine thread.");
@@ -134,6 +134,8 @@ nsresult nsRawReader::ReadMetadata()
(mFrameSize * mFrameRate));
}
*aInfo = mInfo;
return NS_OK;
}

nsRawReader.h

@@ -110,7 +110,7 @@ public:
return PR_TRUE;
}
virtual nsresult ReadMetadata();
virtual nsresult ReadMetadata(nsVideoInfo* aInfo);
virtual nsresult Seek(PRInt64 aTime, PRInt64 aStartTime, PRInt64 aEndTime, PRInt64 aCurrentTime);
virtual PRInt64 FindEndTime(PRInt64 aEndOffset);
virtual nsresult GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime);

nsWebMReader.cpp

@@ -209,7 +209,7 @@ void nsWebMReader::Cleanup()
}
}
nsresult nsWebMReader::ReadMetadata()
nsresult nsWebMReader::ReadMetadata(nsVideoInfo* aInfo)
{
NS_ASSERTION(mDecoder->OnStateMachineThread(), "Should be on state machine thread.");
MonitorAutoEnter mon(mMonitor);
@@ -396,6 +396,8 @@ nsresult nsWebMReader::ReadMetadata()
}
}
*aInfo = mInfo;
return NS_OK;
}

nsWebMReader.h

@@ -153,7 +153,7 @@ public:
return mHasVideo;
}
virtual nsresult ReadMetadata();
virtual nsresult ReadMetadata(nsVideoInfo* aInfo);
virtual nsresult Seek(PRInt64 aTime, PRInt64 aStartTime, PRInt64 aEndTime, PRInt64 aCurrentTime);
virtual nsresult GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime);
virtual void NotifyDataArrived(const char* aBuffer, PRUint32 aLength, PRUint32 aOffset);