diff --git a/dom/html/HTMLMediaElement.cpp b/dom/html/HTMLMediaElement.cpp
index a0bfa5649dab..35994a767645 100644
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -3638,8 +3638,8 @@ void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
SetVolumeInternal();
VideoFrameContainer* container = GetVideoFrameContainer();
- if (container) {
- stream->AddVideoOutput(container);
+ if (mSelectedVideoStreamTrack && container) {
+ mSelectedVideoStreamTrack->AddVideoOutput(container);
}
VideoTrack* videoTrack = VideoTracks()->GetSelectedTrack();
if (videoTrack) {
@@ -3656,8 +3656,8 @@ void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
stream->RemoveAudioOutput(this);
VideoFrameContainer* container = GetVideoFrameContainer();
- if (container) {
- stream->RemoveVideoOutput(container);
+ if (mSelectedVideoStreamTrack && container) {
+ mSelectedVideoStreamTrack->RemoveVideoOutput(container);
}
VideoTrack* videoTrack = VideoTracks()->GetSelectedTrack();
if (videoTrack) {
@@ -3802,6 +3802,9 @@ void HTMLMediaElement::ConstructMediaTracks()
mMediaStreamSizeListener = new StreamSizeListener(this);
streamTrack->AddDirectListener(mMediaStreamSizeListener);
mSelectedVideoStreamTrack = streamTrack;
+ if (GetVideoFrameContainer()) {
+ mSelectedVideoStreamTrack->AddVideoOutput(GetVideoFrameContainer());
+ }
}
}
@@ -3834,6 +3837,10 @@ HTMLMediaElement::NotifyMediaStreamTrackAdded(const RefPtr<MediaStreamTrack>& aT
mMediaStreamSizeListener = new StreamSizeListener(this);
t->AddDirectListener(mMediaStreamSizeListener);
mSelectedVideoStreamTrack = t;
+ VideoFrameContainer* container = GetVideoFrameContainer();
+ if (mSrcStreamIsPlaying && container) {
+ mSelectedVideoStreamTrack->AddVideoOutput(container);
+ }
}
}
@@ -3863,6 +3870,10 @@ HTMLMediaElement::NotifyMediaStreamTrackRemoved(const RefPtr<MediaStreamTrack>&
if (mMediaStreamSizeListener) {
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
}
+ VideoFrameContainer* container = GetVideoFrameContainer();
+ if (mSrcStreamIsPlaying && container) {
+ mSelectedVideoStreamTrack->RemoveVideoOutput(container);
+ }
mSelectedVideoStreamTrack = nullptr;
MOZ_ASSERT(mSrcStream);
nsTArray<RefPtr<VideoStreamTrack>> tracks;
@@ -3886,6 +3897,9 @@ HTMLMediaElement::NotifyMediaStreamTrackRemoved(const RefPtr&
track->AddDirectListener(mMediaStreamSizeListener);
}
mSelectedVideoStreamTrack = track;
+ if (container) {
+ mSelectedVideoStreamTrack->AddVideoOutput(container);
+ }
return;
}
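Note on the HTMLMediaElement hunks above: the VideoFrameContainer is now attached to the selected VideoStreamTrack rather than to the whole mSrcStream, so track selection, playback-state changes and track removal all have to keep AddVideoOutput/RemoveVideoOutput paired. A minimal sketch of that pairing; the helper name is hypothetical and not part of the patch:

    // Sketch only: the add/remove pairing the patch maintains for the
    // currently selected video track.
    static void
    UpdateSelectedTrackVideoOutput(VideoStreamTrack* aTrack,
                                   VideoFrameContainer* aContainer,
                                   bool aPlaying)
    {
      if (!aTrack || !aContainer) {
        return;                                 // nothing to connect the sink to
      }
      if (aPlaying) {
        aTrack->AddVideoOutput(aContainer);     // frames start flowing to the element
      } else {
        aTrack->RemoveVideoOutput(aContainer);  // frames stop flowing
      }
    }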
diff --git a/dom/media/MediaStreamGraph.cpp b/dom/media/MediaStreamGraph.cpp
index 47034d303d31..f7034f162098 100644
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -109,6 +109,15 @@ void
MediaStreamGraphImpl::AddStreamGraphThread(MediaStream* aStream)
{
aStream->mTracksStartTime = mProcessedTime;
+
+ if (aStream->AsSourceStream()) {
+ SourceMediaStream* source = aStream->AsSourceStream();
+ TimeStamp currentTimeStamp = CurrentDriver()->GetCurrentTimeStamp();
+ TimeStamp processedTimeStamp = currentTimeStamp +
+ TimeDuration::FromSeconds(MediaTimeToSeconds(mProcessedTime - IterationEnd()));
+ source->SetStreamTracksStartTimeStamp(processedTimeStamp);
+ }
+
if (aStream->IsSuspended()) {
mSuspendedStreams.AppendElement(aStream);
STREAM_LOG(LogLevel::Debug, ("Adding media stream %p to the graph, in the suspended stream array", aStream));
@@ -928,204 +937,6 @@ MediaStreamGraphImpl::PlayAudio(MediaStream* aStream)
return ticksWritten;
}
-static void
-SetImageToBlackPixel(PlanarYCbCrImage* aImage)
-{
- uint8_t blackPixel[] = { 0x10, 0x80, 0x80 };
-
- PlanarYCbCrData data;
- data.mYChannel = blackPixel;
- data.mCbChannel = blackPixel + 1;
- data.mCrChannel = blackPixel + 2;
- data.mYStride = data.mCbCrStride = 1;
- data.mPicSize = data.mYSize = data.mCbCrSize = IntSize(1, 1);
- aImage->CopyData(data);
-}
-
-class VideoFrameContainerInvalidateRunnable : public Runnable {
-public:
- explicit VideoFrameContainerInvalidateRunnable(VideoFrameContainer* aVideoFrameContainer)
- : mVideoFrameContainer(aVideoFrameContainer)
- {}
- NS_IMETHOD Run()
- {
- MOZ_ASSERT(NS_IsMainThread());
-
- mVideoFrameContainer->Invalidate();
-
- return NS_OK;
- }
-private:
- RefPtr<VideoFrameContainer> mVideoFrameContainer;
-};
-
-void
-MediaStreamGraphImpl::PlayVideo(MediaStream* aStream)
-{
- MOZ_ASSERT(mRealtime, "Should only attempt to play video in realtime mode");
-
- if (aStream->mVideoOutputs.IsEmpty())
- return;
-
- TimeStamp currentTimeStamp = CurrentDriver()->GetCurrentTimeStamp();
-
- // Collect any new frames produced in this iteration.
- AutoTArray<ImageContainer::NonOwningImage,4> newImages;
- PrincipalHandle lastPrincipalHandle = PRINCIPAL_HANDLE_NONE;
- RefPtr<Image> blackImage;
-
- MOZ_ASSERT(mProcessedTime >= aStream->mTracksStartTime, "frame position before buffer?");
- // We only look at the non-blocking interval
- StreamTime frameBufferTime = aStream->GraphTimeToStreamTime(mProcessedTime);
- StreamTime bufferEndTime = aStream->GraphTimeToStreamTime(aStream->mStartBlocking);
- StreamTime start;
- const VideoChunk* chunk;
- for ( ;
- frameBufferTime < bufferEndTime;
- frameBufferTime = start + chunk->GetDuration()) {
- // Pick the last track that has a video chunk for the time, and
- // schedule its frame.
- chunk = nullptr;
- for (StreamTracks::TrackIter tracks(aStream->GetStreamTracks(),
- MediaSegment::VIDEO);
- !tracks.IsEnded();
- tracks.Next()) {
- VideoSegment* segment = tracks->Get<VideoSegment>();
- StreamTime thisStart;
- const VideoChunk* thisChunk =
- segment->FindChunkContaining(frameBufferTime, &thisStart);
- if (thisChunk && thisChunk->mFrame.GetImage()) {
- start = thisStart;
- chunk = thisChunk;
- }
- }
- if (!chunk)
- break;
-
- const VideoFrame* frame = &chunk->mFrame;
- if (*frame == aStream->mLastPlayedVideoFrame) {
- continue;
- }
-
- Image* image = frame->GetImage();
- STREAM_LOG(LogLevel::Verbose,
- ("MediaStream %p writing video frame %p (%dx%d)",
- aStream, image, frame->GetIntrinsicSize().width,
- frame->GetIntrinsicSize().height));
- // Schedule this frame after the previous frame finishes, instead of at
- // its start time. These times only differ in the case of multiple
- // tracks.
- // frameBufferTime is in the non-blocking interval.
- GraphTime frameTime = aStream->StreamTimeToGraphTime(frameBufferTime);
- TimeStamp targetTime = currentTimeStamp +
- TimeDuration::FromSeconds(MediaTimeToSeconds(frameTime - IterationEnd()));
-
- if (frame->GetForceBlack()) {
- if (!blackImage) {
- // Fixme: PlayVideo will be replaced in latter changeset
- // "Call MediaStreamVideoSink::setCurrentFrames in SourceMediaStream::AppendToTrack."
- // of this bug.
- // This is a temp workaround to pass the build and test.
- if (!aStream->mVideoOutputs[0].mListener->AsVideoFrameContainer()) {
- return;
- }
- blackImage = aStream->mVideoOutputs[0].mListener->AsVideoFrameContainer()->
- GetImageContainer()->CreatePlanarYCbCrImage();
- if (blackImage) {
- // Sets the image to a single black pixel, which will be scaled to
- // fill the rendered size.
- SetImageToBlackPixel(blackImage->AsPlanarYCbCrImage());
- }
- }
- if (blackImage) {
- image = blackImage;
- }
- }
- newImages.AppendElement(ImageContainer::NonOwningImage(image, targetTime));
-
- lastPrincipalHandle = chunk->GetPrincipalHandle();
-
- aStream->mLastPlayedVideoFrame = *frame;
- }
-
- if (!aStream->mLastPlayedVideoFrame.GetImage())
- return;
-
- AutoTArray<ImageContainer::NonOwningImage,4> images;
- bool haveMultipleImages = false;
-
- for (const TrackBound<MediaStreamVideoSink>& sink : aStream->mVideoOutputs) {
- VideoFrameContainer* output = sink.mListener->AsVideoFrameContainer();
- if (!output) {
- continue;
- }
-
- bool principalHandleChanged =
- lastPrincipalHandle != PRINCIPAL_HANDLE_NONE &&
- lastPrincipalHandle != output->GetLastPrincipalHandle();
-
- // Find previous frames that may still be valid.
- AutoTArray<ImageContainer::OwningImage,4> previousImages;
- output->GetImageContainer()->GetCurrentImages(&previousImages);
- uint32_t j = previousImages.Length();
- if (j) {
- // Re-use the most recent frame before currentTimeStamp and subsequent,
- // always keeping at least one frame.
- do {
- --j;
- } while (j > 0 && previousImages[j].mTimeStamp > currentTimeStamp);
- }
- if (previousImages.Length() - j + newImages.Length() > 1) {
- haveMultipleImages = true;
- }
-
- // Don't update if there are no changes.
- if (j == 0 && newImages.IsEmpty())
- continue;
-
- for ( ; j < previousImages.Length(); ++j) {
- const auto& image = previousImages[j];
- // Cope with potential clock skew with AudioCallbackDriver.
- if (newImages.Length() && image.mTimeStamp > newImages[0].mTimeStamp) {
- STREAM_LOG(LogLevel::Warning,
- ("Dropping %u video frames due to clock skew",
- unsigned(previousImages.Length() - j)));
- break;
- }
-
- images.AppendElement(ImageContainer::
- NonOwningImage(image.mImage,
- image.mTimeStamp, image.mFrameID));
- }
-
- // Add the frames from this iteration.
- for (auto& image : newImages) {
- image.mFrameID = output->NewFrameID();
- images.AppendElement(image);
- }
-
- if (principalHandleChanged) {
- output->UpdatePrincipalHandleForFrameID(lastPrincipalHandle,
- newImages.LastElement().mFrameID);
- }
-
- output->SetCurrentFrames(aStream->mLastPlayedVideoFrame.GetIntrinsicSize(),
- images);
-
- nsCOMPtr<nsIRunnable> event =
- new VideoFrameContainerInvalidateRunnable(output);
- DispatchToMainThreadAfterStreamStateUpdate(event.forget());
-
- images.ClearAndRetainStorage();
- }
-
- // If the stream has finished and the timestamps of all frames have expired
- // then no more updates are required.
- if (aStream->mFinished && !haveMultipleImages) {
- aStream->mLastPlayedVideoFrame.SetNull();
- }
-}
-
void
MediaStreamGraphImpl::OpenAudioInputImpl(int aID,
AudioDataListener *aListener)
@@ -1539,7 +1350,6 @@ MediaStreamGraphImpl::Process()
"Each stream should have the same number of frame.");
}
}
- PlayVideo(stream);
}
if (stream->mStartBlocking > mProcessedTime) {
allBlockedForever = false;
@@ -2836,6 +2646,16 @@ SourceMediaStream::ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSe
segment->ResampleChunks(aTrackData->mResampler, aTrackData->mInputRate, GraphImpl()->GraphRate());
}
+void
+SourceMediaStream::AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime,
+ GraphTime aBlockedTime)
+{
+ MutexAutoLock lock(mMutex);
+ mTracksStartTime += aBlockedTime;
+ mStreamTracksStartTimeStamp += TimeDuration::FromSeconds(GraphImpl()->MediaTimeToSeconds(aBlockedTime));
+ mTracks.ForgetUpTo(aCurrentTime - mTracksStartTime);
+}
+
bool
SourceMediaStream::AppendToTrack(TrackID aID, MediaSegment* aSegment, MediaSegment *aRawSegment)
{
@@ -2958,9 +2778,9 @@ SourceMediaStream::AddDirectTrackListenerImpl(already_AddRefed<DirectMediaStream
RefPtr<DirectMediaStreamTrackListener> listener = aListener;
STREAM_LOG(LogLevel::Debug, ("Adding direct track listener %p bound to track %d to source stream %p",
listener.get(), aTrackID, this));
@@ -2973,6 +2793,19 @@ SourceMediaStream::AddDirectTrackListenerImpl(already_AddRefed<DirectMediaStream
isAudio = data->mData->GetType() == MediaSegment::AUDIO;
isVideo = data->mData->GetType() == MediaSegment::VIDEO;
}
+
+ // The track might have been removed from mUpdateTracks but still exist in
+ // mTracks.
+ auto streamTrack = FindTrack(aTrackID);
+ bool foundTrack = !!streamTrack;
+ if (foundTrack) {
+ MediaStreamVideoSink* videoSink = listener->AsMediaStreamVideoSink();
+ // Re-send the missed VideoSegment to the newly added MediaStreamVideoSink.
+ if (streamTrack->GetType() == MediaSegment::VIDEO && videoSink) {
+ videoSink->SetCurrentFrames(*(static_cast<VideoSegment*>(streamTrack->GetSegment())));
+ }
+ }
+
if (found && (isAudio || isVideo)) {
for (auto entry : mDirectTrackListeners) {
if (entry.mListener == listener &&
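Note on the MediaStreamGraph.cpp hunks: when a SourceMediaStream joins the graph, AddStreamGraphThread records a wall-clock anchor for its track start, computed from the driver's current TimeStamp plus the (usually negative) gap between mProcessedTime and IterationEnd(); and a newly attached MediaStreamVideoSink is sent the VideoSegment it missed. A sketch of the anchor arithmetic, assuming MediaTimeToSeconds() is simply graph ticks divided by the graph rate; the standalone helper is hypothetical:

    // Sketch of the start-timestamp computation in AddStreamGraphThread.
    static TimeStamp
    ComputeStreamStartTimeStamp(const TimeStamp& aDriverNow,  // CurrentDriver()->GetCurrentTimeStamp()
                                GraphTime aProcessedTime,     // mProcessedTime
                                GraphTime aIterationEnd,      // IterationEnd()
                                TrackRate aGraphRate)
    {
      double offsetSeconds =
        static_cast<double>(aProcessedTime - aIterationEnd) / aGraphRate;
      // mProcessedTime normally trails IterationEnd(), so the stream start maps
      // slightly before the driver's "now".
      return aDriverNow + TimeDuration::FromSeconds(offsetSeconds);
    }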
diff --git a/dom/media/MediaStreamGraph.h b/dom/media/MediaStreamGraph.h
index f86a044b057b..55804723cfb1 100644
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -543,7 +543,9 @@ public:
}
protected:
- void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime, GraphTime aBlockedTime)
// |AdvanceTimeVaryingValuesToCurrentTime| is overridden in SourceMediaStream.
+ virtual void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime,
+ GraphTime aBlockedTime)
{
mTracksStartTime += aBlockedTime;
mTracks.ForgetUpTo(aCurrentTime - mTracksStartTime);
@@ -805,6 +807,11 @@ public:
*/
bool HasPendingAudioTrack();
+ TimeStamp GetStreamTracksStartTimeStamp() {
+ MutexAutoLock lock(mMutex);
+ return mStreamTracksStartTimeStamp;
+ }
+
// XXX need a Reset API
friend class MediaStreamGraphImpl;
@@ -869,6 +876,15 @@ protected:
void NotifyDirectConsumers(TrackData *aTrack,
MediaSegment *aSegment);
+ virtual void
+ AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime,
+ GraphTime aBlockedTime) override;
+ void SetStreamTracksStartTimeStamp(const TimeStamp& aTimeStamp)
+ {
+ MutexAutoLock lock(mMutex);
+ mStreamTracksStartTimeStamp = aTimeStamp;
+ }
+
// Only accessed on the MSG thread. Used so to ask the MSGImpl to usecount
// users of a specific input.
// XXX Should really be a CubebUtils::AudioDeviceID, but they aren't
@@ -880,6 +896,10 @@ protected:
Mutex mMutex;
// protected by mMutex
StreamTime mUpdateKnownTracksTime;
+ // This time stamp is updated when the SourceMediaStream is added to the
+ // graph and whenever it is blocked, in |AddStreamGraphThread| and
+ // |AdvanceTimeVaryingValuesToCurrentTime| respectively.
+ TimeStamp mStreamTracksStartTimeStamp;
nsTArray<TrackData> mUpdateTracks;
nsTArray<TrackData> mPendingTracks;
nsTArray<RefPtr<DirectMediaStreamTrackListener>> mDirectListeners;
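Note on the MediaStreamGraph.h changes: mStreamTracksStartTimeStamp has to stay aligned with mTracksStartTime as blocked time accumulates, which is why AdvanceTimeVaryingValuesToCurrentTime becomes virtual and the SourceMediaStream override advances both origins by the same amount. A worked example of that bookkeeping, assuming a 48 kHz graph rate; the helper is hypothetical:

    // 480 graph ticks of blocking == 10 ms at 48 kHz; both origins move
    // forward by the same 10 ms.
    static void
    AdvanceOrigins(StreamTime& aTracksStartTime,       // graph-tick origin
                   TimeStamp& aTracksStartTimeStamp,   // wall-clock origin
                   GraphTime aBlockedTime,             // e.g. 480
                   TrackRate aGraphRate)               // e.g. 48000
    {
      aTracksStartTime += aBlockedTime;
      aTracksStartTimeStamp +=
        TimeDuration::FromSeconds(static_cast<double>(aBlockedTime) / aGraphRate);
    }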
diff --git a/dom/media/MediaStreamGraphImpl.h b/dom/media/MediaStreamGraphImpl.h
index 73a3c834cd8e..178b1f1d6df7 100644
--- a/dom/media/MediaStreamGraphImpl.h
+++ b/dom/media/MediaStreamGraphImpl.h
@@ -411,10 +411,6 @@ public:
* to the audio output stream. Returns the number of frames played.
*/
StreamTime PlayAudio(MediaStream* aStream);
- /**
- * Set the correct current video frame for stream aStream.
- */
- void PlayVideo(MediaStream* aStream);
/**
* No more data will be forthcoming for aStream. The stream will end
* at the current buffer end point. The StreamTracks's tracks must be
diff --git a/dom/media/TrackUnionStream.h b/dom/media/TrackUnionStream.h
index 4c935e96ad19..d0950f90a8d3 100644
--- a/dom/media/TrackUnionStream.h
+++ b/dom/media/TrackUnionStream.h
@@ -30,6 +30,8 @@ public:
MediaStream* GetInputStreamFor(TrackID aTrackID) override;
TrackID GetInputTrackIDFor(TrackID aTrackID) override;
+ friend class MediaStreamGraphImpl;
+
protected:
// Only non-ended tracks are allowed to persist in this map.
struct TrackMapEntry {
diff --git a/dom/media/VideoFrameContainer.cpp b/dom/media/VideoFrameContainer.cpp
index a0183b0a1439..494c1c45f8b1 100644
--- a/dom/media/VideoFrameContainer.cpp
+++ b/dom/media/VideoFrameContainer.cpp
@@ -39,6 +39,11 @@ VideoFrameContainer::~VideoFrameContainer()
PrincipalHandle VideoFrameContainer::GetLastPrincipalHandle()
{
MutexAutoLock lock(mMutex);
+ return GetLastPrincipalHandleLocked();
+}
+
+PrincipalHandle VideoFrameContainer::GetLastPrincipalHandleLocked()
+{
return mLastPrincipalHandle;
}
@@ -46,6 +51,12 @@ void VideoFrameContainer::UpdatePrincipalHandleForFrameID(const PrincipalHandle&
const ImageContainer::FrameID& aFrameID)
{
MutexAutoLock lock(mMutex);
+ UpdatePrincipalHandleForFrameIDLocked(aPrincipalHandle, aFrameID);
+}
+
+void VideoFrameContainer::UpdatePrincipalHandleForFrameIDLocked(const PrincipalHandle& aPrincipalHandle,
+ const ImageContainer::FrameID& aFrameID)
+{
if (mPendingPrincipalHandle == aPrincipalHandle) {
return;
}
@@ -147,7 +158,7 @@ void VideoFrameContainer::SetCurrentFrames(const VideoSegment& aSegment)
bool principalHandleChanged =
lastPrincipalHandle != PRINCIPAL_HANDLE_NONE &&
- lastPrincipalHandle != GetLastPrincipalHandle();
+ lastPrincipalHandle != GetLastPrincipalHandleLocked();
// Add the frames from this iteration.
for (auto& image : newImages) {
@@ -156,8 +167,8 @@ void VideoFrameContainer::SetCurrentFrames(const VideoSegment& aSegment)
}
if (principalHandleChanged) {
- UpdatePrincipalHandleForFrameID(lastPrincipalHandle,
- newImages.LastElement().mFrameID);
+ UpdatePrincipalHandleForFrameIDLocked(lastPrincipalHandle,
+ newImages.LastElement().mFrameID);
}
SetCurrentFramesLocked(mLastPlayedVideoFrame.GetIntrinsicSize(), images);
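Note on the VideoFrameContainer changes: SetCurrentFrames(const VideoSegment&) already holds mMutex, so calling the public GetLastPrincipalHandle() or UpdatePrincipalHandleForFrameID() from inside it would re-enter a non-reentrant mutex; the patch therefore splits each method into a locking wrapper and a *Locked body. The general pattern, shown with std::mutex for brevity rather than the mozilla::Mutex the real code uses:

    #include <mutex>

    class Example {
    public:
      int GetValue()
      {
        std::lock_guard<std::mutex> lock(mMutex);  // public entry point takes the lock
        return GetValueLocked();
      }
      void DoWorkHoldingLock()
      {
        std::lock_guard<std::mutex> lock(mMutex);
        int v = GetValueLocked();                  // safe: we already hold mMutex
        (void)v;
      }
    private:
      int GetValueLocked() { return mValue; }      // caller must already hold mMutex
      std::mutex mMutex;
      int mValue = 0;
    };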
diff --git a/dom/media/VideoFrameContainer.h b/dom/media/VideoFrameContainer.h
index 38c5736b4765..b61323d3c697 100644
--- a/dom/media/VideoFrameContainer.h
+++ b/dom/media/VideoFrameContainer.h
@@ -48,11 +48,14 @@ public:
const TimeStamp& aTargetTime);
// Returns the last principalHandle we notified mElement about.
PrincipalHandle GetLastPrincipalHandle();
+ PrincipalHandle GetLastPrincipalHandleLocked();
// We will notify mElement that aPrincipalHandle has been applied when all
// FrameIDs prior to aFrameID have been flushed out.
// aFrameID is ignored if aPrincipalHandle already is our pending principalHandle.
void UpdatePrincipalHandleForFrameID(const PrincipalHandle& aPrincipalHandle,
const ImageContainer::FrameID& aFrameID);
+ void UpdatePrincipalHandleForFrameIDLocked(const PrincipalHandle& aPrincipalHandle,
+ const ImageContainer::FrameID& aFrameID);
void SetCurrentFrames(const gfx::IntSize& aIntrinsicSize,
const nsTArray<ImageContainer::NonOwningImage>& aImages);
void ClearCurrentFrame(const gfx::IntSize& aIntrinsicSize)
diff --git a/dom/media/VideoSegment.cpp b/dom/media/VideoSegment.cpp
index b504be4d266c..48e4760ba304 100644
--- a/dom/media/VideoSegment.cpp
+++ b/dom/media/VideoSegment.cpp
@@ -100,9 +100,11 @@ VideoSegment::AppendFrame(already_AddRefed<Image>&& aImage,
StreamTime aDuration,
const IntSize& aIntrinsicSize,
const PrincipalHandle& aPrincipalHandle,
- bool aForceBlack)
+ bool aForceBlack,
+ TimeStamp aTimeStamp)
{
VideoChunk* chunk = AppendChunk(aDuration);
+ chunk->mTimeStamp = aTimeStamp;
VideoFrame frame(aImage, aIntrinsicSize);
frame.SetForceBlack(aForceBlack);
frame.SetPrincipalHandle(aPrincipalHandle);
diff --git a/dom/media/VideoSegment.h b/dom/media/VideoSegment.h
index 749646fbc298..6e871d244b1b 100644
--- a/dom/media/VideoSegment.h
+++ b/dom/media/VideoSegment.h
@@ -109,7 +109,8 @@ public:
StreamTime aDuration,
const IntSize& aIntrinsicSize,
const PrincipalHandle& aPrincipalHandle,
- bool aForceBlack = false);
+ bool aForceBlack = false,
+ TimeStamp aTimeStamp = TimeStamp::Now());
const VideoFrame* GetLastFrame(StreamTime* aStart = nullptr)
{
VideoChunk* c = GetLastChunk();
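Note on the VideoSegment changes: AppendFrame() gains an optional wall-clock TimeStamp (defaulting to TimeStamp::Now()) that is stored on the appended chunk, so a producer that knows when a frame should be displayed can say so explicitly. A usage sketch; the helper and its argument names are assumptions, not code from the patch:

    // Append one decoded frame whose target display time is
    // trackStartTimeStamp + the frame's media time.
    static void
    AppendTimedFrame(VideoSegment& aSegment,
                     already_AddRefed<layers::Image> aImage,
                     StreamTime aDuration,                  // in graph ticks
                     const gfx::IntSize& aIntrinsicSize,
                     const PrincipalHandle& aPrincipal,
                     const TimeStamp& aTrackStartTimeStamp,
                     int64_t aFrameTimeUs)
    {
      aSegment.AppendFrame(std::move(aImage), aDuration, aIntrinsicSize, aPrincipal,
                           /* aForceBlack = */ false,
                           aTrackStartTimeStamp +
                             TimeDuration::FromMicroseconds(aFrameTimeUs));
    }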
diff --git a/dom/media/mediasink/DecodedStream.cpp b/dom/media/mediasink/DecodedStream.cpp
index a144e131b365..bc28a7a83c13 100644
--- a/dom/media/mediasink/DecodedStream.cpp
+++ b/dom/media/mediasink/DecodedStream.cpp
+++ b/dom/media/mediasink/DecodedStream.cpp
@@ -498,6 +498,7 @@ WriteVideoToMediaStream(MediaStream* aStream,
int64_t aEndMicroseconds,
int64_t aStartMicroseconds,
const mozilla::gfx::IntSize& aIntrinsicSize,
+ const TimeStamp& aTimeStamp,
VideoSegment* aOutput,
const PrincipalHandle& aPrincipalHandle)
{
@@ -505,7 +506,8 @@ WriteVideoToMediaStream(MediaStream* aStream,
StreamTime duration =
aStream->MicrosecondsToStreamTimeRoundDown(aEndMicroseconds) -
aStream->MicrosecondsToStreamTimeRoundDown(aStartMicroseconds);
- aOutput->AppendFrame(image.forget(), duration, aIntrinsicSize, aPrincipalHandle);
+ aOutput->AppendFrame(image.forget(), duration, aIntrinsicSize,
+ aPrincipalHandle, false, aTimeStamp);
}
static bool
@@ -537,6 +539,13 @@ DecodedStream::SendVideo(bool aIsSameOrigin, const PrincipalHandle& aPrincipalHa
// is ref-counted.
mVideoQueue.GetElementsAfter(mData->mNextVideoTime, &video);
+ // tracksStartTimeStamp might be null when the SourceMediaStream has not
+ // yet been added to the MediaStreamGraph.
+ TimeStamp tracksStartTimeStamp = sourceStream->GetStreamTracksStartTimeStamp();
+ if (tracksStartTimeStamp.IsNull()) {
+ tracksStartTimeStamp = TimeStamp::Now();
+ }
+
for (uint32_t i = 0; i < video.Length(); ++i) {
VideoData* v = video[i]->As<VideoData>();
@@ -551,14 +560,17 @@ DecodedStream::SendVideo(bool aIsSameOrigin, const PrincipalHandle& aPrincipalHa
// and capture happens at 15 sec, we'll have to append a black frame
// that is 15 sec long.
WriteVideoToMediaStream(sourceStream, mData->mLastVideoImage, v->mTime,
- mData->mNextVideoTime, mData->mLastVideoImageDisplaySize, &output,
- aPrincipalHandle);
+ mData->mNextVideoTime, mData->mLastVideoImageDisplaySize,
+ tracksStartTimeStamp + TimeDuration::FromMicroseconds(v->mTime),
+ &output, aPrincipalHandle);
mData->mNextVideoTime = v->mTime;
}
if (mData->mNextVideoTime < v->GetEndTime()) {
WriteVideoToMediaStream(sourceStream, v->mImage, v->GetEndTime(),
- mData->mNextVideoTime, v->mDisplay, &output, aPrincipalHandle);
+ mData->mNextVideoTime, v->mDisplay,
+ tracksStartTimeStamp + TimeDuration::FromMicroseconds(v->GetEndTime()),
+ &output, aPrincipalHandle);
mData->mNextVideoTime = v->GetEndTime();
mData->mLastVideoImage = v->mImage;
mData->mLastVideoImageDisplaySize = v->mDisplay;
@@ -585,7 +597,9 @@ DecodedStream::SendVideo(bool aIsSameOrigin, const PrincipalHandle& aPrincipalHa
int64_t deviation_usec = sourceStream->StreamTimeToMicroseconds(1);
WriteVideoToMediaStream(sourceStream, mData->mLastVideoImage,
mData->mNextVideoTime + deviation_usec, mData->mNextVideoTime,
- mData->mLastVideoImageDisplaySize, &endSegment, aPrincipalHandle);
+ mData->mLastVideoImageDisplaySize,
+ tracksStartTimeStamp + TimeDuration::FromMicroseconds(mData->mNextVideoTime + deviation_usec),
+ &endSegment, aPrincipalHandle);
mData->mNextVideoTime += deviation_usec;
MOZ_ASSERT(endSegment.GetDuration() > 0);
if (!aIsSameOrigin) {
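Note on the DecodedStream changes: every frame written to the SourceMediaStream now carries a wall-clock timestamp obtained by anchoring its media time (in microseconds) to the stream's start TimeStamp, falling back to Now() when the stream has not been added to a graph yet. A small sketch of that mapping; the helper is hypothetical, the names are taken from the patch:

    // Map a media time in microseconds onto the wall clock.
    // aTrackStart is sourceStream->GetStreamTracksStartTimeStamp(), which may be null.
    static TimeStamp
    MediaTimeToTimeStamp(const TimeStamp& aTrackStart, int64_t aMediaTimeUs)
    {
      TimeStamp anchor = aTrackStart.IsNull() ? TimeStamp::Now() : aTrackStart;
      return anchor + TimeDuration::FromMicroseconds(aMediaTimeUs);
    }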