diff --git a/dom/html/HTMLMediaElement.cpp b/dom/html/HTMLMediaElement.cpp
index 35994a767645..a0bfa5649dab 100644
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -3638,8 +3638,8 @@ void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
SetVolumeInternal();
VideoFrameContainer* container = GetVideoFrameContainer();
- if (mSelectedVideoStreamTrack && container) {
- mSelectedVideoStreamTrack->AddVideoOutput(container);
+ if (container) {
+ stream->AddVideoOutput(container);
}
VideoTrack* videoTrack = VideoTracks()->GetSelectedTrack();
if (videoTrack) {
@@ -3656,8 +3656,8 @@ void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
stream->RemoveAudioOutput(this);
VideoFrameContainer* container = GetVideoFrameContainer();
- if (mSelectedVideoStreamTrack && container) {
- mSelectedVideoStreamTrack->RemoveVideoOutput(container);
+ if (container) {
+ stream->RemoveVideoOutput(container);
}
VideoTrack* videoTrack = VideoTracks()->GetSelectedTrack();
if (videoTrack) {
@@ -3802,9 +3802,6 @@ void HTMLMediaElement::ConstructMediaTracks()
mMediaStreamSizeListener = new StreamSizeListener(this);
streamTrack->AddDirectListener(mMediaStreamSizeListener);
mSelectedVideoStreamTrack = streamTrack;
- if (GetVideoFrameContainer()) {
- mSelectedVideoStreamTrack->AddVideoOutput(GetVideoFrameContainer());
- }
}
}
@@ -3837,10 +3834,6 @@ HTMLMediaElement::NotifyMediaStreamTrackAdded(const RefPtr<MediaStreamTrack>& aT
mMediaStreamSizeListener = new StreamSizeListener(this);
t->AddDirectListener(mMediaStreamSizeListener);
mSelectedVideoStreamTrack = t;
- VideoFrameContainer* container = GetVideoFrameContainer();
- if (mSrcStreamIsPlaying && container) {
- mSelectedVideoStreamTrack->AddVideoOutput(container);
- }
}
}
@@ -3870,10 +3863,6 @@ HTMLMediaElement::NotifyMediaStreamTrackRemoved(const RefPtr<MediaStreamTrack>&
if (mMediaStreamSizeListener) {
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
}
- VideoFrameContainer* container = GetVideoFrameContainer();
- if (mSrcStreamIsPlaying && container) {
- mSelectedVideoStreamTrack->RemoveVideoOutput(container);
- }
mSelectedVideoStreamTrack = nullptr;
MOZ_ASSERT(mSrcStream);
nsTArray<RefPtr<VideoStreamTrack>> tracks;
@@ -3897,9 +3886,6 @@ HTMLMediaElement::NotifyMediaStreamTrackRemoved(const RefPtr<MediaStreamTrack>&
track->AddDirectListener(mMediaStreamSizeListener);
}
mSelectedVideoStreamTrack = track;
- if (container) {
- mSelectedVideoStreamTrack->AddVideoOutput(container);
- }
return;
}
diff --git a/dom/media/MediaStreamGraph.cpp b/dom/media/MediaStreamGraph.cpp
index f7034f162098..47034d303d31 100644
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -109,15 +109,6 @@ void
MediaStreamGraphImpl::AddStreamGraphThread(MediaStream* aStream)
{
aStream->mTracksStartTime = mProcessedTime;
-
- if (aStream->AsSourceStream()) {
- SourceMediaStream* source = aStream->AsSourceStream();
- TimeStamp currentTimeStamp = CurrentDriver()->GetCurrentTimeStamp();
- TimeStamp processedTimeStamp = currentTimeStamp +
- TimeDuration::FromSeconds(MediaTimeToSeconds(mProcessedTime - IterationEnd()));
- source->SetStreamTracksStartTimeStamp(processedTimeStamp);
- }
-
if (aStream->IsSuspended()) {
mSuspendedStreams.AppendElement(aStream);
STREAM_LOG(LogLevel::Debug, ("Adding media stream %p to the graph, in the suspended stream array", aStream));
@@ -937,6 +928,204 @@ MediaStreamGraphImpl::PlayAudio(MediaStream* aStream)
return ticksWritten;
}
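+// Helper for PlayVideo: fills a 1x1 PlanarYCbCrImage with video-range black
+// (Y = 0x10, Cb = Cr = 0x80); the single pixel is later scaled up to cover
+// the rendered area.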
+static void
+SetImageToBlackPixel(PlanarYCbCrImage* aImage)
+{
+ uint8_t blackPixel[] = { 0x10, 0x80, 0x80 };
+
+ PlanarYCbCrData data;
+ data.mYChannel = blackPixel;
+ data.mCbChannel = blackPixel + 1;
+ data.mCrChannel = blackPixel + 2;
+ data.mYStride = data.mCbCrStride = 1;
+ data.mPicSize = data.mYSize = data.mCbCrSize = IntSize(1, 1);
+ aImage->CopyData(data);
+}
+
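+// VideoFrameContainer::Invalidate() must run on the main thread, while
+// PlayVideo runs on the MediaStreamGraph thread, so the invalidation is
+// bounced through this runnable via DispatchToMainThreadAfterStreamStateUpdate().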
+class VideoFrameContainerInvalidateRunnable : public Runnable {
+public:
+ explicit VideoFrameContainerInvalidateRunnable(VideoFrameContainer* aVideoFrameContainer)
+ : mVideoFrameContainer(aVideoFrameContainer)
+ {}
+ NS_IMETHOD Run()
+ {
+ MOZ_ASSERT(NS_IsMainThread());
+
+ mVideoFrameContainer->Invalidate();
+
+ return NS_OK;
+ }
+private:
+ RefPtr<VideoFrameContainer> mVideoFrameContainer;
+};
+
+void
+MediaStreamGraphImpl::PlayVideo(MediaStream* aStream)
+{
+ MOZ_ASSERT(mRealtime, "Should only attempt to play video in realtime mode");
+
+ if (aStream->mVideoOutputs.IsEmpty())
+ return;
+
+ TimeStamp currentTimeStamp = CurrentDriver()->GetCurrentTimeStamp();
+
+ // Collect any new frames produced in this iteration.
+ AutoTArray<ImageContainer::NonOwningImage,4> newImages;
+ PrincipalHandle lastPrincipalHandle = PRINCIPAL_HANDLE_NONE;
+ RefPtr<Image> blackImage;
+
+ MOZ_ASSERT(mProcessedTime >= aStream->mTracksStartTime, "frame position before buffer?");
+ // We only look at the non-blocking interval
+ StreamTime frameBufferTime = aStream->GraphTimeToStreamTime(mProcessedTime);
+ StreamTime bufferEndTime = aStream->GraphTimeToStreamTime(aStream->mStartBlocking);
+ StreamTime start;
+ const VideoChunk* chunk;
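+ // Walk the stream between the already-processed time and the point where it
+ // starts blocking, one chunk at a time, collecting frames not yet played.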
+ for ( ;
+ frameBufferTime < bufferEndTime;
+ frameBufferTime = start + chunk->GetDuration()) {
+ // Pick the last track that has a video chunk for the time, and
+ // schedule its frame.
+ chunk = nullptr;
+ for (StreamTracks::TrackIter tracks(aStream->GetStreamTracks(),
+ MediaSegment::VIDEO);
+ !tracks.IsEnded();
+ tracks.Next()) {
+ VideoSegment* segment = tracks->Get<VideoSegment>();
+ StreamTime thisStart;
+ const VideoChunk* thisChunk =
+ segment->FindChunkContaining(frameBufferTime, &thisStart);
+ if (thisChunk && thisChunk->mFrame.GetImage()) {
+ start = thisStart;
+ chunk = thisChunk;
+ }
+ }
+ if (!chunk)
+ break;
+
+ const VideoFrame* frame = &chunk->mFrame;
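+ // Nothing to schedule if this is still the frame we played last.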
+ if (*frame == aStream->mLastPlayedVideoFrame) {
+ continue;
+ }
+
+ Image* image = frame->GetImage();
+ STREAM_LOG(LogLevel::Verbose,
+ ("MediaStream %p writing video frame %p (%dx%d)",
+ aStream, image, frame->GetIntrinsicSize().width,
+ frame->GetIntrinsicSize().height));
+ // Schedule this frame after the previous frame finishes, instead of at
+ // its start time. These times only differ in the case of multiple
+ // tracks.
+ // frameBufferTime is in the non-blocking interval.
+ GraphTime frameTime = aStream->StreamTimeToGraphTime(frameBufferTime);
+ TimeStamp targetTime = currentTimeStamp +
+ TimeDuration::FromSeconds(MediaTimeToSeconds(frameTime - IterationEnd()));
+
+ if (frame->GetForceBlack()) {
+ if (!blackImage) {
+ // FIXME: PlayVideo will be replaced in a later changeset of this bug,
+ // "Call MediaStreamVideoSink::setCurrentFrames in SourceMediaStream::AppendToTrack."
+ // Until then, this is a temporary workaround to keep the build and tests passing.
+ if (!aStream->mVideoOutputs[0].mListener->AsVideoFrameContainer()) {
+ return;
+ }
+ blackImage = aStream->mVideoOutputs[0].mListener->AsVideoFrameContainer()->
+ GetImageContainer()->CreatePlanarYCbCrImage();
+ if (blackImage) {
+ // Sets the image to a single black pixel, which will be scaled to
+ // fill the rendered size.
+ SetImageToBlackPixel(blackImage->AsPlanarYCbCrImage());
+ }
+ }
+ if (blackImage) {
+ image = blackImage;
+ }
+ }
+ newImages.AppendElement(ImageContainer::NonOwningImage(image, targetTime));
+
+ lastPrincipalHandle = chunk->GetPrincipalHandle();
+
+ aStream->mLastPlayedVideoFrame = *frame;
+ }
+
+ if (!aStream->mLastPlayedVideoFrame.GetImage())
+ return;
+
+ AutoTArray<ImageContainer::NonOwningImage,4> images;
+ bool haveMultipleImages = false;
+
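+ // For each video output, merge the frames still valid in its ImageContainer
+ // with the frames collected above, push the combined list, and schedule a
+ // main-thread invalidation.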
+ for (const TrackBound<MediaStreamVideoSink>& sink : aStream->mVideoOutputs) {
+ VideoFrameContainer* output = sink.mListener->AsVideoFrameContainer();
+ if (!output) {
+ continue;
+ }
+
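+ // Did the frames appended in this iteration carry a different (non-null)
+ // principal than the one this output last reported?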
+ bool principalHandleChanged =
+ lastPrincipalHandle != PRINCIPAL_HANDLE_NONE &&
+ lastPrincipalHandle != output->GetLastPrincipalHandle();
+
+ // Find previous frames that may still be valid.
+ AutoTArray<ImageContainer::OwningImage,4> previousImages;
+ output->GetImageContainer()->GetCurrentImages(&previousImages);
+ uint32_t j = previousImages.Length();
+ if (j) {
+ // Re-use the most recent frame before currentTimeStamp and subsequent,
+ // always keeping at least one frame.
+ do {
+ --j;
+ } while (j > 0 && previousImages[j].mTimeStamp > currentTimeStamp);
+ }
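+ // Note whether more than one frame (previous or new) is still in flight;
+ // a finished stream only drops mLastPlayedVideoFrame once this is no longer
+ // the case (see the end of this function).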
+ if (previousImages.Length() - j + newImages.Length() > 1) {
+ haveMultipleImages = true;
+ }
+
+ // Don't update if there are no changes.
+ if (j == 0 && newImages.IsEmpty())
+ continue;
+
+ for ( ; j < previousImages.Length(); ++j) {
+ const auto& image = previousImages[j];
+ // Cope with potential clock skew with AudioCallbackDriver.
+ if (newImages.Length() && image.mTimeStamp > newImages[0].mTimeStamp) {
+ STREAM_LOG(LogLevel::Warning,
+ ("Dropping %u video frames due to clock skew",
+ unsigned(previousImages.Length() - j)));
+ break;
+ }
+
+ images.AppendElement(ImageContainer::
+ NonOwningImage(image.mImage,
+ image.mTimeStamp, image.mFrameID));
+ }
+
+ // Add the frames from this iteration.
+ for (auto& image : newImages) {
+ image.mFrameID = output->NewFrameID();
+ images.AppendElement(image);
+ }
+
+ if (principalHandleChanged) {
+ output->UpdatePrincipalHandleForFrameID(lastPrincipalHandle,
+ newImages.LastElement().mFrameID);
+ }
+
+ output->SetCurrentFrames(aStream->mLastPlayedVideoFrame.GetIntrinsicSize(),
+ images);
+
+ nsCOMPtr<nsIRunnable> event =
+ new VideoFrameContainerInvalidateRunnable(output);
+ DispatchToMainThreadAfterStreamStateUpdate(event.forget());
+
+ images.ClearAndRetainStorage();
+ }
+
+ // If the stream has finished and the timestamps of all frames have expired
+ // then no more updates are required.
+ if (aStream->mFinished && !haveMultipleImages) {
+ aStream->mLastPlayedVideoFrame.SetNull();
+ }
+}
+
void
MediaStreamGraphImpl::OpenAudioInputImpl(int aID,
AudioDataListener *aListener)
@@ -1350,6 +1539,7 @@ MediaStreamGraphImpl::Process()
"Each stream should have the same number of frame.");
}
}
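+ // Hand any video frames produced in this iteration to the stream's video
+ // outputs.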
+ PlayVideo(stream);
}
if (stream->mStartBlocking > mProcessedTime) {
allBlockedForever = false;
@@ -2646,16 +2836,6 @@ SourceMediaStream::ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSe
segment->ResampleChunks(aTrackData->mResampler, aTrackData->mInputRate, GraphImpl()->GraphRate());
}
-void
-SourceMediaStream::AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime,
- GraphTime aBlockedTime)
-{
- MutexAutoLock lock(mMutex);
- mTracksStartTime += aBlockedTime;
- mStreamTracksStartTimeStamp += TimeDuration::FromSeconds(GraphImpl()->MediaTimeToSeconds(aBlockedTime));
- mTracks.ForgetUpTo(aCurrentTime - mTracksStartTime);
-}
-
bool
SourceMediaStream::AppendToTrack(TrackID aID, MediaSegment* aSegment, MediaSegment *aRawSegment)
{
@@ -2778,9 +2958,9 @@ SourceMediaStream::AddDirectTrackListenerImpl(already_AddRefed<DirectMediaStreamTrackListener> aListener,
RefPtr<DirectMediaStreamTrackListener> listener = aListener;
STREAM_LOG(LogLevel::Debug, ("Adding direct track listener %p bound to track %d to source stream %p",
listener.get(), aTrackID, this));
@@ -2793,19 +2973,6 @@ SourceMediaStream::AddDirectTrackListenerImpl(already_AddRefed<DirectMediaStreamTrackListener> aListener,
isAudio = data->mData->GetType() == MediaSegment::AUDIO;
isVideo = data->mData->GetType() == MediaSegment::VIDEO;
}
-
- // The track might be removed from mUpdateTrack but still exist in
- // mTracks.
- auto streamTrack = FindTrack(aTrackID);
- bool foundTrack = !!streamTrack;
- if (foundTrack) {
- MediaStreamVideoSink* videoSink = listener->AsMediaStreamVideoSink();
- // Re-send missed VideoSegment to new added MediaStreamVideoSink.
- if (streamTrack->GetType() == MediaSegment::VIDEO && videoSink) {
- videoSink->SetCurrentFrames(*(static_cast<VideoSegment*>(streamTrack->GetSegment())));
- }
- }
-
if (found && (isAudio || isVideo)) {
for (auto entry : mDirectTrackListeners) {
if (entry.mListener == listener &&
diff --git a/dom/media/MediaStreamGraph.h b/dom/media/MediaStreamGraph.h
index 55804723cfb1..f86a044b057b 100644
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -543,9 +543,7 @@ public:
}
protected:
- // |AdvanceTimeVaryingValuesToCurrentTime| will be override in SourceMediaStream.
- virtual void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime,
- GraphTime aBlockedTime)
+ void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime, GraphTime aBlockedTime)
{
mTracksStartTime += aBlockedTime;
mTracks.ForgetUpTo(aCurrentTime - mTracksStartTime);
@@ -807,11 +805,6 @@ public:
*/
bool HasPendingAudioTrack();
- TimeStamp GetStreamTracksStrartTimeStamp() {
- MutexAutoLock lock(mMutex);
- return mStreamTracksStartTimeStamp;
- }
-
// XXX need a Reset API
friend class MediaStreamGraphImpl;
@@ -876,15 +869,6 @@ protected:
void NotifyDirectConsumers(TrackData *aTrack,
MediaSegment *aSegment);
- virtual void
- AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime,
- GraphTime aBlockedTime) override;
- void SetStreamTracksStartTimeStamp(const TimeStamp& aTimeStamp)
- {
- MutexAutoLock lock(mMutex);
- mStreamTracksStartTimeStamp = aTimeStamp;
- }
-
// Only accessed on the MSG thread. Used so to ask the MSGImpl to usecount
// users of a specific input.
// XXX Should really be a CubebUtils::AudioDeviceID, but they aren't
@@ -896,10 +880,6 @@ protected:
Mutex mMutex;
// protected by mMutex
StreamTime mUpdateKnownTracksTime;
- // This time stamp will be updated in adding and blocked SourceMediaStream,
- // |AddStreamGraphThread| and |AdvanceTimeVaryingValuesToCurrentTime| in
- // particularly.
- TimeStamp mStreamTracksStartTimeStamp;
nsTArray<TrackData> mUpdateTracks;
nsTArray<TrackData> mPendingTracks;
nsTArray<RefPtr<DirectMediaStreamListener>> mDirectListeners;
diff --git a/dom/media/MediaStreamGraphImpl.h b/dom/media/MediaStreamGraphImpl.h
index 178b1f1d6df7..73a3c834cd8e 100644
--- a/dom/media/MediaStreamGraphImpl.h
+++ b/dom/media/MediaStreamGraphImpl.h
@@ -411,6 +411,10 @@ public:
* to the audio output stream. Returns the number of frames played.
*/
StreamTime PlayAudio(MediaStream* aStream);
+ /**
+ * Set the correct current video frame for stream aStream.
+ */
+ void PlayVideo(MediaStream* aStream);
/**
* No more data will be forthcoming for aStream. The stream will end
* at the current buffer end point. The StreamTracks's tracks must be
diff --git a/dom/media/TrackUnionStream.h b/dom/media/TrackUnionStream.h
index d0950f90a8d3..4c935e96ad19 100644
--- a/dom/media/TrackUnionStream.h
+++ b/dom/media/TrackUnionStream.h
@@ -30,8 +30,6 @@ public:
MediaStream* GetInputStreamFor(TrackID aTrackID) override;
TrackID GetInputTrackIDFor(TrackID aTrackID) override;
- friend class MediaStreamGraphImpl;
-
protected:
// Only non-ended tracks are allowed to persist in this map.
struct TrackMapEntry {
diff --git a/dom/media/VideoFrameContainer.cpp b/dom/media/VideoFrameContainer.cpp
index 494c1c45f8b1..a0183b0a1439 100644
--- a/dom/media/VideoFrameContainer.cpp
+++ b/dom/media/VideoFrameContainer.cpp
@@ -39,11 +39,6 @@ VideoFrameContainer::~VideoFrameContainer()
PrincipalHandle VideoFrameContainer::GetLastPrincipalHandle()
{
MutexAutoLock lock(mMutex);
- return GetLastPrincipalHandleLocked();
-}
-
-PrincipalHandle VideoFrameContainer::GetLastPrincipalHandleLocked()
-{
return mLastPrincipalHandle;
}
@@ -51,12 +46,6 @@ void VideoFrameContainer::UpdatePrincipalHandleForFrameID(const PrincipalHandle&
const ImageContainer::FrameID& aFrameID)
{
MutexAutoLock lock(mMutex);
- UpdatePrincipalHandleForFrameIDLocked(aPrincipalHandle, aFrameID);
-}
-
-void VideoFrameContainer::UpdatePrincipalHandleForFrameIDLocked(const PrincipalHandle& aPrincipalHandle,
- const ImageContainer::FrameID& aFrameID)
-{
if (mPendingPrincipalHandle == aPrincipalHandle) {
return;
}
@@ -158,7 +147,7 @@ void VideoFrameContainer::SetCurrentFrames(const VideoSegment& aSegment)
bool principalHandleChanged =
lastPrincipalHandle != PRINCIPAL_HANDLE_NONE &&
- lastPrincipalHandle != GetLastPrincipalHandleLocked();
+ lastPrincipalHandle != GetLastPrincipalHandle();
// Add the frames from this iteration.
for (auto& image : newImages) {
@@ -167,8 +156,8 @@ void VideoFrameContainer::SetCurrentFrames(const VideoSegment& aSegment)
}
if (principalHandleChanged) {
- UpdatePrincipalHandleForFrameIDLocked(lastPrincipalHandle,
- newImages.LastElement().mFrameID);
+ UpdatePrincipalHandleForFrameID(lastPrincipalHandle,
+ newImages.LastElement().mFrameID);
}
SetCurrentFramesLocked(mLastPlayedVideoFrame.GetIntrinsicSize(), images);
diff --git a/dom/media/VideoFrameContainer.h b/dom/media/VideoFrameContainer.h
index b61323d3c697..38c5736b4765 100644
--- a/dom/media/VideoFrameContainer.h
+++ b/dom/media/VideoFrameContainer.h
@@ -48,14 +48,11 @@ public:
const TimeStamp& aTargetTime);
// Returns the last principalHandle we notified mElement about.
PrincipalHandle GetLastPrincipalHandle();
- PrincipalHandle GetLastPrincipalHandleLocked();
// We will notify mElement that aPrincipalHandle has been applied when all
// FrameIDs prior to aFrameID have been flushed out.
// aFrameID is ignored if aPrincipalHandle already is our pending principalHandle.
void UpdatePrincipalHandleForFrameID(const PrincipalHandle& aPrincipalHandle,
const ImageContainer::FrameID& aFrameID);
- void UpdatePrincipalHandleForFrameIDLocked(const PrincipalHandle& aPrincipalHandle,
- const ImageContainer::FrameID& aFrameID);
void SetCurrentFrames(const gfx::IntSize& aIntrinsicSize,
const nsTArray<ImageContainer::NonOwningImage>& aImages);
void ClearCurrentFrame(const gfx::IntSize& aIntrinsicSize)
diff --git a/dom/media/VideoSegment.cpp b/dom/media/VideoSegment.cpp
index 48e4760ba304..b504be4d266c 100644
--- a/dom/media/VideoSegment.cpp
+++ b/dom/media/VideoSegment.cpp
@@ -100,11 +100,9 @@ VideoSegment::AppendFrame(already_AddRefed<Image>&& aImage,
StreamTime aDuration,
const IntSize& aIntrinsicSize,
const PrincipalHandle& aPrincipalHandle,
- bool aForceBlack,
- TimeStamp aTimeStamp)
+ bool aForceBlack)
{
VideoChunk* chunk = AppendChunk(aDuration);
- chunk->mTimeStamp = aTimeStamp;
VideoFrame frame(aImage, aIntrinsicSize);
frame.SetForceBlack(aForceBlack);
frame.SetPrincipalHandle(aPrincipalHandle);
diff --git a/dom/media/VideoSegment.h b/dom/media/VideoSegment.h
index 6e871d244b1b..749646fbc298 100644
--- a/dom/media/VideoSegment.h
+++ b/dom/media/VideoSegment.h
@@ -109,8 +109,7 @@ public:
StreamTime aDuration,
const IntSize& aIntrinsicSize,
const PrincipalHandle& aPrincipalHandle,
- bool aForceBlack = false,
- TimeStamp aTimeStamp = TimeStamp::Now());
+ bool aForceBlack = false);
const VideoFrame* GetLastFrame(StreamTime* aStart = nullptr)
{
VideoChunk* c = GetLastChunk();
diff --git a/dom/media/mediasink/DecodedStream.cpp b/dom/media/mediasink/DecodedStream.cpp
index bc28a7a83c13..a144e131b365 100644
--- a/dom/media/mediasink/DecodedStream.cpp
+++ b/dom/media/mediasink/DecodedStream.cpp
@@ -498,7 +498,6 @@ WriteVideoToMediaStream(MediaStream* aStream,
int64_t aEndMicroseconds,
int64_t aStartMicroseconds,
const mozilla::gfx::IntSize& aIntrinsicSize,
- const TimeStamp& aTimeStamp,
VideoSegment* aOutput,
const PrincipalHandle& aPrincipalHandle)
{
@@ -506,8 +505,7 @@ WriteVideoToMediaStream(MediaStream* aStream,
StreamTime duration =
aStream->MicrosecondsToStreamTimeRoundDown(aEndMicroseconds) -
aStream->MicrosecondsToStreamTimeRoundDown(aStartMicroseconds);
- aOutput->AppendFrame(image.forget(), duration, aIntrinsicSize,
- aPrincipalHandle, false, aTimeStamp);
+ aOutput->AppendFrame(image.forget(), duration, aIntrinsicSize, aPrincipalHandle);
}
static bool
@@ -539,13 +537,6 @@ DecodedStream::SendVideo(bool aIsSameOrigin, const PrincipalHandle& aPrincipalHa
// is ref-counted.
mVideoQueue.GetElementsAfter(mData->mNextVideoTime, &video);
- // tracksStartTimeStamp might be null when the SourceMediaStream not yet
- // be added to MediaStreamGraph.
- TimeStamp tracksStartTimeStamp = sourceStream->GetStreamTracksStrartTimeStamp();
- if (tracksStartTimeStamp.IsNull()) {
- tracksStartTimeStamp = TimeStamp::Now();
- }
-
for (uint32_t i = 0; i < video.Length(); ++i) {
VideoData* v = video[i]->As<VideoData>();
@@ -560,17 +551,14 @@ DecodedStream::SendVideo(bool aIsSameOrigin, const PrincipalHandle& aPrincipalHa
// and capture happens at 15 sec, we'll have to append a black frame
// that is 15 sec long.
WriteVideoToMediaStream(sourceStream, mData->mLastVideoImage, v->mTime,
- mData->mNextVideoTime, mData->mLastVideoImageDisplaySize,
- tracksStartTimeStamp + TimeDuration::FromMicroseconds(v->mTime),
- &output, aPrincipalHandle);
+ mData->mNextVideoTime, mData->mLastVideoImageDisplaySize, &output,
+ aPrincipalHandle);
mData->mNextVideoTime = v->mTime;
}
if (mData->mNextVideoTime < v->GetEndTime()) {
WriteVideoToMediaStream(sourceStream, v->mImage, v->GetEndTime(),
- mData->mNextVideoTime, v->mDisplay,
- tracksStartTimeStamp + TimeDuration::FromMicroseconds(v->GetEndTime()),
- &output, aPrincipalHandle);
+ mData->mNextVideoTime, v->mDisplay, &output, aPrincipalHandle);
mData->mNextVideoTime = v->GetEndTime();
mData->mLastVideoImage = v->mImage;
mData->mLastVideoImageDisplaySize = v->mDisplay;
@@ -597,9 +585,7 @@ DecodedStream::SendVideo(bool aIsSameOrigin, const PrincipalHandle& aPrincipalHa
int64_t deviation_usec = sourceStream->StreamTimeToMicroseconds(1);
WriteVideoToMediaStream(sourceStream, mData->mLastVideoImage,
mData->mNextVideoTime + deviation_usec, mData->mNextVideoTime,
- mData->mLastVideoImageDisplaySize,
- tracksStartTimeStamp + TimeDuration::FromMicroseconds(mData->mNextVideoTime + deviation_usec),
- &endSegment, aPrincipalHandle);
+ mData->mLastVideoImageDisplaySize, &endSegment, aPrincipalHandle);
mData->mNextVideoTime += deviation_usec;
MOZ_ASSERT(endSegment.GetDuration() > 0);
if (!aIsSameOrigin) {