зеркало из https://github.com/mozilla/gecko-dev.git
Bug 1201363 - Call MediaStreamVideoSink::setCurrentFrames in SourceMediaStream::AppendToTrack. r=jesup
In this patch, we first deal with the case of MediaElement. Now we replace |PlayVideo| with |VideoFrameContainer::SetCurrentFrames| in |SourceMediaStream::AppendToTrack|. The MSG uses TimeStamp::Now() for the TimeStamp of each video frame in most cases, except the MediaElement case. Because the MediaElement has its own VideoQueue, we need to calculate the correct TimeStamp based on the StartTimeStamp of this MediaStream and the elapsed time of the video frame in DecodedStream. MozReview-Commit-ID: 2bm2AHkFXHu --HG-- extra : transplant_source : %3D%AA%00%CE%A3SV5%8F%84%96%AC%E2%D9%10%EC%85%07N%DF
This commit is contained in:
Родитель
384447b1ba
Коммит
2c69985ffe
|
@ -3638,8 +3638,8 @@ void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
|
|||
SetVolumeInternal();
|
||||
|
||||
VideoFrameContainer* container = GetVideoFrameContainer();
|
||||
if (container) {
|
||||
stream->AddVideoOutput(container);
|
||||
if (mSelectedVideoStreamTrack && container) {
|
||||
mSelectedVideoStreamTrack->AddVideoOutput(container);
|
||||
}
|
||||
VideoTrack* videoTrack = VideoTracks()->GetSelectedTrack();
|
||||
if (videoTrack) {
|
||||
|
@ -3656,8 +3656,8 @@ void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
|
|||
|
||||
stream->RemoveAudioOutput(this);
|
||||
VideoFrameContainer* container = GetVideoFrameContainer();
|
||||
if (container) {
|
||||
stream->RemoveVideoOutput(container);
|
||||
if (mSelectedVideoStreamTrack && container) {
|
||||
mSelectedVideoStreamTrack->RemoveVideoOutput(container);
|
||||
}
|
||||
VideoTrack* videoTrack = VideoTracks()->GetSelectedTrack();
|
||||
if (videoTrack) {
|
||||
|
@ -3802,6 +3802,9 @@ void HTMLMediaElement::ConstructMediaTracks()
|
|||
mMediaStreamSizeListener = new StreamSizeListener(this);
|
||||
streamTrack->AddDirectListener(mMediaStreamSizeListener);
|
||||
mSelectedVideoStreamTrack = streamTrack;
|
||||
if (GetVideoFrameContainer()) {
|
||||
mSelectedVideoStreamTrack->AddVideoOutput(GetVideoFrameContainer());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3834,6 +3837,10 @@ HTMLMediaElement::NotifyMediaStreamTrackAdded(const RefPtr<MediaStreamTrack>& aT
|
|||
mMediaStreamSizeListener = new StreamSizeListener(this);
|
||||
t->AddDirectListener(mMediaStreamSizeListener);
|
||||
mSelectedVideoStreamTrack = t;
|
||||
VideoFrameContainer* container = GetVideoFrameContainer();
|
||||
if (mSrcStreamIsPlaying && container) {
|
||||
mSelectedVideoStreamTrack->AddVideoOutput(container);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -3863,6 +3870,10 @@ HTMLMediaElement::NotifyMediaStreamTrackRemoved(const RefPtr<MediaStreamTrack>&
|
|||
if (mMediaStreamSizeListener) {
|
||||
mSelectedVideoStreamTrack->RemoveDirectListener(mMediaStreamSizeListener);
|
||||
}
|
||||
VideoFrameContainer* container = GetVideoFrameContainer();
|
||||
if (mSrcStreamIsPlaying && container) {
|
||||
mSelectedVideoStreamTrack->RemoveVideoOutput(container);
|
||||
}
|
||||
mSelectedVideoStreamTrack = nullptr;
|
||||
MOZ_ASSERT(mSrcStream);
|
||||
nsTArray<RefPtr<VideoStreamTrack>> tracks;
|
||||
|
@ -3886,6 +3897,9 @@ HTMLMediaElement::NotifyMediaStreamTrackRemoved(const RefPtr<MediaStreamTrack>&
|
|||
track->AddDirectListener(mMediaStreamSizeListener);
|
||||
}
|
||||
mSelectedVideoStreamTrack = track;
|
||||
if (container) {
|
||||
mSelectedVideoStreamTrack->AddVideoOutput(container);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
|
@ -109,6 +109,15 @@ void
|
|||
MediaStreamGraphImpl::AddStreamGraphThread(MediaStream* aStream)
|
||||
{
|
||||
aStream->mTracksStartTime = mProcessedTime;
|
||||
|
||||
if (aStream->AsSourceStream()) {
|
||||
SourceMediaStream* source = aStream->AsSourceStream();
|
||||
TimeStamp currentTimeStamp = CurrentDriver()->GetCurrentTimeStamp();
|
||||
TimeStamp processedTimeStamp = currentTimeStamp +
|
||||
TimeDuration::FromSeconds(MediaTimeToSeconds(mProcessedTime - IterationEnd()));
|
||||
source->SetStreamTracksStartTimeStamp(processedTimeStamp);
|
||||
}
|
||||
|
||||
if (aStream->IsSuspended()) {
|
||||
mSuspendedStreams.AppendElement(aStream);
|
||||
STREAM_LOG(LogLevel::Debug, ("Adding media stream %p to the graph, in the suspended stream array", aStream));
|
||||
|
@ -928,204 +937,6 @@ MediaStreamGraphImpl::PlayAudio(MediaStream* aStream)
|
|||
return ticksWritten;
|
||||
}
|
||||
|
||||
static void
|
||||
SetImageToBlackPixel(PlanarYCbCrImage* aImage)
|
||||
{
|
||||
uint8_t blackPixel[] = { 0x10, 0x80, 0x80 };
|
||||
|
||||
PlanarYCbCrData data;
|
||||
data.mYChannel = blackPixel;
|
||||
data.mCbChannel = blackPixel + 1;
|
||||
data.mCrChannel = blackPixel + 2;
|
||||
data.mYStride = data.mCbCrStride = 1;
|
||||
data.mPicSize = data.mYSize = data.mCbCrSize = IntSize(1, 1);
|
||||
aImage->CopyData(data);
|
||||
}
|
||||
|
||||
class VideoFrameContainerInvalidateRunnable : public Runnable {
|
||||
public:
|
||||
explicit VideoFrameContainerInvalidateRunnable(VideoFrameContainer* aVideoFrameContainer)
|
||||
: mVideoFrameContainer(aVideoFrameContainer)
|
||||
{}
|
||||
NS_IMETHOD Run()
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
|
||||
mVideoFrameContainer->Invalidate();
|
||||
|
||||
return NS_OK;
|
||||
}
|
||||
private:
|
||||
RefPtr<VideoFrameContainer> mVideoFrameContainer;
|
||||
};
|
||||
|
||||
void
|
||||
MediaStreamGraphImpl::PlayVideo(MediaStream* aStream)
|
||||
{
|
||||
MOZ_ASSERT(mRealtime, "Should only attempt to play video in realtime mode");
|
||||
|
||||
if (aStream->mVideoOutputs.IsEmpty())
|
||||
return;
|
||||
|
||||
TimeStamp currentTimeStamp = CurrentDriver()->GetCurrentTimeStamp();
|
||||
|
||||
// Collect any new frames produced in this iteration.
|
||||
AutoTArray<ImageContainer::NonOwningImage,4> newImages;
|
||||
PrincipalHandle lastPrincipalHandle = PRINCIPAL_HANDLE_NONE;
|
||||
RefPtr<Image> blackImage;
|
||||
|
||||
MOZ_ASSERT(mProcessedTime >= aStream->mTracksStartTime, "frame position before buffer?");
|
||||
// We only look at the non-blocking interval
|
||||
StreamTime frameBufferTime = aStream->GraphTimeToStreamTime(mProcessedTime);
|
||||
StreamTime bufferEndTime = aStream->GraphTimeToStreamTime(aStream->mStartBlocking);
|
||||
StreamTime start;
|
||||
const VideoChunk* chunk;
|
||||
for ( ;
|
||||
frameBufferTime < bufferEndTime;
|
||||
frameBufferTime = start + chunk->GetDuration()) {
|
||||
// Pick the last track that has a video chunk for the time, and
|
||||
// schedule its frame.
|
||||
chunk = nullptr;
|
||||
for (StreamTracks::TrackIter tracks(aStream->GetStreamTracks(),
|
||||
MediaSegment::VIDEO);
|
||||
!tracks.IsEnded();
|
||||
tracks.Next()) {
|
||||
VideoSegment* segment = tracks->Get<VideoSegment>();
|
||||
StreamTime thisStart;
|
||||
const VideoChunk* thisChunk =
|
||||
segment->FindChunkContaining(frameBufferTime, &thisStart);
|
||||
if (thisChunk && thisChunk->mFrame.GetImage()) {
|
||||
start = thisStart;
|
||||
chunk = thisChunk;
|
||||
}
|
||||
}
|
||||
if (!chunk)
|
||||
break;
|
||||
|
||||
const VideoFrame* frame = &chunk->mFrame;
|
||||
if (*frame == aStream->mLastPlayedVideoFrame) {
|
||||
continue;
|
||||
}
|
||||
|
||||
Image* image = frame->GetImage();
|
||||
STREAM_LOG(LogLevel::Verbose,
|
||||
("MediaStream %p writing video frame %p (%dx%d)",
|
||||
aStream, image, frame->GetIntrinsicSize().width,
|
||||
frame->GetIntrinsicSize().height));
|
||||
// Schedule this frame after the previous frame finishes, instead of at
|
||||
// its start time. These times only differ in the case of multiple
|
||||
// tracks.
|
||||
// frameBufferTime is in the non-blocking interval.
|
||||
GraphTime frameTime = aStream->StreamTimeToGraphTime(frameBufferTime);
|
||||
TimeStamp targetTime = currentTimeStamp +
|
||||
TimeDuration::FromSeconds(MediaTimeToSeconds(frameTime - IterationEnd()));
|
||||
|
||||
if (frame->GetForceBlack()) {
|
||||
if (!blackImage) {
|
||||
// Fixme: PlayVideo will be replaced in latter changeset
|
||||
// "Call MediaStreamVideoSink::setCurrentFrames in SourceMediaStream::AppendToTrack."
|
||||
// of this bug.
|
||||
// This is a temp workaround to pass the build and test.
|
||||
if (!aStream->mVideoOutputs[0].mListener->AsVideoFrameContainer()) {
|
||||
return;
|
||||
}
|
||||
blackImage = aStream->mVideoOutputs[0].mListener->AsVideoFrameContainer()->
|
||||
GetImageContainer()->CreatePlanarYCbCrImage();
|
||||
if (blackImage) {
|
||||
// Sets the image to a single black pixel, which will be scaled to
|
||||
// fill the rendered size.
|
||||
SetImageToBlackPixel(blackImage->AsPlanarYCbCrImage());
|
||||
}
|
||||
}
|
||||
if (blackImage) {
|
||||
image = blackImage;
|
||||
}
|
||||
}
|
||||
newImages.AppendElement(ImageContainer::NonOwningImage(image, targetTime));
|
||||
|
||||
lastPrincipalHandle = chunk->GetPrincipalHandle();
|
||||
|
||||
aStream->mLastPlayedVideoFrame = *frame;
|
||||
}
|
||||
|
||||
if (!aStream->mLastPlayedVideoFrame.GetImage())
|
||||
return;
|
||||
|
||||
AutoTArray<ImageContainer::NonOwningImage,4> images;
|
||||
bool haveMultipleImages = false;
|
||||
|
||||
for (const TrackBound<MediaStreamVideoSink>& sink : aStream->mVideoOutputs) {
|
||||
VideoFrameContainer* output = sink.mListener->AsVideoFrameContainer();
|
||||
if (!output) {
|
||||
continue;
|
||||
}
|
||||
|
||||
bool principalHandleChanged =
|
||||
lastPrincipalHandle != PRINCIPAL_HANDLE_NONE &&
|
||||
lastPrincipalHandle != output->GetLastPrincipalHandle();
|
||||
|
||||
// Find previous frames that may still be valid.
|
||||
AutoTArray<ImageContainer::OwningImage,4> previousImages;
|
||||
output->GetImageContainer()->GetCurrentImages(&previousImages);
|
||||
uint32_t j = previousImages.Length();
|
||||
if (j) {
|
||||
// Re-use the most recent frame before currentTimeStamp and subsequent,
|
||||
// always keeping at least one frame.
|
||||
do {
|
||||
--j;
|
||||
} while (j > 0 && previousImages[j].mTimeStamp > currentTimeStamp);
|
||||
}
|
||||
if (previousImages.Length() - j + newImages.Length() > 1) {
|
||||
haveMultipleImages = true;
|
||||
}
|
||||
|
||||
// Don't update if there are no changes.
|
||||
if (j == 0 && newImages.IsEmpty())
|
||||
continue;
|
||||
|
||||
for ( ; j < previousImages.Length(); ++j) {
|
||||
const auto& image = previousImages[j];
|
||||
// Cope with potential clock skew with AudioCallbackDriver.
|
||||
if (newImages.Length() && image.mTimeStamp > newImages[0].mTimeStamp) {
|
||||
STREAM_LOG(LogLevel::Warning,
|
||||
("Dropping %u video frames due to clock skew",
|
||||
unsigned(previousImages.Length() - j)));
|
||||
break;
|
||||
}
|
||||
|
||||
images.AppendElement(ImageContainer::
|
||||
NonOwningImage(image.mImage,
|
||||
image.mTimeStamp, image.mFrameID));
|
||||
}
|
||||
|
||||
// Add the frames from this iteration.
|
||||
for (auto& image : newImages) {
|
||||
image.mFrameID = output->NewFrameID();
|
||||
images.AppendElement(image);
|
||||
}
|
||||
|
||||
if (principalHandleChanged) {
|
||||
output->UpdatePrincipalHandleForFrameID(lastPrincipalHandle,
|
||||
newImages.LastElement().mFrameID);
|
||||
}
|
||||
|
||||
output->SetCurrentFrames(aStream->mLastPlayedVideoFrame.GetIntrinsicSize(),
|
||||
images);
|
||||
|
||||
nsCOMPtr<nsIRunnable> event =
|
||||
new VideoFrameContainerInvalidateRunnable(output);
|
||||
DispatchToMainThreadAfterStreamStateUpdate(event.forget());
|
||||
|
||||
images.ClearAndRetainStorage();
|
||||
}
|
||||
|
||||
// If the stream has finished and the timestamps of all frames have expired
|
||||
// then no more updates are required.
|
||||
if (aStream->mFinished && !haveMultipleImages) {
|
||||
aStream->mLastPlayedVideoFrame.SetNull();
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
MediaStreamGraphImpl::OpenAudioInputImpl(int aID,
|
||||
AudioDataListener *aListener)
|
||||
|
@ -1539,7 +1350,6 @@ MediaStreamGraphImpl::Process()
|
|||
"Each stream should have the same number of frame.");
|
||||
}
|
||||
}
|
||||
PlayVideo(stream);
|
||||
}
|
||||
if (stream->mStartBlocking > mProcessedTime) {
|
||||
allBlockedForever = false;
|
||||
|
@ -2836,6 +2646,16 @@ SourceMediaStream::ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSe
|
|||
segment->ResampleChunks(aTrackData->mResampler, aTrackData->mInputRate, GraphImpl()->GraphRate());
|
||||
}
|
||||
|
||||
void
|
||||
SourceMediaStream::AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime,
|
||||
GraphTime aBlockedTime)
|
||||
{
|
||||
MutexAutoLock lock(mMutex);
|
||||
mTracksStartTime += aBlockedTime;
|
||||
mStreamTracksStartTimeStamp += TimeDuration::FromSeconds(GraphImpl()->MediaTimeToSeconds(aBlockedTime));
|
||||
mTracks.ForgetUpTo(aCurrentTime - mTracksStartTime);
|
||||
}
|
||||
|
||||
bool
|
||||
SourceMediaStream::AppendToTrack(TrackID aID, MediaSegment* aSegment, MediaSegment *aRawSegment)
|
||||
{
|
||||
|
@ -2958,9 +2778,9 @@ SourceMediaStream::AddDirectTrackListenerImpl(already_AddRefed<DirectMediaStream
|
|||
{
|
||||
MOZ_ASSERT(IsTrackIDExplicit(aTrackID));
|
||||
TrackData* data;
|
||||
bool found;
|
||||
bool isAudio;
|
||||
bool isVideo;
|
||||
bool found = false;
|
||||
bool isAudio = false;
|
||||
bool isVideo = false;
|
||||
RefPtr<DirectMediaStreamTrackListener> listener = aListener;
|
||||
STREAM_LOG(LogLevel::Debug, ("Adding direct track listener %p bound to track %d to source stream %p",
|
||||
listener.get(), aTrackID, this));
|
||||
|
@ -2973,6 +2793,19 @@ SourceMediaStream::AddDirectTrackListenerImpl(already_AddRefed<DirectMediaStream
|
|||
isAudio = data->mData->GetType() == MediaSegment::AUDIO;
|
||||
isVideo = data->mData->GetType() == MediaSegment::VIDEO;
|
||||
}
|
||||
|
||||
// The track might be removed from mUpdateTrack but still exist in
|
||||
// mTracks.
|
||||
auto streamTrack = FindTrack(aTrackID);
|
||||
bool foundTrack = !!streamTrack;
|
||||
if (foundTrack) {
|
||||
MediaStreamVideoSink* videoSink = listener->AsMediaStreamVideoSink();
|
||||
// Re-send missed VideoSegment to new added MediaStreamVideoSink.
|
||||
if (streamTrack->GetType() == MediaSegment::VIDEO && videoSink) {
|
||||
videoSink->SetCurrentFrames(*(static_cast<VideoSegment*>(streamTrack->GetSegment())));
|
||||
}
|
||||
}
|
||||
|
||||
if (found && (isAudio || isVideo)) {
|
||||
for (auto entry : mDirectTrackListeners) {
|
||||
if (entry.mListener == listener &&
|
||||
|
|
|
@ -543,7 +543,9 @@ public:
|
|||
}
|
||||
|
||||
protected:
|
||||
void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime, GraphTime aBlockedTime)
|
||||
// |AdvanceTimeVaryingValuesToCurrentTime| will be override in SourceMediaStream.
|
||||
virtual void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime,
|
||||
GraphTime aBlockedTime)
|
||||
{
|
||||
mTracksStartTime += aBlockedTime;
|
||||
mTracks.ForgetUpTo(aCurrentTime - mTracksStartTime);
|
||||
|
@ -805,6 +807,11 @@ public:
|
|||
*/
|
||||
bool HasPendingAudioTrack();
|
||||
|
||||
TimeStamp GetStreamTracksStrartTimeStamp() {
|
||||
MutexAutoLock lock(mMutex);
|
||||
return mStreamTracksStartTimeStamp;
|
||||
}
|
||||
|
||||
// XXX need a Reset API
|
||||
|
||||
friend class MediaStreamGraphImpl;
|
||||
|
@ -869,6 +876,15 @@ protected:
|
|||
void NotifyDirectConsumers(TrackData *aTrack,
|
||||
MediaSegment *aSegment);
|
||||
|
||||
virtual void
|
||||
AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime,
|
||||
GraphTime aBlockedTime) override;
|
||||
void SetStreamTracksStartTimeStamp(const TimeStamp& aTimeStamp)
|
||||
{
|
||||
MutexAutoLock lock(mMutex);
|
||||
mStreamTracksStartTimeStamp = aTimeStamp;
|
||||
}
|
||||
|
||||
// Only accessed on the MSG thread. Used so to ask the MSGImpl to usecount
|
||||
// users of a specific input.
|
||||
// XXX Should really be a CubebUtils::AudioDeviceID, but they aren't
|
||||
|
@ -880,6 +896,10 @@ protected:
|
|||
Mutex mMutex;
|
||||
// protected by mMutex
|
||||
StreamTime mUpdateKnownTracksTime;
|
||||
// This time stamp will be updated in adding and blocked SourceMediaStream,
|
||||
// |AddStreamGraphThread| and |AdvanceTimeVaryingValuesToCurrentTime| in
|
||||
// particularly.
|
||||
TimeStamp mStreamTracksStartTimeStamp;
|
||||
nsTArray<TrackData> mUpdateTracks;
|
||||
nsTArray<TrackData> mPendingTracks;
|
||||
nsTArray<RefPtr<DirectMediaStreamListener>> mDirectListeners;
|
||||
|
|
|
@ -411,10 +411,6 @@ public:
|
|||
* to the audio output stream. Returns the number of frames played.
|
||||
*/
|
||||
StreamTime PlayAudio(MediaStream* aStream);
|
||||
/**
|
||||
* Set the correct current video frame for stream aStream.
|
||||
*/
|
||||
void PlayVideo(MediaStream* aStream);
|
||||
/**
|
||||
* No more data will be forthcoming for aStream. The stream will end
|
||||
* at the current buffer end point. The StreamTracks's tracks must be
|
||||
|
|
|
@ -30,6 +30,8 @@ public:
|
|||
MediaStream* GetInputStreamFor(TrackID aTrackID) override;
|
||||
TrackID GetInputTrackIDFor(TrackID aTrackID) override;
|
||||
|
||||
friend class MediaStreamGraphImpl;
|
||||
|
||||
protected:
|
||||
// Only non-ended tracks are allowed to persist in this map.
|
||||
struct TrackMapEntry {
|
||||
|
|
|
@ -39,6 +39,11 @@ VideoFrameContainer::~VideoFrameContainer()
|
|||
PrincipalHandle VideoFrameContainer::GetLastPrincipalHandle()
|
||||
{
|
||||
MutexAutoLock lock(mMutex);
|
||||
return GetLastPrincipalHandleLocked();
|
||||
}
|
||||
|
||||
PrincipalHandle VideoFrameContainer::GetLastPrincipalHandleLocked()
|
||||
{
|
||||
return mLastPrincipalHandle;
|
||||
}
|
||||
|
||||
|
@ -46,6 +51,12 @@ void VideoFrameContainer::UpdatePrincipalHandleForFrameID(const PrincipalHandle&
|
|||
const ImageContainer::FrameID& aFrameID)
|
||||
{
|
||||
MutexAutoLock lock(mMutex);
|
||||
UpdatePrincipalHandleForFrameIDLocked(aPrincipalHandle, aFrameID);
|
||||
}
|
||||
|
||||
void VideoFrameContainer::UpdatePrincipalHandleForFrameIDLocked(const PrincipalHandle& aPrincipalHandle,
|
||||
const ImageContainer::FrameID& aFrameID)
|
||||
{
|
||||
if (mPendingPrincipalHandle == aPrincipalHandle) {
|
||||
return;
|
||||
}
|
||||
|
@ -147,7 +158,7 @@ void VideoFrameContainer::SetCurrentFrames(const VideoSegment& aSegment)
|
|||
|
||||
bool principalHandleChanged =
|
||||
lastPrincipalHandle != PRINCIPAL_HANDLE_NONE &&
|
||||
lastPrincipalHandle != GetLastPrincipalHandle();
|
||||
lastPrincipalHandle != GetLastPrincipalHandleLocked();
|
||||
|
||||
// Add the frames from this iteration.
|
||||
for (auto& image : newImages) {
|
||||
|
@ -156,8 +167,8 @@ void VideoFrameContainer::SetCurrentFrames(const VideoSegment& aSegment)
|
|||
}
|
||||
|
||||
if (principalHandleChanged) {
|
||||
UpdatePrincipalHandleForFrameID(lastPrincipalHandle,
|
||||
newImages.LastElement().mFrameID);
|
||||
UpdatePrincipalHandleForFrameIDLocked(lastPrincipalHandle,
|
||||
newImages.LastElement().mFrameID);
|
||||
}
|
||||
|
||||
SetCurrentFramesLocked(mLastPlayedVideoFrame.GetIntrinsicSize(), images);
|
||||
|
|
|
@ -48,11 +48,14 @@ public:
|
|||
const TimeStamp& aTargetTime);
|
||||
// Returns the last principalHandle we notified mElement about.
|
||||
PrincipalHandle GetLastPrincipalHandle();
|
||||
PrincipalHandle GetLastPrincipalHandleLocked();
|
||||
// We will notify mElement that aPrincipalHandle has been applied when all
|
||||
// FrameIDs prior to aFrameID have been flushed out.
|
||||
// aFrameID is ignored if aPrincipalHandle already is our pending principalHandle.
|
||||
void UpdatePrincipalHandleForFrameID(const PrincipalHandle& aPrincipalHandle,
|
||||
const ImageContainer::FrameID& aFrameID);
|
||||
void UpdatePrincipalHandleForFrameIDLocked(const PrincipalHandle& aPrincipalHandle,
|
||||
const ImageContainer::FrameID& aFrameID);
|
||||
void SetCurrentFrames(const gfx::IntSize& aIntrinsicSize,
|
||||
const nsTArray<ImageContainer::NonOwningImage>& aImages);
|
||||
void ClearCurrentFrame(const gfx::IntSize& aIntrinsicSize)
|
||||
|
|
|
@ -100,9 +100,11 @@ VideoSegment::AppendFrame(already_AddRefed<Image>&& aImage,
|
|||
StreamTime aDuration,
|
||||
const IntSize& aIntrinsicSize,
|
||||
const PrincipalHandle& aPrincipalHandle,
|
||||
bool aForceBlack)
|
||||
bool aForceBlack,
|
||||
TimeStamp aTimeStamp)
|
||||
{
|
||||
VideoChunk* chunk = AppendChunk(aDuration);
|
||||
chunk->mTimeStamp = aTimeStamp;
|
||||
VideoFrame frame(aImage, aIntrinsicSize);
|
||||
frame.SetForceBlack(aForceBlack);
|
||||
frame.SetPrincipalHandle(aPrincipalHandle);
|
||||
|
|
|
@ -109,7 +109,8 @@ public:
|
|||
StreamTime aDuration,
|
||||
const IntSize& aIntrinsicSize,
|
||||
const PrincipalHandle& aPrincipalHandle,
|
||||
bool aForceBlack = false);
|
||||
bool aForceBlack = false,
|
||||
TimeStamp aTimeStamp = TimeStamp::Now());
|
||||
const VideoFrame* GetLastFrame(StreamTime* aStart = nullptr)
|
||||
{
|
||||
VideoChunk* c = GetLastChunk();
|
||||
|
|
|
@ -498,6 +498,7 @@ WriteVideoToMediaStream(MediaStream* aStream,
|
|||
int64_t aEndMicroseconds,
|
||||
int64_t aStartMicroseconds,
|
||||
const mozilla::gfx::IntSize& aIntrinsicSize,
|
||||
const TimeStamp& aTimeStamp,
|
||||
VideoSegment* aOutput,
|
||||
const PrincipalHandle& aPrincipalHandle)
|
||||
{
|
||||
|
@ -505,7 +506,8 @@ WriteVideoToMediaStream(MediaStream* aStream,
|
|||
StreamTime duration =
|
||||
aStream->MicrosecondsToStreamTimeRoundDown(aEndMicroseconds) -
|
||||
aStream->MicrosecondsToStreamTimeRoundDown(aStartMicroseconds);
|
||||
aOutput->AppendFrame(image.forget(), duration, aIntrinsicSize, aPrincipalHandle);
|
||||
aOutput->AppendFrame(image.forget(), duration, aIntrinsicSize,
|
||||
aPrincipalHandle, false, aTimeStamp);
|
||||
}
|
||||
|
||||
static bool
|
||||
|
@ -537,6 +539,13 @@ DecodedStream::SendVideo(bool aIsSameOrigin, const PrincipalHandle& aPrincipalHa
|
|||
// is ref-counted.
|
||||
mVideoQueue.GetElementsAfter(mData->mNextVideoTime, &video);
|
||||
|
||||
// tracksStartTimeStamp might be null when the SourceMediaStream not yet
|
||||
// be added to MediaStreamGraph.
|
||||
TimeStamp tracksStartTimeStamp = sourceStream->GetStreamTracksStrartTimeStamp();
|
||||
if (tracksStartTimeStamp.IsNull()) {
|
||||
tracksStartTimeStamp = TimeStamp::Now();
|
||||
}
|
||||
|
||||
for (uint32_t i = 0; i < video.Length(); ++i) {
|
||||
VideoData* v = video[i]->As<VideoData>();
|
||||
|
||||
|
@ -551,14 +560,17 @@ DecodedStream::SendVideo(bool aIsSameOrigin, const PrincipalHandle& aPrincipalHa
|
|||
// and capture happens at 15 sec, we'll have to append a black frame
|
||||
// that is 15 sec long.
|
||||
WriteVideoToMediaStream(sourceStream, mData->mLastVideoImage, v->mTime,
|
||||
mData->mNextVideoTime, mData->mLastVideoImageDisplaySize, &output,
|
||||
aPrincipalHandle);
|
||||
mData->mNextVideoTime, mData->mLastVideoImageDisplaySize,
|
||||
tracksStartTimeStamp + TimeDuration::FromMicroseconds(v->mTime),
|
||||
&output, aPrincipalHandle);
|
||||
mData->mNextVideoTime = v->mTime;
|
||||
}
|
||||
|
||||
if (mData->mNextVideoTime < v->GetEndTime()) {
|
||||
WriteVideoToMediaStream(sourceStream, v->mImage, v->GetEndTime(),
|
||||
mData->mNextVideoTime, v->mDisplay, &output, aPrincipalHandle);
|
||||
mData->mNextVideoTime, v->mDisplay,
|
||||
tracksStartTimeStamp + TimeDuration::FromMicroseconds(v->GetEndTime()),
|
||||
&output, aPrincipalHandle);
|
||||
mData->mNextVideoTime = v->GetEndTime();
|
||||
mData->mLastVideoImage = v->mImage;
|
||||
mData->mLastVideoImageDisplaySize = v->mDisplay;
|
||||
|
@ -585,7 +597,9 @@ DecodedStream::SendVideo(bool aIsSameOrigin, const PrincipalHandle& aPrincipalHa
|
|||
int64_t deviation_usec = sourceStream->StreamTimeToMicroseconds(1);
|
||||
WriteVideoToMediaStream(sourceStream, mData->mLastVideoImage,
|
||||
mData->mNextVideoTime + deviation_usec, mData->mNextVideoTime,
|
||||
mData->mLastVideoImageDisplaySize, &endSegment, aPrincipalHandle);
|
||||
mData->mLastVideoImageDisplaySize,
|
||||
tracksStartTimeStamp + TimeDuration::FromMicroseconds(mData->mNextVideoTime + deviation_usec),
|
||||
&endSegment, aPrincipalHandle);
|
||||
mData->mNextVideoTime += deviation_usec;
|
||||
MOZ_ASSERT(endSegment.GetDuration() > 0);
|
||||
if (!aIsSameOrigin) {
|
||||
|
|
Загрузка…
Ссылка в новой задаче