Bug 1493613 - Move MediaStream control from DOMMediaStream to MediaStreamTrack. r=padenot

This patch is inherently large because modifying these parts of DOMMediaStream
and MediaStreamTrack affects all consumers and producers of all DOMMediaStreams
and MediaStreamTracks.

Things are generally much simpler now.

Producers of tracks now create a MediaStream in the graph, hand it to a
MediaStreamTrackSource subclass that takes ownership of it, and add that source
to a MediaStreamTrack. Should the producer need a DOMMediaStream, it is now much
simpler to create, as the only thing needed is the current window. The stream is
a rather simple wrapper around an array of MediaStreamTracks.
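
As an illustration, a minimal sketch of the new producer pattern, modeled on
the canvas-capture changes further down in this patch. MyVideoTrackSource,
window and principal are assumptions standing in for a real producer's
MediaStreamTrackSource subclass and its surrounding state:

  // Sketch only; mirrors the CanvasCaptureMediaStream/HTMLCanvasElement hunks
  // below, with producer-specific details and error handling omitted.

  // 1. Create a source MediaStream in the graph.
  MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
      MediaStreamGraph::SYSTEM_THREAD_DRIVER, window,
      MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
  SourceMediaStream* source = graph->CreateSourceStream();

  // 2. Hand the stream to a track source that takes ownership of it, and add
  //    that source to a MediaStreamTrack of the matching kind.
  const TrackID trackId = 1;
  auto trackSource = MakeRefPtr<MyVideoTrackSource>(principal, source);
  RefPtr<MediaStreamTrack> track =
      new VideoStreamTrack(window, source, trackId, trackSource);

  // 3. Only if a DOMMediaStream is needed: it is now just a main-thread
  //    container of tracks, created from the current window.
  auto domStream = MakeRefPtr<DOMMediaStream>(window);
  domStream->AddTrackInternal(track);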

HTMLMediaElement is still not as straightforward as other consumers, since it
consumes the DOMMediaStream directly rather than a set of tracks.
The new MediaStreamRenderer helper class bridges the gap between this and the
new track-based MediaStreamGraph interface, as it needs to juggle registering
multiple audio tracks for audio output. It hooks into existing HTMLMediaElement
logic and brings a welcome simplification to all the glue previously needed
there.
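
For illustration, a hedged sketch of the per-track audio-output juggling that
MediaStreamRenderer takes over, using only the AudioStreamTrack output API
added in this patch (AddAudioOutput/RemoveAudioOutput/SetAudioOutputVolume).
The class shape, member names and the use of |this| as the output key are
assumptions; the real MediaStreamRenderer is not shown in the hunks below:

  // Assumed shape of the renderer's audio-output bookkeeping (sketch only).
  class AudioOutputRegistrar {
   public:
    // Register an enabled audio track as an audio output, keyed on this
    // object so it can be unregistered and volume-adjusted later.
    void AddTrack(AudioStreamTrack* aTrack) {
      mAudioTracks.AppendElement(aTrack);
      aTrack->AddAudioOutput(this);
      aTrack->SetAudioOutputVolume(this, mVolume);
    }

    void RemoveTrack(AudioStreamTrack* aTrack) {
      mAudioTracks.RemoveElement(aTrack);
      aTrack->RemoveAudioOutput(this);
    }

    // Volume changes apply to every registered track under the same key.
    void SetVolume(float aVolume) {
      mVolume = aVolume;
      for (const auto& track : mAudioTracks) {
        track->SetAudioOutputVolume(this, aVolume);
      }
    }

   private:
    float mVolume = 1.0f;
    nsTArray<RefPtr<AudioStreamTrack>> mAudioTracks;
  };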

Differential Revision: https://phabricator.services.mozilla.com/D37934

--HG--
extra : moz-landing-system : lando
Andreas Pehrson 2019-07-31 07:58:17 +00:00
Parent 719dfe4ede
Commit 4b3fa9c67e
47 changed files: 1612 additions and 2356 deletions


@@ -10,7 +10,7 @@
#include "jsapi.h"
#include "jsfriendapi.h"
#include "Layers.h"
#include "MediaSegment.h"
#include "MediaStreamGraph.h"
#include "mozilla/Assertions.h"
#include "mozilla/Base64.h"
#include "mozilla/CheckedInt.h"
@@ -19,7 +19,7 @@
#include "mozilla/dom/Event.h"
#include "mozilla/dom/File.h"
#include "mozilla/dom/HTMLCanvasElementBinding.h"
#include "mozilla/dom/MediaStreamTrack.h"
#include "mozilla/dom/VideoStreamTrack.h"
#include "mozilla/dom/MouseEvent.h"
#include "mozilla/dom/OffscreenCanvas.h"
#include "mozilla/EventDispatcher.h"
@@ -677,14 +677,9 @@ already_AddRefed<CanvasCaptureMediaStream> HTMLCanvasElement::CaptureStream(
return nullptr;
}
RefPtr<CanvasCaptureMediaStream> stream =
CanvasCaptureMediaStream::CreateSourceStream(window, this);
if (!stream) {
aRv.Throw(NS_ERROR_FAILURE);
return nullptr;
}
auto stream = MakeRefPtr<CanvasCaptureMediaStream>(window, this);
TrackID videoTrackId = 1;
const TrackID videoTrackId = 1;
nsCOMPtr<nsIPrincipal> principal = NodePrincipal();
nsresult rv = stream->Init(aFrameRate, videoTrackId, principal);
if (NS_FAILED(rv)) {
@@ -693,8 +688,8 @@ already_AddRefed<CanvasCaptureMediaStream> HTMLCanvasElement::CaptureStream(
}
RefPtr<MediaStreamTrack> track =
stream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO,
new CanvasCaptureTrackSource(principal, stream));
new VideoStreamTrack(window, stream->GetSourceStream(), videoTrackId,
new CanvasCaptureTrackSource(principal, stream));
stream->AddTrackInternal(track);
// Check site-specific permission and display prompt if appropriate.

Diff not shown because of its large size.


@@ -54,14 +54,17 @@ class MediaDecoder;
class MediaInputPort;
class MediaStream;
class MediaStreamGraph;
class MediaStreamGraphImpl;
class MediaStreamWindowCapturer;
class VideoFrameContainer;
namespace dom {
class MediaKeys;
class TextTrack;
class TimeRanges;
class WakeLock;
class MediaTrack;
class MediaStreamTrack;
class MediaStreamTrackSource;
class MediaTrack;
class VideoStreamTrack;
} // namespace dom
} // namespace mozilla
@@ -98,7 +101,7 @@ enum class StreamCaptureBehavior : uint8_t {
class HTMLMediaElement : public nsGenericHTMLElement,
public MediaDecoderOwner,
public PrincipalChangeObserver<DOMMediaStream>,
public PrincipalChangeObserver<MediaStreamTrack>,
public SupportsWeakPtr<HTMLMediaElement>,
public nsStubMutationObserver {
public:
@@ -229,8 +232,8 @@ class HTMLMediaElement : public nsGenericHTMLElement,
*/
void NotifyOwnerDocumentActivityChanged();
// From PrincipalChangeObserver<DOMMediaStream>.
void PrincipalChanged(DOMMediaStream* aStream) override;
// From PrincipalChangeObserver<MediaStreamTrack>.
void PrincipalChanged(MediaStreamTrack* aTrack) override;
void UpdateSrcStreamVideoPrincipal(const PrincipalHandle& aPrincipalHandle);
@@ -296,8 +299,6 @@ class HTMLMediaElement : public nsGenericHTMLElement,
void GetEMEInfo(dom::EMEDebugInfo& aInfo);
class StreamCaptureTrackSource;
// Update the visual size of the media. Called from the decoder on the
// main thread when/if the size changes.
virtual void UpdateMediaSize(const nsIntSize& aSize);
@@ -337,13 +338,6 @@ class HTMLMediaElement : public nsGenericHTMLElement,
*/
void NotifyMediaTrackDisabled(MediaTrack* aTrack);
/**
* Called when a captured MediaStreamTrack is stopped so we can clean up its
* MediaInputPort.
*/
void NotifyOutputTrackStopped(DOMMediaStream* aOwningStream,
TrackID aDestinationTrackID);
/**
* Returns the current load ID. Asynchronous events store the ID that was
* current when they were enqueued, and if it has changed when they come to
@@ -392,13 +386,6 @@ class HTMLMediaElement : public nsGenericHTMLElement,
*/
void FireTimeUpdate(bool aPeriodic) final;
/**
* This will return null if mSrcStream is null, or if mSrcStream is not
* null but its GetPlaybackStream() returns null --- which can happen during
* cycle collection unlinking!
*/
MediaStream* GetSrcMediaStream() const;
// WebIDL
MediaError* GetError() const;
@@ -752,14 +739,25 @@ class HTMLMediaElement : public nsGenericHTMLElement,
class ChannelLoader;
class ErrorSink;
class MediaLoadListener;
class MediaStreamRenderer;
class MediaStreamTrackListener;
class FirstFrameListener;
class ShutdownObserver;
class StreamCaptureTrackSource;
MediaDecoderOwner::NextFrameStatus NextFrameStatus();
void SetDecoder(MediaDecoder* aDecoder);
struct SharedDummyStream {
NS_INLINE_DECL_REFCOUNTING(SharedDummyStream)
explicit SharedDummyStream(MediaStream* aStream);
const RefPtr<MediaStream> mStream;
private:
~SharedDummyStream();
};
// Holds references to the DOM wrappers for the MediaStreams that we're
// writing to.
struct OutputMediaStream {
@@ -767,14 +765,18 @@ class HTMLMediaElement : public nsGenericHTMLElement,
~OutputMediaStream();
RefPtr<DOMMediaStream> mStream;
TrackID mNextAvailableTrackID;
RefPtr<MediaStreamGraphImpl> mGraph;
// Dummy stream to keep mGraph from shutting down when MediaDecoder shuts
// down. Shared across all OutputMediaStreams as one stream is enough to
// keep the graph alive.
RefPtr<SharedDummyStream> mGraphKeepAliveDummyStream;
bool mFinishWhenEnded;
bool mCapturingAudioOnly;
bool mCapturingDecoder;
bool mCapturingMediaStream;
// The following members are keeping state for a captured MediaStream.
nsTArray<Pair<nsString, RefPtr<MediaInputPort>>> mTrackPorts;
nsTArray<Pair<nsString, RefPtr<MediaStreamTrackSource>>> mTracks;
};
void PlayInternal(bool aHandlingUserInput);
@@ -1331,31 +1333,25 @@ class HTMLMediaElement : public nsGenericHTMLElement,
// At most one of mDecoder and mSrcStream can be non-null.
RefPtr<DOMMediaStream> mSrcStream;
// The MediaStreamRenderer handles rendering of our selected video track, and
// enabled audio tracks, while mSrcStream is set.
RefPtr<MediaStreamRenderer> mMediaStreamRenderer;
// True once mSrcStream's initial set of tracks are known.
bool mSrcStreamTracksAvailable = false;
// While mPaused is true and mSrcStream is set, this is the value to use for
// CurrentTime(). Otherwise this is Nothing.
Maybe<GraphTime> mSrcStreamPausedGraphTime;
// The offset in GraphTime at which this media element started playing the
// playback stream of mSrcStream.
GraphTime mSrcStreamGraphTimeOffset = 0;
// True once PlaybackEnded() is called and we're playing a MediaStream.
// Reset to false if we start playing mSrcStream again.
bool mSrcStreamPlaybackEnded = false;
// Holds a reference to the stream connecting this stream to the capture sink.
RefPtr<MediaInputPort> mCaptureStreamPort;
// Holds a reference to the stream connecting this stream to the window
// capture sink.
UniquePtr<MediaStreamWindowCapturer> mStreamWindowCapturer;
// Holds references to the DOM wrappers for the MediaStreams that we're
// writing to.
nsTArray<OutputMediaStream> mOutputStreams;
// The next track id to use for a captured MediaDecoder.
TrackID mNextAvailableMediaDecoderOutputTrackID = 1;
// Holds a reference to the first-frame-getting track listener attached to
// mSelectedVideoStreamTrack.
RefPtr<FirstFrameListener> mFirstFrameListener;


@@ -5,11 +5,33 @@
#include "AudioStreamTrack.h"
#include "MediaStreamGraph.h"
#include "nsContentUtils.h"
namespace mozilla {
namespace dom {
void AudioStreamTrack::AddAudioOutput(void* aKey) {
if (Ended()) {
return;
}
mStream->AddAudioOutput(aKey);
}
void AudioStreamTrack::RemoveAudioOutput(void* aKey) {
if (Ended()) {
return;
}
mStream->RemoveAudioOutput(aKey);
}
void AudioStreamTrack::SetAudioOutputVolume(void* aKey, float aVolume) {
if (Ended()) {
return;
}
mStream->SetAudioOutputVolume(aKey, aVolume);
}
void AudioStreamTrack::GetLabel(nsAString& aLabel, CallerType aCallerType) {
if (nsContentUtils::ResistFingerprinting(aCallerType)) {
aLabel.AssignLiteral("Internal Microphone");


@@ -15,25 +15,29 @@ namespace dom {
class AudioStreamTrack : public MediaStreamTrack {
public:
AudioStreamTrack(
DOMMediaStream* aStream, TrackID aTrackID, TrackID aInputTrackID,
nsPIDOMWindowInner* aWindow, MediaStream* aInputStream, TrackID aTrackID,
MediaStreamTrackSource* aSource,
const MediaTrackConstraints& aConstraints = MediaTrackConstraints())
: MediaStreamTrack(aStream, aTrackID, aInputTrackID, aSource,
: MediaStreamTrack(aWindow, aInputStream, aTrackID, aSource,
aConstraints) {}
AudioStreamTrack* AsAudioStreamTrack() override { return this; }
const AudioStreamTrack* AsAudioStreamTrack() const override { return this; }
void AddAudioOutput(void* aKey);
void RemoveAudioOutput(void* aKey);
void SetAudioOutputVolume(void* aKey, float aVolume);
// WebIDL
void GetKind(nsAString& aKind) override { aKind.AssignLiteral("audio"); }
void GetLabel(nsAString& aLabel, CallerType aCallerType) override;
protected:
already_AddRefed<MediaStreamTrack> CloneInternal(
DOMMediaStream* aOwningStream, TrackID aTrackID) override {
return do_AddRef(new AudioStreamTrack(
aOwningStream, aTrackID, mInputTrackID, mSource, mConstraints));
already_AddRefed<MediaStreamTrack> CloneInternal() override {
return do_AddRef(
new AudioStreamTrack(mWindow, Ended() ? nullptr : mInputStream.get(),
mTrackID, mSource, mConstraints));
}
};


@@ -14,9 +14,20 @@ namespace dom {
AudioTrack::AudioTrack(nsIGlobalObject* aOwnerGlobal, const nsAString& aId,
const nsAString& aKind, const nsAString& aLabel,
const nsAString& aLanguage, bool aEnabled)
const nsAString& aLanguage, bool aEnabled,
AudioStreamTrack* aStreamTrack)
: MediaTrack(aOwnerGlobal, aId, aKind, aLabel, aLanguage),
mEnabled(aEnabled) {}
mEnabled(aEnabled),
mAudioStreamTrack(aStreamTrack) {}
AudioTrack::~AudioTrack() = default;
NS_IMPL_CYCLE_COLLECTION_INHERITED(AudioTrack, MediaTrack, mAudioStreamTrack)
NS_IMPL_ADDREF_INHERITED(AudioTrack, MediaTrack)
NS_IMPL_RELEASE_INHERITED(AudioTrack, MediaTrack)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(AudioTrack)
NS_INTERFACE_MAP_END_INHERITING(MediaTrack)
JSObject* AudioTrack::WrapObject(JSContext* aCx,
JS::Handle<JSObject*> aGivenProto) {


@@ -12,11 +12,17 @@
namespace mozilla {
namespace dom {
class AudioStreamTrack;
class AudioTrack : public MediaTrack {
public:
AudioTrack(nsIGlobalObject* aOwnerGlobal, const nsAString& aId,
const nsAString& aKind, const nsAString& aLabel,
const nsAString& aLanguage, bool aEnabled);
const nsAString& aLanguage, bool aEnabled,
AudioStreamTrack* aStreamTrack = nullptr);
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(AudioTrack, MediaTrack)
JSObject* WrapObject(JSContext* aCx,
JS::Handle<JSObject*> aGivenProto) override;
@@ -25,13 +31,21 @@ class AudioTrack : public MediaTrack {
void SetEnabledInternal(bool aEnabled, int aFlags) override;
// Get associated audio stream track when the audio track comes from
// MediaStream. This might be nullptr when the src of owning HTMLMediaElement
// is not MediaStream.
AudioStreamTrack* GetAudioStreamTrack() { return mAudioStreamTrack; }
// WebIDL
bool Enabled() const { return mEnabled; }
void SetEnabled(bool aEnabled);
private:
virtual ~AudioTrack();
bool mEnabled;
RefPtr<AudioStreamTrack> mAudioStreamTrack;
};
} // namespace dom


@@ -44,7 +44,9 @@ OutputStreamDriver::~OutputStreamDriver() {
void OutputStreamDriver::EndTrack() {
MOZ_ASSERT(NS_IsMainThread());
mSourceStream->EndTrack(mTrackId);
if (!mSourceStream->IsDestroyed()) {
mSourceStream->Destroy();
}
}
void OutputStreamDriver::SetImage(const RefPtr<layers::Image>& aImage,
@@ -148,7 +150,7 @@ NS_INTERFACE_MAP_END_INHERITING(DOMMediaStream)
CanvasCaptureMediaStream::CanvasCaptureMediaStream(nsPIDOMWindowInner* aWindow,
HTMLCanvasElement* aCanvas)
: DOMMediaStream(aWindow), mCanvas(aCanvas), mOutputStreamDriver(nullptr) {}
: DOMMediaStream(aWindow), mCanvas(aCanvas) {}
CanvasCaptureMediaStream::~CanvasCaptureMediaStream() {
if (mOutputStreamDriver) {
@@ -168,36 +170,26 @@ void CanvasCaptureMediaStream::RequestFrame() {
}
nsresult CanvasCaptureMediaStream::Init(const dom::Optional<double>& aFPS,
const TrackID& aTrackId,
const TrackID aTrackId,
nsIPrincipal* aPrincipal) {
MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
MediaStreamGraph::SYSTEM_THREAD_DRIVER, mWindow,
MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
SourceMediaStream* source = graph->CreateSourceStream();
PrincipalHandle principalHandle = MakePrincipalHandle(aPrincipal);
if (!aFPS.WasPassed()) {
mOutputStreamDriver = new AutoDriver(GetInputStream()->AsSourceStream(),
aTrackId, principalHandle);
mOutputStreamDriver = new AutoDriver(source, aTrackId, principalHandle);
} else if (aFPS.Value() < 0) {
return NS_ERROR_ILLEGAL_VALUE;
} else {
// Cap frame rate to 60 FPS for sanity
double fps = std::min(60.0, aFPS.Value());
mOutputStreamDriver = new TimerDriver(GetInputStream()->AsSourceStream(),
fps, aTrackId, principalHandle);
mOutputStreamDriver =
new TimerDriver(source, fps, aTrackId, principalHandle);
}
return NS_OK;
}
already_AddRefed<CanvasCaptureMediaStream>
CanvasCaptureMediaStream::CreateSourceStream(nsPIDOMWindowInner* aWindow,
HTMLCanvasElement* aCanvas) {
RefPtr<CanvasCaptureMediaStream> stream =
new CanvasCaptureMediaStream(aWindow, aCanvas);
MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
MediaStreamGraph::SYSTEM_THREAD_DRIVER, aWindow,
MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
stream->InitSourceStream(graph);
return stream.forget();
}
FrameCaptureListener* CanvasCaptureMediaStream::FrameCaptureListener() {
return mOutputStreamDriver;
}
@@ -212,5 +204,12 @@ void CanvasCaptureMediaStream::StopCapture() {
mOutputStreamDriver = nullptr;
}
SourceMediaStream* CanvasCaptureMediaStream::GetSourceStream() const {
if (!mOutputStreamDriver) {
return nullptr;
}
return mOutputStreamDriver->mSourceStream;
}
} // namespace dom
} // namespace mozilla


@@ -84,13 +84,12 @@ class OutputStreamDriver : public FrameCaptureListener {
*/
virtual void Forget() {}
protected:
virtual ~OutputStreamDriver();
private:
const TrackID mTrackId;
const RefPtr<SourceMediaStream> mSourceStream;
const PrincipalHandle mPrincipalHandle;
protected:
virtual ~OutputStreamDriver();
};
class CanvasCaptureMediaStream : public DOMMediaStream {
@@ -102,7 +101,7 @@ class CanvasCaptureMediaStream : public DOMMediaStream {
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(CanvasCaptureMediaStream,
DOMMediaStream)
nsresult Init(const dom::Optional<double>& aFPS, const TrackID& aTrackId,
nsresult Init(const dom::Optional<double>& aFPS, const TrackID aTrackId,
nsIPrincipal* aPrincipal);
JSObject* WrapObject(JSContext* aCx,
@@ -119,12 +118,7 @@ class CanvasCaptureMediaStream : public DOMMediaStream {
*/
void StopCapture();
/**
* Create a CanvasCaptureMediaStream whose underlying stream is a
* SourceMediaStream.
*/
static already_AddRefed<CanvasCaptureMediaStream> CreateSourceStream(
nsPIDOMWindowInner* aWindow, HTMLCanvasElement* aCanvas);
SourceMediaStream* GetSourceStream() const;
protected:
~CanvasCaptureMediaStream();


@@ -13,8 +13,6 @@
#include "MediaStreamGraphImpl.h"
#include "MediaStreamListener.h"
#include "VideoStreamTrack.h"
#include "mozilla/BasePrincipal.h"
#include "mozilla/dom/AudioNode.h"
#include "mozilla/dom/AudioTrack.h"
#include "mozilla/dom/AudioTrackList.h"
#include "mozilla/dom/DocGroup.h"
@@ -26,6 +24,7 @@
#include "mozilla/dom/VideoTrackList.h"
#include "mozilla/media/MediaUtils.h"
#include "nsContentUtils.h"
#include "nsGlobalWindowInner.h"
#include "nsIScriptError.h"
#include "nsIUUIDGenerator.h"
#include "nsPIDOMWindow.h"
@@ -48,9 +47,9 @@ static LazyLogModule gMediaStreamLog("MediaStream");
const TrackID TRACK_VIDEO_PRIMARY = 1;
static bool ContainsLiveTracks(
nsTArray<RefPtr<DOMMediaStream::TrackPort>>& aTracks) {
for (auto& port : aTracks) {
if (port->GetTrack()->ReadyState() == MediaStreamTrackState::Live) {
const nsTArray<RefPtr<MediaStreamTrack>>& aTracks) {
for (const auto& track : aTracks) {
if (track->ReadyState() == MediaStreamTrackState::Live) {
return true;
}
}
@@ -58,51 +57,6 @@ static bool ContainsLiveTracks(
return false;
}
DOMMediaStream::TrackPort::TrackPort(MediaInputPort* aInputPort,
MediaStreamTrack* aTrack,
const InputPortOwnership aOwnership)
: mInputPort(aInputPort), mTrack(aTrack), mOwnership(aOwnership) {
MOZ_ASSERT(mInputPort);
MOZ_ASSERT(mTrack);
MOZ_COUNT_CTOR(TrackPort);
}
DOMMediaStream::TrackPort::~TrackPort() {
MOZ_COUNT_DTOR(TrackPort);
if (mOwnership == InputPortOwnership::OWNED) {
DestroyInputPort();
}
}
void DOMMediaStream::TrackPort::DestroyInputPort() {
if (mInputPort) {
mInputPort->Destroy();
mInputPort = nullptr;
}
}
MediaStream* DOMMediaStream::TrackPort::GetSource() const {
return mInputPort ? mInputPort->GetSource() : nullptr;
}
TrackID DOMMediaStream::TrackPort::GetSourceTrackId() const {
return mInputPort ? mInputPort->GetSourceTrackId() : TRACK_INVALID;
}
RefPtr<GenericPromise> DOMMediaStream::TrackPort::BlockSourceTrackId(
TrackID aTrackId, BlockingMode aBlockingMode) {
if (!mInputPort) {
return GenericPromise::CreateAndReject(NS_ERROR_FAILURE, __func__);
}
return mInputPort->BlockSourceTrackId(aTrackId, aBlockingMode);
}
NS_IMPL_CYCLE_COLLECTION(DOMMediaStream::TrackPort, mTrack)
NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(DOMMediaStream::TrackPort, AddRef)
NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(DOMMediaStream::TrackPort, Release)
class DOMMediaStream::PlaybackTrackListener : public MediaStreamTrackConsumer {
public:
explicit PlaybackTrackListener(DOMMediaStream* aStream) : mStream(aStream) {}
@@ -143,23 +97,17 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(DOMMediaStream,
DOMEventTargetHelper)
tmp->Destroy();
NS_IMPL_CYCLE_COLLECTION_UNLINK(mWindow)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mOwnedTracks)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mTracks)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mConsumersToKeepAlive)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mPlaybackTrackListener)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mPrincipal)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mVideoPrincipal)
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(DOMMediaStream,
DOMEventTargetHelper)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mWindow)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOwnedTracks)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mTracks)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mConsumersToKeepAlive)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPlaybackTrackListener)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPrincipal)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mVideoPrincipal)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
NS_IMPL_ADDREF_INHERITED(DOMMediaStream, DOMEventTargetHelper)
@@ -169,24 +117,9 @@ NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(DOMMediaStream)
NS_INTERFACE_MAP_ENTRY(DOMMediaStream)
NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
NS_IMPL_CYCLE_COLLECTION_INHERITED(DOMAudioNodeMediaStream, DOMMediaStream,
mStreamNode)
NS_IMPL_ADDREF_INHERITED(DOMAudioNodeMediaStream, DOMMediaStream)
NS_IMPL_RELEASE_INHERITED(DOMAudioNodeMediaStream, DOMMediaStream)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(DOMAudioNodeMediaStream)
NS_INTERFACE_MAP_END_INHERITING(DOMMediaStream)
DOMMediaStream::DOMMediaStream(nsPIDOMWindowInner* aWindow)
: mWindow(aWindow),
mInputStream(nullptr),
mOwnedStream(nullptr),
mPlaybackStream(nullptr),
mTracksPendingRemoval(0),
mPlaybackTrackListener(MakeAndAddRef<PlaybackTrackListener>(this)),
mTracksCreated(false),
mNotifiedOfMediaStreamGraphShutdown(false),
mActive(false),
mFinishedOnInactive(true) {
nsresult rv;
@@ -209,37 +142,13 @@ DOMMediaStream::~DOMMediaStream() { Destroy(); }
void DOMMediaStream::Destroy() {
LOG(LogLevel::Debug, ("DOMMediaStream %p Being destroyed.", this));
for (const RefPtr<TrackPort>& info : mTracks) {
for (const auto& track : mTracks) {
// We must remove ourselves from each track's principal change observer list
// before we die. CC may have cleared info->mTrack so guard against it.
MediaStreamTrack* track = info->GetTrack();
if (track) {
track->RemovePrincipalChangeObserver(this);
if (!track->Ended()) {
track->RemoveConsumer(mPlaybackTrackListener);
}
// before we die.
if (!track->Ended()) {
track->RemoveConsumer(mPlaybackTrackListener);
}
}
if (mPlaybackPort) {
mPlaybackPort->Destroy();
mPlaybackPort = nullptr;
}
if (mOwnedPort) {
mOwnedPort->Destroy();
mOwnedPort = nullptr;
}
if (mPlaybackStream) {
mPlaybackStream->UnregisterUser();
mPlaybackStream = nullptr;
}
if (mOwnedStream) {
mOwnedStream->UnregisterUser();
mOwnedStream = nullptr;
}
if (mInputStream) {
mInputStream->UnregisterUser();
mInputStream = nullptr;
}
mTrackListeners.Clear();
}
@@ -289,23 +198,9 @@ already_AddRefed<DOMMediaStream> DOMMediaStream::Constructor(
}
auto newStream = MakeRefPtr<DOMMediaStream>(ownerWindow);
for (MediaStreamTrack& track : aTracks) {
if (!newStream->GetPlaybackStream()) {
MOZ_RELEASE_ASSERT(track.Graph());
newStream->InitPlaybackStreamCommon(track.Graph());
}
newStream->AddTrack(track);
}
if (!newStream->GetPlaybackStream()) {
MOZ_ASSERT(aTracks.IsEmpty());
MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
MediaStreamGraph::SYSTEM_THREAD_DRIVER, ownerWindow,
MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
newStream->InitPlaybackStreamCommon(graph);
}
return newStream.forget();
}
@@ -382,8 +277,8 @@ void DOMMediaStream::GetId(nsAString& aID) const { aID = mID; }
void DOMMediaStream::GetAudioTracks(
nsTArray<RefPtr<AudioStreamTrack>>& aTracks) const {
for (const RefPtr<TrackPort>& info : mTracks) {
if (AudioStreamTrack* t = info->GetTrack()->AsAudioStreamTrack()) {
for (const auto& track : mTracks) {
if (AudioStreamTrack* t = track->AsAudioStreamTrack()) {
aTracks.AppendElement(t);
}
}
@@ -391,17 +286,17 @@ void DOMMediaStream::GetAudioTracks(
void DOMMediaStream::GetAudioTracks(
nsTArray<RefPtr<MediaStreamTrack>>& aTracks) const {
for (const RefPtr<TrackPort>& info : mTracks) {
if (info->GetTrack()->AsAudioStreamTrack()) {
aTracks.AppendElement(info->GetTrack());
for (const auto& track : mTracks) {
if (track->AsAudioStreamTrack()) {
aTracks.AppendElement(track);
}
}
}
void DOMMediaStream::GetVideoTracks(
nsTArray<RefPtr<VideoStreamTrack>>& aTracks) const {
for (const RefPtr<TrackPort>& info : mTracks) {
if (VideoStreamTrack* t = info->GetTrack()->AsVideoStreamTrack()) {
for (const auto& track : mTracks) {
if (VideoStreamTrack* t = track->AsVideoStreamTrack()) {
aTracks.AppendElement(t);
}
}
@@ -409,49 +304,24 @@ void DOMMediaStream::GetVideoTracks(
void DOMMediaStream::GetVideoTracks(
nsTArray<RefPtr<MediaStreamTrack>>& aTracks) const {
for (const RefPtr<TrackPort>& info : mTracks) {
if (info->GetTrack()->AsVideoStreamTrack()) {
aTracks.AppendElement(info->GetTrack());
for (const auto& track : mTracks) {
if (track->AsVideoStreamTrack()) {
aTracks.AppendElement(track);
}
}
}
void DOMMediaStream::GetTracks(
nsTArray<RefPtr<MediaStreamTrack>>& aTracks) const {
for (const RefPtr<TrackPort>& info : mTracks) {
aTracks.AppendElement(info->GetTrack());
for (const auto& track : mTracks) {
aTracks.AppendElement(track);
}
}
void DOMMediaStream::AddTrack(MediaStreamTrack& aTrack) {
MOZ_RELEASE_ASSERT(mPlaybackStream);
RefPtr<ProcessedMediaStream> dest = mPlaybackStream->AsProcessedStream();
MOZ_ASSERT(dest);
if (!dest) {
return;
}
LOG(LogLevel::Info,
("DOMMediaStream %p Adding track %p (from stream %p with ID %d)", this,
&aTrack, aTrack.mOwningStream.get(), aTrack.mTrackID));
if (mPlaybackStream->Graph() != aTrack.Graph()) {
NS_ASSERTION(false,
"Cannot combine tracks from different MediaStreamGraphs");
LOG(LogLevel::Error, ("DOMMediaStream %p Own MSG %p != aTrack's MSG %p",
this, mPlaybackStream->Graph(), aTrack.Graph()));
AutoTArray<nsString, 1> params;
aTrack.GetId(*params.AppendElement());
nsCOMPtr<nsPIDOMWindowInner> pWindow = GetParentObject();
Document* document = pWindow ? pWindow->GetExtantDoc() : nullptr;
nsContentUtils::ReportToConsole(
nsIScriptError::errorFlag, NS_LITERAL_CSTRING("Media"), document,
nsContentUtils::eDOM_PROPERTIES,
"MediaStreamAddTrackDifferentAudioChannel", params);
return;
}
&aTrack, aTrack.GetStream(), aTrack.GetTrackID()));
if (HasTrack(aTrack)) {
LOG(LogLevel::Debug,
@@ -459,45 +329,24 @@ void DOMMediaStream::AddTrack(MediaStreamTrack& aTrack) {
return;
}
// Hook up the underlying track with our underlying playback stream.
RefPtr<MediaInputPort> inputPort = GetPlaybackStream()->AllocateInputPort(
aTrack.GetOwnedStream(), aTrack.mTrackID);
RefPtr<TrackPort> trackPort =
new TrackPort(inputPort, &aTrack, TrackPort::InputPortOwnership::OWNED);
mTracks.AppendElement(trackPort.forget());
mTracks.AppendElement(&aTrack);
NotifyTrackAdded(&aTrack);
LOG(LogLevel::Debug, ("DOMMediaStream %p Added track %p", this, &aTrack));
}
void DOMMediaStream::RemoveTrack(MediaStreamTrack& aTrack) {
LOG(LogLevel::Info,
("DOMMediaStream %p Removing track %p (from stream %p with ID %d)", this,
&aTrack, aTrack.mOwningStream.get(), aTrack.mTrackID));
&aTrack, aTrack.GetStream(), aTrack.GetTrackID()));
RefPtr<TrackPort> toRemove = FindPlaybackTrackPort(aTrack);
if (!toRemove) {
if (!mTracks.RemoveElement(&aTrack)) {
LOG(LogLevel::Debug,
("DOMMediaStream %p does not contain track %p", this, &aTrack));
return;
}
DebugOnly<bool> removed = mTracks.RemoveElement(toRemove);
NS_ASSERTION(removed,
"If there's a track port we should be able to remove it");
// If the track comes from a TRACK_ANY input port (i.e., mOwnedPort), we need
// to block it in the port. Doing this for a locked track is still OK as it
// will first block the track, then destroy the port. Both cause the track to
// end.
// If the track has already ended, it's input port might be gone, so in those
// cases blocking the underlying track should be avoided.
if (!aTrack.Ended()) {
BlockPlaybackTrack(toRemove);
NotifyTrackRemoved(&aTrack);
}
LOG(LogLevel::Debug, ("DOMMediaStream %p Removed track %p", this, &aTrack));
}
already_AddRefed<DOMMediaStream> DOMMediaStream::Clone() {
@@ -506,27 +355,12 @@ already_AddRefed<DOMMediaStream> DOMMediaStream::Clone() {
LOG(LogLevel::Info,
("DOMMediaStream %p created clone %p", this, newStream.get()));
MOZ_RELEASE_ASSERT(mPlaybackStream);
MOZ_RELEASE_ASSERT(mPlaybackStream->Graph());
MediaStreamGraph* graph = mPlaybackStream->Graph();
// We initiate the owned and playback streams first, since we need to create
// all existing DOM tracks before we add the generic input port from
// mInputStream to mOwnedStream (see AllocateInputPort wrt. destination
// TrackID as to why).
newStream->InitOwnedStreamCommon(graph);
newStream->InitPlaybackStreamCommon(graph);
// Set up existing DOM tracks.
TrackID allocatedTrackID = 1;
for (const RefPtr<TrackPort>& info : mTracks) {
MediaStreamTrack& track = *info->GetTrack();
for (const auto& track : mTracks) {
LOG(LogLevel::Debug,
("DOMMediaStream %p forwarding external track %p to clone %p", this,
&track, newStream.get()));
RefPtr<MediaStreamTrack> trackClone =
newStream->CloneDOMTrack(track, allocatedTrackID++);
track.get(), newStream.get()));
RefPtr<MediaStreamTrack> clone = track->Clone();
newStream->AddTrack(*clone);
}
return newStream.forget();
@@ -535,375 +369,37 @@ already_AddRefed<DOMMediaStream> DOMMediaStream::Clone() {
bool DOMMediaStream::Active() const { return mActive; }
MediaStreamTrack* DOMMediaStream::GetTrackById(const nsAString& aId) const {
for (const RefPtr<TrackPort>& info : mTracks) {
for (const auto& track : mTracks) {
nsString id;
info->GetTrack()->GetId(id);
track->GetId(id);
if (id == aId) {
return info->GetTrack();
}
}
return nullptr;
}
MediaStreamTrack* DOMMediaStream::GetOwnedTrackById(const nsAString& aId) {
for (const RefPtr<TrackPort>& info : mOwnedTracks) {
nsString id;
info->GetTrack()->GetId(id);
if (id == aId) {
return info->GetTrack();
return track;
}
}
return nullptr;
}
bool DOMMediaStream::HasTrack(const MediaStreamTrack& aTrack) const {
return !!FindPlaybackTrackPort(aTrack);
}
bool DOMMediaStream::OwnsTrack(const MediaStreamTrack& aTrack) const {
return !!FindOwnedTrackPort(aTrack);
}
bool DOMMediaStream::IsFinished() const {
return !mPlaybackStream || mPlaybackStream->IsFinished();
}
TrackRate DOMMediaStream::GraphRate() {
if (mPlaybackStream) {
return mPlaybackStream->GraphRate();
}
if (mOwnedStream) {
return mOwnedStream->GraphRate();
}
if (mInputStream) {
return mInputStream->GraphRate();
}
MOZ_ASSERT(false, "Not hooked up to a graph");
return 0;
}
void DOMMediaStream::InitSourceStream(MediaStreamGraph* aGraph) {
InitInputStreamCommon(aGraph->CreateSourceStream(), aGraph);
InitOwnedStreamCommon(aGraph);
InitPlaybackStreamCommon(aGraph);
}
void DOMMediaStream::InitTrackUnionStream(MediaStreamGraph* aGraph) {
InitInputStreamCommon(aGraph->CreateTrackUnionStream(), aGraph);
InitOwnedStreamCommon(aGraph);
InitPlaybackStreamCommon(aGraph);
}
void DOMMediaStream::InitAudioCaptureStream(nsIPrincipal* aPrincipal,
MediaStreamGraph* aGraph) {
const TrackID AUDIO_TRACK = 1;
RefPtr<BasicTrackSource> audioCaptureSource =
new BasicTrackSource(aPrincipal, MediaSourceEnum::AudioCapture);
AudioCaptureStream* audioCaptureStream = static_cast<AudioCaptureStream*>(
aGraph->CreateAudioCaptureStream(AUDIO_TRACK));
InitInputStreamCommon(audioCaptureStream, aGraph);
InitOwnedStreamCommon(aGraph);
InitPlaybackStreamCommon(aGraph);
RefPtr<MediaStreamTrack> track =
CreateDOMTrack(AUDIO_TRACK, MediaSegment::AUDIO, audioCaptureSource);
AddTrackInternal(track);
audioCaptureStream->Start();
}
void DOMMediaStream::InitInputStreamCommon(MediaStream* aStream,
MediaStreamGraph* aGraph) {
MOZ_ASSERT(!mOwnedStream,
"Input stream must be initialized before owned stream");
mInputStream = aStream;
mInputStream->RegisterUser();
}
void DOMMediaStream::InitOwnedStreamCommon(MediaStreamGraph* aGraph) {
MOZ_ASSERT(!mPlaybackStream,
"Owned stream must be initialized before playback stream");
mOwnedStream = aGraph->CreateTrackUnionStream();
mOwnedStream->QueueSetAutofinish(true);
mOwnedStream->RegisterUser();
if (mInputStream) {
mOwnedPort = mOwnedStream->AllocateInputPort(mInputStream);
}
}
void DOMMediaStream::InitPlaybackStreamCommon(MediaStreamGraph* aGraph) {
mPlaybackStream = aGraph->CreateTrackUnionStream();
mPlaybackStream->QueueSetAutofinish(true);
mPlaybackStream->RegisterUser();
if (mOwnedStream) {
mPlaybackPort = mPlaybackStream->AllocateInputPort(mOwnedStream);
}
LOG(LogLevel::Debug, ("DOMMediaStream %p Initiated with mInputStream=%p, "
"mOwnedStream=%p, mPlaybackStream=%p",
this, mInputStream, mOwnedStream, mPlaybackStream));
}
already_AddRefed<DOMMediaStream> DOMMediaStream::CreateSourceStreamAsInput(
nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph) {
auto stream = MakeRefPtr<DOMMediaStream>(aWindow);
stream->InitSourceStream(aGraph);
return stream.forget();
}
already_AddRefed<DOMMediaStream> DOMMediaStream::CreateTrackUnionStreamAsInput(
nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph) {
auto stream = MakeRefPtr<DOMMediaStream>(aWindow);
stream->InitTrackUnionStream(aGraph);
return stream.forget();
}
already_AddRefed<DOMMediaStream>
DOMMediaStream::CreateAudioCaptureStreamAsInput(nsPIDOMWindowInner* aWindow,
nsIPrincipal* aPrincipal,
MediaStreamGraph* aGraph) {
auto stream = MakeRefPtr<DOMMediaStream>(aWindow);
stream->InitAudioCaptureStream(aPrincipal, aGraph);
return stream.forget();
}
void DOMMediaStream::PrincipalChanged(MediaStreamTrack* aTrack) {
MOZ_ASSERT(aTrack);
NS_ASSERTION(HasTrack(*aTrack), "Principal changed for an unknown track");
LOG(LogLevel::Info,
("DOMMediaStream %p Principal changed for track %p", this, aTrack));
RecomputePrincipal();
}
void DOMMediaStream::RecomputePrincipal() {
nsCOMPtr<nsIPrincipal> previousPrincipal = mPrincipal.forget();
nsCOMPtr<nsIPrincipal> previousVideoPrincipal = mVideoPrincipal.forget();
if (mTracksPendingRemoval > 0) {
LOG(LogLevel::Info, ("DOMMediaStream %p RecomputePrincipal() Cannot "
"recompute stream principal with tracks pending "
"removal.",
this));
return;
}
LOG(LogLevel::Debug, ("DOMMediaStream %p Recomputing principal. "
"Old principal was %p.",
this, previousPrincipal.get()));
// mPrincipal is recomputed based on all current tracks, and tracks that have
// not ended in our playback stream.
for (const RefPtr<TrackPort>& info : mTracks) {
if (info->GetTrack()->Ended()) {
continue;
}
LOG(LogLevel::Debug,
("DOMMediaStream %p Taking live track %p with "
"principal %p into account.",
this, info->GetTrack(), info->GetTrack()->GetPrincipal()));
nsContentUtils::CombineResourcePrincipals(&mPrincipal,
info->GetTrack()->GetPrincipal());
if (info->GetTrack()->AsVideoStreamTrack()) {
nsContentUtils::CombineResourcePrincipals(
&mVideoPrincipal, info->GetTrack()->GetPrincipal());
}
}
LOG(LogLevel::Debug,
("DOMMediaStream %p new principal is %p.", this, mPrincipal.get()));
if (previousPrincipal != mPrincipal ||
previousVideoPrincipal != mVideoPrincipal) {
NotifyPrincipalChanged();
}
}
void DOMMediaStream::NotifyPrincipalChanged() {
if (!mPrincipal) {
// When all tracks are removed, mPrincipal will change to nullptr.
LOG(LogLevel::Info,
("DOMMediaStream %p Principal changed to nothing.", this));
} else {
LOG(LogLevel::Info, ("DOMMediaStream %p Principal changed. Now: "
"null=%d, codebase=%d, expanded=%d, system=%d",
this, mPrincipal->GetIsNullPrincipal(),
mPrincipal->GetIsContentPrincipal(),
mPrincipal->GetIsExpandedPrincipal(),
mPrincipal->IsSystemPrincipal()));
}
for (uint32_t i = 0; i < mPrincipalChangeObservers.Length(); ++i) {
mPrincipalChangeObservers[i]->PrincipalChanged(this);
}
}
bool DOMMediaStream::AddPrincipalChangeObserver(
PrincipalChangeObserver<DOMMediaStream>* aObserver) {
return mPrincipalChangeObservers.AppendElement(aObserver) != nullptr;
}
bool DOMMediaStream::RemovePrincipalChangeObserver(
PrincipalChangeObserver<DOMMediaStream>* aObserver) {
return mPrincipalChangeObservers.RemoveElement(aObserver);
return mTracks.Contains(&aTrack);
}
void DOMMediaStream::AddTrackInternal(MediaStreamTrack* aTrack) {
MOZ_ASSERT(aTrack->mOwningStream == this);
MOZ_ASSERT(FindOwnedDOMTrack(aTrack->GetInputStream(), aTrack->mInputTrackID,
aTrack->mTrackID));
MOZ_ASSERT(!FindPlaybackDOMTrack(aTrack->GetOwnedStream(), aTrack->mTrackID));
LOG(LogLevel::Debug,
("DOMMediaStream %p Adding owned track %p", this, aTrack));
mTracks.AppendElement(new TrackPort(mPlaybackPort, aTrack,
TrackPort::InputPortOwnership::EXTERNAL));
NotifyTrackAdded(aTrack);
AddTrack(*aTrack);
DispatchTrackEvent(NS_LITERAL_STRING("addtrack"), aTrack);
}
already_AddRefed<MediaStreamTrack> DOMMediaStream::CreateDOMTrack(
TrackID aTrackID, MediaSegment::Type aType, MediaStreamTrackSource* aSource,
const MediaTrackConstraints& aConstraints) {
MOZ_RELEASE_ASSERT(mInputStream);
MOZ_RELEASE_ASSERT(mOwnedStream);
MOZ_ASSERT(FindOwnedDOMTrack(GetInputStream(), aTrackID) == nullptr);
RefPtr<MediaStreamTrack> track;
switch (aType) {
case MediaSegment::AUDIO:
track =
new AudioStreamTrack(this, aTrackID, aTrackID, aSource, aConstraints);
break;
case MediaSegment::VIDEO:
track =
new VideoStreamTrack(this, aTrackID, aTrackID, aSource, aConstraints);
break;
default:
MOZ_CRASH("Unhandled track type");
}
LOG(LogLevel::Debug, ("DOMMediaStream %p Created new track %p with ID %u",
this, track.get(), aTrackID));
mOwnedTracks.AppendElement(new TrackPort(
mOwnedPort, track, TrackPort::InputPortOwnership::EXTERNAL));
return track.forget();
}
already_AddRefed<MediaStreamTrack> DOMMediaStream::CloneDOMTrack(
MediaStreamTrack& aTrack, TrackID aCloneTrackID) {
MOZ_RELEASE_ASSERT(mOwnedStream);
MOZ_RELEASE_ASSERT(mPlaybackStream);
MOZ_RELEASE_ASSERT(IsTrackIDExplicit(aCloneTrackID));
TrackID inputTrackID = aTrack.mInputTrackID;
MediaStream* inputStream = aTrack.GetInputStream();
RefPtr<MediaStreamTrack> newTrack = aTrack.CloneInternal(this, aCloneTrackID);
newTrack->mOriginalTrack =
aTrack.mOriginalTrack ? aTrack.mOriginalTrack.get() : &aTrack;
LOG(LogLevel::Debug,
("DOMMediaStream %p Created new track %p cloned from stream %p track %d",
this, newTrack.get(), inputStream, inputTrackID));
RefPtr<MediaInputPort> inputPort =
mOwnedStream->AllocateInputPort(inputStream, inputTrackID, aCloneTrackID);
mOwnedTracks.AppendElement(
new TrackPort(inputPort, newTrack, TrackPort::InputPortOwnership::OWNED));
mTracks.AppendElement(new TrackPort(mPlaybackPort, newTrack,
TrackPort::InputPortOwnership::EXTERNAL));
NotifyTrackAdded(newTrack);
newTrack->SetEnabled(aTrack.Enabled());
newTrack->SetMuted(aTrack.Muted());
newTrack->SetReadyState(aTrack.ReadyState());
if (aTrack.Ended()) {
// For extra suspenders, make sure that we don't forward data by mistake
// to the clone when the original has already ended.
// We only block END_EXISTING to allow any pending clones to end.
Unused << inputPort->BlockSourceTrackId(inputTrackID,
BlockingMode::END_EXISTING);
}
return newTrack.forget();
}
static DOMMediaStream::TrackPort* FindTrackPortAmongTracks(
const MediaStreamTrack& aTrack,
const nsTArray<RefPtr<DOMMediaStream::TrackPort>>& aTracks) {
for (const RefPtr<DOMMediaStream::TrackPort>& info : aTracks) {
if (info->GetTrack() == &aTrack) {
return info;
already_AddRefed<nsIPrincipal> DOMMediaStream::GetPrincipal() {
nsCOMPtr<nsIPrincipal> principal =
nsGlobalWindowInner::Cast(mWindow)->GetPrincipal();
for (const auto& t : mTracks) {
if (t->Ended()) {
continue;
}
nsContentUtils::CombineResourcePrincipals(&principal, t->GetPrincipal());
}
return nullptr;
}
MediaStreamTrack* DOMMediaStream::FindOwnedDOMTrack(MediaStream* aInputStream,
TrackID aInputTrackID,
TrackID aTrackID) const {
MOZ_RELEASE_ASSERT(mOwnedStream);
for (const RefPtr<TrackPort>& info : mOwnedTracks) {
if (info->GetInputPort() &&
info->GetInputPort()->GetSource() == aInputStream &&
info->GetTrack()->mInputTrackID == aInputTrackID &&
(aTrackID == TRACK_ANY || info->GetTrack()->mTrackID == aTrackID)) {
// This track is owned externally but in our playback stream.
return info->GetTrack();
}
}
return nullptr;
}
DOMMediaStream::TrackPort* DOMMediaStream::FindOwnedTrackPort(
const MediaStreamTrack& aTrack) const {
return FindTrackPortAmongTracks(aTrack, mOwnedTracks);
}
MediaStreamTrack* DOMMediaStream::FindPlaybackDOMTrack(
MediaStream* aInputStream, TrackID aInputTrackID) const {
if (!mPlaybackStream) {
// One would think we can assert mPlaybackStream here, but track clones have
// a dummy DOMMediaStream that doesn't have a playback stream, so we can't.
return nullptr;
}
for (const RefPtr<TrackPort>& info : mTracks) {
if (info->GetInputPort() == mPlaybackPort && aInputStream == mOwnedStream &&
info->GetTrack()->mInputTrackID == aInputTrackID) {
// This track is in our owned and playback streams.
return info->GetTrack();
}
if (info->GetInputPort() &&
info->GetInputPort()->GetSource() == aInputStream &&
info->GetSourceTrackId() == aInputTrackID) {
// This track is owned externally but in our playback stream.
MOZ_ASSERT(IsTrackIDExplicit(aInputTrackID));
return info->GetTrack();
}
}
return nullptr;
}
DOMMediaStream::TrackPort* DOMMediaStream::FindPlaybackTrackPort(
const MediaStreamTrack& aTrack) const {
return FindTrackPortAmongTracks(aTrack, mTracks);
return principal.forget();
}
void DOMMediaStream::NotifyActive() {
@@ -927,10 +423,6 @@ void DOMMediaStream::NotifyInactive() {
void DOMMediaStream::RegisterTrackListener(TrackListener* aListener) {
MOZ_ASSERT(NS_IsMainThread());
if (mNotifiedOfMediaStreamGraphShutdown) {
// No more tracks will ever be added, so just do nothing.
return;
}
mTrackListeners.AppendElement(aListener);
}
@@ -956,29 +448,6 @@ void DOMMediaStream::SetFinishedOnInactive(bool aFinishedOnInactive) {
void DOMMediaStream::NotifyTrackAdded(const RefPtr<MediaStreamTrack>& aTrack) {
MOZ_ASSERT(NS_IsMainThread());
if (mTracksPendingRemoval > 0) {
// If there are tracks pending removal we may not degrade the current
// principals until those tracks have been confirmed removed from the
// playback stream. Instead combine with the new track and the (potentially)
// degraded principal will be calculated when it's safe.
nsContentUtils::CombineResourcePrincipals(&mPrincipal,
aTrack->GetPrincipal());
LOG(LogLevel::Debug, ("DOMMediaStream %p saw a track get added. Combining "
"its principal %p into our while waiting for pending "
"tracks to be removed. New principal is %p.",
this, aTrack->GetPrincipal(), mPrincipal.get()));
if (aTrack->AsVideoStreamTrack()) {
nsContentUtils::CombineResourcePrincipals(&mVideoPrincipal,
aTrack->GetPrincipal());
}
} else {
LOG(LogLevel::Debug, ("DOMMediaStream %p saw a track get added. "
"Recomputing principal.",
this));
RecomputePrincipal();
}
aTrack->AddPrincipalChangeObserver(this);
aTrack->AddConsumer(mPlaybackTrackListener);
for (int32_t i = mTrackListeners.Length() - 1; i >= 0; --i) {
@@ -1007,17 +476,11 @@ void DOMMediaStream::NotifyTrackRemoved(
// go inactive, regardless of the timing of the last track ending.
aTrack->RemoveConsumer(mPlaybackTrackListener);
aTrack->RemovePrincipalChangeObserver(this);
for (int32_t i = mTrackListeners.Length() - 1; i >= 0; --i) {
mTrackListeners[i]->NotifyTrackRemoved(aTrack);
}
// Don't call RecomputePrincipal here as the track may still exist in the
// playback stream in the MediaStreamGraph. It will instead be called when
// the track has been confirmed removed by the graph. See
// BlockPlaybackTrack().
if (!mActive) {
NS_ASSERTION(false, "Shouldn't remove a live track if already inactive");
return;
@@ -1045,46 +508,3 @@ nsresult DOMMediaStream::DispatchTrackEvent(
return DispatchTrustedEvent(event);
}
void DOMMediaStream::BlockPlaybackTrack(TrackPort* aTrack) {
MOZ_ASSERT(aTrack);
++mTracksPendingRemoval;
RefPtr<DOMMediaStream> that = this;
aTrack
->BlockSourceTrackId(aTrack->GetTrack()->mTrackID, BlockingMode::CREATION)
->Then(
GetCurrentThreadSerialEventTarget(), __func__,
[this, that](bool aIgnore) { NotifyPlaybackTrackBlocked(); },
[](const nsresult& aIgnore) {
NS_ERROR("Could not remove track from MSG");
});
}
void DOMMediaStream::NotifyPlaybackTrackBlocked() {
MOZ_ASSERT(mTracksPendingRemoval > 0,
"A track reported finished blocking more times than we asked for");
if (--mTracksPendingRemoval == 0) {
// The MediaStreamGraph has reported a track was blocked and we are not
// waiting for any further tracks to get blocked. It is now safe to
// recompute the principal based on our main thread track set state.
LOG(LogLevel::Debug, ("DOMMediaStream %p saw all tracks pending removal "
"finish. Recomputing principal.",
this));
RecomputePrincipal();
}
}
DOMAudioNodeMediaStream::DOMAudioNodeMediaStream(nsPIDOMWindowInner* aWindow,
AudioNode* aNode)
: DOMMediaStream(aWindow), mStreamNode(aNode) {}
DOMAudioNodeMediaStream::~DOMAudioNodeMediaStream() {}
already_AddRefed<DOMAudioNodeMediaStream>
DOMAudioNodeMediaStream::CreateTrackUnionStreamAsInput(
nsPIDOMWindowInner* aWindow, AudioNode* aNode, MediaStreamGraph* aGraph) {
RefPtr<DOMAudioNodeMediaStream> stream =
new DOMAudioNodeMediaStream(aWindow, aNode);
stream->InitTrackUnionStream(aGraph);
return stream.forget();
}


@@ -17,7 +17,6 @@
#include "MediaTrackConstraints.h"
#include "mozilla/DOMEventTargetHelper.h"
#include "mozilla/RelativeTimeline.h"
#include "PrincipalChangeObserver.h"
namespace mozilla {
@@ -25,23 +24,16 @@ class AbstractThread;
class DOMMediaStream;
class MediaStream;
class MediaInputPort;
class MediaStreamGraph;
class ProcessedMediaStream;
enum class BlockingMode;
namespace dom {
class AudioNode;
class HTMLCanvasElement;
class MediaStreamTrack;
class MediaStreamTrackSource;
class AudioStreamTrack;
class VideoStreamTrack;
class AudioTrack;
class VideoTrack;
class AudioTrackList;
class VideoTrackList;
class MediaTrackListListener;
} // namespace dom
namespace layers {
@@ -57,131 +49,17 @@ class OverlayImage;
}
/**
// clang-format off
* DOM wrapper for MediaStreams.
* DOMMediaStream is the implementation of the js-exposed MediaStream interface.
*
* To account for track operations such as clone(), addTrack() and
* removeTrack(), a DOMMediaStream wraps three internal (and chained)
* MediaStreams:
* 1. mInputStream
* - Controlled by the owner/source of the DOMMediaStream.
* It's a stream of the type indicated by
* - DOMMediaStream::CreateSourceStream/CreateTrackUnionStream. A source
* typically creates its DOMMediaStream, creates the MediaStreamTracks
* owned by said stream, then gets the internal input stream to which it
* feeds data for the previously created tracks.
* - When necessary it can create tracks on the internal stream only and
* their corresponding MediaStreamTracks will be asynchronously created.
* 2. mOwnedStream
* - A TrackUnionStream containing tracks owned by this stream.
* - The internal model of a MediaStreamTrack consists of its owning
* DOMMediaStream and the TrackID of the corresponding internal track in
* the owning DOMMediaStream's mOwnedStream.
* - The owned stream is different from the input stream since a cloned
* DOMMediaStream is also the owner of its (cloned) MediaStreamTracks.
* - Stopping an original track shall not stop its clone. This is
* solved by stopping it at the owned stream, while the clone's owned
* stream gets data directly from the original input stream.
* - A DOMMediaStream (original or clone) gets all tracks dynamically
* added by the source automatically forwarded by having a TRACK_ANY
* MediaInputPort set up from the owning DOMMediaStream's input stream
* to this DOMMediaStream's owned stream.
* 3. mPlaybackStream
* - A TrackUnionStream containing the tracks corresponding to the
* MediaStreamTracks currently in this DOMMediaStream (per getTracks()).
* - Similarly as for mOwnedStream, there's a TRACK_ANY MediaInputPort set
* up from the owned stream to the playback stream to allow tracks
* dynamically added by the source to be automatically forwarded to any
* audio or video sinks.
* - MediaStreamTracks added by addTrack() are set up with a MediaInputPort
* locked to their internal TrackID, from their owning DOMMediaStream's
* owned stream to this playback stream.
*
*
* A graphical representation of how tracks are connected in various cases as
* follows:
*
* addTrack()ed case:
* DOMStream A
* Input Owned Playback
* t1 ---------> t1 ------------> t1 <- MediaStreamTrack X
* (pointing to t1 in A)
* --------> t2 <- MediaStreamTrack Y
* / (pointing to t1 in B)
* DOMStream B /
* Input Owned / Playback
* t1 ---------> t1 ------------> t1 <- MediaStreamTrack Y
* (pointing to t1 in B)
*
* removeTrack()ed case:
* DOMStream A
* Input Owned Playback
* t1 ---------> t1 <- No tracks
*
*
* clone()d case:
* DOMStream A
* Input Owned Playback
* t1 ---------> t1 ------------> t1 <- MediaStreamTrack X
* \ (pointing to t1 in A)
* -----
* DOMStream B \
* Input \ Owned Playback
* -> t1 ------------> t1 <- MediaStreamTrack Y
* (pointing to t1 in B)
*
*
* addTrack()ed, removeTrack()ed and clone()d case:
*
* Here we have done the following:
* var A = someStreamWithTwoTracks;
* var B = someStreamWithOneTrack;
* var X = A.getTracks()[0];
* var Y = A.getTracks()[1];
* var Z = B.getTracks()[0];
* A.addTrack(Z);
* A.removeTrack(X);
* B.removeTrack(Z);
* var A' = A.clone();
*
* DOMStream A
* Input Owned Playback
* t1 ---------> t1 <- MediaStreamTrack X
* (removed)
* (pointing to t1 in A)
* t2 ---------> t2 ------------> t2 <- MediaStreamTrack Y
* \ (pointing to t2 in A)
* \ ------> t3 <- MediaStreamTrack Z
* \ / (pointing to t1 in B)
* DOMStream B \ /
* Input \ Owned / Playback
* t1 ---^-----> t1 --- <- MediaStreamTrack Z
* (removed)
* \ \ (pointing to t1 in B)
* \ \
* DOMStream A' \ \
* Input \ \ Owned Playback
* \ -> t1 ------------> t1 <- MediaStreamTrack Y'
* \ (pointing to t1 in A')
* ----> t2 ------------> t2 <- MediaStreamTrack Z'
* (pointing to t2 in A')
* This is a thin main-thread class grouping MediaStreamTracks together.
*/
// clang-format on
class DOMMediaStream
: public DOMEventTargetHelper,
public dom::PrincipalChangeObserver<dom::MediaStreamTrack>,
public RelativeTimeline,
public SupportsWeakPtr<DOMMediaStream> {
friend class dom::MediaStreamTrack;
class DOMMediaStream : public DOMEventTargetHelper,
public RelativeTimeline,
public SupportsWeakPtr<DOMMediaStream> {
typedef dom::MediaStreamTrack MediaStreamTrack;
typedef dom::AudioStreamTrack AudioStreamTrack;
typedef dom::VideoStreamTrack VideoStreamTrack;
typedef dom::MediaStreamTrackSource MediaStreamTrackSource;
typedef dom::AudioTrack AudioTrack;
typedef dom::VideoTrack VideoTrack;
typedef dom::AudioTrackList AudioTrackList;
typedef dom::VideoTrackList VideoTrackList;
public:
typedef dom::MediaTrackConstraints MediaTrackConstraints;
@@ -215,80 +93,6 @@ class DOMMediaStream
virtual void NotifyInactive(){};
};
/**
* TrackPort is a representation of a MediaStreamTrack-MediaInputPort pair
* that make up a link between the Owned stream and the Playback stream.
*
* Semantically, the track is the identifier/key and the port the value of
* this connection.
*
* The input port can be shared between several TrackPorts. This is the case
* for DOMMediaStream's mPlaybackPort which forwards all tracks in its
* mOwnedStream automatically.
*
* If the MediaStreamTrack is owned by another DOMMediaStream (called A) than
* the one owning the TrackPort (called B), the input port (locked to the
* MediaStreamTrack's TrackID) connects A's mOwnedStream to B's
* mPlaybackStream.
*
* A TrackPort may never leave the DOMMediaStream it was created in. Internal
* use only.
*/
class TrackPort {
public:
NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(TrackPort)
NS_DECL_CYCLE_COLLECTION_NATIVE_CLASS(TrackPort)
/**
* Indicates MediaInputPort ownership to the TrackPort.
*
* OWNED - Owned by the TrackPort itself. TrackPort must destroy the
* input port when it's destructed.
* EXTERNAL - Owned by another entity. It's the caller's responsibility to
* ensure the the MediaInputPort outlives the TrackPort.
*/
enum class InputPortOwnership { OWNED = 1, EXTERNAL };
TrackPort(MediaInputPort* aInputPort, MediaStreamTrack* aTrack,
const InputPortOwnership aOwnership);
protected:
virtual ~TrackPort();
public:
void DestroyInputPort();
/**
* Returns the source stream of the input port.
*/
MediaStream* GetSource() const;
/**
* Returns the track ID this track is locked to in the source stream of the
* input port.
*/
TrackID GetSourceTrackId() const;
MediaInputPort* GetInputPort() const { return mInputPort; }
MediaStreamTrack* GetTrack() const { return mTrack; }
/**
* Blocks aTrackId from going into mInputPort unless the port has been
* destroyed. Returns a promise that gets resolved when the MediaStreamGraph
* has applied the block in the playback stream.
*/
RefPtr<GenericPromise> BlockSourceTrackId(TrackID aTrackId,
BlockingMode aBlockingMode);
private:
RefPtr<MediaInputPort> mInputPort;
RefPtr<MediaStreamTrack> mTrack;
// Defines if we've been given ownership of the input port or if it's owned
// externally. The owner is responsible for destroying the port.
const InputPortOwnership mOwnership;
};
explicit DOMMediaStream(nsPIDOMWindowInner* aWindow);
NS_DECL_ISUPPORTS_INHERITED
@@ -335,128 +139,21 @@ class DOMMediaStream
// NON-WebIDL
MediaStreamTrack* GetOwnedTrackById(const nsAString& aId);
/**
* Returns true if this DOMMediaStream has aTrack in its mPlaybackStream.
* Returns true if this DOMMediaStream has aTrack in mTracks.
*/
bool HasTrack(const MediaStreamTrack& aTrack) const;
/**
* Returns true if this DOMMediaStream owns aTrack.
*/
bool OwnsTrack(const MediaStreamTrack& aTrack) const;
/**
* Returns the corresponding MediaStreamTrack if it's in our mOwnedStream.
* aInputTrackID should match the track's TrackID in its input stream,
* and aTrackID the TrackID in mOwnedStream.
*
* When aTrackID is not supplied or set to TRACK_ANY, we return the first
* MediaStreamTrack that matches the given input track. Note that there may
* be multiple MediaStreamTracks matching the same input track, but that they
* in that case all share the same MediaStreamTrackSource.
*/
MediaStreamTrack* FindOwnedDOMTrack(MediaStream* aInputStream,
TrackID aInputTrackID,
TrackID aTrackID = TRACK_ANY) const;
/**
* Returns the TrackPort connecting aTrack's input stream to mOwnedStream,
* or nullptr if aTrack is not owned by this DOMMediaStream.
*/
TrackPort* FindOwnedTrackPort(const MediaStreamTrack& aTrack) const;
/**
* Returns the corresponding MediaStreamTrack if it's in our mPlaybackStream.
* aInputTrackID should match the track's TrackID in its owned stream.
*/
MediaStreamTrack* FindPlaybackDOMTrack(MediaStream* aInputStream,
TrackID aInputTrackID) const;
/**
* Returns the TrackPort connecting mOwnedStream to mPlaybackStream for
* aTrack.
*/
TrackPort* FindPlaybackTrackPort(const MediaStreamTrack& aTrack) const;
MediaStream* GetInputStream() const { return mInputStream; }
ProcessedMediaStream* GetOwnedStream() const { return mOwnedStream; }
ProcessedMediaStream* GetPlaybackStream() const { return mPlaybackStream; }
/**
* Allows a video element to identify this stream as a camera stream, which
* needs special treatment.
*/
virtual MediaStream* GetCameraStream() const { return nullptr; }
/**
* Legacy method that returns true when the playback stream has finished.
*/
bool IsFinished() const;
TrackRate GraphRate();
/**
* Returns a principal indicating who may access this stream. The stream
* contents can only be accessed by principals subsuming this principal.
*/
nsIPrincipal* GetPrincipal() { return mPrincipal; }
/**
* Returns a principal indicating who may access video data of this stream.
* The video principal will be a combination of all live video tracks.
*/
nsIPrincipal* GetVideoPrincipal() { return mVideoPrincipal; }
// From PrincipalChangeObserver<MediaStreamTrack>.
void PrincipalChanged(MediaStreamTrack* aTrack) override;
/**
* Add a PrincipalChangeObserver to this stream.
*
* Returns true if it was successfully added.
*
* Ownership of the PrincipalChangeObserver remains with the caller, and it's
* the caller's responsibility to remove the observer before it dies.
*/
bool AddPrincipalChangeObserver(
dom::PrincipalChangeObserver<DOMMediaStream>* aObserver);
/**
* Remove an added PrincipalChangeObserver from this stream.
*
* Returns true if it was successfully removed.
*/
bool RemovePrincipalChangeObserver(
dom::PrincipalChangeObserver<DOMMediaStream>* aObserver);
already_AddRefed<nsIPrincipal> GetPrincipal();
// Webrtc allows the remote side to name a stream whatever it wants, and we
// need to surface this to content.
void AssignId(const nsAString& aID) { mID = aID; }
/**
* Create a DOMMediaStream whose underlying input stream is a
* SourceMediaStream.
*/
static already_AddRefed<DOMMediaStream> CreateSourceStreamAsInput(
nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph);
/**
* Create a DOMMediaStream whose underlying input stream is a
* TrackUnionStream.
*/
static already_AddRefed<DOMMediaStream> CreateTrackUnionStreamAsInput(
nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph);
/**
* Create an DOMMediaStream whose underlying input stream is an
* AudioCaptureStream.
*/
static already_AddRefed<DOMMediaStream> CreateAudioCaptureStreamAsInput(
nsPIDOMWindowInner* aWindow, nsIPrincipal* aPrincipal,
MediaStreamGraph* aGraph);
/**
* Adds a MediaStreamTrack to mTracks and raises "addtrack".
*
@ -466,26 +163,6 @@ class DOMMediaStream
*/
void AddTrackInternal(MediaStreamTrack* aTrack);
/**
* Called for each track in our owned stream to indicate to JS that we
* are carrying that track.
*
* Pre-creates a MediaStreamTrack and returns it.
* It is up to the caller to make sure it is added through AddTrackInternal.
*/
already_AddRefed<MediaStreamTrack> CreateDOMTrack(
TrackID aTrackID, MediaSegment::Type aType,
MediaStreamTrackSource* aSource,
const MediaTrackConstraints& aConstraints = MediaTrackConstraints());
/**
* Creates a MediaStreamTrack cloned from aTrack, adds it to mTracks and
* returns it.
* aCloneTrackID is the TrackID the new track will get in mOwnedStream.
*/
already_AddRefed<MediaStreamTrack> CloneDOMTrack(MediaStreamTrack& aTrack,
TrackID aCloneTrackID);
/**
* Add an nsISupports object that this stream will keep alive as long as
* the stream itself is alive.
@ -512,27 +189,6 @@ class DOMMediaStream
virtual ~DOMMediaStream();
void Destroy();
void InitSourceStream(MediaStreamGraph* aGraph);
void InitTrackUnionStream(MediaStreamGraph* aGraph);
void InitAudioCaptureStream(nsIPrincipal* aPrincipal,
MediaStreamGraph* aGraph);
// Sets up aStream as mInputStream. A producer may append data to a
// SourceMediaStream input stream, or connect another stream to a
// TrackUnionStream input stream.
void InitInputStreamCommon(MediaStream* aStream, MediaStreamGraph* aGraph);
// Sets up a new TrackUnionStream as mOwnedStream and connects it to
// mInputStream with a TRACK_ANY MediaInputPort if available.
// If this DOMMediaStream should have an input stream (producing data),
// it has to be initiated before the owned stream.
void InitOwnedStreamCommon(MediaStreamGraph* aGraph);
// Sets up a new TrackUnionStream as mPlaybackStream and connects it to
// mOwnedStream with a TRACK_ANY MediaInputPort if available.
// If this DOMMediaStream should have an owned stream (producer or clone),
// it has to be initiated before the playback stream.
void InitPlaybackStreamCommon(MediaStreamGraph* aGraph);
// Dispatches NotifyActive() to all registered track listeners.
void NotifyActive();
@ -550,76 +206,21 @@ class DOMMediaStream
nsresult DispatchTrackEvent(const nsAString& aName,
const RefPtr<MediaStreamTrack>& aTrack);
class PlaybackTrackListener;
friend class PlaybackTrackListener;
/**
* Block a track in our playback stream. Calls NotifyPlaybackTrackBlocked()
* after the MediaStreamGraph has applied the block and the track is no longer
* live.
*/
void BlockPlaybackTrack(TrackPort* aTrack);
/**
* Called on main thread after MediaStreamGraph has applied a track block in
* our playback stream.
*/
void NotifyPlaybackTrackBlocked();
// Recomputes the current principal of this stream based on the set of tracks
// it currently contains. PrincipalChangeObservers will be notified only if
// the principal changes.
void RecomputePrincipal();
// We need this to track our parent object.
nsCOMPtr<nsPIDOMWindowInner> mWindow;
// MediaStreams are owned by the graph, but we tell them when to die,
// and they won't die until we let them.
// This stream contains tracks used as input by us. Cloning happens from this
// stream. Tracks may exist in these stream but not in |mOwnedStream| if they
// have been stopped.
MediaStream* mInputStream;
// This stream contains tracks owned by us (if we were created directly from
// source, or cloned from some other stream). Tracks map to |mOwnedTracks|.
ProcessedMediaStream* mOwnedStream;
// This stream contains tracks currently played by us, despite of owner.
// Tracks map to |mTracks|.
ProcessedMediaStream* mPlaybackStream;
// This port connects mInputStream to mOwnedStream. All tracks forwarded.
RefPtr<MediaInputPort> mOwnedPort;
// This port connects mOwnedStream to mPlaybackStream. All tracks not
// explicitly blocked due to removal are forwarded.
RefPtr<MediaInputPort> mPlaybackPort;
// MediaStreamTracks corresponding to tracks in our mOwnedStream.
AutoTArray<RefPtr<TrackPort>, 2> mOwnedTracks;
// MediaStreamTracks corresponding to tracks in our mPlaybackStream.
AutoTArray<RefPtr<TrackPort>, 2> mTracks;
// Number of MediaStreamTracks that have been removed on main thread but are
// waiting to be removed on MediaStreamGraph thread.
size_t mTracksPendingRemoval;
// MediaStreamTracks contained by this DOMMediaStream.
nsTArray<RefPtr<MediaStreamTrack>> mTracks;
// Listener tracking when live MediaStreamTracks in mTracks end.
class PlaybackTrackListener;
RefPtr<PlaybackTrackListener> mPlaybackTrackListener;
// Set to true after MediaStreamGraph has created tracks for mPlaybackStream.
bool mTracksCreated;
nsString mID;
// Keep these alive while the stream is alive.
nsTArray<nsCOMPtr<nsISupports>> mConsumersToKeepAlive;
bool mNotifiedOfMediaStreamGraphShutdown;
// The track listeners subscribe to changes in this stream's track set.
nsTArray<TrackListener*> mTrackListeners;
@ -629,48 +230,10 @@ class DOMMediaStream
// For compatibility with mozCaptureStream, we in some cases do not go
// inactive until the MediaDecoder lets us. (Remove this in Bug 1302379)
bool mFinishedOnInactive;
private:
void NotifyPrincipalChanged();
// Principal identifying who may access the collected contents of this stream.
// If null, this stream can be used by anyone because it has no content yet.
nsCOMPtr<nsIPrincipal> mPrincipal;
// Video principal is used by video element as access is requested to its
// image data.
nsCOMPtr<nsIPrincipal> mVideoPrincipal;
nsTArray<dom::PrincipalChangeObserver<DOMMediaStream>*>
mPrincipalChangeObservers;
};
NS_DEFINE_STATIC_IID_ACCESSOR(DOMMediaStream, NS_DOMMEDIASTREAM_IID)
class DOMAudioNodeMediaStream : public DOMMediaStream {
typedef dom::AudioNode AudioNode;
public:
DOMAudioNodeMediaStream(nsPIDOMWindowInner* aWindow, AudioNode* aNode);
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(DOMAudioNodeMediaStream,
DOMMediaStream)
/**
* Create a DOMAudioNodeMediaStream whose underlying stream is a
* TrackUnionStream.
*/
static already_AddRefed<DOMAudioNodeMediaStream>
CreateTrackUnionStreamAsInput(nsPIDOMWindowInner* aWindow, AudioNode* aNode,
MediaStreamGraph* aGraph);
protected:
~DOMAudioNodeMediaStream();
private:
// If this object wraps a stream owned by an AudioNode, we need to ensure that
// the node isn't cycle-collected too early.
RefPtr<AudioNode> mStreamNode;
};
} // namespace mozilla
#endif /* NSDOMMEDIASTREAM_H_ */
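For orientation, here is a minimal, hypothetical sketch of wiring a graph-created stream into a track source, a MediaStreamTrack and a DOMMediaStream under the new API. It is not part of the patch; MyVideoTrackSource is an assumed MediaStreamTrackSource subclass that would own the graph stream and destroy it in Stop(), in the spirit of the LocalTrackSource added to MediaManager.cpp later in this patch.

// Hypothetical producer glue, for illustration only.
#include "DOMMediaStream.h"
#include "MediaStreamGraph.h"
#include "mozilla/dom/VideoStreamTrack.h"

already_AddRefed<mozilla::DOMMediaStream> CreateVideoCaptureStream(
    nsPIDOMWindowInner* aWindow, mozilla::MediaStreamGraph* aGraph,
    nsIPrincipal* aPrincipal) {
  using namespace mozilla;
  // The producer creates and owns the underlying graph stream.
  RefPtr<SourceMediaStream> source = aGraph->CreateSourceStream();
  const TrackID videoTrackId = 1;
  // MyVideoTrackSource (assumed) takes ownership of `source`.
  auto trackSource = MakeRefPtr<MyVideoTrackSource>(aPrincipal, source);
  RefPtr<dom::MediaStreamTrack> track = new dom::VideoStreamTrack(
      aWindow, source, videoTrackId, trackSource);
  // The DOMMediaStream only aggregates the tracks.
  auto stream = MakeRefPtr<DOMMediaStream>(aWindow);
  stream->AddTrackInternal(track);
  return stream.forget();
}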

View file

@ -237,12 +237,12 @@ RefPtr<GenericPromise> MediaDecoder::SetSink(AudioDeviceInfo* aSink) {
return GetStateMachine()->InvokeSetSink(aSink);
}
void MediaDecoder::AddOutputStream(DOMMediaStream* aStream) {
void MediaDecoder::AddOutputStream(DOMMediaStream* aStream,
MediaStreamGraphImpl* aGraph) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");
AbstractThread::AutoEnter context(AbstractMainThread());
mDecoderStateMachine->EnsureOutputStreamManager(
aStream->GetInputStream()->Graph());
mDecoderStateMachine->EnsureOutputStreamManager(aGraph);
if (mInfo) {
mDecoderStateMachine->EnsureOutputStreamManagerHasTracks(*mInfo);
}
@ -256,20 +256,6 @@ void MediaDecoder::RemoveOutputStream(DOMMediaStream* aStream) {
mDecoderStateMachine->RemoveOutputStream(aStream);
}
void MediaDecoder::SetNextOutputStreamTrackID(TrackID aNextTrackID) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");
AbstractThread::AutoEnter context(AbstractMainThread());
mDecoderStateMachine->SetNextOutputStreamTrackID(aNextTrackID);
}
TrackID MediaDecoder::GetNextOutputStreamTrackID() {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");
AbstractThread::AutoEnter context(AbstractMainThread());
return mDecoderStateMachine->GetNextOutputStreamTrackID();
}
void MediaDecoder::SetOutputStreamPrincipal(nsIPrincipal* aPrincipal) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");

View file

@ -16,10 +16,8 @@
# include "MediaPromiseDefs.h"
# include "MediaResource.h"
# include "MediaStatistics.h"
# include "MediaStreamGraph.h"
# include "SeekTarget.h"
# include "TimeUnits.h"
# include "TrackID.h"
# include "mozilla/Atomics.h"
# include "mozilla/CDMProxy.h"
# include "mozilla/MozPromise.h"
@ -33,6 +31,7 @@
# include "nsISupports.h"
# include "nsITimer.h"
class AudioDeviceInfo;
class nsIPrincipal;
namespace mozilla {
@ -48,6 +47,7 @@ class VideoFrameContainer;
class MediaFormatReader;
class MediaDecoderStateMachine;
struct MediaPlaybackEvent;
class MediaStreamGraphImpl;
enum class Visibility : uint8_t;
@ -175,17 +175,10 @@ class MediaDecoder : public DecoderDoctorLifeLogger<MediaDecoder> {
// Add an output stream. All decoder output will be sent to the stream.
// The stream is initially blocked. The decoder is responsible for unblocking
// it while it is playing back.
void AddOutputStream(DOMMediaStream* aStream);
void AddOutputStream(DOMMediaStream* aStream, MediaStreamGraphImpl* aGraph);
// Remove an output stream added with AddOutputStream.
void RemoveOutputStream(DOMMediaStream* aStream);
// Set the TrackID to be used as the initial id by the next DecodedStream
// sink.
void SetNextOutputStreamTrackID(TrackID aNextTrackID);
// Get the next TrackID to be allocated by DecodedStream,
// or the last set TrackID if there is no DecodedStream sink.
TrackID GetNextOutputStreamTrackID();
// Update the principal for any output streams and their tracks.
void SetOutputStreamPrincipal(nsIPrincipal* aPrincipal);
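As a caller-side illustration (helper name assumed, not part of this patch), the capturing element now passes the target graph explicitly instead of having the decoder derive it from the stream:

// Hypothetical glue on the element side.
void CaptureDecoderInto(mozilla::MediaDecoder* aDecoder,
                        mozilla::DOMMediaStream* aStream,
                        mozilla::MediaStreamGraphImpl* aGraph,
                        nsIPrincipal* aPrincipal) {
  aDecoder->SetOutputStreamPrincipal(aPrincipal);
  aDecoder->AddOutputStream(aStream, aGraph);
}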

View file

@ -3339,7 +3339,6 @@ RefPtr<ShutdownPromise> MediaDecoderStateMachine::BeginShutdown() {
MOZ_ASSERT(NS_IsMainThread());
if (mOutputStreamManager) {
mOutputStreamManager->Disconnect();
mNextOutputStreamTrackID = mOutputStreamManager->NextTrackID();
}
return InvokeAsync(OwnerThread(), this, __func__,
&MediaDecoderStateMachine::Shutdown);
@ -3777,14 +3776,13 @@ void MediaDecoderStateMachine::RemoveOutputStream(DOMMediaStream* aStream) {
}
void MediaDecoderStateMachine::EnsureOutputStreamManager(
MediaStreamGraph* aGraph) {
MediaStreamGraphImpl* aGraph) {
MOZ_ASSERT(NS_IsMainThread());
if (mOutputStreamManager) {
return;
}
mOutputStreamManager = new OutputStreamManager(
aGraph->CreateSourceStream(), mNextOutputStreamTrackID,
mOutputStreamPrincipal, mAbstractMainThread);
mOutputStreamManager = new OutputStreamManager(aGraph, mOutputStreamPrincipal,
mAbstractMainThread);
}
void MediaDecoderStateMachine::EnsureOutputStreamManagerHasTracks(
@ -3801,33 +3799,20 @@ void MediaDecoderStateMachine::EnsureOutputStreamManagerHasTracks(
}
if (aLoadedInfo.HasAudio()) {
MOZ_ASSERT(!mOutputStreamManager->HasTrackType(MediaSegment::AUDIO));
mOutputStreamManager->AddTrack(MediaSegment::AUDIO);
LOG("Pre-created audio track with id %d",
mOutputStreamManager->GetLiveTrackIDFor(MediaSegment::AUDIO));
RefPtr<SourceMediaStream> dummy =
mOutputStreamManager->AddTrack(MediaSegment::AUDIO);
LOG("Pre-created audio track with underlying stream %p", dummy.get());
Unused << dummy;
}
if (aLoadedInfo.HasVideo()) {
MOZ_ASSERT(!mOutputStreamManager->HasTrackType(MediaSegment::VIDEO));
mOutputStreamManager->AddTrack(MediaSegment::VIDEO);
LOG("Pre-created video track with id %d",
mOutputStreamManager->GetLiveTrackIDFor(MediaSegment::VIDEO));
RefPtr<SourceMediaStream> dummy =
mOutputStreamManager->AddTrack(MediaSegment::VIDEO);
LOG("Pre-created video track with underlying stream %p", dummy.get());
Unused << dummy;
}
}
void MediaDecoderStateMachine::SetNextOutputStreamTrackID(
TrackID aNextTrackID) {
MOZ_ASSERT(NS_IsMainThread());
LOG("SetNextOutputStreamTrackID aNextTrackID=%d", aNextTrackID);
mNextOutputStreamTrackID = aNextTrackID;
}
TrackID MediaDecoderStateMachine::GetNextOutputStreamTrackID() {
MOZ_ASSERT(NS_IsMainThread());
if (mOutputStreamManager) {
return mOutputStreamManager->NextTrackID();
}
return mNextOutputStreamTrackID;
}
class VideoQueueMemoryFunctor : public nsDequeFunctor {
public:
VideoQueueMemoryFunctor() : mSize(0) {}

View file

@ -188,7 +188,7 @@ class MediaDecoderStateMachine
void SetOutputStreamPrincipal(nsIPrincipal* aPrincipal);
// If an OutputStreamManager does not exist, one will be created.
void EnsureOutputStreamManager(MediaStreamGraph* aGraph);
void EnsureOutputStreamManager(MediaStreamGraphImpl* aGraph);
// If an OutputStreamManager exists, tracks matching aLoadedInfo will be
// created unless they already exist in the manager.
void EnsureOutputStreamManagerHasTracks(const MediaInfo& aLoadedInfo);
@ -198,12 +198,6 @@ class MediaDecoderStateMachine
// Remove an output stream added with AddOutputStream. If the last output
// stream was removed, we will also tear down the OutputStreamManager.
void RemoveOutputStream(DOMMediaStream* aStream);
// Set the TrackID to be used as the initial id by the next DecodedStream
// sink.
void SetNextOutputStreamTrackID(TrackID aNextTrackID);
// Get the next TrackID to be allocated by DecodedStream,
// or the last set TrackID if there is no DecodedStream sink.
TrackID GetNextOutputStreamTrackID();
// Seeks to the decoder to aTarget asynchronously.
RefPtr<MediaDecoder::SeekPromise> InvokeSeek(const SeekTarget& aTarget);
@ -685,10 +679,6 @@ class MediaDecoderStateMachine
// Principal used by output streams. Main thread only.
nsCOMPtr<nsIPrincipal> mOutputStreamPrincipal;
// The next TrackID to be used when a DecodedStream allocates a track.
// Main thread only.
TrackID mNextOutputStreamTrackID = 1;
// Track the current video decode mode.
VideoDecodeMode mVideoDecodeMode;

View file

@ -6,10 +6,11 @@
#include "MediaManager.h"
#include "AudioCaptureStream.h"
#include "AudioDeviceInfo.h"
#include "AudioStreamTrack.h"
#include "MediaStreamGraphImpl.h"
#include "MediaTimer.h"
#include "mozilla/dom/MediaStreamTrack.h"
#include "mozilla/dom/MediaDeviceInfo.h"
#include "MediaStreamListener.h"
#include "nsArray.h"
@ -55,6 +56,7 @@
#include "mozilla/media/MediaChild.h"
#include "mozilla/media/MediaTaskUtils.h"
#include "MediaTrackConstraints.h"
#include "VideoStreamTrack.h"
#include "VideoUtils.h"
#include "ThreadSafeRefcountingWithMainThreadDestruction.h"
#include "nsProxyRelease.h"
@ -177,7 +179,8 @@ namespace mozilla {
LazyLogModule gMediaManagerLog("MediaManager");
#define LOG(...) MOZ_LOG(gMediaManagerLog, LogLevel::Debug, (__VA_ARGS__))
using dom::BasicTrackSource;
class LocalTrackSource;
using dom::CallerType;
using dom::ConstrainDOMStringParameters;
using dom::ConstrainDoubleRange;
@ -210,9 +213,13 @@ using media::Refcountable;
static Atomic<bool> sHasShutdown;
struct DeviceState {
DeviceState(RefPtr<MediaDevice> aDevice, bool aOffWhileDisabled)
: mOffWhileDisabled(aOffWhileDisabled), mDevice(std::move(aDevice)) {
DeviceState(RefPtr<MediaDevice> aDevice,
RefPtr<LocalTrackSource> aTrackSource, bool aOffWhileDisabled)
: mOffWhileDisabled(aOffWhileDisabled),
mDevice(std::move(aDevice)),
mTrackSource(std::move(aTrackSource)) {
MOZ_ASSERT(mDevice);
MOZ_ASSERT(mTrackSource);
}
// true if we have stopped mDevice, this is a terminal state.
@ -250,6 +257,11 @@ struct DeviceState {
// The underlying device we keep state for. Always non-null.
// Threadsafe access, but see method declarations for individual constraints.
const RefPtr<MediaDevice> mDevice;
// The MediaStreamTrackSource for any tracks (original and clones) originating
// from this device. Always non-null. Threadsafe access, but see method
// declarations for individual constraints.
const RefPtr<LocalTrackSource> mTrackSource;
};
/**
@ -315,9 +327,10 @@ class SourceListener : public SupportsWeakPtr<SourceListener> {
/**
* Marks this listener as active and adds itself as a listener to aStream.
*/
void Activate(RefPtr<SourceMediaStream> aStream,
RefPtr<MediaDevice> aAudioDevice,
RefPtr<MediaDevice> aVideoDevice);
void Activate(RefPtr<MediaDevice> aAudioDevice,
RefPtr<LocalTrackSource> aAudioTrackSource,
RefPtr<MediaDevice> aVideoDevice,
RefPtr<LocalTrackSource> aVideoTrackSource);
/**
* Posts a task to initialize and start all associated devices.
@ -377,10 +390,6 @@ class SourceListener : public SupportsWeakPtr<SourceListener> {
*/
void StopSharing();
MediaStream* Stream() const { return mStream; }
SourceMediaStream* GetSourceStream();
MediaDevice* GetAudioDevice() const {
return mAudioDeviceState ? mAudioDeviceState->mDevice.get() : nullptr;
}
@ -389,7 +398,7 @@ class SourceListener : public SupportsWeakPtr<SourceListener> {
return mVideoDeviceState ? mVideoDeviceState->mDevice.get() : nullptr;
}
bool Activated() const { return mStream; }
bool Activated() const { return mAudioDeviceState || mVideoDeviceState; }
bool Stopped() const { return mStopped; }
@ -436,7 +445,6 @@ class SourceListener : public SupportsWeakPtr<SourceListener> {
// No locking needed as they're set on Activate() and never assigned to again.
UniquePtr<DeviceState> mAudioDeviceState;
UniquePtr<DeviceState> mVideoDeviceState;
RefPtr<SourceMediaStream> mStream; // threadsafe refcnt
};
/**
@ -479,9 +487,10 @@ class GetUserMediaWindowListener {
* WindowListener.
*/
void Activate(RefPtr<SourceListener> aListener,
RefPtr<SourceMediaStream> aStream,
RefPtr<MediaDevice> aAudioDevice,
RefPtr<MediaDevice> aVideoDevice) {
RefPtr<LocalTrackSource> aAudioTrackSource,
RefPtr<MediaDevice> aVideoDevice,
RefPtr<LocalTrackSource> aVideoTrackSource) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(aListener);
MOZ_ASSERT(!aListener->Activated());
@ -490,8 +499,8 @@ class GetUserMediaWindowListener {
MOZ_ASSERT(!mActiveListeners.Contains(aListener), "Already activated");
mInactiveListeners.RemoveElement(aListener);
aListener->Activate(std::move(aStream), std::move(aAudioDevice),
std::move(aVideoDevice));
aListener->Activate(std::move(aAudioDevice), std::move(aAudioTrackSource),
std::move(aVideoDevice), std::move(aVideoTrackSource));
mActiveListeners.AppendElement(std::move(aListener));
}
@ -706,6 +715,129 @@ class GetUserMediaWindowListener {
nsTArray<RefPtr<SourceListener>> mActiveListeners;
};
class LocalTrackSource : public MediaStreamTrackSource {
public:
LocalTrackSource(nsIPrincipal* aPrincipal, const nsString& aLabel,
const RefPtr<SourceListener>& aListener,
MediaSourceEnum aSource, MediaStream* aStream,
TrackID aTrackID, RefPtr<PeerIdentity> aPeerIdentity)
: MediaStreamTrackSource(aPrincipal, aLabel),
mSource(aSource),
mStream(aStream),
mTrackID(aTrackID),
mPeerIdentity(std::move(aPeerIdentity)),
mListener(aListener.get()) {}
MediaSourceEnum GetMediaSource() const override { return mSource; }
const PeerIdentity* GetPeerIdentity() const override { return mPeerIdentity; }
RefPtr<MediaStreamTrackSource::ApplyConstraintsPromise> ApplyConstraints(
const MediaTrackConstraints& aConstraints,
CallerType aCallerType) override {
MOZ_ASSERT(NS_IsMainThread());
if (sHasShutdown || !mListener) {
// Track has been stopped, or we are in shutdown. In either case
// there's no observable outcome, so pretend we succeeded.
return MediaStreamTrackSource::ApplyConstraintsPromise::CreateAndResolve(
false, __func__);
}
return mListener->ApplyConstraintsToTrack(mTrackID, aConstraints,
aCallerType);
}
void GetSettings(MediaTrackSettings& aOutSettings) override {
if (mListener) {
mListener->GetSettingsFor(mTrackID, aOutSettings);
}
}
void Stop() override {
if (mListener) {
mListener->StopTrack(mTrackID);
mListener = nullptr;
}
if (!mStream->IsDestroyed()) {
mStream->Destroy();
}
}
void Disable() override {
if (mListener) {
mListener->SetEnabledFor(mTrackID, false);
}
}
void Enable() override {
if (mListener) {
mListener->SetEnabledFor(mTrackID, true);
}
}
const MediaSourceEnum mSource;
const RefPtr<MediaStream> mStream;
const TrackID mTrackID;
const RefPtr<const PeerIdentity> mPeerIdentity;
protected:
~LocalTrackSource() {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mStream->IsDestroyed());
}
// This is a weak pointer to avoid having the SourceListener (which may
// have references to threads and threadpools) kept alive by DOM-objects
// that may have ref-cycles and thus are released very late during
// shutdown, even after xpcom-shutdown-threads. See bug 1351655 for what
// can happen.
WeakPtr<SourceListener> mListener;
};
class AudioCaptureTrackSource : public LocalTrackSource {
public:
AudioCaptureTrackSource(nsIPrincipal* aPrincipal, nsPIDOMWindowInner* aWindow,
const nsString& aLabel,
AudioCaptureStream* aAudioCaptureStream,
RefPtr<PeerIdentity> aPeerIdentity)
: LocalTrackSource(aPrincipal, aLabel, nullptr,
MediaSourceEnum::AudioCapture, aAudioCaptureStream,
kAudioTrack, std::move(aPeerIdentity)),
mWindow(aWindow),
mAudioCaptureStream(aAudioCaptureStream) {
mAudioCaptureStream->Start();
mAudioCaptureStream->Graph()->RegisterCaptureStreamForWindow(
mWindow->WindowID(), mAudioCaptureStream);
mWindow->SetAudioCapture(true);
}
void Stop() override {
MOZ_ASSERT(NS_IsMainThread());
if (!mAudioCaptureStream->IsDestroyed()) {
MOZ_ASSERT(mWindow);
mWindow->SetAudioCapture(false);
mAudioCaptureStream->Graph()->UnregisterCaptureStreamForWindow(
mWindow->WindowID());
mWindow = nullptr;
}
// LocalTrackSource destroys the stream.
LocalTrackSource::Stop();
MOZ_ASSERT(mAudioCaptureStream->IsDestroyed());
}
ProcessedMediaStream* InputStream() const {
return mAudioCaptureStream.get();
}
protected:
~AudioCaptureTrackSource() {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mAudioCaptureStream->IsDestroyed());
}
RefPtr<nsPIDOMWindowInner> mWindow;
const RefPtr<AudioCaptureStream> mAudioCaptureStream;
};
/**
* nsIMediaDevice implementation.
*/
@ -1117,142 +1249,62 @@ class GetUserMediaStreamRunnable : public Runnable {
MediaStreamGraph* msg = MediaStreamGraph::GetInstance(
graphDriverType, window, MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
RefPtr<DOMMediaStream> domStream;
RefPtr<SourceMediaStream> stream;
auto domStream = MakeRefPtr<DOMMediaStream>(window);
RefPtr<LocalTrackSource> audioTrackSource;
RefPtr<LocalTrackSource> videoTrackSource;
nsCOMPtr<nsIPrincipal> principal;
if (mPeerIdentity) {
principal = NullPrincipal::CreateWithInheritedAttributes(
window->GetExtantDoc()->NodePrincipal());
} else {
principal = window->GetExtantDoc()->NodePrincipal();
}
RefPtr<GenericNonExclusivePromise> firstFramePromise;
// AudioCapture is a special case, here, in the sense that we're not really
// using the audio source and the SourceMediaStream, which acts as
// placeholders. We re-route a number of stream internaly in the MSG and mix
// them down instead.
if (mAudioDevice &&
mAudioDevice->GetMediaSource() == MediaSourceEnum::AudioCapture) {
// AudioCapture is a special case, here, in the sense that we're not
// really using the audio source and the SourceMediaStream, which acts as
// placeholders. We re-route a number of streams internally in the MSG and
// mix them down instead.
NS_WARNING(
"MediaCaptureWindowState doesn't handle "
"MediaSourceEnum::AudioCapture. This must be fixed with UX "
"before shipping.");
// It should be possible to pipe the capture stream to anything. CORS is
// not a problem here, we got explicit user content.
nsCOMPtr<nsIPrincipal> principal =
window->GetExtantDoc()->NodePrincipal();
domStream = DOMMediaStream::CreateAudioCaptureStreamAsInput(
window, principal, msg);
stream = msg->CreateSourceStream(); // Placeholder
msg->RegisterCaptureStreamForWindow(
mWindowID, domStream->GetInputStream()->AsProcessedStream());
window->SetAudioCapture(true);
auto audioCaptureSource = MakeRefPtr<AudioCaptureTrackSource>(
principal, window, NS_LITERAL_STRING("Window audio capture"),
msg->CreateAudioCaptureStream(kAudioTrack), mPeerIdentity);
audioTrackSource = audioCaptureSource;
RefPtr<MediaStreamTrack> track =
new dom::AudioStreamTrack(window, audioCaptureSource->InputStream(),
kAudioTrack, audioCaptureSource);
domStream->AddTrackInternal(track);
} else {
class LocalTrackSource : public MediaStreamTrackSource {
public:
LocalTrackSource(nsIPrincipal* aPrincipal, const nsString& aLabel,
const RefPtr<SourceListener>& aListener,
MediaSourceEnum aSource, TrackID aTrackID,
RefPtr<PeerIdentity> aPeerIdentity)
: MediaStreamTrackSource(aPrincipal, aLabel),
mListener(aListener.get()),
mSource(aSource),
mTrackID(aTrackID),
mPeerIdentity(std::move(aPeerIdentity)) {}
MediaSourceEnum GetMediaSource() const override { return mSource; }
const PeerIdentity* GetPeerIdentity() const override {
return mPeerIdentity;
}
RefPtr<MediaStreamTrackSource::ApplyConstraintsPromise>
ApplyConstraints(const MediaTrackConstraints& aConstraints,
CallerType aCallerType) override {
MOZ_ASSERT(NS_IsMainThread());
if (sHasShutdown || !mListener) {
// Track has been stopped, or we are in shutdown. In either case
// there's no observable outcome, so pretend we succeeded.
return MediaStreamTrackSource::ApplyConstraintsPromise::
CreateAndResolve(false, __func__);
}
return mListener->ApplyConstraintsToTrack(mTrackID, aConstraints,
aCallerType);
}
void GetSettings(MediaTrackSettings& aOutSettings) override {
if (mListener) {
mListener->GetSettingsFor(mTrackID, aOutSettings);
}
}
void Stop() override {
if (mListener) {
mListener->StopTrack(mTrackID);
mListener = nullptr;
}
}
void Disable() override {
if (mListener) {
mListener->SetEnabledFor(mTrackID, false);
}
}
void Enable() override {
if (mListener) {
mListener->SetEnabledFor(mTrackID, true);
}
}
protected:
~LocalTrackSource() {}
// This is a weak pointer to avoid having the SourceListener (which may
// have references to threads and threadpools) kept alive by DOM-objects
// that may have ref-cycles and thus are released very late during
// shutdown, even after xpcom-shutdown-threads. See bug 1351655 for what
// can happen.
WeakPtr<SourceListener> mListener;
const MediaSourceEnum mSource;
const TrackID mTrackID;
const RefPtr<const PeerIdentity> mPeerIdentity;
};
nsCOMPtr<nsIPrincipal> principal;
if (mPeerIdentity) {
principal = NullPrincipal::CreateWithInheritedAttributes(
window->GetExtantDoc()->NodePrincipal());
} else {
principal = window->GetExtantDoc()->NodePrincipal();
}
// Normal case, connect the source stream to the track union stream to
// avoid us blocking. Pass a simple TrackSourceGetter for potential
// fake tracks. Apart from them gUM never adds tracks dynamically.
domStream = DOMMediaStream::CreateSourceStreamAsInput(window, msg);
stream = domStream->GetInputStream()->AsSourceStream();
if (mAudioDevice) {
nsString audioDeviceName;
mAudioDevice->GetName(audioDeviceName);
const MediaSourceEnum source = mAudioDevice->GetMediaSource();
RefPtr<MediaStreamTrackSource> audioSource =
new LocalTrackSource(principal, audioDeviceName, mSourceListener,
source, kAudioTrack, mPeerIdentity);
RefPtr<MediaStream> stream = msg->CreateSourceStream();
audioTrackSource = new LocalTrackSource(
principal, audioDeviceName, mSourceListener,
mAudioDevice->GetMediaSource(), stream, kAudioTrack, mPeerIdentity);
MOZ_ASSERT(IsOn(mConstraints.mAudio));
RefPtr<MediaStreamTrack> track = domStream->CreateDOMTrack(
kAudioTrack, MediaSegment::AUDIO, audioSource,
RefPtr<MediaStreamTrack> track = new dom::AudioStreamTrack(
window, stream, kAudioTrack, audioTrackSource,
GetInvariant(mConstraints.mAudio));
domStream->AddTrackInternal(track);
}
if (mVideoDevice) {
nsString videoDeviceName;
mVideoDevice->GetName(videoDeviceName);
const MediaSourceEnum source = mVideoDevice->GetMediaSource();
RefPtr<MediaStreamTrackSource> videoSource =
new LocalTrackSource(principal, videoDeviceName, mSourceListener,
source, kVideoTrack, mPeerIdentity);
RefPtr<MediaStream> stream = msg->CreateSourceStream();
videoTrackSource = new LocalTrackSource(
principal, videoDeviceName, mSourceListener,
mVideoDevice->GetMediaSource(), stream, kVideoTrack, mPeerIdentity);
MOZ_ASSERT(IsOn(mConstraints.mVideo));
RefPtr<MediaStreamTrack> track = domStream->CreateDOMTrack(
kVideoTrack, MediaSegment::VIDEO, videoSource,
RefPtr<MediaStreamTrack> track = new dom::VideoStreamTrack(
window, stream, kVideoTrack, videoTrackSource,
GetInvariant(mConstraints.mVideo));
domStream->AddTrackInternal(track);
switch (source) {
switch (mVideoDevice->GetMediaSource()) {
case MediaSourceEnum::Browser:
case MediaSourceEnum::Screen:
case MediaSourceEnum::Window:
@ -1266,7 +1318,8 @@ class GetUserMediaStreamRunnable : public Runnable {
}
}
if (!domStream || !stream || sHasShutdown) {
if (!domStream || (!audioTrackSource && !videoTrackSource) ||
sHasShutdown) {
LOG("Returning error for getUserMedia() - no stream");
mHolder.Reject(MakeRefPtr<MediaMgrError>(
@ -1280,8 +1333,9 @@ class GetUserMediaStreamRunnable : public Runnable {
// Activate our source listener. We'll call Start() on the source when we
// get a callback that the MediaStream has started consuming. The listener
// is freed when the page is invalidated (on navigation or close).
mWindowListener->Activate(mSourceListener, stream, mAudioDevice,
mVideoDevice);
mWindowListener->Activate(mSourceListener, mAudioDevice,
std::move(audioTrackSource), mVideoDevice,
std::move(videoTrackSource));
nsTArray<RefPtr<MediaStreamTrack>> tracks(2);
domStream->GetTracks(tracks);
@ -4084,9 +4138,10 @@ void SourceListener::Register(GetUserMediaWindowListener* aListener) {
mWindowListener = aListener;
}
void SourceListener::Activate(RefPtr<SourceMediaStream> aStream,
RefPtr<MediaDevice> aAudioDevice,
RefPtr<MediaDevice> aVideoDevice) {
void SourceListener::Activate(RefPtr<MediaDevice> aAudioDevice,
RefPtr<LocalTrackSource> aAudioTrackSource,
RefPtr<MediaDevice> aVideoDevice,
RefPtr<LocalTrackSource> aVideoTrackSource) {
MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
LOG("SourceListener %p activating audio=%p video=%p", this,
@ -4096,14 +4151,14 @@ void SourceListener::Activate(RefPtr<SourceMediaStream> aStream,
MOZ_ASSERT(!Activated(), "Already activated");
mMainThreadCheck = GetCurrentVirtualThread();
mStream = std::move(aStream);
if (aAudioDevice) {
bool offWhileDisabled =
aAudioDevice->GetMediaSource() == MediaSourceEnum::Microphone &&
Preferences::GetBool(
"media.getusermedia.microphone.off_while_disabled.enabled", true);
mAudioDeviceState =
MakeUnique<DeviceState>(std::move(aAudioDevice), offWhileDisabled);
MakeUnique<DeviceState>(std::move(aAudioDevice),
std::move(aAudioTrackSource), offWhileDisabled);
}
if (aVideoDevice) {
@ -4112,7 +4167,8 @@ void SourceListener::Activate(RefPtr<SourceMediaStream> aStream,
Preferences::GetBool(
"media.getusermedia.camera.off_while_disabled.enabled", true);
mVideoDeviceState =
MakeUnique<DeviceState>(std::move(aVideoDevice), offWhileDisabled);
MakeUnique<DeviceState>(std::move(aVideoDevice),
std::move(aVideoTrackSource), offWhileDisabled);
}
}
@ -4123,24 +4179,28 @@ SourceListener::InitializeAsync() {
return MediaManager::PostTask<SourceListenerPromise>(
__func__,
[stream = mStream, principal = GetPrincipalHandle(),
[principal = GetPrincipalHandle(),
audioDevice =
mAudioDeviceState ? mAudioDeviceState->mDevice : nullptr,
audioStream = mAudioDeviceState
? mAudioDeviceState->mTrackSource->mStream
: nullptr,
videoDevice =
mVideoDeviceState ? mVideoDeviceState->mDevice : nullptr](
mVideoDeviceState ? mVideoDeviceState->mDevice : nullptr,
videoStream = mVideoDeviceState
? mVideoDeviceState->mTrackSource->mStream
: nullptr](
MozPromiseHolder<SourceListenerPromise>& aHolder) {
if (audioDevice) {
audioDevice->SetTrack(stream, kAudioTrack, principal);
audioDevice->SetTrack(audioStream->AsSourceStream(),
kAudioTrack, principal);
}
if (videoDevice) {
videoDevice->SetTrack(stream, kVideoTrack, principal);
videoDevice->SetTrack(videoStream->AsSourceStream(),
kVideoTrack, principal);
}
// SetTrack() queued the tracks. We add them synchronously here
// to avoid races.
stream->FinishAddTracks();
if (audioDevice) {
nsresult rv = audioDevice->Start();
if (rv == NS_ERROR_NOT_AVAILABLE) {
@ -4493,21 +4553,11 @@ void SourceListener::StopSharing() {
}
if (mAudioDeviceState && mAudioDeviceState->mDevice->GetMediaSource() ==
MediaSourceEnum::AudioCapture) {
uint64_t windowID = mWindowListener->WindowID();
auto* window = nsGlobalWindowInner::GetInnerWindowWithId(windowID);
MOZ_RELEASE_ASSERT(window);
window->SetAudioCapture(false);
MediaStreamGraph* graph = mStream->Graph();
graph->UnregisterCaptureStreamForWindow(windowID);
mStream->Destroy();
static_cast<AudioCaptureTrackSource*>(mAudioDeviceState->mTrackSource.get())
->Stop();
}
}
SourceMediaStream* SourceListener::GetSourceStream() {
NS_ASSERTION(mStream, "Getting stream from never-activated SourceListener");
return mStream;
}
bool SourceListener::CapturingVideo() const {
MOZ_ASSERT(NS_IsMainThread());
return Activated() && mVideoDeviceState && !mVideoDeviceState->mStopped &&

View file

@ -95,10 +95,6 @@ class MediaDevice : public nsIMediaDevice {
nsresult Stop();
nsresult Deallocate();
void Pull(const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
const PrincipalHandle& aPrincipal);
void GetSettings(dom::MediaTrackSettings& aOutSettings) const;
dom::MediaSourceEnum GetMediaSource() const;

View file

@ -799,7 +799,7 @@ class MediaRecorder::Session : public PrincipalChangeObserver<MediaStreamTrack>,
LOG(LogLevel::Debug,
("Session.MediaTracksReady track type = (%d)", trackTypes));
InitEncoder(trackTypes, mMediaStream->GraphRate());
InitEncoder(trackTypes, tracks[0]->Graph()->GraphRate());
}
void ConnectMediaStreamTrack(MediaStreamTrack& aTrack) {
@ -1309,12 +1309,12 @@ void MediaRecorder::Start(const Optional<uint32_t>& aTimeSlice,
if (!tracks.IsEmpty()) {
// If there are tracks already available that we're not allowed
// to record, we should throw a security error.
RefPtr<nsIPrincipal> streamPrincipal = mDOMStream->GetPrincipal();
bool subsumes = false;
nsPIDOMWindowInner* window;
Document* doc;
if (!(window = GetOwner()) || !(doc = window->GetExtantDoc()) ||
NS_FAILED(doc->NodePrincipal()->Subsumes(mDOMStream->GetPrincipal(),
&subsumes)) ||
NS_FAILED(doc->NodePrincipal()->Subsumes(streamPrincipal, &subsumes)) ||
!subsumes) {
aResult.Throw(NS_ERROR_DOM_SECURITY_ERR);
return;

View file

@ -1863,7 +1863,6 @@ MediaStream::MediaStream()
mMainThreadFinished(false),
mFinishedNotificationSent(false),
mMainThreadDestroyed(false),
mNrOfMainThreadUsers(0),
mGraph(nullptr) {
MOZ_COUNT_CTOR(MediaStream);
}
@ -2015,8 +2014,6 @@ void MediaStream::DestroyImpl() {
}
void MediaStream::Destroy() {
NS_ASSERTION(mNrOfMainThreadUsers == 0,
"Do not mix Destroy() and RegisterUser()/UnregisterUser()");
// Keep this stream alive until we leave this method
RefPtr<MediaStream> kungFuDeathGrip = this;
@ -2038,23 +2035,6 @@ void MediaStream::Destroy() {
mMainThreadDestroyed = true;
}
void MediaStream::RegisterUser() {
MOZ_ASSERT(NS_IsMainThread());
++mNrOfMainThreadUsers;
}
void MediaStream::UnregisterUser() {
MOZ_ASSERT(NS_IsMainThread());
--mNrOfMainThreadUsers;
NS_ASSERTION(mNrOfMainThreadUsers >= 0, "Double-removal of main thread user");
NS_ASSERTION(!IsDestroyed(),
"Do not mix Destroy() and RegisterUser()/UnregisterUser()");
if (mNrOfMainThreadUsers == 0) {
Destroy();
}
}
void MediaStream::AddAudioOutput(void* aKey) {
class Message : public ControlMessage {
public:
@ -3524,7 +3504,7 @@ ProcessedMediaStream* MediaStreamGraph::CreateTrackUnionStream() {
return stream;
}
ProcessedMediaStream* MediaStreamGraph::CreateAudioCaptureStream(
AudioCaptureStream* MediaStreamGraph::CreateAudioCaptureStream(
TrackID aTrackId) {
AudioCaptureStream* stream = new AudioCaptureStream(aTrackId);
AddStream(stream);

View file

@ -28,7 +28,8 @@ class nsPIDOMWindowInner;
namespace mozilla {
class AsyncLogger;
};
class AudioCaptureStream;
}; // namespace mozilla
extern mozilla::AsyncLogger gMSGTraceLogger;
@ -365,17 +366,8 @@ class MediaStream : public mozilla::LinkedListElement<MediaStream> {
void RunAfterPendingUpdates(already_AddRefed<nsIRunnable> aRunnable);
// Signal that the client is done with this MediaStream. It will be deleted
// later. Do not mix usage of Destroy() with RegisterUser()/UnregisterUser().
// That will cause the MediaStream to be destroyed twice, which will cause
// some assertions to fail.
// later.
virtual void Destroy();
// Signal that a client is using this MediaStream. Useful to not have to
// explicitly manage ownership (responsibility to Destroy()) when there are
// multiple clients using a MediaStream.
void RegisterUser();
// Signal that a client no longer needs this MediaStream. When the number of
// clients using this MediaStream reaches 0, it will be destroyed.
void UnregisterUser();
// Returns the main-thread's view of how much data has been processed by
// this stream.
@ -618,7 +610,6 @@ class MediaStream : public mozilla::LinkedListElement<MediaStream> {
bool mMainThreadFinished;
bool mFinishedNotificationSent;
bool mMainThreadDestroyed;
int mNrOfMainThreadUsers;
// Our media stream graph. null if destroyed on the graph thread.
MediaStreamGraphImpl* mGraph;
@ -1226,7 +1217,7 @@ class MediaStreamGraph {
/**
* Create a stream that will mix all its audio input.
*/
ProcessedMediaStream* CreateAudioCaptureStream(TrackID aTrackId);
AudioCaptureStream* CreateAudioCaptureStream(TrackID aTrackId);
/**
* Add a new stream to the graph. Main thread.

View file

@ -139,7 +139,7 @@ class MediaStreamTrack::MSGListener : public MediaStreamTrackListener {
WeakPtr<MediaStreamTrack> mTrack;
};
class TrackSink : public MediaStreamTrackSource::Sink {
class MediaStreamTrack::TrackSink : public MediaStreamTrackSource::Sink {
public:
explicit TrackSink(MediaStreamTrack* aTrack) : mTrack(aTrack) {}
@ -168,27 +168,37 @@ class TrackSink : public MediaStreamTrackSource::Sink {
}
}
void OverrideEnded() override {
if (mTrack) {
mTrack->OverrideEnded();
}
}
private:
WeakPtr<MediaStreamTrack> mTrack;
};
MediaStreamTrack::MediaStreamTrack(DOMMediaStream* aStream, TrackID aTrackID,
TrackID aInputTrackID,
MediaStreamTrack::MediaStreamTrack(nsPIDOMWindowInner* aWindow,
MediaStream* aInputStream, TrackID aTrackID,
MediaStreamTrackSource* aSource,
const MediaTrackConstraints& aConstraints)
: mOwningStream(aStream),
: mWindow(aWindow),
mInputStream(aInputStream),
mStream(mInputStream ? mInputStream->Graph()->CreateTrackUnionStream()
: nullptr),
mPort(mStream ? mStream->AllocateInputPort(mInputStream) : nullptr),
mTrackID(aTrackID),
mInputTrackID(aInputTrackID),
mSource(aSource),
mSink(MakeUnique<TrackSink>(this)),
mPrincipal(aSource->GetPrincipal()),
mReadyState(MediaStreamTrackState::Live),
mReadyState(mStream ? MediaStreamTrackState::Live
: MediaStreamTrackState::Ended),
mEnabled(true),
mMuted(false),
mConstraints(aConstraints) {
GetSource().RegisterSink(mSink.get());
if (!Ended()) {
GetSource().RegisterSink(mSink.get());
if (GetOwnedStream()) {
mMSGListener = new MSGListener(this);
AddListener(mMSGListener);
}
@ -211,16 +221,7 @@ MediaStreamTrack::MediaStreamTrack(DOMMediaStream* aStream, TrackID aTrackID,
MediaStreamTrack::~MediaStreamTrack() { Destroy(); }
void MediaStreamTrack::Destroy() {
mReadyState = MediaStreamTrackState::Ended;
if (mSource) {
mSource->UnregisterSink(mSink.get());
}
if (mMSGListener) {
if (GetOwnedStream()) {
RemoveListener(mMSGListener);
}
mMSGListener = nullptr;
}
SetReadyState(MediaStreamTrackState::Ended);
// Remove all listeners -- avoid iterating over the list we're removing from
const nsTArray<RefPtr<MediaStreamTrackListener>> trackListeners(
mTrackListeners);
@ -240,18 +241,16 @@ NS_IMPL_CYCLE_COLLECTION_CLASS(MediaStreamTrack)
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(MediaStreamTrack,
DOMEventTargetHelper)
tmp->Destroy();
NS_IMPL_CYCLE_COLLECTION_UNLINK(mOwningStream)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mWindow)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mSource)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mOriginalTrack)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mPrincipal)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mPendingPrincipal)
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(MediaStreamTrack,
DOMEventTargetHelper)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOwningStream)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mWindow)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSource)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOriginalTrack)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPrincipal)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPendingPrincipal)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
@ -261,11 +260,6 @@ NS_IMPL_RELEASE_INHERITED(MediaStreamTrack, DOMEventTargetHelper)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(MediaStreamTrack)
NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
nsPIDOMWindowInner* MediaStreamTrack::GetParentObject() const {
MOZ_RELEASE_ASSERT(mOwningStream);
return mOwningStream->GetParentObject();
}
JSObject* MediaStreamTrack::WrapObject(JSContext* aCx,
JS::Handle<JSObject*> aGivenProto) {
return MediaStreamTrack_Binding::Wrap(aCx, this, aGivenProto);
@ -282,9 +276,14 @@ void MediaStreamTrack::SetEnabled(bool aEnabled) {
}
mEnabled = aEnabled;
GetOwnedStream()->SetTrackEnabled(
mTrackID,
mEnabled ? DisabledTrackMode::ENABLED : DisabledTrackMode::SILENCE_BLACK);
if (Ended()) {
return;
}
mStream->SetTrackEnabled(mTrackID, mEnabled
? DisabledTrackMode::ENABLED
: DisabledTrackMode::SILENCE_BLACK);
GetSource().SinkEnabledStateChanged();
}
@ -296,21 +295,7 @@ void MediaStreamTrack::Stop() {
return;
}
if (!mSource) {
MOZ_ASSERT(false);
return;
}
mSource->UnregisterSink(mSink.get());
MOZ_ASSERT(mOwningStream,
"Every MediaStreamTrack needs an owning DOMMediaStream");
DOMMediaStream::TrackPort* port = mOwningStream->FindOwnedTrackPort(*this);
MOZ_ASSERT(port,
"A MediaStreamTrack must exist in its owning DOMMediaStream");
Unused << port->BlockSourceTrackId(mInputTrackID, BlockingMode::CREATION);
mReadyState = MediaStreamTrackState::Ended;
SetReadyState(MediaStreamTrackState::Ended);
NotifyEnded();
}
@ -346,8 +331,7 @@ already_AddRefed<Promise> MediaStreamTrack::ApplyConstraints(
this, NS_ConvertUTF16toUTF8(str).get()));
}
nsPIDOMWindowInner* window = mOwningStream->GetParentObject();
nsIGlobalObject* go = window ? window->AsGlobal() : nullptr;
nsIGlobalObject* go = mWindow ? mWindow->AsGlobal() : nullptr;
RefPtr<Promise> promise = Promise::Create(go, aRv);
if (aRv.Failed()) {
@ -367,29 +351,28 @@ already_AddRefed<Promise> MediaStreamTrack::ApplyConstraints(
->Then(
GetCurrentThreadSerialEventTarget(), __func__,
[this, self, promise, aConstraints](bool aDummy) {
nsPIDOMWindowInner* window = mOwningStream->GetParentObject();
if (!window || !window->IsCurrentInnerWindow()) {
if (!mWindow || !mWindow->IsCurrentInnerWindow()) {
return; // Leave Promise pending after navigation by design.
}
mConstraints = aConstraints;
promise->MaybeResolve(false);
},
[this, self, promise](const RefPtr<MediaMgrError>& aError) {
nsPIDOMWindowInner* window = mOwningStream->GetParentObject();
if (!window || !window->IsCurrentInnerWindow()) {
if (!mWindow || !mWindow->IsCurrentInnerWindow()) {
return; // Leave Promise pending after navigation by design.
}
promise->MaybeReject(MakeRefPtr<MediaStreamError>(window, *aError));
promise->MaybeReject(
MakeRefPtr<MediaStreamError>(mWindow, *aError));
});
return promise.forget();
}
MediaStreamGraph* MediaStreamTrack::Graph() {
return GetOwnedStream()->Graph();
}
ProcessedMediaStream* MediaStreamTrack::GetStream() const { return mStream; }
MediaStreamGraphImpl* MediaStreamTrack::GraphImpl() {
return GetOwnedStream()->GraphImpl();
MediaStreamGraph* MediaStreamTrack::Graph() const { return mStream->Graph(); }
MediaStreamGraphImpl* MediaStreamTrack::GraphImpl() const {
return mStream->GraphImpl();
}
void MediaStreamTrack::SetPrincipal(nsIPrincipal* aPrincipal) {
@ -506,16 +489,11 @@ void MediaStreamTrack::RemoveConsumer(MediaStreamTrackConsumer* aConsumer) {
}
already_AddRefed<MediaStreamTrack> MediaStreamTrack::Clone() {
// MediaStreamTracks are currently governed by streams, so we need a dummy
// DOMMediaStream to own our track clone.
RefPtr<DOMMediaStream> newStream =
new DOMMediaStream(mOwningStream->GetParentObject());
MediaStreamGraph* graph = Graph();
newStream->InitOwnedStreamCommon(graph);
newStream->InitPlaybackStreamCommon(graph);
return newStream->CloneDOMTrack(*this, mTrackID);
RefPtr<MediaStreamTrack> newTrack = CloneInternal();
newTrack->SetEnabled(Enabled());
newTrack->SetMuted(Muted());
MOZ_DIAGNOSTIC_ASSERT(newTrack->ReadyState() == ReadyState());
return newTrack.forget();
}
void MediaStreamTrack::SetReadyState(MediaStreamTrackState aState) {
@ -523,9 +501,27 @@ void MediaStreamTrack::SetReadyState(MediaStreamTrackState aState) {
aState == MediaStreamTrackState::Live),
"We don't support overriding the ready state from ended to live");
if (Ended()) {
return;
}
if (mReadyState == MediaStreamTrackState::Live &&
aState == MediaStreamTrackState::Ended && mSource) {
mSource->UnregisterSink(mSink.get());
aState == MediaStreamTrackState::Ended) {
if (mSource) {
mSource->UnregisterSink(mSink.get());
}
if (mMSGListener) {
RemoveListener(mMSGListener);
mMSGListener = nullptr;
}
if (mPort) {
mPort->Destroy();
mPort = nullptr;
}
if (mStream) {
mStream->Destroy();
mStream = nullptr;
}
}
mReadyState = aState;
@ -540,63 +536,33 @@ void MediaStreamTrack::OverrideEnded() {
LOG(LogLevel::Info, ("MediaStreamTrack %p ended", this));
if (!mSource) {
MOZ_ASSERT(false);
return;
}
mSource->UnregisterSink(mSink.get());
if (mMSGListener) {
RemoveListener(mMSGListener);
}
mMSGListener = nullptr;
mReadyState = MediaStreamTrackState::Ended;
SetReadyState(MediaStreamTrackState::Ended);
NotifyEnded();
DispatchTrustedEvent(NS_LITERAL_STRING("ended"));
}
DOMMediaStream* MediaStreamTrack::GetInputDOMStream() {
MediaStreamTrack* originalTrack =
mOriginalTrack ? mOriginalTrack.get() : this;
MOZ_RELEASE_ASSERT(originalTrack->mOwningStream);
return originalTrack->mOwningStream;
}
MediaStream* MediaStreamTrack::GetInputStream() {
DOMMediaStream* inputDOMStream = GetInputDOMStream();
MOZ_RELEASE_ASSERT(inputDOMStream->GetInputStream());
return inputDOMStream->GetInputStream();
}
ProcessedMediaStream* MediaStreamTrack::GetOwnedStream() {
if (!mOwningStream) {
return nullptr;
}
return mOwningStream->GetOwnedStream();
}
void MediaStreamTrack::AddListener(MediaStreamTrackListener* aListener) {
LOG(LogLevel::Debug,
("MediaStreamTrack %p adding listener %p", this, aListener));
MOZ_ASSERT(GetOwnedStream());
GetOwnedStream()->AddTrackListener(aListener, mTrackID);
mTrackListeners.AppendElement(aListener);
if (Ended()) {
return;
}
mStream->AddTrackListener(aListener, mTrackID);
}
void MediaStreamTrack::RemoveListener(MediaStreamTrackListener* aListener) {
LOG(LogLevel::Debug,
("MediaStreamTrack %p removing listener %p", this, aListener));
mTrackListeners.RemoveElement(aListener);
if (GetOwnedStream()) {
GetOwnedStream()->RemoveTrackListener(aListener, mTrackID);
mTrackListeners.RemoveElement(aListener);
if (Ended()) {
return;
}
mStream->RemoveTrackListener(aListener, mTrackID);
}
void MediaStreamTrack::AddDirectListener(
@ -604,43 +570,36 @@ void MediaStreamTrack::AddDirectListener(
LOG(LogLevel::Debug, ("MediaStreamTrack %p (%s) adding direct listener %p to "
"stream %p, track %d",
this, AsAudioStreamTrack() ? "audio" : "video",
aListener, GetOwnedStream(), mTrackID));
MOZ_ASSERT(GetOwnedStream());
GetOwnedStream()->AddDirectTrackListener(aListener, mTrackID);
aListener, mStream.get(), mTrackID));
mDirectTrackListeners.AppendElement(aListener);
if (Ended()) {
return;
}
mStream->AddDirectTrackListener(aListener, mTrackID);
}
void MediaStreamTrack::RemoveDirectListener(
DirectMediaStreamTrackListener* aListener) {
LOG(LogLevel::Debug,
("MediaStreamTrack %p removing direct listener %p from stream %p", this,
aListener, GetOwnedStream()));
aListener, mStream.get()));
mDirectTrackListeners.RemoveElement(aListener);
if (GetOwnedStream()) {
GetOwnedStream()->RemoveDirectTrackListener(aListener, mTrackID);
mDirectTrackListeners.RemoveElement(aListener);
if (Ended()) {
return;
}
mStream->RemoveDirectTrackListener(aListener, mTrackID);
}
already_AddRefed<MediaInputPort> MediaStreamTrack::ForwardTrackContentsTo(
ProcessedMediaStream* aStream, TrackID aDestinationTrackID) {
ProcessedMediaStream* aStream) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_RELEASE_ASSERT(aStream);
RefPtr<MediaInputPort> port = aStream->AllocateInputPort(
GetOwnedStream(), mTrackID, aDestinationTrackID);
RefPtr<MediaInputPort> port =
aStream->AllocateInputPort(mStream, mTrackID, mTrackID);
return port.forget();
}
bool MediaStreamTrack::IsForwardedThrough(MediaInputPort* aPort) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(aPort);
if (!aPort) {
return false;
}
return aPort->GetSource() == GetOwnedStream() &&
aPort->PassTrackThrough(mTrackID);
}
} // namespace dom
} // namespace mozilla
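A consumer-side sketch of the simplified forwarding API (the track and destination stream are assumed to exist; this is not code from the patch):

// Pipe a track's data into an existing ProcessedMediaStream. The destination
// track keeps the same TrackID as the source track.
RefPtr<mozilla::MediaInputPort> port =
    aTrack->ForwardTrackContentsTo(aProcessedStream);
// ... and when the consumer is done with the data:
port->Destroy();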

View file

@ -40,7 +40,6 @@ namespace dom {
class AudioStreamTrack;
class VideoStreamTrack;
class TrackSink;
enum class CallerType : uint32_t;
/**
@ -98,6 +97,12 @@ class MediaStreamTrackSource : public nsISupports {
*/
virtual void MutedChanged(bool aNewState) = 0;
/**
* Called when the MediaStreamTrackSource where this sink is registered has
* stopped producing data for good, i.e., it has ended.
*/
virtual void OverrideEnded() = 0;
protected:
virtual ~Sink() = default;
};
@ -285,6 +290,23 @@ class MediaStreamTrackSource : public nsISupports {
}
}
/**
* Called by a sub class when the source has stopped producing data for good,
* i.e., it has ended. Notifies all sinks.
*/
void OverrideEnded() {
MOZ_ASSERT(NS_IsMainThread());
nsTArray<WeakPtr<Sink>> sinks(mSinks);
for (auto& sink : sinks) {
if (!sink) {
MOZ_ASSERT_UNREACHABLE("Sink was not explicitly removed");
mSinks.RemoveElement(sink);
continue;
}
sink->OverrideEnded();
}
}
// Principal identifying who may access the contents of this source.
nsCOMPtr<nsIPrincipal> mPrincipal;
@ -339,15 +361,45 @@ class MediaStreamTrackConsumer
virtual void NotifyEnded(MediaStreamTrack* aTrack){};
};
// clang-format off
/**
* Class representing a track in a DOMMediaStream.
* DOM wrapper for MediaStreamGraph-MediaStreams.
*
* To account for cloning, a MediaStreamTrack wraps two internal (and chained)
* MediaStreams:
* 1. mInputStream
* - Controlled by the producer of the data in the track. The producer
* decides on lifetime of the MediaStream and the track inside it.
* - It can be any type of MediaStream.
* - Contains one track only.
* 2. mStream
* - A TrackUnionStream representing this MediaStreamTrack.
* - Its data is piped from mInputStream through mPort.
* - Contains one track only.
* - When this MediaStreamTrack is enabled/disabled this is reflected in
* the chunks in the track in mStream.
* - When this MediaStreamTrack has ended, mStream gets destroyed.
* Note that mInputStream is unaffected, such that any clones of mStream
* can live on. When all clones are ended, this is signaled to the
* producer via our MediaStreamTrackSource. It is then likely to destroy
* mInputStream.
*
* A graphical representation of how tracks are connected when cloned follows:
*
* MediaStreamTrack A
* mInputStream mStream
* t1 ---------> t1
* \
* -----
* MediaStreamTrack B \ (clone of A)
* mInputStream \ mStream
* * -> t1
*
* (*) is a copy of A's mInputStream
*/
// clang-format on
class MediaStreamTrack : public DOMEventTargetHelper,
public SupportsWeakPtr<MediaStreamTrack> {
// DOMMediaStream owns MediaStreamTrack instances, and requires access to
// some internal state, e.g., GetInputStream(), GetOwnedStream().
friend class mozilla::DOMMediaStream;
// PeerConnection and friends need to know our owning DOMStream and track id.
friend class mozilla::PeerConnectionImpl;
friend class mozilla::PeerConnectionMedia;
@ -355,14 +407,14 @@ class MediaStreamTrack : public DOMEventTargetHelper,
friend class mozilla::RemoteSourceStreamInfo;
class MSGListener;
class TrackSink;
public:
/**
* aTrackID is the MediaStreamGraph track ID for the track in the
* MediaStream owned by aStream.
* aTrackID is the MediaStreamGraph track ID for the track in aInputStream.
*/
MediaStreamTrack(
DOMMediaStream* aStream, TrackID aTrackID, TrackID aInputTrackID,
nsPIDOMWindowInner* aWindow, MediaStream* aInputStream, TrackID aTrackID,
MediaStreamTrackSource* aSource,
const MediaTrackConstraints& aConstraints = MediaTrackConstraints());
@ -372,7 +424,7 @@ class MediaStreamTrack : public DOMEventTargetHelper,
MOZ_DECLARE_WEAKREFERENCE_TYPENAME(MediaStreamTrack)
nsPIDOMWindowInner* GetParentObject() const;
nsPIDOMWindowInner* GetParentObject() const { return mWindow; }
JSObject* WrapObject(JSContext* aCx,
JS::Handle<JSObject*> aGivenProto) override;
@ -410,39 +462,11 @@ class MediaStreamTrack : public DOMEventTargetHelper,
*/
bool Ended() const { return mReadyState == MediaStreamTrackState::Ended; }
/**
* Forces the ready state to a particular value, for instance when we're
* cloning an already ended track.
*/
void SetReadyState(MediaStreamTrackState aState);
/**
* Notified by the MediaStreamGraph, through our owning MediaStream on the
* main thread.
*
* Note that this sets the track to ended and raises the "ended" event
* synchronously.
*/
void OverrideEnded();
/**
* Get this track's principal.
*/
nsIPrincipal* GetPrincipal() const { return mPrincipal; }
/**
* Called by the MSGListener when this track's PrincipalHandle changes on
* the MediaStreamGraph thread. When the PrincipalHandle matches the pending
* principal we know that the principal change has propagated to consumers.
*/
void NotifyPrincipalHandleChanged(const PrincipalHandle& aPrincipalHandle);
/**
* Called when this track's readyState transitions to "ended".
* Notifies all MediaStreamTrackConsumers that this track ended.
*/
void NotifyEnded();
/**
* Get this track's PeerIdentity.
*/
@ -450,8 +474,9 @@ class MediaStreamTrack : public DOMEventTargetHelper,
return GetSource().GetPeerIdentity();
}
MediaStreamGraph* Graph();
MediaStreamGraphImpl* GraphImpl();
ProcessedMediaStream* GetStream() const;
MediaStreamGraph* Graph() const;
MediaStreamGraphImpl* GraphImpl() const;
MediaStreamTrackSource& GetSource() const {
MOZ_RELEASE_ASSERT(mSource,
@ -463,16 +488,6 @@ class MediaStreamTrack : public DOMEventTargetHelper,
// need to surface this to content.
void AssignId(const nsAString& aID) { mID = aID; }
/**
* Called when mSource's principal has changed.
*/
void PrincipalChanged();
/**
* Called when mSource's muted state has changed.
*/
void MutedChanged(bool aNewState);
/**
* Add a PrincipalChangeObserver to this track.
*
@ -530,24 +545,51 @@ class MediaStreamTrack : public DOMEventTargetHelper,
* MediaStreamTrack represents, to aStream, and returns it.
*/
already_AddRefed<MediaInputPort> ForwardTrackContentsTo(
ProcessedMediaStream* aStream, TrackID aDestinationTrackID = TRACK_ANY);
ProcessedMediaStream* aStream);
/**
* Returns true if this track is connected to aPort and forwarded to aPort's
* output stream.
*/
bool IsForwardedThrough(MediaInputPort* aPort);
void SetMediaStreamSizeListener(DirectMediaStreamTrackListener* aListener);
// Returns the original DOMMediaStream's underlying input stream.
MediaStream* GetInputStream();
TrackID GetInputTrackId() const { return mInputTrackID; }
TrackID GetTrackID() const { return mTrackID; }
protected:
virtual ~MediaStreamTrack();
/**
* Forces the ready state to a particular value, for instance when we're
* cloning an already ended track.
*/
void SetReadyState(MediaStreamTrackState aState);
/**
* Notified by the MediaStreamGraph, through our owning MediaStream on the
* main thread.
*
* Note that this sets the track to ended and raises the "ended" event
* synchronously.
*/
void OverrideEnded();
/**
* Called by the MSGListener when this track's PrincipalHandle changes on
* the MediaStreamGraph thread. When the PrincipalHandle matches the pending
* principal we know that the principal change has propagated to consumers.
*/
void NotifyPrincipalHandleChanged(const PrincipalHandle& aNewPrincipalHandle);
/**
* Called when this track's readyState transitions to "ended".
* Notifies all MediaStreamTrackConsumers that this track ended.
*/
void NotifyEnded();
/**
* Called when mSource's principal has changed.
*/
void PrincipalChanged();
/**
* Called when mSource's muted state has changed.
*/
void MutedChanged(bool aNewState);
/**
* Sets this track's muted state without raising any events.
*/
@ -555,37 +597,38 @@ class MediaStreamTrack : public DOMEventTargetHelper,
virtual void Destroy();
// Returns the owning DOMMediaStream's underlying owned stream.
ProcessedMediaStream* GetOwnedStream();
// Returns the original DOMMediaStream. If this track is a clone,
// the original track's owning DOMMediaStream is returned.
DOMMediaStream* GetInputDOMStream();
/**
* Sets the principal and notifies PrincipalChangeObservers if it changes.
*/
void SetPrincipal(nsIPrincipal* aPrincipal);
/**
* Creates a new MediaStreamTrack with the same type, input track ID and
* source as this MediaStreamTrack.
* aTrackID is the TrackID the new track will have in its owned stream.
* Creates a new MediaStreamTrack with the same kind, input stream, input
* track ID and source as this MediaStreamTrack.
*/
virtual already_AddRefed<MediaStreamTrack> CloneInternal(
DOMMediaStream* aOwningStream, TrackID aTrackID) = 0;
virtual already_AddRefed<MediaStreamTrack> CloneInternal() = 0;
nsTArray<PrincipalChangeObserver<MediaStreamTrack>*>
mPrincipalChangeObservers;
nsTArray<WeakPtr<MediaStreamTrackConsumer>> mConsumers;
RefPtr<DOMMediaStream> mOwningStream;
TrackID mTrackID;
TrackID mInputTrackID;
// We need this to track our parent object.
nsCOMPtr<nsPIDOMWindowInner> mWindow;
// The input MediaStream assigned us by the data producer. Valid until we end.
// Owned by the producer.
RefPtr<MediaStream> mInputStream;
// The MediaStream representing this MediaStreamTrack in the MediaStreamGraph.
// Valid until we end. Owned by us.
RefPtr<ProcessedMediaStream> mStream;
// The MediaInputPort connecting mInputStream to mStream. Valid until we end.
// Owned by us.
RefPtr<MediaInputPort> mPort;
// The TrackID of this track in mInputStream and mStream.
const TrackID mTrackID;
RefPtr<MediaStreamTrackSource> mSource;
const UniquePtr<TrackSink> mSink;
RefPtr<MediaStreamTrack> mOriginalTrack;
nsCOMPtr<nsIPrincipal> mPrincipal;
nsCOMPtr<nsIPrincipal> mPendingPrincipal;
RefPtr<MSGListener> mMSGListener;
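
Editorial aside, not part of the patch: the members above show the new ownership model, where each track owns a ProcessedMediaStream fed from the producer's input stream through a port. A minimal, hypothetical producer-side sketch (ExampleTrackSource, graph, window and principal are assumed names) mirrors what OutputStreamData::AddTrack and MediaStreamAudioDestinationNode do later in this patch:

  RefPtr<SourceMediaStream> inputStream = graph->CreateSourceStream();
  auto source = MakeRefPtr<ExampleTrackSource>(principal, inputStream);
  auto track = MakeRefPtr<AudioStreamTrack>(window, inputStream,
                                            /* aTrackID */ 1, source);
  auto domStream = MakeAndAddRef<DOMMediaStream>(window);
  domStream->AddTrackInternal(track);
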


@ -0,0 +1,73 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaStreamWindowCapturer.h"
#include "AudioStreamTrack.h"
#include "DOMMediaStream.h"
#include "MediaStreamGraph.h"
namespace mozilla {
using dom::AudioStreamTrack;
using dom::MediaStreamTrack;
MediaStreamWindowCapturer::CapturedTrack::CapturedTrack(
MediaStreamTrack* aTrack, uint64_t aWindowID)
: mTrack(aTrack),
mPort(aTrack->Graph()->ConnectToCaptureStream(aWindowID,
aTrack->GetStream())) {}
MediaStreamWindowCapturer::CapturedTrack::~CapturedTrack() { mPort->Destroy(); }
MediaStreamWindowCapturer::MediaStreamWindowCapturer(DOMMediaStream* aStream,
uint64_t aWindowId)
: mStream(aStream), mWindowId(aWindowId) {
mStream->RegisterTrackListener(this);
nsTArray<RefPtr<AudioStreamTrack>> tracks;
mStream->GetAudioTracks(tracks);
for (const auto& t : tracks) {
if (t->Ended()) {
continue;
}
AddTrack(t);
}
}
MediaStreamWindowCapturer::~MediaStreamWindowCapturer() {
if (mStream) {
mStream->UnregisterTrackListener(this);
}
}
void MediaStreamWindowCapturer::NotifyTrackAdded(
const RefPtr<MediaStreamTrack>& aTrack) {
if (AudioStreamTrack* at = aTrack->AsAudioStreamTrack()) {
AddTrack(at);
}
}
void MediaStreamWindowCapturer::NotifyTrackRemoved(
const RefPtr<MediaStreamTrack>& aTrack) {
if (AudioStreamTrack* at = aTrack->AsAudioStreamTrack()) {
RemoveTrack(at);
}
}
void MediaStreamWindowCapturer::AddTrack(AudioStreamTrack* aTrack) {
if (aTrack->Ended()) {
return;
}
mTracks.AppendElement(MakeUnique<CapturedTrack>(aTrack, mWindowId));
}
void MediaStreamWindowCapturer::RemoveTrack(AudioStreamTrack* aTrack) {
for (size_t i = mTracks.Length(); i > 0; --i) {
if (mTracks[i - 1]->mTrack == aTrack) {
mTracks.RemoveElementAt(i - 1);
break;
}
}
}
} // namespace mozilla


@ -0,0 +1,49 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MediaStreamWindowCapturer_h
#define MediaStreamWindowCapturer_h
#include "DOMMediaStream.h"
namespace mozilla {
namespace dom {
class AudioStreamTrack;
class MediaStreamTrack;
} // namespace dom
/**
* Given a DOMMediaStream and a window id, this class will pipe the audio from
* all live audio tracks in the stream to the MediaStreamGraph's window capture
* mechanism.
*/
class MediaStreamWindowCapturer : public DOMMediaStream::TrackListener {
public:
MediaStreamWindowCapturer(DOMMediaStream* aStream, uint64_t aWindowId);
~MediaStreamWindowCapturer();
void NotifyTrackAdded(const RefPtr<dom::MediaStreamTrack>& aTrack) override;
void NotifyTrackRemoved(const RefPtr<dom::MediaStreamTrack>& aTrack) override;
struct CapturedTrack {
CapturedTrack(dom::MediaStreamTrack* aTrack, uint64_t aWindowID);
~CapturedTrack();
const WeakPtr<dom::MediaStreamTrack> mTrack;
const RefPtr<MediaInputPort> mPort;
};
const WeakPtr<DOMMediaStream> mStream;
const uint64_t mWindowId;
protected:
void AddTrack(dom::AudioStreamTrack* aTrack);
void RemoveTrack(dom::AudioStreamTrack* aTrack);
nsTArray<UniquePtr<CapturedTrack>> mTracks;
};
} // namespace mozilla
#endif /* MediaStreamWindowCapturer_h */
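
A hedged usage sketch (not in the patch) for the class above; mSrcStream, window and mStreamWindowCapturer are assumed names at an HTMLMediaElement-style call site:

  mStreamWindowCapturer = MakeUnique<MediaStreamWindowCapturer>(
      mSrcStream, window->WindowID());
  // From here on, live audio tracks in mSrcStream, present now or added
  // later, are piped to the graph's window-capture stream; ended or removed
  // tracks are dropped again.
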


@ -9,7 +9,6 @@
#include "mozilla/AsyncEventDispatcher.h"
#include "mozilla/dom/HTMLMediaElement.h"
#include "mozilla/dom/AudioTrack.h"
#include "mozilla/dom/VideoStreamTrack.h"
#include "mozilla/dom/VideoTrack.h"
#include "mozilla/dom/TrackEvent.h"
#include "nsThreadUtils.h"
@ -86,9 +85,10 @@ void MediaTrackList::RemoveTracks() {
already_AddRefed<AudioTrack> MediaTrackList::CreateAudioTrack(
nsIGlobalObject* aOwnerGlobal, const nsAString& aId, const nsAString& aKind,
const nsAString& aLabel, const nsAString& aLanguage, bool aEnabled) {
RefPtr<AudioTrack> track =
new AudioTrack(aOwnerGlobal, aId, aKind, aLabel, aLanguage, aEnabled);
const nsAString& aLabel, const nsAString& aLanguage, bool aEnabled,
AudioStreamTrack* aAudioTrack) {
RefPtr<AudioTrack> track = new AudioTrack(aOwnerGlobal, aId, aKind, aLabel,
aLanguage, aEnabled, aAudioTrack);
return track.forget();
}


@ -14,13 +14,14 @@ class DOMMediaStream;
namespace dom {
class AudioStreamTrack;
class AudioTrack;
class AudioTrackList;
class HTMLMediaElement;
class MediaTrack;
class AudioTrackList;
class VideoTrackList;
class AudioTrack;
class VideoTrack;
class VideoStreamTrack;
class VideoTrack;
class VideoTrackList;
/**
* Base class of AudioTrackList and VideoTrackList. The AudioTrackList and
@ -53,10 +54,13 @@ class MediaTrackList : public DOMEventTargetHelper {
void RemoveTracks();
// For the case where the src of the HTMLMediaElement is not a MediaStream,
// leave aAudioTrack as the default (nullptr).
static already_AddRefed<AudioTrack> CreateAudioTrack(
nsIGlobalObject* aOwnerGlobal, const nsAString& aId,
const nsAString& aKind, const nsAString& aLabel,
const nsAString& aLanguage, bool aEnabled);
const nsAString& aLanguage, bool aEnabled,
AudioStreamTrack* aAudioTrack = nullptr);
// For the case where the src of the HTMLMediaElement is not a MediaStream,
// leave aVideoTrack as the default (nullptr).
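
A hedged example (not in the patch) of calling CreateAudioTrack for the MediaStream-backed case; global, id and audioStreamTrack are assumed to exist at the call site:

  RefPtr<AudioTrack> audioTrack = MediaTrackList::CreateAudioTrack(
      global, id, NS_LITERAL_STRING("main"), NS_LITERAL_STRING("Audio"),
      NS_LITERAL_STRING("en"), /* aEnabled */ true, audioStreamTrack);
  // For a non-MediaStream src the trailing argument is simply left out.
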


@ -14,12 +14,12 @@
namespace mozilla {
namespace dom {
VideoStreamTrack::VideoStreamTrack(DOMMediaStream* aStream, TrackID aTrackID,
TrackID aInputTrackID,
VideoStreamTrack::VideoStreamTrack(nsPIDOMWindowInner* aWindow,
MediaStream* aInputStream, TrackID aTrackID,
MediaStreamTrackSource* aSource,
const MediaTrackConstraints& aConstraints)
: MediaStreamTrack(aStream, aTrackID, aInputTrackID, aSource,
aConstraints) {}
: MediaStreamTrack(aWindow, aInputStream, aTrackID, aSource, aConstraints) {
}
void VideoStreamTrack::Destroy() {
mVideoOutputs.Clear();
@ -27,6 +27,9 @@ void VideoStreamTrack::Destroy() {
}
void VideoStreamTrack::AddVideoOutput(VideoFrameContainer* aSink) {
if (Ended()) {
return;
}
auto output = MakeRefPtr<VideoOutput>(
aSink, nsGlobalWindowInner::Cast(GetParentObject())
->AbstractMainThreadFor(TaskCategory::Other));
@ -34,6 +37,9 @@ void VideoStreamTrack::AddVideoOutput(VideoFrameContainer* aSink) {
}
void VideoStreamTrack::AddVideoOutput(VideoOutput* aOutput) {
if (Ended()) {
return;
}
for (const auto& output : mVideoOutputs) {
if (output == aOutput) {
MOZ_ASSERT_UNREACHABLE("A VideoOutput was already added");


@ -19,7 +19,7 @@ namespace dom {
class VideoStreamTrack : public MediaStreamTrack {
public:
VideoStreamTrack(
DOMMediaStream* aStream, TrackID aTrackID, TrackID aInputTrackID,
nsPIDOMWindowInner* aWindow, MediaStream* aInputStream, TrackID aTrackID,
MediaStreamTrackSource* aSource,
const MediaTrackConstraints& aConstraints = MediaTrackConstraints());
@ -39,10 +39,10 @@ class VideoStreamTrack : public MediaStreamTrack {
void GetLabel(nsAString& aLabel, CallerType aCallerType) override;
protected:
already_AddRefed<MediaStreamTrack> CloneInternal(
DOMMediaStream* aOwningStream, TrackID aTrackID) override {
return do_AddRef(new VideoStreamTrack(
aOwningStream, aTrackID, mInputTrackID, mSource, mConstraints));
already_AddRefed<MediaStreamTrack> CloneInternal() override {
return do_AddRef(
new VideoStreamTrack(mWindow, Ended() ? nullptr : mInputStream.get(),
mTrackID, mSource, mConstraints));
}
private:


@ -16,12 +16,12 @@ namespace dom {
VideoTrack::VideoTrack(nsIGlobalObject* aOwnerGlobal, const nsAString& aId,
const nsAString& aKind, const nsAString& aLabel,
const nsAString& aLanguage,
VideoStreamTrack* aStreamTarck)
VideoStreamTrack* aStreamTrack)
: MediaTrack(aOwnerGlobal, aId, aKind, aLabel, aLanguage),
mSelected(false),
mVideoStreamTrack(aStreamTarck) {}
mVideoStreamTrack(aStreamTrack) {}
VideoTrack::~VideoTrack() {}
VideoTrack::~VideoTrack() = default;
NS_IMPL_CYCLE_COLLECTION_INHERITED(VideoTrack, MediaTrack, mVideoStreamTrack)


@ -20,7 +20,7 @@ class VideoTrack : public MediaTrack {
VideoTrack(nsIGlobalObject* aOwnerGlobal, const nsAString& aId,
const nsAString& aKind, const nsAString& aLabel,
const nsAString& aLanguage,
VideoStreamTrack* aStreamTarck = nullptr);
VideoStreamTrack* aStreamTrack = nullptr);
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(VideoTrack, MediaTrack)


@ -31,9 +31,8 @@ using media::TimeUnit;
*/
struct PlaybackInfoInit {
TimeUnit mStartTime;
StreamTime mOffset;
MediaInfo mInfo;
TrackID mAudioTrackID;
TrackID mVideoTrackID;
};
class DecodedStreamGraphListener;
@ -41,7 +40,7 @@ class DecodedStreamGraphListener;
class DecodedStreamTrackListener : public MediaStreamTrackListener {
public:
DecodedStreamTrackListener(DecodedStreamGraphListener* aGraphListener,
SourceMediaStream* aStream, TrackID aTrackID);
SourceMediaStream* aStream);
void NotifyOutput(MediaStreamGraph* aGraph,
StreamTime aCurrentTrackTime) override;
@ -50,91 +49,89 @@ class DecodedStreamTrackListener : public MediaStreamTrackListener {
private:
const RefPtr<DecodedStreamGraphListener> mGraphListener;
const RefPtr<SourceMediaStream> mStream;
const TrackID mTrackID;
};
class DecodedStreamGraphListener {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(DecodedStreamGraphListener)
public:
DecodedStreamGraphListener(
SourceMediaStream* aStream, TrackID aAudioTrackID,
SourceMediaStream* aAudioStream,
MozPromiseHolder<DecodedStream::EndedPromise>&& aAudioEndedHolder,
TrackID aVideoTrackID,
SourceMediaStream* aVideoStream,
MozPromiseHolder<DecodedStream::EndedPromise>&& aVideoEndedHolder,
AbstractThread* aMainThread)
: mAudioTrackListener(IsTrackIDExplicit(aAudioTrackID)
? MakeRefPtr<DecodedStreamTrackListener>(
this, aStream, aAudioTrackID)
: nullptr),
StreamTime aOffset, AbstractThread* aMainThread)
: mAudioTrackListener(
aAudioStream
? MakeRefPtr<DecodedStreamTrackListener>(this, aAudioStream)
: nullptr),
mAudioEndedHolder(std::move(aAudioEndedHolder)),
mVideoTrackListener(IsTrackIDExplicit(aVideoTrackID)
? MakeRefPtr<DecodedStreamTrackListener>(
this, aStream, aVideoTrackID)
: nullptr),
mVideoTrackListener(
aVideoStream
? MakeRefPtr<DecodedStreamTrackListener>(this, aVideoStream)
: nullptr),
mVideoEndedHolder(std::move(aVideoEndedHolder)),
mStream(aStream),
mAudioTrackID(aAudioTrackID),
mVideoTrackID(aVideoTrackID),
mAudioStream(aAudioStream),
mVideoStream(aVideoStream),
mOffset(aOffset),
mAbstractMainThread(aMainThread) {
MOZ_ASSERT(NS_IsMainThread());
if (mAudioTrackListener) {
mStream->AddTrackListener(mAudioTrackListener, mAudioTrackID);
mAudioStream->AddTrackListener(mAudioTrackListener,
OutputStreamManager::sTrackID);
} else {
mAudioEnded = true;
mAudioEndedHolder.ResolveIfExists(true, __func__);
}
if (mVideoTrackListener) {
mStream->AddTrackListener(mVideoTrackListener, mVideoTrackID);
mVideoStream->AddTrackListener(mVideoTrackListener,
OutputStreamManager::sTrackID);
} else {
mVideoEnded = true;
mVideoEndedHolder.ResolveIfExists(true, __func__);
}
}
void NotifyOutput(TrackID aTrackID, StreamTime aCurrentTrackTime) {
if (aTrackID == mAudioTrackID) {
if (aCurrentTrackTime >= mAudioEnd) {
mStream->EndTrack(mAudioTrackID);
void NotifyOutput(SourceMediaStream* aStream, StreamTime aCurrentTrackTime) {
StreamTime currentTime = aCurrentTrackTime + mOffset;
if (aStream == mAudioStream) {
if (currentTime >= mAudioEnd) {
mAudioStream->EndTrack(OutputStreamManager::sTrackID);
}
} else if (aTrackID == mVideoTrackID) {
if (aCurrentTrackTime >= mVideoEnd) {
mStream->EndTrack(mVideoTrackID);
} else if (aStream == mVideoStream) {
if (currentTime >= mVideoEnd) {
mVideoStream->EndTrack(OutputStreamManager::sTrackID);
}
} else {
MOZ_CRASH("Unexpected TrackID");
MOZ_CRASH("Unexpected source stream");
}
if (aTrackID != mAudioTrackID && mAudioTrackID != TRACK_NONE &&
!mAudioEnded) {
if (aStream != mAudioStream && mAudioStream && !mAudioEnded) {
// Only audio playout drives the clock forward, if present and live.
return;
}
MOZ_ASSERT_IF(aTrackID == mAudioTrackID, !mAudioEnded);
MOZ_ASSERT_IF(aTrackID == mVideoTrackID, !mVideoEnded);
mOnOutput.Notify(mStream->StreamTimeToMicroseconds(aCurrentTrackTime));
MOZ_ASSERT_IF(aStream == mAudioStream, !mAudioEnded);
MOZ_ASSERT_IF(aStream == mVideoStream, !mVideoEnded);
mOnOutput.Notify(aStream->StreamTimeToMicroseconds(currentTime));
}
void NotifyEnded(TrackID aTrackID) {
if (aTrackID == mAudioTrackID) {
void NotifyEnded(SourceMediaStream* aStream) {
if (aStream == mAudioStream) {
mAudioEnded = true;
} else if (aTrackID == mVideoTrackID) {
} else if (aStream == mVideoStream) {
mVideoEnded = true;
} else {
MOZ_CRASH("Unexpected TrackID");
MOZ_CRASH("Unexpected source stream");
}
mStream->Graph()->DispatchToMainThreadStableState(
NewRunnableMethod<TrackID>(
aStream->Graph()->DispatchToMainThreadStableState(
NewRunnableMethod<RefPtr<SourceMediaStream>>(
"DecodedStreamGraphListener::DoNotifyTrackEnded", this,
&DecodedStreamGraphListener::DoNotifyTrackEnded, aTrackID));
&DecodedStreamGraphListener::DoNotifyTrackEnded, aStream));
}
TrackID AudioTrackID() const { return mAudioTrackID; }
TrackID VideoTrackID() const { return mVideoTrackID; }
/**
* Tell the graph listener to end the given track after it has seen at least
* aEnd worth of output reported as processed by the graph.
* Tell the graph listener to end the track sourced by the given stream after
* it has seen at least aEnd worth of output reported as processed by the
* graph.
*
* A StreamTime of STREAM_TIME_MAX indicates that the track has no end and is
* the default.
@ -152,40 +149,42 @@ class DecodedStreamGraphListener {
*
* Callable from any thread.
*/
void EndTrackAt(TrackID aTrackID, StreamTime aEnd) {
if (aTrackID == mAudioTrackID) {
void EndTrackAt(SourceMediaStream* aStream, StreamTime aEnd) {
if (aStream == mAudioStream) {
mAudioEnd = aEnd;
} else if (aTrackID == mVideoTrackID) {
} else if (aStream == mVideoStream) {
mVideoEnd = aEnd;
} else {
MOZ_CRASH("Unexpected TrackID");
MOZ_CRASH("Unexpected source stream");
}
}
void DoNotifyTrackEnded(TrackID aTrackID) {
void DoNotifyTrackEnded(SourceMediaStream* aStream) {
MOZ_ASSERT(NS_IsMainThread());
if (aTrackID == mAudioTrackID) {
if (aStream == mAudioStream) {
mAudioEndedHolder.ResolveIfExists(true, __func__);
} else if (aTrackID == mVideoTrackID) {
} else if (aStream == mVideoStream) {
mVideoEndedHolder.ResolveIfExists(true, __func__);
} else {
MOZ_CRASH("Unexpected track id");
MOZ_CRASH("Unexpected source stream");
}
}
void Forget() {
MOZ_ASSERT(NS_IsMainThread());
if (mAudioTrackListener && !mStream->IsDestroyed()) {
mStream->EndTrack(mAudioTrackID);
mStream->RemoveTrackListener(mAudioTrackListener, mAudioTrackID);
if (mAudioTrackListener && !mAudioStream->IsDestroyed()) {
mAudioStream->EndTrack(OutputStreamManager::sTrackID);
mAudioStream->RemoveTrackListener(mAudioTrackListener,
OutputStreamManager::sTrackID);
}
mAudioTrackListener = nullptr;
mAudioEndedHolder.ResolveIfExists(false, __func__);
if (mVideoTrackListener && !mStream->IsDestroyed()) {
mStream->EndTrack(mVideoTrackID);
mStream->RemoveTrackListener(mVideoTrackListener, mVideoTrackID);
if (mVideoTrackListener && !mVideoStream->IsDestroyed()) {
mVideoStream->EndTrack(OutputStreamManager::sTrackID);
mVideoStream->RemoveTrackListener(mVideoTrackListener,
OutputStreamManager::sTrackID);
}
mVideoTrackListener = nullptr;
mVideoEndedHolder.ResolveIfExists(false, __func__);
@ -212,40 +211,41 @@ class DecodedStreamGraphListener {
bool mVideoEnded = false;
// Any thread.
const RefPtr<SourceMediaStream> mStream;
const TrackID mAudioTrackID;
const RefPtr<SourceMediaStream> mAudioStream;
const RefPtr<SourceMediaStream> mVideoStream;
Atomic<StreamTime> mAudioEnd{STREAM_TIME_MAX};
const TrackID mVideoTrackID;
Atomic<StreamTime> mVideoEnd{STREAM_TIME_MAX};
const StreamTime mOffset;
const RefPtr<AbstractThread> mAbstractMainThread;
};
DecodedStreamTrackListener::DecodedStreamTrackListener(
DecodedStreamGraphListener* aGraphListener, SourceMediaStream* aStream,
TrackID aTrackID)
: mGraphListener(aGraphListener), mStream(aStream), mTrackID(aTrackID) {}
DecodedStreamGraphListener* aGraphListener, SourceMediaStream* aStream)
: mGraphListener(aGraphListener), mStream(aStream) {}
void DecodedStreamTrackListener::NotifyOutput(MediaStreamGraph* aGraph,
StreamTime aCurrentTrackTime) {
mGraphListener->NotifyOutput(mTrackID, aCurrentTrackTime);
mGraphListener->NotifyOutput(mStream, aCurrentTrackTime);
}
void DecodedStreamTrackListener::NotifyEnded() {
mGraphListener->NotifyEnded(mTrackID);
mGraphListener->NotifyEnded(mStream);
}
/*
* All MediaStream-related data is protected by the decoder's monitor.
* We have at most one DecodedStreamDaata per MediaDecoder. Its stream
* is used as the input for each ProcessedMediaStream created by calls to
* captureStream(UntilEnded). Seeking creates a new source stream, as does
* replaying after the input as ended. In the latter case, the new source is
* not connected to streams created by captureStreamUntilEnded.
/**
* All MediaStream-related data is protected by the decoder's monitor. We have
* at most one DecodedStreamData per MediaDecoder. Its streams are used as
* inputs for all output tracks created by OutputStreamManager after calls to
* captureStream/UntilEnded. Seeking creates new source streams, as does
* replaying after the input has ended. In the latter case, the new sources are
* not connected to tracks created by captureStreamUntilEnded.
*/
class DecodedStreamData final {
public:
DecodedStreamData(
OutputStreamManager* aOutputStreamManager, PlaybackInfoInit&& aInit,
RefPtr<SourceMediaStream> aAudioStream,
RefPtr<SourceMediaStream> aVideoStream,
MozPromiseHolder<DecodedStream::EndedPromise>&& aAudioEndedPromise,
MozPromiseHolder<DecodedStream::EndedPromise>&& aVideoEndedPromise,
AbstractThread* aMainThread);
@ -266,9 +266,9 @@ class DecodedStreamData final {
// Count of audio frames written to the stream
int64_t mAudioFramesWritten;
// Count of video frames written to the stream in the stream's rate
StreamTime mStreamVideoWritten;
StreamTime mVideoStreamWritten;
// Count of audio frames written to the stream in the stream's rate
StreamTime mStreamAudioWritten;
StreamTime mAudioStreamWritten;
// mNextAudioTime is the end timestamp for the last packet sent to the stream.
// Therefore audio packets starting at or after this time need to be copied
// to the output stream.
@ -291,8 +291,8 @@ class DecodedStreamData final {
bool mHaveSentFinishAudio;
bool mHaveSentFinishVideo;
// The decoder is responsible for calling Destroy() on this stream.
const RefPtr<SourceMediaStream> mStream;
const RefPtr<SourceMediaStream> mAudioStream;
const RefPtr<SourceMediaStream> mVideoStream;
const RefPtr<DecodedStreamGraphListener> mListener;
const RefPtr<OutputStreamManager> mOutputStreamManager;
@ -301,32 +301,35 @@ class DecodedStreamData final {
DecodedStreamData::DecodedStreamData(
OutputStreamManager* aOutputStreamManager, PlaybackInfoInit&& aInit,
RefPtr<SourceMediaStream> aAudioStream,
RefPtr<SourceMediaStream> aVideoStream,
MozPromiseHolder<DecodedStream::EndedPromise>&& aAudioEndedPromise,
MozPromiseHolder<DecodedStream::EndedPromise>&& aVideoEndedPromise,
AbstractThread* aMainThread)
: mAudioFramesWritten(0),
mStreamVideoWritten(0),
mStreamAudioWritten(0),
mVideoStreamWritten(0),
mAudioStreamWritten(0),
mNextAudioTime(aInit.mStartTime),
mHaveSentFinishAudio(false),
mHaveSentFinishVideo(false),
mStream(aOutputStreamManager->mSourceStream),
mAudioStream(std::move(aAudioStream)),
mVideoStream(std::move(aVideoStream)),
// DecodedStreamGraphListener will resolve these promises.
mListener(MakeRefPtr<DecodedStreamGraphListener>(
mStream, aInit.mAudioTrackID, std::move(aAudioEndedPromise),
aInit.mVideoTrackID, std::move(aVideoEndedPromise), aMainThread)),
mAudioStream, std::move(aAudioEndedPromise), mVideoStream,
std::move(aVideoEndedPromise), aInit.mOffset, aMainThread)),
mOutputStreamManager(aOutputStreamManager),
mAbstractMainThread(aMainThread) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_DIAGNOSTIC_ASSERT(
mOutputStreamManager->HasTracks(aInit.mAudioTrackID, aInit.mVideoTrackID),
mOutputStreamManager->HasTracks(mAudioStream, mVideoStream),
"Tracks must be pre-created on main thread");
if (IsTrackIDExplicit(aInit.mAudioTrackID)) {
mStream->AddAudioTrack(aInit.mAudioTrackID, aInit.mInfo.mAudio.mRate,
new AudioSegment());
if (mAudioStream) {
mAudioStream->AddAudioTrack(OutputStreamManager::sTrackID,
aInit.mInfo.mAudio.mRate, new AudioSegment());
}
if (IsTrackIDExplicit(aInit.mVideoTrackID)) {
mStream->AddTrack(aInit.mVideoTrackID, new VideoSegment());
if (mVideoStream) {
mVideoStream->AddTrack(OutputStreamManager::sTrackID, new VideoSegment());
}
}
@ -341,7 +344,7 @@ void DecodedStreamData::Forget() { mListener->Forget(); }
void DecodedStreamData::GetDebugInfo(dom::DecodedStreamDataDebugInfo& aInfo) {
aInfo.mInstance = NS_ConvertUTF8toUTF16(nsPrintfCString("%p", this));
aInfo.mAudioFramesWritten = mAudioFramesWritten;
aInfo.mStreamAudioWritten = mStreamAudioWritten;
aInfo.mStreamAudioWritten = mAudioStreamWritten;
aInfo.mNextAudioTime = mNextAudioTime.ToMicroseconds();
aInfo.mLastVideoStartTime =
mLastVideoStartTime.valueOr(TimeUnit::FromMicroseconds(-1))
@ -436,20 +439,23 @@ nsresult DecodedStream::Start(const TimeUnit& aStartTime,
mVideoEndedPromise.Resolve(true, __func__);
return NS_OK;
}
if (mInit.mInfo.HasAudio() &&
!mOutputStreamManager->HasTrackType(MediaSegment::AUDIO)) {
mOutputStreamManager->AddTrack(MediaSegment::AUDIO);
RefPtr<SourceMediaStream> audioStream =
mOutputStreamManager->GetPrecreatedTrackOfType(MediaSegment::AUDIO);
if (mInit.mInfo.HasAudio() && !audioStream) {
MOZ_DIAGNOSTIC_ASSERT(
!mOutputStreamManager->HasTrackType(MediaSegment::AUDIO));
audioStream = mOutputStreamManager->AddTrack(MediaSegment::AUDIO);
}
if (mInit.mInfo.HasVideo() &&
!mOutputStreamManager->HasTrackType(MediaSegment::VIDEO)) {
mOutputStreamManager->AddTrack(MediaSegment::VIDEO);
RefPtr<SourceMediaStream> videoStream =
mOutputStreamManager->GetPrecreatedTrackOfType(MediaSegment::VIDEO);
if (mInit.mInfo.HasVideo() && !videoStream) {
MOZ_DIAGNOSTIC_ASSERT(
!mOutputStreamManager->HasTrackType(MediaSegment::VIDEO));
videoStream = mOutputStreamManager->AddTrack(MediaSegment::VIDEO);
}
mInit.mAudioTrackID =
mOutputStreamManager->GetLiveTrackIDFor(MediaSegment::AUDIO);
mInit.mVideoTrackID =
mOutputStreamManager->GetLiveTrackIDFor(MediaSegment::VIDEO);
mData = MakeUnique<DecodedStreamData>(
mOutputStreamManager, std::move(mInit), std::move(mAudioEndedPromise),
mOutputStreamManager, std::move(mInit), std::move(audioStream),
std::move(videoStream), std::move(mAudioEndedPromise),
std::move(mVideoEndedPromise), mAbstractMainThread);
return NS_OK;
}
@ -468,7 +474,7 @@ nsresult DecodedStream::Start(const TimeUnit& aStartTime,
mAudioEndedPromise = audioEndedHolder.Ensure(__func__);
MozPromiseHolder<DecodedStream::EndedPromise> videoEndedHolder;
mVideoEndedPromise = videoEndedHolder.Ensure(__func__);
PlaybackInfoInit init{aStartTime, aInfo, TRACK_INVALID, TRACK_INVALID};
PlaybackInfoInit init{aStartTime, mStreamTimeOffset, aInfo};
nsCOMPtr<nsIRunnable> r = new R(std::move(init), std::move(audioEndedHolder),
std::move(videoEndedHolder),
mOutputStreamManager, mAbstractMainThread);
@ -477,8 +483,6 @@ nsresult DecodedStream::Start(const TimeUnit& aStartTime,
mData = static_cast<R*>(r.get())->ReleaseData();
if (mData) {
mInfo.mAudio.mTrackId = mData->mListener->AudioTrackID();
mInfo.mVideo.mTrackId = mData->mListener->VideoTrackID();
mOutputListener = mData->OnOutput().Connect(mOwnerThread, this,
&DecodedStream::NotifyOutput);
SendData();
@ -623,8 +627,6 @@ void DecodedStream::SendAudio(double aVolume,
AudioSegment output;
uint32_t rate = mInfo.mAudio.mRate;
AutoTArray<RefPtr<AudioData>, 10> audio;
TrackID audioTrackId = mInfo.mAudio.mTrackId;
SourceMediaStream* sourceStream = mData->mStream;
// It's OK to hold references to the AudioData because AudioData
// is ref-counted.
@ -639,12 +641,13 @@ void DecodedStream::SendAudio(double aVolume,
// |mNextAudioTime| is updated as we process each audio sample in
// SendStreamAudio().
if (output.GetDuration() > 0) {
mData->mStreamAudioWritten +=
sourceStream->AppendToTrack(audioTrackId, &output);
mData->mAudioStreamWritten += mData->mAudioStream->AppendToTrack(
OutputStreamManager::sTrackID, &output);
}
if (mAudioQueue.IsFinished() && !mData->mHaveSentFinishAudio) {
mData->mListener->EndTrackAt(audioTrackId, mData->mStreamAudioWritten);
mData->mListener->EndTrackAt(mData->mAudioStream,
mData->mAudioStreamWritten);
mData->mHaveSentFinishAudio = true;
}
}
@ -654,9 +657,10 @@ void DecodedStreamData::WriteVideoToSegment(
const gfx::IntSize& aIntrinsicSize, const TimeStamp& aTimeStamp,
VideoSegment* aOutput, const PrincipalHandle& aPrincipalHandle) {
RefPtr<layers::Image> image = aImage;
auto end = mStream->MicrosecondsToStreamTimeRoundDown(aEnd.ToMicroseconds());
auto end =
mVideoStream->MicrosecondsToStreamTimeRoundDown(aEnd.ToMicroseconds());
auto start =
mStream->MicrosecondsToStreamTimeRoundDown(aStart.ToMicroseconds());
mVideoStream->MicrosecondsToStreamTimeRoundDown(aStart.ToMicroseconds());
aOutput->AppendFrame(image.forget(), aIntrinsicSize, aPrincipalHandle, false,
aTimeStamp);
// Extend this so we get accurate durations for all frames.
@ -702,7 +706,7 @@ void DecodedStream::ResetVideo(const PrincipalHandle& aPrincipalHandle) {
// for video tracks as part of bug 1493618.
resetter.AppendFrame(nullptr, mData->mLastVideoImageDisplaySize,
aPrincipalHandle, false, currentTime);
mData->mStream->AppendToTrack(mInfo.mVideo.mTrackId, &resetter);
mData->mVideoStream->AppendToTrack(OutputStreamManager::sTrackID, &resetter);
// Consumer buffers have been reset. We now set the next time to the start
// time of the current frame, so that it can be displayed again on resuming.
@ -732,9 +736,7 @@ void DecodedStream::SendVideo(const PrincipalHandle& aPrincipalHandle) {
}
VideoSegment output;
TrackID videoTrackId = mInfo.mVideo.mTrackId;
AutoTArray<RefPtr<VideoData>, 10> video;
SourceMediaStream* sourceStream = mData->mStream;
// It's OK to hold references to the VideoData because VideoData
// is ref-counted.
@ -784,7 +786,7 @@ void DecodedStream::SendVideo(const PrincipalHandle& aPrincipalHandle) {
TimeUnit end = std::max(
v->GetEndTime(),
lastEnd + TimeUnit::FromMicroseconds(
sourceStream->StreamTimeToMicroseconds(1) + 1));
mData->mVideoStream->StreamTimeToMicroseconds(1) + 1));
mData->mLastVideoImage = v->mImage;
mData->mLastVideoImageDisplaySize = v->mDisplay;
mData->WriteVideoToSegment(v->mImage, lastEnd, end, v->mDisplay, t,
@ -800,8 +802,8 @@ void DecodedStream::SendVideo(const PrincipalHandle& aPrincipalHandle) {
}
if (output.GetDuration() > 0) {
mData->mStreamVideoWritten +=
sourceStream->AppendToTrack(videoTrackId, &output);
mData->mVideoStreamWritten += mData->mVideoStream->AppendToTrack(
OutputStreamManager::sTrackID, &output);
}
if (mVideoQueue.IsFinished() && !mData->mHaveSentFinishVideo) {
@ -823,7 +825,7 @@ void DecodedStream::SendVideo(const PrincipalHandle& aPrincipalHandle) {
// We round the nr of microseconds up, because WriteVideoToSegment
// will round the conversion from microseconds to StreamTime down.
auto deviation = TimeUnit::FromMicroseconds(
sourceStream->StreamTimeToMicroseconds(1) + 1);
mData->mVideoStream->StreamTimeToMicroseconds(1) + 1);
auto start = mData->mLastVideoEndTime.valueOr(mStartTime.ref());
mData->WriteVideoToSegment(
mData->mLastVideoImage, start, start + deviation,
@ -834,10 +836,11 @@ void DecodedStream::SendVideo(const PrincipalHandle& aPrincipalHandle) {
if (forceBlack) {
endSegment.ReplaceWithDisabled();
}
mData->mStreamVideoWritten +=
sourceStream->AppendToTrack(videoTrackId, &endSegment);
mData->mVideoStreamWritten += mData->mVideoStream->AppendToTrack(
OutputStreamManager::sTrackID, &endSegment);
}
mData->mListener->EndTrackAt(videoTrackId, mData->mStreamVideoWritten);
mData->mListener->EndTrackAt(mData->mVideoStream,
mData->mVideoStreamWritten);
mData->mHaveSentFinishVideo = true;
}
}
@ -849,7 +852,7 @@ StreamTime DecodedStream::SentDuration() {
return 0;
}
return std::max(mData->mStreamAudioWritten, mData->mStreamVideoWritten);
return std::max(mData->mAudioStreamWritten, mData->mVideoStreamWritten);
}
void DecodedStream::SendData() {


@ -7,8 +7,10 @@
#include "OutputStreamManager.h"
#include "DOMMediaStream.h"
#include "MediaStreamGraph.h"
#include "../MediaStreamGraphImpl.h"
#include "mozilla/dom/MediaStreamTrack.h"
#include "mozilla/dom/AudioStreamTrack.h"
#include "mozilla/dom/VideoStreamTrack.h"
#include "nsContentUtils.h"
namespace mozilla {
@ -22,11 +24,11 @@ class DecodedStreamTrackSource : public dom::MediaStreamTrackSource {
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(DecodedStreamTrackSource,
dom::MediaStreamTrackSource)
explicit DecodedStreamTrackSource(OutputStreamManager* aManager,
OutputStreamData* aData, TrackID aTrackID,
nsIPrincipal* aPrincipal,
AbstractThread* aAbstractMainThread)
: dom::MediaStreamTrackSource(aPrincipal, nsString()) {
explicit DecodedStreamTrackSource(SourceMediaStream* aSourceStream,
nsIPrincipal* aPrincipal)
: dom::MediaStreamTrackSource(aPrincipal, nsString()),
mStream(aSourceStream->Graph()->CreateTrackUnionStream()),
mPort(mStream->AllocateInputPort(aSourceStream)) {
MOZ_ASSERT(NS_IsMainThread());
}
@ -40,7 +42,12 @@ class DecodedStreamTrackSource : public dom::MediaStreamTrackSource {
// We don't notify the source that a track was stopped since it will keep
// producing tracks until the element ends. The decoder also needs the
// tracks it created to be live at the source since the decoder's clock is
// based on MediaStreams during capture.
// based on MediaStreams during capture. We do, however, disconnect this
// track's underlying stream.
if (!mStream->IsDestroyed()) {
mStream->Destroy();
mPort->Destroy();
}
}
void Disable() override {}
@ -53,8 +60,16 @@ class DecodedStreamTrackSource : public dom::MediaStreamTrackSource {
PrincipalChanged();
}
void ForceEnded() { OverrideEnded(); }
const RefPtr<ProcessedMediaStream> mStream;
const RefPtr<MediaInputPort> mPort;
protected:
virtual ~DecodedStreamTrackSource() { MOZ_ASSERT(NS_IsMainThread()); }
virtual ~DecodedStreamTrackSource() {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mStream->IsDestroyed());
}
};
NS_IMPL_ADDREF_INHERITED(DecodedStreamTrackSource, dom::MediaStreamTrackSource)
@ -69,34 +84,35 @@ OutputStreamData::OutputStreamData(OutputStreamManager* aManager,
DOMMediaStream* aDOMStream)
: mManager(aManager),
mAbstractMainThread(aAbstractMainThread),
mDOMStream(aDOMStream),
mInputStream(mDOMStream->GetInputStream()->AsProcessedStream()),
mPort(mInputStream->AllocateInputPort(mManager->mSourceStream)) {
mDOMStream(aDOMStream) {
MOZ_ASSERT(NS_IsMainThread());
}
OutputStreamData::~OutputStreamData() {
MOZ_ASSERT(NS_IsMainThread());
OutputStreamData::~OutputStreamData() = default;
// Disconnect any existing port.
if (mPort) {
mPort->Destroy();
}
}
void OutputStreamData::AddTrack(TrackID aTrackID, MediaSegment::Type aType,
void OutputStreamData::AddTrack(SourceMediaStream* aStream,
MediaSegment::Type aType,
nsIPrincipal* aPrincipal, bool aAsyncAddTrack) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_DIAGNOSTIC_ASSERT(mDOMStream);
LOG(LogLevel::Debug, "Adding output %s track with id %d to MediaStream %p%s",
aType == MediaSegment::AUDIO ? "audio" : "video", aTrackID,
LOG(LogLevel::Debug,
"Adding output %s track sourced from stream %p to MediaStream %p%s",
aType == MediaSegment::AUDIO ? "audio" : "video", aStream,
mDOMStream.get(), aAsyncAddTrack ? " (async)" : "");
RefPtr<dom::MediaStreamTrackSource> source = new DecodedStreamTrackSource(
mManager, this, aTrackID, aPrincipal, mAbstractMainThread);
RefPtr<dom::MediaStreamTrack> track =
mDOMStream->CreateDOMTrack(aTrackID, aType, source);
auto source = MakeRefPtr<DecodedStreamTrackSource>(aStream, aPrincipal);
RefPtr<dom::MediaStreamTrack> track;
if (aType == MediaSegment::AUDIO) {
track = new dom::AudioStreamTrack(mDOMStream->GetParentObject(),
source->mStream,
OutputStreamManager::sTrackID, source);
} else {
MOZ_ASSERT(aType == MediaSegment::VIDEO);
track = new dom::VideoStreamTrack(mDOMStream->GetParentObject(),
source->mStream,
OutputStreamManager::sTrackID, source);
}
mTracks.AppendElement(track.get());
if (aAsyncAddTrack) {
GetMainThreadEventTarget()->Dispatch(
@ -108,54 +124,54 @@ void OutputStreamData::AddTrack(TrackID aTrackID, MediaSegment::Type aType,
}
}
void OutputStreamData::RemoveTrack(TrackID aTrackID) {
void OutputStreamData::RemoveTrack(SourceMediaStream* aStream) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_DIAGNOSTIC_ASSERT(mDOMStream);
LOG(LogLevel::Debug, "Removing output track with id %d from MediaStream %p",
aTrackID, mDOMStream.get());
LOG(LogLevel::Debug,
"Removing output track sourced by stream %p from MediaStream %p", aStream,
mDOMStream.get());
RefPtr<dom::MediaStreamTrack> track =
mDOMStream->FindOwnedDOMTrack(mInputStream, aTrackID);
MOZ_DIAGNOSTIC_ASSERT(track);
mTracks.RemoveElement(track);
GetMainThreadEventTarget()->Dispatch(
NewRunnableMethod("MediaStreamTrack::OverrideEnded", track,
&dom::MediaStreamTrack::OverrideEnded));
for (const auto& t : nsTArray<WeakPtr<dom::MediaStreamTrack>>(mTracks)) {
mTracks.RemoveElement(t);
if (!t || t->Ended()) {
continue;
}
DecodedStreamTrackSource& source =
static_cast<DecodedStreamTrackSource&>(t->GetSource());
GetMainThreadEventTarget()->Dispatch(
NewRunnableMethod("DecodedStreamTrackSource::ForceEnded", &source,
&DecodedStreamTrackSource::ForceEnded));
}
}
void OutputStreamData::SetPrincipal(nsIPrincipal* aPrincipal) {
MOZ_DIAGNOSTIC_ASSERT(mDOMStream);
for (const WeakPtr<dom::MediaStreamTrack>& track : mTracks) {
MOZ_DIAGNOSTIC_ASSERT(track);
if (!track || track->Ended()) {
continue;
}
DecodedStreamTrackSource& source =
static_cast<DecodedStreamTrackSource&>(track->GetSource());
source.SetPrincipal(aPrincipal);
}
}
OutputStreamManager::OutputStreamManager(SourceMediaStream* aSourceStream,
TrackID aNextTrackID,
OutputStreamManager::OutputStreamManager(MediaStreamGraphImpl* aGraph,
nsIPrincipal* aPrincipal,
AbstractThread* aAbstractMainThread)
: mSourceStream(aSourceStream),
mAbstractMainThread(aAbstractMainThread),
: mAbstractMainThread(aAbstractMainThread),
mGraph(aGraph),
mPrincipalHandle(
aAbstractMainThread,
aPrincipal ? MakePrincipalHandle(aPrincipal) : PRINCIPAL_HANDLE_NONE,
"OutputStreamManager::mPrincipalHandle (Canonical)"),
mPrincipal(aPrincipal),
mNextTrackID(aNextTrackID),
mPlaying(true) // mSourceStream always starts non-suspended
{
mPrincipal(aPrincipal) {
MOZ_ASSERT(NS_IsMainThread());
}
void OutputStreamManager::Add(DOMMediaStream* aDOMStream) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!mSourceStream->IsDestroyed());
// All streams must belong to the same graph.
MOZ_ASSERT(mSourceStream->Graph() == aDOMStream->GetInputStream()->Graph());
LOG(LogLevel::Info, "Adding MediaStream %p", aDOMStream);
@ -163,14 +179,13 @@ void OutputStreamManager::Add(DOMMediaStream* aDOMStream) {
.AppendElement(new OutputStreamData(
this, mAbstractMainThread, aDOMStream))
->get();
for (const Pair<TrackID, MediaSegment::Type>& pair : mLiveTracks) {
p->AddTrack(pair.first(), pair.second(), mPrincipal, false);
for (const auto& lt : mLiveTracks) {
p->AddTrack(lt->mSourceStream, lt->mType, mPrincipal, false);
}
}
void OutputStreamManager::Remove(DOMMediaStream* aDOMStream) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!mSourceStream->IsDestroyed());
LOG(LogLevel::Info, "Removing MediaStream %p", aDOMStream);
@ -178,8 +193,8 @@ void OutputStreamManager::Remove(DOMMediaStream* aDOMStream) {
mStreams.ApplyIf(
aDOMStream, 0, StreamComparator(),
[&](const UniquePtr<OutputStreamData>& aData) {
for (const Pair<TrackID, MediaSegment::Type>& pair : mLiveTracks) {
aData->RemoveTrack(pair.first());
for (const auto& lt : mLiveTracks) {
aData->RemoveTrack(lt->mSourceStream);
}
},
[]() { MOZ_ASSERT_UNREACHABLE("Didn't exist"); });
@ -193,80 +208,93 @@ bool OutputStreamManager::HasTrackType(MediaSegment::Type aType) {
return mLiveTracks.Contains(aType, TrackTypeComparator());
}
bool OutputStreamManager::HasTracks(TrackID aAudioTrack, TrackID aVideoTrack) {
bool OutputStreamManager::HasTracks(SourceMediaStream* aAudioStream,
SourceMediaStream* aVideoStream) {
MOZ_ASSERT(NS_IsMainThread());
size_t nrExpectedTracks = 0;
bool asExpected = true;
if (IsTrackIDExplicit(aAudioTrack)) {
if (aAudioStream) {
Unused << ++nrExpectedTracks;
asExpected = asExpected && mLiveTracks.Contains(
MakePair(aAudioTrack, MediaSegment::AUDIO),
MakePair(aAudioStream, MediaSegment::AUDIO),
TrackComparator());
}
if (IsTrackIDExplicit(aVideoTrack)) {
if (aVideoStream) {
Unused << ++nrExpectedTracks;
asExpected = asExpected && mLiveTracks.Contains(
MakePair(aVideoTrack, MediaSegment::VIDEO),
MakePair(aVideoStream, MediaSegment::VIDEO),
TrackComparator());
}
asExpected = asExpected && mLiveTracks.Length() == nrExpectedTracks;
return asExpected;
}
SourceMediaStream* OutputStreamManager::GetPrecreatedTrackOfType(
MediaSegment::Type aType) const {
auto i = mLiveTracks.IndexOf(aType, 0, PrecreatedTrackTypeComparator());
return i == nsTArray<UniquePtr<LiveTrack>>::NoIndex
? nullptr
: mLiveTracks[i]->mSourceStream.get();
}
size_t OutputStreamManager::NumberOfTracks() {
MOZ_ASSERT(NS_IsMainThread());
return mLiveTracks.Length();
}
void OutputStreamManager::AddTrack(MediaSegment::Type aType) {
already_AddRefed<SourceMediaStream> OutputStreamManager::AddTrack(
MediaSegment::Type aType) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!mSourceStream->IsDestroyed());
MOZ_ASSERT(!HasTrackType(aType),
"Cannot have two tracks of the same type at the same time");
TrackID id = mNextTrackID++;
RefPtr<SourceMediaStream> stream = mGraph->CreateSourceStream();
if (!mPlaying) {
stream->Suspend();
}
LOG(LogLevel::Info, "Adding %s track with id %d",
aType == MediaSegment::AUDIO ? "audio" : "video", id);
LOG(LogLevel::Info, "Adding %s track sourced by stream %p",
aType == MediaSegment::AUDIO ? "audio" : "video", stream.get());
mLiveTracks.AppendElement(MakePair(id, aType));
mLiveTracks.AppendElement(MakeUnique<LiveTrack>(stream, aType));
AutoRemoveDestroyedStreams();
for (const auto& data : mStreams) {
data->AddTrack(id, aType, mPrincipal, true);
data->AddTrack(stream, aType, mPrincipal, true);
}
return stream.forget();
}
OutputStreamManager::LiveTrack::~LiveTrack() { mSourceStream->Destroy(); }
void OutputStreamManager::AutoRemoveDestroyedStreams() {
MOZ_ASSERT(NS_IsMainThread());
for (size_t i = mStreams.Length(); i > 0; --i) {
const auto& data = mStreams[i - 1];
if (!data->mDOMStream || !data->mDOMStream->GetInputStream()) {
if (!data->mDOMStream) {
// If the mDOMStream WeakPtr is now null, mDOMStream has been destructed
// and we can consider this entry dead.
mStreams.RemoveElementAt(i - 1);
}
}
}
void OutputStreamManager::RemoveTrack(TrackID aTrackID) {
void OutputStreamManager::RemoveTrack(SourceMediaStream* aStream) {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!mSourceStream->IsDestroyed());
LOG(LogLevel::Info, "Removing track with id %d", aTrackID);
DebugOnly<bool> rv = mLiveTracks.RemoveElement(aTrackID, TrackIDComparator());
LOG(LogLevel::Info, "Removing track with source stream %p", aStream);
DebugOnly<bool> rv =
mLiveTracks.RemoveElement(aStream, TrackStreamComparator());
MOZ_ASSERT(rv);
AutoRemoveDestroyedStreams();
for (const auto& data : mStreams) {
data->RemoveTrack(aTrackID);
data->RemoveTrack(aStream);
}
}
void OutputStreamManager::RemoveTracks() {
MOZ_ASSERT(NS_IsMainThread());
nsTArray<Pair<TrackID, MediaSegment::Type>> liveTracks(mLiveTracks);
for (const auto& pair : liveTracks) {
RemoveTrack(pair.first());
for (size_t i = mLiveTracks.Length(); i > 0; --i) {
RemoveTrack(mLiveTracks[i - 1]->mSourceStream);
}
}
@ -283,9 +311,6 @@ void OutputStreamManager::Disconnect() {
Remove(domStream);
}
MOZ_ASSERT(mStreams.IsEmpty());
if (!mSourceStream->IsDestroyed()) {
mSourceStream->Destroy();
}
}
AbstractCanonical<PrincipalHandle>*
@ -306,21 +331,6 @@ void OutputStreamManager::SetPrincipal(nsIPrincipal* aPrincipal) {
}
}
TrackID OutputStreamManager::NextTrackID() const {
MOZ_ASSERT(NS_IsMainThread());
return mNextTrackID;
}
TrackID OutputStreamManager::GetLiveTrackIDFor(MediaSegment::Type aType) const {
MOZ_ASSERT(NS_IsMainThread());
for (const auto& pair : mLiveTracks) {
if (pair.second() == aType) {
return pair.first();
}
}
return TRACK_NONE;
}
void OutputStreamManager::SetPlaying(bool aPlaying) {
MOZ_ASSERT(NS_IsMainThread());
if (mPlaying == aPlaying) {
@ -328,13 +338,18 @@ void OutputStreamManager::SetPlaying(bool aPlaying) {
}
mPlaying = aPlaying;
if (mPlaying) {
mSourceStream->Resume();
} else {
mSourceStream->Suspend();
for (auto& lt : mLiveTracks) {
if (mPlaying) {
lt->mSourceStream->Resume();
lt->mEverPlayed = true;
} else {
lt->mSourceStream->Suspend();
}
}
}
OutputStreamManager::~OutputStreamManager() = default;
#undef LOG
} // namespace mozilla
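
An editorial sketch (not in the patch) of the rough lifecycle a decoder drives through this manager; graph, principal, mainThread and domStream are assumptions:

  auto manager = MakeRefPtr<OutputStreamManager>(graph, principal, mainThread);
  manager->Add(domStream);  // mirror the live tracks into this DOMMediaStream
  RefPtr<SourceMediaStream> audio = manager->AddTrack(MediaSegment::AUDIO);
  RefPtr<SourceMediaStream> video = manager->AddTrack(MediaSegment::VIDEO);
  // ... decoded media is appended to `audio` and `video` elsewhere ...
  manager->RemoveTracks();  // end every live track
  manager->Disconnect();    // and drop the output streams
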


@ -38,26 +38,19 @@ class OutputStreamData {
// to it. For a true aAsyncAddTrack we will dispatch a task to add the
// created track to mDOMStream, as is required by spec for the "addtrack"
// event.
void AddTrack(TrackID aTrackID, MediaSegment::Type aType,
void AddTrack(SourceMediaStream* aStream, MediaSegment::Type aType,
nsIPrincipal* aPrincipal, bool aAsyncAddTrack);
// Ends the MediaStreamTrack with aTrackID. Calling this with a TrackID that
// doesn't exist in mDOMStream is an error.
void RemoveTrack(TrackID aTrackID);
// Ends any MediaStreamTracks sourced from aStream.
void RemoveTrack(SourceMediaStream* aStream);
void SetPrincipal(nsIPrincipal* aPrincipal);
// The source stream DecodedStream is feeding tracks to.
const RefPtr<OutputStreamManager> mManager;
const RefPtr<AbstractThread> mAbstractMainThread;
// The DOMMediaStream we add tracks to and represent.
const WeakPtr<DOMMediaStream> mDOMStream;
// The input stream of mDOMStream.
const RefPtr<ProcessedMediaStream> mInputStream;
private:
// mPort connects mSourceStream to mInputStream.
const RefPtr<MediaInputPort> mPort;
// Tracks that have been added and not yet removed.
nsTArray<WeakPtr<dom::MediaStreamTrack>> mTracks;
};
@ -66,35 +59,34 @@ class OutputStreamManager {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(OutputStreamManager);
public:
explicit OutputStreamManager(SourceMediaStream* aSourceStream,
TrackID aNextTrackID, nsIPrincipal* aPrincipal,
AbstractThread* aAbstractMainThread);
OutputStreamManager(MediaStreamGraphImpl* aGraph, nsIPrincipal* aPrincipal,
AbstractThread* aAbstractMainThread);
// Add the output stream to the collection.
void Add(DOMMediaStream* aDOMStream);
// Remove the output stream from the collection.
void Remove(DOMMediaStream* aDOMStream);
// Returns true if there's a live track of the given type.
bool HasTrackType(MediaSegment::Type aType);
// Returns true if the given tracks and no others are currently live.
// Use a non-explicit TrackID to make it ignored for that type.
bool HasTracks(TrackID aAudioTrack, TrackID aVideoTrack);
// Returns true if the given streams are sourcing all currently live tracks.
// Use nullptr to make it ignored for that type.
bool HasTracks(SourceMediaStream* aAudioStream,
SourceMediaStream* aVideoStream);
// Gets the underlying stream for the given type if it has never been played,
// or nullptr if there is none.
SourceMediaStream* GetPrecreatedTrackOfType(MediaSegment::Type aType) const;
// Returns the number of live tracks.
size_t NumberOfTracks();
// Add a track to all output streams.
void AddTrack(MediaSegment::Type aType);
// Remove all currently live tracks from all output streams.
// Add a track to all output streams and return the SourceMediaStream that
// sources it.
already_AddRefed<SourceMediaStream> AddTrack(MediaSegment::Type aType);
// Remove all currently live tracks.
void RemoveTracks();
// Disconnect mSourceStream from all output streams.
// Remove all currently live tracks and all output streams.
void Disconnect();
// The principal handle for the underlying decoder.
AbstractCanonical<PrincipalHandle>* CanonicalPrincipalHandle();
// Called when the underlying decoder's principal has changed.
void SetPrincipal(nsIPrincipal* aPrincipal);
// Returns the track id that would be used the next time a track is added.
TrackID NextTrackID() const;
// Returns the TrackID for the currently live track of the given type, or
// TRACK_NONE otherwise.
TrackID GetLiveTrackIDFor(MediaSegment::Type aType) const;
// Called by DecodedStream when its playing state changes. While not playing
// we suspend mSourceStream.
void SetPlaying(bool aPlaying);
@ -104,51 +96,69 @@ class OutputStreamManager {
return mStreams.IsEmpty();
}
// Keep the source stream so we can connect the output streams that
// are added after Connect().
const RefPtr<SourceMediaStream> mSourceStream;
static const TrackID sTrackID = 1;
const RefPtr<AbstractThread> mAbstractMainThread;
private:
~OutputStreamManager() = default;
~OutputStreamManager();
class LiveTrack {
public:
LiveTrack(SourceMediaStream* aSourceStream, MediaSegment::Type aType)
: mSourceStream(aSourceStream), mType(aType) {}
~LiveTrack();
const RefPtr<SourceMediaStream> mSourceStream;
const MediaSegment::Type mType;
bool mEverPlayed = false;
};
struct StreamComparator {
static bool Equals(const UniquePtr<OutputStreamData>& aData,
DOMMediaStream* aStream) {
return aData->mDOMStream == aStream;
}
};
struct TrackIDComparator {
static bool Equals(const Pair<TrackID, MediaSegment::Type>& aLiveTrack,
TrackID aTrackID) {
return aLiveTrack.first() == aTrackID;
struct TrackStreamComparator {
static bool Equals(const UniquePtr<LiveTrack>& aLiveTrack,
SourceMediaStream* aStream) {
return aLiveTrack->mSourceStream == aStream;
}
};
struct TrackTypeComparator {
static bool Equals(const Pair<TrackID, MediaSegment::Type>& aLiveTrack,
static bool Equals(const UniquePtr<LiveTrack>& aLiveTrack,
MediaSegment::Type aType) {
return aLiveTrack.second() == aType;
return aLiveTrack->mType == aType;
}
};
struct PrecreatedTrackTypeComparator {
static bool Equals(const UniquePtr<LiveTrack>& aLiveTrack,
MediaSegment::Type aType) {
return !aLiveTrack->mEverPlayed && aLiveTrack->mType == aType;
}
};
struct TrackComparator {
static bool Equals(const Pair<TrackID, MediaSegment::Type>& aLiveTrack,
const Pair<TrackID, MediaSegment::Type>& aOther) {
return aLiveTrack.first() == aOther.first() &&
aLiveTrack.second() == aOther.second();
static bool Equals(
const UniquePtr<LiveTrack>& aLiveTrack,
const Pair<SourceMediaStream*, MediaSegment::Type>& aOther) {
return aLiveTrack->mSourceStream == aOther.first() &&
aLiveTrack->mType == aOther.second();
}
};
// Goes through mStreams and removes any entries that have been destroyed.
void AutoRemoveDestroyedStreams();
// Remove aTrackID from all output streams.
void RemoveTrack(TrackID aTrackID);
// Remove tracks sourced from aStream from all output streams.
void RemoveTrack(SourceMediaStream* aStream);
const RefPtr<MediaStreamGraphImpl> mGraph;
nsTArray<UniquePtr<OutputStreamData>> mStreams;
nsTArray<Pair<TrackID, MediaSegment::Type>> mLiveTracks;
nsTArray<UniquePtr<LiveTrack>> mLiveTracks;
Canonical<PrincipalHandle> mPrincipalHandle;
nsCOMPtr<nsIPrincipal> mPrincipal;
TrackID mNextTrackID;
bool mPlaying;
bool mPlaying = false;
};
} // namespace mozilla
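
A second hedged sketch (not in the patch), assuming a `manager` like the one above: a track added while not playing stays suspended and is still reported by GetPrecreatedTrackOfType(); once playback starts it is resumed and marked as played, so it is no longer considered pre-created.

  RefPtr<SourceMediaStream> audio = manager->AddTrack(MediaSegment::AUDIO);
  MOZ_ASSERT(manager->GetPrecreatedTrackOfType(MediaSegment::AUDIO) == audio);
  manager->SetPlaying(true);  // resumes the stream and sets mEverPlayed
  MOZ_ASSERT(!manager->GetPrecreatedTrackOfType(MediaSegment::AUDIO));
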


@ -154,6 +154,7 @@ EXPORTS += [
'MediaStreamGraph.h',
'MediaStreamListener.h',
'MediaStreamTypes.h',
'MediaStreamWindowCapturer.h',
'MediaTimer.h',
'MediaTrack.h',
'MediaTrackList.h',
@ -271,6 +272,7 @@ UNIFIED_SOURCES += [
'MediaStreamGraph.cpp',
'MediaStreamListener.cpp',
'MediaStreamTrack.cpp',
'MediaStreamWindowCapturer.cpp',
'MediaTimer.cpp',
'MediaTrack.cpp',
'MediaTrackList.cpp',


@ -20,12 +20,12 @@
let copies = new Array(numCopies).fill(0).map(() => copy(stream));
ok(await SpecialStream.countUnderlyingStreams() > startStreams,
"MediaStream constructor creates more underlying streams");
"MediaStreamTrack constructor creates more underlying streams");
copies = [];
await new Promise(r => SpecialPowers.exactGC(r));
is(await SpecialStream.countUnderlyingStreams(), startStreams,
"MediaStreams should have been collected");
"MediaStreamTracks should have been collected");
}
runTest(async () => {
@ -34,19 +34,6 @@
DISABLE_LOOPBACK_TONE = true;
let gUMStream = await getUserMedia({video: true});
info("Testing GC of copy constructor");
await testGC(gUMStream, 10, s => new MediaStream(s));
info("Testing GC of track-array constructor");
await testGC(gUMStream, 10, s => new MediaStream(s.getTracks()));
info("Testing GC of empty constructor plus addTrack");
await testGC(gUMStream, 10, s => {
let s2 = new MediaStream();
s.getTracks().forEach(t => s2.addTrack(t));
return s2;
});
info("Testing GC of track-array constructor with cloned tracks");
await testGC(gUMStream, 10, s => new MediaStream(s.getTracks().map(t => t.clone())));


@ -9,8 +9,8 @@
#include "mozilla/dom/MediaStreamAudioDestinationNodeBinding.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "AudioStreamTrack.h"
#include "DOMMediaStream.h"
#include "MediaStreamTrack.h"
#include "TrackUnionStream.h"
namespace mozilla {
@ -23,10 +23,19 @@ class AudioDestinationTrackSource final : public MediaStreamTrackSource {
MediaStreamTrackSource)
AudioDestinationTrackSource(MediaStreamAudioDestinationNode* aNode,
MediaStream* aInputStream,
ProcessedMediaStream* aStream,
nsIPrincipal* aPrincipal)
: MediaStreamTrackSource(aPrincipal, nsString()), mNode(aNode) {}
: MediaStreamTrackSource(aPrincipal, nsString()),
mStream(aStream),
mPort(mStream->AllocateInputPort(aInputStream)),
mNode(aNode) {}
void Destroy() override {
if (!mStream->IsDestroyed()) {
mStream->Destroy();
mPort->Destroy();
}
if (mNode) {
mNode->DestroyMediaStream();
mNode = nullptr;
@ -43,6 +52,9 @@ class AudioDestinationTrackSource final : public MediaStreamTrackSource {
void Enable() override {}
const RefPtr<ProcessedMediaStream> mStream;
const RefPtr<MediaInputPort> mPort;
private:
~AudioDestinationTrackSource() = default;
@ -69,8 +81,7 @@ MediaStreamAudioDestinationNode::MediaStreamAudioDestinationNode(
AudioContext* aContext)
: AudioNode(aContext, 2, ChannelCountMode::Explicit,
ChannelInterpretation::Speakers),
mDOMStream(DOMAudioNodeMediaStream::CreateTrackUnionStreamAsInput(
GetOwner(), this, aContext->Graph())) {
mDOMStream(MakeAndAddRef<DOMMediaStream>(GetOwner())) {
// Ensure an audio track with the correct ID is exposed to JS. If we can't get
// a principal here because the document is not available, pass in a null
// principal. This happens in edge cases when the document is being unloaded
@ -81,21 +92,14 @@ MediaStreamAudioDestinationNode::MediaStreamAudioDestinationNode(
Document* doc = aContext->GetParentObject()->GetExtantDoc();
principal = doc->NodePrincipal();
}
RefPtr<MediaStreamTrackSource> source =
new AudioDestinationTrackSource(this, principal);
RefPtr<MediaStreamTrack> track = mDOMStream->CreateDOMTrack(
AudioNodeStream::AUDIO_TRACK, MediaSegment::AUDIO, source,
MediaTrackConstraints());
mStream = AudioNodeStream::Create(aContext, new AudioNodeEngine(this),
AudioNodeStream::EXTERNAL_OUTPUT,
aContext->Graph());
auto source = MakeRefPtr<AudioDestinationTrackSource>(
this, mStream, aContext->Graph()->CreateTrackUnionStream(), principal);
auto track = MakeRefPtr<AudioStreamTrack>(
GetOwner(), source->mStream, AudioNodeStream::AUDIO_TRACK, source);
mDOMStream->AddTrackInternal(track);
ProcessedMediaStream* outputStream =
mDOMStream->GetInputStream()->AsProcessedStream();
MOZ_ASSERT(!!outputStream);
AudioNodeEngine* engine = new AudioNodeEngine(this);
mStream = AudioNodeStream::Create(
aContext, engine, AudioNodeStream::EXTERNAL_OUTPUT, aContext->Graph());
mPort =
outputStream->AllocateInputPort(mStream, AudioNodeStream::AUDIO_TRACK);
}
/* static */
@ -124,7 +128,6 @@ size_t MediaStreamAudioDestinationNode::SizeOfExcludingThis(
// Future:
// - mDOMStream
size_t amount = AudioNode::SizeOfExcludingThis(aMallocSizeOf);
amount += mPort->SizeOfIncludingThis(aMallocSizeOf);
return amount;
}
@ -135,10 +138,6 @@ size_t MediaStreamAudioDestinationNode::SizeOfIncludingThis(
void MediaStreamAudioDestinationNode::DestroyMediaStream() {
AudioNode::DestroyMediaStream();
if (mPort) {
mPort->Destroy();
mPort = nullptr;
}
}
JSObject* MediaStreamAudioDestinationNode::WrapObject(


@ -52,7 +52,6 @@ class MediaStreamAudioDestinationNode final : public AudioNode {
~MediaStreamAudioDestinationNode() = default;
RefPtr<DOMMediaStream> mDOMStream;
RefPtr<MediaInputPort> mPort;
};
} // namespace dom


@ -52,18 +52,6 @@ already_AddRefed<MediaStreamAudioSourceNode> MediaStreamAudioSourceNode::Create(
return nullptr;
}
if (aAudioContext.Graph() !=
aOptions.mMediaStream->GetPlaybackStream()->Graph()) {
nsCOMPtr<nsPIDOMWindowInner> pWindow = aAudioContext.GetParentObject();
Document* document = pWindow ? pWindow->GetExtantDoc() : nullptr;
nsContentUtils::ReportToConsole(nsIScriptError::warningFlag,
NS_LITERAL_CSTRING("Web Audio"), document,
nsContentUtils::eDOM_PROPERTIES,
"MediaStreamAudioSourceNodeDifferentRate");
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return nullptr;
}
RefPtr<MediaStreamAudioSourceNode> node =
new MediaStreamAudioSourceNode(&aAudioContext, LockOnTrackPicked);
@ -82,16 +70,9 @@ void MediaStreamAudioSourceNode::Init(DOMMediaStream* aMediaStream,
return;
}
MediaStream* inputStream = aMediaStream->GetPlaybackStream();
MediaStreamGraph* graph = Context()->Graph();
if (NS_WARN_IF(graph != inputStream->Graph())) {
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return;
}
mInputStream = aMediaStream;
AudioNodeEngine* engine = new MediaStreamAudioSourceNodeEngine(this);
mStream = AudioNodeExternalInputStream::Create(graph, engine);
mStream = AudioNodeExternalInputStream::Create(Context()->Graph(), engine);
mInputStream->AddConsumerToKeepAlive(ToSupports(this));
mInputStream->RegisterTrackListener(this);
@ -112,7 +93,7 @@ void MediaStreamAudioSourceNode::Destroy() {
MediaStreamAudioSourceNode::~MediaStreamAudioSourceNode() { Destroy(); }
void MediaStreamAudioSourceNode::AttachToTrack(
const RefPtr<MediaStreamTrack>& aTrack) {
const RefPtr<MediaStreamTrack>& aTrack, ErrorResult& aRv) {
MOZ_ASSERT(!mInputTrack);
MOZ_ASSERT(aTrack->AsAudioStreamTrack());
@ -120,6 +101,17 @@ void MediaStreamAudioSourceNode::AttachToTrack(
return;
}
if (NS_WARN_IF(Context()->Graph() != aTrack->Graph())) {
nsCOMPtr<nsPIDOMWindowInner> pWindow = Context()->GetParentObject();
Document* document = pWindow ? pWindow->GetExtantDoc() : nullptr;
nsContentUtils::ReportToConsole(nsIScriptError::warningFlag,
NS_LITERAL_CSTRING("Web Audio"), document,
nsContentUtils::eDOM_PROPERTIES,
"MediaStreamAudioSourceNodeDifferentRate");
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return;
}
mInputTrack = aTrack;
ProcessedMediaStream* outputStream =
static_cast<ProcessedMediaStream*>(mStream.get());
@ -170,7 +162,7 @@ void MediaStreamAudioSourceNode::AttachToRightTrack(
}
}
AttachToTrack(track);
AttachToTrack(track, aRv);
MarkActive();
return;
}
@ -192,7 +184,7 @@ void MediaStreamAudioSourceNode::NotifyTrackAdded(
return;
}
AttachToTrack(aTrack);
AttachToTrack(aTrack, IgnoreErrors());
}
void MediaStreamAudioSourceNode::NotifyTrackRemoved(


@ -8,8 +8,9 @@
#define MediaStreamAudioSourceNode_h_
#include "AudioNode.h"
#include "DOMMediaStream.h"
#include "AudioNodeEngine.h"
#include "DOMMediaStream.h"
#include "PrincipalChangeObserver.h"
namespace mozilla {
@ -77,7 +78,7 @@ class MediaStreamAudioSourceNode
size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override;
// Attaches to aTrack so that its audio content will be used as input.
void AttachToTrack(const RefPtr<MediaStreamTrack>& aTrack);
void AttachToTrack(const RefPtr<MediaStreamTrack>& aTrack, ErrorResult& aRv);
// Detaches from the currently attached track if there is one.
void DetachFromTrack();


@ -251,8 +251,7 @@ void MediaEngineDefaultVideoSource::SetTrack(
mStream = aStream;
mTrackID = aTrackID;
mPrincipalHandle = aPrincipal;
aStream->AddTrack(aTrackID, new VideoSegment(),
SourceMediaStream::ADDTRACK_QUEUED);
aStream->AddTrack(aTrackID, new VideoSegment());
}
nsresult MediaEngineDefaultVideoSource::Start() {
@ -497,8 +496,7 @@ void MediaEngineDefaultAudioSource::SetTrack(
mStream = aStream;
mTrackID = aTrackID;
mPrincipalHandle = aPrincipal;
aStream->AddAudioTrack(aTrackID, aStream->GraphRate(), new AudioSegment(),
SourceMediaStream::ADDTRACK_QUEUED);
aStream->AddAudioTrack(aTrackID, aStream->GraphRate(), new AudioSegment());
}
nsresult MediaEngineDefaultAudioSource::Start() {


@ -289,8 +289,7 @@ void MediaEngineRemoteVideoSource::SetTrack(
mTrackID = aTrackID;
mPrincipal = aPrincipal;
}
aStream->AddTrack(aTrackID, new VideoSegment(),
SourceMediaStream::ADDTRACK_QUEUED);
aStream->AddTrack(aTrackID, new VideoSegment());
}
nsresult MediaEngineRemoteVideoSource::Start() {

View file

@@ -495,8 +495,7 @@ void MediaEngineWebRTCMicrophoneSource::SetTrack(
AudioSegment* segment = new AudioSegment();
mStream->AddAudioTrack(mTrackID, mStream->GraphRate(), segment,
SourceMediaStream::ADDTRACK_QUEUED);
mStream->AddAudioTrack(mTrackID, mStream->GraphRate(), segment);
mInputProcessing = new AudioInputProcessing(mDeviceMaxChannelCount, mStream,
mTrackID, mPrincipal);

View file

@@ -11,7 +11,6 @@
#include "AudioSegment.h"
#include "AudioStreamTrack.h"
#include "DOMMediaStream.h"
#include "mozilla/Mutex.h"
#include "mozilla/RefPtr.h"
#include "MediaPipeline.h"
@@ -38,19 +37,6 @@ static MtransportTestUtils* test_utils;
namespace {
class FakeSourceMediaStream : public mozilla::SourceMediaStream {
public:
FakeSourceMediaStream() : SourceMediaStream() {}
virtual ~FakeSourceMediaStream() override { mMainThreadDestroyed = true; }
virtual StreamTime AppendToTrack(
TrackID aID, MediaSegment* aSegment,
MediaSegment* aRawSegment = nullptr) override {
return aSegment->GetDuration();
}
};
class FakeMediaStreamTrackSource : public mozilla::dom::MediaStreamTrackSource {
public:
FakeMediaStreamTrackSource() : MediaStreamTrackSource(nullptr, nsString()) {}
@@ -69,8 +55,7 @@ class FakeMediaStreamTrackSource : public mozilla::dom::MediaStreamTrackSource {
class FakeAudioStreamTrack : public mozilla::dom::AudioStreamTrack {
public:
FakeAudioStreamTrack()
: AudioStreamTrack(new DOMMediaStream(nullptr), 0, 1,
new FakeMediaStreamTrackSource()),
: AudioStreamTrack(nullptr, nullptr, 0, new FakeMediaStreamTrackSource()),
mMutex("Fake AudioStreamTrack"),
mStop(false),
mCount(0) {

View file

@@ -1165,31 +1165,32 @@ void MediaPipelineTransmit::PipelineListener::NewData(
class GenericReceiveListener : public MediaStreamTrackListener {
public:
explicit GenericReceiveListener(dom::MediaStreamTrack* aTrack)
: mTrack(new nsMainThreadPtrHolder<dom::MediaStreamTrack>(
"GenericReceiveListener::mTrack", aTrack)),
mTrackId(aTrack->GetInputTrackId()),
mSource(mTrack->GetInputStream()->AsSourceStream()),
: mTrackSource(new nsMainThreadPtrHolder<RemoteTrackSource>(
"GenericReceiveListener::mTrackSource",
&static_cast<RemoteTrackSource&>(aTrack->GetSource()))),
mTrackId(aTrack->GetTrackID()),
mSource(mTrackSource->mStream),
mIsAudio(aTrack->AsAudioStreamTrack()),
mPrincipalHandle(PRINCIPAL_HANDLE_NONE),
mListening(false),
mMaybeTrackNeedsUnmute(true) {
MOZ_RELEASE_ASSERT(mSource, "Must be used with a SourceMediaStream");
MOZ_DIAGNOSTIC_ASSERT(NS_IsMainThread());
MOZ_DIAGNOSTIC_ASSERT(mSource, "Must be used with a SourceMediaStream");
}
virtual ~GenericReceiveListener() = default;
void AddTrackToSource(uint32_t aRate = 0) {
MOZ_ASSERT((aRate != 0 && mTrack->AsAudioStreamTrack()) ||
mTrack->AsVideoStreamTrack());
MOZ_ASSERT_IF(mIsAudio, aRate != 0);
if (mTrack->AsAudioStreamTrack()) {
if (mIsAudio) {
mSource->AddAudioTrack(mTrackId, aRate, new AudioSegment());
} else if (mTrack->AsVideoStreamTrack()) {
} else {
mSource->AddTrack(mTrackId, new VideoSegment());
}
MOZ_LOG(gMediaPipelineLog, LogLevel::Debug,
("GenericReceiveListener added %s track %d (%p) to stream %p",
mTrack->AsAudioStreamTrack() ? "audio" : "video", mTrackId,
mTrack.get(), mSource.get()));
("GenericReceiveListener added %s track %d to stream %p",
mIsAudio ? "audio" : "video", mTrackId, mSource.get()));
mSource->AddTrackListener(this, mTrackId);
}
@@ -1200,7 +1201,7 @@ class GenericReceiveListener : public MediaStreamTrackListener {
}
mListening = true;
mMaybeTrackNeedsUnmute = true;
if (mTrack->AsAudioStreamTrack() && !mSource->IsDestroyed()) {
if (mIsAudio && !mSource->IsDestroyed()) {
mSource->SetPullingEnabled(mTrackId, true);
}
}
@@ -1210,7 +1211,7 @@ class GenericReceiveListener : public MediaStreamTrackListener {
return;
}
mListening = false;
if (mTrack->AsAudioStreamTrack() && !mSource->IsDestroyed()) {
if (mIsAudio && !mSource->IsDestroyed()) {
mSource->SetPullingEnabled(mTrackId, false);
}
}
@@ -1226,7 +1227,7 @@ class GenericReceiveListener : public MediaStreamTrackListener {
void OnRtpReceived_m() {
if (mListening) {
static_cast<RemoteTrackSource&>(mTrack->GetSource()).SetMuted(false);
mTrackSource->SetMuted(false);
}
}
@@ -1234,9 +1235,16 @@ class GenericReceiveListener : public MediaStreamTrackListener {
MOZ_LOG(gMediaPipelineLog, LogLevel::Debug,
("GenericReceiveListener ending track"));
// This breaks the cycle with the SourceMediaStream
mSource->RemoveTrackListener(this, mTrackId);
mSource->EndTrack(mTrackId);
if (!mSource->IsDestroyed()) {
// This breaks the cycle with the SourceMediaStream
mSource->RemoveTrackListener(this, mTrackId);
mSource->EndTrack(mTrackId);
mSource->Destroy();
}
NS_DispatchToMainThread(NewRunnableMethod("RemoteTrackSource::ForceEnded",
mTrackSource.get(),
&RemoteTrackSource::ForceEnded));
}
// Must be called on the main thread
@@ -1257,7 +1265,7 @@ class GenericReceiveListener : public MediaStreamTrackListener {
PrincipalHandle mPrincipalHandle;
};
mTrack->GraphImpl()->AppendMessage(
mSource->GraphImpl()->AppendMessage(
MakeUnique<Message>(this, aPrincipalHandle));
}
@@ -1267,9 +1275,10 @@ class GenericReceiveListener : public MediaStreamTrackListener {
}
protected:
const nsMainThreadPtrHandle<dom::MediaStreamTrack> mTrack;
const nsMainThreadPtrHandle<RemoteTrackSource> mTrackSource;
const TrackID mTrackId;
const RefPtr<SourceMediaStream> mSource;
const bool mIsAudio;
PrincipalHandle mPrincipalHandle;
bool mListening;
Atomic<bool> mMaybeTrackNeedsUnmute;
@@ -1290,12 +1299,11 @@ class MediaPipelineReceiveAudio::PipelineListener
PipelineListener(dom::MediaStreamTrack* aTrack,
const RefPtr<MediaSessionConduit>& aConduit)
: GenericReceiveListener(aTrack),
mConduit(aConduit)
mConduit(aConduit),
// AudioSession conduit only supports 16, 32, 44.1 and 48kHz
// This is an artificial limitation, it would however require more
// changes to support any rates. If the sampling rate is not-supported,
// we will use 48kHz instead.
,
mRate(static_cast<AudioSessionConduit*>(mConduit.get())
->IsSamplingFreqSupported(mSource->GraphRate())
? mSource->GraphRate()

View file

@@ -35,6 +35,7 @@
#include "AudioConduit.h"
#include "VideoConduit.h"
#include "MediaStreamGraph.h"
#include "runnable_utils.h"
#include "PeerConnectionCtx.h"
#include "PeerConnectionImpl.h"
@@ -1797,18 +1798,7 @@ OwningNonNull<dom::MediaStreamTrack> PeerConnectionImpl::CreateReceiveTrack(
SdpMediaSection::MediaType type) {
bool audio = (type == SdpMediaSection::MediaType::kAudio);
MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
audio ? MediaStreamGraph::AUDIO_THREAD_DRIVER
: MediaStreamGraph::SYSTEM_THREAD_DRIVER,
GetWindow(), MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
RefPtr<DOMMediaStream> stream =
DOMMediaStream::CreateSourceStreamAsInput(GetWindow(), graph);
CSFLogDebug(LOGTAG, "Created media stream %p, inner: %p", stream.get(),
stream->GetInputStream());
// Set the principal used for creating the tracks. This makes the stream
// Set the principal used for creating the tracks. This makes the track
// data (audio/video samples) accessible to the receiving page. We're
// only certain that privacy hasn't been requested if we're connected.
nsCOMPtr<nsIPrincipal> principal;
@@ -1817,29 +1807,39 @@ OwningNonNull<dom::MediaStreamTrack> PeerConnectionImpl::CreateReceiveTrack(
if (mPrivacyRequested.isSome() && !*mPrivacyRequested) {
principal = doc->NodePrincipal();
} else {
// we're either certain that we need isolation for the streams, OR
// we're not sure and we can fix the stream in SetDtlsConnected
// we're either certain that we need isolation for the tracks, OR
// we're not sure and we can fix the track in SetDtlsConnected
principal =
NullPrincipal::CreateWithInheritedAttributes(doc->NodePrincipal());
}
MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
audio ? MediaStreamGraph::AUDIO_THREAD_DRIVER
: MediaStreamGraph::SYSTEM_THREAD_DRIVER,
GetWindow(), MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
RefPtr<MediaStreamTrack> track;
RefPtr<RemoteTrackSource> trackSource;
RefPtr<SourceMediaStream> source = graph->CreateSourceStream();
if (audio) {
trackSource = new RemoteTrackSource(principal,
trackSource = new RemoteTrackSource(source, principal,
NS_ConvertASCIItoUTF16("remote audio"));
track = stream->CreateDOMTrack(333, // Use a constant TrackID. Dependents
// read this from the DOM track.
MediaSegment::AUDIO, trackSource);
track = new AudioStreamTrack(GetWindow(), source,
333, // Use a constant TrackID. Dependents
// read this from the DOM track.
trackSource);
} else {
trackSource = new RemoteTrackSource(principal,
trackSource = new RemoteTrackSource(source, principal,
NS_ConvertASCIItoUTF16("remote video"));
track = stream->CreateDOMTrack(666, // Use a constant TrackID. Dependents
// read this from the DOM track.
MediaSegment::VIDEO, trackSource);
track = new VideoStreamTrack(GetWindow(), source,
666, // Use a constant TrackID. Dependents
// read this from the DOM track.
trackSource);
}
stream->AddTrackInternal(track);
CSFLogDebug(LOGTAG, "Created %s track %p, inner: %p",
audio ? "audio" : "video", track.get(), track->GetStream());
// Spec says remote tracks start out muted.
trackSource->SetMuted(true);

View file

@@ -12,8 +12,9 @@ namespace mozilla {
class RemoteTrackSource : public dom::MediaStreamTrackSource {
public:
explicit RemoteTrackSource(nsIPrincipal* aPrincipal, const nsString& aLabel)
: dom::MediaStreamTrackSource(aPrincipal, aLabel) {}
explicit RemoteTrackSource(SourceMediaStream* aStream,
nsIPrincipal* aPrincipal, const nsString& aLabel)
: dom::MediaStreamTrackSource(aPrincipal, aLabel), mStream(aStream) {}
dom::MediaSourceEnum GetMediaSource() const override {
return dom::MediaSourceEnum::Other;
@@ -43,9 +44,15 @@ class RemoteTrackSource : public dom::MediaStreamTrackSource {
PrincipalChanged();
}
void SetMuted(bool aMuted) { MutedChanged(aMuted); }
void ForceEnded() { OverrideEnded(); }
const RefPtr<SourceMediaStream> mStream;
protected:
virtual ~RemoteTrackSource() {}
virtual ~RemoteTrackSource() {
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(mStream->IsDestroyed());
}
};
} // namespace mozilla

View file

@@ -1,4 +0,0 @@
[MediaStream-clone.https.html]
[Tests that cloning MediaStream objects works as expected]
expected: FAIL