/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef MOZILLA_MEDIASTREAMGRAPH_H_
#define MOZILLA_MEDIASTREAMGRAPH_H_

#include "mozilla/LinkedList.h"
#include "mozilla/Mutex.h"
#include "mozilla/TaskQueue.h"
#include "mozilla/dom/AudioChannelBinding.h"
#include "AudioSegment.h"
#include "AudioStream.h"
#include "nsTArray.h"
#include "nsIRunnable.h"
#include "StreamTracks.h"
#include "VideoFrameContainer.h"
#include "VideoSegment.h"
#include "MainThreadUtils.h"
#include "nsAutoPtr.h"
#include "nsAutoRef.h"
#include <speex/speex_resampler.h>

class nsIRunnable;

template <>
class nsAutoRefTraits<SpeexResamplerState> : public nsPointerRefTraits<SpeexResamplerState>
{
public:
  static void Release(SpeexResamplerState* aState) { speex_resampler_destroy(aState); }
};

namespace mozilla {

extern LazyLogModule gMediaStreamGraphLog;

namespace dom {
enum class AudioContextOperation;
}

namespace media {
template<typename V, typename E> class Pledge;
}

/*
 * MediaStreamGraph is a framework for synchronized audio/video processing
 * and playback. It is designed to be used by other browser components such as
 * HTML media elements, media capture APIs, real-time media streaming APIs,
 * multitrack media APIs, and advanced audio APIs.
 *
 * The MediaStreamGraph uses a dedicated thread to process media --- the media
 * graph thread. This ensures that we can process media through the graph
 * without blocking on main-thread activity. The media graph is only modified
 * on the media graph thread, to ensure graph changes can be processed without
 * interfering with media processing. All interaction with the media graph
 * thread is done with message passing.
 *
 * APIs that modify the graph or its properties are described as "control APIs".
 * These APIs are asynchronous; they queue graph changes internally and
 * those changes are processed all-at-once by the MediaStreamGraph. The
 * MediaStreamGraph monitors the main thread event loop via nsIAppShell::RunInStableState
 * to ensure that graph changes from a single event loop task are always
 * processed all together. Control APIs should only be used on the main thread,
 * currently; we may be able to relax that later.
 *
 * To allow precise synchronization of times in the control API, the
 * MediaStreamGraph maintains a "media timeline". Control APIs that take or
 * return times use that timeline. Those times never advance during
 * an event loop task. This time is returned by MediaStreamGraph::GetCurrentTime().
 *
 * Media decoding, audio processing and media playback use thread-safe APIs to
 * the media graph to ensure they can continue while the main thread is blocked.
 *
 * When the graph is changed, we may need to throw out buffered data and
 * reprocess it. This is triggered automatically by the MediaStreamGraph.
 */

class AudioNodeEngine;
class AudioNodeExternalInputStream;
class AudioNodeStream;
class CameraPreviewMediaStream;
class MediaInputPort;
class MediaStream;
class MediaStreamGraph;
class MediaStreamGraphImpl;
class ProcessedMediaStream;
class SourceMediaStream;

/**
 * This is a base class for media graph thread listener callbacks.
 * Override methods to be notified of audio or video data or changes in stream
 * state.
 *
 * This can be used by stream recorders or network connections that receive
 * stream input. It could also be used for debugging.
 *
 * All notification methods are called from the media graph thread. Overriders
 * of these methods are responsible for all synchronization. Beware!
 * These methods are called without the media graph monitor held, so
 * reentry into media graph methods is possible, although very much discouraged!
 * You should do something non-blocking and non-reentrant (e.g. dispatch an
 * event to some thread) and return.
 * The listener is not allowed to add/remove any listeners from the stream.
 *
 * When a listener is first attached, we guarantee to send a NotifyBlockingChanged
 * callback to notify of the initial blocking state. Also, if a listener is
 * attached to a stream that has already finished, we'll call NotifyFinished.
 */
class MediaStreamListener {
protected:
  // Protected destructor, to discourage deletion outside of Release():
  virtual ~MediaStreamListener() {}

public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaStreamListener)

  /**
   * When a SourceMediaStream has pulling enabled, and the MediaStreamGraph
   * control loop is ready to pull, this gets called. A NotifyPull implementation
   * is allowed to call the SourceMediaStream methods that alter track
   * data. It is not allowed to make other MediaStream API calls, including
   * calls to add or remove MediaStreamListeners. It is not allowed to block
   * for any length of time.
   * aDesiredTime is the stream time we would like to get data up to. Data
   * beyond this point will not be played until NotifyPull runs again, so there's
   * not much point in providing it. Note that if the stream is blocked for
   * some reason, then data before aDesiredTime may not be played immediately.
   */
  virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) {}

  enum Blocking {
    BLOCKED,
    UNBLOCKED
  };
  /**
   * Notify that the blocking status of the stream changed. The initial state
   * is assumed to be BLOCKED.
   */
  virtual void NotifyBlockingChanged(MediaStreamGraph* aGraph, Blocking aBlocked) {}

  /**
   * Notify that the stream has data in each track
   * for the stream's current time. Once this state becomes true, it will
   * always be true since we block stream time from progressing to times where
   * there isn't data in each track.
   */
  virtual void NotifyHasCurrentData(MediaStreamGraph* aGraph) {}

  /**
   * Notify that the stream output is advancing. aCurrentTime is the graph's
   * current time. MediaStream::GraphTimeToStreamTime can be used to get the
   * stream time.
   */
  virtual void NotifyOutput(MediaStreamGraph* aGraph, GraphTime aCurrentTime) {}

  enum MediaStreamGraphEvent {
    EVENT_FINISHED,
    EVENT_REMOVED,
    EVENT_HAS_DIRECT_LISTENERS, // transition from no direct listeners
    EVENT_HAS_NO_DIRECT_LISTENERS, // transition to no direct listeners
  };

  /**
   * Notify that an event has occurred on the stream.
   */
  virtual void NotifyEvent(MediaStreamGraph* aGraph, MediaStreamGraphEvent aEvent) {}

  // maskable flags, not a simple enumerated value
  enum {
    TRACK_EVENT_CREATED = 0x01,
    TRACK_EVENT_ENDED = 0x02,
    TRACK_EVENT_UNUSED = ~(TRACK_EVENT_ENDED | TRACK_EVENT_CREATED),
  };
  /**
   * Notify that changes to one of the stream tracks have been queued.
   * aTrackEvents can be any combination of TRACK_EVENT_CREATED and
   * TRACK_EVENT_ENDED. aQueuedMedia is the data being added to the track
   * at aTrackOffset (relative to the start of the stream).
   * aInputStream and aInputTrackID will be set if the changes originated
   * from an input stream's track. In practice they will only be used for
   * ProcessedMediaStreams.
   */
  virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
                                        StreamTime aTrackOffset,
                                        uint32_t aTrackEvents,
                                        const MediaSegment& aQueuedMedia,
                                        MediaStream* aInputStream = nullptr,
                                        TrackID aInputTrackID = TRACK_INVALID) {}

  /**
   * Notify of queued audio data. Only audio data needs to be queued; video
   * data will be notified via MediaStreamVideoSink::SetCurrentFrame.
   */
  virtual void NotifyQueuedAudioData(MediaStreamGraph* aGraph, TrackID aID,
                                     StreamTime aTrackOffset,
                                     const AudioSegment& aQueuedMedia,
                                     MediaStream* aInputStream = nullptr,
                                     TrackID aInputTrackID = TRACK_INVALID) {}

  /**
   * Notify that all new tracks this iteration have been created.
   * This ensures that tracks added atomically to the MediaStreamGraph are also
   * notified to MediaStreamListeners atomically.
   */
  virtual void NotifyFinishedTrackCreation(MediaStreamGraph* aGraph) {}
};
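
/* Illustrative sketch (not part of this header): a minimal MediaStreamListener
 * subclass. The class name "RecorderListener" is hypothetical; the point is
 * that every callback runs on the media graph thread, so overrides should only
 * do non-blocking work (e.g. copy the data and dispatch an event elsewhere).
 *
 *   class RecorderListener : public MediaStreamListener {
 *   public:
 *     void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
 *                                   StreamTime aTrackOffset,
 *                                   uint32_t aTrackEvents,
 *                                   const MediaSegment& aQueuedMedia,
 *                                   MediaStream* aInputStream,
 *                                   TrackID aInputTrackID) override
 *     {
 *       // Copy aQueuedMedia and hand it to another thread; do not block and
 *       // do not call back into MediaStream APIs from here.
 *     }
 *     void NotifyEvent(MediaStreamGraph* aGraph, MediaStreamGraphEvent aEvent) override
 *     {
 *       if (aEvent == EVENT_FINISHED) {
 *         // Signal end-of-stream to the consumer.
 *       }
 *     }
 *   };
 */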

class AudioDataListenerInterface {
protected:
  // Protected destructor, to discourage deletion outside of Release():
  virtual ~AudioDataListenerInterface() {}

public:
  /* These are for cubeb audio input & output streams: */
  /**
   * Output data to speakers, for use as the "far-end" data for echo
   * cancellation. This is not guaranteed to be in any particular size
   * chunks.
   */
  virtual void NotifyOutputData(MediaStreamGraph* aGraph,
                                AudioDataValue* aBuffer, size_t aFrames,
                                TrackRate aRate, uint32_t aChannels) = 0;
  /**
   * Input data from a microphone (or other audio source). This is not
   * guaranteed to be in any particular size chunks.
   */
  virtual void NotifyInputData(MediaStreamGraph* aGraph,
                               const AudioDataValue* aBuffer, size_t aFrames,
                               TrackRate aRate, uint32_t aChannels) = 0;

  /**
   * Called when the underlying audio device has changed.
   */
  virtual void DeviceChanged() = 0;
};

class AudioDataListener : public AudioDataListenerInterface {
protected:
  // Protected destructor, to discourage deletion outside of Release():
  virtual ~AudioDataListener() {}

public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(AudioDataListener)
};
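
/* Illustrative sketch (not part of this header): a hypothetical
 * AudioDataListener that forwards audio callbacks to a processing module.
 * These callbacks arrive on the real-time audio thread, so the implementation
 * must be lock-light and must not block.
 *
 *   class MicCaptureListener : public AudioDataListener {
 *   public:
 *     void NotifyOutputData(MediaStreamGraph* aGraph,
 *                           AudioDataValue* aBuffer, size_t aFrames,
 *                           TrackRate aRate, uint32_t aChannels) override
 *     {
 *       // Feed speaker data to echo cancellation as the "far-end" signal.
 *     }
 *     void NotifyInputData(MediaStreamGraph* aGraph,
 *                          const AudioDataValue* aBuffer, size_t aFrames,
 *                          TrackRate aRate, uint32_t aChannels) override
 *     {
 *       // Push aFrames * aChannels samples into a lock-free queue for
 *       // processing on another thread.
 *     }
 *     void DeviceChanged() override
 *     {
 *       // Reset any state tied to the previous device (e.g. a resampler).
 *     }
 *   protected:
 *     ~MicCaptureListener() override = default;
 *   };
 */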

/**
 * This is a base class for media graph thread listener callbacks locked to
 * specific tracks. Override methods to be notified of audio or video data or
 * changes in track state.
 *
 * All notification methods are called from the media graph thread. Overriders
 * of these methods are responsible for all synchronization. Beware!
 * These methods are called without the media graph monitor held, so
 * reentry into media graph methods is possible, although very much discouraged!
 * You should do something non-blocking and non-reentrant (e.g. dispatch an
 * event to some thread) and return.
 * The listener is not allowed to add/remove any listeners from the parent
 * stream.
 *
 * If a listener is attached to a track that has already ended, we guarantee
 * to call NotifyEnded.
 */
class MediaStreamTrackListener
{
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaStreamTrackListener)

public:
  virtual void NotifyQueuedChanges(MediaStreamGraph* aGraph,
                                   StreamTime aTrackOffset,
                                   const MediaSegment& aQueuedMedia) {}

  virtual void NotifyPrincipalHandleChanged(MediaStreamGraph* aGraph,
                                            const PrincipalHandle& aNewPrincipalHandle) {}

  virtual void NotifyEnded() {}

  virtual void NotifyRemoved() {}

protected:
  virtual ~MediaStreamTrackListener() {}
};


/**
 * This is a base class for media graph thread listener direct callbacks
 * from within AppendToTrack(). Note that your regular listener will
 * still get NotifyQueuedTrackChanges() callbacks from the MSG thread, so
 * you must be careful to ignore them if AddDirectListener was successful.
 */
class MediaStreamDirectListener : public MediaStreamListener
{
public:
  virtual ~MediaStreamDirectListener() {}

  /*
   * This will be called on any MediaStreamDirectListener added to
   * a SourceMediaStream when AppendToTrack() is called. The MediaSegment
   * will be the RawSegment (unresampled) if available in AppendToTrack().
   * Note that NotifyQueuedTrackChanges() calls will also still occur.
   */
  virtual void NotifyRealtimeData(MediaStreamGraph* aGraph, TrackID aID,
                                  StreamTime aTrackOffset,
                                  uint32_t aTrackEvents,
                                  const MediaSegment& aMedia) {}
};

/**
 * This is a base class for media graph thread listener direct callbacks from
 * within AppendToTrack(). It is bound to a certain track and can only be
 * installed on audio tracks. Once added to a track on any stream in the graph,
 * the graph will try to install it at that track's source of media data.
 *
 * This works for TrackUnionStreams, which will forward the listener to the
 * track's input track if it exists, or wait for it to be created before
 * forwarding if it doesn't.
 * Once it reaches a SourceMediaStream, it can be successfully installed.
 * Other types of streams will fail installation since they are not supported.
 *
 * Note that this listener and others for the same track will still get
 * NotifyQueuedChanges() callbacks from the MSG thread, so you must be careful
 * to ignore them if this listener was successfully installed.
 */
class MediaStreamTrackDirectListener : public MediaStreamTrackListener
{
  friend class SourceMediaStream;
  friend class TrackUnionStream;

public:
  /*
   * This will be called on any MediaStreamTrackDirectListener added to a
   * SourceMediaStream when AppendToTrack() is called for the listener's bound
   * track, using the thread of the AppendToTrack() caller. The MediaSegment
   * will be the RawSegment (unresampled) if available in AppendToTrack().
   * If the track is enabled at the source but has been disabled in one of the
   * streams in between the source and where it was originally added, aMedia
   * will be a disabled version of the one passed to AppendToTrack() as well.
   * Note that NotifyQueuedTrackChanges() calls will also still occur.
   */
  virtual void NotifyRealtimeTrackData(MediaStreamGraph* aGraph,
                                       StreamTime aTrackOffset,
                                       const MediaSegment& aMedia) {}

  /**
   * When a direct listener is processed for installation by the
   * MediaStreamGraph it will be notified with whether the installation was
   * successful or not. The results of this installation are the following:
   * TRACK_NOT_FOUND_AT_SOURCE
   *    We found the source stream of media data for this track, but the track
   *    didn't exist. This should only happen if you try to install the listener
   *    directly to a SourceMediaStream that doesn't contain the given TrackID.
   * TRACK_TYPE_NOT_SUPPORTED
   *    This is the failure when you install the listener to a non-audio track.
   * STREAM_NOT_SUPPORTED
   *    While looking for the data source of this track, we found a MediaStream
   *    that is not a SourceMediaStream or a TrackUnionStream.
   * SUCCESS
   *    Installation was successful and this listener will start receiving
   *    NotifyRealtimeData on the next AppendToTrack().
   */
  enum class InstallationResult {
    TRACK_NOT_FOUND_AT_SOURCE,
    TRACK_TYPE_NOT_SUPPORTED,
    STREAM_NOT_SUPPORTED,
    SUCCESS
  };
  virtual void NotifyDirectListenerInstalled(InstallationResult aResult) {}
  virtual void NotifyDirectListenerUninstalled() {}

protected:
  virtual ~MediaStreamTrackDirectListener() {}
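
  // Helper for the disabled-track path below: replaces the contents of aTo
  // with silence of the same duration as aFrom.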
  void MirrorAndDisableSegment(AudioSegment& aFrom, AudioSegment& aTo)
  {
    aTo.Clear();
    aTo.AppendNullData(aFrom.GetDuration());
  }

  void NotifyRealtimeTrackDataAndApplyTrackDisabling(MediaStreamGraph* aGraph,
                                                     StreamTime aTrackOffset,
                                                     MediaSegment& aMedia)
  {
    if (mDisabledCount == 0) {
      NotifyRealtimeTrackData(aGraph, aTrackOffset, aMedia);
      return;
    }

    if (!mMedia) {
      mMedia = aMedia.CreateEmptyClone();
    }
    if (aMedia.GetType() == MediaSegment::AUDIO) {
      MirrorAndDisableSegment(static_cast<AudioSegment&>(aMedia),
                              static_cast<AudioSegment&>(*mMedia));
    } else {
      MOZ_CRASH("Unsupported media type");
    }
    NotifyRealtimeTrackData(aGraph, aTrackOffset, *mMedia);
  }

  void IncreaseDisabled()
  {
    ++mDisabledCount;
  }
  void DecreaseDisabled()
  {
    --mDisabledCount;
    MOZ_ASSERT(mDisabledCount >= 0, "Double decrease");
  }

  // Matches the number of disabled streams to which this listener is attached.
  // The streams counted are those between the stream the listener was added to
  // and the SourceMediaStream that is the source of the data.
  Atomic<int32_t> mDisabledCount;

  nsAutoPtr<MediaSegment> mMedia;
};
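
/* Illustrative sketch (not part of this header): a hypothetical
 * MediaStreamTrackDirectListener that only starts consuming data once the
 * graph reports a successful installation at the track's source.
 *
 *   class LowLatencyAudioTap : public MediaStreamTrackDirectListener {
 *   public:
 *     void NotifyDirectListenerInstalled(InstallationResult aResult) override
 *     {
 *       mInstalled = (aResult == InstallationResult::SUCCESS);
 *     }
 *     void NotifyDirectListenerUninstalled() override { mInstalled = false; }
 *     void NotifyRealtimeTrackData(MediaStreamGraph* aGraph,
 *                                  StreamTime aTrackOffset,
 *                                  const MediaSegment& aMedia) override
 *     {
 *       // Called from the AppendToTrack() caller's thread with unresampled
 *       // data when available; hand it off without blocking.
 *     }
 *   private:
 *     Atomic<bool> mInstalled{false};
 *   };
 */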

/**
 * This is a base class for main-thread listener callbacks.
 * This callback is invoked on the main thread when the main-thread-visible
 * state of a stream has changed.
 *
 * These methods are called with the media graph monitor held, so
 * reentry into general media graph methods is not possible.
 * You should do something non-blocking and non-reentrant (e.g. dispatch an
 * event) and return. DispatchFromMainThreadAfterNextStreamStateUpdate
 * would be a good choice.
 * The listener is allowed to synchronously remove itself from the stream, but
 * not add or remove any other listeners.
 */
class MainThreadMediaStreamListener {
public:
  virtual void NotifyMainThreadStreamFinished() = 0;
};

/**
 * Helper struct used to keep track of memory usage by AudioNodes.
 */
struct AudioNodeSizes
{
  AudioNodeSizes() : mDomNode(0), mStream(0), mEngine(0), mNodeType() {}
  size_t mDomNode;
  size_t mStream;
  size_t mEngine;
  nsCString mNodeType;
};

/**
 * Helper struct for binding a track listener to a specific TrackID.
 */
template<typename Listener>
struct TrackBound
{
  RefPtr<Listener> mListener;
  TrackID mTrackID;
};

/**
 * A stream of synchronized audio and video data. All (not blocked) streams
 * progress at the same rate --- "real time". Streams cannot seek. The only
 * operation readers can perform on a stream is to read the next data.
 *
 * Consumers of a stream can be reading from it at different offsets, but that
 * should only happen due to the order in which consumers are being run.
 * Those offsets must not diverge in the long term, otherwise we would require
 * unbounded buffering.
 *
 * Streams can be in a "blocked" state. While blocked, a stream does not
 * produce data. A stream can be explicitly blocked via the control API,
 * or implicitly blocked by whatever's generating it (e.g. an underrun in the
 * source resource), or implicitly blocked because something consuming it
 * blocks, or implicitly because it has finished.
 *
 * A stream can be in a "finished" state. "Finished" streams are permanently
 * blocked.
 *
 * Transitions into and out of the "blocked" and "finished" states are managed
 * by the MediaStreamGraph on the media graph thread.
 *
 * We buffer media data ahead of the consumers' reading offsets. It is possible
 * to have buffered data but still be blocked.
 *
 * Any stream can have its audio and video playing when requested. The media
 * stream graph plays audio by constructing audio output streams as necessary.
 * Video is played by setting video frames into a VideoFrameContainer at the right
 * time. To ensure video plays in sync with audio, make sure that the same
 * stream is playing both the audio and video.
 *
 * The data in a stream is managed by StreamTracks. It consists of a set of
 * tracks of various types that can start and end over time.
 *
 * Streams are explicitly managed. The client creates them via
 * MediaStreamGraph::CreateInput/ProcessedMediaStream, and releases them by calling
 * Destroy() when no longer needed (actual destruction will be deferred).
 * The actual object is owned by the MediaStreamGraph. The basic idea is that
 * main thread objects will keep Streams alive as long as necessary (using the
 * cycle collector to clean up whenever needed).
 *
 * We make them refcounted only so that stream-related messages with MediaStream*
 * pointers can be sent to the main thread safely.
 *
 * The lifetimes of MediaStreams are controlled from the main thread.
 * For MediaStreams exposed to the DOM, the lifetime is controlled by the DOM
 * wrapper; the DOM wrappers own their associated MediaStreams. When a DOM
 * wrapper is destroyed, it sends a Destroy message for the associated
 * MediaStream and clears its reference (the last main-thread reference to
 * the object). When the Destroy message is processed on the graph manager
 * thread we immediately release the affected objects (disentangling them
 * from other objects as necessary).
 *
 * This could cause problems for media processing if a MediaStream is
 * destroyed while a downstream MediaStream is still using it. Therefore
 * the DOM wrappers must keep upstream MediaStreams alive as long as they
 * could be being used in the media graph.
 *
 * At any time, however, a set of MediaStream wrappers could be
 * collected via cycle collection. Destroy messages will be sent
 * for those objects in arbitrary order and the MediaStreamGraph has to be able
 * to handle this.
 */

// GetCurrentTime is defined in winbase.h as a zero-argument macro forwarding to
// GetTickCount() and conflicts with MediaStream::GetCurrentTime.
#ifdef GetCurrentTime
#undef GetCurrentTime
#endif

class MediaStream : public mozilla::LinkedListElement<MediaStream>
{
public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaStream)

  MediaStream();

protected:
  // Protected destructor, to discourage deletion outside of Release():
  virtual ~MediaStream()
  {
    MOZ_COUNT_DTOR(MediaStream);
    NS_ASSERTION(mMainThreadDestroyed, "Should have been destroyed already");
    NS_ASSERTION(mMainThreadListeners.IsEmpty(),
                 "All main thread listeners should have been removed");
  }

public:
  /**
   * Returns the graph that owns this stream.
   */
  MediaStreamGraphImpl* GraphImpl();
  MediaStreamGraph* Graph();
  /**
   * Sets the graph that owns this stream. Should only be called once.
   */
  void SetGraphImpl(MediaStreamGraphImpl* aGraph);
  void SetGraphImpl(MediaStreamGraph* aGraph);

  /**
   * Returns the sample rate of the graph.
   */
  TrackRate GraphRate() { return mTracks.GraphRate(); }

  // Control API.
  // Since a stream can be played multiple ways, we need to combine independent
  // volume settings. The aKey parameter is used to keep volume settings
  // separate. Since the stream is always playing the same contents, only
  // a single audio output stream is used; the volumes are combined.
  // Currently only the first enabled audio track is played.
  // XXX change this so all enabled audio tracks are mixed and played.
  virtual void AddAudioOutput(void* aKey);
  virtual void SetAudioOutputVolume(void* aKey, float aVolume);
  virtual void RemoveAudioOutput(void* aKey);
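
  /* Illustrative sketch (not part of this header): playing a stream's audio
   * with a per-consumer volume using the key-based API above. Using "this" as
   * the key is purely an example; any pointer value unique to the consumer
   * works.
   *
   *   void StartPlayback(MediaStream* aStream, float aVolume)
   *   {
   *     aStream->AddAudioOutput(this);
   *     aStream->SetAudioOutputVolume(this, aVolume);
   *   }
   *   void StopPlayback(MediaStream* aStream)
   *   {
   *     aStream->RemoveAudioOutput(this);
   *   }
   */
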
  // Since a stream can be played multiple ways, we need to be able to
  // play to multiple VideoFrameContainers.
  // Only the first enabled video track is played.
  virtual void AddVideoOutput(VideoFrameContainer* aContainer);
  virtual void RemoveVideoOutput(VideoFrameContainer* aContainer);
  // Explicitly suspend. Useful for example if a media element is pausing
  // and we need to stop its stream from emitting its buffered data. As soon as
  // the Suspend message reaches the graph, the stream stops processing. It
  // ignores its inputs and produces silence/no video until Resumed. Its
  // current time does not advance.
  virtual void Suspend();
  virtual void Resume();
  // Events will be dispatched by calling methods of aListener.
  virtual void AddListener(MediaStreamListener* aListener);
  virtual void RemoveListener(MediaStreamListener* aListener);
  virtual void AddTrackListener(MediaStreamTrackListener* aListener,
                                TrackID aTrackID);
  virtual void RemoveTrackListener(MediaStreamTrackListener* aListener,
                                   TrackID aTrackID);

  /**
   * Adds aListener to the source stream of track aTrackID in this stream.
   * When the MediaStreamGraph processes the added listener, it will traverse
   * the graph and add it to the track's source stream (remapping the TrackID
   * along the way).
   * Note that the listener will be notified on the MediaStreamGraph thread
   * with whether the installation of it at the source was successful or not.
   */
  virtual void AddDirectTrackListener(MediaStreamTrackDirectListener* aListener,
                                      TrackID aTrackID);

  /**
   * Removes aListener from the source stream of track aTrackID in this stream.
   * Note that the listener has already been removed if the link between the
   * source of track aTrackID and this stream has been broken (and made track
   * aTrackID end). The caller doesn't have to care about this; removing when
   * the source cannot be found, or when the listener had already been removed,
   * does nothing.
   */
  virtual void RemoveDirectTrackListener(MediaStreamTrackDirectListener* aListener,
                                         TrackID aTrackID);

  // A disabled track has video replaced by black, and audio replaced by
  // silence.
  void SetTrackEnabled(TrackID aTrackID, bool aEnabled);

  // The finish event will be notified by calling methods of aListener. It is
  // the responsibility of the caller to remove aListener before it is
  // destroyed.
  void AddMainThreadListener(MainThreadMediaStreamListener* aListener);
  // It's safe to call this even if aListener is not currently a listener;
  // the call will be ignored.
  void RemoveMainThreadListener(MainThreadMediaStreamListener* aListener)
  {
    MOZ_ASSERT(NS_IsMainThread());
    MOZ_ASSERT(aListener);
    mMainThreadListeners.RemoveElement(aListener);
  }

  /**
   * Ensure a runnable will run on the main thread after running all pending
   * updates that were sent from the graph thread or will be sent before the
   * graph thread receives the next graph update.
   *
   * If the graph has been shut down or destroyed, then the runnable will be
   * dispatched to the event queue immediately. If the graph is non-realtime
   * and has not started, then the runnable will be run
   * synchronously/immediately. (There are no pending updates in these
   * situations.)
   *
   * Main thread only.
   */
  void RunAfterPendingUpdates(already_AddRefed<nsIRunnable> aRunnable);

  // Signal that the client is done with this MediaStream. It will be deleted
  // later. Do not mix usage of Destroy() with RegisterUser()/UnregisterUser().
  // That will cause the MediaStream to be destroyed twice, which will cause
  // some assertions to fail.
  virtual void Destroy();
  // Signal that a client is using this MediaStream. Useful to not have to
  // explicitly manage ownership (responsibility to Destroy()) when there are
  // multiple clients using a MediaStream.
  void RegisterUser();
  // Signal that a client no longer needs this MediaStream. When the number of
  // clients using this MediaStream reaches 0, it will be destroyed.
  void UnregisterUser();

  // Returns the main-thread's view of how much data has been processed by
  // this stream.
  StreamTime GetCurrentTime()
  {
    NS_ASSERTION(NS_IsMainThread(), "Call only on main thread");
    return mMainThreadCurrentTime;
  }
  // Return the main thread's view of whether this stream has finished.
  bool IsFinished()
  {
    NS_ASSERTION(NS_IsMainThread(), "Call only on main thread");
    return mMainThreadFinished;
  }

  bool IsDestroyed()
  {
    NS_ASSERTION(NS_IsMainThread(), "Call only on main thread");
    return mMainThreadDestroyed;
  }

  friend class MediaStreamGraphImpl;
  friend class MediaInputPort;
  friend class AudioNodeExternalInputStream;

  virtual SourceMediaStream* AsSourceStream() { return nullptr; }
  virtual ProcessedMediaStream* AsProcessedStream() { return nullptr; }
  virtual AudioNodeStream* AsAudioNodeStream() { return nullptr; }

  // These Impl methods perform the core functionality of the control methods
  // above, on the media graph thread.
  /**
   * Stop all stream activity and disconnect it from all inputs and outputs.
   * This must be idempotent.
   */
  virtual void DestroyImpl();
  StreamTime GetTracksEnd() { return mTracks.GetEnd(); }
#ifdef DEBUG
  void DumpTrackInfo() { return mTracks.DumpTrackInfo(); }
#endif
  void SetAudioOutputVolumeImpl(void* aKey, float aVolume);
  void AddAudioOutputImpl(void* aKey);
  // Returns true if this stream has an audio output.
  bool HasAudioOutput()
  {
    return !mAudioOutputs.IsEmpty();
  }
  void RemoveAudioOutputImpl(void* aKey);
  void AddVideoOutputImpl(already_AddRefed<VideoFrameContainer> aContainer);
  void RemoveVideoOutputImpl(VideoFrameContainer* aContainer);
  void AddListenerImpl(already_AddRefed<MediaStreamListener> aListener);
  void RemoveListenerImpl(MediaStreamListener* aListener);
  void RemoveAllListenersImpl();
  virtual void AddTrackListenerImpl(already_AddRefed<MediaStreamTrackListener> aListener,
                                    TrackID aTrackID);
  virtual void RemoveTrackListenerImpl(MediaStreamTrackListener* aListener,
                                       TrackID aTrackID);
  virtual void AddDirectTrackListenerImpl(already_AddRefed<MediaStreamTrackDirectListener> aListener,
                                          TrackID aTrackID);
  virtual void RemoveDirectTrackListenerImpl(MediaStreamTrackDirectListener* aListener,
                                             TrackID aTrackID);
  virtual void SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled);

  void AddConsumer(MediaInputPort* aPort)
  {
    mConsumers.AppendElement(aPort);
  }
  void RemoveConsumer(MediaInputPort* aPort)
  {
    mConsumers.RemoveElement(aPort);
  }
  uint32_t ConsumerCount()
  {
    return mConsumers.Length();
  }
  StreamTracks& GetStreamTracks() { return mTracks; }
  GraphTime GetStreamTracksStartTime() { return mTracksStartTime; }

  double StreamTimeToSeconds(StreamTime aTime)
  {
    NS_ASSERTION(0 <= aTime && aTime <= STREAM_TIME_MAX, "Bad time");
    return static_cast<double>(aTime)/mTracks.GraphRate();
  }
  int64_t StreamTimeToMicroseconds(StreamTime aTime)
  {
    NS_ASSERTION(0 <= aTime && aTime <= STREAM_TIME_MAX, "Bad time");
    return (aTime*1000000)/mTracks.GraphRate();
  }
  StreamTime SecondsToNearestStreamTime(double aSeconds)
  {
    NS_ASSERTION(0 <= aSeconds && aSeconds <= TRACK_TICKS_MAX/TRACK_RATE_MAX,
                 "Bad seconds");
    return mTracks.GraphRate() * aSeconds + 0.5;
  }
  StreamTime MicrosecondsToStreamTimeRoundDown(int64_t aMicroseconds) {
    return (aMicroseconds*mTracks.GraphRate())/1000000;
  }

  TrackTicks TimeToTicksRoundUp(TrackRate aRate, StreamTime aTime)
  {
    return RateConvertTicksRoundUp(aRate, mTracks.GraphRate(), aTime);
  }
  StreamTime TicksToTimeRoundDown(TrackRate aRate, TrackTicks aTicks)
  {
    return RateConvertTicksRoundDown(mTracks.GraphRate(), aRate, aTicks);
  }
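  /* Worked example (illustrative, assuming a 48000 Hz graph rate):
   * a StreamTime of 24000 ticks corresponds to
   *   StreamTimeToSeconds(24000)                == 0.5
   *   StreamTimeToMicroseconds(24000)           == 500000
   *   MicrosecondsToStreamTimeRoundDown(500000) == 24000
   * and 480 ticks of stream time (10 ms) expressed at a 44100 Hz track rate is
   *   TimeToTicksRoundUp(44100, 480) == 441.
   */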
  /**
   * Convert graph time to stream time. aTime must be <= mStateComputedTime
   * to ensure we know exactly how much time this stream will be blocked during
   * the interval.
   */
  StreamTime GraphTimeToStreamTimeWithBlocking(GraphTime aTime);
  /**
   * Convert graph time to stream time. This assumes there is no blocking time
   * to take account of, which is always true except between a stream
   * having its blocking time calculated in UpdateGraph and its blocking time
   * taken account of in UpdateCurrentTimeForStreams.
   */
  StreamTime GraphTimeToStreamTime(GraphTime aTime);
  /**
   * Convert stream time to graph time. This assumes there is no blocking time
   * to take account of, which is always true except between a stream
   * having its blocking time calculated in UpdateGraph and its blocking time
   * taken account of in UpdateCurrentTimeForStreams.
   */
  GraphTime StreamTimeToGraphTime(StreamTime aTime);

  bool IsFinishedOnGraphThread() { return mFinished; }
  void FinishOnGraphThread();

  bool HasCurrentData() { return mHasCurrentData; }

  /**
   * Find track by track id.
   */
  StreamTracks::Track* FindTrack(TrackID aID);

  StreamTracks::Track* EnsureTrack(TrackID aTrack);

  virtual void ApplyTrackDisabling(TrackID aTrackID, MediaSegment* aSegment, MediaSegment* aRawSegment = nullptr);

  // Return true if the main thread needs to observe updates from this stream.
  virtual bool MainThreadNeedsUpdates() const
  {
    return true;
  }

  virtual size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const;
  virtual size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const;

  void SetAudioChannelType(dom::AudioChannel aType) { mAudioChannelType = aType; }
  dom::AudioChannel AudioChannelType() const { return mAudioChannelType; }

  bool IsSuspended() { return mSuspendedCount > 0; }
  void IncrementSuspendCount() { ++mSuspendedCount; }
  void DecrementSuspendCount()
  {
    NS_ASSERTION(mSuspendedCount > 0, "Suspend count underrun");
    --mSuspendedCount;
  }

protected:
  void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime, GraphTime aBlockedTime)
  {
    mTracksStartTime += aBlockedTime;
    mTracks.ForgetUpTo(aCurrentTime - mTracksStartTime);
  }

  void NotifyMainThreadListeners()
  {
    NS_ASSERTION(NS_IsMainThread(), "Call only on main thread");

    for (int32_t i = mMainThreadListeners.Length() - 1; i >= 0; --i) {
      mMainThreadListeners[i]->NotifyMainThreadStreamFinished();
    }
    mMainThreadListeners.Clear();
  }

  bool ShouldNotifyStreamFinished()
  {
    NS_ASSERTION(NS_IsMainThread(), "Call only on main thread");
    if (!mMainThreadFinished || mFinishedNotificationSent) {
      return false;
    }

    mFinishedNotificationSent = true;
    return true;
  }

  // This state is all initialized on the main thread but
  // otherwise modified only on the media graph thread.

  // Buffered data. The start of the buffer corresponds to mTracksStartTime.
  // Conceptually the buffer contains everything this stream has ever played,
  // but we forget some prefix of the buffered data to bound the space usage.
  StreamTracks mTracks;
  // The time when the buffered data could be considered to have started playing.
  // This increases over time to account for time the stream was blocked before
  // mCurrentTime.
  GraphTime mTracksStartTime;

  // Client-set volume of this stream
  struct AudioOutput {
    explicit AudioOutput(void* aKey) : mKey(aKey), mVolume(1.0f) {}
    void* mKey;
    float mVolume;
  };
  nsTArray<AudioOutput> mAudioOutputs;
  nsTArray<RefPtr<VideoFrameContainer> > mVideoOutputs;
  // We record the last played video frame to avoid playing the frame again
  // with a different frame id.
  VideoFrame mLastPlayedVideoFrame;
  nsTArray<RefPtr<MediaStreamListener> > mListeners;
  nsTArray<TrackBound<MediaStreamTrackListener>> mTrackListeners;
  nsTArray<MainThreadMediaStreamListener*> mMainThreadListeners;
  nsTArray<TrackID> mDisabledTrackIDs;

  // GraphTime at which this stream starts blocking.
  // This is only valid up to mStateComputedTime. The stream is considered to
  // have not been blocked before mCurrentTime (its mTracksStartTime is increased
  // as necessary to account for that time instead).
  GraphTime mStartBlocking;

  // MediaInputPorts to which this is connected
  nsTArray<MediaInputPort*> mConsumers;

  // Where audio output is going. There is one AudioOutputStream per
  // audio track.
  struct AudioOutputStream
  {
    // When we started audio playback for this track.
    // Add mStream->GetPosition() to find the current audio playback position.
    GraphTime mAudioPlaybackStartTime;
    // Amount of time that we've wanted to play silence because of the stream
    // blocking.
    MediaTime mBlockedAudioTime;
    // Last tick written to the audio output.
    StreamTime mLastTickWritten;
    TrackID mTrackID;
  };
  nsTArray<AudioOutputStream> mAudioOutputStreams;

  /**
   * Number of outstanding suspend operations on this stream. Stream is
   * suspended when this is > 0.
   */
  int32_t mSuspendedCount;

  /**
   * When true, this means the stream will be finished once all
   * buffered data has been consumed.
   */
  bool mFinished;
  /**
   * When true, mFinished is true and we've played all the data in this stream
   * and fired NotifyFinished notifications.
   */
  bool mNotifiedFinished;
  /**
   * When true, the last NotifyBlockingChanged delivered to the listeners
   * indicated that the stream is blocked.
   */
  bool mNotifiedBlocked;
  /**
   * True if some data can be presented by this stream if/when it's unblocked.
   * Set by the stream itself on the MediaStreamGraph thread. Only changes
   * from false to true once a stream has data, since we won't
   * unblock it until there's more data.
   */
  bool mHasCurrentData;
  /**
   * True if mHasCurrentData is true and we've notified listeners.
   */
  bool mNotifiedHasCurrentData;

  // Main-thread views of state
  StreamTime mMainThreadCurrentTime;
  bool mMainThreadFinished;
  bool mFinishedNotificationSent;
  bool mMainThreadDestroyed;
  int mNrOfMainThreadUsers;

  // Our media stream graph. null if destroyed on the graph thread.
  MediaStreamGraphImpl* mGraph;

  dom::AudioChannel mAudioChannelType;
};

/**
 * This is a stream into which a decoder can write audio and video.
 *
 * Audio and video can be written on any thread, but you probably want to
 * always write from the same thread to avoid unexpected interleavings.
 */
class SourceMediaStream : public MediaStream
{
public:
  explicit SourceMediaStream() :
    MediaStream(),
    mMutex("mozilla::media::SourceMediaStream"),
    mUpdateKnownTracksTime(0),
    mPullEnabled(false),
    mUpdateFinished(false),
    mNeedsMixing(false)
  {}

  SourceMediaStream* AsSourceStream() override { return this; }

  // Media graph thread only

  // Users of audio inputs go through the stream so it can track when the
  // last stream referencing an input goes away, so it can close the cubeb
  // input. Also note: callable on any thread (though it bounces through
  // MainThread to set the command if needed).
  nsresult OpenAudioInput(int aID,
                          AudioDataListener *aListener);
  // Note: also implied when Destroy() happens
  void CloseAudioInput();

  void DestroyImpl() override;

  // Call these on any thread.
  /**
   * Enable or disable pulling. When pulling is enabled, NotifyPull
   * gets called on MediaStreamListeners for this stream during the
   * MediaStreamGraph control loop. Pulling is initially disabled.
   * Due to unavoidable race conditions, after a call to SetPullEnabled(false)
   * it is still possible for a NotifyPull to occur.
   */
  void SetPullEnabled(bool aEnabled);

  /**
   * These add/remove DirectListeners, which allow bypassing the graph and any
   * synchronization delays for e.g. PeerConnection, which wants the data ASAP
   * and lets the far-end handle sync and playout timing.
   */
  void NotifyListenersEventImpl(MediaStreamListener::MediaStreamGraphEvent aEvent);
  void NotifyListenersEvent(MediaStreamListener::MediaStreamGraphEvent aEvent);
  void AddDirectListener(MediaStreamDirectListener* aListener);
  void RemoveDirectListener(MediaStreamDirectListener* aListener);

  enum {
    ADDTRACK_QUEUED = 0x01 // Queue track add until FinishAddTracks()
  };
  /**
   * Add a new track to the stream starting at the given base time (which
   * must be greater than or equal to the last time passed to
   * AdvanceKnownTracksTime). Takes ownership of aSegment. aSegment should
   * contain data starting after aStart.
   */
  void AddTrack(TrackID aID, StreamTime aStart, MediaSegment* aSegment,
                uint32_t aFlags = 0)
  {
    AddTrackInternal(aID, GraphRate(), aStart, aSegment, aFlags);
  }

  /**
   * Like AddTrack, but resamples audio from aRate to the graph rate.
   */
  void AddAudioTrack(TrackID aID, TrackRate aRate, StreamTime aStart,
                     AudioSegment* aSegment, uint32_t aFlags = 0)
  {
    AddTrackInternal(aID, aRate, aStart, aSegment, aFlags);
  }

  /**
   * Call after a series of AddTrack or AddAudioTrack calls to implement
   * any pending track adds.
   */
  void FinishAddTracks();

  /**
   * Append media data to a track. Ownership of aSegment remains with the caller,
   * but aSegment is emptied.
   * Returns false if the data was not appended because no such track exists
   * or the stream was already finished.
   */
  bool AppendToTrack(TrackID aID, MediaSegment* aSegment, MediaSegment *aRawSegment = nullptr);
  /**
   * Get the stream time of the end of the data that has been appended so far.
   * Can be called from any thread but won't be useful if it can race with
   * an AppendToTrack call, so should probably just be called from the thread
   * that also calls AppendToTrack.
   */
  StreamTime GetEndOfAppendedData(TrackID aID);
  /**
   * Indicate that a track has ended. Do not do any more API calls
   * affecting this track.
   * Ignored if the track does not exist.
   */
  void EndTrack(TrackID aID);
  /**
   * Indicate that no tracks will be added starting before time aKnownTime.
   * aKnownTime must be >= its value at the last call to AdvanceKnownTracksTime.
   */
  void AdvanceKnownTracksTime(StreamTime aKnownTime);
  /**
   * Indicate that this stream should enter the "finished" state. All tracks
   * must have been ended via EndTrack. The finish time of the stream is
   * when all tracks have ended.
   */
  void FinishWithLockHeld();
  void Finish()
  {
    MutexAutoLock lock(mMutex);
    FinishWithLockHeld();
  }
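
  /* Illustrative sketch (not part of this header): the typical producer
   * sequence for feeding captured audio into a SourceMediaStream. The track id
   * (1), the input rate (44100) and the helper function names are made up for
   * the example.
   *
   *   void StartCapture(SourceMediaStream* aStream)
   *   {
   *     AudioSegment* segment = new AudioSegment();
   *     // AddAudioTrack takes ownership of the segment; queue the add, then
   *     // commit it with FinishAddTracks().
   *     aStream->AddAudioTrack(1, 44100, 0, segment,
   *                            SourceMediaStream::ADDTRACK_QUEUED);
   *     aStream->FinishAddTracks();
   *   }
   *   void OnCapturedAudio(SourceMediaStream* aStream, AudioSegment* aChunk)
   *   {
   *     // Ownership of aChunk stays with the caller; aChunk is emptied.
   *     if (!aStream->AppendToTrack(1, aChunk)) {
   *       // The track is gone or the stream has already finished.
   *     }
   *   }
   *   void StopCapture(SourceMediaStream* aStream)
   *   {
   *     aStream->EndTrack(1);
   *     aStream->Finish();
   *   }
   */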

  // Overriding allows us to hold the mMutex lock while changing the track enable status
  void SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled) override;

  // Overriding allows us to ensure mMutex is locked while changing the track enable status
  void
  ApplyTrackDisabling(TrackID aTrackID, MediaSegment* aSegment,
                      MediaSegment* aRawSegment = nullptr) override {
    mMutex.AssertCurrentThreadOwns();
    MediaStream::ApplyTrackDisabling(aTrackID, aSegment, aRawSegment);
  }

  /**
   * End all tracks and Finish() this stream. Used to voluntarily revoke access
   * to a LocalMediaStream.
   */
  void EndAllTrackAndFinish();

  void RegisterForAudioMixing();

  /**
   * Returns true if this SourceMediaStream contains at least one audio track
   * that is in pending state.
   * This is thread safe, and takes the SourceMediaStream mutex.
   */
  bool HasPendingAudioTrack();

  // XXX need a Reset API

  friend class MediaStreamGraphImpl;

protected:
  enum TrackCommands {
    TRACK_CREATE = MediaStreamListener::TRACK_EVENT_CREATED,
    TRACK_END = MediaStreamListener::TRACK_EVENT_ENDED,
    TRACK_UNUSED = MediaStreamListener::TRACK_EVENT_UNUSED,
  };

  /**
   * Data for each track that hasn't ended.
   */
  struct TrackData {
    TrackID mID;
    // Sample rate of the input data.
    TrackRate mInputRate;
    // Resampler if the rate of the input track does not match the
    // MediaStreamGraph's.
    nsAutoRef<SpeexResamplerState> mResampler;
    int mResamplerChannelCount;
    StreamTime mStart;
    // End-time of data already flushed to the track (excluding mData)
    StreamTime mEndOfFlushedData;
    // Each time the track updates are flushed to the media graph thread,
    // the segment buffer is emptied.
    nsAutoPtr<MediaSegment> mData;
    // Each time the track updates are flushed to the media graph thread,
    // this is cleared.
    uint32_t mCommands;
  };

  bool NeedsMixing();

  void ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSegment* aSegment);

  void AddDirectTrackListenerImpl(already_AddRefed<MediaStreamTrackDirectListener> aListener,
                                  TrackID aTrackID) override;
  void RemoveDirectTrackListenerImpl(MediaStreamTrackDirectListener* aListener,
                                     TrackID aTrackID) override;

  void AddTrackInternal(TrackID aID, TrackRate aRate,
                        StreamTime aStart, MediaSegment* aSegment,
                        uint32_t aFlags);

  TrackData* FindDataForTrack(TrackID aID)
  {
    mMutex.AssertCurrentThreadOwns();
    for (uint32_t i = 0; i < mUpdateTracks.Length(); ++i) {
      if (mUpdateTracks[i].mID == aID) {
        return &mUpdateTracks[i];
      }
    }
    return nullptr;
  }

  /**
   * Notify direct consumers of new data appended to one of the stream's tracks.
   * The data doesn't have to be resampled (though it may be). This is called
   * from AppendToTrack on the thread providing the data, and will call
   * the Listeners on this thread.
   */
  void NotifyDirectConsumers(TrackData* aTrack,
                             MediaSegment* aSegment);

  // Only accessed on the MSG thread. Used to ask the MSGImpl to keep a use
  // count of users of a specific input.
  // XXX Should really be a CubebUtils::AudioDeviceID, but they aren't
  // copyable (opaque pointers)
  RefPtr<AudioDataListener> mInputListener;

  // This must be acquired *before* MediaStreamGraphImpl's lock, if they are
  // held together.
  Mutex mMutex;
  // protected by mMutex
  StreamTime mUpdateKnownTracksTime;
  nsTArray<TrackData> mUpdateTracks;
  nsTArray<TrackData> mPendingTracks;
  nsTArray<RefPtr<MediaStreamDirectListener> > mDirectListeners;
  nsTArray<TrackBound<MediaStreamTrackDirectListener>> mDirectTrackListeners;
  bool mPullEnabled;
  bool mUpdateFinished;
  bool mNeedsMixing;
};

/**
 * The blocking mode decides how a track should be blocked in a MediaInputPort.
 */
enum class BlockingMode
{
  /**
   * BlockingMode CREATION blocks the source track from being created
   * in the destination. It'll end if it already exists.
   */
  CREATION,
  /**
   * BlockingMode END_EXISTING allows a track to be created in the destination
   * but will end it before any data has been passed through.
   */
  END_EXISTING,
};

/**
 * Represents a connection between a ProcessedMediaStream and one of its
 * input streams.
 * We make these refcounted so that stream-related messages with MediaInputPort*
 * pointers can be sent to the main thread safely.
 *
 * A port can be locked to a specific track in the source stream, in which case
 * only this track will be forwarded to the destination stream. TRACK_ANY
 * can be used to signal that all tracks shall be forwarded.
 *
 * When a port is locked to a specific track in the source stream, it may also
 * indicate a TrackID to map this source track to in the destination stream
 * by setting aDestTrack to an explicit ID. When we do this, we must know
 * that this TrackID in the destination stream is available. We assert during
 * processing that the ID is available and that there are no generic input
 * ports already attached to the destination stream.
 * Note that this is currently only handled by TrackUnionStreams.
 *
 * When a port's source or destination stream dies, the stream's DestroyImpl
 * calls MediaInputPort::Disconnect to disconnect the port from
 * the source and destination streams.
 *
 * The lifetimes of MediaInputPort are controlled from the main thread.
 * The media graph adds a reference to the port. When a MediaInputPort is no
 * longer needed, main-thread code sends a Destroy message for the port and
 * clears its reference (the last main-thread reference to the object). When
 * the Destroy message is processed on the graph manager thread we disconnect
 * the port and drop the graph's reference, destroying the object.
 */
class MediaInputPort final
{
private:
  // Do not call this constructor directly. Instead call aDest->AllocateInputPort.
  MediaInputPort(MediaStream* aSource, TrackID& aSourceTrack,
                 ProcessedMediaStream* aDest, TrackID& aDestTrack,
                 uint16_t aInputNumber, uint16_t aOutputNumber)
    : mSource(aSource)
    , mSourceTrack(aSourceTrack)
    , mDest(aDest)
    , mDestTrack(aDestTrack)
    , mInputNumber(aInputNumber)
    , mOutputNumber(aOutputNumber)
    , mGraph(nullptr)
  {
    MOZ_COUNT_CTOR(MediaInputPort);
  }

  // Private destructor, to discourage deletion outside of Release():
  ~MediaInputPort()
  {
    MOZ_COUNT_DTOR(MediaInputPort);
  }

public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaInputPort)

  // Called on graph manager thread
  // Do not call these from outside MediaStreamGraph.cpp!
  void Init();
  // Called during message processing to trigger removal of this port.
  void Disconnect();

  // Control API
  /**
   * Disconnects and destroys the port. The caller must not reference this
   * object again.
   */
  void Destroy();

  // Any thread
  MediaStream* GetSource() { return mSource; }
  TrackID GetSourceTrackId() { return mSourceTrack; }
  ProcessedMediaStream* GetDestination() { return mDest; }
  TrackID GetDestinationTrackId() { return mDestTrack; }

  /**
   * Block aTrackId in the source stream from being passed through the port.
   * Consumers will interpret this track as ended.
   * Returns a pledge that resolves on the main thread after the track block has
   * been applied by the MSG.
   */
  already_AddRefed<media::Pledge<bool, nsresult>> BlockSourceTrackId(TrackID aTrackId,
                                                                     BlockingMode aBlockingMode);
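
  // Hedged usage sketch (illustrative only; `port` and `trackId` are
  // hypothetical and come from the caller):
  //
  //   // Stop a track from ever being created in the destination stream:
  //   port->BlockSourceTrackId(trackId, BlockingMode::CREATION);
  //
  //   // Or let the destination track exist but end it before any data flows,
  //   // keeping the pledge to find out when the MSG has applied the block:
  //   RefPtr<media::Pledge<bool, nsresult>> applied =
  //     port->BlockSourceTrackId(trackId, BlockingMode::END_EXISTING);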

private:
  void BlockSourceTrackIdImpl(TrackID aTrackId, BlockingMode aBlockingMode);

public:
  // Returns true if aTrackId has not been blocked for any reason and this port
  // has not been locked to another track.
  bool PassTrackThrough(TrackID aTrackId) {
    bool blocked = false;
    for (auto pair : mBlockedTracks) {
      if (pair.first() == aTrackId &&
          (pair.second() == BlockingMode::CREATION ||
           pair.second() == BlockingMode::END_EXISTING)) {
        blocked = true;
        break;
      }
    }
    return !blocked && (mSourceTrack == TRACK_ANY || mSourceTrack == aTrackId);
  }

  // Returns true if aTrackId has not been blocked for track creation and this
  // port has not been locked to another track.
  bool AllowCreationOf(TrackID aTrackId) {
    bool blocked = false;
    for (auto pair : mBlockedTracks) {
      if (pair.first() == aTrackId &&
          pair.second() == BlockingMode::CREATION) {
        blocked = true;
        break;
      }
    }
    return !blocked && (mSourceTrack == TRACK_ANY || mSourceTrack == aTrackId);
  }

  uint16_t InputNumber() const { return mInputNumber; }
  uint16_t OutputNumber() const { return mOutputNumber; }

  // Call on graph manager thread
  struct InputInterval {
    GraphTime mStart;
    GraphTime mEnd;
    bool mInputIsBlocked;
  };
  // Find the next time interval starting at or after aTime during which
  // mDest is not blocked and mSource's blocking status does not change.
  InputInterval GetNextInputInterval(GraphTime aTime);
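
  // Graph-thread sketch of walking this port's unblocked intervals
  // (illustrative only; aFrom/aTo, `port` and the processing step are
  // hypothetical, and std::min is assumed to be available):
  //
  //   GraphTime t = aFrom;
  //   while (t < aTo) {
  //     InputInterval interval = port->GetNextInputInterval(t);
  //     interval.mEnd = std::min(interval.mEnd, aTo);
  //     if (!interval.mInputIsBlocked) {
  //       /* copy/process source data for [interval.mStart, interval.mEnd) */
  //     }
  //     t = interval.mEnd;
  //   }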

  /**
   * Returns the graph that owns this port.
   */
  MediaStreamGraphImpl* GraphImpl();
  MediaStreamGraph* Graph();
  /**
   * Sets the graph that owns this port. Should only be called once.
   */
  void SetGraphImpl(MediaStreamGraphImpl* aGraph);

  size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
  {
    size_t amount = 0;

    // Not owned:
    // - mSource
    // - mDest
    // - mGraph
    return amount;
  }

  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
  {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

private:
  friend class MediaStreamGraphImpl;
  friend class MediaStream;
  friend class ProcessedMediaStream;
  // Never modified after Init()
  MediaStream* mSource;
  TrackID mSourceTrack;
  ProcessedMediaStream* mDest;
  TrackID mDestTrack;
  // The input and output numbers are optional, and are currently only used by
  // Web Audio.
  const uint16_t mInputNumber;
  const uint16_t mOutputNumber;

  typedef Pair<TrackID, BlockingMode> BlockedTrack;
  nsTArray<BlockedTrack> mBlockedTracks;

  // Our media stream graph
  MediaStreamGraphImpl* mGraph;
};

/**
 * This stream processes zero or more input streams in parallel to produce
 * its output. The details of how the output is produced are handled by
 * subclasses overriding the ProcessInput method.
 */
class ProcessedMediaStream : public MediaStream
{
public:
  explicit ProcessedMediaStream()
    : MediaStream(), mAutofinish(false), mCycleMarker(0)
  {}

  // Control API.
  /**
   * Allocates a new input port attached to source aStream.
   * The port can be removed again by calling MediaInputPort::Destroy().
   *
   * The input port is tied to aTrackID in the source stream.
   * aTrackID can be set to TRACK_ANY to automatically forward all tracks from
   * aStream.
   *
   * If aTrackID is an explicit ID, aDestTrackID can also be made explicit
   * to ensure that the track is assigned this ID in the destination stream.
   * To avoid intermittent TrackID collisions the destination stream may not
   * have any existing generic input ports (with TRACK_ANY source track) when
   * you allocate an input port with a destination TrackID.
   *
   * To end a track in the destination stream forwarded with TRACK_ANY,
   * it can be blocked in the input port through
   * MediaInputPort::BlockSourceTrackId().
   *
   * Tracks in aBlockedTracks will be blocked in the input port initially. This
   * ensures that they don't get created by the MSG-thread before we can
   * block them via BlockSourceTrackId() on the main thread.
   */
  already_AddRefed<MediaInputPort>
  AllocateInputPort(MediaStream* aStream,
                    TrackID aTrackID = TRACK_ANY,
                    TrackID aDestTrackID = TRACK_ANY,
                    uint16_t aInputNumber = 0,
                    uint16_t aOutputNumber = 0,
                    nsTArray<TrackID>* aBlockedTracks = nullptr);
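
  // A hedged sketch of wiring a source into a processed stream and tearing it
  // down again (illustrative only; `source`, `dest` and the track IDs are
  // hypothetical):
  //
  //   // Forward every track of `source` into `dest`:
  //   RefPtr<MediaInputPort> port = dest->AllocateInputPort(source);
  //
  //   // Or forward only track 1 of `source`, mapped to track 2 in `dest`:
  //   RefPtr<MediaInputPort> locked = dest->AllocateInputPort(source, 1, 2);
  //
  //   // When done, destroy the ports on the main thread; the graph drops its
  //   // reference once the Destroy message has been processed.
  //   port->Destroy();
  //   locked->Destroy();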

  /**
   * Force this stream into the finished state.
   */
  void Finish();
  /**
   * Set the autofinish flag on this stream (defaults to false). When this flag
   * is set, and all input streams are in the finished state (including if there
   * are no input streams), this stream automatically enters the finished state.
   */
  void SetAutofinish(bool aAutofinish);

  ProcessedMediaStream* AsProcessedStream() override { return this; }

  friend class MediaStreamGraphImpl;

  // Do not call these from outside MediaStreamGraph.cpp!
  virtual void AddInput(MediaInputPort* aPort);
  virtual void RemoveInput(MediaInputPort* aPort)
  {
    mInputs.RemoveElement(aPort);
  }
  bool HasInputPort(MediaInputPort* aPort)
  {
    return mInputs.Contains(aPort);
  }
  uint32_t InputPortCount()
  {
    return mInputs.Length();
  }
  virtual MediaStream* GetInputStreamFor(TrackID aTrackID) { return nullptr; }
  virtual TrackID GetInputTrackIDFor(TrackID aTrackID) { return TRACK_NONE; }
  void DestroyImpl() override;
  /**
   * This gets called after we've computed the blocking states for all
   * streams (mBlocked is up to date up to mStateComputedTime).
   * Also, we've produced output for all streams up to this one. If this stream
   * is not in a cycle, then all its source streams have produced data.
   * Generate output from aFrom to aTo.
   * This will be called on streams that have finished. Most stream types should
   * just return immediately if IsFinishedOnGraphThread(), but some may wish to
   * update internal state (see AudioNodeStream).
   * ProcessInput is allowed to call FinishOnGraphThread only if ALLOW_FINISH
   * is in aFlags. (This flag will be set when aTo >= mStateComputedTime, i.e.
   * when we're producing the last block of data we need to produce.) Otherwise
   * we can get into a situation where we've determined the stream should not
   * block before mStateComputedTime, but the stream finishes before
   * mStateComputedTime, violating the invariant that finished streams are blocked.
   */
  enum {
    ALLOW_FINISH = 0x01
  };
  virtual void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) = 0;
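
  // A minimal ProcessInput override sketch for a subclass (illustrative only;
  // MyProcessedStream and its output/termination logic are hypothetical):
  //
  //   void MyProcessedStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
  //                                        uint32_t aFlags)
  //   {
  //     if (IsFinishedOnGraphThread()) {
  //       return;  // most stream types produce nothing once finished
  //     }
  //     /* ... produce output for [aFrom, aTo) from mInputs ... */
  //     bool noMoreInput = /* hypothetical check on the inputs */ false;
  //     if ((aFlags & ALLOW_FINISH) && noMoreInput) {
  //       FinishOnGraphThread();  // only legal when ALLOW_FINISH is set
  //     }
  //   }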

  void SetAutofinishImpl(bool aAutofinish) { mAutofinish = aAutofinish; }

  // Only valid after MediaStreamGraphImpl::UpdateStreamOrder() has run.
  // A DelayNode is considered to break a cycle and so this will not return
  // true for echo loops, only for muted cycles.
  bool InMutedCycle() const { return mCycleMarker; }

  size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override
  {
    size_t amount = MediaStream::SizeOfExcludingThis(aMallocSizeOf);
    // Not owned:
    // - mInputs elements
    amount += mInputs.ShallowSizeOfExcludingThis(aMallocSizeOf);
    return amount;
  }

  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override
  {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

protected:
  // This state is all accessed only on the media graph thread.

  // The list of all inputs that are currently enabled or waiting to be enabled.
  nsTArray<MediaInputPort*> mInputs;
  bool mAutofinish;
  // After UpdateStreamOrder(), mCycleMarker is either 0 or 1 to indicate
  // whether this stream is in a muted cycle. During ordering it can contain
  // other marker values - see MediaStreamGraphImpl::UpdateStreamOrder().
  uint32_t mCycleMarker;
};

/**
 * There can be multiple MediaStreamGraphs per process: one per AudioChannel.
 * Additionally, each OfflineAudioContext object creates its own
 * MediaStreamGraph object.
 */
class MediaStreamGraph
{
public:

  // We ensure that the graph current time advances in multiples of
  // IdealAudioBlockSize()/AudioStream::PreferredSampleRate(). A stream that
  // never blocks and has a track with the ideal audio rate will produce audio
  // in multiples of the block size.

  // Initializing a graph that outputs audio can take a long time on some
  // platforms. Code that wants to output audio at some point can express the
  // fact that it will need an audio stream by passing AUDIO_THREAD_DRIVER
  // when getting an instance of MediaStreamGraph, so that the graph starts
  // with the right driver.
  enum GraphDriverType {
    AUDIO_THREAD_DRIVER,
    SYSTEM_THREAD_DRIVER,
    OFFLINE_THREAD_DRIVER
  };
  // Main thread only
  static MediaStreamGraph* GetInstance(GraphDriverType aGraphDriverRequested,
                                       dom::AudioChannel aChannel);
  static MediaStreamGraph* CreateNonRealtimeInstance(TrackRate aSampleRate);
  // Idempotent
  static void DestroyNonRealtimeInstance(MediaStreamGraph* aGraph);
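
  // Hedged sketch of obtaining real-time graphs (illustrative only; the
  // AudioChannel value shown is an assumption and depends on the caller):
  //
  //   // A graph that will need audio output, so it starts with an
  //   // audio-callback driver:
  //   MediaStreamGraph* graph =
  //     MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER,
  //                                   dom::AudioChannel::Normal);
  //
  //   // A graph that only needs a system-clock driver:
  //   MediaStreamGraph* silent =
  //     MediaStreamGraph::GetInstance(MediaStreamGraph::SYSTEM_THREAD_DRIVER,
  //                                   dom::AudioChannel::Normal);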

  virtual nsresult OpenAudioInput(int aID,
                                  AudioDataListener *aListener) {
    return NS_ERROR_FAILURE;
  }
  virtual void CloseAudioInput(AudioDataListener *aListener) {}

  // Control API.
  /**
   * Create a stream that a media decoder (or some other source of
   * media data, such as a camera) can write to.
   */
  SourceMediaStream* CreateSourceStream();
  /**
   * Create a stream that will form the union of the tracks of its input
   * streams.
   * A TrackUnionStream contains all the tracks of all its input streams.
   * Adding a new input stream makes that stream's tracks immediately appear as new
   * tracks starting at the time the input stream was added.
   * Removing an input stream makes the output tracks corresponding to the
   * removed tracks immediately end.
   * For each added track, the track ID of the output track is the track ID
   * of the input track or one plus the maximum ID of all previously added
   * tracks, whichever is greater.
   * TODO at some point we will probably need to add API to select
   * particular tracks of each input stream.
   */
  ProcessedMediaStream* CreateTrackUnionStream();
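
  // Worked example of the track ID rule above (illustrative): if the first
  // input stream contributes tracks 1 and 2, they appear in the union as
  // tracks 1 and 2. If a second input is then added whose only track is also
  // ID 1, its output track gets max(1, 1 + 2) = 3, avoiding a collision.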

  /**
   * Create a stream that will mix all its audio input.
   */
  ProcessedMediaStream* CreateAudioCaptureStream(TrackID aTrackId);

  /**
   * Add a new stream to the graph. Main thread.
   */
  void AddStream(MediaStream* aStream);

  /* From the main thread, ask the MSG to send back an event when the graph
   * thread is running, and audio is being processed. */
  void NotifyWhenGraphStarted(AudioNodeStream* aNodeStream);
  /* From the main thread, suspend, resume or close an AudioContext.
   * aStreams are the streams of all the AudioNodes of the AudioContext that
   * need to be suspended or resumed. This can be empty if this is a second
   * consecutive suspend call and all the nodes are already suspended.
   *
   * This can possibly pause the graph thread, releasing system resources, if
   * all streams have been suspended/closed.
   *
   * When the operation is complete, aPromise is resolved.
   */
  void ApplyAudioContextOperation(MediaStream* aDestinationStream,
                                  const nsTArray<MediaStream*>& aStreams,
                                  dom::AudioContextOperation aState,
                                  void* aPromise);
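
  // Hedged main-thread sketch of suspending an AudioContext's streams
  // (illustrative only; `graph`, `destinationStream` and `promise` are
  // hypothetical, the Suspend enumerator is assumed, and the promise is only
  // passed through as an opaque pointer):
  //
  //   nsTArray<MediaStream*> streams;
  //   /* ... collect the streams of the context's AudioNodes ... */
  //   graph->ApplyAudioContextOperation(destinationStream, streams,
  //                                     dom::AudioContextOperation::Suspend,
  //                                     promise);
  //   // `promise` is resolved on the main thread once the graph thread has
  //   // actually applied the operation.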

  bool IsNonRealtime() const;
  /**
   * Start processing non-realtime for a specific number of ticks.
   */
  void StartNonRealtimeProcessing(uint32_t aTicksToProcess);
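
  // Hedged sketch of driving an offline (non-realtime) graph (illustrative
  // only; the 44100 Hz rate and `ticksToRender` are arbitrary assumptions):
  //
  //   MediaStreamGraph* offline =
  //     MediaStreamGraph::CreateNonRealtimeInstance(44100);
  //   MOZ_ASSERT(offline->IsNonRealtime());
  //   /* ... build the streams to render ... */
  //   offline->StartNonRealtimeProcessing(ticksToRender);
  //   /* ... once rendering has completed ... */
  //   MediaStreamGraph::DestroyNonRealtimeInstance(offline);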

  /**
   * Media graph thread only.
   * Dispatches a runnable that will run on the main thread after the next
   * update of all main-thread stream state.
   * Should only be called during MediaStreamListener callbacks or during
   * ProcessedMediaStream::ProcessInput().
   */
  virtual void DispatchToMainThreadAfterStreamStateUpdate(already_AddRefed<nsIRunnable> aRunnable)
  {
    *mPendingUpdateRunnables.AppendElement() = aRunnable;
  }
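
  // Hedged sketch of use from a MediaStreamListener callback on the graph
  // thread (illustrative only; `mGraph` and the runnable are hypothetical):
  //
  //   nsCOMPtr<nsIRunnable> event = /* ... work to run on the main thread ... */;
  //   mGraph->DispatchToMainThreadAfterStreamStateUpdate(event.forget());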

  /**
   * Returns the graph sample rate in Hz.
   */
  TrackRate GraphRate() const { return mSampleRate; }

  void RegisterCaptureStreamForWindow(uint64_t aWindowId,
                                      ProcessedMediaStream* aCaptureStream);
  void UnregisterCaptureStreamForWindow(uint64_t aWindowId);
  already_AddRefed<MediaInputPort> ConnectToCaptureStream(
    uint64_t aWindowId, MediaStream* aMediaStream);

  /**
   * Pass the data going to the speakers from the GraphDriver's DataCallback
   * to any registered listeners (for echo cancellation).
   */
  void NotifyOutputData(AudioDataValue* aBuffer, size_t aFrames,
                        TrackRate aRate, uint32_t aChannels);

protected:
  explicit MediaStreamGraph(TrackRate aSampleRate)
    : mSampleRate(aSampleRate)
  {
    MOZ_COUNT_CTOR(MediaStreamGraph);
  }
  virtual ~MediaStreamGraph()
  {
    MOZ_COUNT_DTOR(MediaStreamGraph);
  }

  // Media graph thread only
  nsTArray<nsCOMPtr<nsIRunnable> > mPendingUpdateRunnables;

  /**
   * Sample rate at which this graph runs. For real time graphs, this is
   * the rate of the audio mixer. For offline graphs, this is the rate specified
   * at construction.
   */
  TrackRate mSampleRate;

  /**
   * Lifetime is controlled by OpenAudioInput/CloseAudioInput. Destroying the
   * listener without removing it is an error; callers should assert on that.
   */
  nsTArray<AudioDataListener *> mAudioInputs;
};

} // namespace mozilla

#endif /* MOZILLA_MEDIASTREAMGRAPH_H_ */
|