Bug 1208371 - Forward declare MediaStreamGraph classes in MediaPipeline.h. r=bwc

MozReview-Commit-ID: DsDHelzMsz0

--HG--
extra : rebase_source : b83f9298795de1ae803930f17df4ba18f00e8425
This commit is contained in:
Andreas Pehrson 2016-01-26 16:19:08 +08:00
Parent b40a807720
Commit 785d6859a4
4 changed files: 683 additions and 605 deletions
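The change this commit makes is the classic header-trimming idiom: the nested listener/renderer classes (PipelineListener, GenericReceiveListener, TrackAddedCallback, PipelineRenderer) move out of MediaPipeline.h into MediaPipeline.cpp, the header keeps only forward declarations of them and of the MediaStreamGraph types they touch (SourceMediaStream, dom::MediaStreamTrack), and the previously inline constructors and destructors become out-of-line declarations so the smart-pointer members can refer to incomplete types. Below is a minimal, generic sketch of that idiom, not the actual Mozilla code: std::unique_ptr stands in for RefPtr/nsAutoPtr, and Widget, Listener and widget.h are made-up names.

// widget.h -- the header exposes only a forward declaration; heavy includes stay out.
#pragma once
#include <memory>

class Listener;                        // defined in widget.cpp only

class Widget {
 public:
  Widget();                            // out-of-line: constructing Listener needs its full type
  ~Widget();                           // out-of-line: unique_ptr must delete a complete type
  void Start();
 private:
  std::unique_ptr<Listener> listener_; // an incomplete type is fine for the member itself
};

// widget.cpp -- the full class definition and any heavy dependencies live here.
#include "widget.h"

class Listener {                       // plays the role of PipelineListener
 public:
  void OnData() {}
};

Widget::Widget() : listener_(new Listener()) {}
Widget::~Widget() = default;
void Widget::Start() { listener_->OnData(); }

Consumers of widget.h no longer recompile when Listener's internals change, which is the same payoff the MediaPipeline.h diff below gets by dropping MediaStreamGraph.h and DOMMediaStream.h from the header.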

View file

@@ -22,6 +22,7 @@
#include "LayersLogging.h"
#include "ImageTypes.h"
#include "ImageContainer.h"
#include "DOMMediaStream.h"
#include "MediaStreamTrack.h"
#include "VideoUtils.h"
#ifdef WEBRTC_GONK
@@ -33,6 +34,7 @@
#include "nsError.h"
#include "AudioSegment.h"
#include "MediaSegment.h"
#include "MediaPipelineFilter.h"
#include "databuffer.h"
#include "transportflow.h"
#include "transportlayer.h"
@@ -70,6 +72,44 @@ namespace mozilla {
static char kDTLSExporterLabel[] = "EXTRACTOR-dtls_srtp";
MediaPipeline::MediaPipeline(const std::string& pc,
Direction direction,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
const std::string& track_id,
int level,
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter)
: direction_(direction),
track_id_(track_id),
level_(level),
conduit_(conduit),
rtp_(rtp_transport, rtcp_transport ? RTP : MUX),
rtcp_(rtcp_transport ? rtcp_transport : rtp_transport,
rtcp_transport ? RTCP : MUX),
main_thread_(main_thread),
sts_thread_(sts_thread),
rtp_packets_sent_(0),
rtcp_packets_sent_(0),
rtp_packets_received_(0),
rtcp_packets_received_(0),
rtp_bytes_sent_(0),
rtp_bytes_received_(0),
pc_(pc),
description_(),
filter_(filter),
rtp_parser_(webrtc::RtpHeaderParser::Create()) {
// To indicate rtcp-mux rtcp_transport should be nullptr.
// Therefore it's an error to send in the same flow for
// both rtp and rtcp.
MOZ_ASSERT(rtp_transport != rtcp_transport);
// PipelineTransport() will access this->sts_thread_; moved here for safety
transport_ = new PipelineTransport(this);
}
MediaPipeline::~MediaPipeline() {
ASSERT_ON_THREAD(main_thread_);
MOZ_MTLOG(ML_INFO, "Destroying MediaPipeline: " << description_);
@@ -642,6 +682,119 @@ void MediaPipeline::PacketReceived(TransportLayer *layer,
}
}
class MediaPipelineTransmit::PipelineListener
: public MediaStreamTrackDirectListener
{
friend class MediaPipelineTransmit;
public:
explicit PipelineListener(const RefPtr<MediaSessionConduit>& conduit)
: conduit_(conduit),
track_id_(TRACK_INVALID),
mMutex("MediaPipelineTransmit::PipelineListener"),
track_id_external_(TRACK_INVALID),
active_(false),
enabled_(false),
direct_connect_(false),
packetizer_(nullptr)
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
, last_img_(-1)
#endif // MOZILLA_EXTERNAL_LINKAGE
{
}
~PipelineListener()
{
if (!NS_IsMainThread()) {
// release conduit on mainthread. Must use forget()!
nsresult rv = NS_DispatchToMainThread(new
ConduitDeleteEvent(conduit_.forget()));
MOZ_ASSERT(!NS_FAILED(rv),"Could not dispatch conduit shutdown to main");
if (NS_FAILED(rv)) {
MOZ_CRASH();
}
} else {
conduit_ = nullptr;
}
}
// Dispatches setting the internal TrackID to TRACK_INVALID to the media
// graph thread to keep it in sync with other MediaStreamGraph operations
// like RemoveListener() and AddListener(). The TrackID will be updated on
// the next NewData() callback.
void UnsetTrackId(MediaStreamGraphImpl* graph);
void SetActive(bool active) { active_ = active; }
void SetEnabled(bool enabled) { enabled_ = enabled; }
// Implement MediaStreamTrackListener
void NotifyQueuedChanges(MediaStreamGraph* aGraph,
StreamTime aTrackOffset,
const MediaSegment& aQueuedMedia) override;
// Implement MediaStreamTrackDirectListener
void NotifyRealtimeTrackData(MediaStreamGraph* aGraph,
StreamTime aTrackOffset,
const MediaSegment& aMedia) override;
void NotifyDirectListenerInstalled(InstallationResult aResult) override;
void NotifyDirectListenerUninstalled() override;
private:
void UnsetTrackIdImpl() {
MutexAutoLock lock(mMutex);
track_id_ = track_id_external_ = TRACK_INVALID;
}
void NewData(MediaStreamGraph* graph,
StreamTime offset,
const MediaSegment& media);
virtual void ProcessAudioChunk(AudioSessionConduit *conduit,
TrackRate rate, AudioChunk& chunk);
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
virtual void ProcessVideoChunk(VideoSessionConduit *conduit,
VideoChunk& chunk);
#endif // MOZILLA_EXTERNAL_LINKAGE
RefPtr<MediaSessionConduit> conduit_;
// May be TRACK_INVALID until we see data from the track
TrackID track_id_; // this is the current TrackID this listener is attached to
Mutex mMutex;
// protected by mMutex
// May be TRACK_INVALID until we see data from the track
TrackID track_id_external_; // this is queried from other threads
// active is true if there is a transport to send on
mozilla::Atomic<bool> active_;
// enabled is true if the media access control permits sending
// actual content; when false you get black/silence
mozilla::Atomic<bool> enabled_;
// Written and read on the MediaStreamGraph thread
bool direct_connect_;
nsAutoPtr<AudioPacketizer<int16_t, int16_t>> packetizer_;
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
int32_t last_img_; // serial number of last Image
#endif // MOZILLA_EXTERNAL_LINKAGE
};
MediaPipelineTransmit::MediaPipelineTransmit(
const std::string& pc,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
dom::MediaStreamTrack* domtrack,
const std::string& track_id,
int level,
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter) :
MediaPipeline(pc, TRANSMIT, main_thread, sts_thread, track_id, level,
conduit, rtp_transport, rtcp_transport, filter),
listener_(new PipelineListener(conduit)),
domtrack_(domtrack)
{}
nsresult MediaPipelineTransmit::Init() {
AttachToTrack(track_id_);
@@ -1315,19 +1468,29 @@ void MediaPipelineTransmit::PipelineListener::ProcessVideoChunk(
}
#endif
nsresult MediaPipelineReceiveAudio::Init() {
ASSERT_ON_THREAD(main_thread_);
MOZ_MTLOG(ML_DEBUG, __FUNCTION__);
class TrackAddedCallback {
public:
virtual void TrackAdded(TrackTicks current_ticks) = 0;
description_ = pc_ + "| Receive audio[";
description_ += track_id_;
description_ += "]";
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TrackAddedCallback);
listener_->AddSelf(new AudioSegment());
protected:
virtual ~TrackAddedCallback() {}
};
return MediaPipelineReceive::Init();
}
class GenericReceiveListener;
class GenericReceiveCallback : public TrackAddedCallback
{
public:
explicit GenericReceiveCallback(GenericReceiveListener* listener)
: listener_(listener) {}
void TrackAdded(TrackTicks time);
private:
RefPtr<GenericReceiveListener> listener_;
};
// Add a track and listener on the MSG thread using the MSG command queue
static void AddTrackAndListener(MediaStream* source,
@@ -1415,22 +1578,48 @@ static void AddTrackAndListener(MediaStream* source,
" on MediaStream " << source);
}
void GenericReceiveListener::AddSelf(MediaSegment* segment) {
class GenericReceiveListener : public MediaStreamListener
{
public:
GenericReceiveListener(SourceMediaStream *source, TrackID track_id,
TrackRate track_rate, bool queue_track)
: source_(source),
track_id_(track_id),
track_rate_(track_rate),
played_ticks_(0),
queue_track_(queue_track),
principal_handle_(PRINCIPAL_HANDLE_NONE) {}
virtual ~GenericReceiveListener() {}
void AddSelf(MediaSegment* segment)
{
RefPtr<TrackAddedCallback> callback = new GenericReceiveCallback(this);
AddTrackAndListener(source_, track_id_, track_rate_, this, segment, callback,
queue_track_);
}
}
void SetPlayedTicks(TrackTicks time) {
played_ticks_ = time;
}
void EndTrack() {
source_->EndTrack(track_id_);
}
#ifndef USE_FAKE_MEDIA_STREAMS
void GenericReceiveListener::SetPrincipalHandle_m(const PrincipalHandle& principal_handle)
{
// Must be called on the main thread
void SetPrincipalHandle_m(const PrincipalHandle& principal_handle)
{
class Message : public ControlMessage
{
public:
Message(GenericReceiveListener* listener,
MediaStream* stream,
const PrincipalHandle& principal_handle)
: ControlMessage(stream), listener_(listener), principal_handle_(principal_handle)
: ControlMessage(stream),
listener_(listener),
principal_handle_(principal_handle)
{}
void Run() override {
@@ -1442,25 +1631,92 @@ void GenericReceiveListener::SetPrincipalHandle_m(const PrincipalHandle& princip
};
source_->GraphImpl()->AppendMessage(MakeUnique<Message>(this, source_, principal_handle));
}
}
void GenericReceiveListener::SetPrincipalHandle_msg(const PrincipalHandle& principal_handle)
{
// Must be called on the MediaStreamGraph thread
void SetPrincipalHandle_msg(const PrincipalHandle& principal_handle)
{
principal_handle_ = principal_handle;
}
}
#endif // USE_FAKE_MEDIA_STREAMS
MediaPipelineReceiveAudio::PipelineListener::PipelineListener(
SourceMediaStream * source, TrackID track_id,
const RefPtr<MediaSessionConduit>& conduit, bool queue_track)
: GenericReceiveListener(source, track_id, DEFAULT_SAMPLE_RATE, queue_track), // XXX rate assumption
conduit_(conduit)
protected:
SourceMediaStream *source_;
TrackID track_id_;
TrackRate track_rate_;
TrackTicks played_ticks_;
bool queue_track_;
PrincipalHandle principal_handle_;
};
void GenericReceiveCallback::TrackAdded(TrackTicks time)
{
MOZ_ASSERT(track_rate_%100 == 0);
listener_->SetPlayedTicks(time);
}
void MediaPipelineReceiveAudio::PipelineListener::
NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) {
MediaPipelineReceive::MediaPipelineReceive(
const std::string& pc,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
SourceMediaStream *stream,
const std::string& track_id,
int level,
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter) :
MediaPipeline(pc, RECEIVE, main_thread, sts_thread,
track_id, level, conduit, rtp_transport,
rtcp_transport, filter),
stream_(stream),
segments_added_(0)
{
MOZ_ASSERT(stream_);
}
MediaPipelineReceive::~MediaPipelineReceive()
{
MOZ_ASSERT(!stream_); // Check that we have shut down already.
}
class MediaPipelineReceiveAudio::PipelineListener
: public GenericReceiveListener
{
public:
PipelineListener(SourceMediaStream * source, TrackID track_id,
const RefPtr<MediaSessionConduit>& conduit,
bool queue_track)
: GenericReceiveListener(source, track_id, DEFAULT_SAMPLE_RATE, queue_track), // XXX rate assumption
conduit_(conduit)
{
MOZ_ASSERT(track_rate_%100 == 0);
}
~PipelineListener()
{
if (!NS_IsMainThread()) {
// release conduit on mainthread. Must use forget()!
nsresult rv = NS_DispatchToMainThread(new
ConduitDeleteEvent(conduit_.forget()));
MOZ_ASSERT(!NS_FAILED(rv),"Could not dispatch conduit shutdown to main");
if (NS_FAILED(rv)) {
MOZ_CRASH();
}
} else {
conduit_ = nullptr;
}
}
// Implement MediaStreamListener
void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
StreamTime offset,
uint32_t events,
const MediaSegment& queued_media,
MediaStream* input_stream,
TrackID input_tid) override {}
void NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) override
{
MOZ_ASSERT(source_);
if (!source_) {
MOZ_MTLOG(ML_ERROR, "NotifyPull() called from a non-SourceMediaStream");
@@ -1540,29 +1796,66 @@ NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) {
return;
}
}
}
private:
RefPtr<MediaSessionConduit> conduit_;
};
MediaPipelineReceiveAudio::MediaPipelineReceiveAudio(
const std::string& pc,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
SourceMediaStream* stream,
const std::string& media_stream_track_id,
TrackID numeric_track_id,
int level,
RefPtr<AudioSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter,
bool queue_track) :
MediaPipelineReceive(pc, main_thread, sts_thread,
stream, media_stream_track_id, level, conduit,
rtp_transport, rtcp_transport, filter),
listener_(new PipelineListener(stream, numeric_track_id, conduit,
queue_track))
{}
void MediaPipelineReceiveAudio::DetachMedia()
{
ASSERT_ON_THREAD(main_thread_);
if (stream_) {
stream_->RemoveListener(listener_);
stream_ = nullptr;
}
}
nsresult MediaPipelineReceiveVideo::Init() {
nsresult MediaPipelineReceiveAudio::Init() {
ASSERT_ON_THREAD(main_thread_);
MOZ_MTLOG(ML_DEBUG, __FUNCTION__);
description_ = pc_ + "| Receive video[";
description_ = pc_ + "| Receive audio[";
description_ += track_id_;
description_ += "]";
#if defined(MOZILLA_INTERNAL_API)
listener_->AddSelf(new VideoSegment());
#endif
// Always happens before we can DetachMedia()
static_cast<VideoSessionConduit *>(conduit_.get())->
AttachRenderer(renderer_);
listener_->AddSelf(new AudioSegment());
return MediaPipelineReceive::Init();
}
MediaPipelineReceiveVideo::PipelineListener::PipelineListener(
SourceMediaStream* source, TrackID track_id, bool queue_track)
#ifndef USE_FAKE_MEDIA_STREAMS
void MediaPipelineReceiveAudio::SetPrincipalHandle_m(const PrincipalHandle& principal_handle)
{
listener_->SetPrincipalHandle_m(principal_handle);
}
#endif // USE_FAKE_MEDIA_STREAMS
class MediaPipelineReceiveVideo::PipelineListener
: public GenericReceiveListener {
public:
PipelineListener(SourceMediaStream * source, TrackID track_id,
bool queue_track)
: GenericReceiveListener(source, track_id, source->GraphRate(), queue_track),
width_(640),
height_(480),
@@ -1570,31 +1863,80 @@ MediaPipelineReceiveVideo::PipelineListener::PipelineListener(
image_container_(),
image_(),
#endif
monitor_("Video PipelineListener") {
monitor_("Video PipelineListener")
{
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
image_container_ = LayerManager::CreateImageContainer();
#endif
}
}
void MediaPipelineReceiveVideo::PipelineListener::RenderVideoFrame(
const unsigned char* buffer,
// Implement MediaStreamListener
void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
StreamTime offset,
uint32_t events,
const MediaSegment& queued_media,
MediaStream* input_stream,
TrackID input_tid) override {}
void NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) override
{
ReentrantMonitorAutoEnter enter(monitor_);
#if defined(MOZILLA_INTERNAL_API)
RefPtr<Image> image = image_;
// our constructor sets track_rate_ to the graph rate
MOZ_ASSERT(track_rate_ == source_->GraphRate());
#endif
#if defined(MOZILLA_INTERNAL_API)
StreamTime delta = desired_time - played_ticks_;
// Don't append if we've already provided a frame that supposedly
// goes past the current aDesiredTime. Doing so means a negative
// delta and thus messes up handling of the graph
if (delta > 0) {
VideoSegment segment;
segment.AppendFrame(image.forget(), delta, IntSize(width_, height_),
principal_handle_);
// Handle track not actually added yet or removed/finished
if (source_->AppendToTrack(track_id_, &segment)) {
played_ticks_ = desired_time;
} else {
MOZ_MTLOG(ML_ERROR, "AppendToTrack failed");
return;
}
}
#endif
}
// Accessors for external writes from the renderer
void FrameSizeChange(unsigned int width,
unsigned int height,
unsigned int number_of_streams) {
ReentrantMonitorAutoEnter enter(monitor_);
width_ = width;
height_ = height;
}
void RenderVideoFrame(const unsigned char* buffer,
size_t buffer_size,
uint32_t time_stamp,
int64_t render_time,
const RefPtr<Image>& video_image) {
const RefPtr<layers::Image>& video_image)
{
RenderVideoFrame(buffer, buffer_size, width_, (width_ + 1) >> 1,
time_stamp, render_time, video_image);
}
}
void MediaPipelineReceiveVideo::PipelineListener::RenderVideoFrame(
const unsigned char* buffer,
void RenderVideoFrame(const unsigned char* buffer,
size_t buffer_size,
uint32_t y_stride,
uint32_t cbcr_stride,
uint32_t time_stamp,
int64_t render_time,
const RefPtr<Image>& video_image) {
const RefPtr<layers::Image>& video_image)
{
#ifdef MOZILLA_INTERNAL_API
ReentrantMonitorAutoEnter enter(monitor_);
#endif // MOZILLA_INTERNAL_API
@@ -1637,38 +1979,127 @@ void MediaPipelineReceiveVideo::PipelineListener::RenderVideoFrame(
}
#endif // WEBRTC_GONK
#endif // MOZILLA_INTERNAL_API
}
private:
int width_;
int height_;
#if defined(MOZILLA_INTERNAL_API)
RefPtr<layers::ImageContainer> image_container_;
RefPtr<layers::Image> image_;
#endif
mozilla::ReentrantMonitor monitor_; // Monitor for processing WebRTC frames.
// Protects image_ against:
// - Writing from the GIPS thread
// - Reading from the MSG thread
};
class MediaPipelineReceiveVideo::PipelineRenderer : public VideoRenderer
{
public:
explicit PipelineRenderer(MediaPipelineReceiveVideo *pipeline) :
pipeline_(pipeline) {}
void Detach() { pipeline_ = nullptr; }
// Implement VideoRenderer
void FrameSizeChange(unsigned int width,
unsigned int height,
unsigned int number_of_streams) override
{
pipeline_->listener_->FrameSizeChange(width, height, number_of_streams);
}
void RenderVideoFrame(const unsigned char* buffer,
size_t buffer_size,
uint32_t time_stamp,
int64_t render_time,
const ImageHandle& handle) override
{
pipeline_->listener_->RenderVideoFrame(buffer, buffer_size,
time_stamp, render_time,
handle.GetImage());
}
void RenderVideoFrame(const unsigned char* buffer,
size_t buffer_size,
uint32_t y_stride,
uint32_t cbcr_stride,
uint32_t time_stamp,
int64_t render_time,
const ImageHandle& handle) override
{
pipeline_->listener_->RenderVideoFrame(buffer, buffer_size,
y_stride, cbcr_stride,
time_stamp, render_time,
handle.GetImage());
}
private:
MediaPipelineReceiveVideo *pipeline_; // Raw pointer to avoid cycles
};
MediaPipelineReceiveVideo::MediaPipelineReceiveVideo(
const std::string& pc,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
SourceMediaStream *stream,
const std::string& media_stream_track_id,
TrackID numeric_track_id,
int level,
RefPtr<VideoSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter,
bool queue_track) :
MediaPipelineReceive(pc, main_thread, sts_thread,
stream, media_stream_track_id, level, conduit,
rtp_transport, rtcp_transport, filter),
renderer_(new PipelineRenderer(this)),
listener_(new PipelineListener(stream, numeric_track_id, queue_track))
{}
void MediaPipelineReceiveVideo::DetachMedia()
{
ASSERT_ON_THREAD(main_thread_);
listener_->EndTrack();
// stop generating video and thus stop invoking the PipelineRenderer
// and PipelineListener - the renderer has a raw ptr to the Pipeline to
// avoid cycles, and the render callbacks are invoked from a different
// thread so simple null-checks would cause TSAN bugs without locks.
static_cast<VideoSessionConduit*>(conduit_.get())->DetachRenderer();
if (stream_) {
stream_->RemoveListener(listener_);
stream_ = nullptr;
}
}
void MediaPipelineReceiveVideo::PipelineListener::
NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) {
ReentrantMonitorAutoEnter enter(monitor_);
nsresult MediaPipelineReceiveVideo::Init() {
ASSERT_ON_THREAD(main_thread_);
MOZ_MTLOG(ML_DEBUG, __FUNCTION__);
description_ = pc_ + "| Receive video[";
description_ += track_id_;
description_ += "]";
#if defined(MOZILLA_INTERNAL_API)
RefPtr<Image> image = image_;
// our constructor sets track_rate_ to the graph rate
MOZ_ASSERT(track_rate_ == source_->GraphRate());
listener_->AddSelf(new VideoSegment());
#endif
#if defined(MOZILLA_INTERNAL_API)
StreamTime delta = desired_time - played_ticks_;
// Always happens before we can DetachMedia()
static_cast<VideoSessionConduit *>(conduit_.get())->
AttachRenderer(renderer_);
// Don't append if we've already provided a frame that supposedly
// goes past the current aDesiredTime. Doing so means a negative
// delta and thus messes up handling of the graph
if (delta > 0) {
VideoSegment segment;
segment.AppendFrame(image.forget(), delta, IntSize(width_, height_),
principal_handle_);
// Handle track not actually added yet or removed/finished
if (source_->AppendToTrack(track_id_, &segment)) {
played_ticks_ = desired_time;
} else {
MOZ_MTLOG(ML_ERROR, "AppendToTrack failed");
return;
}
}
#endif
return MediaPipelineReceive::Init();
}
#ifndef USE_FAKE_MEDIA_STREAMS
void MediaPipelineReceiveVideo::SetPrincipalHandle_m(const PrincipalHandle& principal_handle)
{
listener_->SetPrincipalHandle_m(principal_handle);
}
#endif // USE_FAKE_MEDIA_STREAMS
} // end namespace

View file

@@ -12,14 +12,8 @@
#ifdef USE_FAKE_MEDIA_STREAMS
#include "FakeMediaStreams.h"
#else
#include "DOMMediaStream.h"
#include "MediaStreamGraph.h"
#include "VideoUtils.h"
#endif
#include "MediaConduitInterface.h"
#include "MediaPipelineFilter.h"
#include "AudioSegment.h"
#include "mozilla/ReentrantMonitor.h"
#include "mozilla/Atomics.h"
#include "SrtpFlow.h"
@@ -27,17 +21,24 @@
#include "runnable_utils.h"
#include "transportflow.h"
#include "AudioPacketizer.h"
#if defined(MOZILLA_INTERNAL_API)
#include "VideoSegment.h"
#endif
#include "StreamBuffer.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
namespace mozilla {
class nsIPrincipal;
namespace mozilla {
class MediaPipelineFilter;
class PeerIdentity;
#ifndef USE_FAKE_MEDIA_STREAMS
namespace dom {
class MediaStreamTrack;
} // namespace dom
class SourceMediaStream;
#endif // USE_FAKE_MEDIA_STREAMS
// A class that represents the pipeline of audio and video
// The dataflow looks like:
//
@@ -82,34 +83,7 @@ class MediaPipeline : public sigslot::has_slots<> {
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter)
: direction_(direction),
track_id_(track_id),
level_(level),
conduit_(conduit),
rtp_(rtp_transport, rtcp_transport ? RTP : MUX),
rtcp_(rtcp_transport ? rtcp_transport : rtp_transport,
rtcp_transport ? RTCP : MUX),
main_thread_(main_thread),
sts_thread_(sts_thread),
rtp_packets_sent_(0),
rtcp_packets_sent_(0),
rtp_packets_received_(0),
rtcp_packets_received_(0),
rtp_bytes_sent_(0),
rtp_bytes_received_(0),
pc_(pc),
description_(),
filter_(filter),
rtp_parser_(webrtc::RtpHeaderParser::Create()) {
// To indicate rtcp-mux rtcp_transport should be nullptr.
// Therefore it's an error to send in the same flow for
// both rtp and rtcp.
MOZ_ASSERT(rtp_transport != rtcp_transport);
// PipelineTransport() will access this->sts_thread_; moved here for safety
transport_ = new PipelineTransport(this);
}
nsAutoPtr<MediaPipelineFilter> filter);
// Must be called on the STS thread. Must be called after ShutdownMedia_m().
void DetachTransport_s();
@@ -301,73 +275,6 @@ class MediaPipeline : public sigslot::has_slots<> {
bool IsRtp(const unsigned char *data, size_t len);
};
class GenericReceiveListener : public MediaStreamListener
{
public:
GenericReceiveListener(SourceMediaStream *source, TrackID track_id,
TrackRate track_rate, bool queue_track)
: source_(source),
track_id_(track_id),
track_rate_(track_rate),
played_ticks_(0),
queue_track_(queue_track),
principal_handle_(PRINCIPAL_HANDLE_NONE) {}
virtual ~GenericReceiveListener() {}
void AddSelf(MediaSegment* segment);
void SetPlayedTicks(TrackTicks time) {
played_ticks_ = time;
}
void EndTrack() {
source_->EndTrack(track_id_);
}
#ifndef USE_FAKE_MEDIA_STREAMS
// Must be called on the main thread
void SetPrincipalHandle_m(const PrincipalHandle& aPrincipal);
// Must be called on the MediaStreamGraph thread
void SetPrincipalHandle_msg(const PrincipalHandle& aPrincipal);
#endif // USE_FAKE_MEDIA_STREAMS
protected:
SourceMediaStream *source_;
TrackID track_id_;
TrackRate track_rate_;
TrackTicks played_ticks_;
bool queue_track_;
PrincipalHandle principal_handle_;
};
class TrackAddedCallback {
public:
virtual void TrackAdded(TrackTicks current_ticks) = 0;
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TrackAddedCallback);
protected:
virtual ~TrackAddedCallback() {}
};
class GenericReceiveListener;
class GenericReceiveCallback : public TrackAddedCallback
{
public:
explicit GenericReceiveCallback(GenericReceiveListener* listener)
: listener_(listener) {}
void TrackAdded(TrackTicks time) {
listener_->SetPlayedTicks(time);
}
private:
RefPtr<GenericReceiveListener> listener_;
};
class ConduitDeleteEvent: public nsRunnable
{
public:
@@ -394,12 +301,7 @@ public:
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter) :
MediaPipeline(pc, TRANSMIT, main_thread, sts_thread, track_id, level,
conduit, rtp_transport, rtcp_transport, filter),
listener_(new PipelineListener(conduit)),
domtrack_(domtrack)
{}
nsAutoPtr<MediaPipelineFilter> filter);
// Initialize (stuff here may fail)
nsresult Init() override;
@@ -430,101 +332,8 @@ public:
// multiple tracks of a type in a stream yet). bug 1056650
virtual nsresult ReplaceTrack(dom::MediaStreamTrack& domtrack);
// Separate class to allow ref counting
class PipelineListener : public MediaStreamTrackDirectListener {
friend class MediaPipelineTransmit;
public:
explicit PipelineListener(const RefPtr<MediaSessionConduit>& conduit)
: conduit_(conduit),
track_id_(TRACK_INVALID),
mMutex("MediaPipelineTransmit::PipelineListener"),
track_id_external_(TRACK_INVALID),
active_(false),
enabled_(false),
direct_connect_(false),
packetizer_(nullptr)
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
, last_img_(-1)
#endif // MOZILLA_INTERNAL_API
{
}
~PipelineListener()
{
if (!NS_IsMainThread()) {
// release conduit on mainthread. Must use forget()!
nsresult rv = NS_DispatchToMainThread(new
ConduitDeleteEvent(conduit_.forget()));
MOZ_ASSERT(!NS_FAILED(rv),"Could not dispatch conduit shutdown to main");
if (NS_FAILED(rv)) {
MOZ_CRASH();
}
} else {
conduit_ = nullptr;
}
}
// Dispatches setting the internal TrackID to TRACK_INVALID to the media
// graph thread to keep it in sync with other MediaStreamGraph operations
// like RemoveListener() and AddListener(). The TrackID will be updated on
// the next NewData() callback.
void UnsetTrackId(MediaStreamGraphImpl* graph);
void SetActive(bool active) { active_ = active; }
void SetEnabled(bool enabled) { enabled_ = enabled; }
// Implement MediaStreamTrackListener
void NotifyQueuedChanges(MediaStreamGraph* aGraph,
StreamTime aTrackOffset,
const MediaSegment& aQueuedMedia) override;
// Implement MediaStreamTrackDirectListener
void NotifyRealtimeTrackData(MediaStreamGraph* aGraph,
StreamTime aTrackOffset,
const MediaSegment& aMedia) override;
void NotifyDirectListenerInstalled(InstallationResult aResult) override;
void NotifyDirectListenerUninstalled() override;
private:
void UnsetTrackIdImpl() {
MutexAutoLock lock(mMutex);
track_id_ = track_id_external_ = TRACK_INVALID;
}
void NewData(MediaStreamGraph* graph,
StreamTime offset,
const MediaSegment& media);
virtual void ProcessAudioChunk(AudioSessionConduit *conduit,
TrackRate rate, AudioChunk& chunk);
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
virtual void ProcessVideoChunk(VideoSessionConduit *conduit,
VideoChunk& chunk);
#endif
RefPtr<MediaSessionConduit> conduit_;
// May be TRACK_INVALID until we see data from the track
TrackID track_id_; // this is the current TrackID this listener is attached to
Mutex mMutex;
// protected by mMutex
// May be TRACK_INVALID until we see data from the track
TrackID track_id_external_; // this is queried from other threads
// active is true if there is a transport to send on
mozilla::Atomic<bool> active_;
// enabled is true if the media access control permits sending
// actual content; when false you get black/silence
mozilla::Atomic<bool> enabled_;
// Written and read on the MediaStreamGraph thread
bool direct_connect_;
nsAutoPtr<AudioPacketizer<int16_t, int16_t>> packetizer_;
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
int32_t last_img_; // serial number of last Image
#endif // MOZILLA_INTERNAL_API
};
class PipelineListener;
private:
RefPtr<PipelineListener> listener_;
@@ -546,14 +355,7 @@ class MediaPipelineReceive : public MediaPipeline {
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter) :
MediaPipeline(pc, RECEIVE, main_thread, sts_thread,
track_id, level, conduit, rtp_transport,
rtcp_transport, filter),
stream_(stream),
segments_added_(0) {
MOZ_ASSERT(stream_);
}
nsAutoPtr<MediaPipelineFilter> filter);
int segments_added() const { return segments_added_; }
@@ -563,9 +365,7 @@ class MediaPipelineReceive : public MediaPipeline {
virtual void SetPrincipalHandle_m(const PrincipalHandle& principal_handle) = 0;
#endif // USE_FAKE_MEDIA_STREAMS
protected:
~MediaPipelineReceive() {
MOZ_ASSERT(!stream_); // Check that we have shut down already.
}
~MediaPipelineReceive();
RefPtr<SourceMediaStream> stream_;
int segments_added_;
@@ -594,67 +394,20 @@ class MediaPipelineReceiveAudio : public MediaPipelineReceive {
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter,
bool queue_track) :
MediaPipelineReceive(pc, main_thread, sts_thread,
stream, media_stream_track_id, level, conduit,
rtp_transport, rtcp_transport, filter),
listener_(new PipelineListener(stream, numeric_track_id, conduit,
queue_track)) {
}
bool queue_track);
void DetachMedia() override {
ASSERT_ON_THREAD(main_thread_);
if (stream_) {
stream_->RemoveListener(listener_);
stream_ = nullptr;
}
}
void DetachMedia() override;
nsresult Init() override;
bool IsVideo() const override { return false; }
#ifndef USE_FAKE_MEDIA_STREAMS
void SetPrincipalHandle_m(const PrincipalHandle& principal_handle) override
{
listener_->SetPrincipalHandle_m(principal_handle);
}
void SetPrincipalHandle_m(const PrincipalHandle& principal_handle) override;
#endif // USE_FAKE_MEDIA_STREAMS
private:
// Separate class to allow ref counting
class PipelineListener : public GenericReceiveListener {
public:
PipelineListener(SourceMediaStream * source, TrackID track_id,
const RefPtr<MediaSessionConduit>& conduit,
bool queue_track);
~PipelineListener()
{
if (!NS_IsMainThread()) {
// release conduit on mainthread. Must use forget()!
nsresult rv = NS_DispatchToMainThread(new
ConduitDeleteEvent(conduit_.forget()));
MOZ_ASSERT(!NS_FAILED(rv),"Could not dispatch conduit shutdown to main");
if (NS_FAILED(rv)) {
MOZ_CRASH();
}
} else {
conduit_ = nullptr;
}
}
// Implement MediaStreamListener
void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
StreamTime offset,
uint32_t events,
const MediaSegment& queued_media,
MediaStream* input_stream,
TrackID input_tid) override {}
void NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) override;
private:
RefPtr<MediaSessionConduit> conduit_;
};
class PipelineListener;
RefPtr<PipelineListener> listener_;
};
@@ -680,139 +433,29 @@ class MediaPipelineReceiveVideo : public MediaPipelineReceive {
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter,
bool queue_track) :
MediaPipelineReceive(pc, main_thread, sts_thread,
stream, media_stream_track_id, level, conduit,
rtp_transport, rtcp_transport, filter),
renderer_(new PipelineRenderer(this)),
listener_(new PipelineListener(stream, numeric_track_id, queue_track)) {
}
bool queue_track);
// Called on the main thread.
void DetachMedia() override {
ASSERT_ON_THREAD(main_thread_);
listener_->EndTrack();
// stop generating video and thus stop invoking the PipelineRenderer
// and PipelineListener - the renderer has a raw ptr to the Pipeline to
// avoid cycles, and the render callbacks are invoked from a different
// thread so simple null-checks would cause TSAN bugs without locks.
static_cast<VideoSessionConduit*>(conduit_.get())->DetachRenderer();
if (stream_) {
stream_->RemoveListener(listener_);
stream_ = nullptr;
}
}
void DetachMedia() override;
nsresult Init() override;
bool IsVideo() const override { return true; }
#ifndef USE_FAKE_MEDIA_STREAMS
void SetPrincipalHandle_m(const PrincipalHandle& principal_handle) override
{
listener_->SetPrincipalHandle_m(principal_handle);
}
void SetPrincipalHandle_m(const PrincipalHandle& principal_handle) override;
#endif // USE_FAKE_MEDIA_STREAMS
private:
class PipelineRenderer : public VideoRenderer {
public:
explicit PipelineRenderer(MediaPipelineReceiveVideo *pipeline) :
pipeline_(pipeline) {}
void Detach() { pipeline_ = nullptr; }
// Implement VideoRenderer
void FrameSizeChange(unsigned int width,
unsigned int height,
unsigned int number_of_streams) override {
pipeline_->listener_->FrameSizeChange(width, height, number_of_streams);
}
void RenderVideoFrame(const unsigned char* buffer,
size_t buffer_size,
uint32_t time_stamp,
int64_t render_time,
const ImageHandle& handle) override {
pipeline_->listener_->RenderVideoFrame(buffer, buffer_size,
time_stamp, render_time,
handle.GetImage());
}
void RenderVideoFrame(const unsigned char* buffer,
size_t buffer_size,
uint32_t y_stride,
uint32_t cbcr_stride,
uint32_t time_stamp,
int64_t render_time,
const ImageHandle& handle) override {
pipeline_->listener_->RenderVideoFrame(buffer, buffer_size,
y_stride, cbcr_stride,
time_stamp, render_time,
handle.GetImage());
}
private:
MediaPipelineReceiveVideo *pipeline_; // Raw pointer to avoid cycles
};
class PipelineRenderer;
friend class PipelineRenderer;
// Separate class to allow ref counting
class PipelineListener : public GenericReceiveListener {
public:
PipelineListener(SourceMediaStream * source, TrackID track_id,
bool queue_track);
// Implement MediaStreamListener
void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
StreamTime offset,
uint32_t events,
const MediaSegment& queued_media,
MediaStream* input_stream,
TrackID input_tid) override {}
void NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) override;
// Accessors for external writes from the renderer
void FrameSizeChange(unsigned int width,
unsigned int height,
unsigned int number_of_streams) {
ReentrantMonitorAutoEnter enter(monitor_);
width_ = width;
height_ = height;
}
void RenderVideoFrame(const unsigned char* buffer,
size_t buffer_size,
uint32_t time_stamp,
int64_t render_time,
const RefPtr<layers::Image>& video_image);
void RenderVideoFrame(const unsigned char* buffer,
size_t buffer_size,
uint32_t y_stride,
uint32_t cbcr_stride,
uint32_t time_stamp,
int64_t render_time,
const RefPtr<layers::Image>& video_image);
private:
int width_;
int height_;
#if defined(MOZILLA_INTERNAL_API)
RefPtr<layers::ImageContainer> image_container_;
RefPtr<layers::Image> image_;
#endif
mozilla::ReentrantMonitor monitor_; // Monitor for processing WebRTC frames.
// Protects image_ against:
// - Writing from the GIPS thread
// - Reading from the MSG thread
};
friend class PipelineRenderer;
class PipelineListener;
RefPtr<PipelineRenderer> renderer_;
RefPtr<PipelineListener> listener_;
};
} // end namespace
} // namespace mozilla
#endif

View file

@@ -9,6 +9,7 @@
#include "PeerConnectionImpl.h"
#include "PeerConnectionMedia.h"
#include "MediaPipelineFactory.h"
#include "MediaPipelineFilter.h"
#include "transportflow.h"
#include "transportlayer.h"
#include "transportlayerdtls.h"

View file

@@ -28,6 +28,9 @@
#include "FakeMediaStreams.h"
#else
#include "MediaSegment.h"
#ifdef MOZILLA_INTERNAL_API
#include "MediaStreamGraph.h"
#endif
#endif
#include "nsNetCID.h"