Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1212237 - Be explicit about principals for received tracks. r=bwc
This patch fixes two things: 1) a potential threading issue in setting and reading the PrincipalHandle for MediaPipelineReceiveVideo, and 2) it plumbs the PrincipalHandle down to the receiving MediaPipelines right from the start. The latter hasn't been necessary in the past, since the principal initially handed to a track's constructor is trusted, but it is good form and helps clarify the situation for the next patch, which switches the initial principal from always-private to mostly-non-private. In most cases this principal no longer gets updated after the track has been created, so it helps to have a PrincipalHandle that matches the track's principal.

Differential Revision: https://phabricator.services.mozilla.com/D48946

--HG--
extra : moz-landing-system : lando
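The threading fix amounts to guarding the video PipelineListener's PrincipalHandle with a mutex, because the handle is set from the main thread while each decoded frame is tagged with it on the frame-delivery thread. Below is a minimal standalone sketch of that pattern; it uses std::mutex and a placeholder PrincipalHandle type rather than the real mozilla::Mutex and PrincipalHandle from the diff that follows, so treat it as illustrative only.

#include <mutex>
#include <string>

// Placeholder for the real PrincipalHandle (a thread-safe wrapper around an
// nsIPrincipal); a string is enough to show the locking pattern.
using PrincipalHandle = std::string;

class ReceiveListenerSketch {
 public:
  // Main thread: update the principal that future frames should carry.
  void SetPrincipalHandle(const PrincipalHandle& aPrincipalHandle) {
    std::lock_guard<std::mutex> lock(mMutex);
    mPrincipalHandle = aPrincipalHandle;
  }

  // Frame-delivery thread: copy the handle under the lock, then use the copy
  // without holding the lock while appending the frame to the track.
  void RenderVideoFrame() {
    PrincipalHandle principal;
    {
      std::lock_guard<std::mutex> lock(mMutex);
      principal = mPrincipalHandle;
    }
    // ... append the decoded frame, tagged with `principal` ...
  }

 private:
  std::mutex mMutex;                  // Protects mPrincipalHandle.
  PrincipalHandle mPrincipalHandle;   // Written on main, read per frame.
};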
Parent: d817feca61
Commit: b009f6cd63
@@ -345,7 +345,7 @@ class TestAgentReceive : public TestAgent {
     audio_pipeline_ = new mozilla::MediaPipelineReceiveAudio(
         test_pc, transport_, nullptr, test_utils->sts_target(),
         static_cast<mozilla::AudioSessionConduit*>(audio_conduit_.get()),
-        nullptr);
+        nullptr, PRINCIPAL_HANDLE_NONE);

     audio_pipeline_->Start();

@@ -1168,7 +1168,6 @@ class GenericReceiveListener : public MediaTrackListener {
             &static_cast<RemoteTrackSource&>(aTrack->GetSource()))),
         mSource(mTrackSource->mStream),
         mIsAudio(aTrack->AsAudioStreamTrack()),
-        mPrincipalHandle(PRINCIPAL_HANDLE_NONE),
         mListening(false),
         mMaybeTrackNeedsUnmute(true) {
     MOZ_DIAGNOSTIC_ASSERT(NS_IsMainThread());
@@ -1229,38 +1228,10 @@ class GenericReceiveListener : public MediaTrackListener {
                               &RemoteTrackSource::ForceEnded));
   }

-  // Must be called on the main thread
-  void SetPrincipalHandle_m(const PrincipalHandle& aPrincipalHandle) {
-    class Message : public ControlMessage {
-     public:
-      Message(GenericReceiveListener* aListener,
-              const PrincipalHandle& aPrincipalHandle)
-          : ControlMessage(nullptr),
-            mListener(aListener),
-            mPrincipalHandle(aPrincipalHandle) {}
-
-      void Run() override {
-        mListener->SetPrincipalHandle_mtg(mPrincipalHandle);
-      }
-
-      const RefPtr<GenericReceiveListener> mListener;
-      PrincipalHandle mPrincipalHandle;
-    };
-
-    mSource->GraphImpl()->AppendMessage(
-        MakeUnique<Message>(this, aPrincipalHandle));
-  }
-
-  // Must be called on the MediaTrackGraph thread
-  void SetPrincipalHandle_mtg(const PrincipalHandle& aPrincipalHandle) {
-    mPrincipalHandle = aPrincipalHandle;
-  }
-
  protected:
   const nsMainThreadPtrHandle<RemoteTrackSource> mTrackSource;
   const RefPtr<SourceMediaTrack> mSource;
   const bool mIsAudio;
-  PrincipalHandle mPrincipalHandle;
   bool mListening;
   Atomic<bool> mMaybeTrackNeedsUnmute;
 };
@@ -1278,7 +1249,8 @@ class MediaPipelineReceiveAudio::PipelineListener
     : public GenericReceiveListener {
  public:
   PipelineListener(dom::MediaStreamTrack* aTrack,
-                   const RefPtr<MediaSessionConduit>& aConduit)
+                   const RefPtr<MediaSessionConduit>& aConduit,
+                   const PrincipalHandle& aPrincipalHandle)
       : GenericReceiveListener(aTrack),
         mConduit(aConduit),
         // AudioSession conduit only supports 16, 32, 44.1 and 48kHz
@@ -1292,7 +1264,8 @@ class MediaPipelineReceiveAudio::PipelineListener
         mTaskQueue(
             new TaskQueue(GetMediaThreadPool(MediaThreadType::WEBRTC_DECODER),
                           "AudioPipelineListener")),
-        mPlayedTicks(0) {
+        mPlayedTicks(0),
+        mPrincipalHandle(aPrincipalHandle) {
     mSource->SetAppendDataSourceRate(mRate);
     mSource->AddListener(this);
   }
@@ -1303,6 +1276,33 @@ class MediaPipelineReceiveAudio::PipelineListener
     NotifyPullImpl(aDesiredTime);
   }

+  // Must be called on the main thread
+  void SetPrincipalHandle_m(const PrincipalHandle& aPrincipalHandle) {
+    class Message : public ControlMessage {
+     public:
+      Message(PipelineListener* aListener,
+              const PrincipalHandle& aPrincipalHandle)
+          : ControlMessage(nullptr),
+            mListener(aListener),
+            mPrincipalHandle(aPrincipalHandle) {}
+
+      void Run() override {
+        mListener->SetPrincipalHandle_mtg(mPrincipalHandle);
+      }
+
+      const RefPtr<PipelineListener> mListener;
+      const PrincipalHandle mPrincipalHandle;
+    };
+
+    mSource->GraphImpl()->AppendMessage(
+        MakeUnique<Message>(this, aPrincipalHandle));
+  }
+
+  // Must be called on the MediaTrackGraph thread
+  void SetPrincipalHandle_mtg(const PrincipalHandle& aPrincipalHandle) {
+    mPrincipalHandle = aPrincipalHandle;
+  }
+
  private:
   ~PipelineListener() {
     NS_ReleaseOnMainThreadSystemGroup("MediaPipeline::mConduit",
@@ -1399,15 +1399,19 @@ class MediaPipelineReceiveAudio::PipelineListener
   // Number of frames of data that has been added to the SourceMediaTrack in
   // the graph's rate.
   TrackTicks mPlayedTicks;
+  PrincipalHandle mPrincipalHandle;
 };

 MediaPipelineReceiveAudio::MediaPipelineReceiveAudio(
     const std::string& aPc, MediaTransportHandler* aTransportHandler,
     nsCOMPtr<nsIEventTarget> aMainThread, nsCOMPtr<nsIEventTarget> aStsThread,
-    RefPtr<AudioSessionConduit> aConduit, dom::MediaStreamTrack* aTrack)
+    RefPtr<AudioSessionConduit> aConduit, dom::MediaStreamTrack* aTrack,
+    const PrincipalHandle& aPrincipalHandle)
     : MediaPipelineReceive(aPc, aTransportHandler, aMainThread, aStsThread,
                            aConduit),
-      mListener(aTrack ? new PipelineListener(aTrack, mConduit) : nullptr) {
+      mListener(aTrack
+                    ? new PipelineListener(aTrack, mConduit, aPrincipalHandle)
+                    : nullptr) {
   mDescription = mPc + "| Receive audio";
 }

@@ -1448,15 +1452,28 @@ void MediaPipelineReceiveAudio::OnRtpPacketReceived() {
 class MediaPipelineReceiveVideo::PipelineListener
     : public GenericReceiveListener {
  public:
-  explicit PipelineListener(dom::MediaStreamTrack* aTrack)
+  PipelineListener(dom::MediaStreamTrack* aTrack,
+                   const PrincipalHandle& aPrincipalHandle)
       : GenericReceiveListener(aTrack),
         mImageContainer(
-            LayerManager::CreateImageContainer(ImageContainer::ASYNCHRONOUS)) {
+            LayerManager::CreateImageContainer(ImageContainer::ASYNCHRONOUS)),
+        mMutex("MediaPipelineReceiveVideo::PipelineListener::mMutex"),
+        mPrincipalHandle(aPrincipalHandle) {
     mSource->AddListener(this);
   }

+  void SetPrincipalHandle_m(const PrincipalHandle& aPrincipalHandle) {
+    MutexAutoLock lock(mMutex);
+    mPrincipalHandle = aPrincipalHandle;
+  }
+
   void RenderVideoFrame(const webrtc::VideoFrameBuffer& aBuffer,
                         uint32_t aTimeStamp, int64_t aRenderTime) {
+    PrincipalHandle principal;
+    {
+      MutexAutoLock lock(mMutex);
+      principal = mPrincipalHandle;
+    }
     RefPtr<Image> image;
     if (aBuffer.type() == webrtc::VideoFrameBuffer::Type::kNative) {
       // We assume that only native handles are used with the
@@ -1501,12 +1518,14 @@ class MediaPipelineReceiveVideo::PipelineListener

     VideoSegment segment;
     auto size = image->GetSize();
-    segment.AppendFrame(image.forget(), size, mPrincipalHandle);
+    segment.AppendFrame(image.forget(), size, principal);
     mSource->AppendData(&segment);
   }

  private:
   RefPtr<layers::ImageContainer> mImageContainer;
+  Mutex mMutex;  // Protects the below members.
+  PrincipalHandle mPrincipalHandle;
 };

 class MediaPipelineReceiveVideo::PipelineRenderer
@@ -1531,11 +1550,13 @@ class MediaPipelineReceiveVideo::PipelineRenderer
 MediaPipelineReceiveVideo::MediaPipelineReceiveVideo(
     const std::string& aPc, MediaTransportHandler* aTransportHandler,
     nsCOMPtr<nsIEventTarget> aMainThread, nsCOMPtr<nsIEventTarget> aStsThread,
-    RefPtr<VideoSessionConduit> aConduit, dom::MediaStreamTrack* aTrack)
+    RefPtr<VideoSessionConduit> aConduit, dom::MediaStreamTrack* aTrack,
+    const PrincipalHandle& aPrincipalHandle)
     : MediaPipelineReceive(aPc, aTransportHandler, aMainThread, aStsThread,
                            aConduit),
       mRenderer(new PipelineRenderer(this)),
-      mListener(aTrack ? new PipelineListener(aTrack) : nullptr) {
+      mListener(aTrack ? new PipelineListener(aTrack, aPrincipalHandle)
+                       : nullptr) {
   mDescription = mPc + "| Receive video";
   aConduit->AttachRenderer(mRenderer);
 }
@@ -348,7 +348,8 @@ class MediaPipelineReceiveAudio : public MediaPipelineReceive {
                             nsCOMPtr<nsIEventTarget> aMainThread,
                             nsCOMPtr<nsIEventTarget> aStsThread,
                             RefPtr<AudioSessionConduit> aConduit,
-                            dom::MediaStreamTrack* aTrack);
+                            dom::MediaStreamTrack* aTrack,
+                            const PrincipalHandle& aPrincipalHandle);

   void DetachMedia() override;

@@ -365,7 +366,7 @@ class MediaPipelineReceiveAudio : public MediaPipelineReceive {
   // Separate class to allow ref counting
   class PipelineListener;

-  RefPtr<PipelineListener> mListener;
+  const RefPtr<PipelineListener> mListener;
 };

 // A specialization of pipeline for reading from the network and
|
@ -377,7 +378,8 @@ class MediaPipelineReceiveVideo : public MediaPipelineReceive {
|
||||||
nsCOMPtr<nsIEventTarget> aMainThread,
|
nsCOMPtr<nsIEventTarget> aMainThread,
|
||||||
nsCOMPtr<nsIEventTarget> aStsThread,
|
nsCOMPtr<nsIEventTarget> aStsThread,
|
||||||
RefPtr<VideoSessionConduit> aConduit,
|
RefPtr<VideoSessionConduit> aConduit,
|
||||||
dom::MediaStreamTrack* aTrack);
|
dom::MediaStreamTrack* aTrack,
|
||||||
|
const PrincipalHandle& aPrincipalHandle);
|
||||||
|
|
||||||
// Called on the main thread.
|
// Called on the main thread.
|
||||||
void DetachMedia() override;
|
void DetachMedia() override;
|
||||||
|
@@ -399,7 +401,7 @@ class MediaPipelineReceiveVideo : public MediaPipelineReceive {
   class PipelineListener;

   const RefPtr<PipelineRenderer> mRenderer;
-  RefPtr<PipelineListener> mListener;
+  const RefPtr<PipelineListener> mListener;
 };

 }  // namespace mozilla
@@ -1001,13 +1001,29 @@ already_AddRefed<TransceiverImpl> PeerConnectionImpl::CreateTransceiverImpl(
     aSendTrack->AddPrincipalChangeObserver(this);
   }

+  // Set the principal used for the receive tracks. This makes the track
+  // data (audio/video samples) accessible to the receiving page. We're
+  // only certain that privacy has been requested if we're connected.
+  RefPtr<nsIPrincipal> principal;
+  Document* doc = GetWindow()->GetExtantDoc();
+  MOZ_ASSERT(doc);
+  const bool privacyRequested = mPrivacyRequested.valueOr(true);
+  if (privacyRequested) {
+    // We're either certain that we need isolation for the tracks, OR
+    // we're not sure and we can fix the track and pipeline in AlpnNegotiated.
+    principal =
+        NullPrincipal::CreateWithInheritedAttributes(doc->NodePrincipal());
+  } else {
+    principal = doc->NodePrincipal();
+  }
+
   OwningNonNull<dom::MediaStreamTrack> receiveTrack =
-      CreateReceiveTrack(aJsepTransceiver->GetMediaType());
+      CreateReceiveTrack(aJsepTransceiver->GetMediaType(), principal);

   RefPtr<TransceiverImpl> transceiverImpl;
-  aRv = mMedia->AddTransceiver(aJsepTransceiver, *receiveTrack, aSendTrack,
-                               &transceiverImpl);
+  aRv =
+      mMedia->AddTransceiver(aJsepTransceiver, *receiveTrack, aSendTrack,
+                             MakePrincipalHandle(principal), &transceiverImpl);

   return transceiverImpl.forget();
 }

@@ -1829,24 +1845,9 @@ static int GetDTMFToneCode(uint16_t c) {
 }

 OwningNonNull<dom::MediaStreamTrack> PeerConnectionImpl::CreateReceiveTrack(
-    SdpMediaSection::MediaType type) {
+    SdpMediaSection::MediaType type, nsIPrincipal* aPrincipal) {
   bool audio = (type == SdpMediaSection::MediaType::kAudio);

-  // Set the principal used for creating the tracks. This makes the track
-  // data (audio/video samples) accessible to the receiving page. We're
-  // only certain that privacy hasn't been requested if we're connected.
-  nsCOMPtr<nsIPrincipal> principal;
-  Document* doc = GetWindow()->GetExtantDoc();
-  MOZ_ASSERT(doc);
-  if (mPrivacyRequested.isSome() && !*mPrivacyRequested) {
-    principal = doc->NodePrincipal();
-  } else {
-    // we're either certain that we need isolation for the tracks, OR
-    // we're not sure and we can fix the track in SetDtlsConnected
-    principal =
-        NullPrincipal::CreateWithInheritedAttributes(doc->NodePrincipal());
-  }
-
   MediaTrackGraph* graph = MediaTrackGraph::GetInstance(
       audio ? MediaTrackGraph::AUDIO_THREAD_DRIVER
             : MediaTrackGraph::SYSTEM_THREAD_DRIVER,
@@ -1857,13 +1858,13 @@ OwningNonNull<dom::MediaStreamTrack> PeerConnectionImpl::CreateReceiveTrack(
   if (audio) {
     RefPtr<SourceMediaTrack> source =
         graph->CreateSourceTrack(MediaSegment::AUDIO);
-    trackSource = new RemoteTrackSource(source, principal,
+    trackSource = new RemoteTrackSource(source, aPrincipal,
                                         NS_ConvertASCIItoUTF16("remote audio"));
     track = new AudioStreamTrack(GetWindow(), source, trackSource);
   } else {
     RefPtr<SourceMediaTrack> source =
         graph->CreateSourceTrack(MediaSegment::VIDEO);
-    trackSource = new RemoteTrackSource(source, principal,
+    trackSource = new RemoteTrackSource(source, aPrincipal,
                                         NS_ConvertASCIItoUTF16("remote video"));
     track = new VideoStreamTrack(GetWindow(), source, trackSource);
   }
@@ -310,7 +310,7 @@ class PeerConnectionImpl final
                              ErrorResult& rv);

   OwningNonNull<dom::MediaStreamTrack> CreateReceiveTrack(
-      SdpMediaSection::MediaType type);
+      SdpMediaSection::MediaType type, nsIPrincipal* aPrincipal);

   bool CheckNegotiationNeeded(ErrorResult& rv);

@@ -581,16 +581,16 @@ void PeerConnectionMedia::ShutdownMediaTransport_s() {

 nsresult PeerConnectionMedia::AddTransceiver(
     JsepTransceiver* aJsepTransceiver, dom::MediaStreamTrack& aReceiveTrack,
-    dom::MediaStreamTrack* aSendTrack,
+    dom::MediaStreamTrack* aSendTrack, const PrincipalHandle& aPrincipalHandle,
     RefPtr<TransceiverImpl>* aTransceiverImpl) {
   if (!mCall) {
     mCall = WebRtcCallWrapper::Create();
   }

-  RefPtr<TransceiverImpl> transceiver =
-      new TransceiverImpl(mParent->GetHandle(), mTransportHandler,
-                          aJsepTransceiver, mMainThread.get(), mSTSThread.get(),
-                          &aReceiveTrack, aSendTrack, mCall.get());
+  RefPtr<TransceiverImpl> transceiver = new TransceiverImpl(
+      mParent->GetHandle(), mTransportHandler, aJsepTransceiver,
+      mMainThread.get(), mSTSThread.get(), &aReceiveTrack, aSendTrack,
+      mCall.get(), aPrincipalHandle);

   if (!transceiver->IsValid()) {
     return NS_ERROR_FAILURE;
@@ -82,6 +82,7 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
   nsresult AddTransceiver(JsepTransceiver* aJsepTransceiver,
                           dom::MediaStreamTrack& aReceiveTrack,
                           dom::MediaStreamTrack* aSendTrack,
+                          const PrincipalHandle& aPrincipalHandle,
                           RefPtr<TransceiverImpl>* aTransceiverImpl);

   void GetTransmitPipelinesMatching(
@@ -35,7 +35,8 @@ TransceiverImpl::TransceiverImpl(
     const std::string& aPCHandle, MediaTransportHandler* aTransportHandler,
     JsepTransceiver* aJsepTransceiver, nsIEventTarget* aMainThread,
     nsIEventTarget* aStsThread, dom::MediaStreamTrack* aReceiveTrack,
-    dom::MediaStreamTrack* aSendTrack, WebRtcCallWrapper* aCallWrapper)
+    dom::MediaStreamTrack* aSendTrack, WebRtcCallWrapper* aCallWrapper,
+    const PrincipalHandle& aPrincipalHandle)
     : mPCHandle(aPCHandle),
       mTransportHandler(aTransportHandler),
       mJsepTransceiver(aJsepTransceiver),
@@ -47,9 +48,9 @@ TransceiverImpl::TransceiverImpl(
       mSendTrack(aSendTrack),
       mCallWrapper(aCallWrapper) {
   if (IsVideo()) {
-    InitVideo();
+    InitVideo(aPrincipalHandle);
   } else {
-    InitAudio();
+    InitAudio(aPrincipalHandle);
   }

   if (!IsValid()) {
@@ -69,7 +70,7 @@ TransceiverImpl::~TransceiverImpl() = default;

 NS_IMPL_ISUPPORTS0(TransceiverImpl)

-void TransceiverImpl::InitAudio() {
+void TransceiverImpl::InitAudio(const PrincipalHandle& aPrincipalHandle) {
   mConduit = AudioSessionConduit::Create(mCallWrapper, mStsThread);

   if (!mConduit) {
@@ -82,10 +83,11 @@ void TransceiverImpl::InitAudio() {

   mReceivePipeline = new MediaPipelineReceiveAudio(
       mPCHandle, mTransportHandler, mMainThread.get(), mStsThread.get(),
-      static_cast<AudioSessionConduit*>(mConduit.get()), mReceiveTrack);
+      static_cast<AudioSessionConduit*>(mConduit.get()), mReceiveTrack,
+      aPrincipalHandle);
 }

-void TransceiverImpl::InitVideo() {
+void TransceiverImpl::InitVideo(const PrincipalHandle& aPrincipalHandle) {
   mConduit = VideoSessionConduit::Create(mCallWrapper, mStsThread);

   if (!mConduit) {
@@ -98,7 +100,8 @@ void TransceiverImpl::InitVideo() {

   mReceivePipeline = new MediaPipelineReceiveVideo(
       mPCHandle, mTransportHandler, mMainThread.get(), mStsThread.get(),
-      static_cast<VideoSessionConduit*>(mConduit.get()), mReceiveTrack);
+      static_cast<VideoSessionConduit*>(mConduit.get()), mReceiveTrack,
+      aPrincipalHandle);
 }

 nsresult TransceiverImpl::UpdateSinkIdentity(
@@ -256,9 +259,9 @@ nsresult TransceiverImpl::UpdatePrincipal(nsIPrincipal* aPrincipal) {
     return NS_OK;
   }

-  // This blasts away the existing principal.
-  // We only do this when we become certain that all tracks are safe to make
-  // accessible to the script principal.
+  // This updates the existing principal. If we're setting a principal that does
+  // not subsume the old principal, there will be a delay while the principal
+  // propagates in media content, before the track has the principal aPrincipal.
   static_cast<RemoteTrackSource&>(mReceiveTrack->GetSource())
       .SetPrincipal(aPrincipal);

@@ -56,7 +56,8 @@ class TransceiverImpl : public nsISupports {
                   nsIEventTarget* aMainThread, nsIEventTarget* aStsThread,
                   dom::MediaStreamTrack* aReceiveTrack,
                   dom::MediaStreamTrack* aSendTrack,
-                  WebRtcCallWrapper* aCallWrapper);
+                  WebRtcCallWrapper* aCallWrapper,
+                  const PrincipalHandle& aPrincipalHandle);

   bool IsValid() const { return !!mConduit; }

@@ -133,8 +134,8 @@ class TransceiverImpl : public nsISupports {

  private:
   virtual ~TransceiverImpl();
-  void InitAudio();
-  void InitVideo();
+  void InitAudio(const PrincipalHandle& aPrincipalHandle);
+  void InitVideo(const PrincipalHandle& aPrincipalHandle);
   nsresult UpdateAudioConduit();
   nsresult UpdateVideoConduit();
   nsresult ConfigureVideoCodecMode(VideoSessionConduit& aConduit);