From e1e882cb5679fd885c158f3f61ff857fe1edc8ff Mon Sep 17 00:00:00 2001 From: Paul Adenot Date: Fri, 24 Jul 2015 14:28:16 +0200 Subject: [PATCH] Bug 1156472 - Part 5 - Add MediaEngineWebRTCAudioCaptureSource as a new audio source, and "audioCapture" as a new MediaSource. r=jesup,bz --- dom/media/MediaManager.cpp | 117 ++++++++++++-------- dom/media/MediaManager.h | 2 +- dom/media/webrtc/MediaEngineWebRTC.cpp | 14 ++- dom/media/webrtc/MediaEngineWebRTC.h | 64 ++++++++++- dom/media/webrtc/MediaEngineWebRTCAudio.cpp | 52 +++++++++ dom/webidl/Constraints.webidl | 1 + 6 files changed, 194 insertions(+), 56 deletions(-) diff --git a/dom/media/MediaManager.cpp b/dom/media/MediaManager.cpp index 7af74e55c71a..d989563b8057 100644 --- a/dom/media/MediaManager.cpp +++ b/dom/media/MediaManager.cpp @@ -300,7 +300,8 @@ protected: NS_IMPL_ISUPPORTS(MediaDevice, nsIMediaDevice) MediaDevice::MediaDevice(MediaEngineSource* aSource, bool aIsVideo) - : mSource(aSource) + : mMediaSource(aSource->GetMediaSource()) + , mSource(aSource) , mIsVideo(aIsVideo) { mSource->GetName(mName); @@ -311,9 +312,7 @@ MediaDevice::MediaDevice(MediaEngineSource* aSource, bool aIsVideo) VideoDevice::VideoDevice(MediaEngineVideoSource* aSource) : MediaDevice(aSource, true) -{ - mMediaSource = aSource->GetMediaSource(); -} +{} /** * Helper functions that implement the constraints algorithm from @@ -439,6 +438,8 @@ MediaDevice::GetMediaSource(nsAString& aMediaSource) { if (mMediaSource == dom::MediaSourceEnum::Microphone) { aMediaSource.Assign(NS_LITERAL_STRING("microphone")); + } else if (mMediaSource == dom::MediaSourceEnum::AudioCapture) { + aMediaSource.Assign(NS_LITERAL_STRING("audioCapture")); } else if (mMediaSource == dom::MediaSourceEnum::Window) { // this will go away aMediaSource.Assign(NS_LITERAL_STRING("window")); } else { // all the rest are shared @@ -784,11 +785,52 @@ public: } } #endif - // Create a media stream. 
-    nsRefPtr<nsDOMUserMediaStream> trackunion =
-      nsDOMUserMediaStream::CreateTrackUnionStream(window, mListener,
-                                                   mAudioSource, mVideoSource);
-    if (!trackunion || sInShutdown) {
+
+    MediaStreamGraph* msg = MediaStreamGraph::GetInstance();
+    nsRefPtr<SourceMediaStream> stream = msg->CreateSourceStream(nullptr);
+
+    nsRefPtr<DOMLocalMediaStream> domStream;
+    // AudioCapture is a special case, here, in the sense that we're not really
+    // using the audio source and the SourceMediaStream, which act as
+    // placeholders. We re-route a number of streams internally in the MSG and mix
+    // them down instead.
+    if (mAudioSource &&
+        mAudioSource->GetMediaSource() == dom::MediaSourceEnum::AudioCapture) {
+      domStream = DOMLocalMediaStream::CreateAudioCaptureStream(window);
+      msg->RegisterCaptureStreamForWindow(
+        mWindowID, domStream->GetStream()->AsProcessedStream());
+      window->SetAudioCapture(true);
+    } else {
+      // Normal case, connect the source stream to the track union stream to
+      // avoid us blocking
+      nsRefPtr<nsDOMUserMediaStream> trackunion =
+        nsDOMUserMediaStream::CreateTrackUnionStream(window, mListener,
+                                                     mAudioSource, mVideoSource);
+      trackunion->GetStream()->AsProcessedStream()->SetAutofinish(true);
+      nsRefPtr<MediaInputPort> port = trackunion->GetStream()->AsProcessedStream()->
+        AllocateInputPort(stream, MediaInputPort::FLAG_BLOCK_OUTPUT);
+      trackunion->mSourceStream = stream;
+      trackunion->mPort = port.forget();
+      // Log the relationship between SourceMediaStream and TrackUnion stream
+      // Make sure logger starts before capture
+      AsyncLatencyLogger::Get(true);
+      LogLatency(AsyncLatencyLogger::MediaStreamCreate,
+                 reinterpret_cast<uint64_t>(stream.get()),
+                 reinterpret_cast<uint64_t>(trackunion->GetStream()));
+
+      nsCOMPtr<nsIPrincipal> principal;
+      if (mPeerIdentity) {
+        principal = nsNullPrincipal::Create();
+        trackunion->SetPeerIdentity(mPeerIdentity.forget());
+      } else {
+        principal = window->GetExtantDoc()->NodePrincipal();
+      }
+      trackunion->CombineWithPrincipal(principal);
+
+      domStream = trackunion.forget();
+    }
+
+    if (!domStream || sInShutdown) {
       nsCOMPtr<nsIDOMGetUserMediaErrorCallback> onFailure =
        mOnFailure.forget();
       LOG(("Returning error for getUserMedia() - no stream"));
@@ -802,36 +844,6 @@ public:
       }
       return NS_OK;
     }
-    trackunion->AudioConfig(aec_on, (uint32_t) aec,
-                            agc_on, (uint32_t) agc,
-                            noise_on, (uint32_t) noise,
-                            playout_delay);
-
-
-    MediaStreamGraph* gm = MediaStreamGraph::GetInstance();
-    nsRefPtr<SourceMediaStream> stream = gm->CreateSourceStream(nullptr);
-
-    // connect the source stream to the track union stream to avoid us blocking
-    trackunion->GetStream()->AsProcessedStream()->SetAutofinish(true);
-    nsRefPtr<MediaInputPort> port = trackunion->GetStream()->AsProcessedStream()->
-      AllocateInputPort(stream, MediaInputPort::FLAG_BLOCK_OUTPUT);
-    trackunion->mSourceStream = stream;
-    trackunion->mPort = port.forget();
-    // Log the relationship between SourceMediaStream and TrackUnion stream
-    // Make sure logger starts before capture
-    AsyncLatencyLogger::Get(true);
-    LogLatency(AsyncLatencyLogger::MediaStreamCreate,
-               reinterpret_cast<uint64_t>(stream.get()),
-               reinterpret_cast<uint64_t>(trackunion->GetStream()));
-
-    nsCOMPtr<nsIPrincipal> principal;
-    if (mPeerIdentity) {
-      principal = nsNullPrincipal::Create();
-      trackunion->SetPeerIdentity(mPeerIdentity.forget());
-    } else {
-      principal = window->GetExtantDoc()->NodePrincipal();
-    }
-    trackunion->CombineWithPrincipal(principal);

     // The listener was added at the beginning in an inactive state.
     // Activate our listener. We'll call Start() on the source when get a callback
@@ -841,7 +853,7 @@ public:

     // Note: includes JS callbacks; must be released on MainThread
     TracksAvailableCallback* tracksAvailableCallback =
-      new TracksAvailableCallback(mManager, mOnSuccess, mWindowID, trackunion);
+      new TracksAvailableCallback(mManager, mOnSuccess, mWindowID, domStream);

     mListener->AudioConfig(aec_on, (uint32_t) aec,
                            agc_on, (uint32_t) agc,
@@ -852,11 +864,11 @@ public:
     // because that can take a while.
     // Pass ownership of trackunion to the MediaOperationTask
     // to ensure it's kept alive until the MediaOperationTask runs (at least).
- MediaManager::PostTask(FROM_HERE, - new MediaOperationTask(MEDIA_START, mListener, trackunion, - tracksAvailableCallback, - mAudioSource, mVideoSource, false, mWindowID, - mOnFailure.forget())); + MediaManager::PostTask( + FROM_HERE, new MediaOperationTask(MEDIA_START, mListener, domStream, + tracksAvailableCallback, mAudioSource, + mVideoSource, false, mWindowID, + mOnFailure.forget())); // We won't need mOnFailure now. mOnFailure = nullptr; @@ -2075,7 +2087,7 @@ StopSharingCallback(MediaManager *aThis, listener->Invalidate(); } listener->Remove(); - listener->StopScreenWindowSharing(); + listener->StopSharing(); } aListeners->Clear(); aThis->RemoveWindowID(aWindowID); @@ -2398,7 +2410,7 @@ MediaManager::Observe(nsISupports* aSubject, const char* aTopic, uint64_t windowID = PromiseFlatString(Substring(data, strlen("screen:"))).ToInteger64(&rv); MOZ_ASSERT(NS_SUCCEEDED(rv)); if (NS_SUCCEEDED(rv)) { - LOG(("Revoking Screeen/windowCapture access for window %llu", windowID)); + LOG(("Revoking Screen/windowCapture access for window %llu", windowID)); StopScreensharing(windowID); } } else { @@ -2579,7 +2591,7 @@ StopScreensharingCallback(MediaManager *aThis, if (aListeners) { auto length = aListeners->Length(); for (size_t i = 0; i < length; ++i) { - aListeners->ElementAt(i)->StopScreenWindowSharing(); + aListeners->ElementAt(i)->StopSharing(); } } } @@ -2741,7 +2753,7 @@ GetUserMediaCallbackMediaStreamListener::Invalidate() // Doesn't kill audio // XXX refactor to combine with Invalidate()? 
 void
-GetUserMediaCallbackMediaStreamListener::StopScreenWindowSharing()
+GetUserMediaCallbackMediaStreamListener::StopSharing()
 {
   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
   if (mVideoSource && !mStopped &&
@@ -2754,6 +2766,13 @@ GetUserMediaCallbackMediaStreamListener::StopScreenWindowSharing()
                                   this, nullptr, nullptr,
                                   nullptr, mVideoSource,
                                   mFinished, mWindowID, nullptr));
+  } else if (mAudioSource &&
+             mAudioSource->GetMediaSource() == dom::MediaSourceEnum::AudioCapture) {
+    nsCOMPtr<nsPIDOMWindow> window = nsGlobalWindow::GetInnerWindowWithId(mWindowID);
+    MOZ_ASSERT(window);
+    window->SetAudioCapture(false);
+    MediaStreamGraph::GetInstance()->UnregisterCaptureStreamForWindow(mWindowID);
+    mStream->Destroy();
+  }
 }

diff --git a/dom/media/MediaManager.h b/dom/media/MediaManager.h
index d7af9c5a7b99..68ab74269201 100644
--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -103,7 +103,7 @@ public:
     return mStream->AsSourceStream();
   }

-  void StopScreenWindowSharing();
+  void StopSharing();

   void StopTrack(TrackID aID, bool aIsAudio);

diff --git a/dom/media/webrtc/MediaEngineWebRTC.cpp b/dom/media/webrtc/MediaEngineWebRTC.cpp
index 34adf582ed88..207e97acb33e 100644
--- a/dom/media/webrtc/MediaEngineWebRTC.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTC.cpp
@@ -291,6 +291,13 @@ MediaEngineWebRTC::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource,
   // We spawn threads to handle gUM runnables, so we must protect the member vars
   MutexAutoLock lock(mMutex);

+  if (aMediaSource == dom::MediaSourceEnum::AudioCapture) {
+    nsRefPtr<MediaEngineWebRTCAudioCaptureSource> audioCaptureSource =
+      new MediaEngineWebRTCAudioCaptureSource(nullptr);
+    aASources->AppendElement(audioCaptureSource);
+    return;
+  }
+
 #ifdef MOZ_WIDGET_ANDROID
   jobject context = mozilla::AndroidBridge::Bridge()->GetGlobalContextRef();

@@ -358,7 +365,7 @@ MediaEngineWebRTC::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource,
       strcpy(uniqueId,deviceName); // safe given assert and initialization/error-check
     }

-    nsRefPtr<MediaEngineWebRTCAudioSource> aSource;
+
nsRefPtr aSource; NS_ConvertUTF8toUTF16 uuid(uniqueId); if (mAudioSources.Get(uuid, getter_AddRefs(aSource))) { // We've already seen this device, just append. @@ -384,9 +391,8 @@ ClearVideoSource (const nsAString&, // unused } static PLDHashOperator -ClearAudioSource (const nsAString&, // unused - MediaEngineWebRTCAudioSource* aData, - void *userArg) +ClearAudioSource(const nsAString &, // unused + MediaEngineAudioSource *aData, void *userArg) { if (aData) { aData->Shutdown(); diff --git a/dom/media/webrtc/MediaEngineWebRTC.h b/dom/media/webrtc/MediaEngineWebRTC.h index 8b05480718b6..112c5134d607 100644 --- a/dom/media/webrtc/MediaEngineWebRTC.h +++ b/dom/media/webrtc/MediaEngineWebRTC.h @@ -133,6 +133,67 @@ private: void GetCapability(size_t aIndex, webrtc::CaptureCapability& aOut) override; }; +class MediaEngineWebRTCAudioCaptureSource : public MediaEngineAudioSource +{ +public: + NS_DECL_THREADSAFE_ISUPPORTS + + explicit MediaEngineWebRTCAudioCaptureSource(const char* aUuid) + : MediaEngineAudioSource(kReleased) + { + } + void GetName(nsAString& aName) override; + void GetUUID(nsACString& aUUID) override; + nsresult Allocate(const dom::MediaTrackConstraints& aConstraints, + const MediaEnginePrefs& aPrefs, + const nsString& aDeviceId) override + { + // Nothing to do here, everything is managed in MediaManager.cpp + return NS_OK; + } + nsresult Deallocate() override + { + // Nothing to do here, everything is managed in MediaManager.cpp + return NS_OK; + } + void Shutdown() override + { + // Nothing to do here, everything is managed in MediaManager.cpp + } + nsresult Start(SourceMediaStream* aMediaStream, TrackID aId) override; + nsresult Stop(SourceMediaStream* aMediaStream, TrackID aId) override; + void SetDirectListeners(bool aDirect) override + {} + nsresult Config(bool aEchoOn, uint32_t aEcho, bool aAgcOn, + uint32_t aAGC, bool aNoiseOn, uint32_t aNoise, + int32_t aPlayoutDelay) override + { + return NS_OK; + } + void NotifyPull(MediaStreamGraph* aGraph, 
SourceMediaStream* aSource, + TrackID aID, StreamTime aDesiredTime) override + {} + const dom::MediaSourceEnum GetMediaSource() override + { + return dom::MediaSourceEnum::AudioCapture; + } + bool IsFake() override + { + return false; + } + nsresult TakePhoto(PhotoCallback* aCallback) override + { + return NS_ERROR_NOT_IMPLEMENTED; + } + uint32_t GetBestFitnessDistance( + const nsTArray& aConstraintSets, + const nsString& aDeviceId) override; + +protected: + virtual ~MediaEngineWebRTCAudioCaptureSource() { Shutdown(); } + nsCString mUUID; +}; + class MediaEngineWebRTCMicrophoneSource : public MediaEngineAudioSource, public webrtc::VoEMediaProcess, private MediaConstraintsHelper @@ -297,8 +358,7 @@ private: // Store devices we've already seen in a hashtable for quick return. // Maps UUID to MediaEngineSource (one set for audio, one for video). nsRefPtrHashtable mVideoSources; - nsRefPtrHashtable - mAudioSources; + nsRefPtrHashtable mAudioSources; }; } diff --git a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp index 2aca1ecbbf99..223a5acde76e 100644 --- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp +++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp @@ -44,6 +44,7 @@ extern PRLogModuleInfo* GetMediaManagerLog(); * Webrtc microphone source source. 
*/ NS_IMPL_ISUPPORTS0(MediaEngineWebRTCMicrophoneSource) +NS_IMPL_ISUPPORTS0(MediaEngineWebRTCAudioCaptureSource) // XXX temp until MSG supports registration StaticRefPtr gFarendObserver; @@ -620,4 +621,55 @@ MediaEngineWebRTCMicrophoneSource::Process(int channel, return; } +void +MediaEngineWebRTCAudioCaptureSource::GetName(nsAString &aName) +{ + aName.AssignLiteral("AudioCapture"); +} +void +MediaEngineWebRTCAudioCaptureSource::GetUUID(nsACString &aUUID) +{ + nsID uuid; + char uuidBuffer[NSID_LENGTH]; + nsCString asciiString; + ErrorResult rv; + + rv = nsContentUtils::GenerateUUIDInPlace(uuid); + if (rv.Failed()) { + aUUID.AssignLiteral(""); + return; + } + + + uuid.ToProvidedString(uuidBuffer); + asciiString.AssignASCII(uuidBuffer); + + // Remove {} and the null terminator + aUUID.Assign(Substring(asciiString, 1, NSID_LENGTH - 3)); +} + +nsresult +MediaEngineWebRTCAudioCaptureSource::Start(SourceMediaStream *aMediaStream, + TrackID aId) +{ + aMediaStream->AddTrack(aId, 0, new AudioSegment()); + return NS_OK; +} + +nsresult +MediaEngineWebRTCAudioCaptureSource::Stop(SourceMediaStream *aMediaStream, + TrackID aId) +{ + aMediaStream->EndAllTrackAndFinish(); + return NS_OK; +} + +uint32_t +MediaEngineWebRTCAudioCaptureSource::GetBestFitnessDistance( + const nsTArray& aConstraintSets, + const nsString& aDeviceId) +{ + // There is only one way of capturing audio for now, and it's always adequate. + return 0; +} } diff --git a/dom/webidl/Constraints.webidl b/dom/webidl/Constraints.webidl index fc6275b59b4e..f7f0c706a9d7 100644 --- a/dom/webidl/Constraints.webidl +++ b/dom/webidl/Constraints.webidl @@ -25,6 +25,7 @@ enum MediaSourceEnum { "window", "browser", "microphone", + "audioCapture", "other" };