Bug 1652884 - Add observer messages to mute/unmute all camera tracks. r=pehrsons

Add UA (user agent) muting, a spec-supported feature that somewhat mirrors
track enabling/disabling, except only the browser controls it. The effect
on track sinks is additive: a track must be both unmuted and enabled for
there to be output.

Fire mute/unmute events in JS, and observably set track.muted independently
of track.enabled (reusing existing infrastructure already in use by
RTCPeerConnection tracks).

Low-level: add mDeviceMuted and SetMutedFor(), modeled after mDeviceEnabled
and SetEnabledFor(), as parallel device state for both camera and microphone,
for symmetry and ease of maintenance.

High-level: only expose observer messages to mute/unmute the camera for now,
since that is what is immediately required for Android in bug 1564451.

Differential Revision: https://phabricator.services.mozilla.com/D84222
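
As a rough illustration of the additive semantics described above (not code
from this patch): a device delivers output to its track sinks only when it is
enabled and the user agent has not muted it, mirroring the
`state.mDeviceEnabled && !state.mDeviceMuted` check added to
`CapturingSource()` in the diff below. The helper name is hypothetical.

  // Hypothetical helper, not part of the patch: a device produces output
  // only when both conditions hold.
  //  - aDeviceEnabled mirrors DeviceState::mDeviceEnabled (app-controlled)
  //  - aDeviceMuted   mirrors DeviceState::mDeviceMuted   (user-agent-controlled)
  static bool ProducesOutput(bool aDeviceEnabled, bool aDeviceMuted) {
    return aDeviceEnabled && !aDeviceMuted;
  }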
Author: Jan-Ivar Bruaroey 2020-08-19 14:03:09 +00:00
Parent: cfd7d0ea9c
Commit: 31c3ad60ff
3 changed files: 255 additions and 100 deletions


@@ -122,6 +122,20 @@ class WebRTCChild extends JSWindowActorChild {
aMessage.data
);
break;
case "webrtc:MuteCamera":
Services.obs.notifyObservers(
null,
"getUserMedia:muteVideo",
aMessage.data
);
break;
case "webrtc:UnmuteCamera":
Services.obs.notifyObservers(
null,
"getUserMedia:unmuteVideo",
aMessage.data
);
break;
}
}
}
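
For illustration only (not part of this patch): the observer notification
fired by the JS actor above can equally be broadcast from native code in the
content process, which is the contract MediaManager registers for further
down in this diff. The actual caller for Android lands separately in bug
1564451; the includes and the helper name below are assumptions.

  #include "mozilla/Services.h"
  #include "nsCOMPtr.h"
  #include "nsIObserverService.h"

  // Illustrative sketch: broadcast the topic that MediaManager::Observe()
  // maps to OnCameraMute(true/false). Must be called on the main thread.
  static void MuteAllCameras(bool aMute) {
    nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
    if (!obs) {
      return;
    }
    obs->NotifyObservers(nullptr,
                         aMute ? "getUserMedia:muteVideo"
                               : "getUserMedia:unmuteVideo",
                         nullptr);
  }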


@@ -210,10 +210,16 @@ struct DeviceState {
// MainThread only.
bool mStopped = false;
// true if mDevice is currently enabled, i.e., turned on and capturing.
// true if mDevice is currently enabled.
// A device must be both enabled and unmuted to be turned on and capturing.
// MainThread only.
bool mDeviceEnabled = false;
// true if mDevice is currently muted.
// A device that is either muted or disabled is turned off and not capturing.
// MainThread only.
bool mDeviceMuted = false;
// true if the application has currently enabled mDevice.
// MainThread only.
bool mTrackEnabled = false;
@@ -228,7 +234,7 @@ struct DeviceState {
bool mOperationInProgress = false;
// true if we are allowed to turn off the underlying source while all tracks
// are disabled.
// are disabled. Only affects disabling; always turns off on user-agent mute.
// MainThread only.
bool mOffWhileDisabled = false;
@@ -378,12 +384,31 @@ class SourceListener : public SupportsWeakPtr {
*/
void SetEnabledFor(MediaTrack* aTrack, bool aEnabled);
/**
* Posts a task to set the muted state of the device associated with
* aTrackSource to aMuted and notifies the associated window listener that a
* track's state has changed.
*
* Turning the hardware off while the device is muted is supported for:
* - Camera (enabled by default, controlled by pref
* "media.getusermedia.camera.off_while_disabled.enabled")
* - Microphone (disabled by default, controlled by pref
* "media.getusermedia.microphone.off_while_disabled.enabled")
* Screen-, app-, or windowsharing is not supported at this time.
*/
void SetMutedFor(LocalTrackSource* aTrackSource, bool aMuted);
/**
* Stops all screen/app/window/audioCapture sharing, but not camera or
* microphone.
*/
void StopSharing();
/**
* Mutes or unmutes the associated video device if it is a camera.
*/
void MuteOrUnmuteCamera(bool aMute);
MediaDevice* GetAudioDevice() const {
return mAudioDeviceState ? mAudioDeviceState->mDevice.get() : nullptr;
}
@@ -411,6 +436,15 @@ class SourceListener : public SupportsWeakPtr {
private:
virtual ~SourceListener() = default;
using DeviceOperationPromise =
MozPromise<nsresult, bool, /* IsExclusive = */ true>;
/**
* Posts a task to start or stop the device associated with aTrack, based on
* a passed-in boolean. Private method used by SetEnabledFor and SetMutedFor.
*/
RefPtr<DeviceOperationPromise> UpdateDevice(MediaTrack* aTrack, bool aOn);
/**
* Returns a pointer to the device state for aTrack.
*
@@ -638,6 +672,8 @@ class GetUserMediaWindowListener {
void StopRawID(const nsString& removedDeviceID);
void MuteOrUnmuteCameras(bool aMute);
/**
* Called by one of our SourceListeners when one of its tracks has changed so
* that chrome state is affected.
@@ -771,6 +807,10 @@ class LocalTrackSource : public MediaStreamTrackSource {
}
}
void Mute() { MutedChanged(true); }
void Unmute() { MutedChanged(false); }
const MediaSourceEnum mSource;
const RefPtr<MediaTrack> mTrack;
const RefPtr<const PeerIdentity> mPeerIdentity;
@@ -1995,6 +2035,8 @@ MediaManager* MediaManager::Get() {
obs->AddObserver(sSingleton, "getUserMedia:response:noOSPermission",
false);
obs->AddObserver(sSingleton, "getUserMedia:revoke", false);
obs->AddObserver(sSingleton, "getUserMedia:muteVideo", false);
obs->AddObserver(sSingleton, "getUserMedia:unmuteVideo", false);
}
// else MediaManager won't work properly and will leak (see bug 837874)
nsCOMPtr<nsIPrefBranch> prefs = do_GetService(NS_PREFSERVICE_CONTRACTID);
@@ -3392,6 +3434,16 @@ void MediaManager::OnNavigation(uint64_t aWindowID) {
MOZ_ASSERT(!GetWindowListener(aWindowID));
}
void MediaManager::OnCameraMute(bool aMute) {
MOZ_ASSERT(NS_IsMainThread());
LOG("OnCameraMute for all windows");
// This is safe since we're on main-thread, and the windowlist can only
// be added to from the main-thread
for (auto iter = mActiveWindows.Iter(); !iter.Done(); iter.Next()) {
iter.UserData()->MuteOrUnmuteCameras(aMute);
}
}
void MediaManager::AddWindowID(uint64_t aWindowId,
RefPtr<GetUserMediaWindowListener> aListener) {
MOZ_ASSERT(NS_IsMainThread());
@@ -3519,6 +3571,8 @@ void MediaManager::Shutdown() {
obs->RemoveObserver(this, "getUserMedia:response:deny");
obs->RemoveObserver(this, "getUserMedia:response:noOSPermission");
obs->RemoveObserver(this, "getUserMedia:revoke");
obs->RemoveObserver(this, "getUserMedia:muteVideo");
obs->RemoveObserver(this, "getUserMedia:unmuteVideo");
nsCOMPtr<nsIPrefBranch> prefs = do_GetService(NS_PREFSERVICE_CONTRACTID);
if (prefs) {
@@ -3670,6 +3724,23 @@ bool IsGUMResponseNoAccess(const char* aTopic,
return false;
}
static MediaSourceEnum ParseScreenColonWindowID(const char16_t* aData,
uint64_t* aWindowIDOut) {
MOZ_ASSERT(aWindowIDOut);
// may be windowid or screen:windowid
const nsDependentString data(aData);
if (Substring(data, 0, strlen("screen:")).EqualsLiteral("screen:")) {
nsresult rv;
*aWindowIDOut = Substring(data, strlen("screen:")).ToInteger64(&rv);
MOZ_RELEASE_ASSERT(NS_SUCCEEDED(rv));
return MediaSourceEnum::Screen;
}
nsresult rv;
*aWindowIDOut = data.ToInteger64(&rv);
MOZ_RELEASE_ASSERT(NS_SUCCEEDED(rv));
return MediaSourceEnum::Camera;
}
nsresult MediaManager::Observe(nsISupports* aSubject, const char* aTopic,
const char16_t* aData) {
MOZ_ASSERT(NS_IsMainThread());
@@ -3774,28 +3845,20 @@ nsresult MediaManager::Observe(nsISupports* aSubject, const char* aTopic,
return NS_OK;
} else if (!strcmp(aTopic, "getUserMedia:revoke")) {
nsresult rv;
// may be windowid or screen:windowid
const nsDependentString data(aData);
if (Substring(data, 0, strlen("screen:")).EqualsLiteral("screen:")) {
uint64_t windowID = Substring(data, strlen("screen:")).ToInteger64(&rv);
MOZ_ASSERT(NS_SUCCEEDED(rv));
if (NS_SUCCEEDED(rv)) {
LOG("Revoking Screen/windowCapture access for window %" PRIu64,
windowID);
StopScreensharing(windowID);
}
uint64_t windowID;
if (ParseScreenColonWindowID(aData, &windowID) == MediaSourceEnum::Screen) {
LOG("Revoking ScreenCapture access for window %" PRIu64, windowID);
StopScreensharing(windowID);
} else {
uint64_t windowID = data.ToInteger64(&rv);
MOZ_ASSERT(NS_SUCCEEDED(rv));
if (NS_SUCCEEDED(rv)) {
LOG("Revoking MediaCapture access for window %" PRIu64, windowID);
OnNavigation(windowID);
}
LOG("Revoking MediaCapture access for window %" PRIu64, windowID);
OnNavigation(windowID);
}
return NS_OK;
} else if (!strcmp(aTopic, "getUserMedia:muteVideo") ||
!strcmp(aTopic, "getUserMedia:unmuteVideo")) {
OnCameraMute(!strcmp(aTopic, "getUserMedia:muteVideo"));
return NS_OK;
}
return NS_OK;
}
@@ -4275,6 +4338,80 @@ void SourceListener::GetSettingsFor(MediaTrack* aTrack,
}
}
static bool SameGroupAsCurrentAudioOutput(const nsString& aGroupId) {
CubebDeviceEnumerator* enumerator = CubebDeviceEnumerator::GetInstance();
// Get the current graph's device info. This is always the
// default audio output device for now.
RefPtr<AudioDeviceInfo> outputDevice =
enumerator->DefaultDevice(CubebDeviceEnumerator::Side::OUTPUT);
return outputDevice && outputDevice->GroupID().Equals(aGroupId);
}
auto SourceListener::UpdateDevice(MediaTrack* aTrack, bool aOn)
-> RefPtr<DeviceOperationPromise> {
MOZ_ASSERT(NS_IsMainThread());
RefPtr<SourceListener> self = this;
DeviceState& state = GetDeviceStateFor(aTrack);
nsString groupId;
state.mDevice->GetRawGroupId(groupId);
return MediaManager::Dispatch<DeviceOperationPromise>(
__func__,
[self, device = state.mDevice, aOn,
groupId](MozPromiseHolder<DeviceOperationPromise>& h) {
if (device->mKind == dom::MediaDeviceKind::Audioinput && !aOn &&
SameGroupAsCurrentAudioOutput(groupId)) {
// Don't turn off the microphone of a device that is on the
// same physical device as the output.
//
// Also don't take this branch when turning on, in case the
// default audio output device has changed. The AudioInput
// source start/stop are idempotent, so this works.
LOG("Not turning device off, as it matches audio output (%s)",
NS_ConvertUTF16toUTF8(groupId).get());
h.Resolve(NS_OK, __func__);
return;
}
LOG("Turning %s device (%s)", aOn ? "on" : "off",
NS_ConvertUTF16toUTF8(groupId).get());
h.Resolve(aOn ? device->Start() : device->Stop(), __func__);
})
->Then(
GetMainThreadSerialEventTarget(), __func__,
[self, this, &state, track = RefPtr<MediaTrack>(aTrack),
aOn](nsresult aResult) {
if (state.mStopped) {
// Device was stopped on main thread during the operation. Done.
return DeviceOperationPromise::CreateAndResolve(aResult,
__func__);
}
LOG("SourceListener %p turning %s %s input device for track %p %s",
this, aOn ? "on" : "off",
&state == mAudioDeviceState.get() ? "audio" : "video",
track.get(), NS_SUCCEEDED(aResult) ? "succeeded" : "failed");
if (NS_FAILED(aResult) && aResult != NS_ERROR_ABORT) {
// This path handles errors from starting or stopping the device.
// NS_ERROR_ABORT are for cases where *we* aborted. They need
// graceful handling.
if (aOn) {
// Starting the device failed. Stopping the track here will make
// the MediaStreamTrack end after a pass through the
// MediaTrackGraph.
StopTrack(track);
} else {
// Stopping the device failed. This is odd, but not fatal.
MOZ_ASSERT_UNREACHABLE("The device should be stoppable");
}
}
return DeviceOperationPromise::CreateAndResolve(aResult, __func__);
},
[]() {
MOZ_ASSERT_UNREACHABLE("Unexpected and unhandled reject");
return DeviceOperationPromise::CreateAndReject(false, __func__);
});
}
void SourceListener::SetEnabledFor(MediaTrack* aTrack, bool aEnable) {
MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
MOZ_ASSERT(Activated(), "No device to set enabled state for");
@@ -4327,8 +4464,6 @@ void SourceListener::SetEnabledFor(MediaTrack* aTrack, bool aEnable) {
timerPromise = state.mDisableTimer->WaitFor(delay, __func__);
}
typedef MozPromise<nsresult, bool, /* IsExclusive = */ true>
DeviceOperationPromise;
RefPtr<SourceListener> self = this;
timerPromise
->Then(
@@ -4357,53 +4492,14 @@ void SourceListener::SetEnabledFor(MediaTrack* aTrack, bool aEnable) {
if (mWindowListener) {
mWindowListener->ChromeAffectingStateChanged();
}
if (!state.mOffWhileDisabled) {
if (!state.mOffWhileDisabled || state.mDeviceMuted) {
// If the feature to turn a device off while disabled is itself
// disabled we shortcut the device operation and tell the
// disabled, or the device is currently user agent muted, then
// we shortcut the device operation and tell the
// ux-updating code that everything went fine.
return DeviceOperationPromise::CreateAndResolve(NS_OK, __func__);
}
nsString inputDeviceGroupId;
state.mDevice->GetRawGroupId(inputDeviceGroupId);
return MediaManager::Dispatch<DeviceOperationPromise>(
__func__,
[self, device = state.mDevice, aEnable, inputDeviceGroupId](
MozPromiseHolder<DeviceOperationPromise>& h) {
// Only take this branch when muting, to avoid muting, in case
// the default audio output device has changed and we need to
// really call `Start` on the source. The AudioInput source
// start/stop are idempotent, so this works.
if (device->mKind == dom::MediaDeviceKind::Audioinput &&
!aEnable) {
// Don't turn off the microphone of a device that is on the
// same physical device as the output.
CubebDeviceEnumerator* enumerator =
CubebDeviceEnumerator::GetInstance();
// Get the current graph's device info. This is always the
// default audio output device for now.
RefPtr<AudioDeviceInfo> outputDevice =
enumerator->DefaultDevice(
CubebDeviceEnumerator::Side::OUTPUT);
if (outputDevice &&
outputDevice->GroupID().Equals(inputDeviceGroupId)) {
LOG("Device group id match when %s, "
"not turning the input device off (%s)",
aEnable ? "unmuting" : "muting",
NS_ConvertUTF16toUTF8(outputDevice->GroupID()).get());
h.Resolve(NS_OK, __func__);
return;
}
}
LOG("Device group id don't match when %s, "
"not turning the audio input device off (%s)",
aEnable ? "unmuting" : "muting",
NS_ConvertUTF16toUTF8(inputDeviceGroupId).get());
h.Resolve(aEnable ? device->Start() : device->Stop(),
__func__);
});
return UpdateDevice(track, aEnable);
},
[]() {
// Timer was canceled by us. We signal this with NS_ERROR_ABORT.
@@ -4425,28 +4521,10 @@ void SourceListener::SetEnabledFor(MediaTrack* aTrack, bool aEnable) {
return;
}
LOG("SourceListener %p %s %s track for track %p %s", this,
aEnable ? "enabling" : "disabling",
&state == mAudioDeviceState.get() ? "audio" : "video",
track.get(), NS_SUCCEEDED(aResult) ? "succeeded" : "failed");
if (NS_FAILED(aResult) && aResult != NS_ERROR_ABORT) {
// This path handles errors from starting or stopping the device.
// NS_ERROR_ABORT are for cases where *we* aborted. They need
// graceful handling.
if (aEnable) {
// Starting the device failed. Stopping the track here will make
// the MediaStreamTrack end after a pass through the
// MediaTrackGraph.
StopTrack(track);
} else {
// Stopping the device failed. This is odd, but not fatal.
MOZ_ASSERT_UNREACHABLE("The device should be stoppable");
// To keep our internal state sane in this case, we disallow
// future stops due to disable.
state.mOffWhileDisabled = false;
}
if (NS_FAILED(aResult) && aResult != NS_ERROR_ABORT && !aEnable) {
// To keep our internal state sane in this case, we disallow
// future stops due to disable.
state.mOffWhileDisabled = false;
return;
}
@@ -4456,22 +4534,58 @@ void SourceListener::SetEnabledFor(MediaTrack* aTrack, bool aEnable) {
// update the device state if the track state changed in the
// meantime.
if (state.mTrackEnabled == state.mDeviceEnabled) {
// Intended state is same as device's current state.
// Nothing more to do.
return;
}
// Track state changed during this operation. We'll start over.
if (state.mTrackEnabled) {
SetEnabledFor(track, true);
} else {
SetEnabledFor(track, false);
if (state.mTrackEnabled != state.mDeviceEnabled) {
// Track state changed during this operation. We'll start over.
SetEnabledFor(track, state.mTrackEnabled);
}
},
[]() { MOZ_ASSERT_UNREACHABLE("Unexpected and unhandled reject"); });
}
void SourceListener::SetMutedFor(LocalTrackSource* aTrackSource, bool aMute) {
MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
MOZ_ASSERT(Activated(), "No device to set muted state for");
MediaTrack* track = aTrackSource->mTrack;
DeviceState& state = GetDeviceStateFor(track);
LOG("SourceListener %p %s %s track for track %p", this,
aMute ? "muting" : "unmuting",
&state == mAudioDeviceState.get() ? "audio" : "video", track);
if (state.mStopped) {
// Device terminally stopped. Updating device state is pointless.
return;
}
if (state.mDeviceMuted == aMute) {
// Device is already in the desired state.
return;
}
LOG("SourceListener %p %s %s track for track %p - starting device operation",
this, aMute ? "muting" : "unmuting",
&state == mAudioDeviceState.get() ? "audio" : "video", track);
state.mDeviceMuted = aMute;
if (mWindowListener) {
mWindowListener->ChromeAffectingStateChanged();
}
// Update trackSource to fire mute/unmute events on all its tracks
if (aMute) {
aTrackSource->Mute();
} else {
aTrackSource->Unmute();
}
if (state.mOffWhileDisabled && !state.mDeviceEnabled &&
state.mDevice->mKind == dom::MediaDeviceKind::Videoinput) {
// Camera is already off. TODO: Revisit once we support UA-muting mics.
return;
}
UpdateDevice(track, !aMute);
}
void SourceListener::StopSharing() {
MOZ_ASSERT(NS_IsMainThread());
@@ -4499,6 +4613,22 @@ void SourceListener::StopSharing() {
}
}
void SourceListener::MuteOrUnmuteCamera(bool aMute) {
MOZ_ASSERT(NS_IsMainThread());
if (mStopped) {
return;
}
MOZ_RELEASE_ASSERT(mWindowListener);
LOG("SourceListener %p MuteOrUnmuteCamera", this);
if (mVideoDeviceState && (mVideoDeviceState->mDevice->GetMediaSource() ==
MediaSourceEnum::Camera)) {
SetMutedFor(mVideoDeviceState->mTrackSource, aMute);
}
}
bool SourceListener::CapturingVideo() const {
MOZ_ASSERT(NS_IsMainThread());
return Activated() && mVideoDeviceState && !mVideoDeviceState->mStopped &&
@@ -4540,9 +4670,9 @@ CaptureState SourceListener::CapturingSource(MediaSourceEnum aSource) const {
return CaptureState::Off;
}
// Source is a match and is active
// Source is a match and is active and unmuted
if (state.mDeviceEnabled) {
if (state.mDeviceEnabled && !state.mDeviceMuted) {
return CaptureState::Enabled;
}
@@ -4648,6 +4778,16 @@ void GetUserMediaWindowListener::StopRawID(const nsString& removedDeviceID) {
}
}
void GetUserMediaWindowListener::MuteOrUnmuteCameras(bool aMute) {
MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
for (auto& source : mActiveListeners) {
if (source->GetVideoDevice()) {
source->MuteOrUnmuteCamera(aMute);
}
}
}
void GetUserMediaWindowListener::ChromeAffectingStateChanged() {
MOZ_ASSERT(NS_IsMainThread());


@@ -254,6 +254,7 @@ class MediaManager final : public nsIMediaManagerService, public nsIObserver {
const nsString& aDeviceId);
void OnNavigation(uint64_t aWindowID);
void OnCameraMute(bool aMute);
bool IsActivelyCapturingOrHasAPermission(uint64_t aWindowId);
MediaEventSource<void>& DeviceListChangeEvent() {