Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1395819 - Add b-c test cases for removing temporary permissions per-kind by stopping tracks. r=johannh,jib
Differential Revision: https://phabricator.services.mozilla.com/D109942
Parent: 4742401b5b
Commit: 5a34ce8fc8
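For context, the behavior exercised by these tests is per-kind revocation of temporary getUserMedia grants: when a page stops all of its live tracks of one kind, only that kind's temporary permission is removed while the other kind keeps sharing. A minimal page-side sketch of the pattern the test pages use, relying only on standard WebRTC APIs (no test harness assumed):

    const stream = await navigator.mediaDevices.getUserMedia({
      audio: true,
      video: true,
    });
    // Stop only the camera tracks; the microphone keeps sharing, so only the
    // temporary camera grant should be dropped.
    for (const track of stream.getVideoTracks()) {
      track.stop();
      stream.removeTrack(track);
    }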
@@ -125,6 +125,268 @@ var gTests = [
    },
  },

  {
    desc: "getUserMedia audio+camera -camera",
    run: async function checkAudioVideoWhileLiveTracksExist_audio_nocamera() {
      // State: fresh

      {
        const popupShown = promisePopupNotificationShown("webRTC-shareDevices");
        const request = expectObserverCalled("getUserMedia:request");
        await promiseRequestDevice(true, true);
        await popupShown;
        await request;
        const indicator = promiseIndicatorWindow();
        const response = expectObserverCalled("getUserMedia:response:allow");
        const deviceEvents = expectObserverCalled("recording-device-events");
        await promiseMessage("ok", () => {
          PopupNotifications.panel.firstElementChild.button.click();
        });
        await response;
        await deviceEvents;
        Assert.deepEqual(
          await getMediaCaptureState(),
          { audio: true, video: true },
          "expected camera and microphone to be shared"
        );
        await indicator;
        await checkSharingUI({ audio: true, video: true });

        // Stop the camera track.
        await stopTracks("video");
        await checkSharingUI({ audio: true, video: false });
      }

      // State: live audio

      {
        // If there's an active audio track from an audio+camera request,
        // gUM(camera) causes a prompt.
        const request = expectObserverCalled("getUserMedia:request");
        const popupShown = promisePopupNotificationShown("webRTC-shareDevices");
        await promiseRequestDevice(false, true);
        await popupShown;
        await request;
        checkDeviceSelectors(false, true);

        // Allow and stop the camera again.
        const response = expectObserverCalled("getUserMedia:response:allow");
        const deviceEvents = expectObserverCalled("recording-device-events");
        await promiseMessage("ok", () => {
          PopupNotifications.panel.firstElementChild.button.click();
        });
        await response;
        await deviceEvents;
        Assert.deepEqual(
          await getMediaCaptureState(),
          { audio: true, video: true },
          "expected camera and microphone to be shared"
        );
        await checkSharingUI({ audio: true, video: true });

        await stopTracks("video");
        await checkSharingUI({ audio: true, video: false });
      }

      // State: live audio

      {
        // If there's an active audio track from an audio+camera request,
        // gUM(audio+camera) causes a prompt.
        const request = expectObserverCalled("getUserMedia:request");
        const popupShown = promisePopupNotificationShown("webRTC-shareDevices");
        await promiseRequestDevice(true, true);
        await popupShown;
        await request;
        checkDeviceSelectors(true, true);

        // Allow and stop the camera again.
        const response = expectObserverCalled("getUserMedia:response:allow");
        const deviceEvents = expectObserverCalled("recording-device-events");
        await promiseMessage("ok", () => {
          PopupNotifications.panel.firstElementChild.button.click();
        });
        await response;
        await deviceEvents;
        Assert.deepEqual(
          await getMediaCaptureState(),
          { audio: true, video: true },
          "expected camera and microphone to be shared"
        );
        await checkSharingUI({ audio: true, video: true });

        await stopTracks("video");
        await checkSharingUI({ audio: true, video: false });
      }

      // State: live audio

      {
        // After closing all streams, gUM(audio) causes a prompt.
        await closeStream();
        const request = expectObserverCalled("getUserMedia:request");
        const popupShown = promisePopupNotificationShown("webRTC-shareDevices");
        await promiseRequestDevice(true, false);
        await popupShown;
        await request;
        checkDeviceSelectors(true, false);

        const response = expectObserverCalled("getUserMedia:response:deny");
        const windowEnded = expectObserverCalled("recording-window-ended");

        await promiseMessage(permissionError, () => {
          activateSecondaryAction(kActionDeny);
        });

        await response;
        await windowEnded;

        await checkNotSharing();
        SitePermissions.removeFromPrincipal(
          null,
          "camera",
          gBrowser.selectedBrowser
        );
        SitePermissions.removeFromPrincipal(
          null,
          "microphone",
          gBrowser.selectedBrowser
        );
      }
    },
  },

  {
    desc: "getUserMedia audio+camera -audio",
    run: async function checkAudioVideoWhileLiveTracksExist_camera_noaudio() {
      // State: fresh

      {
        const popupShown = promisePopupNotificationShown("webRTC-shareDevices");
        const request = expectObserverCalled("getUserMedia:request");
        await promiseRequestDevice(true, true);
        await popupShown;
        await request;
        const indicator = promiseIndicatorWindow();
        const response = expectObserverCalled("getUserMedia:response:allow");
        const deviceEvents = expectObserverCalled("recording-device-events");
        await promiseMessage("ok", () => {
          PopupNotifications.panel.firstElementChild.button.click();
        });
        await response;
        await deviceEvents;
        Assert.deepEqual(
          await getMediaCaptureState(),
          { audio: true, video: true },
          "expected camera and microphone to be shared"
        );
        await indicator;
        await checkSharingUI({ audio: true, video: true });

        // Stop the audio track.
        await stopTracks("audio");
        await checkSharingUI({ audio: false, video: true });
      }

      // State: live camera

      {
        // If there's an active video track from an audio+camera request,
        // gUM(audio) causes a prompt.
        const request = expectObserverCalled("getUserMedia:request");
        const popupShown = promisePopupNotificationShown("webRTC-shareDevices");
        await promiseRequestDevice(true, false);
        await popupShown;
        await request;
        checkDeviceSelectors(true, false);

        // Allow and stop the microphone again.
        const response = expectObserverCalled("getUserMedia:response:allow");
        const deviceEvents = expectObserverCalled("recording-device-events");
        await promiseMessage("ok", () => {
          PopupNotifications.panel.firstElementChild.button.click();
        });
        await response;
        await deviceEvents;
        Assert.deepEqual(
          await getMediaCaptureState(),
          { audio: true, video: true },
          "expected camera and microphone to be shared"
        );
        await checkSharingUI({ audio: true, video: true });

        await stopTracks("audio");
        await checkSharingUI({ audio: false, video: true });
      }

      // State: live camera

      {
        // If there's an active video track from an audio+camera request,
        // gUM(audio+camera) causes a prompt.
        const request = expectObserverCalled("getUserMedia:request");
        const popupShown = promisePopupNotificationShown("webRTC-shareDevices");
        await promiseRequestDevice(true, true);
        await popupShown;
        await request;
        checkDeviceSelectors(true, true);

        // Allow and stop the microphone again.
        const response = expectObserverCalled("getUserMedia:response:allow");
        const deviceEvents = expectObserverCalled("recording-device-events");
        await promiseMessage("ok", () => {
          PopupNotifications.panel.firstElementChild.button.click();
        });
        await response;
        await deviceEvents;
        Assert.deepEqual(
          await getMediaCaptureState(),
          { audio: true, video: true },
          "expected camera and microphone to be shared"
        );
        await checkSharingUI({ audio: true, video: true });

        await stopTracks("audio");
        await checkSharingUI({ audio: false, video: true });
      }

      // State: live camera

      {
        // After closing all streams, gUM(camera) causes a prompt.
        await closeStream();
        const request = expectObserverCalled("getUserMedia:request");
        const popupShown = promisePopupNotificationShown("webRTC-shareDevices");
        await promiseRequestDevice(false, true);
        await popupShown;
        await request;
        checkDeviceSelectors(false, true);

        const response = expectObserverCalled("getUserMedia:response:deny");
        const windowEnded = expectObserverCalled("recording-window-ended");

        await promiseMessage(permissionError, () => {
          activateSecondaryAction(kActionDeny);
        });

        await response;
        await windowEnded;

        await checkNotSharing();
        SitePermissions.removeFromPrincipal(
          null,
          "camera",
          gBrowser.selectedBrowser
        );
        SitePermissions.removeFromPrincipal(
          null,
          "microphone",
          gBrowser.selectedBrowser
        );
      }
    },
  },

  {
    desc: "getUserMedia camera",
    run: async function checkAudioVideoWhileLiveTracksExist_camera() {
@@ -74,11 +74,27 @@ async function requestDevice(aAudio, aVideo, aShare, aBadDevice = false) {
}
message("pending");

function stopTracks(aKind) {
  for (let stream of gStreams) {
    for (let track of stream.getTracks()) {
      if (track.kind == aKind) {
        track.stop();
        stream.removeTrack(track);
      }
    }
  }
  gStreams = gStreams.filter(s => !!s.getTracks().length);
  if (aKind == "video") {
    gVideoEvents = [];
  } else if (aKind == "audio") {
    gAudioEvents = [];
  }
}
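// Note (added comment, not part of the patch): the browser-chrome cases above
// do not call this page helper directly; the chrome-side stopTracks() wrapper
// added to head.js (last hunk below) runs it in the content process via
// SpecialPowers.spawn and content.wrappedJSObject.stopTracks(kind).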

function closeStream() {
  for (let stream of gStreams) {
    if (stream) {
      stream.getTracks().forEach(t => t.stop());
    }
  }
  gStreams = [];
@@ -74,11 +74,27 @@ async function requestDevice(aAudio, aVideo, aShare, aBadDevice = false) {
}
message("pending");

function stopTracks(aKind) {
  for (let stream of gStreams) {
    for (let track of stream.getTracks()) {
      if (track.kind == aKind) {
        track.stop();
        stream.removeTrack(track);
      }
    }
  }
  gStreams = gStreams.filter(s => !!s.getTracks().length);
  if (aKind == "video") {
    gVideoEvents = [];
  } else if (aKind == "audio") {
    gAudioEvents = [];
  }
}

function closeStream() {
  for (let stream of gStreams) {
    if (stream) {
      stream.getTracks().forEach(t => t.stop());
    }
  }
  gStreams = [];
@@ -44,11 +44,22 @@ function requestDevice(aAudio, aVideo, aShare) {
}
message("pending");

function stopTracks(aKind) {
  for (let stream of gStreams) {
    for (let track of stream.getTracks()) {
      if (track.kind == aKind) {
        track.stop();
        stream.removeTrack(track);
      }
    }
  }
  gStreams = gStreams.filter(s => !!s.getTracks().length);
}

function closeStream() {
  for (let stream of gStreams) {
    if (stream) {
      stream.getTracks().forEach(t => t.stop());
    }
  }
  gStreams = [];
@@ -44,11 +44,22 @@ function requestDevice(aAudio, aVideo, aShare) {
}
message("pending");

function stopTracks(aKind) {
  for (let stream of gStreams) {
    for (let track of stream.getTracks()) {
      if (track.kind == aKind) {
        track.stop();
        stream.removeTrack(track);
      }
    }
  }
  gStreams = gStreams.filter(s => !!s.getTracks().length);
}

function closeStream() {
  for (let stream of gStreams) {
    if (stream) {
      stream.getTracks().forEach(t => t.stop());
    }
  }
  gStreams = [];
@@ -692,6 +692,51 @@ async function promiseRequestDevice(
  );
}

async function stopTracks(
  aKind,
  aAlreadyStopped,
  aLastTracks,
  aFrameId,
  aBrowsingContext,
  aBrowsingContextToObserve
) {
  // If the observers are listening to other frames, listen for a notification
  // on the right subframe.
  let frameBC =
    aBrowsingContext ??
    (await getBrowsingContextForFrame(
      gBrowser.selectedBrowser.browsingContext,
      aFrameId
    ));

  let observerPromises = [];
  if (!aAlreadyStopped) {
    observerPromises.push(
      expectObserverCalled(
        "recording-device-events",
        1,
        aBrowsingContextToObserve
      )
    );
  }
  if (aLastTracks) {
    observerPromises.push(
      expectObserverCalled(
        "recording-window-ended",
        1,
        aBrowsingContextToObserve
      )
    );
  }

  info(`Stopping all ${aKind} tracks`);
  await SpecialPowers.spawn(frameBC, [aKind], async function(kind) {
    content.wrappedJSObject.stopTracks(kind);
  });

  await Promise.all(observerPromises);
}
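For reference, a hedged sketch of how a browser-chrome test might drive this helper; the flag values follow the parameters above and are not copied verbatim from the patch:

    // Stop only the camera tracks; audio keeps sharing, so only
    // "recording-device-events" is expected.
    await stopTracks("video");
    // Stop the remaining audio tracks; they are the last live tracks in the
    // page, so "recording-window-ended" is expected as well.
    await stopTracks("audio", false, true);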

async function closeStream(
  aAlreadyClosed,
  aFrameId,