Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1400363 - Part 1: Test muted/onmuted/onunmuted for webrtc cases. r=jib
MozReview-Commit-ID: ABFHeq4Eu6X --HG-- extra : rebase_source : dcfbb5268dc64c92f2c8858af02d243be455f3f3
This commit is contained in:
Parent
63a499f3be
Commit
82aaa61421
|
@ -18,8 +18,18 @@
|
|||
|
||||
test = new PeerConnectionTest(options);
|
||||
test.setMediaConstraints([{audio: true}], []);
|
||||
var haveFirstUnmuteEvent;
|
||||
|
||||
test.chain.insertBefore("PC_REMOTE_SET_LOCAL_DESCRIPTION", [
|
||||
function PC_REMOTE_SETUP_ONUNMUTE_1() {
|
||||
haveFirstUnmuteEvent = haveEvent(test.pcRemote._pc.getReceivers()[0].track, "unmute");
|
||||
}
|
||||
]);
|
||||
|
||||
test.chain.append([
|
||||
function PC_REMOTE_CHECK_AUDIO_UNMUTED() {
|
||||
return haveFirstUnmuteEvent;
|
||||
},
|
||||
function PC_REMOTE_CHECK_AUDIO_FLOWING() {
|
||||
return helper.checkAudioFlowing(test.pcRemote._pc.getRemoteStreams()[0]);
|
||||
}
|
||||
|
|
|
@ -14,6 +14,8 @@
|
|||
runNetworkTest(function (options) {
|
||||
const test = new PeerConnectionTest(options);
|
||||
let originalTrack;
|
||||
let haveMuteEvent;
|
||||
let haveUnmuteEvent;
|
||||
addRenegotiation(test.chain,
|
||||
[
|
||||
function PC_REMOTE_FIND_RECEIVER(test) {
|
||||
|
@ -33,6 +35,9 @@
|
|||
},
|
||||
],
|
||||
[
|
||||
function PC_REMOTE_WAIT_FOR_UNMUTE() {
|
||||
return haveUnmuteEvent;
|
||||
},
|
||||
function PC_REMOTE_CHECK_ADDED_TRACK(test) {
|
||||
is(test.pcRemote._pc.getTransceivers().length, 2,
|
||||
"pcRemote should have two transceivers");
|
||||
|
@ -43,6 +48,9 @@
|
|||
const freq = analyser.binIndexForFrequency(TEST_AUDIO_FREQ);
|
||||
return analyser.waitForAnalysisSuccess(arr => arr[freq] > 200);
|
||||
},
|
||||
function PC_REMOTE_WAIT_FOR_MUTE() {
|
||||
return haveMuteEvent;
|
||||
},
|
||||
function PC_REMOTE_CHECK_REMOVED_TRACK(test) {
|
||||
is(test.pcRemote._pc.getTransceivers().length, 2,
|
||||
"pcRemote should have two transceivers");
|
||||
|
@ -56,6 +64,15 @@
|
|||
]
|
||||
);
|
||||
|
||||
test.chain.insertBefore("PC_REMOTE_SET_LOCAL_DESCRIPTION", [
|
||||
function PC_REMOTE_SETUP_ONMUTE(test) {
|
||||
haveMuteEvent = haveEvent(test.pcRemote._pc.getReceivers()[0].track, "mute");
|
||||
},
|
||||
function PC_REMOTE_SETUP_ONUNMUTE(test) {
|
||||
haveUnmuteEvent = haveEvent(test.pcRemote._pc.getReceivers()[1].track, "unmute");
|
||||
}
|
||||
], false, 1);
|
||||
|
||||
test.setMediaConstraints([{audio: true}], [{audio: true}]);
|
||||
test.run();
|
||||
});
|
||||
|
|
|
@ -16,6 +16,8 @@
|
|||
const test = new PeerConnectionTest(options);
|
||||
const helper = new VideoStreamHelper();
|
||||
var originalTrack;
|
||||
let haveMuteEvent;
|
||||
let haveUnmuteEvent;
|
||||
addRenegotiation(test.chain,
|
||||
[
|
||||
function PC_REMOTE_FIND_RECEIVER(test) {
|
||||
|
@ -37,6 +39,9 @@
|
|||
},
|
||||
],
|
||||
[
|
||||
function PC_REMOTE_WAIT_FOR_UNMUTE() {
|
||||
return haveUnmuteEvent;
|
||||
},
|
||||
function PC_REMOTE_CHECK_ADDED_TRACK(test) {
|
||||
is(test.pcRemote._pc.getTransceivers().length, 2,
|
||||
"pcRemote should have two transceivers");
|
||||
|
@ -46,6 +51,9 @@
|
|||
elem => elem.id.includes(track.id));
|
||||
return helper.checkVideoPlaying(vAdded);
|
||||
},
|
||||
function PC_REMOTE_WAIT_FOR_MUTE() {
|
||||
return haveMuteEvent;
|
||||
},
|
||||
function PC_REMOTE_CHECK_REMOVED_TRACK(test) {
|
||||
is(test.pcRemote._pc.getTransceivers().length, 2,
|
||||
"pcRemote should have two transceivers");
|
||||
|
@ -58,6 +66,15 @@
|
|||
]
|
||||
);
|
||||
|
||||
test.chain.insertBefore("PC_REMOTE_SET_LOCAL_DESCRIPTION", [
|
||||
function PC_REMOTE_SETUP_ONMUTE(test) {
|
||||
haveMuteEvent = haveEvent(test.pcRemote._pc.getReceivers()[0].track, "mute");
|
||||
},
|
||||
function PC_REMOTE_SETUP_ONUNMUTE(test) {
|
||||
haveUnmuteEvent = haveEvent(test.pcRemote._pc.getReceivers()[1].track, "unmute");
|
||||
}
|
||||
], false, 1);
|
||||
|
||||
test.setMediaConstraints([{video: true}], [{video: true}]);
|
||||
test.run();
|
||||
});
|
||||
|
|
|
@ -79,6 +79,24 @@
|
|||
return new Promise(resolve => pc.onnegotiationneeded = resolve);
|
||||
};
|
||||
|
||||
// Attach a listener for |name| events on |target| and return a mutable
// counter object; its |count| field records how many times the event
// has fired since this call.
let countEvents = (target, name) => {
  const tally = {count: 0};
  target.addEventListener(name, () => {
    tally.count += 1;
  });
  return tally;
};
|
||||
|
||||
// Resolve once |track| fires "mute", then verify the track's muted
// attribute agrees with the event that just fired.
let gotMuteEvent = async (track) => {
  await haveEvent(track, "mute");
  ok(track.muted, "track should be muted after onmute");
};
|
||||
|
||||
// Resolve once |track| fires "unmute", then verify the track's muted
// attribute has been cleared to match the event.
let gotUnmuteEvent = async (track) => {
  await haveEvent(track, "unmute");
  ok(!track.muted, "track should not be muted after onunmute");
};
|
||||
|
||||
// Write the expected outcome of the next check to the test log, so a
// failing run shows what the test was looking for.
let logExpected = (expected) => {
  info(`(expected ${JSON.stringify(expected)})`);
};
|
||||
|
@ -144,7 +162,7 @@
|
|||
hasProps(pc.getTransceivers(),
|
||||
[
|
||||
{
|
||||
receiver: {track: {kind: "audio", readyState: "live"}},
|
||||
receiver: {track: {kind: "audio", readyState: "live", muted: true}},
|
||||
sender: {track: null},
|
||||
direction: "sendrecv",
|
||||
mid: null,
|
||||
|
@ -152,7 +170,7 @@
|
|||
stopped: false
|
||||
},
|
||||
{
|
||||
receiver: {track: {kind: "video", readyState: "live"}},
|
||||
receiver: {track: {kind: "video", readyState: "live", muted: true}},
|
||||
sender: {track: null},
|
||||
direction: "sendrecv",
|
||||
mid: null,
|
||||
|
@ -688,7 +706,6 @@
|
|||
answer = await pc1.createAnswer();
|
||||
hasProps(pc1.getTransceivers(), [{currentDirection: "sendrecv"}]);
|
||||
|
||||
// TODO(bug 1400363): Check onmute/muted
|
||||
await pc1.setLocalDescription(answer);
|
||||
hasProps(pc1.getTransceivers(), [{currentDirection: "recvonly"}]);
|
||||
|
||||
|
@ -713,7 +730,6 @@
|
|||
answer = await pc1.createAnswer();
|
||||
hasProps(pc1.getTransceivers(), [{currentDirection: "recvonly"}]);
|
||||
|
||||
// TODO(bug 1400363): Check onunmute/muted
|
||||
await pc1.setLocalDescription(answer);
|
||||
hasProps(pc1.getTransceivers(), [{currentDirection: "sendrecv"}]);
|
||||
|
||||
|
@ -780,6 +796,133 @@
|
|||
stopTracks(stream);
|
||||
};
|
||||
|
||||
// Exercise MediaStreamTrack.muted and the mute/unmute events across a
// full offer/answer cycle plus three renegotiations that flip the
// transceiver directions, counting every mute/unmute event that fires
// on the receive tracks of both peers.
let checkMute = async () => {
  let pc1 = new RTCPeerConnection();
  let stream1 = await getUserMedia({audio: true, video: true});
  let audio1 = stream1.getAudioTracks()[0];
  pc1.addTrack(audio1, stream1);
  let countMuteAudio1 = countEvents(pc1.getTransceivers()[0].receiver.track, "mute");
  let countUnmuteAudio1 = countEvents(pc1.getTransceivers()[0].receiver.track, "unmute");

  let video1 = stream1.getVideoTracks()[0];
  pc1.addTrack(video1, stream1);
  let countMuteVideo1 = countEvents(pc1.getTransceivers()[1].receiver.track, "mute");
  let countUnmuteVideo1 = countEvents(pc1.getTransceivers()[1].receiver.track, "unmute");

  let pc2 = new RTCPeerConnection();
  let stream2 = await getUserMedia({audio: true, video: true});
  let audio2 = stream2.getAudioTracks()[0];
  pc2.addTrack(audio2, stream2);
  let countMuteAudio2 = countEvents(pc2.getTransceivers()[0].receiver.track, "mute");
  let countUnmuteAudio2 = countEvents(pc2.getTransceivers()[0].receiver.track, "unmute");

  let video2 = stream2.getVideoTracks()[0];
  pc2.addTrack(video2, stream2);
  let countMuteVideo2 = countEvents(pc2.getTransceivers()[1].receiver.track, "mute");
  let countUnmuteVideo2 = countEvents(pc2.getTransceivers()[1].receiver.track, "unmute");

  // Check that receive tracks start muted
  hasProps(pc1.getTransceivers(),
           [
             {receiver: {track: {kind: "audio", muted: true}}},
             {receiver: {track: {kind: "video", muted: true}}}
           ]);

  // BUG FIX: the second pre-negotiation check inspected pc1 twice;
  // it is meant to cover pc2's receive tracks as well.
  hasProps(pc2.getTransceivers(),
           [
             {receiver: {track: {kind: "audio", muted: true}}},
             {receiver: {track: {kind: "video", muted: true}}}
           ]);

  // Initial offer/answer exchange. trickle() is deliberately not
  // awaited so ICE candidates flow while negotiation continues.
  let offer = await pc1.createOffer();
  await pc2.setRemoteDescription(offer);
  trickle(pc1, pc2);
  await pc1.setLocalDescription(offer);
  let answer = await pc2.createAnswer();
  await pc1.setRemoteDescription(answer);
  trickle(pc2, pc1);
  await pc2.setLocalDescription(answer);

  let gotUnmuteAudio1 = gotUnmuteEvent(pc1.getTransceivers()[0].receiver.track);
  let gotUnmuteVideo1 = gotUnmuteEvent(pc1.getTransceivers()[1].receiver.track);

  let gotUnmuteAudio2 = gotUnmuteEvent(pc2.getTransceivers()[0].receiver.track);
  let gotUnmuteVideo2 = gotUnmuteEvent(pc2.getTransceivers()[1].receiver.track);

  await iceConnected(pc1);
  await iceConnected(pc2);

  // Check that receive tracks are unmuted when RTP starts flowing
  await gotUnmuteAudio1;
  await gotUnmuteVideo1;
  await gotUnmuteAudio2;
  await gotUnmuteVideo2;

  // Check whether disabling recv locally causes onmute
  pc1.getTransceivers()[0].direction = "sendonly";
  pc1.getTransceivers()[1].direction = "sendonly";
  offer = await pc1.createOffer();
  await pc2.setRemoteDescription(offer);
  await pc1.setLocalDescription(offer);
  answer = await pc2.createAnswer();
  let gotMuteAudio1 = gotMuteEvent(pc1.getTransceivers()[0].receiver.track);
  let gotMuteVideo1 = gotMuteEvent(pc1.getTransceivers()[1].receiver.track);
  await pc1.setRemoteDescription(answer);
  await pc2.setLocalDescription(answer);
  await gotMuteAudio1;
  await gotMuteVideo1;

  // Check whether disabling on remote causes onmute
  pc1.getTransceivers()[0].direction = "inactive";
  pc1.getTransceivers()[1].direction = "inactive";
  offer = await pc1.createOffer();
  await pc2.setRemoteDescription(offer);
  await pc1.setLocalDescription(offer);
  answer = await pc2.createAnswer();
  let gotMuteAudio2 = gotMuteEvent(pc2.getTransceivers()[0].receiver.track);
  let gotMuteVideo2 = gotMuteEvent(pc2.getTransceivers()[1].receiver.track);
  await pc1.setRemoteDescription(answer);
  await pc2.setLocalDescription(answer);
  await gotMuteAudio2;
  await gotMuteVideo2;

  // Check whether onunmute fires when we turn everything on again
  pc1.getTransceivers()[0].direction = "sendrecv";
  pc1.getTransceivers()[1].direction = "sendrecv";
  offer = await pc1.createOffer();
  await pc2.setRemoteDescription(offer);
  await pc1.setLocalDescription(offer);
  answer = await pc2.createAnswer();
  gotUnmuteAudio1 = gotUnmuteEvent(pc1.getTransceivers()[0].receiver.track);
  gotUnmuteVideo1 = gotUnmuteEvent(pc1.getTransceivers()[1].receiver.track);
  gotUnmuteAudio2 = gotUnmuteEvent(pc2.getTransceivers()[0].receiver.track);
  gotUnmuteVideo2 = gotUnmuteEvent(pc2.getTransceivers()[1].receiver.track);
  await pc1.setRemoteDescription(answer);
  await pc2.setLocalDescription(answer);
  await gotUnmuteAudio1;
  await gotUnmuteVideo1;
  await gotUnmuteAudio2;
  await gotUnmuteVideo2;

  // Wait a little, just in case some stray events fire
  await wait(100);

  // BUG FIX: mochitest is(actual, expected, msg) — the original passed
  // the arguments reversed, which garbled the failure diagnostics
  // (the equality comparison itself was unaffected).
  is(countMuteAudio1.count, 1, "Got 1 mute event for pc1's audio track");
  is(countMuteVideo1.count, 1, "Got 1 mute event for pc1's video track");
  is(countMuteAudio2.count, 1, "Got 1 mute event for pc2's audio track");
  is(countMuteVideo2.count, 1, "Got 1 mute event for pc2's video track");
  is(countUnmuteAudio1.count, 2, "Got 2 unmute events for pc1's audio track");
  is(countUnmuteVideo1.count, 2, "Got 2 unmute events for pc1's video track");
  is(countUnmuteAudio2.count, 2, "Got 2 unmute events for pc2's audio track");
  is(countUnmuteVideo2.count, 2, "Got 2 unmute events for pc2's video track");

  pc1.close();
  pc2.close();
  stopTracks(stream1);
  stopTracks(stream2);
};
|
||||
|
||||
let checkStop = async () => {
|
||||
let pc1 = new RTCPeerConnection();
|
||||
let stream = await getUserMedia({audio: true});
|
||||
|
@ -1690,6 +1833,7 @@
|
|||
await checkAddTransceiverThenAddTrackPairs();
|
||||
await checkAddTrackPairs();
|
||||
await checkReplaceTrackNullDoesntPreventPairing();
|
||||
await checkMute();
|
||||
await checkStop();
|
||||
await checkStopAfterCreateOffer();
|
||||
await checkStopAfterSetLocalOffer();
|
||||
|
|
|
@ -27,7 +27,18 @@
|
|||
}
|
||||
]);
|
||||
|
||||
var haveFirstUnmuteEvent;
|
||||
|
||||
test.chain.insertBefore("PC_REMOTE_SET_LOCAL_DESCRIPTION", [
|
||||
function PC_REMOTE_SETUP_ONUNMUTE_1() {
|
||||
haveFirstUnmuteEvent = haveEvent(test.pcRemote._pc.getReceivers()[0].track, "unmute");
|
||||
}
|
||||
]);
|
||||
|
||||
test.chain.append([
|
||||
function PC_REMOTE_CHECK_VIDEO_UNMUTED() {
|
||||
return haveFirstUnmuteEvent;
|
||||
},
|
||||
function PC_REMOTE_WAIT_FOR_FRAMES() {
|
||||
var vremote = test.pcRemote.remoteMediaElements[0];
|
||||
ok(vremote, "Should have remote video element for pcRemote");
|
||||
|
|
Loading…
Reference in a new issue