Bug 1093835 - Commoditize checkVideoPlaying/checkVideoPaused. r=jib

This moves those two functions from a single test to a VideoStreamHelper in a
common file, so they can be used when checking video flow in multiple tests.

It also implements a VideoFrameEmitter that provides tests with an easy-to-access
video source, so that VideoStreamHelper can verify flow on the sink.

MozReview-Commit-ID: Fin9eiVmBe

--HG--
extra : rebase_source : 8b62124b0182d7e7bd78788e031b2d2259db9e57
Andreas Pehrson 2017-05-04 15:23:36 +02:00
Parent 0fb77b5e31
Commit c41610f6b8
9 changed files: 148 additions and 132 deletions
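For illustration only (not part of the patch), a mochitest could exercise the new helpers roughly as below. VideoFrameEmitter, VideoStreamHelper, stream(), start(), stop(), checkVideoPlaying() and checkVideoPaused() come from the diff that follows; the exampleVideoFlowCheck name, the video element wiring and the 10/10/16/1000 arguments are assumptions made for the sketch.

// Hypothetical usage sketch. Assumes a mochitest page where
// captureStream_common.js and the common helpers below are loaded.
async function exampleVideoFlowCheck() {
  const emitter = new VideoFrameEmitter();        // canvas source alternating two colors
  const helper = new VideoStreamHelper();

  const video = document.createElement("video");  // hypothetical sink element
  video.srcObject = emitter.stream();
  video.play();
  document.body.appendChild(video);

  emitter.start();                                // begin drawing frames into the stream
  // Resolves once the pixel at (10, 10) is non-black and then changes color.
  await helper.checkVideoPlaying(video, 10, 10, 16);

  emitter.stop();                                 // stop drawing; the sink should freeze
  // Passes if the pixel stays unchanged for the given timeout (in ms).
  await helper.checkVideoPaused(video, 10, 10, 16, 1000);
}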


@@ -107,7 +107,7 @@ CaptureStreamTestHelper.prototype = {
    */
   isOpaquePixelNot: function(px, refColor, threshold) {
     px[3] = refColor.data[3];
-    return h.isPixelNot(px, refColor, threshold);
+    return this.isPixelNot(px, refColor, threshold);
   },
 
   /*


@@ -908,55 +908,94 @@ AudioStreamHelper.prototype = {
   }
 }
 
-function VideoStreamHelper() {
-  this._helper = new CaptureStreamTestHelper2D(50,50);
-  this._canvas = this._helper.createAndAppendElement('canvas', 'source_canvas');
-  // Make sure this is initted
-  this._helper.drawColor(this._canvas, this._helper.green);
-  this._stream = this._canvas.captureStream(10);
-}
-
-VideoStreamHelper.prototype = {
-  stream: function() {
-    return this._stream;
-  },
-
-  startCapturingFrames: function() {
-    var i = 0;
-    var helper = this;
-    return setInterval(function() {
-      try {
-        helper._helper.drawColor(helper._canvas,
-                                 i ? helper._helper.green : helper._helper.red);
-        i = 1 - i;
-        helper._stream.requestFrame();
-      } catch (e) {
-        // ignore; stream might have shut down, and we don't bother clearing
-        // the setInterval.
-      }
-    }, 500);
-  },
-
-  waitForFrames: function(canvas, timeout_value) {
-    var intervalId = this.startCapturingFrames();
-    timeout_value = timeout_value || 8000;
-
-    return addFinallyToPromise(timeout(
-        Promise.all([
-          this._helper.waitForPixelColor(canvas, this._helper.green, 128,
-                                         canvas.id + " should become green"),
-          this._helper.waitForPixelColor(canvas, this._helper.red, 128,
-                                         canvas.id + " should become red")
-        ]),
-        timeout_value,
-        "Timed out waiting for frames")).finally(() => clearInterval(intervalId));
-  },
-
-  verifyNoFrames: function(canvas) {
-    return this.waitForFrames(canvas).then(
-      () => ok(false, "Color should not change"),
-      () => ok(true, "Color should not change")
-    );
+class VideoFrameEmitter {
+  constructor(color1, color2) {
+    this._helper = new CaptureStreamTestHelper2D(50,50);
+    this._canvas = this._helper.createAndAppendElement('canvas', 'source_canvas');
+    this._color1 = color1 ? color1 : this._helper.green;
+    this._color2 = color2 ? color2 : this._helper.red;
+    // Make sure this is initted
+    this._helper.drawColor(this._canvas, this._color1);
+    this._stream = this._canvas.captureStream();
+    this._started = false;
+  }
+
+  stream() {
+    return this._stream;
+  }
+
+  start() {
+    if (this._started) {
+      return;
+    }
+
+    let i = 0;
+    this._started = true;
+    this._intervalId = setInterval(() => {
+      try {
+        this._helper.drawColor(this._canvas, i ? this._color1: this._color2);
+        i = 1 - i;
+      } catch (e) {
+        // ignore; stream might have shut down, and we don't bother clearing
+        // the setInterval.
+      }
+    }, 500);
+  }
+
+  stop() {
+    if (this._started) {
+      clearInterval(this._intervalId);
+      this._started = false;
+    }
+  }
+}
+
+class VideoStreamHelper {
+  constructor() {
+    this._helper = new CaptureStreamTestHelper2D(50,50);
+  }
+
+  checkHasFrame(video, offsetX, offsetY, threshold) {
+    const h = this._helper;
+    return h.waitForPixel(video, offsetX, offsetY, px => {
+      let result = h.isOpaquePixelNot(px, h.black, threshold);
+      info("Checking that we have a frame, got [" +
+           Array.slice(px) + "]. Ref=[" +
+           Array.slice(h.black.data) + "]. Threshold=" + threshold +
+           ". Pass=" + result);
+      return result;
+    });
+  }
+
+  async checkVideoPlaying(video, offsetX, offsetY, threshold) {
+    const h = this._helper;
+    await this.checkHasFrame(video, offsetX, offsetY, threshold);
+    let startPixel = { data: h.getPixel(video, offsetX, offsetY)
+                     , name: "startcolor"
+                     };
+    return h.waitForPixel(video, offsetX, offsetY, px => {
+      let result = h.isPixelNot(px, startPixel, threshold)
+      info("Checking playing, [" +
+           Array.slice(px) + "] vs [" + Array.slice(startPixel.data) +
+           "]. Threshold=" + threshold + " Pass=" + result);
+      return result;
+    });
+  }
+
+  async checkVideoPaused(video, offsetX, offsetY, threshold, timeout) {
+    const h = this._helper;
+    await this.checkHasFrame(video, offsetX, offsetY, threshold);
+    let startPixel = { data: h.getPixel(video, offsetX, offsetY)
+                     , name: "startcolor"
+                     };
+    const changed = await h.waitForPixel(video, offsetX, offsetY, px => {
+      let result = h.isOpaquePixelNot(px, startPixel, threshold);
+      info("Checking paused, [" +
+           Array.slice(px) + "] vs [" + Array.slice(startPixel.data) +
+           "]. Threshold=" + threshold + " Pass=" + result);
+      return result;
+    }, timeout);
+    ok(!changed, "Frame shouldn't change within " + timeout / 1000 + " seconds.");
   }
 }


@@ -2,7 +2,6 @@
 <html>
 <head>
   <script type="application/javascript" src="mediaStreamPlayback.js"></script>
-  <script type="application/javascript" src="head.js"></script>
 </head>
 <body>
 <pre id="test">


@@ -2,7 +2,6 @@
 <html>
 <head>
   <script type="application/javascript" src="mediaStreamPlayback.js"></script>
-  <script type="application/javascript" src="head.js"></script>
 </head>
 <body>
 <pre id="test">


@@ -3,7 +3,6 @@
 <head>
   <script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
   <script type="application/javascript" src="mediaStreamPlayback.js"></script>
-  <script type="application/javascript" src="head.js"></script>
 </head>
 <body>
 <pre id="test">
@@ -24,43 +23,11 @@ const offsetX = 20;
 const offsetY = 20;
 const threshold = 16;
 const pausedTimeout = 1000;
-const h = new CaptureStreamTestHelper2D(50, 50);
-
-var checkHasFrame = video => h.waitForPixel(video, offsetX, offsetY, px => {
-  let result = h.isOpaquePixelNot(px, h.black, threshold);
-  info("Checking that we have a frame, got [" +
-       Array.slice(px) + "]. Pass=" + result);
-  return result;
-});
-
-var checkVideoPlaying = video => checkHasFrame(video)
-  .then(() => {
-    let startPixel = { data: h.getPixel(video, offsetX, offsetY)
-                     , name: "startcolor"
-                     };
-    return h.waitForPixel(video, offsetX, offsetY, px => {
-      let result = h.isPixelNot(px, startPixel, threshold)
-      info("Checking playing, [" + Array.slice(px) + "] vs [" +
-           Array.slice(startPixel.data) + "]. Pass=" + result);
-      return result;
-    });
-  });
-
-var checkVideoPaused = video => checkHasFrame(video)
-  .then(() => {
-    let startPixel = { data: h.getPixel(video, offsetX, offsetY)
-                     , name: "startcolor"
-                     };
-    return h.waitForPixel(video, offsetX, offsetY, px => {
-      let result = h.isOpaquePixelNot(px, startPixel, threshold);
-      info("Checking paused, [" + Array.slice(px) + "] vs [" +
-           Array.slice(startPixel.data) + "]. Pass=" + result);
-      return result;
-    }, pausedTimeout);
-  }).then(result => ok(!result, "Frame shouldn't change within " + pausedTimeout / 1000 + " seconds."));
+let h;
 
 runTest(() => getUserMedia({video: true, fake: true})
   .then(stream => {
+    h = new VideoStreamHelper();
     gUMVideoElement =
       createMediaElement("video", "gUMVideo");
     gUMVideoElement.srcObject = stream;
@@ -80,43 +47,45 @@ runTest(() => getUserMedia({video: true, fake: true})
     let osc = createOscillatorStream(new AudioContext(), 1000);
     captureStreamElement.srcObject.addTrack(osc.getTracks()[0]);
-    return checkVideoPlaying(captureStreamElement);
+    return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
   })
   .then(() => {
     info("Video flowing. Pausing.");
     gUMVideoElement.pause();
-    return checkVideoPaused(captureStreamElement);
+    return h.checkVideoPaused(captureStreamElement, 10, 10, 16, pausedTimeout);
   })
   .then(() => {
     info("Video stopped flowing. Playing.");
     gUMVideoElement.play();
-    return checkVideoPlaying(captureStreamElement);
+    return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
   })
   .then(() => {
     info("Video flowing. Removing source.");
     var stream = gUMVideoElement.srcObject;
     gUMVideoElement.srcObject = null;
-    return checkVideoPaused(captureStreamElement).then(() => stream);
+    return h.checkVideoPaused(captureStreamElement, 10, 10, 16, pausedTimeout)
+      .then(() => stream);
   })
   .then(stream => {
     info("Video stopped flowing. Setting source.");
     gUMVideoElement.srcObject = stream;
-    return checkVideoPlaying(captureStreamElement);
+    return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
   })
   .then(() => {
     info("Video flowing. Changing source by track manipulation. Remove first.");
     var track = gUMVideoElement.srcObject.getTracks()[0];
     gUMVideoElement.srcObject.removeTrack(track);
-    return checkVideoPaused(captureStreamElement).then(() => track);
+    return h.checkVideoPaused(captureStreamElement, 10, 10, 16, pausedTimeout)
+      .then(() => track);
   })
   .then(track => {
     info("Video paused. Changing source by track manipulation. Add first.");
     gUMVideoElement.srcObject.addTrack(track);
     gUMVideoElement.play();
-    return checkVideoPlaying(captureStreamElement);
+    return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
   })
   .then(() => {
     gUMVideoElement.srcObject.getTracks().forEach(t => t.stop());


@@ -1,7 +1,6 @@
 <!DOCTYPE HTML>
 <html>
 <head>
-  <script type="application/javascript" src="head.js"></script>
   <script type="application/javascript" src="mediaStreamPlayback.js"></script>
 </head>
 <body>


@@ -1,7 +1,6 @@
 <!DOCTYPE HTML>
 <html>
 <head>
-  <script type="application/javascript" src="head.js"></script>
   <script type="application/javascript" src="pc.js"></script>
 </head>
 <body>


@@ -13,21 +13,20 @@
     visible: true
   });
 
-  var test;
-  var pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
+  const pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
 
   function addRIDExtension(pc, extensionId) {
-    var receivers = pc._pc.getReceivers();
+    const receivers = pc._pc.getReceivers();
     is(receivers.length, 1, "We have exactly one RTP receiver");
-    var receiver = receivers[0];
+    const receiver = receivers[0];
     SpecialPowers.wrap(pc._pc).mozAddRIDExtension(receiver, extensionId);
   }
 
   function selectRecvRID(pc, rid) {
-    var receivers = pc._pc.getReceivers();
+    const receivers = pc._pc.getReceivers();
     is(receivers.length, 1, "We have exactly one RTP receiver");
-    var receiver = receivers[0];
+    const receiver = receivers[0];
     SpecialPowers.wrap(pc._pc).mozAddRIDFilter(receiver, rid);
   }
@@ -38,23 +37,24 @@
     // the 80Kbps+overhead needed for the two simulcast streams.
     // 100Kbps was apparently too low.
     ['media.peerconnection.video.min_bitrate_estimate', 180*1000]).then(() => {
-    var helper;
+    let emitter, helper;
 
     test = new PeerConnectionTest({bundle: false});
     test.setMediaConstraints([{video: true}], [{video: true}]);
     test.chain.replace("PC_REMOTE_GUM", [
       function PC_REMOTE_CANVAS_CAPTURESTREAM(test) {
+        emitter = new VideoFrameEmitter();
         helper = new VideoStreamHelper();
-        test.pcRemote.attachLocalStream(helper.stream());
+        test.pcRemote.attachLocalStream(emitter.stream());
       }
     ]);
 
     test.chain.insertAfter('PC_REMOTE_GET_OFFER', [
       function PC_REMOTE_SET_RIDS(test) {
-        var senders = test.pcRemote._pc.getSenders();
+        const senders = test.pcRemote._pc.getSenders();
         is(senders.length, 1, "We have exactly one RTP sender");
-        var sender = senders[0];
+        const sender = senders[0];
         ok(sender.track, "Sender has a track");
         return sender.setParameters({
@@ -88,7 +88,7 @@
     // has been created.
     test.chain.insertAfter('PC_LOCAL_SET_REMOTE_DESCRIPTION',[
       function PC_LOCAL_SET_RTP_FIRST_RID(test) {
-        var extmap_id = test._local_offer.sdp.match(
+        const extmap_id = test._local_offer.sdp.match(
           "a=extmap:([0-9+])/recvonly urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id");
         ok(extmap_id, "Local offer has extmap id for simulcast: " + extmap_id[1]);
         // Cause pcLocal to filter out everything but RID "bar", only
@@ -99,14 +99,16 @@
     ]);
 
     test.chain.append([
-      function PC_LOCAL_WAIT_FOR_FRAMES() {
-        var vremote = test.pcLocal.remoteMediaElements[0];
+      async function PC_LOCAL_WAIT_FOR_FRAMES() {
+        const vremote = test.pcLocal.remoteMediaElements[0];
         ok(vremote, "Should have remote video element for pcLocal");
-        return helper.waitForFrames(vremote);
+        emitter.start();
+        await helper.checkVideoPlaying(vremote, 10, 10, 16);
+        emitter.stop();
       },
       function PC_LOCAL_CHECK_SIZE_1() {
-        var vlocal = test.pcRemote.localMediaElements[0];
-        var vremote = test.pcLocal.remoteMediaElements[0];
+        const vlocal = test.pcRemote.localMediaElements[0];
+        const vremote = test.pcLocal.remoteMediaElements[0];
         ok(vlocal, "Should have local video element for pcRemote");
         ok(vremote, "Should have remote video element for pcLocal");
         ok(vlocal.videoWidth > 0, "source width is positive");
@@ -122,21 +124,25 @@
       function PC_LOCAL_WAIT_FOR_SECOND_MEDIA_FLOW(test) {
         return test.pcLocal.waitForMediaFlow();
       },
-      function PC_LOCAL_WAIT_FOR_FRAMES_2() {
-        var vremote = test.pcLocal.remoteMediaElements[0];
+      async function PC_LOCAL_WAIT_FOR_FRAMES_2() {
+        const vremote = test.pcLocal.remoteMediaElements[0];
         ok(vremote, "Should have remote video element for pcLocal");
-        return helper.waitForFrames(vremote);
+        emitter.start();
+        await helper.checkVideoPlaying(vremote, 10, 10, 16);
+        emitter.stop();
       },
       // For some reason, even though we're getting a 25x25 stream, sometimes
       // the resolution isn't updated on the video element on the first frame.
-      function PC_LOCAL_WAIT_FOR_FRAMES_3() {
-        var vremote = test.pcLocal.remoteMediaElements[0];
+      async function PC_LOCAL_WAIT_FOR_FRAMES_3() {
+        const vremote = test.pcLocal.remoteMediaElements[0];
         ok(vremote, "Should have remote video element for pcLocal");
-        return helper.waitForFrames(vremote);
+        emitter.start();
+        await helper.checkVideoPlaying(vremote, 10, 10, 16);
+        emitter.stop();
       },
       function PC_LOCAL_CHECK_SIZE_2() {
-        var vlocal = test.pcRemote.localMediaElements[0];
-        var vremote = test.pcLocal.remoteMediaElements[0];
+        const vlocal = test.pcRemote.localMediaElements[0];
+        const vremote = test.pcLocal.remoteMediaElements[0];
         ok(vlocal, "Should have local video element for pcRemote");
         ok(vremote, "Should have remote video element for pcLocal");
         ok(vlocal.videoWidth > 0, "source width is positive");


@@ -13,21 +13,20 @@
     visible: true
   });
 
-  var test;
-  var pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
+  const pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
 
   function addRIDExtension(pc, extensionId) {
-    var receivers = pc._pc.getReceivers();
+    const receivers = pc._pc.getReceivers();
     is(receivers.length, 1, "We have exactly one RTP receiver");
-    var receiver = receivers[0];
+    const receiver = receivers[0];
     SpecialPowers.wrap(pc._pc).mozAddRIDExtension(receiver, extensionId);
   }
 
   function selectRecvRID(pc, rid) {
-    var receivers = pc._pc.getReceivers();
+    const receivers = pc._pc.getReceivers();
     is(receivers.length, 1, "We have exactly one RTP receiver");
-    var receiver = receivers[0];
+    const receiver = receivers[0];
     SpecialPowers.wrap(pc._pc).mozAddRIDFilter(receiver, rid);
   }
@@ -38,23 +37,24 @@
     // the 80Kbps+overhead needed for the two simulcast streams.
     // 100Kbps was apparently too low.
     ['media.peerconnection.video.min_bitrate_estimate', 180*1000]).then(() => {
-    var helper;
+    let emitter, helper;
 
-    test = new PeerConnectionTest({bundle: false});
+    const test = new PeerConnectionTest({bundle: false});
     test.setMediaConstraints([{video: true}], []);
     test.chain.replace("PC_LOCAL_GUM", [
       function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
+        emitter = new VideoFrameEmitter();
         helper = new VideoStreamHelper();
-        test.pcLocal.attachLocalStream(helper.stream());
+        test.pcLocal.attachLocalStream(emitter.stream());
       }
     ]);
 
     test.chain.insertBefore('PC_LOCAL_CREATE_OFFER', [
      function PC_LOCAL_SET_RIDS(test) {
-        var senders = test.pcLocal._pc.getSenders();
+        const senders = test.pcLocal._pc.getSenders();
         is(senders.length, 1, "We have exactly one RTP sender");
-        var sender = senders[0];
+        const sender = senders[0];
         ok(sender.track, "Sender has a track");
         return sender.setParameters({
@@ -80,7 +80,7 @@
     // has been created.
     test.chain.insertAfter('PC_REMOTE_SET_LOCAL_DESCRIPTION',[
       function PC_REMOTE_SET_RTP_FIRST_RID(test) {
-        var extmap_id = test.originalOffer.sdp.match(
+        const extmap_id = test.originalOffer.sdp.match(
           "a=extmap:([0-9+])/sendonly urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id");
         ok(extmap_id, "Original offer has extmap id for simulcast: " + extmap_id[1]);
         // Cause pcRemote to filter out everything but RID "foo", only
@@ -91,14 +91,16 @@
     ]);
 
     test.chain.append([
-      function PC_REMOTE_WAIT_FOR_FRAMES() {
-        var vremote = test.pcRemote.remoteMediaElements[0];
+      async function PC_REMOTE_WAIT_FOR_FRAMES() {
+        const vremote = test.pcRemote.remoteMediaElements[0];
         ok(vremote, "Should have remote video element for pcRemote");
-        return helper.waitForFrames(vremote);
+        emitter.start();
+        await helper.checkVideoPlaying(vremote, 10, 10, 16);
+        emitter.stop();
       },
       function PC_REMOTE_CHECK_SIZE_1() {
-        var vlocal = test.pcLocal.localMediaElements[0];
-        var vremote = test.pcRemote.remoteMediaElements[0];
+        const vlocal = test.pcLocal.localMediaElements[0];
+        const vremote = test.pcRemote.remoteMediaElements[0];
         ok(vlocal, "Should have local video element for pcLocal");
         ok(vremote, "Should have remote video element for pcRemote");
         ok(vlocal.videoWidth > 0, "source width is positive");
@@ -114,21 +116,25 @@
       function PC_REMOTE_WAIT_FOR_SECOND_MEDIA_FLOW(test) {
         return test.pcRemote.waitForMediaFlow();
       },
-      function PC_REMOTE_WAIT_FOR_FRAMES_2() {
-        var vremote = test.pcRemote.remoteMediaElements[0];
+      async function PC_REMOTE_WAIT_FOR_FRAMES_2() {
+        const vremote = test.pcRemote.remoteMediaElements[0];
         ok(vremote, "Should have remote video element for pcRemote");
-        return helper.waitForFrames(vremote);
+        emitter.start();
+        await helper.checkVideoPlaying(vremote, 10, 10, 16);
+        emitter.stop();
       },
       // For some reason, even though we're getting a 25x25 stream, sometimes
      // the resolution isn't updated on the video element on the first frame.
-      function PC_REMOTE_WAIT_FOR_FRAMES_3() {
-        var vremote = test.pcRemote.remoteMediaElements[0];
+      async function PC_REMOTE_WAIT_FOR_FRAMES_3() {
+        const vremote = test.pcRemote.remoteMediaElements[0];
         ok(vremote, "Should have remote video element for pcRemote");
-        return helper.waitForFrames(vremote);
+        emitter.start();
+        await helper.checkVideoPlaying(vremote, 10, 10, 16);
+        emitter.stop();
       },
       function PC_REMOTE_CHECK_SIZE_2() {
-        var vlocal = test.pcLocal.localMediaElements[0];
-        var vremote = test.pcRemote.remoteMediaElements[0];
+        const vlocal = test.pcLocal.localMediaElements[0];
+        const vremote = test.pcRemote.remoteMediaElements[0];
         ok(vlocal, "Should have local video element for pcLocal");
         ok(vremote, "Should have remote video element for pcRemote");
         ok(vlocal.videoWidth > 0, "source width is positive");