Bug 1093835 - Commoditize checkVideoPlaying/checkVideoPaused. r=jib

This moves those two functions from a single test to a VideoStreamHelper in a
common file, so they can be used when checking video flow in multiple tests.

It also implements a VideoFrameEmitter that provides tests with an
easy-to-access video source where VideoStreamHelper can verify flow on the sink.

MozReview-Commit-ID: Fin9eiVmBe

--HG--
extra : rebase_source : 8b62124b0182d7e7bd78788e031b2d2259db9e57
This commit is contained in:
Andreas Pehrson 2017-05-04 15:23:36 +02:00
Родитель 0fb77b5e31
Коммит c41610f6b8
9 изменённых файлов: 148 добавлений и 132 удалений

Просмотреть файл

@ -107,7 +107,7 @@ CaptureStreamTestHelper.prototype = {
*/
isOpaquePixelNot: function(px, refColor, threshold) {
px[3] = refColor.data[3];
return h.isPixelNot(px, refColor, threshold);
return this.isPixelNot(px, refColor, threshold);
},
/*

Просмотреть файл

@ -908,55 +908,94 @@ AudioStreamHelper.prototype = {
}
}
function VideoStreamHelper() {
this._helper = new CaptureStreamTestHelper2D(50,50);
this._canvas = this._helper.createAndAppendElement('canvas', 'source_canvas');
// Make sure this is initted
this._helper.drawColor(this._canvas, this._helper.green);
this._stream = this._canvas.captureStream(10);
}
class VideoFrameEmitter {
constructor(color1, color2) {
this._helper = new CaptureStreamTestHelper2D(50,50);
this._canvas = this._helper.createAndAppendElement('canvas', 'source_canvas');
this._color1 = color1 ? color1 : this._helper.green;
this._color2 = color2 ? color2 : this._helper.red;
// Make sure this is initted
this._helper.drawColor(this._canvas, this._color1);
this._stream = this._canvas.captureStream();
this._started = false;
}
VideoStreamHelper.prototype = {
stream: function() {
stream() {
return this._stream;
},
}
startCapturingFrames: function() {
var i = 0;
var helper = this;
return setInterval(function() {
start() {
if (this._started) {
return;
}
let i = 0;
this._started = true;
this._intervalId = setInterval(() => {
try {
helper._helper.drawColor(helper._canvas,
i ? helper._helper.green : helper._helper.red);
this._helper.drawColor(this._canvas, i ? this._color1: this._color2);
i = 1 - i;
helper._stream.requestFrame();
} catch (e) {
// ignore; stream might have shut down, and we don't bother clearing
// the setInterval.
}
}, 500);
},
}
waitForFrames: function(canvas, timeout_value) {
var intervalId = this.startCapturingFrames();
timeout_value = timeout_value || 8000;
stop() {
if (this._started) {
clearInterval(this._intervalId);
this._started = false;
}
}
}
return addFinallyToPromise(timeout(
Promise.all([
this._helper.waitForPixelColor(canvas, this._helper.green, 128,
canvas.id + " should become green"),
this._helper.waitForPixelColor(canvas, this._helper.red, 128,
canvas.id + " should become red")
]),
timeout_value,
"Timed out waiting for frames")).finally(() => clearInterval(intervalId));
},
class VideoStreamHelper {
constructor() {
this._helper = new CaptureStreamTestHelper2D(50,50);
}
verifyNoFrames: function(canvas) {
return this.waitForFrames(canvas).then(
() => ok(false, "Color should not change"),
() => ok(true, "Color should not change")
);
checkHasFrame(video, offsetX, offsetY, threshold) {
const h = this._helper;
return h.waitForPixel(video, offsetX, offsetY, px => {
let result = h.isOpaquePixelNot(px, h.black, threshold);
info("Checking that we have a frame, got [" +
Array.slice(px) + "]. Ref=[" +
Array.slice(h.black.data) + "]. Threshold=" + threshold +
". Pass=" + result);
return result;
});
}
async checkVideoPlaying(video, offsetX, offsetY, threshold) {
const h = this._helper;
await this.checkHasFrame(video, offsetX, offsetY, threshold);
let startPixel = { data: h.getPixel(video, offsetX, offsetY)
, name: "startcolor"
};
return h.waitForPixel(video, offsetX, offsetY, px => {
let result = h.isPixelNot(px, startPixel, threshold)
info("Checking playing, [" +
Array.slice(px) + "] vs [" + Array.slice(startPixel.data) +
"]. Threshold=" + threshold + " Pass=" + result);
return result;
});
}
async checkVideoPaused(video, offsetX, offsetY, threshold, timeout) {
const h = this._helper;
await this.checkHasFrame(video, offsetX, offsetY, threshold);
let startPixel = { data: h.getPixel(video, offsetX, offsetY)
, name: "startcolor"
};
const changed = await h.waitForPixel(video, offsetX, offsetY, px => {
let result = h.isOpaquePixelNot(px, startPixel, threshold);
info("Checking paused, [" +
Array.slice(px) + "] vs [" + Array.slice(startPixel.data) +
"]. Threshold=" + threshold + " Pass=" + result);
return result;
}, timeout);
ok(!changed, "Frame shouldn't change within " + timeout / 1000 + " seconds.");
}
}

Просмотреть файл

@ -2,7 +2,6 @@
<html>
<head>
<script type="application/javascript" src="mediaStreamPlayback.js"></script>
<script type="application/javascript" src="head.js"></script>
</head>
<body>
<pre id="test">

Просмотреть файл

@ -2,7 +2,6 @@
<html>
<head>
<script type="application/javascript" src="mediaStreamPlayback.js"></script>
<script type="application/javascript" src="head.js"></script>
</head>
<body>
<pre id="test">

Просмотреть файл

@ -3,7 +3,6 @@
<head>
<script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
<script type="application/javascript" src="mediaStreamPlayback.js"></script>
<script type="application/javascript" src="head.js"></script>
</head>
<body>
<pre id="test">
@ -24,43 +23,11 @@ const offsetX = 20;
const offsetY = 20;
const threshold = 16;
const pausedTimeout = 1000;
const h = new CaptureStreamTestHelper2D(50, 50);
var checkHasFrame = video => h.waitForPixel(video, offsetX, offsetY, px => {
let result = h.isOpaquePixelNot(px, h.black, threshold);
info("Checking that we have a frame, got [" +
Array.slice(px) + "]. Pass=" + result);
return result;
});
var checkVideoPlaying = video => checkHasFrame(video)
.then(() => {
let startPixel = { data: h.getPixel(video, offsetX, offsetY)
, name: "startcolor"
};
return h.waitForPixel(video, offsetX, offsetY, px => {
let result = h.isPixelNot(px, startPixel, threshold)
info("Checking playing, [" + Array.slice(px) + "] vs [" +
Array.slice(startPixel.data) + "]. Pass=" + result);
return result;
});
});
var checkVideoPaused = video => checkHasFrame(video)
.then(() => {
let startPixel = { data: h.getPixel(video, offsetX, offsetY)
, name: "startcolor"
};
return h.waitForPixel(video, offsetX, offsetY, px => {
let result = h.isOpaquePixelNot(px, startPixel, threshold);
info("Checking paused, [" + Array.slice(px) + "] vs [" +
Array.slice(startPixel.data) + "]. Pass=" + result);
return result;
}, pausedTimeout);
}).then(result => ok(!result, "Frame shouldn't change within " + pausedTimeout / 1000 + " seconds."));
let h;
runTest(() => getUserMedia({video: true, fake: true})
.then(stream => {
h = new VideoStreamHelper();
gUMVideoElement =
createMediaElement("video", "gUMVideo");
gUMVideoElement.srcObject = stream;
@ -80,43 +47,45 @@ runTest(() => getUserMedia({video: true, fake: true})
let osc = createOscillatorStream(new AudioContext(), 1000);
captureStreamElement.srcObject.addTrack(osc.getTracks()[0]);
return checkVideoPlaying(captureStreamElement);
return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
})
.then(() => {
info("Video flowing. Pausing.");
gUMVideoElement.pause();
return checkVideoPaused(captureStreamElement);
return h.checkVideoPaused(captureStreamElement, 10, 10, 16, pausedTimeout);
})
.then(() => {
info("Video stopped flowing. Playing.");
gUMVideoElement.play();
return checkVideoPlaying(captureStreamElement);
return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
})
.then(() => {
info("Video flowing. Removing source.");
var stream = gUMVideoElement.srcObject;
gUMVideoElement.srcObject = null;
return checkVideoPaused(captureStreamElement).then(() => stream);
return h.checkVideoPaused(captureStreamElement, 10, 10, 16, pausedTimeout)
.then(() => stream);
})
.then(stream => {
info("Video stopped flowing. Setting source.");
gUMVideoElement.srcObject = stream;
return checkVideoPlaying(captureStreamElement);
return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
})
.then(() => {
info("Video flowing. Changing source by track manipulation. Remove first.");
var track = gUMVideoElement.srcObject.getTracks()[0];
gUMVideoElement.srcObject.removeTrack(track);
return checkVideoPaused(captureStreamElement).then(() => track);
return h.checkVideoPaused(captureStreamElement, 10, 10, 16, pausedTimeout)
.then(() => track);
})
.then(track => {
info("Video paused. Changing source by track manipulation. Add first.");
gUMVideoElement.srcObject.addTrack(track);
gUMVideoElement.play();
return checkVideoPlaying(captureStreamElement);
return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
})
.then(() => {
gUMVideoElement.srcObject.getTracks().forEach(t => t.stop());

Просмотреть файл

@ -1,7 +1,6 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="head.js"></script>
<script type="application/javascript" src="mediaStreamPlayback.js"></script>
</head>
<body>

Просмотреть файл

@ -1,7 +1,6 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="head.js"></script>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>

Просмотреть файл

@ -13,21 +13,20 @@
visible: true
});
var test;
var pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
const pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
function addRIDExtension(pc, extensionId) {
var receivers = pc._pc.getReceivers();
const receivers = pc._pc.getReceivers();
is(receivers.length, 1, "We have exactly one RTP receiver");
var receiver = receivers[0];
const receiver = receivers[0];
SpecialPowers.wrap(pc._pc).mozAddRIDExtension(receiver, extensionId);
}
function selectRecvRID(pc, rid) {
var receivers = pc._pc.getReceivers();
const receivers = pc._pc.getReceivers();
is(receivers.length, 1, "We have exactly one RTP receiver");
var receiver = receivers[0];
const receiver = receivers[0];
SpecialPowers.wrap(pc._pc).mozAddRIDFilter(receiver, rid);
}
@ -38,23 +37,24 @@
// the 80Kbps+overhead needed for the two simulcast streams.
// 100Kbps was apparently too low.
['media.peerconnection.video.min_bitrate_estimate', 180*1000]).then(() => {
var helper;
let emitter, helper;
test = new PeerConnectionTest({bundle: false});
test.setMediaConstraints([{video: true}], [{video: true}]);
test.chain.replace("PC_REMOTE_GUM", [
function PC_REMOTE_CANVAS_CAPTURESTREAM(test) {
emitter = new VideoFrameEmitter();
helper = new VideoStreamHelper();
test.pcRemote.attachLocalStream(helper.stream());
test.pcRemote.attachLocalStream(emitter.stream());
}
]);
test.chain.insertAfter('PC_REMOTE_GET_OFFER', [
function PC_REMOTE_SET_RIDS(test) {
var senders = test.pcRemote._pc.getSenders();
const senders = test.pcRemote._pc.getSenders();
is(senders.length, 1, "We have exactly one RTP sender");
var sender = senders[0];
const sender = senders[0];
ok(sender.track, "Sender has a track");
return sender.setParameters({
@ -88,7 +88,7 @@
// has been created.
test.chain.insertAfter('PC_LOCAL_SET_REMOTE_DESCRIPTION',[
function PC_LOCAL_SET_RTP_FIRST_RID(test) {
var extmap_id = test._local_offer.sdp.match(
const extmap_id = test._local_offer.sdp.match(
"a=extmap:([0-9+])/recvonly urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id");
ok(extmap_id, "Local offer has extmap id for simulcast: " + extmap_id[1]);
// Cause pcLocal to filter out everything but RID "bar", only
@ -99,14 +99,16 @@
]);
test.chain.append([
function PC_LOCAL_WAIT_FOR_FRAMES() {
var vremote = test.pcLocal.remoteMediaElements[0];
async function PC_LOCAL_WAIT_FOR_FRAMES() {
const vremote = test.pcLocal.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcLocal");
return helper.waitForFrames(vremote);
emitter.start();
await helper.checkVideoPlaying(vremote, 10, 10, 16);
emitter.stop();
},
function PC_LOCAL_CHECK_SIZE_1() {
var vlocal = test.pcRemote.localMediaElements[0];
var vremote = test.pcLocal.remoteMediaElements[0];
const vlocal = test.pcRemote.localMediaElements[0];
const vremote = test.pcLocal.remoteMediaElements[0];
ok(vlocal, "Should have local video element for pcRemote");
ok(vremote, "Should have remote video element for pcLocal");
ok(vlocal.videoWidth > 0, "source width is positive");
@ -122,21 +124,25 @@
function PC_LOCAL_WAIT_FOR_SECOND_MEDIA_FLOW(test) {
return test.pcLocal.waitForMediaFlow();
},
function PC_LOCAL_WAIT_FOR_FRAMES_2() {
var vremote = test.pcLocal.remoteMediaElements[0];
async function PC_LOCAL_WAIT_FOR_FRAMES_2() {
const vremote = test.pcLocal.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcLocal");
return helper.waitForFrames(vremote);
emitter.start();
await helper.checkVideoPlaying(vremote, 10, 10, 16);
emitter.stop();
},
// For some reason, even though we're getting a 25x25 stream, sometimes
// the resolution isn't updated on the video element on the first frame.
function PC_LOCAL_WAIT_FOR_FRAMES_3() {
var vremote = test.pcLocal.remoteMediaElements[0];
async function PC_LOCAL_WAIT_FOR_FRAMES_3() {
const vremote = test.pcLocal.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcLocal");
return helper.waitForFrames(vremote);
emitter.start();
await helper.checkVideoPlaying(vremote, 10, 10, 16);
emitter.stop();
},
function PC_LOCAL_CHECK_SIZE_2() {
var vlocal = test.pcRemote.localMediaElements[0];
var vremote = test.pcLocal.remoteMediaElements[0];
const vlocal = test.pcRemote.localMediaElements[0];
const vremote = test.pcLocal.remoteMediaElements[0];
ok(vlocal, "Should have local video element for pcRemote");
ok(vremote, "Should have remote video element for pcLocal");
ok(vlocal.videoWidth > 0, "source width is positive");

Просмотреть файл

@ -13,21 +13,20 @@
visible: true
});
var test;
var pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
const pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
function addRIDExtension(pc, extensionId) {
var receivers = pc._pc.getReceivers();
const receivers = pc._pc.getReceivers();
is(receivers.length, 1, "We have exactly one RTP receiver");
var receiver = receivers[0];
const receiver = receivers[0];
SpecialPowers.wrap(pc._pc).mozAddRIDExtension(receiver, extensionId);
}
function selectRecvRID(pc, rid) {
var receivers = pc._pc.getReceivers();
const receivers = pc._pc.getReceivers();
is(receivers.length, 1, "We have exactly one RTP receiver");
var receiver = receivers[0];
const receiver = receivers[0];
SpecialPowers.wrap(pc._pc).mozAddRIDFilter(receiver, rid);
}
@ -38,23 +37,24 @@
// the 80Kbps+overhead needed for the two simulcast streams.
// 100Kbps was apparently too low.
['media.peerconnection.video.min_bitrate_estimate', 180*1000]).then(() => {
var helper;
let emitter, helper;
test = new PeerConnectionTest({bundle: false});
const test = new PeerConnectionTest({bundle: false});
test.setMediaConstraints([{video: true}], []);
test.chain.replace("PC_LOCAL_GUM", [
function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
emitter = new VideoFrameEmitter();
helper = new VideoStreamHelper();
test.pcLocal.attachLocalStream(helper.stream());
test.pcLocal.attachLocalStream(emitter.stream());
}
]);
test.chain.insertBefore('PC_LOCAL_CREATE_OFFER', [
function PC_LOCAL_SET_RIDS(test) {
var senders = test.pcLocal._pc.getSenders();
const senders = test.pcLocal._pc.getSenders();
is(senders.length, 1, "We have exactly one RTP sender");
var sender = senders[0];
const sender = senders[0];
ok(sender.track, "Sender has a track");
return sender.setParameters({
@ -80,7 +80,7 @@
// has been created.
test.chain.insertAfter('PC_REMOTE_SET_LOCAL_DESCRIPTION',[
function PC_REMOTE_SET_RTP_FIRST_RID(test) {
var extmap_id = test.originalOffer.sdp.match(
const extmap_id = test.originalOffer.sdp.match(
"a=extmap:([0-9+])/sendonly urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id");
ok(extmap_id, "Original offer has extmap id for simulcast: " + extmap_id[1]);
// Cause pcRemote to filter out everything but RID "foo", only
@ -91,14 +91,16 @@
]);
test.chain.append([
function PC_REMOTE_WAIT_FOR_FRAMES() {
var vremote = test.pcRemote.remoteMediaElements[0];
async function PC_REMOTE_WAIT_FOR_FRAMES() {
const vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
return helper.waitForFrames(vremote);
emitter.start();
await helper.checkVideoPlaying(vremote, 10, 10, 16);
emitter.stop();
},
function PC_REMOTE_CHECK_SIZE_1() {
var vlocal = test.pcLocal.localMediaElements[0];
var vremote = test.pcRemote.remoteMediaElements[0];
const vlocal = test.pcLocal.localMediaElements[0];
const vremote = test.pcRemote.remoteMediaElements[0];
ok(vlocal, "Should have local video element for pcLocal");
ok(vremote, "Should have remote video element for pcRemote");
ok(vlocal.videoWidth > 0, "source width is positive");
@ -114,21 +116,25 @@
function PC_REMOTE_WAIT_FOR_SECOND_MEDIA_FLOW(test) {
return test.pcRemote.waitForMediaFlow();
},
function PC_REMOTE_WAIT_FOR_FRAMES_2() {
var vremote = test.pcRemote.remoteMediaElements[0];
async function PC_REMOTE_WAIT_FOR_FRAMES_2() {
const vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
return helper.waitForFrames(vremote);
emitter.start();
await helper.checkVideoPlaying(vremote, 10, 10, 16);
emitter.stop();
},
// For some reason, even though we're getting a 25x25 stream, sometimes
// the resolution isn't updated on the video element on the first frame.
function PC_REMOTE_WAIT_FOR_FRAMES_3() {
var vremote = test.pcRemote.remoteMediaElements[0];
async function PC_REMOTE_WAIT_FOR_FRAMES_3() {
const vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
return helper.waitForFrames(vremote);
emitter.start();
await helper.checkVideoPlaying(vremote, 10, 10, 16);
emitter.stop();
},
function PC_REMOTE_CHECK_SIZE_2() {
var vlocal = test.pcLocal.localMediaElements[0];
var vremote = test.pcRemote.remoteMediaElements[0];
const vlocal = test.pcLocal.localMediaElements[0];
const vremote = test.pcRemote.remoteMediaElements[0];
ok(vlocal, "Should have local video element for pcLocal");
ok(vremote, "Should have remote video element for pcRemote");
ok(vlocal.videoWidth > 0, "source width is positive");