Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1725488: Rewrite simulcast mochitests, and remove the hooks they used. r=ng,emilio
Differential Revision: https://phabricator.services.mozilla.com/D122626
This commit is contained in:
Parent: c41f086b94
Commit: 641e8f33fd
@@ -571,18 +571,6 @@ void RTCRtpReceiver::UpdateStreams(StreamAssociationChanges* aChanges) {
  }
}

void RTCRtpReceiver::MozAddRIDExtension(unsigned short aExtensionId) {
  if (mPipeline) {
    mPipeline->AddRIDExtension_m(aExtensionId);
  }
}

void RTCRtpReceiver::MozAddRIDFilter(const nsAString& aRid) {
  if (mPipeline) {
    mPipeline->AddRIDFilter_m(NS_ConvertUTF16toUTF8(aRid).get());
  }
}

// test-only: adds fake CSRCs and audio data
void RTCRtpReceiver::MozInsertAudioLevelForContributingSource(
    const uint32_t aSource, const DOMHighResTimeStamp aTimestamp,
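(For context: these hooks were only reachable from chrome-privileged test code. The old mochitests, removed further down in this commit, drove them through SpecialPowers; the sketch below is condensed from that removed test code, where pc is a PeerConnectionWrapper from pc.js.)

    function selectRecvRID(pc, rid) {
      // Grab the single RTP receiver and ask its MediaPipeline (via the
      // now-removed MozAddRIDFilter hook) to drop every RID except `rid`.
      const receiver = pc._pc.getReceivers()[0];
      SpecialPowers.wrap(receiver).mozAddRIDFilter(rid);
    }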
@@ -60,9 +60,6 @@ class RTCRtpReceiver : public nsISupports,
      nsTArray<dom::RTCRtpContributingSource>& aSources);
  void GetSynchronizationSources(
      nsTArray<dom::RTCRtpSynchronizationSource>& aSources);
  // test-only: called from simulcast mochitests.
  void MozAddRIDExtension(unsigned short aExtensionId);
  void MozAddRIDFilter(const nsAString& aRid);
  // test-only: insert fake CSRCs and audio levels for testing
  void MozInsertAudioLevelForContributingSource(
      const uint32_t aSource, const DOMHighResTimeStamp aTimestamp,
@@ -20,6 +20,8 @@ support-files =
  parser_rtp.js
  peerconnection_audio_forced_sample_rate.js
  iceTestUtils.js
  simulcast.js
  helpers_from_wpt/sdp.js
  !/dom/canvas/test/captureStream_common.js
  !/dom/canvas/test/webgl-mochitest/webgl-util.js
  !/dom/media/test/manifest.js
@@ -0,0 +1,140 @@
"use strict";
/* Helper functions to munge SDP and split the sending track into
 * separate tracks on the receiving end. This can be done in a number
 * of ways, the one used here uses the fact that the MID and RID header
 * extensions which are used for packet routing share the same wire
 * format. The receiver interprets the rids from the sender as mids
 * which allows receiving the different spatial resolutions on separate
 * m-lines and tracks.
 */

// Adapted from wpt to improve handling of cases where answerer is the
// simulcast sender, better handling of a=setup and direction attributes, and
// some simplification. Will probably end up merging back at some point.

const ridExtensions = [
  "urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id",
  "urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id",
];

function ridToMid(sdpString) {
  const sections = SDPUtils.splitSections(sdpString);
  const dtls = SDPUtils.getDtlsParameters(sections[1], sections[0]);
  const ice = SDPUtils.getIceParameters(sections[1], sections[0]);
  const rtpParameters = SDPUtils.parseRtpParameters(sections[1]);
  const setupValue = sdpString.match(/a=setup:(.*)/)[1];
  const directionValue =
    sdpString.match(/a=sendrecv|a=sendonly|a=recvonly|a=inactive/) ||
    "a=sendrecv";

  // Skip mid extension; we are replacing it with the rid extmap
  rtpParameters.headerExtensions = rtpParameters.headerExtensions.filter(
    ext => {
      return ext.uri != "urn:ietf:params:rtp-hdrext:sdes:mid";
    }
  );

  rtpParameters.headerExtensions.forEach(ext => {
    if (ext.uri == "urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id") {
      ext.uri = "urn:ietf:params:rtp-hdrext:sdes:mid";
    }
  });

  let rids = Array.from(sdpString.matchAll(/a=rid:(.*) send/g)).map(r => r[1]);

  let sdp =
    SDPUtils.writeSessionBoilerplate() +
    SDPUtils.writeDtlsParameters(dtls, setupValue) +
    SDPUtils.writeIceParameters(ice) +
    "a=group:BUNDLE " +
    rids.join(" ") +
    "\r\n";
  const baseRtpDescription = SDPUtils.writeRtpDescription(
    "video",
    rtpParameters
  );
  rids.forEach(rid => {
    sdp +=
      baseRtpDescription +
      "a=mid:" +
      rid +
      "\r\n" +
      "a=msid:rid-" +
      rid +
      " rid-" +
      rid +
      "\r\n";
    sdp += directionValue + "\r\n";
  });

  return sdp;
}

function midToRid(sdpString) {
  const sections = SDPUtils.splitSections(sdpString);
  const dtls = SDPUtils.getDtlsParameters(sections[1], sections[0]);
  const ice = SDPUtils.getIceParameters(sections[1], sections[0]);
  const rtpParameters = SDPUtils.parseRtpParameters(sections[1]);
  const setupValue = sdpString.match(/a=setup:(.*)/)[1];
  const directionValue =
    sdpString.match(/a=sendrecv|a=sendonly|a=recvonly|a=inactive/) ||
    "a=sendrecv";

  // Skip rid extensions; we are replacing them with the mid extmap
  rtpParameters.headerExtensions = rtpParameters.headerExtensions.filter(
    ext => {
      return !ridExtensions.includes(ext.uri);
    }
  );

  rtpParameters.headerExtensions.forEach(ext => {
    if (ext.uri == "urn:ietf:params:rtp-hdrext:sdes:mid") {
      ext.uri = "urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id";
    }
  });

  let mids = [];
  for (let i = 1; i < sections.length; i++) {
    mids.push(SDPUtils.getMid(sections[i]));
  }

  let sdp =
    SDPUtils.writeSessionBoilerplate() +
    SDPUtils.writeDtlsParameters(dtls, setupValue) +
    SDPUtils.writeIceParameters(ice) +
    "a=group:BUNDLE " +
    mids[0] +
    "\r\n";
  sdp += SDPUtils.writeRtpDescription("video", rtpParameters);
  // Although we are converting mids to rids, we still need a mid.
  // The first one will be consistent with trickle ICE candidates.
  sdp += "a=mid:" + mids[0] + "\r\n";
  sdp += directionValue + "\r\n";

  mids.forEach(mid => {
    sdp += "a=rid:" + mid + " recv\r\n";
  });
  sdp += "a=simulcast:recv " + mids.join(";") + "\r\n";

  return sdp;
}

// This would be useful for cases other than simulcast, but we do not use it
// anywhere else right now, nor do we have a place for wpt-friendly helpers at
// the moment.
function createPlaybackElement(track) {
  const elem = document.createElement(track.kind);
  elem.autoplay = true;
  elem.srcObject = new MediaStream([track]);
  elem.id = track.id;
  return elem;
}

async function getPlaybackWithLoadedMetadata(track) {
  const elem = createPlaybackElement(track);
  return new Promise(resolve => {
    elem.addEventListener("loadedmetadata", () => {
      resolve(elem);
    });
  });
}
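(The tests below drive these helpers in two directions. When the offerer is the simulcast sender, as in the send-offer tests further down, the flow is roughly the sketch below; it assumes offerer and answerer are already-created RTCPeerConnections and that the offerer's sender has rid-bearing encodings set via setParameters, with variable names mirroring those tests.)

    // The sender negotiates simulcast normally; the munged SDP makes the
    // receive side see one m-section (and thus one track) per RID.
    const offer = await offerer.createOffer();
    const mungedOffer = ridToMid(offer.sdp);            // rids become mids
    await answerer.setRemoteDescription({ type: "offer", sdp: mungedOffer });
    await offerer.setLocalDescription(offer);

    const answer = await answerer.createAnswer();
    const mungedAnswer = midToRid(answer.sdp);          // mids back to rids
    await offerer.setRemoteDescription({ type: "answer", sdp: mungedAnswer });
    await answerer.setLocalDescription(answer);

(The answer-side tests run the same munging with the two helpers swapped, since there the answerer is the simulcast sender.)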
@@ -3,6 +3,8 @@
<head>
  <script type="application/javascript" src="pc.js"></script>
  <script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
  <script type="application/javascript" src="helpers_from_wpt/sdp.js"></script>
  <script type="application/javascript" src="simulcast.js"></script>
</head>
<body>
<pre id="test">
@@ -13,151 +15,90 @@
    visible: true
  });

function addRIDExtension(pc, extensionId) {
  const receivers = pc._pc.getReceivers();
  is(receivers.length, 1, "We have exactly one RTP receiver");
  const receiver = receivers[0];
  runNetworkTest(async () => {
    await pushPrefs(
      ['media.peerconnection.simulcast', true],
      // 180Kbps was determined empirically, set well-higher than
      // the 80Kbps+overhead needed for the two simulcast streams.
      // 100Kbps was apparently too low.
      ['media.peerconnection.video.min_bitrate_estimate', 180*1000]);

  SpecialPowers.wrap(receiver).mozAddRIDExtension(extensionId);
}

function selectRecvRID(pc, rid) {
  const receivers = pc._pc.getReceivers();
  is(receivers.length, 1, "We have exactly one RTP receiver");
  const receiver = receivers[0];
    const offerer = new RTCPeerConnection();
    const answerer = new RTCPeerConnection();

  SpecialPowers.wrap(receiver).mozAddRIDFilter(rid);
}
    const add = (pc, can, failed) => can && pc.addIceCandidate(can).catch(failed);
    offerer.onicecandidate = e => add(answerer, e.candidate, generateErrorCallback());
    answerer.onicecandidate = e => add(offerer, e.candidate, generateErrorCallback());

runNetworkTest(() =>
  pushPrefs(['media.peerconnection.simulcast', true],
            // 180Kbps was determined empirically, set well-higher than
            // the 80Kbps+overhead needed for the two simulcast streams.
            // 100Kbps was apparently too low.
            ['media.peerconnection.video.min_bitrate_estimate', 180*1000]).then(() => {
    let emitter, helper;
    const metadataToBeLoaded = [];
    offerer.ontrack = (e) => {
      metadataToBeLoaded.push(getPlaybackWithLoadedMetadata(e.track));
    };

    test = new PeerConnectionTest({bundle: false});
    test.setMediaConstraints([{video: true}], [{video: true}]);
    // Two recv transceivers, one for each simulcast stream
    offerer.addTransceiver('video', { direction: 'recvonly' });
    offerer.addTransceiver('video', { direction: 'recvonly' });

    test.chain.replace("PC_REMOTE_GUM", [
      function PC_REMOTE_CANVAS_CAPTURESTREAM(test) {
        emitter = new VideoFrameEmitter();
        helper = new VideoStreamHelper();
        test.pcRemote.attachLocalStream(emitter.stream());
        emitter.start();
      }
    ]);
    // One send transceiver, that will be used to send both simulcast streams
    const emitter = new VideoFrameEmitter();
    const videoStream = emitter.stream();
    answerer.addTrack(videoStream.getVideoTracks()[0], videoStream);
    emitter.start();

    test.chain.insertAfter('PC_REMOTE_GET_OFFER', [
      function PC_REMOTE_SET_RIDS(test) {
        const senders = test.pcRemote._pc.getSenders();
        is(senders.length, 1, "We have exactly one RTP sender");
        const sender = senders[0];
        ok(sender.track, "Sender has a track");
    const offer = await offerer.createOffer();

        return sender.setParameters({
          encodings: [{ rid: "foo", maxBitrate: 40000 },
                      { rid: "bar", maxBitrate: 40000, scaleResolutionDownBy: 2 }]
        });
      },
      function PC_LOCAL_ADD_RIDS_TO_OFFER(test) {
        // Create a dummy offer, and use it to set simulcast stuff on the
        // offer we will actually be using.
        return test.createOffer(test.pcRemote).then(offer => {
          test._local_offer.sdp = sdputils.transferSimulcastProperties(
            offer.sdp, test._local_offer.sdp);
          info("Offer with RIDs: " + JSON.stringify(test._local_offer));
          ok(test._local_offer.sdp.match(/a=simulcast:/), "Modified offer has simulcast");
          ok(test._local_offer.sdp.match(/a=rid:foo/), "Modified offer has rid foo");
          ok(test._local_offer.sdp.match(/a=rid:bar/), "Modified offer has rid bar");
          ok(test._local_offer.sdp.match(/urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id/), "Modified offer has RID");
        });
      }
    ]);
    const mungedOffer = midToRid(offer.sdp);
    info(`Transformed recv offer to simulcast: ${offer.sdp} to ${mungedOffer}`);

    test.chain.insertAfter('PC_LOCAL_GET_ANSWER',[
      function PC_LOCAL_REMOVE_SIMULCAST_ATTRS_FROM_ANSWER(test) {
        test._remote_answer.sdp =
          sdputils.removeSimulcastProperties(test._remote_answer.sdp);
      }
    ]);
    await answerer.setRemoteDescription({type: 'offer', sdp: mungedOffer});
    await offerer.setLocalDescription(offer);

    // do this after set remote description so the MediaPipeline
    // has been created.
    test.chain.insertAfter('PC_LOCAL_SET_REMOTE_DESCRIPTION',[
      function PC_LOCAL_SET_RTP_FIRST_RID(test) {
        const extmap_id = test._local_offer.sdp.match(
          "a=extmap:([0-9+])/recvonly urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id");
        ok(extmap_id, "Local offer has extmap id for simulcast: " + extmap_id[1]);
        // Cause pcLocal to filter out everything but RID "bar", only
        // allowing one of the simulcast streams through.
        addRIDExtension(test.pcLocal, extmap_id[1]);
        selectRecvRID(test.pcLocal, "bar");
      }
    ]);
    const rids = offerer.getTransceivers().map(t => t.mid);
    is(rids.length, 2, 'Should have 2 mids in offer');
    ok(rids[0] != '', 'First mid should be non-empty');
    ok(rids[1] != '', 'Second mid should be non-empty');
    info(`rids: ${JSON.stringify(rids)}`);

    test.chain.append([
      async function PC_LOCAL_WAIT_FOR_FRAMES() {
        const vremote = test.pcLocal.remoteMediaElements[0];
        ok(vremote, "Should have remote video element for pcLocal");
        emitter.start();
        await helper.checkVideoPlaying(vremote);
        emitter.stop();
      },
      function PC_LOCAL_CHECK_SIZE_1() {
        const vlocal = test.pcRemote.localMediaElements[0];
        const vremote = test.pcLocal.remoteMediaElements[0];
        ok(vlocal, "Should have local video element for pcRemote");
        ok(vremote, "Should have remote video element for pcLocal");
        is(vlocal.videoWidth, 50, "correct source width");
        is(vlocal.videoHeight, 50, "correct source height");
        is(vremote.videoWidth, 24,
           "sink is 1/2 width of source, modulo our cropping algorithm");
        is(vremote.videoHeight, 24,
           "sink is 1/2 height of source, modulo our cropping algorithm");
      },
      function PC_LOCAL_SET_RTP_SECOND_RID(test) {
        // Now, cause pcLocal to filter out everything but RID "foo", only
        // allowing the other simulcast stream through.
        selectRecvRID(test.pcLocal, "foo");
      },
      function PC_LOCAL_WAIT_FOR_SECOND_MEDIA_FLOW(test) {
        return test.pcLocal.waitForMediaFlow();
      },
      async function PC_LOCAL_WAIT_FOR_FRAMES_2() {
        const vremote = test.pcLocal.remoteMediaElements[0];
        ok(vremote, "Should have remote video element for pcLocal");
        emitter.start();
        await helper.checkVideoPlaying(vremote);
        emitter.stop();
      },
      // For some reason, even though we're getting a 24x24 stream, sometimes
      // the resolution isn't updated on the video element on the first frame.
      async function PC_LOCAL_WAIT_FOR_FRAMES_3() {
        const vremote = test.pcLocal.remoteMediaElements[0];
        ok(vremote, "Should have remote video element for pcLocal");
        emitter.start();
        await helper.checkVideoPlaying(vremote);
        emitter.stop();
      },
      function PC_LOCAL_CHECK_SIZE_2() {
        const vlocal = test.pcRemote.localMediaElements[0];
        const vremote = test.pcLocal.remoteMediaElements[0];
        ok(vlocal, "Should have local video element for pcRemote");
        ok(vremote, "Should have remote video element for pcLocal");
        is(vlocal.videoWidth, 50, "correct source width");
        is(vlocal.videoHeight, 50, "correct source height");
        is(vremote.videoWidth, 48,
           "sink is same width as source, modulo our cropping algorithm");
        is(vremote.videoHeight, 48,
           "sink is same height as source, modulo our cropping algorithm");
      },
    ]);
    const sender = answerer.getSenders()[0];
    await sender.setParameters({
      encodings: [
        { rid: rids[0], maxBitrate: 40000 },
        { rid: rids[1], maxBitrate: 40000, scaleResolutionDownBy: 2 }
      ]
    });

    return test.run();
  })
  .catch(e => ok(false, "unexpected failure: " + e)));
    const answer = await answerer.createAnswer();

    const mungedAnswer = ridToMid(answer.sdp);
    info(`Transformed send simulcast answer to multiple m-sections: ${answer.sdp} to ${mungedAnswer}`);
    await offerer.setRemoteDescription({type: 'answer', sdp: mungedAnswer});
    await answerer.setLocalDescription(answer);

    is(metadataToBeLoaded.length, 2, 'Offerer should have gotten 2 ontrack events');
    info('Waiting for 2 loadedmetadata events');
    const videoElems = await Promise.all(metadataToBeLoaded);

    const helper = new VideoStreamHelper();
    info('Waiting for first video element to start playing');
    await helper.checkVideoPlaying(videoElems[0]);
    info('Waiting for second video element to start playing');
    await helper.checkVideoPlaying(videoElems[1]);

    is(videoElems[0].videoWidth, 48,
       "sink is same width as source, modulo our cropping algorithm");
    is(videoElems[0].videoHeight, 48,
       "sink is same height as source, modulo our cropping algorithm");
    is(videoElems[1].videoWidth, 24,
       "sink is 1/2 width of source, modulo our cropping algorithm");
    is(videoElems[1].videoHeight, 24,
       "sink is 1/2 height of source, modulo our cropping algorithm");

    emitter.stop();
    videoStream.getVideoTracks()[0].stop();
    offerer.close();
    answerer.close();
  });
</script>
</pre>
</body>
@@ -3,6 +3,8 @@
<head>
  <script type="application/javascript" src="pc.js"></script>
  <script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
  <script type="application/javascript" src="helpers_from_wpt/sdp.js"></script>
  <script type="application/javascript" src="simulcast.js"></script>
</head>
<body>
<pre id="test">
@@ -13,151 +15,90 @@
|
|||
visible: true
|
||||
});
|
||||
|
||||
function addRIDExtension(pc, extensionId) {
|
||||
const receivers = pc._pc.getReceivers();
|
||||
is(receivers.length, 1, "We have exactly one RTP receiver");
|
||||
const receiver = receivers[0];
|
||||
runNetworkTest(async () => {
|
||||
await pushPrefs(
|
||||
['media.peerconnection.simulcast', true],
|
||||
// 180Kbps was determined empirically, set well-higher than
|
||||
// the 80Kbps+overhead needed for the two simulcast streams.
|
||||
// 100Kbps was apparently too low.
|
||||
['media.peerconnection.video.min_bitrate_estimate', 180*1000]);
|
||||
|
||||
SpecialPowers.wrap(receiver).mozAddRIDExtension(extensionId);
|
||||
}
|
||||
|
||||
function selectRecvRID(pc, rid) {
|
||||
const receivers = pc._pc.getReceivers();
|
||||
is(receivers.length, 1, "We have exactly one RTP receiver");
|
||||
const receiver = receivers[0];
|
||||
const offerer = new RTCPeerConnection();
|
||||
const answerer = new RTCPeerConnection();
|
||||
|
||||
SpecialPowers.wrap(receiver).mozAddRIDFilter(rid);
|
||||
}
|
||||
const add = (pc, can, failed) => can && pc.addIceCandidate(can).catch(failed);
|
||||
offerer.onicecandidate = e => add(answerer, e.candidate, generateErrorCallback());
|
||||
answerer.onicecandidate = e => add(offerer, e.candidate, generateErrorCallback());
|
||||
|
||||
runNetworkTest(() =>
|
||||
pushPrefs(['media.peerconnection.simulcast', true],
|
||||
// 180Kbps was determined empirically, set well-higher than
|
||||
// the 80Kbps+overhead needed for the two simulcast streams.
|
||||
// 100Kbps was apparently too low.
|
||||
['media.peerconnection.video.min_bitrate_estimate', 180*1000]).then(() => {
|
||||
let emitter, helper;
|
||||
const metadataToBeLoaded = [];
|
||||
offerer.ontrack = (e) => {
|
||||
metadataToBeLoaded.push(getPlaybackWithLoadedMetadata(e.track));
|
||||
};
|
||||
|
||||
test = new PeerConnectionTest({bundle: false});
|
||||
test.setMediaConstraints([{video: true}], [{video: true}]);
|
||||
// Two recv transceivers, one for each simulcast stream
|
||||
offerer.addTransceiver('video', { direction: 'recvonly' });
|
||||
offerer.addTransceiver('video', { direction: 'recvonly' });
|
||||
|
||||
test.chain.replace("PC_REMOTE_GUM", [
|
||||
function PC_REMOTE_CANVAS_CAPTURESTREAM(test) {
|
||||
emitter = new VideoFrameEmitter();
|
||||
helper = new VideoStreamHelper();
|
||||
test.pcRemote.attachLocalStream(emitter.stream());
|
||||
emitter.start();
|
||||
}
|
||||
]);
|
||||
// One send transceiver, that will be used to send both simulcast streams
|
||||
const emitter = new VideoFrameEmitter();
|
||||
const videoStream = emitter.stream();
|
||||
answerer.addTrack(videoStream.getVideoTracks()[0], videoStream);
|
||||
emitter.start();
|
||||
|
||||
test.chain.insertAfter('PC_REMOTE_GET_OFFER', [
|
||||
function PC_REMOTE_SET_RIDS(test) {
|
||||
const senders = test.pcRemote._pc.getSenders();
|
||||
is(senders.length, 1, "We have exactly one RTP sender");
|
||||
const sender = senders[0];
|
||||
ok(sender.track, "Sender has a track");
|
||||
const offer = await offerer.createOffer();
|
||||
|
||||
return sender.setParameters({
|
||||
encodings: [{ rid: "foo", maxBitrate: 40000, scaleResolutionDownBy: 2 },
|
||||
{ rid: "bar", maxBitrate: 40000 }]
|
||||
});
|
||||
},
|
||||
function PC_LOCAL_ADD_RIDS_TO_OFFER(test) {
|
||||
// Create a dummy offer, and use it to set simulcast stuff on the
|
||||
// offer we will actually be using.
|
||||
return test.createOffer(test.pcRemote).then(offer => {
|
||||
test._local_offer.sdp = sdputils.transferSimulcastProperties(
|
||||
offer.sdp, test._local_offer.sdp);
|
||||
info("Offer with RIDs: " + JSON.stringify(test._local_offer));
|
||||
ok(test._local_offer.sdp.match(/a=simulcast:/), "Modified offer has simulcast");
|
||||
ok(test._local_offer.sdp.match(/a=rid:foo/), "Modified offer has rid foo");
|
||||
ok(test._local_offer.sdp.match(/a=rid:bar/), "Modified offer has rid bar");
|
||||
ok(test._local_offer.sdp.match(/urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id/), "Modified offer has RID");
|
||||
});
|
||||
}
|
||||
]);
|
||||
const mungedOffer = midToRid(offer.sdp);
|
||||
info(`Transformed recv offer to simulcast: ${offer.sdp} to ${mungedOffer}`);
|
||||
|
||||
test.chain.insertAfter('PC_LOCAL_GET_ANSWER',[
|
||||
function PC_LOCAL_REMOVE_SIMULCAST_ATTRS_FROM_ANSWER(test) {
|
||||
test._remote_answer.sdp =
|
||||
sdputils.removeSimulcastProperties(test._remote_answer.sdp);
|
||||
}
|
||||
]);
|
||||
await answerer.setRemoteDescription({type: 'offer', sdp: mungedOffer});
|
||||
await offerer.setLocalDescription(offer);
|
||||
|
||||
// do this after set remote description so the MediaPipeline
|
||||
// has been created.
|
||||
test.chain.insertAfter('PC_LOCAL_SET_REMOTE_DESCRIPTION',[
|
||||
function PC_LOCAL_SET_RTP_FIRST_RID(test) {
|
||||
const extmap_id = test._local_offer.sdp.match(
|
||||
"a=extmap:([0-9+])/recvonly urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id");
|
||||
ok(extmap_id, "Local offer has extmap id for simulcast: " + extmap_id[1]);
|
||||
// Cause pcLocal to filter out everything but RID "bar", only
|
||||
// allowing one of the simulcast streams through.
|
||||
addRIDExtension(test.pcLocal, extmap_id[1]);
|
||||
selectRecvRID(test.pcLocal, "bar");
|
||||
}
|
||||
]);
|
||||
const rids = offerer.getTransceivers().map(t => t.mid);
|
||||
is(rids.length, 2, 'Should have 2 mids in offer');
|
||||
ok(rids[0] != '', 'First mid should be non-empty');
|
||||
ok(rids[1] != '', 'Second mid should be non-empty');
|
||||
info(`rids: ${JSON.stringify(rids)}`);
|
||||
|
||||
test.chain.append([
|
||||
async function PC_LOCAL_WAIT_FOR_FRAMES() {
|
||||
const vremote = test.pcLocal.remoteMediaElements[0];
|
||||
ok(vremote, "Should have remote video element for pcLocal");
|
||||
emitter.start();
|
||||
await helper.checkVideoPlaying(vremote);
|
||||
emitter.stop();
|
||||
},
|
||||
function PC_LOCAL_CHECK_SIZE_1() {
|
||||
const vlocal = test.pcRemote.localMediaElements[0];
|
||||
const vremote = test.pcLocal.remoteMediaElements[0];
|
||||
ok(vlocal, "Should have local video element for pcRemote");
|
||||
ok(vremote, "Should have remote video element for pcLocal");
|
||||
is(vlocal.videoWidth, 50, "correct source width");
|
||||
is(vlocal.videoHeight, 50, "correct source height");
|
||||
is(vremote.videoWidth, 48,
|
||||
"sink is same width as source, modulo our cropping algorithm");
|
||||
is(vremote.videoHeight, 48,
|
||||
"sink is same height as source, modulo our cropping algorithm");
|
||||
},
|
||||
function PC_LOCAL_SET_RTP_SECOND_RID(test) {
|
||||
// Now, cause pcLocal to filter out everything but RID "foo", only
|
||||
// allowing the other simulcast stream through.
|
||||
selectRecvRID(test.pcLocal, "foo");
|
||||
},
|
||||
function PC_LOCAL_WAIT_FOR_SECOND_MEDIA_FLOW(test) {
|
||||
return test.pcLocal.waitForMediaFlow();
|
||||
},
|
||||
async function PC_LOCAL_WAIT_FOR_FRAMES_2() {
|
||||
const vremote = test.pcLocal.remoteMediaElements[0];
|
||||
ok(vremote, "Should have remote video element for pcLocal");
|
||||
emitter.start();
|
||||
await helper.checkVideoPlaying(vremote);
|
||||
emitter.stop();
|
||||
},
|
||||
// For some reason, even though we're getting a 24x24 stream, sometimes
|
||||
// the resolution isn't updated on the video element on the first frame.
|
||||
async function PC_LOCAL_WAIT_FOR_FRAMES_3() {
|
||||
const vremote = test.pcLocal.remoteMediaElements[0];
|
||||
ok(vremote, "Should have remote video element for pcLocal");
|
||||
emitter.start();
|
||||
await helper.checkVideoPlaying(vremote);
|
||||
emitter.stop();
|
||||
},
|
||||
function PC_LOCAL_CHECK_SIZE_2() {
|
||||
const vlocal = test.pcRemote.localMediaElements[0];
|
||||
const vremote = test.pcLocal.remoteMediaElements[0];
|
||||
ok(vlocal, "Should have local video element for pcRemote");
|
||||
ok(vremote, "Should have remote video element for pcLocal");
|
||||
is(vlocal.videoWidth, 50, "correct source width");
|
||||
is(vlocal.videoHeight, 50, "correct source height");
|
||||
is(vremote.videoWidth, 24,
|
||||
"sink is 1/2 width of source, modulo our cropping algorithm");
|
||||
is(vremote.videoHeight, 24,
|
||||
"sink is 1/2 height of source, modulo our cropping algorithm");
|
||||
},
|
||||
]);
|
||||
const sender = answerer.getSenders()[0];
|
||||
sender.setParameters({
|
||||
encodings: [
|
||||
{ rid: rids[0], maxBitrate: 40000, scaleResolutionDownBy: 2 },
|
||||
{ rid: rids[1], maxBitrate: 40000 }
|
||||
]
|
||||
});
|
||||
|
||||
return test.run();
|
||||
})
|
||||
.catch(e => ok(false, "unexpected failure: " + e)));
|
||||
const answer = await answerer.createAnswer();
|
||||
|
||||
const mungedAnswer = ridToMid(answer.sdp);
|
||||
info(`Transformed send simulcast answer to multiple m-sections: ${answer.sdp} to ${mungedAnswer}`);
|
||||
await offerer.setRemoteDescription({type: 'answer', sdp: mungedAnswer});
|
||||
await answerer.setLocalDescription(answer);
|
||||
|
||||
is(metadataToBeLoaded.length, 2, 'Offerer should have gotten 2 ontrack events');
|
||||
info('Waiting for 2 loadedmetadata events');
|
||||
const videoElems = await Promise.all(metadataToBeLoaded);
|
||||
|
||||
const helper = new VideoStreamHelper();
|
||||
info('Waiting for first video element to start playing');
|
||||
await helper.checkVideoPlaying(videoElems[0]);
|
||||
info('Waiting for second video element to start playing');
|
||||
await helper.checkVideoPlaying(videoElems[1]);
|
||||
|
||||
is(videoElems[1].videoWidth, 48,
|
||||
"sink is same width as source, modulo our cropping algorithm");
|
||||
is(videoElems[1].videoHeight, 48,
|
||||
"sink is same height as source, modulo our cropping algorithm");
|
||||
is(videoElems[0].videoWidth, 24,
|
||||
"sink is 1/2 width of source, modulo our cropping algorithm");
|
||||
is(videoElems[0].videoHeight, 24,
|
||||
"sink is 1/2 height of source, modulo our cropping algorithm");
|
||||
|
||||
emitter.stop();
|
||||
videoStream.getVideoTracks()[0].stop();
|
||||
offerer.close();
|
||||
answerer.close();
|
||||
});
|
||||
</script>
|
||||
</pre>
|
||||
</body>
|
||||
|
|
|
@@ -3,6 +3,8 @@
<head>
  <script type="application/javascript" src="pc.js"></script>
  <script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
  <script type="application/javascript" src="helpers_from_wpt/sdp.js"></script>
  <script type="application/javascript" src="simulcast.js"></script>
</head>
<body>
<pre id="test">
@@ -13,196 +15,143 @@
|
|||
visible: true
|
||||
});
|
||||
|
||||
const encodings = [{ rid: "foo", maxBitrate: 40000, scaleResolutionDownBy: 1.9 },
|
||||
{ rid: "bar", maxBitrate: 40000, scaleResolutionDownBy: 3.5 },
|
||||
{ rid: "baz", maxBitrate: 40000, scaleResolutionDownBy: 17.8 }];
|
||||
|
||||
function addRIDExtension(pc, extensionId) {
|
||||
const receivers = pc._pc.getReceivers();
|
||||
is(receivers.length, 1, "We have exactly one RTP receiver");
|
||||
const receiver = receivers[0];
|
||||
|
||||
SpecialPowers.wrap(receiver).mozAddRIDExtension(extensionId);
|
||||
}
|
||||
|
||||
function selectRecvRID(pc, rid) {
|
||||
const receivers = pc._pc.getReceivers();
|
||||
is(receivers.length, 1, "We have exactly one RTP receiver");
|
||||
const receiver = receivers[0];
|
||||
|
||||
SpecialPowers.wrap(receiver).mozAddRIDFilter(rid);
|
||||
}
|
||||
|
||||
async function changeSourceResolution(test, emitter, width, height) {
|
||||
info(`Changing source resolution to ${width}x${height}`);
|
||||
emitter.size(width, height);
|
||||
emitter.start();
|
||||
await Promise.all([
|
||||
haveEvent(test.pcRemote.localMediaElements[0], "resize"),
|
||||
haveEvent(test.pcLocal.remoteMediaElements[0], "resize"),
|
||||
]);
|
||||
emitter.stop();
|
||||
info("Source resolution changed");
|
||||
}
|
||||
|
||||
async function setParameters(test, emitter, encodings) {
|
||||
info(`Setting parameters to ${JSON.stringify(encodings)}`);
|
||||
emitter.start();
|
||||
await test.pcRemote._pc.getSenders()[0].setParameters({encodings});
|
||||
await haveEvent(test.pcLocal.remoteMediaElements[0], "resize");
|
||||
emitter.stop();
|
||||
info("Parameters set");
|
||||
}
|
||||
|
||||
async function checkResolution(test, emitter, rid) {
|
||||
const vlocal = test.pcRemote.localMediaElements[0];
|
||||
const vremote = test.pcLocal.remoteMediaElements[0];
|
||||
|
||||
info(`Changing to rid ${rid}`);
|
||||
selectRecvRID(test.pcLocal, rid);
|
||||
emitter.start();
|
||||
await haveEvent(vremote, "resize");
|
||||
emitter.stop();
|
||||
|
||||
const srcWidth = vlocal.videoWidth;
|
||||
const srcHeight = vlocal.videoHeight;
|
||||
info(`Source resolution is ${srcWidth}x${srcHeight}`);
|
||||
|
||||
const scaleDownBy = encodings.find(({rid: r}) => r == rid).scaleResolutionDownBy;
|
||||
const expectedWidth = srcWidth / scaleDownBy;
|
||||
const expectedHeight = srcHeight / scaleDownBy;
|
||||
const margin = srcWidth * 0.1;
|
||||
const width = vremote.videoWidth;
|
||||
const height = vremote.videoHeight;
|
||||
ok(width >= expectedWidth - margin && width <= expectedWidth + margin,
|
||||
`Width ${width} should be within 10% of ${expectedWidth} for rid '${rid}'`);
|
||||
ok(height >= expectedHeight - margin && height <= expectedHeight + margin,
|
||||
`Height ${height} should be within 10% of ${expectedHeight} for rid '${rid}'`);
|
||||
}
|
||||
|
||||
async function checkResolutions(test, emitter) {
|
||||
const vremote = test.pcLocal.remoteMediaElements[0];
|
||||
|
||||
// Start by making sure we're not on foo (first default) or
|
||||
// baz (subsequent default), thus getting resize events.
|
||||
selectRecvRID(test.pcLocal, "bar");
|
||||
emitter.start();
|
||||
await haveEvent(vremote, "resize");
|
||||
emitter.stop();
|
||||
|
||||
await checkResolution(test, emitter, "foo");
|
||||
await checkResolution(test, emitter, "bar");
|
||||
await checkResolution(test, emitter, "baz");
|
||||
}
|
||||
|
||||
runNetworkTest(async () => {
|
||||
await pushPrefs(['media.peerconnection.simulcast', true],
|
||||
['media.peerconnection.video.lock_scaling', true],
|
||||
// 240Kbps was determined empirically
|
||||
['media.peerconnection.video.min_bitrate_estimate', 240*1000]);
|
||||
const helper = new VideoStreamHelper();
|
||||
const emitter = new VideoFrameEmitter(helper.green, helper.red, 705, 528);
|
||||
|
||||
let emitter, helper;
|
||||
async function checkVideoElement(senderElement, receiverElement, encoding) {
|
||||
info(`Waiting for receiver video element ${encoding.rid} to start playing`);
|
||||
await helper.checkVideoPlaying(receiverElement);
|
||||
const srcWidth = senderElement.videoWidth;
|
||||
const srcHeight = senderElement.videoHeight;
|
||||
info(`Source resolution is ${srcWidth}x${srcHeight}`);
|
||||
|
||||
test = new PeerConnectionTest({bundle: false});
|
||||
test.setMediaConstraints([{video: true}], [{video: true}]);
|
||||
const scaleDownBy = encoding.scaleResolutionDownBy;
|
||||
const expectedWidth = srcWidth / scaleDownBy;
|
||||
const expectedHeight = srcHeight / scaleDownBy;
|
||||
const margin = srcWidth * 0.1;
|
||||
const width = receiverElement.videoWidth;
|
||||
const height = receiverElement.videoHeight;
|
||||
const rid = encoding.rid;
|
||||
ok(width >= expectedWidth - margin && width <= expectedWidth + margin,
|
||||
`Width ${width} should be within 10% of ${expectedWidth} for rid '${rid}'`);
|
||||
ok(height >= expectedHeight - margin && height <= expectedHeight + margin,
|
||||
`Height ${height} should be within 10% of ${expectedHeight} for rid '${rid}'`);
|
||||
}
|
||||
|
||||
test.chain.replace("PC_REMOTE_GUM", [
|
||||
function PC_REMOTE_CANVAS_CAPTURESTREAM(test) {
|
||||
helper = new VideoStreamHelper();
|
||||
emitter = new VideoFrameEmitter(helper.green, helper.red, 705, 528);
|
||||
test.pcRemote.attachLocalStream(emitter.stream());
|
||||
// Don't start the emitter yet. Only do that when we're able to set rid filters.
|
||||
async function checkVideoElements(senderElement, receiverElements, encodings) {
|
||||
is(receiverElements.length, encodings.length, 'Number of video elements should match number of encodings');
|
||||
info('Waiting for sender video element to start playing');
|
||||
await helper.checkVideoPlaying(senderElement);
|
||||
for (let i = 0; i < encodings.length; i++) {
|
||||
await checkVideoElement(senderElement, receiverElements[i], encodings[i]);
|
||||
}
|
||||
]);
|
||||
}
|
||||
|
||||
test.chain.insertAfter('PC_REMOTE_GET_OFFER', [
|
||||
function PC_REMOTE_SET_RIDS(test) {
|
||||
const senders = test.pcRemote._pc.getSenders();
|
||||
is(senders.length, 1, "We have exactly one RTP sender");
|
||||
const sender = senders[0];
|
||||
ok(sender.track, "Sender has a track");
|
||||
async function waitForResizeEvents(elements) {
|
||||
return Promise.all(elements.map(elem => haveEvent(elem, 'resize')));
|
||||
}
|
||||
|
||||
return sender.setParameters({encodings});
|
||||
},
|
||||
async function PC_LOCAL_ADD_RIDS_TO_OFFER(test) {
|
||||
// Create a dummy offer, and use it to set simulcast stuff on the
|
||||
// offer we will actually be using.
|
||||
let offer = await test.createOffer(test.pcRemote);
|
||||
test._local_offer.sdp = sdputils.transferSimulcastProperties(
|
||||
offer.sdp, test._local_offer.sdp);
|
||||
info(`Offer with RIDs: ${JSON.stringify(test._local_offer)}`);
|
||||
ok(test._local_offer.sdp.match(/a=simulcast:/), "Modified offer has simulcast");
|
||||
ok(test._local_offer.sdp.match(/a=rid:foo/), "Modified offer has rid foo");
|
||||
ok(test._local_offer.sdp.match(/a=rid:bar/), "Modified offer has rid bar");
|
||||
ok(test._local_offer.sdp.match(/a=rid:baz/), "Modified offer has rid baz");
|
||||
ok(test._local_offer.sdp.match(/urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id/), "Modified offer has RID");
|
||||
}
|
||||
]);
|
||||
const encodings = [{ rid: "0", maxBitrate: 40000, scaleResolutionDownBy: 1.9 },
|
||||
{ rid: "1", maxBitrate: 40000, scaleResolutionDownBy: 3.5 },
|
||||
{ rid: "2", maxBitrate: 40000, scaleResolutionDownBy: 17.8 }];
|
||||
|
||||
test.chain.insertAfter('PC_LOCAL_GET_ANSWER',[
|
||||
function PC_LOCAL_REMOVE_SIMULCAST_ATTRS_FROM_ANSWER(test) {
|
||||
test._remote_answer.sdp =
|
||||
sdputils.removeSimulcastProperties(test._remote_answer.sdp);
|
||||
}
|
||||
]);
|
||||
await pushPrefs(
|
||||
['media.peerconnection.simulcast', true],
|
||||
// 180Kbps was determined empirically, set well-higher than
|
||||
// the 80Kbps+overhead needed for the two simulcast streams.
|
||||
// 100Kbps was apparently too low.
|
||||
['media.peerconnection.video.min_bitrate_estimate', 180*1000]);
|
||||
|
||||
// do this after set remote description so the MediaPipeline
|
||||
// has been created.
|
||||
test.chain.insertAfter('PC_LOCAL_SET_REMOTE_DESCRIPTION',[
|
||||
function PC_LOCAL_SET_RTP_FIRST_RID(test) {
|
||||
info(`local offer: ${test._local_offer.sdp}`);
|
||||
const extmap_id = test._local_offer.sdp.match(
|
||||
"a=extmap:([0-9+])/recvonly urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id");
|
||||
ok(extmap_id, `Original answer has extmap id for simulcast: ${extmap_id[1]}`);
|
||||
// Cause pcLocal to filter out everything but RID "foo", only
|
||||
// allowing one of the simulcast streams through.
|
||||
addRIDExtension(test.pcLocal, extmap_id[1]);
|
||||
selectRecvRID(test.pcLocal, "foo");
|
||||
emitter.start();
|
||||
},
|
||||
function SETUP_RESIZE_LISTENERS(test) {
|
||||
for(const elem of [...test.pcRemote.localMediaElements,
|
||||
...test.pcLocal.remoteMediaElements,
|
||||
]) {
|
||||
elem.addEventListener("resize",
|
||||
() => info(`element ${elem.id} resized to ${elem.videoWidth}x${elem.videoHeight}`));
|
||||
}
|
||||
},
|
||||
]);
|
||||
|
||||
test.chain.append([
|
||||
async function PC_LOCAL_CHECK_INITIAL_SIZES() {
|
||||
await checkResolutions(test, emitter);
|
||||
},
|
||||
async function PC_LOCAL_CHANGE_SRC_1() {
|
||||
await changeSourceResolution(test, emitter, 1280, 720);
|
||||
await checkResolutions(test, emitter);
|
||||
},
|
||||
async function PC_LOCAL_CHANGE_PARAMS_2() {
|
||||
encodings.find(({rid}) => rid == "foo").scaleResolutionDownBy = 1;
|
||||
encodings.find(({rid}) => rid == "bar").scaleResolutionDownBy = 2;
|
||||
encodings.find(({rid}) => rid == "baz").scaleResolutionDownBy = 3;
|
||||
await setParameters(test, emitter, encodings);
|
||||
await checkResolutions(test, emitter);
|
||||
},
|
||||
async function PC_LOCAL_CHANGE_PARAMS_3() {
|
||||
encodings.find(({rid}) => rid == "foo").scaleResolutionDownBy = 6;
|
||||
encodings.find(({rid}) => rid == "bar").scaleResolutionDownBy = 5;
|
||||
encodings.find(({rid}) => rid == "baz").scaleResolutionDownBy = 4;
|
||||
await setParameters(test, emitter, encodings);
|
||||
await checkResolutions(test, emitter);
|
||||
},
|
||||
async function PC_LOCAL_CHANGE_PARAMS_4() {
|
||||
encodings.find(({rid}) => rid == "foo").scaleResolutionDownBy = 4;
|
||||
encodings.find(({rid}) => rid == "bar").scaleResolutionDownBy = 1;
|
||||
encodings.find(({rid}) => rid == "baz").scaleResolutionDownBy = 2;
|
||||
await setParameters(test, emitter, encodings);
|
||||
await checkResolutions(test, emitter);
|
||||
},
|
||||
]);
|
||||
const offerer = new RTCPeerConnection();
|
||||
const answerer = new RTCPeerConnection();
|
||||
|
||||
await test.run();
|
||||
const add = (pc, can, failed) => can && pc.addIceCandidate(can).catch(failed);
|
||||
offerer.onicecandidate = e => add(answerer, e.candidate, generateErrorCallback());
|
||||
answerer.onicecandidate = e => add(offerer, e.candidate, generateErrorCallback());
|
||||
|
||||
const metadataToBeLoaded = [];
|
||||
answerer.ontrack = (e) => {
|
||||
metadataToBeLoaded.push(getPlaybackWithLoadedMetadata(e.track));
|
||||
};
|
||||
|
||||
// One send transceiver, that will be used to send both simulcast streams
|
||||
const videoStream = emitter.stream();
|
||||
offerer.addTrack(videoStream.getVideoTracks()[0], videoStream);
|
||||
const senderElement = document.createElement('video');
|
||||
senderElement.autoplay = true;
|
||||
senderElement.srcObject = videoStream;
|
||||
senderElement.id = videoStream.id;
|
||||
|
||||
const sender = offerer.getSenders()[0];
|
||||
sender.setParameters({encodings});
|
||||
|
||||
const offer = await offerer.createOffer();
|
||||
|
||||
const mungedOffer = ridToMid(offer.sdp);
|
||||
info(`Transformed send simulcast offer to multiple m-sections: ${offer.sdp} to ${mungedOffer}`);
|
||||
|
||||
await answerer.setRemoteDescription({type: 'offer', sdp: mungedOffer});
|
||||
await offerer.setLocalDescription(offer);
|
||||
|
||||
const rids = answerer.getTransceivers().map(t => t.mid);
|
||||
is(rids.length, 3, 'Should have 3 mids in offer');
|
||||
ok(rids[0], 'First mid should be non-empty');
|
||||
ok(rids[1], 'Second mid should be non-empty');
|
||||
ok(rids[2], 'Third mid should be non-empty');
|
||||
info(`rids: ${JSON.stringify(rids)}`);
|
||||
|
||||
const answer = await answerer.createAnswer();
|
||||
|
||||
const mungedAnswer = midToRid(answer.sdp);
|
||||
info(`Transformed recv answer to simulcast: ${answer.sdp} to ${mungedAnswer}`);
|
||||
await offerer.setRemoteDescription({type: 'answer', sdp: mungedAnswer});
|
||||
await answerer.setLocalDescription(answer);
|
||||
|
||||
is(metadataToBeLoaded.length, 3, 'Offerer should have gotten 3 ontrack events');
|
||||
emitter.start();
|
||||
info('Waiting for 3 loadedmetadata events');
|
||||
const videoElems = await Promise.all(metadataToBeLoaded);
|
||||
await checkVideoElements(senderElement, videoElems, encodings);
|
||||
emitter.stop();
|
||||
|
||||
info(`Changing source resolution to 1280x720`);
|
||||
emitter.size(1280, 720);
|
||||
emitter.start();
|
||||
await waitForResizeEvents([senderElement, ...videoElems]);
|
||||
await checkVideoElements(senderElement, videoElems, encodings);
|
||||
|
||||
encodings[0].scaleResolutionDownBy = 1;
|
||||
encodings[1].scaleResolutionDownBy = 2;
|
||||
encodings[2].scaleResolutionDownBy = 3;
|
||||
info(`Changing encodings to ${JSON.stringify(encodings)}`);
|
||||
await sender.setParameters({encodings});
|
||||
await waitForResizeEvents(videoElems);
|
||||
await checkVideoElements(senderElement, videoElems, encodings);
|
||||
|
||||
encodings[0].scaleResolutionDownBy = 6;
|
||||
encodings[1].scaleResolutionDownBy = 5;
|
||||
encodings[2].scaleResolutionDownBy = 4;
|
||||
info(`Changing encodings to ${JSON.stringify(encodings)}`);
|
||||
await sender.setParameters({encodings});
|
||||
await waitForResizeEvents(videoElems);
|
||||
await checkVideoElements(senderElement, videoElems, encodings);
|
||||
|
||||
encodings[0].scaleResolutionDownBy = 4;
|
||||
encodings[1].scaleResolutionDownBy = 1;
|
||||
encodings[2].scaleResolutionDownBy = 2;
|
||||
info(`Changing encodings to ${JSON.stringify(encodings)}`);
|
||||
await sender.setParameters({encodings});
|
||||
await waitForResizeEvents(videoElems);
|
||||
await checkVideoElements(senderElement, videoElems, encodings);
|
||||
|
||||
emitter.stop();
|
||||
videoStream.getVideoTracks()[0].stop();
|
||||
offerer.close();
|
||||
answerer.close();
|
||||
});
|
||||
|
||||
</script>
|
||||
</pre>
|
||||
</body>
|
||||
|
|
|
@@ -4,6 +4,8 @@
  <script type="application/javascript" src="pc.js"></script>
  <script type="application/javascript" src="parser_rtp.js"></script>
  <script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
  <script type="application/javascript" src="helpers_from_wpt/sdp.js"></script>
  <script type="application/javascript" src="simulcast.js"></script>
</head>
<body>
<pre id="test">
@@ -14,172 +16,85 @@
|
|||
visible: true
|
||||
});
|
||||
|
||||
function addRIDExtension(pc, extensionId) {
|
||||
const receivers = pc._pc.getReceivers();
|
||||
is(receivers.length, 1, "We have exactly one RTP receiver");
|
||||
const receiver = receivers[0];
|
||||
|
||||
SpecialPowers.wrap(receiver).mozAddRIDExtension(extensionId);
|
||||
}
|
||||
|
||||
function selectRecvRID(pc, rid) {
|
||||
const receivers = pc._pc.getReceivers();
|
||||
is(receivers.length, 1, "We have exactly one RTP receiver");
|
||||
const receiver = receivers[0];
|
||||
|
||||
SpecialPowers.wrap(receiver).mozAddRIDFilter(rid);
|
||||
}
|
||||
|
||||
runNetworkTest(async () => {
|
||||
await pushPrefs(
|
||||
['media.peerconnection.simulcast', true],
|
||||
// 180Kbps was determined empirically, set well-higher than
|
||||
// the 80Kbps+overhead needed for the two simulcast streams.
|
||||
// 100Kbps was apparently too low.
|
||||
['media.peerconnection.video.min_bitrate_estimate', 180*1000],
|
||||
);
|
||||
['media.peerconnection.simulcast', true],
|
||||
// 180Kbps was determined empirically, set well-higher than
|
||||
// the 80Kbps+overhead needed for the two simulcast streams.
|
||||
// 100Kbps was apparently too low.
|
||||
['media.peerconnection.video.min_bitrate_estimate', 180*1000]);
|
||||
|
||||
let emitter, helper;
|
||||
|
||||
const test = new PeerConnectionTest({bundle: false});
|
||||
test.setMediaConstraints([{video: true}], []);
|
||||
const offerer = new RTCPeerConnection();
|
||||
const answerer = new RTCPeerConnection();
|
||||
|
||||
test.chain.replace("PC_LOCAL_GUM", [
|
||||
function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
|
||||
emitter = new VideoFrameEmitter();
|
||||
helper = new VideoStreamHelper();
|
||||
test.pcLocal.attachLocalStream(emitter.stream());
|
||||
emitter.start();
|
||||
}
|
||||
]);
|
||||
const add = (pc, can, failed) => can && pc.addIceCandidate(can).catch(failed);
|
||||
offerer.onicecandidate = e => add(answerer, e.candidate, generateErrorCallback());
|
||||
answerer.onicecandidate = e => add(offerer, e.candidate, generateErrorCallback());
|
||||
|
||||
test.chain.insertBefore('PC_LOCAL_CREATE_OFFER', [
|
||||
function PC_LOCAL_SET_RIDS(test) {
|
||||
const senders = test.pcLocal._pc.getSenders();
|
||||
is(senders.length, 1, "We have exactly one RTP sender");
|
||||
const sender = senders[0];
|
||||
ok(sender.track, "Sender has a track");
|
||||
const metadataToBeLoaded = [];
|
||||
answerer.ontrack = (e) => {
|
||||
metadataToBeLoaded.push(getPlaybackWithLoadedMetadata(e.track));
|
||||
};
|
||||
|
||||
return sender.setParameters({
|
||||
encodings: [{ rid: "foo", maxBitrate: 40000 },
|
||||
{ rid: "bar", maxBitrate: 40000, scaleResolutionDownBy: 2 }]
|
||||
});
|
||||
}
|
||||
]);
|
||||
// One send transceiver, that will be used to send both simulcast streams
|
||||
const emitter = new VideoFrameEmitter();
|
||||
const videoStream = emitter.stream();
|
||||
offerer.addTrack(videoStream.getVideoTracks()[0], videoStream);
|
||||
emitter.start();
|
||||
|
||||
test.chain.insertAfter('PC_LOCAL_GET_ANSWER', [
|
||||
function PC_LOCAL_ADD_RIDS_TO_ANSWER(test) {
|
||||
test._remote_answer.sdp = sdputils.transferSimulcastProperties(
|
||||
test.originalOffer.sdp, test._remote_answer.sdp);
|
||||
info("Answer with RIDs: " + JSON.stringify(test._remote_answer));
|
||||
ok(test._remote_answer.sdp.match(/a=simulcast:/), "Modified answer has simulcast");
|
||||
ok(test._remote_answer.sdp.match(/a=rid:foo/), "Modified answer has rid foo");
|
||||
ok(test._remote_answer.sdp.match(/a=rid:bar/), "Modified answer has rid bar");
|
||||
ok(test._remote_answer.sdp.match(/urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id/), "Modified answer has RID");
|
||||
}
|
||||
]);
|
||||
const sender = offerer.getSenders()[0];
|
||||
sender.setParameters({
|
||||
encodings: [
|
||||
{ rid: '0', maxBitrate: 40000 },
|
||||
{ rid: '1', maxBitrate: 40000, scaleResolutionDownBy: 2 }
|
||||
]
|
||||
});
|
||||
|
||||
// For storing the rid extension so it can be checked in the RTP
|
||||
let ridExtensionId = 0;
|
||||
const offer = await offerer.createOffer();
|
||||
|
||||
// do this after set local description so the MediaPipeline
|
||||
// has been created.
|
||||
test.chain.insertAfter('PC_REMOTE_SET_LOCAL_DESCRIPTION',[
|
||||
function PC_REMOTE_SET_RTP_FIRST_RID(test) {
|
||||
const extmap_id = test.originalOffer.sdp.match(
|
||||
"a=extmap:([0-9+])/sendonly urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id");
|
||||
ok(extmap_id, "Original offer has extmap id for simulcast: " + extmap_id[1]);
|
||||
ridExtensionId = extmap_id[1];
|
||||
// Cause pcRemote to filter out everything but RID "foo", only
|
||||
// allowing one of the simulcast streams through.
|
||||
addRIDExtension(test.pcRemote, extmap_id[1]);
|
||||
selectRecvRID(test.pcRemote, "foo");
|
||||
}
|
||||
]);
|
||||
const mungedOffer = ridToMid(offer.sdp);
|
||||
info(`Transformed send simulcast offer to multiple m-sections: ${offer.sdp} to ${mungedOffer}`);
|
||||
|
||||
let getRtpPacket = (pc) => {
|
||||
pc.mozEnablePacketDump(0, "rtp", false);
|
||||
return new Promise((res, rej) =>
|
||||
pc.mozSetPacketCallback((...args) => {
|
||||
res([...args]);
|
||||
pc.mozSetPacketCallback(() => {});
|
||||
pc.mozDisablePacketDump(0, "rtp", false);
|
||||
})
|
||||
);
|
||||
}
|
||||
await answerer.setRemoteDescription({type: 'offer', sdp: mungedOffer});
|
||||
await offerer.setLocalDescription(offer);
|
||||
|
||||
test.chain.insertBefore('PC_REMOTE_WAIT_FOR_MEDIA_FLOW', [
|
||||
async function PC_REMOTE_CHECK_RID_IN_RTP() {
|
||||
let pc = SpecialPowers.wrap(test.pcRemote._pc);
|
||||
let [level, type, sending, data] = await getRtpPacket(pc);
|
||||
let extensions = ParseRtpPacket(data).header.extensions;
|
||||
ok(ridExtensionId, "RID extension ID has been extracted from SDP");
|
||||
let ridExt = extensions.find(e => e.id == ridExtensionId);
|
||||
ok(ridExt, "RID is present in RTP.");
|
||||
is(new TextDecoder('utf-8').decode(ridExt.data), "foo",
|
||||
"RID is 'foo'.");
|
||||
}
|
||||
]);
|
||||
const rids = answerer.getTransceivers().map(t => t.mid);
|
||||
is(rids.length, 2, 'Should have 2 mids in offer');
|
||||
ok(rids[0] != '', 'First mid should be non-empty');
|
||||
ok(rids[1] != '', 'Second mid should be non-empty');
|
||||
info(`rids: ${JSON.stringify(rids)}`);
|
||||
|
||||
test.chain.append([
|
||||
async function PC_REMOTE_WAIT_FOR_FRAMES() {
|
||||
const vremote = test.pcRemote.remoteMediaElements[0];
|
||||
ok(vremote, "Should have remote video element for pcRemote");
|
||||
emitter.start();
|
||||
await helper.checkVideoPlaying(vremote);
|
||||
emitter.stop();
|
||||
},
|
||||
function PC_REMOTE_CHECK_SIZE_1() {
|
||||
const vlocal = test.pcLocal.localMediaElements[0];
|
||||
const vremote = test.pcRemote.remoteMediaElements[0];
|
||||
ok(vlocal, "Should have local video element for pcLocal");
|
||||
ok(vremote, "Should have remote video element for pcRemote");
|
||||
is(vlocal.videoWidth, 50, "correct source width");
|
||||
is(vlocal.videoHeight, 50, "correct source height");
|
||||
is(vremote.videoWidth, 48,
|
||||
"sink is same width as source, modulo our cropping algorithm");
|
||||
is(vremote.videoHeight, 48,
|
||||
"sink is same height as source, modulo our cropping algorithm");
|
||||
},
|
||||
function PC_REMOTE_SET_RTP_SECOND_RID(test) {
|
||||
// Now, cause pcRemote to filter out everything but RID "bar", only
|
||||
// allowing the other simulcast stream through.
|
||||
selectRecvRID(test.pcRemote, "bar");
|
||||
},
|
||||
function PC_REMOTE_WAIT_FOR_SECOND_MEDIA_FLOW(test) {
|
||||
return test.pcRemote.waitForMediaFlow();
|
||||
},
|
||||
async function PC_REMOTE_WAIT_FOR_FRAMES_2() {
|
||||
const vremote = test.pcRemote.remoteMediaElements[0];
|
||||
ok(vremote, "Should have remote video element for pcRemote");
|
||||
emitter.start();
|
||||
await helper.checkVideoPlaying(vremote);
|
||||
emitter.stop();
|
||||
},
|
||||
// For some reason, even though we're getting a 25x25 stream, sometimes
|
||||
// the resolution isn't updated on the video element on the first frame.
|
||||
async function PC_REMOTE_WAIT_FOR_FRAMES_3() {
|
||||
const vremote = test.pcRemote.remoteMediaElements[0];
|
||||
ok(vremote, "Should have remote video element for pcRemote");
|
||||
emitter.start();
|
||||
await helper.checkVideoPlaying(vremote);
|
||||
emitter.stop();
|
||||
},
|
||||
function PC_REMOTE_CHECK_SIZE_2() {
|
||||
const vlocal = test.pcLocal.localMediaElements[0];
|
||||
const vremote = test.pcRemote.remoteMediaElements[0];
|
||||
ok(vlocal, "Should have local video element for pcLocal");
|
||||
ok(vremote, "Should have remote video element for pcRemote");
|
||||
is(vlocal.videoWidth, 50, "correct source width");
|
||||
is(vlocal.videoHeight, 50, "correct source height");
|
||||
is(vremote.videoWidth, 24,
|
||||
"sink is 1/2 width of source, modulo our cropping algorithm");
|
||||
is(vremote.videoHeight, 24,
|
||||
"sink is 1/2 height of source, modulo our cropping algorithm");
|
||||
},
|
||||
]);
|
||||
const answer = await answerer.createAnswer();
|
||||
|
||||
await test.run();
|
||||
const mungedAnswer = midToRid(answer.sdp);
|
||||
info(`Transformed recv answer to simulcast: ${answer.sdp} to ${mungedAnswer}`);
|
||||
await offerer.setRemoteDescription({type: 'answer', sdp: mungedAnswer});
|
||||
await answerer.setLocalDescription(answer);
|
||||
|
||||
is(metadataToBeLoaded.length, 2, 'Offerer should have gotten 2 ontrack events');
|
||||
info('Waiting for 2 loadedmetadata events');
|
||||
const videoElems = await Promise.all(metadataToBeLoaded);
|
||||
|
||||
const helper = new VideoStreamHelper();
|
||||
info('Waiting for first video element to start playing');
|
||||
await helper.checkVideoPlaying(videoElems[0]);
|
||||
info('Waiting for second video element to start playing');
|
||||
await helper.checkVideoPlaying(videoElems[1]);
|
||||
|
||||
is(videoElems[0].videoWidth, 48,
|
||||
"sink is same width as source, modulo our cropping algorithm");
|
||||
is(videoElems[0].videoHeight, 48,
|
||||
"sink is same height as source, modulo our cropping algorithm");
|
||||
is(videoElems[1].videoWidth, 24,
|
||||
"sink is 1/2 width of source, modulo our cropping algorithm");
|
||||
is(videoElems[1].videoHeight, 24,
|
||||
"sink is 1/2 height of source, modulo our cropping algorithm");
|
||||
|
||||
emitter.stop();
|
||||
videoStream.getVideoTracks()[0].stop();
|
||||
offerer.close();
|
||||
answerer.close();
|
||||
});
|
||||
</script>
|
||||
</pre>
|
||||
|
|
|
@@ -4,6 +4,8 @@
  <script type="application/javascript" src="pc.js"></script>
  <script type="application/javascript" src="parser_rtp.js"></script>
  <script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
  <script type="application/javascript" src="helpers_from_wpt/sdp.js"></script>
  <script type="application/javascript" src="simulcast.js"></script>
</head>
<body>
<pre id="test">
@@ -14,172 +16,85 @@
|
|||
visible: true
|
||||
});
|
||||
|
||||
function addRIDExtension(pc, extensionId) {
|
||||
const receivers = pc._pc.getReceivers();
|
||||
is(receivers.length, 1, "We have exactly one RTP receiver");
|
||||
const receiver = receivers[0];
|
||||
|
||||
SpecialPowers.wrap(receiver).mozAddRIDExtension(extensionId);
|
||||
}
|
||||
|
||||
function selectRecvRID(pc, rid) {
|
||||
const receivers = pc._pc.getReceivers();
|
||||
is(receivers.length, 1, "We have exactly one RTP receiver");
|
||||
const receiver = receivers[0];
|
||||
|
||||
SpecialPowers.wrap(receiver).mozAddRIDFilter(rid);
|
||||
}
|
||||
|
||||
runNetworkTest(async () => {
|
||||
await pushPrefs(
|
||||
['media.peerconnection.simulcast', true],
|
||||
// 180Kbps was determined empirically, set well-higher than
|
||||
// the 80Kbps+overhead needed for the two simulcast streams.
|
||||
// 100Kbps was apparently too low.
|
||||
['media.peerconnection.video.min_bitrate_estimate', 180*1000],
|
||||
);
|
||||
['media.peerconnection.simulcast', true],
|
||||
// 180Kbps was determined empirically, set well-higher than
|
||||
// the 80Kbps+overhead needed for the two simulcast streams.
|
||||
// 100Kbps was apparently too low.
|
||||
['media.peerconnection.video.min_bitrate_estimate', 180*1000]);
|
||||
|
||||
let emitter, helper;
|
||||
|
||||
const test = new PeerConnectionTest({bundle: false});
|
||||
test.setMediaConstraints([{video: true}], []);
|
||||
const offerer = new RTCPeerConnection();
|
||||
const answerer = new RTCPeerConnection();
|
||||
|
||||
test.chain.replace("PC_LOCAL_GUM", [
|
||||
function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
|
||||
emitter = new VideoFrameEmitter();
|
||||
helper = new VideoStreamHelper();
|
||||
test.pcLocal.attachLocalStream(emitter.stream());
|
||||
emitter.start();
|
||||
}
|
||||
]);
|
||||
const add = (pc, can, failed) => can && pc.addIceCandidate(can).catch(failed);
|
||||
offerer.onicecandidate = e => add(answerer, e.candidate, generateErrorCallback());
|
||||
answerer.onicecandidate = e => add(offerer, e.candidate, generateErrorCallback());
|
||||
|
||||
test.chain.insertBefore('PC_LOCAL_CREATE_OFFER', [
function PC_LOCAL_SET_RIDS(test) {
const senders = test.pcLocal._pc.getSenders();
is(senders.length, 1, "We have exactly one RTP sender");
const sender = senders[0];
ok(sender.track, "Sender has a track");
const metadataToBeLoaded = [];
answerer.ontrack = (e) => {
metadataToBeLoaded.push(getPlaybackWithLoadedMetadata(e.track));
};

return sender.setParameters({
encodings: [{ rid: "foo", maxBitrate: 40000, scaleResolutionDownBy: 2 },
{ rid: "bar", maxBitrate: 40000 }]
});
}
]);
// One send transceiver, that will be used to send both simulcast streams
const emitter = new VideoFrameEmitter();
const videoStream = emitter.stream();
offerer.addTrack(videoStream.getVideoTracks()[0], videoStream);
emitter.start();

test.chain.insertAfter('PC_LOCAL_GET_ANSWER', [
function PC_LOCAL_ADD_RIDS_TO_ANSWER(test) {
test._remote_answer.sdp = sdputils.transferSimulcastProperties(
test.originalOffer.sdp, test._remote_answer.sdp);
info("Answer with RIDs: " + JSON.stringify(test._remote_answer));
ok(test._remote_answer.sdp.match(/a=simulcast:/), "Modified answer has simulcast");
ok(test._remote_answer.sdp.match(/a=rid:foo/), "Modified answer has rid foo");
ok(test._remote_answer.sdp.match(/a=rid:bar/), "Modified answer has rid bar");
ok(test._remote_answer.sdp.match(/urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id/), "Modified answer has RID");
}
]);
const sender = offerer.getSenders()[0];
sender.setParameters({
encodings: [
{ rid: '0', maxBitrate: 40000, scaleResolutionDownBy: 2 },
{ rid: '1', maxBitrate: 40000 }
]
});

// For storing the rid extension so it can be checked in the RTP
let ridExtensionId = 0;
const offer = await offerer.createOffer();

// do this after set local description so the MediaPipeline
// has been created.
test.chain.insertAfter('PC_REMOTE_SET_LOCAL_DESCRIPTION',[
function PC_REMOTE_SET_RTP_FIRST_RID(test) {
const extmap_id = test.originalOffer.sdp.match(
"a=extmap:([0-9+])/sendonly urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id");
ok(extmap_id, "Original offer has extmap id for simulcast: " + extmap_id[1]);
ridExtensionId = extmap_id[1];
// Cause pcRemote to filter out everything but RID "foo", only
// allowing one of the simulcast streams through.
addRIDExtension(test.pcRemote, extmap_id[1]);
selectRecvRID(test.pcRemote, "foo");
}
]);
const mungedOffer = ridToMid(offer.sdp);
info(`Transformed send simulcast offer to multiple m-sections: ${offer.sdp} to ${mungedOffer}`);

let getRtpPacket = (pc) => {
pc.mozEnablePacketDump(0, "rtp", false);
return new Promise((res, rej) =>
pc.mozSetPacketCallback((...args) => {
res([...args]);
pc.mozSetPacketCallback(() => {});
pc.mozDisablePacketDump(0, "rtp", false);
})
);
}
await answerer.setRemoteDescription({type: 'offer', sdp: mungedOffer});
await offerer.setLocalDescription(offer);

test.chain.insertBefore('PC_REMOTE_WAIT_FOR_MEDIA_FLOW', [
async function PC_REMOTE_CHECK_RID_IN_RTP() {
let pc = SpecialPowers.wrap(test.pcRemote._pc);
let [level, type, sending, data] = await getRtpPacket(pc);
let extensions = ParseRtpPacket(data).header.extensions;
ok(ridExtensionId, "RID extension ID has been extracted from SDP");
let ridExt = extensions.find(e => e.id == ridExtensionId);
ok(ridExt, "RID is present in RTP.");
is(new TextDecoder('utf-8').decode(ridExt.data), "foo",
"RID is 'foo'.");
}
]);
const rids = answerer.getTransceivers().map(t => t.mid);
is(rids.length, 2, 'Should have 2 mids in offer');
ok(rids[0] != '', 'First mid should be non-empty');
ok(rids[1] != '', 'Second mid should be non-empty');
info(`rids: ${JSON.stringify(rids)}`);

test.chain.append([
async function PC_REMOTE_WAIT_FOR_FRAMES() {
const vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
emitter.start();
await helper.checkVideoPlaying(vremote);
emitter.stop();
},
function PC_REMOTE_CHECK_SIZE_1() {
const vlocal = test.pcLocal.localMediaElements[0];
const vremote = test.pcRemote.remoteMediaElements[0];
ok(vlocal, "Should have local video element for pcLocal");
ok(vremote, "Should have remote video element for pcRemote");
is(vlocal.videoWidth, 50, "correct source width");
is(vlocal.videoHeight, 50, "correct source height");
is(vremote.videoWidth, 24,
"sink is 1/2 width of source, modulo our cropping algorithm");
is(vremote.videoHeight, 24,
"sink is 1/2 height of source, modulo our cropping algorithm");
},
function PC_REMOTE_SET_RTP_SECOND_RID(test) {
// Now, cause pcRemote to filter out everything but RID "bar", only
// allowing the other simulcast stream through.
selectRecvRID(test.pcRemote, "bar");
},
function PC_REMOTE_WAIT_FOR_SECOND_MEDIA_FLOW(test) {
return test.pcRemote.waitForMediaFlow();
},
async function PC_REMOTE_WAIT_FOR_FRAMES_2() {
const vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
emitter.start();
await helper.checkVideoPlaying(vremote);
emitter.stop();
},
// For some reason, even though we're getting a 25x25 stream, sometimes
// the resolution isn't updated on the video element on the first frame.
async function PC_REMOTE_WAIT_FOR_FRAMES_3() {
const vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
emitter.start();
await helper.checkVideoPlaying(vremote);
emitter.stop();
},
function PC_REMOTE_CHECK_SIZE_2() {
const vlocal = test.pcLocal.localMediaElements[0];
const vremote = test.pcRemote.remoteMediaElements[0];
ok(vlocal, "Should have local video element for pcLocal");
ok(vremote, "Should have remote video element for pcRemote");
is(vlocal.videoWidth, 50, "correct source width");
is(vlocal.videoHeight, 50, "correct source height");
is(vremote.videoWidth, 48,
"sink is same width as source, modulo our cropping algorithm");
is(vremote.videoHeight, 48,
"sink is same height as source, modulo our cropping algorithm");
},
]);
const answer = await answerer.createAnswer();

await test.run();
const mungedAnswer = midToRid(answer.sdp);
info(`Transformed recv answer to simulcast: ${answer.sdp} to ${mungedAnswer}`);
await offerer.setRemoteDescription({type: 'answer', sdp: mungedAnswer});
await answerer.setLocalDescription(answer);

is(metadataToBeLoaded.length, 2, 'Offerer should have gotten 2 ontrack events');
info('Waiting for 2 loadedmetadata events');
const videoElems = await Promise.all(metadataToBeLoaded);

const helper = new VideoStreamHelper();
info('Waiting for first video element to start playing');
await helper.checkVideoPlaying(videoElems[0]);
info('Waiting for second video element to start playing');
await helper.checkVideoPlaying(videoElems[1]);

is(videoElems[1].videoWidth, 48,
"sink is same width as source, modulo our cropping algorithm");
is(videoElems[1].videoHeight, 48,
"sink is same height as source, modulo our cropping algorithm");
is(videoElems[0].videoWidth, 24,
"sink is 1/2 width of source, modulo our cropping algorithm");
is(videoElems[0].videoHeight, 24,
"sink is 1/2 height of source, modulo our cropping algorithm");

emitter.stop();
videoStream.getVideoTracks()[0].stop();
offerer.close();
answerer.close();
});
</script>
</pre>

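For context, the rewritten tests drive simulcast entirely through the standard RTCRtpSender encodings API rather than the removed moz* hooks. A hedged, self-contained sender-side sketch, in which the getUserMedia capture and the function name are assumptions while the rid values and bitrates simply mirror the test above:

// Illustrative sketch only: declare two simulcast encodings up front and
// read them back with getParameters().
async function simulcastSenderSketch() {
  const pc = new RTCPeerConnection();
  const stream = await navigator.mediaDevices.getUserMedia({ video: true });
  const { sender } = pc.addTransceiver(stream.getVideoTracks()[0], {
    direction: "sendonly",
    sendEncodings: [
      { rid: "0", maxBitrate: 40000, scaleResolutionDownBy: 2 },
      { rid: "1", maxBitrate: 40000 },
    ],
  });
  const { encodings } = sender.getParameters();
  console.assert(encodings.length === 2, "two simulcast encodings configured");
  console.assert(encodings[0].rid === "0" && encodings[1].rid === "1",
                 "rids preserved in sender parameters");
  // The resulting offer carries a=rid and a=simulcast lines.
  return pc.createOffer();
}
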
@ -353,31 +353,6 @@ void MediaPipeline::UpdateTransport_s(
}
}

void MediaPipeline::AddRIDExtension_m(size_t aExtensionId) {
RUN_ON_THREAD(mStsThread,
WrapRunnable(RefPtr<MediaPipeline>(this),
&MediaPipeline::AddRIDExtension_s, aExtensionId),
NS_DISPATCH_NORMAL);
}

void MediaPipeline::AddRIDExtension_s(size_t aExtensionId) {
mRtpParser->RegisterRtpHeaderExtension(webrtc::kRtpExtensionRtpStreamId,
aExtensionId);
}

void MediaPipeline::AddRIDFilter_m(const std::string& aRid) {
RUN_ON_THREAD(mStsThread,
WrapRunnable(RefPtr<MediaPipeline>(this),
&MediaPipeline::AddRIDFilter_s, aRid),
NS_DISPATCH_NORMAL);
}

void MediaPipeline::AddRIDFilter_s(const std::string& aRid) {
// Running a simulcast test, ignore other filtering
mFilter = MakeUnique<MediaPipelineFilter>();
mFilter->AddRemoteRtpStreamId(aRid);
}

void MediaPipeline::GetContributingSourceStats(
const nsString& aInboundRtpStreamId,
FallibleTArray<dom::RTCRTPContributingSourceStats>& aArr) const {

@ -103,15 +103,6 @@ class MediaPipeline : public sigslot::has_slots<> {
void UpdateTransport_s(const std::string& aTransportId,
UniquePtr<MediaPipelineFilter>&& aFilter);

// Used only for testing; adds RTP header extension for RTP Stream Id with
// the given id.
void AddRIDExtension_m(size_t aExtensionId);
void AddRIDExtension_s(size_t aExtensionId);
// Used only for testing; installs a MediaPipelineFilter that filters
// everything but the given RID
void AddRIDFilter_m(const std::string& aRid);
void AddRIDFilter_s(const std::string& aRid);

virtual DirectionType Direction() const { return mDirection; }
size_t Level() const { return mLevel; }
virtual bool IsVideo() const = 0;

@ -33,21 +33,6 @@ void MediaPipelineFilter::SetRemoteMediaStreamId(
}
}

void MediaPipelineFilter::AddRtpExtensionMapping(
const webrtc::RtpExtension& aExt) {
mExtMap.push_back(aExt);
}

Maybe<webrtc::RtpExtension> MediaPipelineFilter::GetRtpExtension(
const std::string& aUri) const {
for (const auto& ext : mExtMap) {
if (ext.uri == aUri) {
return Some(ext);
}
}
return Nothing();
}

bool MediaPipelineFilter::Filter(const webrtc::RTPHeader& header) {
DEBUG_LOG(("MediaPipelineFilter inspecting seq# %u SSRC: %u",
header.sequenceNumber, header.ssrc));

@ -97,17 +82,6 @@ bool MediaPipelineFilter::Filter(const webrtc::RTPHeader& header) {
// RTP-STREAM-ID based filtering (for tests only)
//

const auto streamId = fromStreamId(header.extension.stream_id);
if (streamId && !remote_rid_set_.empty()) {
if (remote_rid_set_.count(streamId.value())) {
DEBUG_LOG(("MediaPipelineFilter RID: %s matched. passing packet",
streamId.value().c_str()));
return true;
}
DEBUG_LOG(("MediaPipelineFilter RID: %s did not match any of %zu RIDs",
streamId.value().c_str(), remote_rid_set_.size()));
}

//
// Remote SSRC based filtering
//

@ -152,10 +126,6 @@ void MediaPipelineFilter::AddRemoteSSRC(uint32_t ssrc) {
remote_ssrc_set_.insert(ssrc);
}

void MediaPipelineFilter::AddRemoteRtpStreamId(const std::string& rtp_strm_id) {
remote_rid_set_.insert(rtp_strm_id);
}

void MediaPipelineFilter::AddUniquePT(uint8_t payload_type) {
payload_type_set_.insert(payload_type);
}

@ -62,9 +62,6 @@ class MediaPipelineFilter {
bool Filter(const webrtc::RTPHeader& header);

void AddRemoteSSRC(uint32_t ssrc);
void AddRemoteRtpStreamId(const std::string& rtp_strm_id);
// Used to set Extention Mappings for tests
void AddRtpExtensionMapping(const webrtc::RtpExtension& aExt);

void SetRemoteMediaStreamId(const Maybe<std::string>& aMid);

@ -76,13 +73,10 @@ class MediaPipelineFilter {
std::vector<webrtc::RtpExtension> GetExtmap() const { return mExtMap; }

private:
Maybe<webrtc::RtpExtension> GetRtpExtension(const std::string& aUri) const;
// The number of filters we manage here is quite small, so I am optimizing
// for readability.
std::set<uint32_t> remote_ssrc_set_;
std::set<uint8_t> payload_type_set_;
// Set by tests, sticky
std::set<std::string> remote_rid_set_;
Maybe<std::string> mRemoteMid;
std::set<uint32_t> mRemoteMidBindings;
// RID extension can be set by tests and is sticky, the rest of

@ -18,10 +18,6 @@ interface RTCRtpReceiver {
[Pref="media.peerconnection.rtpsourcesapi.enabled"]
sequence<RTCRtpSynchronizationSource> getSynchronizationSources();

[ChromeOnly]
void mozAddRIDExtension(unsigned short extensionId);
[ChromeOnly]
void mozAddRIDFilter(DOMString rid);
// test-only: for testing getContributingSources
[ChromeOnly]
void mozInsertAudioLevelForContributingSource(unsigned long source,