<!DOCTYPE HTML>
<html>
<head>
  <title>Test AudioCapture</title>
  <script type="application/javascript" src="mediaStreamPlayback.js"></script>
</head>
<body>
<pre id="test">
<script>
createHTML({
bug: "1156472",
title: "Test AudioCapture with regular HTMLMediaElement, AudioContext, and HTMLMediaElement playing a MediaStream",
visible: true
});
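
// Audio capture exercises the real output path, so fake getUserMedia
// streams are disabled (FAKE_ENABLED = false) before the test runs.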
scriptsReady
  .then(() => FAKE_ENABLED = false)
  .then(() => {
    runTestWhenReady(function() {
      // Get an opus file containing a sine wave at maximum amplitude, of
      // duration `lengthSeconds`, and of frequency `frequency`.
      function getSineWaveFile(frequency, lengthSeconds, callback) {
        var chunks = [];
        var off = new OfflineAudioContext(1, lengthSeconds * 48000, 48000);
        var osc = off.createOscillator();
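        // Gecko's MediaRecorder can record directly from an AudioNode (a
        // non-standard convenience); the spec'd constructor takes a MediaStream.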
        var rec = new MediaRecorder(osc);
        rec.ondataavailable = function(e) {
          chunks.push(e.data);
        };
        rec.onstop = function(e) {
          var blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
          callback(blob);
        };
        osc.frequency.value = frequency;
        osc.start();
        rec.start();
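        // An OfflineAudioContext renders as fast as possible; once rendering
        // resolves, stop the recorder so onstop fires with the encoded chunks.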
        off.startRendering().then(function(buffer) {
          rec.stop();
        });
      }
      /**
       * Get two HTMLMediaElements:
       * - One playing a sine tone from a blob (an opus file created on the fly)
       * - One being the output for an AudioContext's OscillatorNode, connected
       *   to a MediaStreamAudioDestinationNode.
       *
       * Additionally, have the AudioContext play another tone through its
       * AudioDestinationNode, using a second OscillatorNode.
       *
       * Capture the output of the document, feed that back into the
       * AudioContext with an AnalyserNode, and check the frequency content to
       * make sure we have recorded the three sources.
       *
       * The three sine tones have frequencies far apart from each other, so
       * that we can check that the spectrum of the captured stream contains
       * three components with a high magnitude.
       */
      var wavtone = createMediaElement("audio", "WaveTone");
      var acTone = createMediaElement("audio", "audioContextTone");
      var ac = new AudioContext();
      var oscThroughMediaElement = ac.createOscillator();
      oscThroughMediaElement.frequency.value = 1000;
      var oscThroughAudioDestinationNode = ac.createOscillator();
      oscThroughAudioDestinationNode.frequency.value = 5000;
      var msDest = ac.createMediaStreamDestination();
      oscThroughMediaElement.connect(msDest);
      oscThroughAudioDestinationNode.connect(ac.destination);
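      // mozSrcObject is the prefixed precursor of HTMLMediaElement.srcObject.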
      acTone.mozSrcObject = msDest.stream;
      getSineWaveFile(10000, 10, function(blob) {
        wavtone.src = URL.createObjectURL(blob);
        oscThroughMediaElement.start();
        oscThroughAudioDestinationNode.start();
        wavtone.loop = true;
        wavtone.play();
        acTone.play();
      });
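      // "audioCapture" is a Mozilla-specific media source that asks
      // getUserMedia for a capture of the audio the document is playing.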
      var constraints = {audio: {mediaSource: "audioCapture"}};
      return getUserMedia(constraints).then((stream) => {
        window.grip = stream;
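        // Keep a reference to the stream ("grip" it) so it is not
        // garbage-collected while the analysis runs.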
        var analyser = new AudioStreamAnalyser(ac, stream);
        analyser.enableDebugCanvas();
        return analyser.waitForAnalysisSuccess(function(array) {
          // We want to find three frequency components here, around 1000,
          // 5000 and 10000 Hz. Frequency perception is logarithmic, so these
          // are well separated. Also make sure there is low energy in
          // between, not just flat white noise.
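          // (binIndexForFrequency maps a frequency in Hz to an index into the
          // analyser's byte frequency data, whose bins cover 0..sampleRate/2,
          // i.e. roughly frequency / (sampleRate / fftSize). The byte values
          // range from 0 to 255, hence the 50 / 200 thresholds.)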
          return (array[analyser.binIndexForFrequency(50)] < 50 &&
                  array[analyser.binIndexForFrequency(1000)] > 200 &&
                  array[analyser.binIndexForFrequency(2500)] < 50 &&
                  array[analyser.binIndexForFrequency(5000)] > 200 &&
                  array[analyser.binIndexForFrequency(7500)] < 50 &&
                  array[analyser.binIndexForFrequency(10000)] > 200);
        }).then(finish);
      }).catch(finish);
    });
  });
</script>
</pre>
</body>
</html>