Backed out 2 changesets (bug 1553215) for Web platform tests failures in mediastreamaudiosourcenode-routing.html

Backed out changeset 992d3dd0cee0 (bug 1553215)
Backed out changeset fad5e3bf0695 (bug 1553215)
This commit is contained in:
Oana Pop Rus 2019-06-12 15:51:21 +03:00
Parent b4164c9465
Commit 6a33762809
7 changed files: 58 additions and 308 deletions

View file

@ -14,8 +14,7 @@ namespace mozilla {
namespace dom {
MediaElementAudioSourceNode::MediaElementAudioSourceNode(AudioContext* aContext)
: MediaStreamAudioSourceNode(aContext, TrackChangeBehavior::FollowChanges) {
}
: MediaStreamAudioSourceNode(aContext) {}
/* static */
already_AddRefed<MediaElementAudioSourceNode>

View file

@ -13,7 +13,6 @@
#include "mozilla/CORSMode.h"
#include "nsContentUtils.h"
#include "nsIScriptError.h"
#include "nsID.h"
namespace mozilla {
namespace dom {
@ -38,11 +37,9 @@ NS_INTERFACE_MAP_END_INHERITING(AudioNode)
NS_IMPL_ADDREF_INHERITED(MediaStreamAudioSourceNode, AudioNode)
NS_IMPL_RELEASE_INHERITED(MediaStreamAudioSourceNode, AudioNode)
MediaStreamAudioSourceNode::MediaStreamAudioSourceNode(
AudioContext* aContext, TrackChangeBehavior aBehavior)
MediaStreamAudioSourceNode::MediaStreamAudioSourceNode(AudioContext* aContext)
: AudioNode(aContext, 2, ChannelCountMode::Max,
ChannelInterpretation::Speakers),
mBehavior(aBehavior) {}
ChannelInterpretation::Speakers) {}
/* static */
already_AddRefed<MediaStreamAudioSourceNode> MediaStreamAudioSourceNode::Create(
@ -66,7 +63,7 @@ already_AddRefed<MediaStreamAudioSourceNode> MediaStreamAudioSourceNode::Create(
}
RefPtr<MediaStreamAudioSourceNode> node =
new MediaStreamAudioSourceNode(&aAudioContext, LockOnTrackPicked);
new MediaStreamAudioSourceNode(&aAudioContext);
node->Init(aOptions.mMediaStream, aRv);
if (aRv.Failed()) {
@ -99,7 +96,7 @@ void MediaStreamAudioSourceNode::Init(DOMMediaStream* aMediaStream,
if (mInputStream->Active()) {
NotifyActive();
}
AttachToRightTrack(mInputStream, aRv);
AttachToFirstTrack(mInputStream);
}
void MediaStreamAudioSourceNode::Destroy() {
@ -140,35 +137,14 @@ void MediaStreamAudioSourceNode::DetachFromTrack() {
}
}
static int AudioTrackCompare(const RefPtr<AudioStreamTrack>& aLhs,
const RefPtr<AudioStreamTrack>& aRhs) {
nsAutoStringN<NSID_LENGTH> IDLhs;
nsAutoStringN<NSID_LENGTH> IDRhs;
aLhs->GetId(IDLhs);
aRhs->GetId(IDRhs);
return NS_ConvertUTF16toUTF8(IDLhs).Compare(
NS_ConvertUTF16toUTF8(IDRhs).get());
}
void MediaStreamAudioSourceNode::AttachToRightTrack(
const RefPtr<DOMMediaStream>& aMediaStream, ErrorResult& aRv) {
void MediaStreamAudioSourceNode::AttachToFirstTrack(
const RefPtr<DOMMediaStream>& aMediaStream) {
nsTArray<RefPtr<AudioStreamTrack>> tracks;
aMediaStream->GetAudioTracks(tracks);
if (tracks.IsEmpty() && mBehavior == LockOnTrackPicked) {
aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
return;
}
// Sort the track to have a stable order, on their ID by lexicographic
// ordering on sequences of code unit values.
tracks.Sort(AudioTrackCompare);
for (const RefPtr<AudioStreamTrack>& track : tracks) {
if (mBehavior == FollowChanges) {
if (track->Ended()) {
continue;
}
if (track->Ended()) {
continue;
}
AttachToTrack(track);
@ -182,9 +158,6 @@ void MediaStreamAudioSourceNode::AttachToRightTrack(
void MediaStreamAudioSourceNode::NotifyTrackAdded(
const RefPtr<MediaStreamTrack>& aTrack) {
if (mBehavior != FollowChanges) {
return;
}
if (mInputTrack) {
return;
}
@ -198,14 +171,12 @@ void MediaStreamAudioSourceNode::NotifyTrackAdded(
void MediaStreamAudioSourceNode::NotifyTrackRemoved(
const RefPtr<MediaStreamTrack>& aTrack) {
if (mBehavior == FollowChanges) {
if (aTrack != mInputTrack) {
return;
}
DetachFromTrack();
AttachToRightTrack(mInputStream, IgnoreErrors());
if (aTrack != mInputTrack) {
return;
}
DetachFromTrack();
AttachToFirstTrack(mInputStream);
}
void MediaStreamAudioSourceNode::NotifyActive() {

View file

@ -80,10 +80,8 @@ class MediaStreamAudioSourceNode
// Detaches from the currently attached track if there is one.
void DetachFromTrack();
// Attaches to the first audio track in the MediaStream, when the tracks are
// ordered by id.
void AttachToRightTrack(const RefPtr<DOMMediaStream>& aMediaStream,
ErrorResult& aRv);
// Attaches to the first available audio track in aMediaStream.
void AttachToFirstTrack(const RefPtr<DOMMediaStream>& aMediaStream);
// From DOMMediaStream::TrackListener.
void NotifyTrackAdded(const RefPtr<MediaStreamTrack>& aTrack) override;
@ -93,27 +91,13 @@ class MediaStreamAudioSourceNode
// From PrincipalChangeObserver<MediaStreamTrack>.
void PrincipalChanged(MediaStreamTrack* aMediaStreamTrack) override;
// This allows implementing the correct behaviour for both
// MediaElementAudioSourceNode and MediaStreamAudioSourceNode, that have most
// of their behaviour shared.
enum TrackChangeBehavior {
// MediaStreamAudioSourceNode locks on the track it picked, and never
// changes.
LockOnTrackPicked,
// MediaElementAudioSourceNode can change track, depending on what the
// HTMLMediaElement does.
FollowChanges
};
protected:
MediaStreamAudioSourceNode(AudioContext* aContext,
TrackChangeBehavior aBehavior);
explicit MediaStreamAudioSourceNode(AudioContext* aContext);
void Init(DOMMediaStream* aMediaStream, ErrorResult& aRv);
virtual void Destroy();
virtual ~MediaStreamAudioSourceNode();
private:
const TrackChangeBehavior mBehavior;
RefPtr<MediaInputPort> mInputPort;
RefPtr<DOMMediaStream> mInputStream;

View file

@ -33,7 +33,7 @@ function tryToCreateNodeOnClosedContext(ctx) {
args: [new Audio()],
onOfflineAudioContext: false },
{ name: "createMediaStreamSource",
args: [(new AudioContext()).createMediaStreamDestination().stream],
args: [new Audio().mozCaptureStream()],
onOfflineAudioContext: false } ].forEach(function(e) {
if (e.onOfflineAudioContext == false &&
@ -74,6 +74,7 @@ function tryLegalOpeerationsOnClosedContext(ctx) {
loadFile("ting-44.1k-1ch.ogg", function(buf) {
ctx.decodeAudioData(buf).then(function(decodedBuf) {
ok(true, "decodeAudioData on a closed context should work, it did.")
todo(false, "0 " + (ctx instanceof OfflineAudioContext ? "Offline" : "Realtime"));
finish();
}).catch(function(e){
ok(false, "decodeAudioData on a closed context should work, it did not");
@ -120,6 +121,8 @@ function testMultiContextOutput() {
}
}
todo(false, "input buffer is " + (silent ? "silent" : "noisy"));
if (silent) {
silentBuffersInARow++;
if (silentBuffersInARow == 10) {
@ -128,6 +131,7 @@ function testMultiContextOutput() {
sp2.onaudioprocess = null;
ac1.close();
ac2.close();
todo(false,"1");
finish();
}
} else {
@ -178,6 +182,7 @@ function testMultiContextInput() {
ac2.close();
sp1.onaudioprocess = null;
sp2.onaudioprocess = null;
todo(false, "2");
finish();
}
}
@ -226,6 +231,7 @@ function testScriptProcessNodeSuspended() {
}
} else {
sp.onaudioprocess = null;
todo(false,"3");
finish();
}
}

View file

@ -12,48 +12,45 @@
SimpleTest.waitForExplicitFinish();
var audio = new Audio("http://example.org:80/tests/dom/media/webaudio/test/small-shot.ogg");
audio.load();
var context = new AudioContext();
audio.onloadedmetadata = function() {
var node = context.createMediaStreamSource(audio.mozCaptureStreamUntilEnded());
var sp = context.createScriptProcessor(2048, 1);
node.connect(sp);
var nonzeroSampleCount = 0;
var complete = false;
var iterationCount = 0;
var node = context.createMediaStreamSource(audio.mozCaptureStreamUntilEnded());
var sp = context.createScriptProcessor(2048, 1);
node.connect(sp);
var nonzeroSampleCount = 0;
var complete = false;
var iterationCount = 0;
// This test ensures we receive at least expectedSampleCount nonzero samples
function processSamples(e) {
if (complete) {
return;
}
if (iterationCount == 0) {
// Don't start playing the audio until the AudioContext stuff is connected
// and running.
audio.play();
}
++iterationCount;
var buf = e.inputBuffer.getChannelData(0);
var nonzeroSamplesThisBuffer = 0;
for (var i = 0; i < buf.length; ++i) {
if (buf[i] != 0) {
++nonzeroSamplesThisBuffer;
}
}
is(nonzeroSamplesThisBuffer, 0,
"Checking all samples are zero");
if (iterationCount >= 20) {
SimpleTest.finish();
complete = true;
}
// This test ensures we receive at least expectedSampleCount nonzero samples
function processSamples(e) {
if (complete) {
return;
}
audio.oncanplaythrough = function() {
sp.onaudioprocess = processSamples;
};
if (iterationCount == 0) {
// Don't start playing the audio until the AudioContext stuff is connected
// and running.
audio.play();
}
++iterationCount;
var buf = e.inputBuffer.getChannelData(0);
var nonzeroSamplesThisBuffer = 0;
for (var i = 0; i < buf.length; ++i) {
if (buf[i] != 0) {
++nonzeroSamplesThisBuffer;
}
}
is(nonzeroSamplesThisBuffer, 0,
"Checking all samples are zero");
if (iterationCount >= 20) {
SimpleTest.finish();
complete = true;
}
}
audio.oncanplaythrough = function() {
sp.onaudioprocess = processSamples;
};
</script>
</pre>
</body>

View file

@ -1,73 +0,0 @@
<!DOCTYPE html>
<html class="a">
<head>
<title>MediaStreamAudioSourceNode</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
<body class="a">
<div id="log"></div>
<script>
// Verifies that constructing a MediaStreamAudioSourceNode — via both the
// AudioContext.createMediaStreamSource() factory method and the
// MediaStreamAudioSourceNode constructor — throws "InvalidStateError" when
// the supplied MediaStream has no audio track (empty stream or video-only
// stream).
setup({explicit_done: true});
// Wait until the DOM is ready to be able to get a reference to the canvas
// element.
window.addEventListener("load", function() {
const ac = new AudioContext();
const emptyStream = new MediaStream();
// Case 1: factory method with a stream that has no tracks at all.
test(function() {
assert_throws(
"InvalidStateError",
function() {
ac.createMediaStreamSource(emptyStream);
},
`A MediaStreamAudioSourceNode can only be constructed via the factory
method with a MediaStream that has at least one track of kind "audio"`
);
}, "MediaStreamAudioSourceNode created with factory method and MediaStream with no tracks");
// Case 2: constructor with the same empty stream.
test(function() {
assert_throws(
"InvalidStateError",
function() {
new MediaStreamAudioSourceNode(ac, { mediaStream: emptyStream });
},
`A MediaStreamAudioSourceNode can only be constructed via the constructor
with a MediaStream that has at least one track of kind "audio"`
);
}, "MediaStreamAudioSourceNode created with constructor and MediaStream with no tracks");
// Build a stream that has a track, but of kind "video" only, by capturing a
// canvas. NOTE(review): the <canvas> element is declared after </body>
// below; the HTML parser relocates it into the body, and the "load"
// listener above guarantees it exists here — confirm this placement is
// intentional.
const canvas = document.querySelector("canvas");
const ctx = canvas.getContext("2d");
const videoOnlyStream = canvas.captureStream();
// Case 3: factory method with a video-only stream.
test(function() {
assert_throws(
"InvalidStateError",
function() {
ac.createMediaStreamSource(videoOnlyStream);
},
`A MediaStreamAudioSourceNode can only be constructed via the factory with a
MediaStream that has at least one track of kind "audio"`
);
}, `MediaStreamAudioSourceNode created with the factory method and MediaStream with only a video track`);
// Case 4: constructor with the same video-only stream.
test(function() {
assert_throws(
"InvalidStateError",
function() {
new MediaStreamAudioSourceNode(ac, {
mediaStream: videoOnlyStream,
});
},
`A MediaStreamAudioSourceNode can only be constructed via the factory with a
MediaStream that has at least one track of kind "audio"`
);
}, `MediaStreamAudioSourceNode created with constructor and MediaStream with only a video track`);
done();
});
</script>
</body>
<canvas></canvas>
</html>

View file

@ -1,134 +0,0 @@
<!DOCTYPE html>
<html class="a">
<head>
<title>MediaStreamAudioSourceNode</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
<body class="a">
<div id="log"></div>
<script>
// Maps a frequency in Hz to the index of the AnalyserNode FFT bin that
// contains it (bin width is sampleRate / fftSize; the +1 skips the DC bin).
function binIndexForFrequency(frequency, analyser) {
return (
1 +
Math.round(
(frequency * analyser.fftSize) / analyser.context.sampleRate
)
);
}
const t = async_test(
"MediaStreamAudioSourceNode captures the right track."
);
const ac = new AudioContext();
// Test that the right track is captured. Set up a MediaStream that has two
// tracks, one with a tone at 100Hz and one with a tone at 1000Hz.
const dest0 = ac.createMediaStreamDestination();
const dest1 = ac.createMediaStreamDestination();
const osc0 = ac.createOscillator();
const osc1 = ac.createOscillator();
osc0.frequency.value = 100;
osc1.frequency.value = 1000;
osc0.connect(dest0);
osc1.connect(dest1);
osc0.start(0);
osc1.start(0);
const track0 = dest0.stream.getAudioTracks()[0];
const track0id = track0.id;
const track1 = dest1.stream.getAudioTracks()[0];
const track1id = track1.id;
// Sort the two track ids lexicographically: the test assumes the node
// captures the track whose id sorts first, so the expected ("target")
// frequency is whichever oscillator feeds that track.
let ids = [track0id, track1id];
ids.sort();
let targetFrequency;
let otherFrequency;
if (ids[0] == track0id) {
targetFrequency = 100;
otherFrequency = 1000;
} else {
targetFrequency = 1000;
otherFrequency = 100;
}
let twoTrackMediaStream = new MediaStream();
twoTrackMediaStream.addTrack(track0);
twoTrackMediaStream.addTrack(track1);
const twoTrackSource = ac.createMediaStreamSource(twoTrackMediaStream);
const analyser = ac.createAnalyser();
twoTrackSource.connect(analyser);
// FFT bins to probe: high energy expected at the captured track's tone,
// low energy expected at the other track's tone.
const indexToCheckForHighEnergy = binIndexForFrequency(
targetFrequency,
analyser
);
const indexToCheckForLowEnergy = binIndexForFrequency(
otherFrequency,
analyser
);
let frequencyData = new Float32Array(1024);
// checkCount drives the test's phases: observe routing, then remove the
// captured track from the stream, then stop it, then finish.
let checkCount = 0;
let numberOfRemovals = 0;
let stopped = false;
// Polled every 100ms; inspects the analyser spectrum and advances the
// test through its phases based on checkCount.
function analyse() {
analyser.getFloatFrequencyData(frequencyData);
// there should be high energy in the right bin, higher than 40dbfs because
// it's supposed to be a sine wave at 0dbfs
if (frequencyData[indexToCheckForHighEnergy] > -40 && !stopped) {
assert_true(true, "Correct track routed to the AudioContext.");
checkCount++;
}
if (stopped && frequencyData[indexToCheckForHighEnergy] < -40) {
assert_true(
true,
`After stopping the track, low energy is found in the
same bin`
);
checkCount++;
}
// Don't assert(false) immediately here if the bin is still higher than
// -40db the analyzer node has a window and it's expecte that it takes some
// time for the volume of this bin to decrease.
if (frequencyData[indexToCheckForLowEnergy] < -80) {
assert_true(true, "Correct track routed to the AudioContext.");
} else {
assert_true(
false,
"Other track seem to be routed to the AudioContext?"
);
}
// Phase 2 (after 5 good checks): remove the captured track from the
// MediaStream; the node is expected to keep playing it regardless.
if (checkCount > 5 && checkCount < 20) {
twoTrackMediaStream.getAudioTracks().forEach(track => {
if (track.id == ids[0]) {
numberOfRemovals++;
window.removedTrack = track;
twoTrackMediaStream.removeTrack(track);
}
});
assert_true(
numberOfRemovals == 1,
`The mediastreamtrack can only be
removed once from the mediastream`
);
} else if (checkCount >= 20 && checkCount < 30) {
// Phase 3: stop the removed track; its energy should now decay.
window.removedTrack.stop();
stopped = true;
} else if (checkCount >= 30) {
// Phase 4: enough successful observations — declare success.
assert_true(
numberOfRemovals == 1,
`After removing the track from the
mediastream, it's still routed to the graph.`
);
// After some time, consider that it worked.
t.done();
return;
}
t.step_timeout(analyse, 100);
}
t.step_timeout(analyse, 100);
</script>
</body>
</html>