Backed out 7 changesets (bug 1659244) for DestinationNodeEngine related crashes. CLOSED TREE

Backed out changeset 50d7aabc075e (bug 1659244)
Backed out changeset da3af45b9195 (bug 1659244)
Backed out changeset 9e748be643e7 (bug 1659244)
Backed out changeset 47a5552dece7 (bug 1659244)
Backed out changeset 9c362f616f93 (bug 1659244)
Backed out changeset 4f1c72021ec9 (bug 1659244)
Backed out changeset 90f3b8edbd8b (bug 1659244)
Razvan Maries 2020-09-22 18:54:47 +03:00
Parent 297501ed08
Commit 655ae40b05
10 changed files: 90 additions and 195 deletions

View file

@@ -11,7 +11,6 @@
 #include "AudioSampleFormat.h"
 #include "WebAudioUtils.h"
-#include "AudioBlock.h"
 
 namespace mozilla {
@@ -26,38 +25,10 @@ class AudibilityMonitor {
         mSilentFramesInARow(0),
         mEverAudible(false) {}
 
-  void Process(const AudioData* aData) {
+  void ProcessAudioData(const AudioData* aData) {
     ProcessInterleaved(aData->Data(), aData->mChannels);
   }
 
-  void Process(const AudioBlock& aData) {
-    if (aData.IsNull() || aData.IsMuted()) {
-      mSilentFramesInARow += aData.GetDuration();
-      return;
-    }
-    ProcessPlanar(aData.ChannelData<AudioDataValue>(), aData.GetDuration());
-  }
-
-  void ProcessPlanar(const nsTArray<const AudioDataValue*>& aPlanar,
-                     TrackTime aFrames) {
-    uint32_t lastFrameAudibleAccrossChannels = 0;
-    for (uint32_t channel = 0; channel < aPlanar.Length(); channel++) {
-      uint32_t lastSampleAudible = 0;
-      for (uint32_t frame = 0; frame < aFrames; frame++) {
-        float dbfs = dom::WebAudioUtils::ConvertLinearToDecibels(
-            abs(AudioSampleToFloat(aPlanar[channel][frame])), -100.f);
-        if (dbfs > AUDIBILITY_THREHSOLD) {
-          mEverAudible = true;
-          mSilentFramesInARow = 0;
-          lastSampleAudible = frame;
-        }
-      }
-      lastFrameAudibleAccrossChannels =
-          std::max(lastFrameAudibleAccrossChannels, lastSampleAudible);
-    }
-    mSilentFramesInARow += aFrames - lastFrameAudibleAccrossChannels - 1;
-  }
-
   void ProcessInterleaved(const Span<AudioDataValue>& aInterleaved,
                           size_t aChannels) {
     MOZ_ASSERT(aInterleaved.Length() % aChannels == 0);
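
For context, the removed Process(AudioBlock) path above scanned every frame of every channel, converted each sample's magnitude to dBFS, and counted how many trailing frames stayed below an audibility threshold. Below is a minimal standalone sketch of the same idea, assuming plain float samples and an illustrative -60 dBFS cutoff (the header's real AUDIBILITY_THREHSOLD constant is defined elsewhere); it paraphrases the removed loop and sidesteps its trailing off-by-one accumulation, so it is not the Gecko implementation:

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <vector>

// Illustrative threshold for this sketch; the real AUDIBILITY_THREHSOLD
// in the header may use a different value.
constexpr float kThresholdDbfs = -60.0f;

// Convert a linear magnitude to dBFS, clamping silence to a floor value.
static float LinearToDecibels(float aLinear, float aMinDecibels) {
  return aLinear > 0.0f ? 20.0f * std::log10(aLinear) : aMinDecibels;
}

// Count the frames after the last audible frame, across all channels:
// the quantity the removed code folded into mSilentFramesInARow.
size_t TrailingSilentFrames(const std::vector<std::vector<float>>& aPlanar,
                            size_t aFrames) {
  size_t framesAudible = 0;  // index just past the last audible frame
  for (const auto& channel : aPlanar) {
    for (size_t frame = 0; frame < aFrames; frame++) {
      if (LinearToDecibels(std::fabs(channel[frame]), -100.0f) >
          kThresholdDbfs) {
        framesAudible = std::max(framesAudible, frame + 1);
      }
    }
  }
  return aFrames - framesAudible;
}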

View file

@@ -208,6 +208,23 @@ struct AudioChunk {
   bool IsMuted() const { return mVolume == 0.0f; }
 
+  bool IsAudible() const {
+    for (auto&& channel : mChannelData) {
+      // Transform sound into dB RMS and assume that the value smaller than
+      // -100 is inaudible.
+      float dbrms = 0.0;
+      for (uint32_t idx = 0; idx < mDuration; idx++) {
+        dbrms += std::pow(static_cast<const AudioDataValue*>(channel)[idx], 2);
+      }
+      dbrms /= mDuration;
+      dbrms = std::sqrt(dbrms) != 0.0 ? 20 * log10(dbrms) : -1000.0;
+      if (dbrms > -100.0) {
+        return true;
+      }
+    }
+    return false;
+  }
+
   size_t SizeOfExcludingThisIfUnshared(MallocSizeOf aMallocSizeOf) const {
     return SizeOfExcludingThis(aMallocSizeOf, true);
   }
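
The restored IsAudible() treats a chunk as audible if any channel's average level exceeds -100 dB, computing the level as 20 * log10 of a per-channel figure. As a worked example, a channel holding a constant amplitude of 1e-5 has rms 1e-5, and 20 * log10(1e-5) = -100 dB, exactly at the threshold. One quirk worth noting: the restored code guards the log with std::sqrt(dbrms) but passes the un-rooted mean square to log10, which doubles the dB figure relative to a true rms reading. A standalone sketch of the conventional rms form, using plain floats rather than Gecko types:

#include <cmath>
#include <cstddef>

// Conventional per-channel rms audibility test; the -100 dB threshold and
// the -1000.0 stand-in for log10(0) mirror the restored IsAudible() above.
bool ChannelIsAudible(const float* aSamples, size_t aFrames) {
  double sumSquares = 0.0;
  for (size_t i = 0; i < aFrames; i++) {
    sumSquares += double(aSamples[i]) * double(aSamples[i]);
  }
  double rms = std::sqrt(sumSquares / double(aFrames));
  double db = rms > 0.0 ? 20.0 * std::log10(rms) : -1000.0;
  return db > -100.0;
}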

View file

@@ -320,7 +320,7 @@ void AudioSink::Errored() {
 void AudioSink::CheckIsAudible(const AudioData* aData) {
   MOZ_ASSERT(aData);
-  mAudibilityMonitor.Process(aData);
+  mAudibilityMonitor.ProcessAudioData(aData);
   bool isAudible = mAudibilityMonitor.RecentlyAudible();
   if (isAudible != mIsAudioDataAudible) {

View file

@@ -679,7 +679,7 @@ void DecodedStream::SendAudio(double aVolume,
 void DecodedStream::CheckIsDataAudible(const AudioData* aData) {
   MOZ_ASSERT(aData);
-  mAudibilityMonitor->Process(aData);
+  mAudibilityMonitor->ProcessAudioData(aData);
   bool isAudible = mAudibilityMonitor->RecentlyAudible();
   if (isAudible != mIsAudioDataAudible) {

View file

@@ -38,6 +38,7 @@ class AudioBlock : private AudioChunk {
   using AudioChunk::ChannelCount;
   using AudioChunk::ChannelData;
   using AudioChunk::GetDuration;
+  using AudioChunk::IsAudible;
   using AudioChunk::IsNull;
   using AudioChunk::SizeOfExcludingThis;
   using AudioChunk::SizeOfExcludingThisIfUnshared;

View file

@@ -5,28 +5,25 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "AudioDestinationNode.h"
+#include "AudioContext.h"
 #include "AlignmentUtils.h"
-#include "AudibilityMonitor.h"
-#include "AudioChannelService.h"
 #include "AudioContext.h"
-#include "AudioContext.h"
-#include "AudioNodeEngine.h"
-#include "AudioNodeTrack.h"
 #include "CubebUtils.h"
-#include "MediaTrackGraph.h"
-#include "mozilla/StaticPrefs_dom.h"
 #include "mozilla/dom/AudioDestinationNodeBinding.h"
 #include "mozilla/dom/BaseAudioContextBinding.h"
 #include "mozilla/dom/OfflineAudioCompletionEvent.h"
-#include "mozilla/dom/Promise.h"
+#include "mozilla/dom/power/PowerManagerService.h"
 #include "mozilla/dom/ScriptSettings.h"
 #include "mozilla/dom/WakeLock.h"
-#include "mozilla/dom/power/PowerManagerService.h"
+#include "AudioChannelService.h"
+#include "AudioNodeEngine.h"
+#include "AudioNodeTrack.h"
+#include "MediaTrackGraph.h"
 #include "nsContentUtils.h"
 #include "nsIInterfaceRequestorUtils.h"
 #include "nsIScriptObjectPrincipal.h"
 #include "nsServiceManagerUtils.h"
+#include "mozilla/dom/Promise.h"
 
 extern mozilla::LazyLogModule gAudioChannelLog;
@@ -190,13 +187,10 @@ class DestinationNodeEngine final : public AudioNodeEngine {
  public:
   explicit DestinationNodeEngine(AudioDestinationNode* aNode)
       : AudioNodeEngine(aNode),
-        mSampleRate(CubebUtils::PreferredSampleRate()),
         mVolume(1.0f),
-        mAudibilityMonitor(
-            mSampleRate,
-            StaticPrefs::dom_media_silence_duration_for_audibility()),
+        mLastInputAudible(false),
         mSuspended(false),
-        mLastInputAudible(false) {
+        mSampleRate(CubebUtils::PreferredSampleRate()) {
     MOZ_ASSERT(aNode);
   }
@@ -210,10 +204,32 @@ class DestinationNodeEngine final : public AudioNodeEngine {
       return;
     }
 
-    mAudibilityMonitor.Process(aInput);
-    bool isInputAudible = mAudibilityMonitor.RecentlyAudible();
-    if (isInputAudible != mLastInputAudible) {
+    bool isInputAudible =
+        !aInput.IsNull() && !aInput.IsMuted() && aInput.IsAudible();
+    auto shouldNotifyChanged = [&]() {
+      // We don't want to notify state changed frequently if the input track is
+      // consist of interleaving audible and inaudible blocks. This situation is
+      // really common, especially when user is using OscillatorNode to produce
+      // sound. Sending unnessary runnable frequently would cause performance
+      // debasing. If the track contains 10 interleaving samples and 5 of them
+      // are audible, others are inaudible, user would tend to feel the track
+      // is audible. Therefore, we have the loose checking when track is
+      // changing from inaudible to audible, but have strict checking when
+      // streaming is changing from audible to inaudible. If the inaudible
+      // blocks continue over a speicific time threshold, then we will treat the
+      // track as inaudible.
+      if (isInputAudible && !mLastInputAudible) {
+        return true;
+      }
+      // Use more strict condition, choosing 1 seconds as a threshold.
+      if (!isInputAudible && mLastInputAudible &&
+          aFrom - mLastInputAudibleTime >= mSampleRate) {
+        return true;
+      }
+      return false;
+    };
+    if (shouldNotifyChanged()) {
       mLastInputAudible = isInputAudible;
       RefPtr<AudioNodeTrack> track = aTrack;
       auto r = [track, isInputAudible]() -> void {
@@ -229,6 +245,10 @@ class DestinationNodeEngine final : public AudioNodeEngine {
       aTrack->Graph()->DispatchToMainThreadStableState(NS_NewRunnableFunction(
           "dom::WebAudioAudibleStateChangedRunnable", r));
     }
+
+    if (isInputAudible) {
+      mLastInputAudibleTime = aFrom;
+    }
   }
 
   bool IsActive() const override {
@@ -242,7 +262,7 @@ class DestinationNodeEngine final : public AudioNodeEngine {
   void SetDoubleParameter(uint32_t aIndex, double aParam) override {
     if (aIndex == VOLUME) {
-      mVolume = static_cast<float>(aParam);
+      mVolume = aParam;
     }
   }
@@ -265,11 +285,11 @@ class DestinationNodeEngine final : public AudioNodeEngine {
   }
 
  private:
-  int mSampleRate;
   float mVolume;
-  AudibilityMonitor mAudibilityMonitor;
-  bool mSuspended;
   bool mLastInputAudible;
+  GraphTime mLastInputAudibleTime = 0;
+  bool mSuspended;
+  int mSampleRate;
 };
 
 NS_IMPL_CYCLE_COLLECTION_INHERITED(AudioDestinationNode, AudioNode,
@@ -493,7 +513,7 @@ AudioDestinationNode::WindowVolumeChanged(float aVolume, bool aMuted) {
           "aVolume = %f, aMuted = %s\n",
           this, aVolume, aMuted ? "true" : "false");
 
-  float volume = aMuted ? 0.0f : aVolume;
+  float volume = aMuted ? 0.0 : aVolume;
   mTrack->SetAudioOutputVolume(nullptr, volume);
 
   AudioChannelService::AudibleState audible =
@@ -639,7 +659,6 @@ void AudioDestinationNode::NotifyAudibleStateChanged(bool aAudible) {
     if (IsCapturingAudio()) {
      StopAudioCapturingTrack();
     }
-    ReleaseAudioWakeLockIfExists();
     return;
   }
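
In place of the AudibilityMonitor, the restored engine pairs the block-level IsAudible() check with asymmetric debouncing: one audible block flips the reported state to audible immediately, while reporting inaudible requires a full second (mSampleRate frames) to pass since the last audible block. That keeps oscillator-style streams, which alternate audible and silent blocks, from flooding the main thread with state-change runnables. A compact sketch of that state machine, with illustrative names and an int64_t frame counter standing in for GraphTime:

#include <cstdint>

// Minimal sketch of the restored audible-state debouncing, outside Gecko:
// report a rising edge (inaudible -> audible) at once, but require the
// input to stay inaudible for a full second of frames before reporting a
// falling edge.
class AudibleEdgeDebouncer {
 public:
  explicit AudibleEdgeDebouncer(int64_t aFramesPerSecond)
      : mFramesPerSecond(aFramesPerSecond) {}

  // Returns true when a state change should be reported. aNow is the
  // current graph time in frames, playing the role of aFrom in the diff.
  bool Update(bool aAudible, int64_t aNow) {
    bool notify = false;
    if (aAudible && !mLastAudible) {
      notify = true;  // loose check: any audible block flips us on
    } else if (!aAudible && mLastAudible &&
               aNow - mLastAudibleTime >= mFramesPerSecond) {
      notify = true;  // strict check: one full second of silence
    }
    if (notify) {
      mLastAudible = aAudible;
    }
    if (aAudible) {
      mLastAudibleTime = aNow;
    }
    return notify;
  }

 private:
  int64_t mFramesPerSecond;
  bool mLastAudible = false;
  int64_t mLastAudibleTime = 0;
};

The asymmetry is deliberate: a stream that is audible half the time reads as audible to the user, so the rising edge is cheap and the falling edge is sticky.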

View file

@@ -36,12 +36,6 @@ function getWakeLockState(topic, needLock, isTabInForeground) {
   return {
     check: async () => {
       if (needLock) {
-        const lockState = powerManager.getWakeLockState(topic);
-        info(`topic=${topic}, state=${lockState}`);
-        if (lockState == `locked-${tabState}`) {
-          ok(true, `requested '${topic}' wakelock in ${tabState}`);
-          return;
-        }
         await promise;
         ok(true, `requested '${topic}' wakelock in ${tabState}`);
       } else {
@@ -74,63 +68,24 @@ async function waitUntilVideoStarted({ muted, volume } = {}) {
   );
 }
 
-async function initializeWebAudio({ suspend } = {}) {
-  if (suspend) {
-    await content.ac.suspend();
-  } else {
-    const ac = content.ac;
-    if (ac.state == "running") {
-      return;
-    }
-    while (ac.state != "running") {
-      await new Promise(r => (ac.onstatechange = r));
-    }
-  }
-}
-
-function webaudioDocument() {
-  content.ac = new content.AudioContext();
-  const ac = content.ac;
-  const dest = ac.destination;
-  const source = new content.OscillatorNode(ac);
-  source.start(ac.currentTime);
-  source.connect(dest);
-}
-
 async function test_media_wakelock({
   description,
-  urlOrFunction,
-  additionalParams,
+  url,
+  videoAttsParams,
   lockAudio,
   lockVideo,
 }) {
   info(`- start a new test for '${description}' -`);
   info(`- open new foreground tab -`);
-  var url;
-  if (typeof urlOrFunction == "string") {
-    url = LOCATION + urlOrFunction;
-  } else {
-    url = "about:blank";
-  }
+  url = LOCATION + url;
   const tab = await BrowserTestUtils.openNewForegroundTab(window.gBrowser, url);
   const browser = tab.linkedBrowser;
-  if (typeof urlOrFunction == "function") {
-    await SpecialPowers.spawn(browser, [], urlOrFunction);
-  }
 
   let audioWakeLock = getWakeLockState("audio-playing", lockAudio, true);
   let videoWakeLock = getWakeLockState("video-playing", lockVideo, true);
 
-  var initFunction = null;
-  if (description.includes("web audio")) {
-    initFunction = initializeWebAudio;
-  } else {
-    initFunction = waitUntilVideoStarted;
-  }
-
   info(`- wait for media starting playing -`);
-  await SpecialPowers.spawn(browser, [additionalParams], initFunction);
+  await SpecialPowers.spawn(browser, [videoAttsParams], waitUntilVideoStarted);
   await audioWakeLock.check();
   await videoWakeLock.check();
@@ -159,14 +114,14 @@ async function test_media_wakelock({
 add_task(async function start_tests() {
   await test_media_wakelock({
     description: "playing video",
-    urlOrFunction: "file_video.html",
+    url: "file_video.html",
     lockAudio: true,
     lockVideo: true,
   });
   await test_media_wakelock({
     description: "playing muted video",
-    urlOrFunction: "file_video.html",
-    additionalParams: {
+    url: "file_video.html",
+    videoAttsParams: {
       muted: true,
     },
     lockAudio: false,
@@ -174,8 +129,8 @@ add_task(async function start_tests() {
   });
   await test_media_wakelock({
     description: "playing volume=0 video",
-    urlOrFunction: "file_video.html",
-    additionalParams: {
+    url: "file_video.html",
+    videoAttsParams: {
       volume: 0.0,
     },
     lockAudio: false,
@@ -183,99 +138,32 @@ add_task(async function start_tests() {
   });
   await test_media_wakelock({
     description: "playing video without audio in it",
-    urlOrFunction: "file_videoWithoutAudioTrack.html",
+    url: "file_videoWithoutAudioTrack.html",
     lockAudio: false,
     lockVideo: false,
   });
   await test_media_wakelock({
     description: "playing audio in video element",
-    urlOrFunction: "file_videoWithAudioOnly.html",
+    url: "file_videoWithAudioOnly.html",
     lockAudio: true,
     lockVideo: false,
   });
   await test_media_wakelock({
     description: "playing audio in audio element",
-    urlOrFunction: "file_mediaPlayback2.html",
+    url: "file_mediaPlayback2.html",
     lockAudio: true,
     lockVideo: false,
   });
   await test_media_wakelock({
     description: "playing video from media stream with audio and video tracks",
-    urlOrFunction: "browser_mediaStreamPlayback.html",
+    url: "browser_mediaStreamPlayback.html",
     lockAudio: true,
     lockVideo: true,
   });
   await test_media_wakelock({
     description: "playing video from media stream without audio track",
-    urlOrFunction: "browser_mediaStreamPlaybackWithoutAudio.html",
+    url: "browser_mediaStreamPlaybackWithoutAudio.html",
     lockAudio: true,
     lockVideo: true,
   });
-  await test_media_wakelock({
-    description: "playing audible web audio",
-    urlOrFunction: webaudioDocument,
-    lockAudio: true,
-    lockVideo: false,
-  });
-  await test_media_wakelock({
-    description: "suspended web audio",
-    urlOrFunction: webaudioDocument,
-    additionalParams: {
-      suspend: true,
-    },
-    lockAudio: false,
-    lockVideo: false,
-  });
 });
-
-async function waitUntilAudioContextStarts() {
-  const ac = content.ac;
-  if (ac.state == "running") {
-    return;
-  }
-  while (ac.state != "running") {
-    await new Promise(r => (ac.onstatechange = r));
-  }
-}
-
-add_task(
-  async function testBrieflyAudibleAudioContextReleasesAudioWakeLockWhenInaudible() {
-    const tab = await BrowserTestUtils.openNewForegroundTab(
-      window.gBrowser,
-      "about:blank"
-    );
-    const browser = tab.linkedBrowser;
-
-    let audioWakeLock = getWakeLockState("audio-playing", true, true);
-    let videoWakeLock = getWakeLockState("video-playing", false, true);
-
-    // Make a short noise
-    await SpecialPowers.spawn(browser, [], () => {
-      content.ac = new content.AudioContext();
-      const ac = content.ac;
-      const dest = ac.destination;
-      const source = new content.OscillatorNode(ac);
-      source.start(ac.currentTime);
-      source.stop(ac.currentTime + 0.1);
-      source.connect(dest);
-    });
-    await SpecialPowers.spawn(browser, [], waitUntilAudioContextStarts);
-    info("AudioContext is running.");
-
-    await audioWakeLock.check();
-    await videoWakeLock.check();
-
-    await waitForTabPlayingEvent(tab, false);
-
-    audioWakeLock = getWakeLockState("audio-playing", false, true);
-    videoWakeLock = getWakeLockState("video-playing", false, true);
-
-    await audioWakeLock.check();
-    await videoWakeLock.check();
-
-    await BrowserTestUtils.removeTab(tab);
-  }
-);

View file

@@ -35,10 +35,14 @@ add_task(async function testSilentAudioContext() {
     content.ac = new content.AudioContext();
     const ac = content.ac;
     const dest = ac.destination;
-    const source = new content.OscillatorNode(content.ac);
-    const gain = new content.GainNode(content.ac);
-    gain.gain.value = 0.0;
-    source.connect(gain).connect(dest);
+    const source = ac.createBufferSource();
+    const buf = ac.createBuffer(1, 3 * ac.sampleRate, ac.sampleRate);
+    const bufData = Cu.cloneInto(buf.getChannelData(0), {});
+    for (let idx = 0; idx < buf.length; idx++) {
+      bufData[idx] = 0.0;
+    }
+    source.buffer = buf;
+    source.connect(dest);
     source.start();
   });
   info(`- check AudioContext's state -`);

View file

@@ -3,20 +3,12 @@
 <meta content="text/html;charset=utf-8" http-equiv="Content-Type">
 <meta content="utf-8" http-equiv="encoding">
 </head>
-<body>
-<pre id=state></pre>
-<button id="start" onclick="start_webaudio()">Start</button>
-<button id="stop" onclick="stop_webaudio()">Stop</button>
 <script type="text/javascript">
   var ac = new AudioContext();
   var dest = ac.destination;
   var osc = ac.createOscillator();
   osc.connect(dest);
   osc.start();
-  document.querySelector("pre").innerText = ac.state;
-  ac.onstatechange = function() {
-    document.querySelector("pre").innerText = ac.state;
-  }
 
   function start_webaudio() {
     ac.resume();
@@ -26,4 +18,7 @@
     ac.suspend();
   }
 </script>
+<body>
+<button id="start" onclick="start_webaudio()">Start</button>
+<button id="stop" onclick="stop_webaudio()">Stop</button>
 </body>

View file

@@ -64,7 +64,7 @@ async function waitForTabPlayingEvent(tab, expectPlaying) {
   if (tab.soundPlaying == expectPlaying) {
     ok(true, "The tab should " + (expectPlaying ? "" : "not ") + "be playing");
   } else {
-    info("Playing state doesn't match, wait for attributes changes.");
+    info("Playing state doens't match, wait for attributes changes.");
     await BrowserTestUtils.waitForEvent(
       tab,
       "TabAttrModified",