Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1413098 - part 1: add a policy to decide whether to allow an audio context to start. r=padenot
An audio context is allowed to start if (1) its document has been activated by a user gesture, or (2) it is an offline audio context, because an offline context does not output sound directly to audio devices. In addition, all resume promises stay pending until the audio context has been allowed to start and the user calls resume() again.

MozReview-Commit-ID: G6RV8dDM6vQ

--HG--
extra : rebase_source : bc1d2ad0594ad1f54c05ade06495918aaee14911
extra : source : 5031770d70fd643230cb4caf6a5106616adaf0fd
Parent: 46bd5d1cf4
Commit: f59daea779
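To illustrate the behaviour this policy produces for web content, here is a minimal content-side sketch in TypeScript. It is illustrative only and assumes the prefs media.autoplay.enabled=false and media.autoplay.enabled.user-gestures-needed=true; the "#play" button is a hypothetical page element, not part of this patch.

// Content-side sketch of the behaviour described in the commit message.
const ctx = new AudioContext();

// Without prior user activation the context stays in the "suspended" state,
// and a resume() promise issued here remains pending rather than rejecting.
ctx.resume().then(() => console.log("early resume resolved, state:", ctx.state));

const playButton = document.querySelector<HTMLButtonElement>("#play")!;
playButton.addEventListener("click", async () => {
  // The click counts as user activation, so this resume() is allowed to start
  // the context. Once the state changes from "suspended" to "running", the
  // earlier pending resume() promise is resolved as well.
  await ctx.resume();
  console.log(ctx.state); // "running"
});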
AutoplayPolicy.cpp
@@ -8,6 +8,7 @@
 #include "mozilla/EventStateManager.h"
 #include "mozilla/Preferences.h"
+#include "mozilla/dom/AudioContext.h"
 #include "mozilla/dom/HTMLMediaElement.h"
 #include "mozilla/dom/HTMLMediaElementBinding.h"
 #include "nsContentUtils.h"
@@ -68,5 +69,42 @@ AutoplayPolicy::IsMediaElementAllowedToPlay(NotNull<HTMLMediaElement*> aElement)
   return false;
 }

+/* static */ bool
+AutoplayPolicy::IsAudioContextAllowedToPlay(NotNull<AudioContext*> aContext)
+{
+  if (Preferences::GetBool("media.autoplay.enabled")) {
+    return true;
+  }
+
+  if (!Preferences::GetBool("media.autoplay.enabled.user-gestures-needed", false)) {
+    return true;
+  }
+
+  // Offline context won't directly output sound to audio devices.
+  if (aContext->IsOffline()) {
+    return true;
+  }
+
+  nsPIDOMWindowInner* window = aContext->GetOwner();
+  if (!window) {
+    return false;
+  }
+
+  nsCOMPtr<nsIPrincipal> principal = aContext->GetParentObject()->AsGlobal()->PrincipalOrNull();
+
+  // Whitelisted.
+  if (principal &&
+      nsContentUtils::IsExactSitePermAllow(principal, "autoplay-media")) {
+    return true;
+  }
+
+  // Activated by user gesture.
+  if (window->GetExtantDoc()->HasBeenUserActivated()) {
+    return true;
+  }
+
+  return false;
+}
+
 } // namespace dom
 } // namespace mozilla

AutoplayPolicy.h
@@ -15,6 +15,7 @@ namespace mozilla {
 namespace dom {

 class HTMLMediaElement;
+class AudioContext;

 /**
  * AutoplayPolicy is used to manage autoplay logic for all kinds of media,
@@ -32,6 +33,9 @@ class AutoplayPolicy
 {
 public:
   static bool IsMediaElementAllowedToPlay(NotNull<HTMLMediaElement*> aElement);
+  static bool IsAudioContextAllowedToPlay(NotNull<AudioContext*> aContext);
 private:
   static bool IsDocumentAllowedToPlay(nsIDocument* aDoc);
 };

 } // namespace dom

AudioContext.cpp
@@ -9,6 +9,7 @@
 #include "blink/PeriodicWave.h"

 #include "mozilla/ErrorResult.h"
+#include "mozilla/NotNull.h"
 #include "mozilla/OwningNonNull.h"
 #include "mozilla/RefPtr.h"
 #include "mozilla/Preferences.h"
@@ -44,6 +45,7 @@
 #include "AudioListener.h"
 #include "AudioNodeStream.h"
 #include "AudioStream.h"
+#include "AutoplayPolicy.h"
 #include "BiquadFilterNode.h"
 #include "ChannelMergerNode.h"
 #include "ChannelSplitterNode.h"
@@ -83,6 +85,7 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(AudioContext)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mDestination)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mListener)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mPromiseGripArray)
+  NS_IMPL_CYCLE_COLLECTION_UNLINK(mPendingResumePromises)
   if (!tmp->mIsStarted) {
     NS_IMPL_CYCLE_COLLECTION_UNLINK(mActiveNodes)
   }
@@ -101,6 +104,7 @@ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(AudioContext,
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mDestination)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mListener)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPromiseGripArray)
+  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPendingResumePromises)
   if (!tmp->mIsStarted) {
     MOZ_ASSERT(tmp->mIsOffline,
                "Online AudioContexts should always be started");
@@ -149,13 +153,27 @@ AudioContext::AudioContext(nsPIDOMWindowInner* aWindow,

   // Note: AudioDestinationNode needs an AudioContext that must already be
   // bound to the window.
+  bool allowToStart = AutoplayPolicy::IsAudioContextAllowedToPlay(WrapNotNull(this));
   mDestination = new AudioDestinationNode(this, aIsOffline,
-                                          aNumberOfChannels, aLength, aSampleRate);
+                                          aNumberOfChannels,
+                                          aLength,
+                                          aSampleRate,
+                                          allowToStart);

   // The context can't be muted until it has a destination.
   if (mute) {
     Mute();
   }
+
+  // If we won't allow the audio context to start, we need to suspend all its
+  // streams in order to delay the state change from 'suspended' to 'running'.
+  if (!allowToStart) {
+    ErrorResult rv;
+    RefPtr<Promise> dummy = Suspend(rv);
+    MOZ_ASSERT(!rv.Failed(), "can't create promise");
+    MOZ_ASSERT(dummy->State() != Promise::PromiseState::Rejected,
+               "suspend failed");
+  }
 }

 nsresult
@@ -726,6 +744,11 @@ AudioContext::Shutdown()
     }

     mPromiseGripArray.Clear();
+
+    for (const auto& p : mPendingResumePromises) {
+      p->MaybeReject(NS_ERROR_DOM_INVALID_STATE_ERR);
+    }
+    mPendingResumePromises.Clear();
   }

   // Release references to active nodes.
@@ -905,6 +928,16 @@ AudioContext::OnStateChanged(void* aPromise, AudioContextState aNewState)
     }
   }

+  // Resolve all pending promises once the audio context has been allowed to
+  // start.
+  if (mAudioContextState == AudioContextState::Suspended &&
+      aNewState == AudioContextState::Running) {
+    for (const auto& p : mPendingResumePromises) {
+      p->MaybeResolveWithUndefined();
+    }
+    mPendingResumePromises.Clear();
+  }
+
   if (mAudioContextState != aNewState) {
     RefPtr<OnStateChangeTask> task = new OnStateChangeTask(this);
     Dispatch(task.forget());
@@ -988,6 +1021,9 @@ AudioContext::Resume(ErrorResult& aRv)
     return promise.forget();
   }

+  mPendingResumePromises.AppendElement(promise);
+
+  if (AutoplayPolicy::IsAudioContextAllowedToPlay(WrapNotNull(this))) {
   Destination()->Resume();

   nsTArray<MediaStream*> streams;
@@ -998,12 +1034,11 @@ AudioContext::Resume(ErrorResult& aRv)
   if (mSuspendCalled) {
     streams = GetAllStreams();
   }
-  mPromiseGripArray.AppendElement(promise);
   Graph()->ApplyAudioContextOperation(DestinationStream()->AsAudioNodeStream(),
                                       streams,
                                       AudioContextOperation::Resume, promise);
-
   mSuspendCalled = false;
+  }

   return promise.forget();
 }

AudioContext.h
@@ -353,9 +353,14 @@ private:
   RefPtr<AudioDestinationNode> mDestination;
   RefPtr<AudioListener> mListener;
   nsTArray<UniquePtr<WebAudioDecodeJob> > mDecodeJobs;
-  // This array is used to keep the suspend/resume/close promises alive until
+  // This array is used to keep the suspend/close promises alive until
   // they are resolved, so we can safely pass them across threads.
   nsTArray<RefPtr<Promise>> mPromiseGripArray;
+  // This array is used to only keep the resume promises alive until they are
+  // resolved, so we can safely pass them across threads. If the audio context
+  // is not allowed to play, the promise stays pending in this array and is not
+  // resolved until the context is allowed and the user calls resume() again.
+  nsTArray<RefPtr<Promise>> mPendingResumePromises;
   // See RegisterActiveNode. These will keep the AudioContext alive while it
   // is rendering and the window remains alive.
   nsTHashtable<nsRefPtrHashKey<AudioNode> > mActiveNodes;

AudioDestinationNode.cpp
@@ -324,7 +324,9 @@ NS_IMPL_RELEASE_INHERITED(AudioDestinationNode, AudioNode)
 AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
                                            bool aIsOffline,
                                            uint32_t aNumberOfChannels,
-                                           uint32_t aLength, float aSampleRate)
+                                           uint32_t aLength,
+                                           float aSampleRate,
+                                           bool aAllowToStart)
   : AudioNode(aContext, aNumberOfChannels,
               ChannelCountMode::Explicit, ChannelInterpretation::Speakers)
   , mFramesToProduce(aLength)
@@ -352,7 +354,7 @@ AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
   mStream->AddMainThreadListener(this);
   mStream->AddAudioOutput(&gWebAudioOutputKey);

-  if (!aIsOffline) {
+  if (!aIsOffline && aAllowToStart) {
     graph->NotifyWhenGraphStarted(mStream);
   }
 }

AudioDestinationNode.h
@@ -27,7 +27,8 @@ public:
                        bool aIsOffline,
                        uint32_t aNumberOfChannels = 0,
                        uint32_t aLength = 0,
-                       float aSampleRate = 0.0f);
+                       float aSampleRate = 0.0f,
+                       bool aAllowToStart = true);

   void DestroyMediaStream() override;
