merge mozilla-inbound to mozilla-central a=merge

This commit is contained in:
Carsten "Tomcat" Book 2015-09-17 14:56:37 +02:00
Parents 9619696835 647b520991
Commit 587ddedf21
284 changed files with 4792 additions and 2696 deletions

View file

@ -1940,10 +1940,10 @@ pref("view_source.tab", true);
#endif
// Enable ServiceWorkers for Push API consumers.
// Interception is still disabled.
// Interception is still disabled on beta and release.
pref("dom.serviceWorkers.enabled", true);
#ifdef NIGHTLY_BUILD
#ifndef RELEASE_BUILD
pref("dom.serviceWorkers.interception.enabled", true);
#endif

View file

@ -38,7 +38,7 @@ class HistoryTracker final : public HistoryTrackerBase
{
public:
explicit HistoryTracker(uint32_t aTimeout)
: HistoryTrackerBase(1000 * aTimeout / 2)
: HistoryTrackerBase(1000 * aTimeout / 2, "HistoryTracker")
{
}

View file

@ -30,7 +30,7 @@
// Form related includes
#include "nsIDOMHTMLFormElement.h"
#include "pldhash.h"
#include "PLDHashTable.h"
#ifdef DEBUG_CONTENT_LIST
#include "nsIContentIterator.h"

View file

@ -23,6 +23,7 @@
#include "nsILoadContext.h"
#include "nsCOMArray.h"
#include "nsContentUtils.h"
#include "mozilla/dom/nsMixedContentBlocker.h"
using mozilla::LogLevel;
@ -119,6 +120,12 @@ nsContentPolicy::CheckPolicy(CPMethod policyMethod,
nsContentPolicyType externalType =
nsContentUtils::InternalContentPolicyTypeToExternal(contentType);
nsContentPolicyType externalTypeOrScript =
nsContentUtils::InternalContentPolicyTypeToExternalOrScript(contentType);
nsCOMPtr<nsIContentPolicy> mixedContentBlocker =
do_GetService(NS_MIXEDCONTENTBLOCKER_CONTRACTID);
/*
* Enumerate mPolicies and ask each of them, taking the logical AND of
* their permissions.
@ -129,7 +136,15 @@ nsContentPolicy::CheckPolicy(CPMethod policyMethod,
int32_t count = entries.Count();
for (int32_t i = 0; i < count; i++) {
/* check the appropriate policy */
rv = (entries[i]->*policyMethod)(externalType, contentLocation,
// Send the internal content policy type to the mixed content blocker
// which needs to know about TYPE_INTERNAL_WORKER,
// TYPE_INTERNAL_SHARED_WORKER and TYPE_INTERNAL_SERVICE_WORKER.
bool isMixedContentBlocker = mixedContentBlocker == entries[i];
nsContentPolicyType type = externalType;
if (isMixedContentBlocker) {
type = externalTypeOrScript;
}
rv = (entries[i]->*policyMethod)(type, contentLocation,
requestingLocation, requestingContext,
mimeType, extra, requestPrincipal,
decision);

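The dispatch above takes the logical AND of all registered policies and rejects the load as soon as any one policy rejects, while the mixed content blocker alone receives the finer-grained internal-or-script type. A minimal standalone sketch of that pattern, using simplified stand-in types rather than the real Gecko interfaces, would look like this:

// Sketch only: AND-style content policy dispatch where a single policy
// receives a more specific type. Names are hypothetical stand-ins.
#include <cstdint>
#include <vector>

enum class Decision { Accept, Reject };

struct Policy {
  bool isMixedContentBlocker = false;
  // Returns the policy's decision for a load of the given type.
  virtual Decision ShouldLoad(uint32_t aType) = 0;
  virtual ~Policy() = default;
};

Decision CheckPolicies(const std::vector<Policy*>& aPolicies,
                       uint32_t aExternalType,
                       uint32_t aExternalTypeOrScript) {
  for (Policy* p : aPolicies) {
    // Only the mixed content blocker sees the internal worker/script types.
    uint32_t type = p->isMixedContentBlocker ? aExternalTypeOrScript
                                             : aExternalType;
    if (p->ShouldLoad(type) == Decision::Reject) {
      return Decision::Reject;  // logical AND: the first rejection wins
    }
  }
  return Decision::Accept;
}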
View file

@ -126,6 +126,7 @@ NS_CP_ContentTypeName(uint32_t contentType)
CASE_RETURN( TYPE_INTERNAL_TRACK );
CASE_RETURN( TYPE_INTERNAL_XMLHTTPREQUEST );
CASE_RETURN( TYPE_INTERNAL_EVENTSOURCE );
CASE_RETURN( TYPE_INTERNAL_SERVICE_WORKER );
default:
return "<Unknown Type>";
}

View file

@ -7928,6 +7928,7 @@ nsContentUtils::InternalContentPolicyTypeToExternal(nsContentPolicyType aType)
case nsIContentPolicy::TYPE_INTERNAL_SCRIPT:
case nsIContentPolicy::TYPE_INTERNAL_WORKER:
case nsIContentPolicy::TYPE_INTERNAL_SHARED_WORKER:
case nsIContentPolicy::TYPE_INTERNAL_SERVICE_WORKER:
return nsIContentPolicy::TYPE_SCRIPT;
case nsIContentPolicy::TYPE_INTERNAL_EMBED:
@ -7952,6 +7953,22 @@ nsContentUtils::InternalContentPolicyTypeToExternal(nsContentPolicyType aType)
}
}
/* static */
nsContentPolicyType
nsContentUtils::InternalContentPolicyTypeToExternalOrScript(nsContentPolicyType aType)
{
switch (aType) {
case nsIContentPolicy::TYPE_INTERNAL_SCRIPT:
case nsIContentPolicy::TYPE_INTERNAL_WORKER:
case nsIContentPolicy::TYPE_INTERNAL_SHARED_WORKER:
case nsIContentPolicy::TYPE_INTERNAL_SERVICE_WORKER:
return aType;
default:
return InternalContentPolicyTypeToExternal(aType);
}
}
nsresult
nsContentUtils::SetFetchReferrerURIWithPolicy(nsIPrincipal* aPrincipal,

View file

@ -953,6 +953,18 @@ public:
*/
static nsContentPolicyType InternalContentPolicyTypeToExternal(nsContentPolicyType aType);
/**
* Map internal content policy types to external ones or script types:
* * TYPE_INTERNAL_SCRIPT
* * TYPE_INTERNAL_WORKER
* * TYPE_INTERNAL_SHARED_WORKER
* * TYPE_INTERNAL_SERVICE_WORKER
*
*
* Note: DO NOT call this function unless you know what you're doing!
*/
static nsContentPolicyType InternalContentPolicyTypeToExternalOrScript(nsContentPolicyType aType);
/**
* Quick helper to determine whether there are any mutation listeners
* of a given type that apply to this content or any of its ancestors.

View file

@ -3266,6 +3266,7 @@ private:
if (window == aWindow) {
break;
}
item = item->getNext();
}
return item;
}

View file

@ -1429,7 +1429,8 @@ nsDOMStyleSheetSetList::EnsureFresh()
// ==================================================================
nsIDocument::SelectorCache::SelectorCache()
: nsExpirationTracker<SelectorCacheKey, 4>(1000) { }
: nsExpirationTracker<SelectorCacheKey, 4>(1000, "nsIDocument::SelectorCache")
{ }
// CacheList takes ownership of aSelectorList.
void nsIDocument::SelectorCache::CacheList(const nsAString& aSelector,

View file

@ -45,7 +45,7 @@
#include "nsIApplicationCache.h"
#include "nsIApplicationCacheContainer.h"
#include "nsStyleSet.h"
#include "pldhash.h"
#include "PLDHashTable.h"
#include "nsAttrAndChildArray.h"
#include "nsDOMAttributeMap.h"
#include "nsIContentViewer.h"

View file

@ -33,7 +33,7 @@
#include "nsCCUncollectableMarker.h"
#include "mozAutoDocUpdate.h"
#include "pldhash.h"
#include "PLDHashTable.h"
#include "prprf.h"
#include "nsWrapperCacheInlines.h"

View file

@ -53,6 +53,7 @@
#include "ScriptSettings.h"
#include "mozilla/Preferences.h"
#include "mozilla/Likely.h"
#include "mozilla/Snprintf.h"
#include "mozilla/unused.h"
// Other Classes
@ -542,6 +543,14 @@ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(nsTimeout, AddRef)
NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(nsTimeout, Release)
nsresult
nsTimeout::InitTimer(uint32_t aDelay)
{
return mTimer->InitWithNameableFuncCallback(
nsGlobalWindow::TimerCallback, this, aDelay,
nsITimer::TYPE_ONE_SHOT, nsGlobalWindow::TimerNameCallback);
}
// Return true if this timeout has a refcount of 1. This is used to check
// that dummy_timeout doesn't leak from nsGlobalWindow::RunTimeout.
bool
@ -12501,7 +12510,7 @@ nsGlobalWindow::SetTimeoutOrInterval(nsIScriptTimeoutHandler *aHandler,
nsRefPtr<nsTimeout> copy = timeout;
rv = timeout->InitTimer(TimerCallback, realInterval);
rv = timeout->InitTimer(realInterval);
if (NS_FAILED(rv)) {
return rv;
}
@ -12757,7 +12766,7 @@ nsGlobalWindow::RescheduleTimeout(nsTimeout* aTimeout, const TimeStamp& now,
// Reschedule the OS timer. Don't bother returning any error codes if
// this fails since the callers of this method don't care about them.
nsresult rv = aTimeout->InitTimer(TimerCallback, delay.ToMilliseconds());
nsresult rv = aTimeout->InitTimer(delay.ToMilliseconds());
if (NS_FAILED(rv)) {
NS_ERROR("Error initializing timer for DOM timeout!");
@ -13057,7 +13066,7 @@ nsresult nsGlobalWindow::ResetTimersForNonBackgroundWindow()
timeout->mFiringDepth = firingDepth;
timeout->Release();
nsresult rv = timeout->InitTimer(TimerCallback, delay.ToMilliseconds());
nsresult rv = timeout->InitTimer(delay.ToMilliseconds());
if (NS_FAILED(rv)) {
NS_WARNING("Error resetting non background timer for DOM timeout!");
@ -13154,6 +13163,19 @@ nsGlobalWindow::TimerCallback(nsITimer *aTimer, void *aClosure)
timeout->mWindow->RunTimeout(timeout);
}
// static
void
nsGlobalWindow::TimerNameCallback(nsITimer* aTimer, void* aClosure, char* aBuf,
size_t aLen)
{
nsRefPtr<nsTimeout> timeout = (nsTimeout*)aClosure;
const char* filename;
uint32_t lineNum, column;
timeout->mScriptHandler->GetLocation(&filename, &lineNum, &column);
snprintf(aBuf, aLen, "[content] %s:%u:%u", filename, lineNum, column);
}
//*****************************************************************************
// nsGlobalWindow: Helper Functions
//*****************************************************************************
@ -13499,7 +13521,7 @@ nsGlobalWindow::ResumeTimeouts(bool aThawChildren)
t->mTimer = do_CreateInstance("@mozilla.org/timer;1");
NS_ENSURE_TRUE(t->mTimer, NS_ERROR_OUT_OF_MEMORY);
rv = t->InitTimer(TimerCallback, delay);
rv = t->InitTimer(delay);
if (NS_FAILED(rv)) {
t->mTimer = nullptr;
return rv;

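The timeout changes above switch DOM timers to InitWithNameableFuncCallback so each timer can report a human-readable label (script file, line and column) for diagnostics. A hedged, self-contained sketch of that idea, with stand-in types rather than the real nsITimer API:

// Sketch: a timer that carries both a fire callback and a "name" callback
// used purely for labelling in logs and profiles. Hypothetical stand-ins.
#include <cstddef>
#include <cstdio>

typedef void (*FireFunc)(void* aClosure);
typedef void (*NameFunc)(void* aClosure, char* aBuf, size_t aLen);

struct NamedTimer {
  FireFunc mFire = nullptr;
  NameFunc mName = nullptr;
  void* mClosure = nullptr;

  void InitWithNameableCallback(FireFunc aFire, void* aClosure, NameFunc aName) {
    mFire = aFire;
    mClosure = aClosure;
    mName = aName;
  }

  void Fire() {
    char label[128];
    mName(mClosure, label, sizeof(label));   // e.g. "[content] page.js:12:4"
    std::printf("firing timer %s\n", label); // the label only serves diagnostics
    mFire(mClosure);
  }
};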
View file

@ -159,11 +159,7 @@ public:
NS_DECL_CYCLE_COLLECTION_NATIVE_CLASS(nsTimeout)
NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(nsTimeout)
nsresult InitTimer(nsTimerCallbackFunc aFunc, uint32_t aDelay)
{
return mTimer->InitWithFuncCallback(aFunc, this, aDelay,
nsITimer::TYPE_ONE_SHOT);
}
nsresult InitTimer(uint32_t aDelay);
bool HasRefCntOne();
@ -1428,6 +1424,8 @@ public:
// fire after it, but no earlier than mTimeoutInsertionPoint, if any.
void InsertTimeoutIntoList(nsTimeout *aTimeout);
static void TimerCallback(nsITimer *aTimer, void *aClosure);
static void TimerNameCallback(nsITimer* aTimer, void* aClosure, char* aBuf,
size_t aLen);
// Helper Functions
already_AddRefed<nsIDocShellTreeOwner> GetTreeOwner();

View file

@ -20,7 +20,7 @@ interface nsIPrincipal;
* by launching a dialog to prompt the user for something).
*/
[scriptable,uuid(3663021e-5670-496f-887b-b408d6526b5b)]
[scriptable,uuid(ce321216-c404-40a7-a711-d80454ec6b76)]
interface nsIContentPolicy : nsIContentPolicyBase
{
/**

View file

@ -24,7 +24,7 @@ typedef unsigned long nsContentPolicyType;
* by launching a dialog to prompt the user for something).
*/
[scriptable,uuid(20f7b9bf-d7d5-4987-ade8-b7dc0398d44a)]
[scriptable,uuid(8527ae0d-0c43-4413-bc46-85c0bcb66876)]
interface nsIContentPolicyBase : nsISupports
{
/**
@ -271,6 +271,15 @@ interface nsIContentPolicyBase : nsISupports
*/
const nsContentPolicyType TYPE_INTERNAL_EVENTSOURCE = 34;
/**
* Indicates an internal constant for scripts loaded through a service
* worker.
*
* This will be mapped to TYPE_SCRIPT before being passed to content policy
* implementations.
*/
const nsContentPolicyType TYPE_INTERNAL_SERVICE_WORKER = 35;
/* When adding new content types, please update nsContentBlocker,
* NS_CP_ContentTypeName, nsCSPContext, all nsIContentPolicy
* implementations, the static_assert in dom/cache/DBSchema.cpp,

View file

@ -28,7 +28,7 @@ interface nsIDOMElement;
* by launching a dialog to prompt the user for something).
*/
[scriptable,uuid(b181c97c-9d67-4da1-95a0-e0a202e1807c)]
[scriptable,uuid(b9df71e3-a9b3-4706-b2d5-e6c0d3d68ec7)]
interface nsISimpleContentPolicy : nsIContentPolicyBase
{
/**

View file

@ -1636,10 +1636,10 @@ nsJSContext::BeginCycleCollectionCallback()
// an incremental collection, and we want to be sure to finish it.
CallCreateInstance("@mozilla.org/timer;1", &sICCTimer);
if (sICCTimer) {
sICCTimer->InitWithFuncCallback(ICCTimerFired,
nullptr,
kICCIntersliceDelay,
nsITimer::TYPE_REPEATING_SLACK);
sICCTimer->InitWithNamedFuncCallback(ICCTimerFired, nullptr,
kICCIntersliceDelay,
nsITimer::TYPE_REPEATING_SLACK,
"ICCTimerFired");
}
}
@ -2036,14 +2036,15 @@ nsJSContext::PokeGC(JS::gcreason::Reason aReason, int aDelay)
static bool first = true;
sGCTimer->InitWithFuncCallback(GCTimerFired, reinterpret_cast<void *>(aReason),
aDelay
? aDelay
: (first
? NS_FIRST_GC_DELAY
: NS_GC_DELAY),
nsITimer::TYPE_ONE_SHOT);
sGCTimer->InitWithNamedFuncCallback(GCTimerFired,
reinterpret_cast<void *>(aReason),
aDelay
? aDelay
: (first
? NS_FIRST_GC_DELAY
: NS_GC_DELAY),
nsITimer::TYPE_ONE_SHOT,
"GCTimerFired");
first = false;
}
@ -2062,9 +2063,11 @@ nsJSContext::PokeShrinkGCBuffers()
return;
}
sShrinkGCBuffersTimer->InitWithFuncCallback(ShrinkGCBuffersTimerFired, nullptr,
NS_SHRINK_GC_BUFFERS_DELAY,
nsITimer::TYPE_ONE_SHOT);
sShrinkGCBuffersTimer->InitWithNamedFuncCallback(ShrinkGCBuffersTimerFired,
nullptr,
NS_SHRINK_GC_BUFFERS_DELAY,
nsITimer::TYPE_ONE_SHOT,
"ShrinkGCBuffersTimerFired");
}
// static
@ -2082,9 +2085,10 @@ nsJSContext::PokeShrinkingGC()
return;
}
sShrinkingGCTimer->InitWithFuncCallback(ShrinkingGCTimerFired, nullptr,
sCompactOnUserInactiveDelay,
nsITimer::TYPE_ONE_SHOT);
sShrinkingGCTimer->InitWithNamedFuncCallback(ShrinkingGCTimerFired, nullptr,
sCompactOnUserInactiveDelay,
nsITimer::TYPE_ONE_SHOT,
"ShrinkingGCTimerFired");
}
// static
@ -2104,9 +2108,10 @@ nsJSContext::MaybePokeCC()
// We can kill some objects before running forgetSkippable.
nsCycleCollector_dispatchDeferredDeletion();
sCCTimer->InitWithFuncCallback(CCTimerFired, nullptr,
NS_CC_SKIPPABLE_DELAY,
nsITimer::TYPE_REPEATING_SLACK);
sCCTimer->InitWithNamedFuncCallback(CCTimerFired, nullptr,
NS_CC_SKIPPABLE_DELAY,
nsITimer::TYPE_REPEATING_SLACK,
"CCTimerFired");
}
}
@ -2263,10 +2268,11 @@ DOMGCSliceCallback(JSRuntime *aRt, JS::GCProgress aProgress, const JS::GCDescrip
if (aDesc.isCompartment_) {
if (!sFullGCTimer && !sShuttingDown) {
CallCreateInstance("@mozilla.org/timer;1", &sFullGCTimer);
sFullGCTimer->InitWithFuncCallback(FullGCTimerFired,
nullptr,
NS_FULL_GC_DELAY,
nsITimer::TYPE_ONE_SHOT);
sFullGCTimer->InitWithNamedFuncCallback(FullGCTimerFired,
nullptr,
NS_FULL_GC_DELAY,
nsITimer::TYPE_ONE_SHOT,
"FullGCTimerFired");
}
} else {
nsJSContext::KillFullGCTimer();
@ -2295,10 +2301,11 @@ DOMGCSliceCallback(JSRuntime *aRt, JS::GCProgress aProgress, const JS::GCDescrip
nsJSContext::KillInterSliceGCTimer();
if (!sShuttingDown) {
CallCreateInstance("@mozilla.org/timer;1", &sInterSliceGCTimer);
sInterSliceGCTimer->InitWithFuncCallback(InterSliceGCTimerFired,
nullptr,
NS_INTERSLICE_GC_DELAY,
nsITimer::TYPE_ONE_SHOT);
sInterSliceGCTimer->InitWithNamedFuncCallback(InterSliceGCTimerFired,
nullptr,
NS_INTERSLICE_GC_DELAY,
nsITimer::TYPE_ONE_SHOT,
"InterSliceGCTimerFired");
}
if (ShouldTriggerCC(nsCycleCollector_suspectedCount())) {

View file

@ -13,7 +13,7 @@
#include "nsIDocument.h"
#include "mozilla/EventListenerManager.h"
#include "nsIXPConnect.h"
#include "pldhash.h"
#include "PLDHashTable.h"
#include "nsIDOMAttr.h"
#include "nsCOMArray.h"
#include "nsPIDOMWindow.h"

View file

@ -23,7 +23,7 @@
#include "mozilla/MemoryReporting.h"
#include "pldhash.h"
#include "PLDHashTable.h"
#include "nsError.h"
#include "nsIAtom.h"

View file

@ -27,7 +27,7 @@
#include "nsIScriptNameSpaceManager.h"
#include "nsString.h"
#include "nsID.h"
#include "pldhash.h"
#include "PLDHashTable.h"
#include "nsDOMClassInfo.h"
#include "nsIObserver.h"
#include "nsWeakReference.h"

dom/cache/DBSchema.cpp vendored
View file

@ -274,8 +274,9 @@ static_assert(nsIContentPolicy::TYPE_INVALID == 0 &&
nsIContentPolicy::TYPE_INTERNAL_VIDEO == 31 &&
nsIContentPolicy::TYPE_INTERNAL_TRACK == 32 &&
nsIContentPolicy::TYPE_INTERNAL_XMLHTTPREQUEST == 33 &&
nsIContentPolicy::TYPE_INTERNAL_EVENTSOURCE == 34,
"nsContentPolicytType values are as expected");
nsIContentPolicy::TYPE_INTERNAL_EVENTSOURCE == 34 &&
nsIContentPolicy::TYPE_INTERNAL_SERVICE_WORKER == 35,
"nsContentPolicyType values are as expected");
namespace {

View file

@ -40,7 +40,6 @@ CameraPreviewMediaStream::CameraPreviewMediaStream(DOMMediaStream* aWrapper)
MediaStreamGraph::GetInstance(
MediaStreamGraph::SYSTEM_THREAD_DRIVER, AudioChannel::Normal));
mFakeMediaStreamGraph = new FakeMediaStreamGraph();
mIsConsumed = false;
}
void
@ -64,15 +63,6 @@ CameraPreviewMediaStream::AddVideoOutput(VideoFrameContainer* aContainer)
MutexAutoLock lock(mMutex);
nsRefPtr<VideoFrameContainer> container = aContainer;
AddVideoOutputImpl(container.forget());
if (mVideoOutputs.Length() > 1) {
return;
}
mIsConsumed = true;
for (uint32_t j = 0; j < mListeners.Length(); ++j) {
MediaStreamListener* l = mListeners[j];
l->NotifyConsumptionChanged(mFakeMediaStreamGraph, MediaStreamListener::CONSUMED);
}
}
void
@ -80,20 +70,6 @@ CameraPreviewMediaStream::RemoveVideoOutput(VideoFrameContainer* aContainer)
{
MutexAutoLock lock(mMutex);
RemoveVideoOutputImpl(aContainer);
if (!mVideoOutputs.IsEmpty()) {
return;
}
mIsConsumed = false;
for (uint32_t j = 0; j < mListeners.Length(); ++j) {
MediaStreamListener* l = mListeners[j];
l->NotifyConsumptionChanged(mFakeMediaStreamGraph, MediaStreamListener::NOT_CONSUMED);
}
}
void
CameraPreviewMediaStream::ChangeExplicitBlockerCount(int32_t aDelta)
{
}
void

View file

@ -48,7 +48,8 @@ public:
virtual void RemoveAudioOutput(void* aKey) override;
virtual void AddVideoOutput(VideoFrameContainer* aContainer) override;
virtual void RemoveVideoOutput(VideoFrameContainer* aContainer) override;
virtual void ChangeExplicitBlockerCount(int32_t aDelta) override;
virtual void Suspend() override {}
virtual void Resume() override {}
virtual void AddListener(MediaStreamListener* aListener) override;
virtual void RemoveListener(MediaStreamListener* aListener) override;
virtual void Destroy() override;

View file

@ -216,7 +216,7 @@ public:
};
ImageCache::ImageCache()
: nsExpirationTracker<ImageCacheEntryData,4>(GENERATION_MS)
: nsExpirationTracker<ImageCacheEntryData,4>(GENERATION_MS, "ImageCache")
, mTotal(0)
{
if (!sPrefsInitialized) {

View file

@ -47,33 +47,6 @@ ToChar(bool aBool)
return aBool ? "true" : "false";
}
static const char*
ToChar(EventMessage aEventMessage)
{
switch (aEventMessage) {
case eQuerySelectedText:
return "eQuerySelectedText";
case eQueryTextContent:
return "eQueryTextContent";
case eQueryCaretRect:
return "eQueryCaretRect";
case eQueryTextRect:
return "eQueryTextRect";
case eQueryEditorRect:
return "eQueryEditorRect";
case eQueryContentState:
return "eQueryContentState";
case eQuerySelectionAsTransferable:
return "eQuerySelectionAsTransferable";
case eQueryCharacterAtPoint:
return "eQueryCharacterAtPoint";
case eQueryDOMWidgetHittest:
return "eQueryDOMWidgetHittest";
default:
return "Unsupported message";
}
}
static const char*
ToChar(IMEMessage aIMEMessage)
{

View file

@ -133,29 +133,6 @@ GetIMEStateSetOpenName(IMEState::Open aOpen)
}
}
static const char*
GetEventMessageName(EventMessage aMessage)
{
switch (aMessage) {
case eCompositionStart:
return "eCompositionStart";
case eCompositionEnd:
return "eCompositionEnd";
case eCompositionUpdate:
return "eCompositionUpdate";
case eCompositionChange:
return "eCompositionChange";
case eCompositionCommitAsIs:
return "eCompositionCommitAsIs";
case eCompositionCommit:
return "eCompositionCommit";
case eSetSelection:
return "eSetSelection";
default:
return "unacceptable event message";
}
}
static const char*
GetNotifyIMEMessageName(IMEMessage aMessage)
{
@ -1137,7 +1114,7 @@ IMEStateManager::DispatchCompositionEvent(
"mFlags={ mIsTrusted=%s, mPropagationStopped=%s } }, "
"aIsSynthesized=%s), tabParent=%p",
aEventTargetNode, aPresContext,
GetEventMessageName(aCompositionEvent->mMessage),
ToChar(aCompositionEvent->mMessage),
GetBoolName(aCompositionEvent->mFlags.mIsTrusted),
GetBoolName(aCompositionEvent->mFlags.mPropagationStopped),
GetBoolName(aIsSynthesized), tabParent.get()));
@ -1235,7 +1212,7 @@ IMEStateManager::HandleSelectionEvent(nsPresContext* aPresContext,
"aEventTargetContent=0x%p, aSelectionEvent={ mMessage=%s, "
"mFlags={ mIsTrusted=%s } }), tabParent=%p",
aPresContext, aEventTargetContent,
GetEventMessageName(aSelectionEvent->mMessage),
ToChar(aSelectionEvent->mMessage),
GetBoolName(aSelectionEvent->mFlags.mIsTrusted),
tabParent.get()));
@ -1268,7 +1245,7 @@ IMEStateManager::OnCompositionEventDiscarded(
MOZ_LOG(sISMLog, LogLevel::Info,
("ISM: IMEStateManager::OnCompositionEventDiscarded(aCompositionEvent={ "
"mMessage=%s, mFlags={ mIsTrusted=%s } })",
GetEventMessageName(aCompositionEvent->mMessage),
ToChar(aCompositionEvent->mMessage),
GetBoolName(aCompositionEvent->mFlags.mIsTrusted)));
if (!aCompositionEvent->mFlags.mIsTrusted) {

View file

@ -116,6 +116,7 @@ InternalRequest::MapContentPolicyTypeToRequestContext(nsContentPolicyType aConte
context = RequestContext::Internal;
break;
case nsIContentPolicy::TYPE_INTERNAL_SCRIPT:
case nsIContentPolicy::TYPE_INTERNAL_SERVICE_WORKER:
context = RequestContext::Script;
break;
case nsIContentPolicy::TYPE_INTERNAL_WORKER:

View file

@ -1875,10 +1875,6 @@ HTMLMediaElement::CaptureStreamInternal(bool aFinishWhenEnded,
out->mFinishWhenEnded = aFinishWhenEnded;
mAudioCaptured = true;
// Block the output stream initially.
// Decoders are responsible for removing the block while they are playing
// back into the output stream.
out->mStream->GetStream()->ChangeExplicitBlockerCount(1);
if (mDecoder) {
mDecoder->AddOutputStream(out->mStream->GetStream()->AsProcessedStream(),
aFinishWhenEnded);
@ -3526,10 +3522,9 @@ void HTMLMediaElement::StartProgressTimer()
NS_ASSERTION(!mProgressTimer, "Already started progress timer.");
mProgressTimer = do_CreateInstance("@mozilla.org/timer;1");
mProgressTimer->InitWithFuncCallback(ProgressTimerCallback,
this,
PROGRESS_MS,
nsITimer::TYPE_REPEATING_SLACK);
mProgressTimer->InitWithNamedFuncCallback(
ProgressTimerCallback, this, PROGRESS_MS, nsITimer::TYPE_REPEATING_SLACK,
"HTMLMediaElement::ProgressTimerCallback");
}
void HTMLMediaElement::StartProgress()

View file

@ -14,7 +14,7 @@
#include "nsIScriptElement.h"
#include "nsTArray.h"
#include "pldhash.h"
#include "PLDHashTable.h"
#include "nsIHttpChannel.h"
#include "nsHTMLStyleSheet.h"

View file

@ -65,8 +65,7 @@ AudioCaptureStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
// HTMLMediaElement with a stream as source, or an AudioContext), a cycle
// situation occur. This can work if it's an AudioContext with at least one
// DelayNode, but the MSG will mute the whole cycle otherwise.
bool blocked = mFinished || mBlocked.GetAt(aFrom);
if (blocked || InMutedCycle() || inputCount == 0) {
if (mFinished || InMutedCycle() || inputCount == 0) {
track->Get<AudioSegment>()->AppendNullData(aTo - aFrom);
} else {
// We mix down all the tracks of all inputs, to a stereo track. Everything
@ -78,8 +77,8 @@ AudioCaptureStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
StreamBuffer::TrackIter tracks(s->GetStreamBuffer(), MediaSegment::AUDIO);
while (!tracks.IsEnded()) {
AudioSegment* inputSegment = tracks->Get<AudioSegment>();
StreamTime inputStart = s->GraphTimeToStreamTime(aFrom);
StreamTime inputEnd = s->GraphTimeToStreamTime(aTo);
StreamTime inputStart = s->GraphTimeToStreamTimeWithBlocking(aFrom);
StreamTime inputEnd = s->GraphTimeToStreamTimeWithBlocking(aTo);
AudioSegment toMix;
toMix.AppendSlice(*inputSegment, inputStart, inputEnd);
// Care for streams blocked in the [aTo, aFrom] range.
@ -95,7 +94,7 @@ AudioCaptureStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
}
// Regardless of the status of the input tracks, we go foward.
mBuffer.AdvanceKnownTracksTime(GraphTimeToStreamTime((aTo)));
mBuffer.AdvanceKnownTracksTime(GraphTimeToStreamTimeWithBlocking((aTo)));
}
void

View file

@ -299,8 +299,6 @@ ThreadedDriver::RunThread()
(long)mIterationStart, (long)mIterationEnd,
(long)stateComputedTime, (long)nextStateComputedTime));
mGraphImpl->mFlushSourcesNow = mGraphImpl->mFlushSourcesOnNextIteration;
mGraphImpl->mFlushSourcesOnNextIteration = false;
stillProcessing = mGraphImpl->OneIteration(nextStateComputedTime);
if (mNextDriver && stillProcessing) {
@ -967,7 +965,6 @@ AudioCallbackDriver::DeviceChangedCallback() {
STREAM_LOG(LogLevel::Error, ("Switching to SystemClockDriver during output switch"));
mSelfReference.Take(this);
mCallbackReceivedWhileSwitching = 0;
mGraphImpl->mFlushSourcesOnNextIteration = true;
mNextDriver = new SystemClockDriver(GraphImpl());
mNextDriver->SetGraphTime(this, mIterationStart, mIterationEnd);
mGraphImpl->SetCurrentDriver(mNextDriver);

View file

@ -817,7 +817,7 @@ public:
msg);
trackunion->GetStream()->AsProcessedStream()->SetAutofinish(true);
nsRefPtr<MediaInputPort> port = trackunion->GetStream()->AsProcessedStream()->
AllocateInputPort(stream, MediaInputPort::FLAG_BLOCK_OUTPUT);
AllocateInputPort(stream);
trackunion->mSourceStream = stream;
trackunion->mPort = port.forget();
// Log the relationship between SourceMediaStream and TrackUnion stream

View file

@ -407,7 +407,7 @@ public:
MOZ_ASSERT(NS_IsMainThread());
NS_ENSURE_TRUE(mTrackUnionStream, NS_ERROR_FAILURE);
mTrackUnionStream->ChangeExplicitBlockerCount(1);
mTrackUnionStream->Suspend();
return NS_OK;
}
@ -418,7 +418,7 @@ public:
MOZ_ASSERT(NS_IsMainThread());
NS_ENSURE_TRUE(mTrackUnionStream, NS_ERROR_FAILURE);
mTrackUnionStream->ChangeExplicitBlockerCount(-1);
mTrackUnionStream->Resume();
return NS_OK;
}
@ -788,13 +788,11 @@ MediaRecorder::MediaRecorder(AudioNode& aSrcAudioNode,
AudioNodeStream::Flags flags =
AudioNodeStream::EXTERNAL_OUTPUT |
AudioNodeStream::NEED_MAIN_THREAD_FINISHED;
mPipeStream = AudioNodeStream::Create(ctx->Graph(), engine, flags);
mPipeStream = AudioNodeStream::Create(ctx, engine, flags);
AudioNodeStream* ns = aSrcAudioNode.GetStream();
if (ns) {
mInputPort = mPipeStream->AllocateInputPort(aSrcAudioNode.GetStream(),
0,
0,
aSrcOutput);
0, aSrcOutput);
}
}
mAudioNode = &aSrcAudioNode;

File diff not shown because of its large size.

View file

@ -17,7 +17,6 @@
#include "nsTArray.h"
#include "nsIRunnable.h"
#include "StreamBuffer.h"
#include "TimeVarying.h"
#include "VideoFrameContainer.h"
#include "VideoSegment.h"
#include "MainThreadUtils.h"
@ -102,18 +101,6 @@ protected:
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaStreamListener)
enum Consumption {
CONSUMED,
NOT_CONSUMED
};
/**
* Notify that the stream is hooked up and we'd like to start or stop receiving
* data on it. Only fires on SourceMediaStreams.
* The initial state is assumed to be NOT_CONSUMED.
*/
virtual void NotifyConsumptionChanged(MediaStreamGraph* aGraph, Consumption aConsuming) {}
/**
* When a SourceMediaStream has pulling enabled, and the MediaStreamGraph
* control loop is ready to pull, this gets called. A NotifyPull implementation
@ -319,7 +306,6 @@ public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaStream)
explicit MediaStream(DOMMediaStream* aWrapper);
virtual dom::AudioContext::AudioContextId AudioContextId() const { return 0; }
protected:
// Protected destructor, to discourage deletion outside of Release():
@ -363,11 +349,13 @@ public:
// Only the first enabled video track is played.
virtual void AddVideoOutput(VideoFrameContainer* aContainer);
virtual void RemoveVideoOutput(VideoFrameContainer* aContainer);
// Explicitly block. Useful for example if a media element is pausing
// and we need to stop its stream emitting its buffered data.
virtual void ChangeExplicitBlockerCount(int32_t aDelta);
void BlockStreamIfNeeded();
void UnblockStreamIfNeeded();
// Explicitly suspend. Useful for example if a media element is pausing
// and we need to stop its stream emitting its buffered data. As soon as the
// Suspend message reaches the graph, the stream stops processing. It
// ignores its inputs and produces silence/no video until Resumed. Its
// current time does not advance.
virtual void Suspend();
virtual void Resume();
// Events will be dispatched by calling methods of aListener.
virtual void AddListener(MediaStreamListener* aListener);
virtual void RemoveListener(MediaStreamListener* aListener);
@ -466,40 +454,10 @@ public:
aContainer->ClearFutureFrames();
mVideoOutputs.RemoveElement(aContainer);
}
void ChangeExplicitBlockerCountImpl(GraphTime aTime, int32_t aDelta)
{
mExplicitBlockerCount.SetAtAndAfter(aTime, mExplicitBlockerCount.GetAt(aTime) + aDelta);
}
void BlockStreamIfNeededImpl(GraphTime aTime)
{
bool blocked = mExplicitBlockerCount.GetAt(aTime) > 0;
if (blocked) {
return;
}
ChangeExplicitBlockerCountImpl(aTime, 1);
}
void UnblockStreamIfNeededImpl(GraphTime aTime)
{
bool blocked = mExplicitBlockerCount.GetAt(aTime) > 0;
if (!blocked) {
return;
}
ChangeExplicitBlockerCountImpl(aTime, -1);
}
void AddListenerImpl(already_AddRefed<MediaStreamListener> aListener);
void RemoveListenerImpl(MediaStreamListener* aListener);
void RemoveAllListenersImpl();
virtual void SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled);
/**
* Returns true when this stream requires the contents of its inputs even if
* its own outputs are not being consumed. This is used to signal inputs to
* this stream that they are being consumed; when they're not being consumed,
* we make some optimizations.
*/
virtual bool IsIntrinsicallyConsumed() const
{
return !mAudioOutputs.IsEmpty() || !mVideoOutputs.IsEmpty();
}
void AddConsumer(MediaInputPort* aPort)
{
@ -543,19 +501,22 @@ public:
* to ensure we know exactly how much time this stream will be blocked during
* the interval.
*/
StreamTime GraphTimeToStreamTimeWithBlocking(GraphTime aTime);
/**
* Convert graph time to stream time. This assumes there is no blocking time
* to take account of, which is always true except between a stream
* having its blocking time calculated in UpdateGraph and its blocking time
* taken account of in UpdateCurrentTimeForStreams.
*/
StreamTime GraphTimeToStreamTime(GraphTime aTime);
/**
* Convert graph time to stream time. aTime can be > mStateComputedTime,
* in which case we optimistically assume the stream will not be blocked
* after mStateComputedTime.
*/
StreamTime GraphTimeToStreamTimeOptimistic(GraphTime aTime);
/**
* Convert stream time to graph time. The result can be > mStateComputedTime,
* in which case we did the conversion optimistically assuming the stream
* will not be blocked after mStateComputedTime.
* Convert stream time to graph time. This assumes there is no blocking time
* to take account of, which is always true except between a stream
* having its blocking time calculated in UpdateGraph and its blocking time
* taken account of in UpdateCurrentTimeForStreams.
*/
GraphTime StreamTimeToGraphTime(StreamTime aTime);
bool IsFinishedOnGraphThread() { return mFinished; }
void FinishOnGraphThread();
@ -583,12 +544,18 @@ public:
void SetAudioChannelType(dom::AudioChannel aType) { mAudioChannelType = aType; }
dom::AudioChannel AudioChannelType() const { return mAudioChannelType; }
bool IsSuspended() { return mSuspendedCount > 0; }
void IncrementSuspendCount() { ++mSuspendedCount; }
void DecrementSuspendCount()
{
NS_ASSERTION(mSuspendedCount > 0, "Suspend count underrun");
--mSuspendedCount;
}
protected:
void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime, GraphTime aBlockedTime)
{
mBufferStartTime += aBlockedTime;
mExplicitBlockerCount.AdvanceCurrentTime(aCurrentTime);
mBuffer.ForgetUpTo(aCurrentTime - mBufferStartTime);
}
@ -636,20 +603,15 @@ protected:
// We record the last played video frame to avoid playing the frame again
// with a different frame id.
VideoFrame mLastPlayedVideoFrame;
// The number of times this stream has been explicitly blocked by the control
// API, minus the number of times it has been explicitly unblocked.
TimeVarying<GraphTime,uint32_t,0> mExplicitBlockerCount;
nsTArray<nsRefPtr<MediaStreamListener> > mListeners;
nsTArray<MainThreadMediaStreamListener*> mMainThreadListeners;
nsTArray<TrackID> mDisabledTrackIDs;
// Precomputed blocking status (over GraphTime).
// This is only valid between the graph's mCurrentTime and
// mStateComputedTime. The stream is considered to have
// not been blocked before mCurrentTime (its mBufferStartTime is increased
// as necessary to account for that time instead) --- this avoids us having to
// record the entire history of the stream's blocking-ness in mBlocked.
TimeVarying<GraphTime,bool,5> mBlocked;
// GraphTime at which this stream starts blocking.
// This is only valid up to mStateComputedTime. The stream is considered to
// have not been blocked before mCurrentTime (its mBufferStartTime is increased
// as necessary to account for that time instead).
GraphTime mStartBlocking;
// MediaInputPorts to which this is connected
nsTArray<MediaInputPort*> mConsumers;
@ -670,6 +632,12 @@ protected:
};
nsTArray<AudioOutputStream> mAudioOutputStreams;
/**
* Number of outstanding suspend operations on this stream. Stream is
* suspended when this is > 0.
*/
int32_t mSuspendedCount;
/**
* When true, this means the stream will be finished once all
* buffered data has been consumed.
@ -697,16 +665,6 @@ protected:
*/
bool mNotifiedHasCurrentData;
// True if the stream is being consumed (i.e. has track data being played,
// or is feeding into some stream that is being consumed).
bool mIsConsumed;
// Temporary data for computing blocking status of streams
// True if we've added this stream to the set of streams we're computing
// blocking for.
bool mInBlockingSet;
// True if this stream should be blocked in this phase.
bool mBlockInThisPhase;
// This state is only used on the main thread.
DOMMediaStream* mWrapper;
// Main-thread views of state
@ -732,7 +690,6 @@ class SourceMediaStream : public MediaStream
public:
explicit SourceMediaStream(DOMMediaStream* aWrapper) :
MediaStream(aWrapper),
mLastConsumptionState(MediaStreamListener::NOT_CONSUMED),
mMutex("mozilla::media::SourceMediaStream"),
mUpdateKnownTracksTime(0),
mPullEnabled(false),
@ -858,17 +815,6 @@ public:
*/
void EndAllTrackAndFinish();
/**
* Note: Only call from Media Graph thread (eg NotifyPull)
*
* Returns amount of time (data) that is currently buffered in the track,
* assuming playout via PlayAudio or via a TrackUnion - note that
* NotifyQueuedTrackChanges() on a SourceMediaStream will occur without
* any "extra" buffering, but NotifyQueued TrackChanges() on a TrackUnion
* will be buffered.
*/
StreamTime GetBufferedTicks(TrackID aID);
void RegisterForAudioMixing();
// XXX need a Reset API
@ -940,9 +886,6 @@ protected:
void NotifyDirectConsumers(TrackData *aTrack,
MediaSegment *aSegment);
// Media stream graph thread only
MediaStreamListener::Consumption mLastConsumptionState;
// This must be acquired *before* MediaStreamGraphImpl's lock, if they are
// held together.
Mutex mMutex;
@ -978,11 +921,9 @@ class MediaInputPort final
private:
// Do not call this constructor directly. Instead call aDest->AllocateInputPort.
MediaInputPort(MediaStream* aSource, ProcessedMediaStream* aDest,
uint32_t aFlags, uint16_t aInputNumber,
uint16_t aOutputNumber)
uint16_t aInputNumber, uint16_t aOutputNumber)
: mSource(aSource)
, mDest(aDest)
, mFlags(aFlags)
, mInputNumber(aInputNumber)
, mOutputNumber(aOutputNumber)
, mGraph(nullptr)
@ -999,20 +940,6 @@ private:
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaInputPort)
/**
* The FLAG_BLOCK_INPUT and FLAG_BLOCK_OUTPUT flags can be used to control
* exactly how the blocking statuses of the input and output streams affect
* each other.
*/
enum {
// When set, blocking on the output stream forces blocking on the input
// stream.
FLAG_BLOCK_INPUT = 0x01,
// When set, blocking on the input stream forces blocking on the output
// stream.
FLAG_BLOCK_OUTPUT = 0x02
};
// Called on graph manager thread
// Do not call these from outside MediaStreamGraph.cpp!
void Init();
@ -1076,7 +1003,6 @@ private:
// Never modified after Init()
MediaStream* mSource;
ProcessedMediaStream* mDest;
uint32_t mFlags;
// The input and output numbers are optional, and are currently only used by
// Web Audio.
const uint16_t mInputNumber;
@ -1104,7 +1030,6 @@ public:
* This stream can be removed by calling MediaInputPort::Remove().
*/
already_AddRefed<MediaInputPort> AllocateInputPort(MediaStream* aStream,
uint32_t aFlags = 0,
uint16_t aInputNumber = 0,
uint16_t aOutputNumber = 0);
/**
@ -1253,25 +1178,31 @@ public:
*/
ProcessedMediaStream* CreateAudioCaptureStream(DOMMediaStream* aWrapper);
enum {
ADD_STREAM_SUSPENDED = 0x01
};
/**
* Add a new stream to the graph. Main thread.
*/
void AddStream(MediaStream* aStream);
void AddStream(MediaStream* aStream, uint32_t aFlags = 0);
/* From the main thread, ask the MSG to send back an event when the graph
* thread is running, and audio is being processed. */
void NotifyWhenGraphStarted(AudioNodeStream* aNodeStream);
/* From the main thread, suspend, resume or close an AudioContext.
* aNodeStream is the stream of the DestinationNode of the AudioContext.
* aStreams are the streams of all the AudioNodes of the AudioContext that
* need to be suspended or resumed. This can be empty if this is a second
* consecutive suspend call and all the nodes are already suspended.
*
* This can possibly pause the graph thread, releasing system resources, if
* all streams have been suspended/closed.
*
* When the operation is complete, aPromise is resolved.
*/
void ApplyAudioContextOperation(AudioNodeStream* aNodeStream,
void ApplyAudioContextOperation(MediaStream* aDestinationStream,
const nsTArray<MediaStream*>& aStreams,
dom::AudioContextOperation aState,
void * aPromise);
void* aPromise);
bool IsNonRealtime() const;
/**

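The MediaStreamGraph changes above replace the per-stream blocking machinery (explicit blocker counts and the time-varying mBlocked value) with a simple per-stream suspend count: nested Suspend()/Resume() calls must balance, and a stream counts as suspended while the count is positive. A minimal sketch of that counting pattern, not the real MediaStream class:

// Sketch of the suspend-count idea described above.
#include <cassert>
#include <cstdint>

class SuspendableStream {
public:
  void Suspend() { ++mSuspendedCount; }
  void Resume() {
    assert(mSuspendedCount > 0 && "Suspend count underrun");
    --mSuspendedCount;
  }
  bool IsSuspended() const { return mSuspendedCount > 0; }

private:
  int32_t mSuspendedCount = 0;
};

int main() {
  SuspendableStream s;
  s.Suspend();
  s.Suspend();               // nested suspend
  s.Resume();
  assert(s.IsSuspended());   // still suspended: the two suspends must balance
  s.Resume();
  assert(!s.IsSuspended());
  return 0;
}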
View file

@ -167,12 +167,17 @@ public:
}
#endif
}
/*
* This does the actual iteration: Message processing, MediaStream ordering,
* blocking computation and processing.
*/
void DoIteration();
void MaybeProduceMemoryReport();
/**
* Returns true if this MediaStreamGraph should keep running
*/
bool UpdateMainThreadState();
/**
* Returns true if this MediaStreamGraph should keep running
*/
bool OneIteration(GraphTime aStateEnd);
bool Running() const
@ -216,10 +221,9 @@ public:
bool ShouldUpdateMainThread();
// The following methods are the various stages of RunThread processing.
/**
* Advance all stream state to the new current time.
* Advance all stream state to mStateComputedTime.
*/
void UpdateCurrentTimeForStreams(GraphTime aPrevCurrentTime,
GraphTime aNextCurrentTime);
void UpdateCurrentTimeForStreams(GraphTime aPrevCurrentTime);
/**
* Process graph message for this iteration, update stream processing order,
* and recompute stream blocking until aEndBlockingDecisions.
@ -232,9 +236,10 @@ public:
mFrontMessageQueue.SwapElements(mBackMessageQueue);
}
/**
* Do all the processing and play the audio and video, ffrom aFrom to aTo.
* Do all the processing and play the audio and video, from
* mProcessedTime to mStateComputedTime.
*/
void Process(GraphTime aFrom, GraphTime aTo);
void Process();
/**
* Update the consumption state of aStream to reflect whether its data
* is needed or not.
@ -250,17 +255,6 @@ public:
* Update "have enough data" flags in aStream.
*/
void UpdateBufferSufficiencyState(SourceMediaStream* aStream);
/**
* Mark aStream and all its inputs (recursively) as consumed.
*/
static void MarkConsumed(MediaStream* aStream);
/**
* Given the Id of an AudioContext, return the set of all MediaStreams that
* are part of this context.
*/
void StreamSetForAudioContext(dom::AudioContext::AudioContextId aAudioContextId,
mozilla::LinkedList<MediaStream>& aStreamSet);
/**
* Called when a suspend/resume/close operation has been completed, on the
@ -274,28 +268,28 @@ public:
* Apply and AudioContext operation (suspend/resume/closed), on the graph
* thread.
*/
void ApplyAudioContextOperationImpl(AudioNodeStream* aStream,
void ApplyAudioContextOperationImpl(MediaStream* aDestinationStream,
const nsTArray<MediaStream*>& aStreams,
dom::AudioContextOperation aOperation,
void* aPromise);
/**
* Increment suspend count on aStream and move it to mSuspendedStreams if
* necessary.
*/
void IncrementSuspendCount(MediaStream* aStream);
/**
* Increment suspend count on aStream and move it to mStreams if
* necessary.
*/
void DecrementSuspendCount(MediaStream* aStream);
/*
* Move streams from the mStreams to mSuspendedStream if suspending/closing an
* AudioContext, or the inverse when resuming an AudioContext.
*/
void MoveStreams(dom::AudioContextOperation aAudioContextOperation,
mozilla::LinkedList<MediaStream>& aStreamSet);
/*
* Reset some state about the streams before suspending them, or resuming
* them.
*/
void ResetVisitedStreamState();
/*
* True if a stream is suspended, that is, is not in mStreams, but in
* mSuspendedStream.
*/
bool StreamSuspended(MediaStream* aStream);
void SuspendOrResumeStreams(dom::AudioContextOperation aAudioContextOperation,
const nsTArray<MediaStream*>& aStreamSet);
/**
* Sort mStreams so that every stream not in a cycle is after any streams
@ -303,88 +297,33 @@ public:
* Also sets mIsConsumed on every stream.
*/
void UpdateStreamOrder();
/**
* Compute the blocking states of streams from mStateComputedTime
* until the desired future time aEndBlockingDecisions.
* Updates mStateComputedTime and sets MediaStream::mBlocked
* for all streams.
*/
void RecomputeBlocking(GraphTime aEndBlockingDecisions);
// The following methods are used to help RecomputeBlocking.
/**
* If aStream isn't already in aStreams, add it and recursively call
* AddBlockingRelatedStreamsToSet on all the streams whose blocking
* status could depend on or affect the state of aStream.
*/
void AddBlockingRelatedStreamsToSet(nsTArray<MediaStream*>* aStreams,
MediaStream* aStream);
/**
* Mark a stream blocked at time aTime. If this results in decisions that need
* to be revisited at some point in the future, *aEnd will be reduced to the
* first time in the future to recompute those decisions.
*/
void MarkStreamBlocking(MediaStream* aStream);
/**
* Recompute blocking for the streams in aStreams for the interval starting at aTime.
* If this results in decisions that need to be revisited at some point
* in the future, *aEnd will be reduced to the first time in the future to
* recompute those decisions.
*/
void RecomputeBlockingAt(const nsTArray<MediaStream*>& aStreams,
GraphTime aTime, GraphTime aEndBlockingDecisions,
GraphTime* aEnd);
/**
* Returns smallest value of t such that t is a multiple of
* WEBAUDIO_BLOCK_SIZE and t > aTime.
*/
GraphTime RoundUpToNextAudioBlock(GraphTime aTime);
/**
* Produce data for all streams >= aStreamIndex for the given time interval.
* Produce data for all streams >= aStreamIndex for the current time interval.
* Advances block by block, each iteration producing data for all streams
* for a single block.
* This is called whenever we have an AudioNodeStream in the graph.
*/
void ProduceDataForStreamsBlockByBlock(uint32_t aStreamIndex,
TrackRate aSampleRate,
GraphTime aFrom,
GraphTime aTo);
TrackRate aSampleRate);
/**
* Returns true if aStream will underrun at aTime for its own playback.
* aEndBlockingDecisions is when we plan to stop making blocking decisions.
* *aEnd will be reduced to the first time in the future to recompute these
* decisions.
* If aStream will underrun between aTime, and aEndBlockingDecisions, returns
* the time at which the underrun will start. Otherwise return
* aEndBlockingDecisions.
*/
bool WillUnderrun(MediaStream* aStream, GraphTime aTime,
GraphTime aEndBlockingDecisions, GraphTime* aEnd);
GraphTime WillUnderrun(MediaStream* aStream, GraphTime aEndBlockingDecisions);
/**
* Given a graph time aTime, convert it to a stream time taking into
* account the time during which aStream is scheduled to be blocked.
*/
StreamTime GraphTimeToStreamTime(MediaStream* aStream, GraphTime aTime);
/**
* Given a graph time aTime, convert it to a stream time taking into
* account the time during which aStream is scheduled to be blocked, and
* when we don't know whether it's blocked or not, we assume it's not blocked.
*/
StreamTime GraphTimeToStreamTimeOptimistic(MediaStream* aStream, GraphTime aTime);
enum
{
INCLUDE_TRAILING_BLOCKED_INTERVAL = 0x01
};
StreamTime GraphTimeToStreamTimeWithBlocking(MediaStream* aStream, GraphTime aTime);
/**
* Given a stream time aTime, convert it to a graph time taking into
* account the time during which aStream is scheduled to be blocked.
* aTime must be <= mStateComputedTime since blocking decisions
* are only known up to that point.
* If aTime is exactly at the start of a blocked interval, then the blocked
* interval is included in the time returned if and only if
* aFlags includes INCLUDE_TRAILING_BLOCKED_INTERVAL.
*/
GraphTime StreamTimeToGraphTime(MediaStream* aStream, StreamTime aTime,
uint32_t aFlags = 0);
/**
* Call NotifyHaveCurrentData on aStream's listeners.
*/
@ -393,12 +332,12 @@ public:
* If aStream needs an audio stream but doesn't have one, create it.
* If aStream doesn't need an audio stream but has one, destroy it.
*/
void CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTime, MediaStream* aStream);
void CreateOrDestroyAudioStreams(MediaStream* aStream);
/**
* Queue audio (mix of stream audio and silence for blocked intervals)
* to the audio output stream. Returns the number of frames played.
*/
StreamTime PlayAudio(MediaStream* aStream, GraphTime aFrom, GraphTime aTo);
StreamTime PlayAudio(MediaStream* aStream);
/**
* Set the correct current video frame for stream aStream.
*/
@ -605,6 +544,10 @@ public:
* and are therefore not doing any processing.
*/
nsTArray<MediaStream*> mSuspendedStreams;
/**
* Suspended AudioContext IDs
*/
nsTHashtable<nsUint64HashKey> mSuspendedContexts;
/**
* Streams from mFirstCycleBreaker to the end of mStreams produce output
* before they receive input. They correspond to DelayNodes that are in
@ -730,13 +673,6 @@ public:
*/
bool mPostedRunInStableStateEvent;
/**
* Used to flush any accumulated data when the output streams
* may have stalled (on Mac after an output device change)
*/
bool mFlushSourcesNow;
bool mFlushSourcesOnNextIteration;
// Main thread only
/**
@ -786,10 +722,6 @@ public:
private:
virtual ~MediaStreamGraphImpl();
void StreamNotifyOutput(MediaStream* aStream);
void StreamReadyToFinish(MediaStream* aStream);
MOZ_DEFINE_MALLOC_SIZE_OF(MallocSizeOf)
/**

View file

@ -174,7 +174,9 @@ MediaTimer::ArmTimer(const TimeStamp& aTarget, const TimeStamp& aNow)
unsigned long delay = std::ceil((aTarget - aNow).ToMilliseconds());
TIMER_LOG("MediaTimer::ArmTimer delay=%lu", delay);
mCurrentTimerTarget = aTarget;
nsresult rv = mTimer->InitWithFuncCallback(&TimerCallback, this, delay, nsITimer::TYPE_ONE_SHOT);
nsresult rv = mTimer->InitWithNamedFuncCallback(&TimerCallback, this, delay,
nsITimer::TYPE_ONE_SHOT,
"MediaTimer::TimerCallback");
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
(void) rv;
}

View file

@ -61,7 +61,6 @@ public:
* the same track across StreamBuffers. A StreamBuffer should never have
* two tracks with the same ID (even if they don't overlap in time).
* TODO Tracks can also be enabled and disabled over time.
* TODO Add TimeVarying<StreamTime,bool> mEnabled.
* Takes ownership of aSegment.
*/
class Track final

View file

@ -1,237 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MOZILLA_TIMEVARYING_H_
#define MOZILLA_TIMEVARYING_H_
#include "nsTArray.h"
namespace mozilla {
/**
* This is just for CTOR/DTOR tracking. We can't put this in
* TimeVarying directly because the different template instances have
* different sizes and that confuses things.
*/
class TimeVaryingBase {
protected:
TimeVaryingBase()
{
MOZ_COUNT_CTOR(TimeVaryingBase);
}
~TimeVaryingBase()
{
MOZ_COUNT_DTOR(TimeVaryingBase);
}
};
/**
* Objects of this class represent values that can change over time ---
* a mathematical function of time.
* Time is the type of time values, T is the value that changes over time.
* There are a finite set of "change times"; at each change time, the function
* instantly changes to a new value. ReservedChanges should be set to the
* expected number of change events that the object is likely to contain.
* This value should be 0 for all consumers unless you know that a higher value
* would be a benefit.
* There is also a "current time" which must always advance (not go backward).
* The function is constant for all times less than the current time.
* When the current time is advanced, the value of the function at the new
* current time replaces the values for all previous times.
*
* The implementation records a mCurrent (the value at the current time)
* and an array of "change times" (greater than the current time) and the
* new value for each change time. This is a simple but dumb implementation.
* We maintain the invariant that each change entry in the array must have
* a different value to the value in the previous change entry (or, for
* the first change entry, mCurrent).
*/
template <typename Time, typename T, uint32_t ReservedChanges>
class TimeVarying : public TimeVaryingBase {
public:
explicit TimeVarying(const T& aInitial) : mCurrent(aInitial) {}
/**
* This constructor can only be called if mCurrent has a no-argument
* constructor.
*/
TimeVarying() : mCurrent() {}
/**
* Sets the value for all times >= aTime to aValue.
*/
void SetAtAndAfter(Time aTime, const T& aValue)
{
for (int32_t i = mChanges.Length() - 1; i >= 0; --i) {
NS_ASSERTION(i == int32_t(mChanges.Length() - 1),
"Always considering last element of array");
if (aTime > mChanges[i].mTime) {
if (mChanges[i].mValue != aValue) {
mChanges.AppendElement(Entry(aTime, aValue));
}
return;
}
if (aTime == mChanges[i].mTime) {
if ((i > 0 ? mChanges[i - 1].mValue : mCurrent) == aValue) {
mChanges.RemoveElementAt(i);
return;
}
mChanges[i].mValue = aValue;
return;
}
mChanges.RemoveElementAt(i);
}
if (mCurrent == aValue) {
return;
}
mChanges.InsertElementAt(0, Entry(aTime, aValue));
}
/**
* Returns the final value of the function. If aTime is non-null,
* sets aTime to the time at which the function changes to that final value.
* If there are no changes after the current time, returns INT64_MIN in aTime.
*/
const T& GetLast(Time* aTime = nullptr) const
{
if (mChanges.IsEmpty()) {
if (aTime) {
*aTime = INT64_MIN;
}
return mCurrent;
}
if (aTime) {
*aTime = mChanges[mChanges.Length() - 1].mTime;
}
return mChanges[mChanges.Length() - 1].mValue;
}
/**
* Returns the value of the function just before time aTime.
*/
const T& GetBefore(Time aTime) const
{
if (mChanges.IsEmpty() || aTime <= mChanges[0].mTime) {
return mCurrent;
}
int32_t changesLength = mChanges.Length();
if (mChanges[changesLength - 1].mTime < aTime) {
return mChanges[changesLength - 1].mValue;
}
for (uint32_t i = 1; ; ++i) {
if (aTime <= mChanges[i].mTime) {
NS_ASSERTION(mChanges[i].mValue != mChanges[i - 1].mValue,
"Only changed values appear in array");
return mChanges[i - 1].mValue;
}
}
}
/**
* Returns the value of the function at time aTime.
* If aEnd is non-null, sets *aEnd to the time at which the function will
* change from the returned value to a new value, or INT64_MAX if that
* never happens.
* If aStart is non-null, sets *aStart to the time at which the function
* changed to the returned value, or INT64_MIN if that happened at or
* before the current time.
*
* Currently uses a linear search, but could use a binary search.
*/
const T& GetAt(Time aTime, Time* aEnd = nullptr, Time* aStart = nullptr) const
{
if (mChanges.IsEmpty() || aTime < mChanges[0].mTime) {
if (aStart) {
*aStart = INT64_MIN;
}
if (aEnd) {
*aEnd = mChanges.IsEmpty() ? INT64_MAX : mChanges[0].mTime;
}
return mCurrent;
}
int32_t changesLength = mChanges.Length();
if (mChanges[changesLength - 1].mTime <= aTime) {
if (aEnd) {
*aEnd = INT64_MAX;
}
if (aStart) {
*aStart = mChanges[changesLength - 1].mTime;
}
return mChanges[changesLength - 1].mValue;
}
for (uint32_t i = 1; ; ++i) {
if (aTime < mChanges[i].mTime) {
if (aEnd) {
*aEnd = mChanges[i].mTime;
}
if (aStart) {
*aStart = mChanges[i - 1].mTime;
}
NS_ASSERTION(mChanges[i].mValue != mChanges[i - 1].mValue,
"Only changed values appear in array");
return mChanges[i - 1].mValue;
}
}
}
/**
* Advance the current time to aTime.
*/
void AdvanceCurrentTime(Time aTime)
{
for (uint32_t i = 0; i < mChanges.Length(); ++i) {
if (aTime < mChanges[i].mTime) {
mChanges.RemoveElementsAt(0, i);
return;
}
mCurrent = mChanges[i].mValue;
}
mChanges.Clear();
}
/**
* Make all currently pending changes happen aDelta later than their
* current change times.
*/
void InsertTimeAtStart(Time aDelta)
{
for (uint32_t i = 0; i < mChanges.Length(); ++i) {
mChanges[i].mTime += aDelta;
}
}
/**
* Replace the values of this function at aTimeOffset and later with the
* values of aOther taken from zero, so if aOther is V at time T >= 0
* then this function will be V at time T + aTimeOffset. aOther's current
* time must be >= 0.
*/
void Append(const TimeVarying& aOther, Time aTimeOffset)
{
NS_ASSERTION(aOther.mChanges.IsEmpty() || aOther.mChanges[0].mTime >= 0,
"Negative time not allowed here");
NS_ASSERTION(&aOther != this, "Can't self-append");
SetAtAndAfter(aTimeOffset, aOther.mCurrent);
for (uint32_t i = 0; i < aOther.mChanges.Length(); ++i) {
const Entry& e = aOther.mChanges[i];
SetAtAndAfter(aTimeOffset + e.mTime, e.mValue);
}
}
size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
{
return mChanges.ShallowSizeOfExcludingThis(aMallocSizeOf);
}
private:
struct Entry {
Entry(Time aTime, const T& aValue) : mTime(aTime), mValue(aValue) {}
// The time at which the value changes to mValue
Time mTime;
T mValue;
};
nsAutoTArray<Entry, ReservedChanges> mChanges;
T mCurrent;
};
} // namespace mozilla
#endif /* MOZILLA_TIMEVARYING_H_ */

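TimeVarying, deleted above together with the blocking code that used it, modelled a step function of time. As a worked example of the documented semantics, assuming the class exactly as shown in the removed header, the following assertions would hold:

#include "TimeVarying.h"  // the removed header shown above
#include <cassert>
#include <cstdint>

void Example() {
  mozilla::TimeVarying<int64_t, bool, 0> blocked(false);
  blocked.SetAtAndAfter(10, true);    // value becomes true for every t >= 10
  blocked.SetAtAndAfter(20, false);   // and false again for every t >= 20
  assert(!blocked.GetAt(5));
  assert(blocked.GetAt(15));
  assert(!blocked.GetAt(25));
  blocked.AdvanceCurrentTime(15);     // history before t = 15 is collapsed
  assert(blocked.GetAt(12));          // earlier times now report the value at t = 15
}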
View file

@ -138,7 +138,7 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
// so we're finished now.
FinishOnGraphThread();
} else {
mBuffer.AdvanceKnownTracksTime(GraphTimeToStreamTime(aTo));
mBuffer.AdvanceKnownTracksTime(GraphTimeToStreamTimeWithBlocking(aTo));
}
if (allHaveCurrentData) {
// We can make progress if we're not blocked
@ -183,7 +183,7 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
// Round up the track start time so the track, if anything, starts a
// little later than the true time. This means we'll have enough
// samples in our input stream to go just beyond the destination time.
StreamTime outputStart = GraphTimeToStreamTime(aFrom);
StreamTime outputStart = GraphTimeToStreamTimeWithBlocking(aFrom);
nsAutoPtr<MediaSegment> segment;
segment = aTrack->GetSegment()->CreateEmptyClone();
@ -244,7 +244,7 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
for (GraphTime t = aFrom; t < aTo; t = next) {
MediaInputPort::InputInterval interval = map->mInputPort->GetNextInputInterval(t);
interval.mEnd = std::min(interval.mEnd, aTo);
StreamTime inputEnd = source->GraphTimeToStreamTime(interval.mEnd);
StreamTime inputEnd = source->GraphTimeToStreamTimeWithBlocking(interval.mEnd);
StreamTime inputTrackEndPoint = STREAM_TIME_MAX;
if (aInputTrack->IsEnded() &&
@ -269,12 +269,12 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
} else if (InMutedCycle()) {
segment->AppendNullData(ticks);
} else {
if (GraphImpl()->StreamSuspended(source)) {
if (source->IsSuspended()) {
segment->AppendNullData(aTo - aFrom);
} else {
MOZ_ASSERT(outputTrack->GetEnd() == GraphTimeToStreamTime(interval.mStart),
MOZ_ASSERT(outputTrack->GetEnd() == GraphTimeToStreamTimeWithBlocking(interval.mStart),
"Samples missing");
StreamTime inputStart = source->GraphTimeToStreamTime(interval.mStart);
StreamTime inputStart = source->GraphTimeToStreamTimeWithBlocking(interval.mStart);
segment->AppendSlice(*aInputTrack->GetSegment(),
std::min(inputTrackEndPoint, inputStart),
std::min(inputTrackEndPoint, inputEnd));

View file

@ -35,7 +35,8 @@ public:
{
MutexAutoLock lock(mMutex);
if (mStream) {
mLastOutputTime = mStream->StreamTimeToMicroseconds(mStream->GraphTimeToStreamTime(aCurrentTime));
mLastOutputTime = mStream->StreamTimeToMicroseconds(
mStream->GraphTimeToStreamTime(aCurrentTime));
}
}
@ -86,14 +87,21 @@ private:
};
static void
UpdateStreamBlocking(MediaStream* aStream, bool aBlocking)
UpdateStreamSuspended(MediaStream* aStream, bool aBlocking)
{
int32_t delta = aBlocking ? 1 : -1;
if (NS_IsMainThread()) {
aStream->ChangeExplicitBlockerCount(delta);
if (aBlocking) {
aStream->Suspend();
} else {
aStream->Resume();
}
} else {
nsCOMPtr<nsIRunnable> r = NS_NewRunnableMethodWithArg<int32_t>(
aStream, &MediaStream::ChangeExplicitBlockerCount, delta);
nsCOMPtr<nsIRunnable> r;
if (aBlocking) {
r = NS_NewRunnableMethod(aStream, &MediaStream::Suspend);
} else {
r = NS_NewRunnableMethod(aStream, &MediaStream::Resume);
}
AbstractThread::MainThread()->Dispatch(r.forget());
}
}
@ -189,7 +197,7 @@ DecodedStreamData::SetPlaying(bool aPlaying)
{
if (mPlaying != aPlaying) {
mPlaying = aPlaying;
UpdateStreamBlocking(mStream, !mPlaying);
UpdateStreamSuspended(mStream, !mPlaying);
}
}
@ -216,10 +224,7 @@ OutputStreamData::Connect(MediaStream* aStream)
MOZ_ASSERT(!mPort, "Already connected?");
MOZ_ASSERT(!mStream->IsDestroyed(), "Can't connect a destroyed stream.");
mPort = mStream->AllocateInputPort(aStream, 0);
// Unblock the output stream now. The input stream is responsible for
// controlling blocking from now on.
mStream->ChangeExplicitBlockerCount(-1);
mPort = mStream->AllocateInputPort(aStream);
}
bool
@ -239,9 +244,6 @@ OutputStreamData::Disconnect()
mPort->Destroy();
mPort = nullptr;
}
// Block the stream again. It will be unlocked when connecting
// to the input stream.
mStream->ChangeExplicitBlockerCount(1);
return true;
}

View file

@ -144,7 +144,6 @@ EXPORTS += [
'StreamBuffer.h',
'ThreadPoolCOMListener.h',
'TimeUnits.h',
'TimeVarying.h',
'TrackUnionStream.h',
'VideoFrameContainer.h',
'VideoSegment.h',

View file

@ -86,7 +86,7 @@ AnalyserNode::AnalyserNode(AudioContext* aContext)
, mMaxDecibels(-30.)
, mSmoothingTimeConstant(.8)
{
mStream = AudioNodeStream::Create(aContext->Graph(),
mStream = AudioNodeStream::Create(aContext,
new AnalyserNodeEngine(this),
AudioNodeStream::NO_STREAM_FLAGS);


@ -575,7 +575,7 @@ AudioBufferSourceNode::AudioBufferSourceNode(AudioContext* aContext)
, mStartCalled(false)
{
AudioBufferSourceNodeEngine* engine = new AudioBufferSourceNodeEngine(this, aContext->Destination());
mStream = AudioNodeStream::Create(aContext->Graph(), engine,
mStream = AudioNodeStream::Create(aContext, engine,
AudioNodeStream::NEED_MAIN_THREAD_FINISHED);
engine->SetSourceStream(mStream);
mStream->AddMainThreadListener(this);


@ -98,11 +98,11 @@ AudioContext::AudioContext(nsPIDOMWindow* aWindow,
, mSampleRate(GetSampleRateForAudioContext(aIsOffline, aSampleRate))
, mAudioContextState(AudioContextState::Suspended)
, mNumberOfChannels(aNumberOfChannels)
, mNodeCount(0)
, mIsOffline(aIsOffline)
, mIsStarted(!aIsOffline)
, mIsShutDown(false)
, mCloseCalled(false)
, mSuspendCalled(false)
{
bool mute = aWindow->AddAudioContext(this);
@ -694,11 +694,6 @@ AudioContext::Shutdown()
}
}
AudioContextState AudioContext::State() const
{
return mAudioContextState;
}
StateChangeTask::StateChangeTask(AudioContext* aAudioContext,
void* aPromise,
AudioContextState aNewState)
@ -830,6 +825,19 @@ AudioContext::OnStateChanged(void* aPromise, AudioContextState aNewState)
mAudioContextState = aNewState;
}
nsTArray<MediaStream*>
AudioContext::GetAllStreams() const
{
nsTArray<MediaStream*> streams;
for (auto iter = mAllNodes.ConstIter(); !iter.Done(); iter.Next()) {
MediaStream* s = iter.Get()->GetKey()->GetStream();
if (s) {
streams.AppendElement(s);
}
}
return streams;
}
already_AddRefed<Promise>
AudioContext::Suspend(ErrorResult& aRv)
{
@ -858,9 +866,21 @@ AudioContext::Suspend(ErrorResult& aRv)
Destination()->Suspend();
mPromiseGripArray.AppendElement(promise);
nsTArray<MediaStream*> streams;
// If mSuspendCalled is true then we already suspended all our streams,
// so don't suspend them again (since suspend(); suspend(); resume(); should
// cancel both suspends). But we still need to do ApplyAudioContextOperation
// to ensure our new promise is resolved.
if (!mSuspendCalled) {
streams = GetAllStreams();
}
Graph()->ApplyAudioContextOperation(DestinationStream()->AsAudioNodeStream(),
streams,
AudioContextOperation::Suspend, promise);
mSuspendCalled = true;
return promise.forget();
}
@ -892,10 +912,21 @@ AudioContext::Resume(ErrorResult& aRv)
Destination()->Resume();
nsTArray<MediaStream*> streams;
// If mSuspendCalled is false then we already resumed all our streams,
// so don't resume them again (since suspend(); resume(); resume(); should
// be OK). But we still need to do ApplyAudioContextOperation
// to ensure our new promise is resolved.
if (mSuspendCalled) {
streams = GetAllStreams();
}
mPromiseGripArray.AppendElement(promise);
Graph()->ApplyAudioContextOperation(DestinationStream()->AsAudioNodeStream(),
streams,
AudioContextOperation::Resume, promise);
mSuspendCalled = false;
return promise.forget();
}
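The comments in Suspend() and Resume() above describe the intended collapsing of repeated calls: the streams are only suspended once, and a single resume cancels any number of prior suspends, while every call still gets its own promise. A minimal standalone sketch of that flag-based bookkeeping (plain C++, not Gecko code; the names are illustrative and the stream operations are stand-ins for GetAllStreams() plus ApplyAudioContextOperation()):

#include <cassert>

// Standalone illustration of the mSuspendCalled bookkeeping described above.
class SuspendBookkeeping {
public:
  void Suspend() {
    if (!mSuspendCalled) {
      ++mStreamSuspendCount;      // suspend all streams exactly once
    }
    mSuspendCalled = true;
  }
  void Resume() {
    if (mSuspendCalled) {
      --mStreamSuspendCount;      // resume all streams exactly once
    }
    mSuspendCalled = false;
  }
  int StreamSuspendCount() const { return mStreamSuspendCount; }
private:
  bool mSuspendCalled = false;
  int mStreamSuspendCount = 0;
};

int main() {
  SuspendBookkeeping ctx;
  ctx.Suspend();
  ctx.Suspend();                  // second suspend does not suspend the streams again
  assert(ctx.StreamSuspendCount() == 1);
  ctx.Resume();                   // one resume cancels both suspends
  assert(ctx.StreamSuspendCount() == 0);
  return 0;
}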
@ -919,8 +950,6 @@ AudioContext::Close(ErrorResult& aRv)
return promise.forget();
}
mCloseCalled = true;
if (Destination()) {
Destination()->DestroyAudioChannelAgent();
}
@ -931,24 +960,43 @@ AudioContext::Close(ErrorResult& aRv)
// this point, so we need extra null-checks.
MediaStream* ds = DestinationStream();
if (ds) {
Graph()->ApplyAudioContextOperation(ds->AsAudioNodeStream(),
nsTArray<MediaStream*> streams;
// If mSuspendCalled or mCloseCalled are true then we already suspended
// all our streams, so don't suspend them again. But we still need to do
// ApplyAudioContextOperation to ensure our new promise is resolved.
if (!mSuspendCalled && !mCloseCalled) {
streams = GetAllStreams();
}
Graph()->ApplyAudioContextOperation(ds->AsAudioNodeStream(), streams,
AudioContextOperation::Close, promise);
}
mCloseCalled = true;
return promise.forget();
}
void
AudioContext::UpdateNodeCount(int32_t aDelta)
AudioContext::RegisterNode(AudioNode* aNode)
{
bool firstNode = mNodeCount == 0;
mNodeCount += aDelta;
MOZ_ASSERT(mNodeCount >= 0);
MOZ_ASSERT(!mAllNodes.Contains(aNode));
mAllNodes.PutEntry(aNode);
// mDestinationNode may be null when we're destroying nodes unlinked by CC.
// Skipping unnecessary calls after shutdown avoids RunInStableState events
// getting stuck in CycleCollectedJSRuntime during final cycle collection
// (bug 1200514).
if (!firstNode && mDestination && !mIsShutDown) {
mDestination->SetIsOnlyNodeForContext(mNodeCount == 1);
if (mDestination && !mIsShutDown) {
mDestination->SetIsOnlyNodeForContext(mAllNodes.Count() == 1);
}
}
void
AudioContext::UnregisterNode(AudioNode* aNode)
{
MOZ_ASSERT(mAllNodes.Contains(aNode));
mAllNodes.RemoveEntry(aNode);
// mDestinationNode may be null when we're destroying nodes unlinked by CC
if (mDestination) {
mDestination->SetIsOnlyNodeForContext(mAllNodes.Count() == 1);
}
}


@ -175,16 +175,14 @@ public:
return mSampleRate;
}
AudioContextId Id() const
{
return mId;
}
bool ShouldSuspendNewStream() const { return mSuspendCalled; }
double CurrentTime() const;
AudioListener* Listener();
AudioContextState State() const;
AudioContextState State() const { return mAudioContextState; }
// Those three methods return a promise to content that is resolved when a
// (possibly long) operation is completed on the MSG (and possibly other)
// thread(s). To avoid having to match the calls and asynchronous result when
@ -303,7 +301,8 @@ public:
AudioChannel TestAudioChannelInAudioNodeStream();
void UpdateNodeCount(int32_t aDelta);
void RegisterNode(AudioNode* aNode);
void UnregisterNode(AudioNode* aNode);
double DOMTimeToStreamTime(double aTime) const
{
@ -343,6 +342,8 @@ private:
bool CheckClosed(ErrorResult& aRv);
nsTArray<MediaStream*> GetAllStreams() const;
private:
// Each AudioContext has an id, that is passed down the MediaStreams that
// back the AudioNodes, so we can easily compute the set of all the
@ -361,6 +362,8 @@ private:
// See RegisterActiveNode. These will keep the AudioContext alive while it
// is rendering and the window remains alive.
nsTHashtable<nsRefPtrHashKey<AudioNode> > mActiveNodes;
// Raw (non-owning) references to all AudioNodes for this AudioContext.
nsTHashtable<nsPtrHashKey<AudioNode> > mAllNodes;
// Hashsets containing all the PannerNodes, to compute the doppler shift.
// These are weak pointers.
nsTHashtable<nsPtrHashKey<PannerNode> > mPannerNodes;
@ -368,13 +371,13 @@ private:
nsRefPtr<BasicWaveFormCache> mBasicWaveFormCache;
// Number of channels passed in the OfflineAudioContext ctor.
uint32_t mNumberOfChannels;
// Number of nodes that currently exist for this AudioContext
int32_t mNodeCount;
bool mIsOffline;
bool mIsStarted;
bool mIsShutDown;
// Close has been called, reject suspend and resume call.
bool mCloseCalled;
// Suspend has been called with no following resume.
bool mSuspendCalled;
};
static const dom::AudioContext::AudioContextId NO_AUDIO_CONTEXT = 0;


@ -332,7 +332,7 @@ AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
AudioNodeStream::NEED_MAIN_THREAD_CURRENT_TIME |
AudioNodeStream::NEED_MAIN_THREAD_FINISHED |
AudioNodeStream::EXTERNAL_OUTPUT;
mStream = AudioNodeStream::Create(graph, engine, flags);
mStream = AudioNodeStream::Create(aContext, engine, flags, graph);
mStream->AddMainThreadListener(this);
mStream->AddAudioOutput(&gWebAudioOutputKey);
@ -680,7 +680,7 @@ AudioDestinationNode::SetIsOnlyNodeForContext(bool aIsOnlyNode)
}
if (aIsOnlyNode) {
mStream->ChangeExplicitBlockerCount(1);
mStream->Suspend();
mStartedBlockingDueToBeingOnlyNode = TimeStamp::Now();
// Don't do an update of mExtraCurrentTimeSinceLastStartedBlocking until the next stable state.
mExtraCurrentTimeUpdatedSinceLastStableState = true;
@ -690,7 +690,7 @@ AudioDestinationNode::SetIsOnlyNodeForContext(bool aIsOnlyNode)
ExtraCurrentTime();
mExtraCurrentTime += mExtraCurrentTimeSinceLastStartedBlocking;
mExtraCurrentTimeSinceLastStartedBlocking = 0;
mStream->ChangeExplicitBlockerCount(-1);
mStream->Resume();
mStartedBlockingDueToBeingOnlyNode = TimeStamp();
}
}


@ -23,7 +23,7 @@ NS_IMPL_CYCLE_COLLECTION_CLASS(AudioNode)
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(AudioNode, DOMEventTargetHelper)
tmp->DisconnectFromGraph();
if (tmp->mContext) {
tmp->mContext->UpdateNodeCount(-1);
tmp->mContext->UnregisterNode(tmp);
}
NS_IMPL_CYCLE_COLLECTION_UNLINK(mContext)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mOutputNodes)
@ -72,7 +72,7 @@ AudioNode::AudioNode(AudioContext* aContext,
{
MOZ_ASSERT(aContext);
DOMEventTargetHelper::BindToOwner(aContext->GetParentObject());
aContext->UpdateNodeCount(1);
aContext->RegisterNode(this);
}
AudioNode::~AudioNode()
@ -85,7 +85,7 @@ AudioNode::~AudioNode()
"The webaudio-node-demise notification must have been sent");
#endif
if (mContext) {
mContext->UpdateNodeCount(-1);
mContext->UnregisterNode(this);
}
}
@ -225,9 +225,8 @@ AudioNode::Connect(AudioNode& aDestination, uint32_t aOutput,
MOZ_ASSERT(aInput <= UINT16_MAX, "Unexpected large input port number");
MOZ_ASSERT(aOutput <= UINT16_MAX, "Unexpected large output port number");
input->mStreamPort = destinationStream->
AllocateInputPort(mStream, 0,
static_cast<uint16_t>(aInput),
static_cast<uint16_t>(aOutput));
AllocateInputPort(mStream, static_cast<uint16_t>(aInput),
static_cast<uint16_t>(aOutput));
}
aDestination.NotifyInputsChanged();
@ -268,7 +267,7 @@ AudioNode::Connect(AudioParam& aDestination, uint32_t aOutput,
// Setup our stream as an input to the AudioParam's stream
MOZ_ASSERT(aOutput <= UINT16_MAX, "Unexpected large output port number");
input->mStreamPort =
ps->AllocateInputPort(mStream, 0, 0, static_cast<uint16_t>(aOutput));
ps->AllocateInputPort(mStream, 0, static_cast<uint16_t>(aOutput));
}
}


@ -177,7 +177,7 @@ public:
};
// Returns the stream, if any.
AudioNodeStream* GetStream() { return mStream; }
AudioNodeStream* GetStream() const { return mStream; }
const nsTArray<InputNode>& InputNodes() const
{


@ -12,8 +12,8 @@ using namespace mozilla::dom;
namespace mozilla {
AudioNodeExternalInputStream::AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate, uint32_t aContextId)
: AudioNodeStream(aEngine, NO_STREAM_FLAGS, aSampleRate, aContextId)
AudioNodeExternalInputStream::AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate)
: AudioNodeStream(aEngine, NO_STREAM_FLAGS, aSampleRate)
{
MOZ_COUNT_CTOR(AudioNodeExternalInputStream);
}
@ -27,13 +27,14 @@ AudioNodeExternalInputStream::~AudioNodeExternalInputStream()
AudioNodeExternalInputStream::Create(MediaStreamGraph* aGraph,
AudioNodeEngine* aEngine)
{
AudioContext* ctx = aEngine->NodeMainThread()->Context();
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(aGraph->GraphRate() == aEngine->NodeMainThread()->Context()->SampleRate());
MOZ_ASSERT(aGraph->GraphRate() == ctx->SampleRate());
nsRefPtr<AudioNodeExternalInputStream> stream =
new AudioNodeExternalInputStream(aEngine, aGraph->GraphRate(),
aEngine->NodeMainThread()->Context()->Id());
aGraph->AddStream(stream);
new AudioNodeExternalInputStream(aEngine, aGraph->GraphRate());
aGraph->AddStream(stream,
ctx->ShouldSuspendNewStream() ? MediaStreamGraph::ADD_STREAM_SUSPENDED : 0);
return stream.forget();
}
@ -153,6 +154,8 @@ AudioNodeExternalInputStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
break;
next = interval.mEnd;
// We know this stream does not block during the processing interval ---
// we're not finished, we don't underrun, and we're not suspended.
StreamTime outputStart = GraphTimeToStreamTime(interval.mStart);
StreamTime outputEnd = GraphTimeToStreamTime(interval.mEnd);
StreamTime ticks = outputEnd - outputStart;
@ -160,6 +163,8 @@ AudioNodeExternalInputStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
if (interval.mInputIsBlocked) {
segment.AppendNullData(ticks);
} else {
// The input stream is not blocked in this interval, so no need to call
// GraphTimeToStreamTimeWithBlocking.
StreamTime inputStart =
std::min(inputSegment.GetDuration(),
source->GraphTimeToStreamTime(interval.mStart));


@ -25,8 +25,7 @@ public:
Create(MediaStreamGraph* aGraph, AudioNodeEngine* aEngine);
protected:
AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate,
uint32_t aContextId);
AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate);
~AudioNodeExternalInputStream();
public:


@ -11,6 +11,7 @@
#include "AudioChannelFormat.h"
#include "AudioParamTimeline.h"
#include "AudioContext.h"
#include "nsMathUtils.h"
using namespace mozilla::dom;
@ -27,12 +28,10 @@ namespace mozilla {
AudioNodeStream::AudioNodeStream(AudioNodeEngine* aEngine,
Flags aFlags,
TrackRate aSampleRate,
AudioContext::AudioContextId aContextId)
TrackRate aSampleRate)
: ProcessedMediaStream(nullptr),
mEngine(aEngine),
mSampleRate(aSampleRate),
mAudioContextId(aContextId),
mFlags(aFlags),
mNumberOfInputChannels(2),
mMarkAsFinishedAfterThisBlock(false),
@ -64,26 +63,25 @@ AudioNodeStream::DestroyImpl()
}
/* static */ already_AddRefed<AudioNodeStream>
AudioNodeStream::Create(MediaStreamGraph* aGraph, AudioNodeEngine* aEngine,
Flags aFlags)
AudioNodeStream::Create(AudioContext* aCtx, AudioNodeEngine* aEngine,
Flags aFlags, MediaStreamGraph* aGraph)
{
MOZ_ASSERT(NS_IsMainThread());
// MediaRecorders use an AudioNodeStream, but no AudioNode
AudioNode* node = aEngine->NodeMainThread();
MOZ_ASSERT(!node || aGraph->GraphRate() == node->Context()->SampleRate());
MediaStreamGraph* graph = aGraph ? aGraph : aCtx->Graph();
MOZ_ASSERT(graph->GraphRate() == aCtx->SampleRate());
dom::AudioContext::AudioContextId contextIdForStream = node ? node->Context()->Id() :
NO_AUDIO_CONTEXT;
nsRefPtr<AudioNodeStream> stream =
new AudioNodeStream(aEngine, aFlags, aGraph->GraphRate(),
contextIdForStream);
if (aEngine->HasNode()) {
stream->SetChannelMixingParametersImpl(aEngine->NodeMainThread()->ChannelCount(),
aEngine->NodeMainThread()->ChannelCountModeValue(),
aEngine->NodeMainThread()->ChannelInterpretationValue());
new AudioNodeStream(aEngine, aFlags, graph->GraphRate());
if (node) {
stream->SetChannelMixingParametersImpl(node->ChannelCount(),
node->ChannelCountModeValue(),
node->ChannelInterpretationValue());
}
aGraph->AddStream(stream);
graph->AddStream(stream,
aCtx->ShouldSuspendNewStream() ? MediaStreamGraph::ADD_STREAM_SUSPENDED : 0);
return stream.forget();
}
@ -522,12 +520,7 @@ AudioNodeStream::ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags)
uint16_t outputCount = mLastChunks.Length();
MOZ_ASSERT(outputCount == std::max(uint16_t(1), mEngine->OutputCount()));
// Consider this stream blocked if it has already finished output. Normally
// mBlocked would reflect this, but due to rounding errors our audio track may
// appear to extend slightly beyond aFrom, so we might not be blocked yet.
bool blocked = mFinished || mBlocked.GetAt(aFrom);
// If the stream has finished at this time, it will be blocked.
if (blocked || InMutedCycle()) {
if (mFinished || InMutedCycle()) {
mInputChunks.Clear();
for (uint16_t i = 0; i < outputCount; ++i) {
mLastChunks[i].SetNull(WEBAUDIO_BLOCK_SIZE);
@ -565,8 +558,8 @@ AudioNodeStream::ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags)
}
}
if (!blocked) {
// Don't output anything while blocked
if (!mFinished) {
// Don't output anything while finished
AdvanceOutputSegment();
if (mMarkAsFinishedAfterThisBlock && (aFlags & ALLOW_FINISH)) {
// This stream was finished the last time that we looked at it, and all
@ -586,12 +579,7 @@ AudioNodeStream::ProduceOutputBeforeInput(GraphTime aFrom)
MOZ_ASSERT(!InMutedCycle(), "DelayNodes should break cycles");
MOZ_ASSERT(mLastChunks.Length() == 1);
// Consider this stream blocked if it has already finished output. Normally
// mBlocked would reflect this, but due to rounding errors our audio track may
// appear to extend slightly beyond aFrom, so we might not be blocked yet.
bool blocked = mFinished || mBlocked.GetAt(aFrom);
// If the stream has finished at this time, it will be blocked.
if (blocked) {
if (mFinished) {
mLastChunks[0].SetNull(WEBAUDIO_BLOCK_SIZE);
} else {
mEngine->ProduceBlockBeforeInput(&mLastChunks[0]);
@ -668,9 +656,8 @@ AudioNodeStream::FractionalTicksFromDestinationTime(AudioNodeStream* aDestinatio
GraphTime graphTime =
aDestination->StreamTimeToGraphTime(destinationStreamTime);
StreamTime thisStreamTime = GraphTimeToStreamTimeOptimistic(graphTime);
StreamTime thisStreamTime = GraphTimeToStreamTime(graphTime);
double thisFractionalTicks = thisStreamTime + offset;
MOZ_ASSERT(thisFractionalTicks >= 0.0);
return thisFractionalTicks;
}
@ -683,9 +670,7 @@ AudioNodeStream::TicksFromDestinationTime(MediaStream* aDestination,
double thisSeconds =
FractionalTicksFromDestinationTime(destination, aSeconds);
// Round to nearest
StreamTime ticks = thisSeconds + 0.5;
return ticks;
return NS_round(thisSeconds);
}
double
@ -693,8 +678,9 @@ AudioNodeStream::DestinationTimeFromTicks(AudioNodeStream* aDestination,
StreamTime aPosition)
{
MOZ_ASSERT(SampleRate() == aDestination->SampleRate());
GraphTime graphTime = StreamTimeToGraphTime(aPosition);
StreamTime destinationTime = aDestination->GraphTimeToStreamTimeOptimistic(graphTime);
StreamTime destinationTime = aDestination->GraphTimeToStreamTime(graphTime);
return StreamTimeToSeconds(destinationTime);
}


@ -57,9 +57,13 @@ public:
/**
* Create a stream that will process audio for an AudioNode.
* Takes ownership of aEngine.
* If aGraph is non-null, use that as the MediaStreamGraph, otherwise use
* aCtx's graph. aGraph is only non-null when called for AudioDestinationNode
* since the context's graph hasn't been set up in that case.
*/
static already_AddRefed<AudioNodeStream>
Create(MediaStreamGraph* aGraph, AudioNodeEngine* aEngine, Flags aKind);
Create(AudioContext* aCtx, AudioNodeEngine* aEngine, Flags aKind,
MediaStreamGraph* aGraph = nullptr);
protected:
/**
@ -67,8 +71,7 @@ protected:
*/
AudioNodeStream(AudioNodeEngine* aEngine,
Flags aFlags,
TrackRate aSampleRate,
AudioContext::AudioContextId aContextId);
TrackRate aSampleRate);
~AudioNodeStream();
@ -132,15 +135,10 @@ public:
return ((mFlags & NEED_MAIN_THREAD_FINISHED) && mFinished) ||
(mFlags & NEED_MAIN_THREAD_CURRENT_TIME);
}
virtual bool IsIntrinsicallyConsumed() const override
{
return true;
}
// Any thread
AudioNodeEngine* Engine() { return mEngine; }
TrackRate SampleRate() const { return mSampleRate; }
AudioContext::AudioContextId AudioContextId() const override { return mAudioContextId; }
/**
* Convert a time in seconds on the destination stream to ticks
@ -192,9 +190,6 @@ protected:
OutputChunks mLastChunks;
// The stream's sampling rate
const TrackRate mSampleRate;
// This is necessary to be able to find all the nodes for a given
// AudioContext. It is set on the main thread, in the constructor.
const AudioContext::AudioContextId mAudioContextId;
// Whether this is an internal or external stream
const Flags mFlags;
// The number of input channels that this stream requires. 0 means don't care.


@ -100,7 +100,7 @@ AudioParam::Stream()
AudioNodeEngine* engine = new AudioNodeEngine(nullptr);
nsRefPtr<AudioNodeStream> stream =
AudioNodeStream::Create(mNode->Context()->Graph(), engine,
AudioNodeStream::Create(mNode->Context(), engine,
AudioNodeStream::NO_STREAM_FLAGS);
// Force the input to have only one channel, and make it down-mix using
@ -114,7 +114,7 @@ AudioParam::Stream()
// Setup the AudioParam's stream as an input to the owner AudioNode's stream
AudioNodeStream* nodeStream = mNode->GetStream();
if (nodeStream) {
mNodeStreamPort = nodeStream->AllocateInputPort(mStream, 0);
mNodeStreamPort = nodeStream->AllocateInputPort(mStream);
}
// Let the MSG's copy of AudioParamTimeline know about the change in the stream


@ -250,7 +250,7 @@ BiquadFilterNode::BiquadFilterNode(AudioContext* aContext)
, mGain(new AudioParam(this, SendGainToStream, 0.f, "gain"))
{
BiquadFilterNodeEngine* engine = new BiquadFilterNodeEngine(this, aContext->Destination());
mStream = AudioNodeStream::Create(aContext->Graph(), engine,
mStream = AudioNodeStream::Create(aContext, engine,
AudioNodeStream::NO_STREAM_FLAGS);
engine->SetSourceStream(mStream);
}


@ -73,7 +73,7 @@ ChannelMergerNode::ChannelMergerNode(AudioContext* aContext,
ChannelInterpretation::Speakers)
, mInputCount(aInputCount)
{
mStream = AudioNodeStream::Create(aContext->Graph(),
mStream = AudioNodeStream::Create(aContext,
new ChannelMergerNodeEngine(this),
AudioNodeStream::NO_STREAM_FLAGS);
}


@ -60,7 +60,7 @@ ChannelSplitterNode::ChannelSplitterNode(AudioContext* aContext,
ChannelInterpretation::Speakers)
, mOutputCount(aOutputCount)
{
mStream = AudioNodeStream::Create(aContext->Graph(),
mStream = AudioNodeStream::Create(aContext,
new ChannelSplitterNodeEngine(this),
AudioNodeStream::NO_STREAM_FLAGS);
}


@ -191,7 +191,7 @@ ConvolverNode::ConvolverNode(AudioContext* aContext)
, mNormalize(true)
{
ConvolverNodeEngine* engine = new ConvolverNodeEngine(this, mNormalize);
mStream = AudioNodeStream::Create(aContext->Graph(), engine,
mStream = AudioNodeStream::Create(aContext, engine,
AudioNodeStream::NO_STREAM_FLAGS);
}


@ -198,7 +198,7 @@ DelayNode::DelayNode(AudioContext* aContext, double aMaxDelay)
DelayNodeEngine* engine =
new DelayNodeEngine(this, aContext->Destination(),
aContext->SampleRate() * aMaxDelay);
mStream = AudioNodeStream::Create(aContext->Graph(), engine,
mStream = AudioNodeStream::Create(aContext, engine,
AudioNodeStream::NO_STREAM_FLAGS);
engine->SetSourceStream(mStream);
}


@ -203,7 +203,7 @@ DynamicsCompressorNode::DynamicsCompressorNode(AudioContext* aContext)
, mRelease(new AudioParam(this, SendReleaseToStream, 0.25f, "release"))
{
DynamicsCompressorNodeEngine* engine = new DynamicsCompressorNodeEngine(this, aContext->Destination());
mStream = AudioNodeStream::Create(aContext->Graph(), engine,
mStream = AudioNodeStream::Create(aContext, engine,
AudioNodeStream::NO_STREAM_FLAGS);
engine->SetSourceStream(mStream);
}


@ -128,7 +128,7 @@ GainNode::GainNode(AudioContext* aContext)
, mGain(new AudioParam(this, SendGainToStream, 1.0f, "gain"))
{
GainNodeEngine* engine = new GainNodeEngine(this, aContext->Destination());
mStream = AudioNodeStream::Create(aContext->Graph(), engine,
mStream = AudioNodeStream::Create(aContext, engine,
AudioNodeStream::NO_STREAM_FLAGS);
engine->SetSourceStream(mStream);
}


@ -39,7 +39,7 @@ MediaStreamAudioDestinationNode::MediaStreamAudioDestinationNode(AudioContext* a
ProcessedMediaStream* outputStream = mDOMStream->GetStream()->AsProcessedStream();
MOZ_ASSERT(!!outputStream);
AudioNodeEngine* engine = new AudioNodeEngine(this);
mStream = AudioNodeStream::Create(aContext->Graph(), engine,
mStream = AudioNodeStream::Create(aContext, engine,
AudioNodeStream::EXTERNAL_OUTPUT);
mPort = outputStream->AllocateInputPort(mStream);


@ -41,7 +41,7 @@ MediaStreamAudioSourceNode::MediaStreamAudioSourceNode(AudioContext* aContext,
AudioNodeEngine* engine = new MediaStreamAudioSourceNodeEngine(this);
mStream = AudioNodeExternalInputStream::Create(aContext->Graph(), engine);
ProcessedMediaStream* outputStream = static_cast<ProcessedMediaStream*>(mStream.get());
mInputPort = outputStream->AllocateInputPort(aMediaStream->GetStream(), 0);
mInputPort = outputStream->AllocateInputPort(aMediaStream->GetStream());
mInputStream->AddConsumerToKeepAlive(static_cast<nsIDOMEventTarget*>(this));
PrincipalChanged(mInputStream); // trigger enabling/disabling of the connector


@ -384,7 +384,7 @@ OscillatorNode::OscillatorNode(AudioContext* aContext)
, mStartCalled(false)
{
OscillatorNodeEngine* engine = new OscillatorNodeEngine(this, aContext->Destination());
mStream = AudioNodeStream::Create(aContext->Graph(), engine,
mStream = AudioNodeStream::Create(aContext, engine,
AudioNodeStream::NEED_MAIN_THREAD_FINISHED);
engine->SetSourceStream(mStream);
mStream->AddMainThreadListener(this);


@ -240,7 +240,7 @@ PannerNode::PannerNode(AudioContext* aContext)
, mConeOuterAngle(360.)
, mConeOuterGain(0.)
{
mStream = AudioNodeStream::Create(aContext->Graph(),
mStream = AudioNodeStream::Create(aContext,
new PannerNodeEngine(this),
AudioNodeStream::NO_STREAM_FLAGS);
// We should register once we have set up our stream and engine.


@ -501,7 +501,7 @@ ScriptProcessorNode::ScriptProcessorNode(AudioContext* aContext,
aContext->Destination(),
BufferSize(),
aNumberOfInputChannels);
mStream = AudioNodeStream::Create(aContext->Graph(), engine,
mStream = AudioNodeStream::Create(aContext, engine,
AudioNodeStream::NO_STREAM_FLAGS);
engine->SetSourceStream(mStream);
}


@ -181,7 +181,7 @@ StereoPannerNode::StereoPannerNode(AudioContext* aContext)
, mPan(new AudioParam(this, SendPanToStream, 0.f, "pan"))
{
StereoPannerNodeEngine* engine = new StereoPannerNodeEngine(this, aContext->Destination());
mStream = AudioNodeStream::Create(aContext->Graph(), engine,
mStream = AudioNodeStream::Create(aContext, engine,
AudioNodeStream::NO_STREAM_FLAGS);
engine->SetSourceStream(mStream);
}


@ -288,7 +288,7 @@ WaveShaperNode::WaveShaperNode(AudioContext* aContext)
mozilla::HoldJSObjects(this);
WaveShaperNodeEngine* engine = new WaveShaperNodeEngine(this);
mStream = AudioNodeStream::Create(aContext->Graph(), engine,
mStream = AudioNodeStream::Create(aContext, engine,
AudioNodeStream::NO_STREAM_FLAGS);
}


@ -563,7 +563,7 @@ nsSpeechTask::Pause()
}
if (mStream) {
mStream->ChangeExplicitBlockerCount(1);
mStream->Suspend();
}
if (!mInited) {
@ -586,7 +586,7 @@ nsSpeechTask::Resume()
}
if (mStream) {
mStream->ChangeExplicitBlockerCount(-1);
mStream->Resume();
}
if (mPrePaused) {
@ -612,7 +612,7 @@ nsSpeechTask::Cancel()
}
if (mStream) {
mStream->ChangeExplicitBlockerCount(1);
mStream->Suspend();
}
if (!mInited) {
@ -628,7 +628,7 @@ void
nsSpeechTask::ForceEnd()
{
if (mStream) {
mStream->ChangeExplicitBlockerCount(1);
mStream->Suspend();
}
if (!mInited) {


@ -9,7 +9,7 @@
#include "nscore.h"
#include "npapi.h"
#include "npruntime.h"
#include "pldhash.h"
#include "PLDHashTable.h"
class nsJSNPRuntime
{


@ -291,7 +291,7 @@ nsMixedContentBlocker::AsyncOnChannelRedirect(nsIChannel* aOldChannel,
return NS_OK;
}
uint32_t contentPolicyType = loadInfo->GetContentPolicyType();
nsContentPolicyType contentPolicyType = loadInfo->InternalContentPolicyType();
nsCOMPtr<nsIPrincipal> requestingPrincipal = loadInfo->LoadingPrincipal();
// Since we are calling shouldLoad() directly on redirects, we don't go through the code
@ -310,7 +310,7 @@ nsMixedContentBlocker::AsyncOnChannelRedirect(nsIChannel* aOldChannel,
}
int16_t decision = REJECT_REQUEST;
rv = ShouldLoad(nsContentUtils::InternalContentPolicyTypeToExternal(contentPolicyType),
rv = ShouldLoad(nsContentUtils::InternalContentPolicyTypeToExternalOrScript(contentPolicyType),
newUri,
requestingLocation,
loadInfo->LoadingNode(),
@ -378,9 +378,17 @@ nsMixedContentBlocker::ShouldLoad(bool aHadInsecureImageRedirect,
// to them.
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(aContentType == nsContentUtils::InternalContentPolicyTypeToExternal(aContentType),
MOZ_ASSERT(aContentType == nsContentUtils::InternalContentPolicyTypeToExternalOrScript(aContentType),
"We should only see external content policy types here.");
// The content policy type that we receive may be an internal type for
// scripts. Let's remember if we have seen a worker type, and reset it to the
// external type in all cases right now.
bool isWorkerType = aContentType == nsIContentPolicy::TYPE_INTERNAL_WORKER ||
aContentType == nsIContentPolicy::TYPE_INTERNAL_SHARED_WORKER ||
aContentType == nsIContentPolicy::TYPE_INTERNAL_SERVICE_WORKER;
aContentType = nsContentUtils::InternalContentPolicyTypeToExternal(aContentType);
// Assume active (high risk) content and blocked by default
MixedContentTypes classification = eMixedScript;
// Make decision to block/reject by default
@ -625,6 +633,23 @@ nsMixedContentBlocker::ShouldLoad(bool aHadInsecureImageRedirect,
return NS_OK;
}
// Disallow mixed content loads for workers, shared workers and service
// workers.
if (isWorkerType) {
// For workers, we can assume that we're mixed content at this point, since
// the parent is https, and the protocol associated with aContentLocation
// doesn't map to the secure URI flags checked above. Assert this for
// sanity's sake
#ifdef DEBUG
bool isHttpsScheme = false;
rv = aContentLocation->SchemeIs("https", &isHttpsScheme);
NS_ENSURE_SUCCESS(rv, rv);
MOZ_ASSERT(!isHttpsScheme);
#endif
*aDecision = REJECT_REQUEST;
return NS_OK;
}
// Determine if the rootDoc is https and if the user decided to allow Mixed Content
nsCOMPtr<nsIDocShell> docShell = NS_CP_GetDocShellFromContext(aRequestingContext);
NS_ENSURE_TRUE(docShell, NS_OK);
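The hunks above describe the classification: the internal worker types (TYPE_INTERNAL_WORKER, TYPE_INTERNAL_SHARED_WORKER, TYPE_INTERNAL_SERVICE_WORKER) are remembered as worker loads and then folded back into the external script type before the usual mixed-content checks run, and worker loads that reach this point over an insecure channel are rejected outright. A standalone sketch of that mapping (a plain C++ enum standing in for the real nsIContentPolicy constants):

#include <cassert>

// Illustrative stand-ins for the content policy types; not the real values.
enum class PolicyType {
  Script,
  InternalWorker,
  InternalSharedWorker,
  InternalServiceWorker,
  Image
};

static bool IsWorkerType(PolicyType aType) {
  return aType == PolicyType::InternalWorker ||
         aType == PolicyType::InternalSharedWorker ||
         aType == PolicyType::InternalServiceWorker;
}

// Mirrors the worker cases of InternalContentPolicyTypeToExternal only.
static PolicyType ToExternal(PolicyType aType) {
  return IsWorkerType(aType) ? PolicyType::Script : aType;
}

int main() {
  PolicyType type = PolicyType::InternalServiceWorker;
  bool isWorkerType = IsWorkerType(type);   // remembered for the later hard block
  type = ToExternal(type);                  // the remaining checks see a script load
  assert(isWorkerType);
  assert(type == PolicyType::Script);
  return 0;
}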


@ -13,7 +13,7 @@
#include "nsSMILAnimationFunction.h"
#include "nsSMILTargetIdentifier.h"
#include "nsSMILCompositorTable.h"
#include "pldhash.h"
#include "PLDHashTable.h"
//----------------------------------------------------------------------
// nsSMILCompositor


@ -990,11 +990,11 @@ var interfaceNamesInGlobalScope =
// IMPORTANT: Do not change this list without review from a DOM peer!
"Selection",
// IMPORTANT: Do not change this list without review from a DOM peer!
{name: "ServiceWorker", b2g: false, releaseAndroid: false},
{name: "ServiceWorker", b2g: false, nightlyAndroid: true, android: false},
// IMPORTANT: Do not change this list without review from a DOM peer!
{name: "ServiceWorkerContainer", b2g: false, releaseAndroid: false},
{name: "ServiceWorkerContainer", b2g: false, nightlyAndroid: true, android: false},
// IMPORTANT: Do not change this list without review from a DOM peer!
{name: "ServiceWorkerRegistration", b2g: false, releaseAndroid: false},
{name: "ServiceWorkerRegistration", b2g: false, nightlyAndroid: true, android: false},
// IMPORTANT: Do not change this list without review from a DOM peer!
"SettingsLock",
// IMPORTANT: Do not change this list without review from a DOM peer!
@ -1487,15 +1487,15 @@ function createInterfaceMap(isXBLScope) {
} else {
ok(!("pref" in entry), "Bogus pref annotation for " + entry.name);
if ((entry.nightly === !isNightly) ||
(entry.nightlyAndroid === !(isAndroid && isNightly) && isAndroid) ||
(entry.xbl === !isXBLScope) ||
(entry.desktop === !isDesktop) ||
(entry.b2g === !isB2G) ||
(entry.windows === !isWindows) ||
(entry.mac === !isMac) ||
(entry.linux === !isLinux) ||
(entry.android === !isAndroid) ||
(entry.android === !isAndroid && !entry.nightlyAndroid) ||
(entry.release === !isRelease) ||
(entry.releaseAndroid === !(isAndroid && isRelease)) ||
(entry.permission && !hasPermission(entry.permission)) ||
entry.disabled) {
interfaceMap[entry.name] = false;


@ -120,21 +120,18 @@ ChannelFromScriptURL(nsIPrincipal* principal,
return NS_ERROR_DOM_SYNTAX_ERR;
}
// If we're part of a document then check the content load policy.
if (parentDoc) {
int16_t shouldLoad = nsIContentPolicy::ACCEPT;
rv = NS_CheckContentLoadPolicy(aContentPolicyType, uri,
principal, parentDoc,
NS_LITERAL_CSTRING("text/javascript"),
nullptr, &shouldLoad,
nsContentUtils::GetContentPolicy(),
secMan);
if (NS_FAILED(rv) || NS_CP_REJECTED(shouldLoad)) {
if (NS_FAILED(rv) || shouldLoad != nsIContentPolicy::REJECT_TYPE) {
return rv = NS_ERROR_CONTENT_BLOCKED;
}
return rv = NS_ERROR_CONTENT_BLOCKED_SHOW_ALT;
int16_t shouldLoad = nsIContentPolicy::ACCEPT;
rv = NS_CheckContentLoadPolicy(aContentPolicyType, uri,
principal, parentDoc,
NS_LITERAL_CSTRING("text/javascript"),
nullptr, &shouldLoad,
nsContentUtils::GetContentPolicy(),
secMan);
if (NS_FAILED(rv) || NS_CP_REJECTED(shouldLoad)) {
if (NS_FAILED(rv) || shouldLoad != nsIContentPolicy::REJECT_TYPE) {
return rv = NS_ERROR_CONTENT_BLOCKED;
}
return rv = NS_ERROR_CONTENT_BLOCKED_SHOW_ALT;
}
if (aWorkerScriptType == DebuggerScript) {


@ -1188,8 +1188,9 @@ public:
return false;
}
if (NS_FAILED(timer->InitWithFuncCallback(DummyCallback, nullptr, aDelayMS,
nsITimer::TYPE_ONE_SHOT))) {
if (NS_FAILED(timer->InitWithNamedFuncCallback(
DummyCallback, nullptr, aDelayMS, nsITimer::TYPE_ONE_SHOT,
"dom::workers::DummyCallback(1)"))) {
JS_ReportError(aCx, "Failed to start timer!");
return false;
}
@ -4536,9 +4537,9 @@ WorkerPrivate::SetGCTimerMode(GCTimerMode aMode)
}
MOZ_ALWAYS_TRUE(NS_SUCCEEDED(mGCTimer->SetTarget(target)));
MOZ_ALWAYS_TRUE(NS_SUCCEEDED(mGCTimer->InitWithFuncCallback(DummyCallback,
nullptr, delay,
type)));
MOZ_ALWAYS_TRUE(NS_SUCCEEDED(
mGCTimer->InitWithNamedFuncCallback(DummyCallback, nullptr, delay, type,
"dom::workers::DummyCallback(2)")));
if (aMode == PeriodicTimer) {
LOG(("Worker %p scheduled periodic GC timer\n", this));
@ -5945,8 +5946,9 @@ WorkerPrivate::RescheduleTimeoutTimer(JSContext* aCx)
(mTimeouts[0]->mTargetTime - TimeStamp::Now()).ToMilliseconds();
uint32_t delay = delta > 0 ? std::min(delta, double(UINT32_MAX)) : 0;
nsresult rv = mTimer->InitWithFuncCallback(DummyCallback, nullptr, delay,
nsITimer::TYPE_ONE_SHOT);
nsresult rv = mTimer->InitWithNamedFuncCallback(
DummyCallback, nullptr, delay, nsITimer::TYPE_ONE_SHOT,
"dom::workers::DummyCallback(3)");
if (NS_FAILED(rv)) {
JS_ReportError(aCx, "Failed to start timer!");
return false;


@ -732,7 +732,7 @@ public:
case WorkerTypeShared:
return nsIContentPolicy::TYPE_INTERNAL_SHARED_WORKER;
case WorkerTypeService:
return nsIContentPolicy::TYPE_SCRIPT;
return nsIContentPolicy::TYPE_INTERNAL_SERVICE_WORKER;
default:
MOZ_ASSERT_UNREACHABLE("Invalid worker type");
return nsIContentPolicy::TYPE_INVALID;


@ -174,7 +174,7 @@ var interfaceNamesInGlobalScope =
// IMPORTANT: Do not change this list without review from a DOM peer!
"Response",
// IMPORTANT: Do not change this list without review from a DOM peer!
{ name: "ServiceWorkerRegistration", b2g: false, releaseAndroid: false },
{ name: "ServiceWorkerRegistration", b2g: false, nightlyAndroid: true, android: false },
// IMPORTANT: Do not change this list without review from a DOM peer!
"TextDecoder",
// IMPORTANT: Do not change this list without review from a DOM peer!
@ -219,11 +219,11 @@ function createInterfaceMap(permissionMap, version, userAgent, isB2G) {
} else {
ok(!("pref" in entry), "Bogus pref annotation for " + entry.name);
if ((entry.nightly === !isNightly) ||
(entry.nightlyAndroid === !(isAndroid && isNightly) && isAndroid) ||
(entry.desktop === !isDesktop) ||
(entry.android === !isAndroid) ||
(entry.android === !isAndroid && !entry.nightlyAndroid) ||
(entry.b2g === !isB2G) ||
(entry.release === !isRelease) ||
(entry.releaseAndroid === !(isAndroid && isRelease)) ||
(entry.permission && !permissionMap[entry.permission]) ||
entry.disabled) {
interfaceMap[entry.name] = false;


@ -6,7 +6,7 @@
#ifndef nsContentSupportMap_h__
#define nsContentSupportMap_h__
#include "pldhash.h"
#include "PLDHashTable.h"
#include "nsTemplateMatch.h"
/**


@ -34,7 +34,7 @@
#include "nsIAtom.h"
#include "nsIDOMNode.h"
#include "plhash.h"
#include "pldhash.h"
#include "PLDHashTable.h"
#include "nsIRDFNode.h"
class nsXULTemplateResultSetRDF;


@ -6,7 +6,7 @@
#ifndef nsTemplateMap_h__
#define nsTemplateMap_h__
#include "pldhash.h"
#include "PLDHashTable.h"
#include "nsXULElement.h"
class nsTemplateMap {


@ -8,7 +8,7 @@
#include "nsCOMPtr.h"
#include "nsTArray.h"
#include "pldhash.h"
#include "PLDHashTable.h"
#include "nsIXULTemplateResult.h"
#include "nsTemplateMatch.h"
#include "nsIRDFResource.h"


@ -34,7 +34,7 @@
#include "nsTextNode.h"
#include "mozilla/dom/Element.h"
#include "pldhash.h"
#include "PLDHashTable.h"
#include "rdf.h"
using namespace mozilla;


@ -57,7 +57,7 @@
#include "jsapi.h"
#include "mozilla/Logging.h"
#include "rdf.h"
#include "pldhash.h"
#include "PLDHashTable.h"
#include "plhash.h"
#include "nsDOMClassInfoID.h"
#include "nsPIDOMWindow.h"


@ -10,7 +10,7 @@
#include "nsString.h"
#include "nsICommandParams.h"
#include "nsCOMPtr.h"
#include "pldhash.h"
#include "PLDHashTable.h"
class nsCommandParams : public nsICommandParams
{


@ -2636,12 +2636,12 @@ nsPermissionManager::ImportDefaults()
rv = NS_NewChannel(getter_AddRefs(channel),
defaultsURI,
nsContentUtils::GetSystemPrincipal(),
nsILoadInfo::SEC_NORMAL,
nsILoadInfo::SEC_ALLOW_CROSS_ORIGIN_DATA_IS_NULL,
nsIContentPolicy::TYPE_OTHER);
NS_ENSURE_SUCCESS(rv, rv);
nsCOMPtr<nsIInputStream> inputStream;
rv = channel->Open(getter_AddRefs(inputStream));
rv = channel->Open2(getter_AddRefs(inputStream));
NS_ENSURE_SUCCESS(rv, rv);
rv = _DoImport(inputStream, nullptr);


@ -57,7 +57,8 @@ static const char *kTypeString[] = {
"", // TYPE_INTERNAL_VIDEO
"", // TYPE_INTERNAL_TRACK
"", // TYPE_INTERNAL_XMLHTTPREQUEST
"" // TYPE_INTERNAL_EVENTSOURCE
"", // TYPE_INTERNAL_EVENTSOURCE
"", // TYPE_INTERNAL_SERVICE_WORKER
};
#define NUMBER_OF_TYPES MOZ_ARRAY_LENGTH(kTypeString)


@ -698,12 +698,15 @@ public:
_11 *= aX;
_12 *= aX;
_13 *= aX;
_14 *= aX;
_21 *= aY;
_22 *= aY;
_23 *= aY;
_24 *= aY;
_31 *= aZ;
_32 *= aZ;
_33 *= aZ;
_34 *= aZ;
return *this;
}
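Read as plain linear algebra, the added _14, _24 and _34 lines make Scale(aX, aY, aZ) multiply every entry of the first three rows, which is the same as pre-multiplying by a diagonal scale matrix (this is only a restatement of the hunk above, not an additional change):

\mathrm{diag}(x, y, z, 1)\, M =
\begin{pmatrix}
x m_{11} & x m_{12} & x m_{13} & x m_{14} \\
y m_{21} & y m_{22} & y m_{23} & y m_{24} \\
z m_{31} & z m_{32} & z m_{33} & z m_{34} \\
m_{41} & m_{42} & m_{43} & m_{44}
\end{pmatrix}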


@ -28,6 +28,7 @@
#include "UnitTransforms.h" // for ViewAs
#include "gfxPrefs.h" // for gfxPrefs
#include "OverscrollHandoffState.h" // for OverscrollHandoffState
#include "TaskThrottler.h" // for TaskThrottler
#include "TreeTraversal.h" // for generic tree traveral algorithms
#include "LayersLogging.h" // for Stringify
#include "Units.h" // for ParentlayerPixel
@ -107,11 +108,18 @@ APZCTreeManager::~APZCTreeManager()
}
AsyncPanZoomController*
APZCTreeManager::MakeAPZCInstance(uint64_t aLayersId,
GeckoContentController* aController)
APZCTreeManager::NewAPZCInstance(uint64_t aLayersId,
GeckoContentController* aController,
TaskThrottler* aPaintThrottler)
{
return new AsyncPanZoomController(aLayersId, this, mInputQueue,
aController, AsyncPanZoomController::USE_GESTURE_DETECTOR);
aController, aPaintThrottler, AsyncPanZoomController::USE_GESTURE_DETECTOR);
}
TimeStamp
APZCTreeManager::GetFrameTime()
{
return TimeStamp::Now();
}
void
@ -413,7 +421,17 @@ APZCTreeManager::PrepareNodeForLayer(const LayerMetricsWrapper& aLayer,
// a destroyed APZC and so we need to throw that out and make a new one.
bool newApzc = (apzc == nullptr || apzc->IsDestroyed());
if (newApzc) {
apzc = MakeAPZCInstance(aLayersId, state->mController);
// Look up the paint throttler for this layers id, or create it if
// this is the first APZC for this layers id.
auto throttlerInsertResult = mPaintThrottlerMap.insert(
std::make_pair(aLayersId, nsRefPtr<TaskThrottler>()));
if (throttlerInsertResult.second) {
throttlerInsertResult.first->second = new TaskThrottler(
GetFrameTime(), TimeDuration::FromMilliseconds(500));
}
apzc = NewAPZCInstance(aLayersId, state->mController,
throttlerInsertResult.first->second);
apzc->SetCompositorParent(aState.mCompositor);
if (state->mCrossProcessParent != nullptr) {
apzc->ShareFrameMetricsAcrossProcesses();
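The lookup above relies on std::map::insert returning an (iterator, inserted) pair: the first time a layers id is seen a null entry is inserted and then filled with a fresh TaskThrottler, otherwise the existing throttler is reused. A standalone sketch of that insert-or-create idiom (std::shared_ptr<int> standing in for nsRefPtr<TaskThrottler>):

#include <cassert>
#include <cstdint>
#include <map>
#include <memory>

int main() {
  std::map<uint64_t, std::shared_ptr<int>> throttlers;
  const uint64_t layersId = 7;

  // insert() only inserts if the key is absent; .second says whether it did.
  auto result = throttlers.insert(std::make_pair(layersId, std::shared_ptr<int>()));
  if (result.second) {
    // First APZC for this layers id: create the throttler.
    result.first->second = std::make_shared<int>(500);
  }
  assert(throttlers.at(layersId) && *throttlers.at(layersId) == 500);

  // A second insert for the same key leaves the existing entry untouched.
  auto again = throttlers.insert(std::make_pair(layersId, std::shared_ptr<int>()));
  assert(!again.second && again.first->second == throttlers.at(layersId));
  return 0;
}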


@ -49,6 +49,7 @@ class LayerMetricsWrapper;
class InputQueue;
class GeckoContentController;
class HitTestingTreeNode;
class TaskThrottler;
/**
* ****************** NOTE ON LOCK ORDERING IN APZ **************************
@ -391,9 +392,13 @@ protected:
// Protected destructor, to discourage deletion outside of Release():
virtual ~APZCTreeManager();
// Hook for gtests subclass
virtual AsyncPanZoomController* MakeAPZCInstance(uint64_t aLayersId,
GeckoContentController* aController);
// Protected hooks for gtests subclass
virtual AsyncPanZoomController* NewAPZCInstance(uint64_t aLayersId,
GeckoContentController* aController,
TaskThrottler* aPaintThrottler);
public:
// Public hooks for gtests subclass
virtual TimeStamp GetFrameTime();
public:
/* Some helper functions to find an APZC given some identifying input. These functions
@ -506,7 +511,10 @@ private:
/* Holds the zoom constraints for scrollable layers, as determined by the
* the main-thread gecko code. */
std::map<ScrollableLayerGuid, ZoomConstraints> mZoomConstraints;
/* Stores a paint throttler for each layers id. There is one for each layers
* id to ensure that one child process painting slowly doesn't hold up
* another. */
std::map<uint64_t, nsRefPtr<TaskThrottler>> mPaintThrottlerMap;
/* This tracks the APZC that should receive all inputs for the current input event block.
* This allows touch points to move outside the thing they started on, but still have the
* touch events delivered to the same initial APZC. This will only ever be touched on the


@ -388,7 +388,8 @@ static uint32_t sAsyncPanZoomControllerCount = 0;
TimeStamp
AsyncPanZoomController::GetFrameTime() const
{
return TimeStamp::Now();
APZCTreeManager* treeManagerLocal = GetApzcTreeManager();
return treeManagerLocal ? treeManagerLocal->GetFrameTime() : TimeStamp::Now();
}
class MOZ_STACK_CLASS StateChangeNotificationBlocker {
@ -807,11 +808,14 @@ AsyncPanZoomController::AsyncPanZoomController(uint64_t aLayersId,
APZCTreeManager* aTreeManager,
const nsRefPtr<InputQueue>& aInputQueue,
GeckoContentController* aGeckoContentController,
TaskThrottler* aPaintThrottler,
GestureBehavior aGestures)
: mLayersId(aLayersId),
mPaintThrottler(GetFrameTime(), TimeDuration::FromMilliseconds(500)),
mPaintThrottler(aPaintThrottler),
mGeckoContentController(aGeckoContentController),
mRefPtrMonitor("RefPtrMonitor"),
// mTreeManager must be initialized before GetFrameTime() is called
mTreeManager(aTreeManager),
mSharingFrameMetricsAcrossProcesses(false),
mMonitor("AsyncPanZoomController"),
mX(this),
@ -826,7 +830,6 @@ AsyncPanZoomController::AsyncPanZoomController(uint64_t aLayersId,
mState(NOTHING),
mNotificationBlockers(0),
mInputQueue(aInputQueue),
mTreeManager(aTreeManager),
mAPZCId(sAsyncPanZoomControllerCount++),
mSharedLock(nullptr),
mAsyncTransformAppliedToContent(false)
@ -2485,7 +2488,7 @@ void AsyncPanZoomController::ScheduleComposite() {
void AsyncPanZoomController::ScheduleCompositeAndMaybeRepaint() {
ScheduleComposite();
TimeDuration timePaintDelta = mPaintThrottler.TimeSinceLastRequest(GetFrameTime());
TimeDuration timePaintDelta = mPaintThrottler->TimeSinceLastRequest(GetFrameTime());
if (timePaintDelta.ToMilliseconds() > gfxPrefs::APZPanRepaintInterval()) {
RequestContentRepaint();
}
@ -2506,7 +2509,7 @@ void AsyncPanZoomController::FlushRepaintForNewInputBlock() {
// Therefore we should clear out the pending task and restore the
// state of mLastPaintRequestMetrics to what it was before the
// pending task was queued.
mPaintThrottler.CancelPendingTask();
mPaintThrottler->CancelPendingTask();
mLastPaintRequestMetrics = mLastDispatchedPaintMetrics;
RequestContentRepaint(mFrameMetrics, false /* not throttled */);
@ -2517,7 +2520,7 @@ void AsyncPanZoomController::FlushRepaintIfPending() {
// Just tell the paint throttler to send the pending repaint request if
// there is one.
ReentrantMonitorAutoEnter lock(mMonitor);
mPaintThrottler.TaskComplete(GetFrameTime());
mPaintThrottler->TaskComplete(GetFrameTime());
}
bool AsyncPanZoomController::SnapBackIfOverscrolled() {
@ -2560,7 +2563,7 @@ void AsyncPanZoomController::RequestContentRepaint(FrameMetrics& aFrameMetrics,
aFrameMetrics.SetDisplayPortMargins(
CalculatePendingDisplayPort(aFrameMetrics,
GetVelocityVector(),
mPaintThrottler.AverageDuration().ToSeconds()));
mPaintThrottler->AverageDuration().ToSeconds()));
aFrameMetrics.SetUseDisplayPortMargins();
// If we're trying to paint what we already think is painted, discard this
@ -2583,7 +2586,7 @@ void AsyncPanZoomController::RequestContentRepaint(FrameMetrics& aFrameMetrics,
SendAsyncScrollEvent();
if (aThrottled) {
mPaintThrottler.PostTask(
mPaintThrottler->PostTask(
FROM_HERE,
UniquePtr<CancelableTask>(NewRunnableMethod(this,
&AsyncPanZoomController::DispatchRepaintRequest,
@ -2654,7 +2657,7 @@ bool AsyncPanZoomController::UpdateAnimation(const TimeStamp& aSampleTime,
bool continueAnimation = mAnimation->Sample(mFrameMetrics, sampleTimeDelta);
*aOutDeferredTasks = mAnimation->TakeDeferredTasks();
if (continueAnimation) {
if (mPaintThrottler.TimeSinceLastRequest(aSampleTime) >
if (mPaintThrottler->TimeSinceLastRequest(aSampleTime) >
mAnimation->mRepaintInterval) {
RequestContentRepaint();
}
@ -2844,7 +2847,14 @@ Matrix4x4 AsyncPanZoomController::GetTransformToLastDispatchedPaint() const {
* mLastContentPaintMetrics.GetDevPixelsPerCSSPixel()
* mLastContentPaintMetrics.GetCumulativeResolution();
gfxSize zoomChange = mLastContentPaintMetrics.GetZoom() / mLastDispatchedPaintMetrics.GetZoom();
// We're interested in the async zoom change. Factor out the content scale
// that may change when dragging the window to a monitor with a different
// content scale.
LayoutDeviceToParentLayerScale2D lastContentZoom =
mLastContentPaintMetrics.GetZoom() / mLastContentPaintMetrics.GetDevPixelsPerCSSPixel();
LayoutDeviceToParentLayerScale2D lastDispatchedZoom =
mLastDispatchedPaintMetrics.GetZoom() / mLastDispatchedPaintMetrics.GetDevPixelsPerCSSPixel();
gfxSize zoomChange = lastContentZoom / lastDispatchedZoom;
return Matrix4x4::Translation(scrollChange.x, scrollChange.y, 0).
PostScale(zoomChange.width, zoomChange.height, 1);
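Restating the new computation above in symbols, with Z a frame metrics' GetZoom() and D its GetDevPixelsPerCSSPixel(), the async zoom change now divides out the content scale on both sides:

\mathrm{zoomChange} = \frac{Z_{\text{lastContentPaint}} / D_{\text{lastContentPaint}}}{Z_{\text{lastDispatchedPaint}} / D_{\text{lastDispatchedPaint}}}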
@ -2888,7 +2898,7 @@ void AsyncPanZoomController::NotifyLayersUpdated(const FrameMetrics& aLayerMetri
aLayerMetrics.GetCriticalDisplayPort() + aLayerMetrics.GetScrollOffset());
}
mPaintThrottler.TaskComplete(GetFrameTime());
mPaintThrottler->TaskComplete(GetFrameTime());
bool needContentRepaint = false;
bool viewportUpdated = false;
if (FuzzyEqualsAdditive(aLayerMetrics.GetCompositionBounds().width, mFrameMetrics.GetCompositionBounds().width) &&
@ -2916,8 +2926,8 @@ void AsyncPanZoomController::NotifyLayersUpdated(const FrameMetrics& aLayerMetri
if (aIsFirstPaint || isDefault) {
// Initialize our internal state to something sane when the content
// that was just painted is something we knew nothing about previously
mPaintThrottler.ClearHistory();
mPaintThrottler.SetMaxDurations(gfxPrefs::APZNumPaintDurationSamples());
mPaintThrottler->ClearHistory();
mPaintThrottler->SetMaxDurations(gfxPrefs::APZNumPaintDurationSamples());
CancelAnimation();


@ -103,6 +103,7 @@ public:
APZCTreeManager* aTreeManager,
const nsRefPtr<InputQueue>& aInputQueue,
GeckoContentController* aController,
TaskThrottler* aPaintThrottler,
GestureBehavior aGestures = DEFAULT_GESTURES);
// --------------------------------------------------------------------------
@ -383,16 +384,13 @@ public:
void NotifyMozMouseScrollEvent(const nsString& aString) const;
protected:
// These functions are protected virtual so test code can override them.
// Returns the cached current frame time.
virtual TimeStamp GetFrameTime() const;
protected:
// Protected destructor, to discourage deletion outside of Release():
virtual ~AsyncPanZoomController();
// Returns the cached current frame time.
TimeStamp GetFrameTime() const;
/**
* Helper method for touches beginning. Sets everything up for panning and any
* multitouch gestures.
@ -651,7 +649,7 @@ protected:
uint64_t mLayersId;
nsRefPtr<CompositorParent> mCompositorParent;
TaskThrottler mPaintThrottler;
nsRefPtr<TaskThrottler> mPaintThrottler;
/* Access to the following two fields is protected by the mRefPtrMonitor,
since they are accessed on the UI thread but can be cleared on the
@ -660,6 +658,13 @@ protected:
nsRefPtr<GestureEventListener> mGestureEventListener;
mutable Monitor mRefPtrMonitor;
// This is a raw pointer to avoid introducing a reference cycle between
// AsyncPanZoomController and APZCTreeManager. Since these objects don't
// live on the main thread, we can't use the cycle collector with them.
// The APZCTreeManager owns the lifetime of the APZCs, so nulling this
// pointer out in Destroy() will prevent accessing deleted memory.
Atomic<APZCTreeManager*> mTreeManager;
/* Utility functions that return a addrefed pointer to the corresponding fields. */
already_AddRefed<GeckoContentController> GetGeckoContentController() const;
already_AddRefed<GestureEventListener> GetGestureEventListener() const;
@ -928,12 +933,8 @@ public:
}
private:
// This is a raw pointer to avoid introducing a reference cycle between
// AsyncPanZoomController and APZCTreeManager. Since these objects don't
// live on the main thread, we can't use the cycle collector with them.
// The APZCTreeManager owns the lifetime of the APZCs, so nulling this
// pointer out in Destroy() will prevent accessing deleted memory.
Atomic<APZCTreeManager*> mTreeManager;
// |mTreeManager| belongs in this section but its declaration is a bit
// further above due to initialization-order constraints.
nsRefPtr<AsyncPanZoomController> mParent;
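The initialization-order constraint referenced above is the general C++ rule that non-static data members are initialized in declaration order, not in the order they appear in the constructor's initializer list, so a member whose initialization depends on another member must be declared after it. A minimal standalone illustration (unrelated to the APZ types):

#include <cassert>

struct Widget {
  // Declaration order is what matters: mBase is initialized before mDerived
  // regardless of how the initializer list is written.
  int mBase;
  int mDerived;

  Widget() : mBase(2), mDerived(mBase * 10) {}
};

int main() {
  Widget w;
  assert(w.mBase == 2);
  assert(w.mDerived == 20);
  return 0;
}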


@ -6,6 +6,10 @@
#include "TaskThrottler.h"
#include "mozilla/layers/APZThreadUtils.h" // for NewTimerCallback
#include "nsComponentManagerUtils.h" // for do_CreateInstance
#include "nsITimer.h"
namespace mozilla {
namespace layers {
@ -15,8 +19,14 @@ TaskThrottler::TaskThrottler(const TimeStamp& aTimeStamp, const TimeDuration& aM
, mStartTime(aTimeStamp)
, mMaxWait(aMaxWait)
, mMean(1)
, mTimer(do_CreateInstance(NS_TIMER_CONTRACTID))
{ }
TaskThrottler::~TaskThrottler()
{
mTimer->Cancel();
}
void
TaskThrottler::PostTask(const tracked_objects::Location& aLocation,
UniquePtr<CancelableTask> aTask, const TimeStamp& aTimeStamp)
@ -27,9 +37,23 @@ TaskThrottler::PostTask(const tracked_objects::Location& aLocation,
if (mQueuedTask) {
mQueuedTask->Cancel();
mQueuedTask = nullptr;
mTimer->Cancel();
}
if (TimeSinceLastRequest(aTimeStamp) < mMaxWait) {
mQueuedTask = Move(aTask);
// Make sure the queued task is sent after mMaxWait time elapses,
// even if we don't get a TaskComplete() until then.
TimeDuration timeout = mMaxWait - TimeSinceLastRequest(aTimeStamp);
TimeStamp timeoutTime = mStartTime + mMaxWait;
nsRefPtr<TaskThrottler> refPtrThis = this;
mTimer->InitWithCallback(NewTimerCallback(
[refPtrThis, timeoutTime]()
{
if (refPtrThis->mQueuedTask) {
refPtrThis->RunQueuedTask(timeoutTime);
}
}),
timeout.ToMilliseconds(), nsITimer::TYPE_ONE_SHOT);
return;
}
// we've been waiting for more than the max-wait limit, so just fall through
@ -51,20 +75,29 @@ TaskThrottler::TaskComplete(const TimeStamp& aTimeStamp)
mMean.insert(aTimeStamp - mStartTime);
if (mQueuedTask) {
mStartTime = aTimeStamp;
mQueuedTask->Run();
mQueuedTask = nullptr;
RunQueuedTask(aTimeStamp);
mTimer->Cancel();
} else {
mOutstanding = false;
}
}
void
TaskThrottler::RunQueuedTask(const TimeStamp& aTimeStamp)
{
mStartTime = aTimeStamp;
mQueuedTask->Run();
mQueuedTask = nullptr;
}
void
TaskThrottler::CancelPendingTask()
{
if (mQueuedTask) {
mQueuedTask->Cancel();
mQueuedTask = nullptr;
mTimer->Cancel();
}
}
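The PostTask() changes above come down to the timer-arming arithmetic: a task that arrives within mMaxWait of the last run is queued, and the one-shot timer is armed for the remaining time so the queued task still runs at mStartTime + mMaxWait even if TaskComplete() never arrives. A standalone sketch of that arithmetic (milliseconds as plain integers, not Gecko's TimeStamp/TimeDuration):

#include <cassert>

int main() {
  const int maxWaitMs = 100;   // mMaxWait
  const int startTimeMs = 0;   // mStartTime: when the last task actually ran

  // A new task is posted 30ms after the last one ran.
  const int nowMs = 30;
  const int sinceLastRequestMs = nowMs - startTimeMs;

  if (sinceLastRequestMs < maxWaitMs) {
    // Queue the task and arm a one-shot timer for the remaining wait,
    // so it fires at startTime + maxWait even without a TaskComplete().
    const int timeoutMs = maxWaitMs - sinceLastRequestMs;
    assert(timeoutMs == 70);
    assert(nowMs + timeoutMs == startTimeMs + maxWaitMs);
  }
  return 0;
}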


@ -13,8 +13,12 @@
#include "mozilla/RollingMean.h" // for RollingMean
#include "mozilla/mozalloc.h" // for operator delete
#include "mozilla/UniquePtr.h" // for UniquePtr
#include "nsCOMPtr.h" // for nsCOMPtr
#include "nsISupportsImpl.h" // for NS_INLINE_DECL_THREADSAFE_REFCOUNTING
#include "nsTArray.h" // for nsTArray
class nsITimer;
namespace tracked_objects {
class Location;
} // namespace tracked_objects
@ -45,6 +49,8 @@ class TaskThrottler {
public:
TaskThrottler(const TimeStamp& aTimeStamp, const TimeDuration& aMaxWait);
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TaskThrottler)
/** Post a task to be run as soon as there are no outstanding tasks, or
* post it immediately if it has been more than the max-wait time since
* the last task was posted.
@ -107,6 +113,10 @@ private:
TimeStamp mStartTime;
TimeDuration mMaxWait;
RollingMean<TimeDuration, TimeDuration> mMean;
nsCOMPtr<nsITimer> mTimer;
~TaskThrottler();
void RunQueuedTask(const TimeStamp& aTimeStamp);
};
} // namespace layers


@ -81,5 +81,7 @@ APZThreadUtils::RunOnControllerThread(Task* aTask)
#endif
}
NS_IMPL_ISUPPORTS(GenericTimerCallbackBase, nsITimerCallback)
} // namespace layers
} // namespace mozilla


@ -7,6 +7,7 @@
#define mozilla_layers_APZThreadUtils_h
#include "base/message_loop.h"
#include "nsITimer.h"
class Task;
@ -51,6 +52,46 @@ public:
static void RunOnControllerThread(Task* aTask);
};
// A base class for GenericTimerCallback<Function>.
// This is necessary because NS_IMPL_ISUPPORTS doesn't work for a class
// template.
class GenericTimerCallbackBase : public nsITimerCallback
{
public:
NS_DECL_THREADSAFE_ISUPPORTS
protected:
virtual ~GenericTimerCallbackBase() {}
};
// An nsITimerCallback implementation that can be used with any function
// object that's callable with no arguments.
template <typename Function>
class GenericTimerCallback final : public GenericTimerCallbackBase
{
public:
explicit GenericTimerCallback(const Function& aFunction) : mFunction(aFunction) {}
NS_IMETHODIMP Notify(nsITimer*) override
{
mFunction();
return NS_OK;
}
private:
Function mFunction;
};
// Convenience function for constructing a GenericTimerCallback.
// Returns a raw pointer, suitable for passing directly as an argument to
// nsITimer::InitWithCallback(). The intention is to enable the following
// terse inline usage:
// timer->InitWithCallback(NewTimerCallback([](){ ... }), delay);
template <typename Function>
GenericTimerCallback<Function>* NewTimerCallback(const Function& aFunction)
{
return new GenericTimerCallback<Function>(aFunction);
}
} // namespace layers
} // namespace mozilla
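The comment above already shows the intended call shape; the design point is that NS_IMPL_ISUPPORTS cannot be applied to a class template, so the refcounting interface lives on the non-template base while the template subclass only forwards Notify() to the stored callable. A standalone sketch of that split (plain virtual dispatch, no XPCOM refcounting):

#include <cassert>

// Non-template base: where the macro-generated boilerplate would live.
struct TimerCallbackBase {
  virtual ~TimerCallbackBase() = default;
  virtual void Notify() = 0;
};

// Template subclass: stores any callable and forwards Notify() to it.
template <typename Function>
struct GenericCallback final : TimerCallbackBase {
  explicit GenericCallback(const Function& aFunction) : mFunction(aFunction) {}
  void Notify() override { mFunction(); }
  Function mFunction;
};

template <typename Function>
TimerCallbackBase* NewCallback(const Function& aFunction) {
  return new GenericCallback<Function>(aFunction);
}

int main() {
  int fired = 0;
  TimerCallbackBase* cb = NewCallback([&fired]() { ++fired; });
  cb->Notify();   // what the timer does when it expires
  assert(fired == 1);
  delete cb;
  return 0;
}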


@ -442,7 +442,7 @@ gfxShmSharedReadLock::GetReadCount() {
class TileExpiry final : public nsExpirationTracker<TileClient, 3>
{
public:
TileExpiry() : nsExpirationTracker<TileClient, 3>(1000) {}
TileExpiry() : nsExpirationTracker<TileClient, 3>(1000, "TileExpiry") {}
static void AddTile(TileClient* aTile)
{


@ -147,22 +147,33 @@ private:
class TestAPZCTreeManager : public APZCTreeManager {
public:
TestAPZCTreeManager() {}
explicit TestAPZCTreeManager(MockContentControllerDelayed* aMcc) : mcc(aMcc) {}
nsRefPtr<InputQueue> GetInputQueue() const {
return mInputQueue;
}
protected:
AsyncPanZoomController* MakeAPZCInstance(uint64_t aLayersId, GeckoContentController* aController) override;
AsyncPanZoomController* NewAPZCInstance(uint64_t aLayersId,
GeckoContentController* aController,
TaskThrottler* aPaintThrottler) override;
TimeStamp GetFrameTime() override {
return mcc->Time();
}
private:
nsRefPtr<MockContentControllerDelayed> mcc;
};
class TestAsyncPanZoomController : public AsyncPanZoomController {
public:
TestAsyncPanZoomController(uint64_t aLayersId, MockContentControllerDelayed* aMcc,
TestAPZCTreeManager* aTreeManager,
TaskThrottler* aPaintThrottler,
GestureBehavior aBehavior = DEFAULT_GESTURES)
: AsyncPanZoomController(aLayersId, aTreeManager, aTreeManager->GetInputQueue(), aMcc, aBehavior)
: AsyncPanZoomController(aLayersId, aTreeManager, aTreeManager->GetInputQueue(),
aMcc, aPaintThrottler, aBehavior)
, mWaitForMainThread(false)
, mcc(aMcc)
{}
@ -239,10 +250,6 @@ public:
mWaitForMainThread = true;
}
TimeStamp GetFrameTime() const {
return mcc->Time();
}
static TimeStamp GetStartupTime() {
static TimeStamp sStartupTime = TimeStamp::Now();
return sStartupTime;
@ -254,10 +261,12 @@ private:
};
AsyncPanZoomController*
TestAPZCTreeManager::MakeAPZCInstance(uint64_t aLayersId, GeckoContentController* aController)
TestAPZCTreeManager::NewAPZCInstance(uint64_t aLayersId,
GeckoContentController* aController,
TaskThrottler* aPaintThrottler)
{
MockContentControllerDelayed* mcc = static_cast<MockContentControllerDelayed*>(aController);
return new TestAsyncPanZoomController(aLayersId, mcc, this,
return new TestAsyncPanZoomController(aLayersId, mcc, this, aPaintThrottler,
AsyncPanZoomController::USE_GESTURE_DETECTOR);
}
@ -289,8 +298,9 @@ protected:
APZThreadUtils::SetControllerThread(MessageLoop::current());
mcc = new NiceMock<MockContentControllerDelayed>();
tm = new TestAPZCTreeManager();
apzc = new TestAsyncPanZoomController(0, mcc, tm, mGestureBehavior);
mPaintThrottler = new TaskThrottler(mcc->Time(), TimeDuration::FromMilliseconds(500));
tm = new TestAPZCTreeManager(mcc);
apzc = new TestAsyncPanZoomController(0, mcc, tm, mPaintThrottler, mGestureBehavior);
apzc->SetFrameMetrics(TestFrameMetrics());
}
@ -373,6 +383,7 @@ protected:
AsyncPanZoomController::GestureBehavior mGestureBehavior;
nsRefPtr<MockContentControllerDelayed> mcc;
nsRefPtr<TaskThrottler> mPaintThrottler;
nsRefPtr<TestAPZCTreeManager> tm;
nsRefPtr<TestAsyncPanZoomController> apzc;
};
@ -967,7 +978,7 @@ TEST_F(APZCBasicTester, ComplexTransform) {
// sides.
nsRefPtr<TestAsyncPanZoomController> childApzc =
new TestAsyncPanZoomController(0, mcc, tm);
new TestAsyncPanZoomController(0, mcc, tm, mPaintThrottler);
const char* layerTreeSyntax = "c(c)";
// LayerID 0 1
@ -1870,7 +1881,7 @@ protected:
APZThreadUtils::SetControllerThread(MessageLoop::current());
mcc = new NiceMock<MockContentControllerDelayed>();
manager = new TestAPZCTreeManager();
manager = new TestAPZCTreeManager(mcc);
}
virtual void TearDown() {
@ -3236,7 +3247,7 @@ public:
APZTaskThrottlerTester()
{
now = TimeStamp::Now();
throttler = MakeUnique<TaskThrottler>(now, TimeDuration::FromMilliseconds(100));
throttler = new TaskThrottler(now, TimeDuration::FromMilliseconds(100));
}
protected:
@ -3252,7 +3263,7 @@ protected:
}
TimeStamp now;
UniquePtr<TaskThrottler> throttler;
nsRefPtr<TaskThrottler> throttler;
TaskRunMetrics metrics;
};


@ -36,7 +36,9 @@ class FrameTextRunCache final : public nsExpirationTracker<gfxTextRun,3> {
public:
enum { TIMEOUT_SECONDS = 10 };
FrameTextRunCache()
: nsExpirationTracker<gfxTextRun,3>(TIMEOUT_SECONDS*1000) {}
: nsExpirationTracker<gfxTextRun,3>(TIMEOUT_SECONDS * 1000,
"FrameTextRunCache")
{}
~FrameTextRunCache() {
AgeAllGenerations();
}


@ -264,7 +264,7 @@ class BlurCache final : public nsExpirationTracker<BlurCacheData,4>
{
public:
BlurCache()
: nsExpirationTracker<BlurCacheData, 4>(GENERATION_MS)
: nsExpirationTracker<BlurCacheData, 4>(GENERATION_MS, "BlurCache")
{
}


@ -165,7 +165,8 @@ gfxFontCache::Shutdown()
}
gfxFontCache::gfxFontCache()
: nsExpirationTracker<gfxFont,3>(FONT_TIMEOUT_SECONDS * 1000)
: nsExpirationTracker<gfxFont,3>(FONT_TIMEOUT_SECONDS * 1000,
"gfxFontCache")
{
nsCOMPtr<nsIObserverService> obs = GetObserverService();
if (obs) {

Some files were not shown because too many files changed in this diff.