Merge inbound to central, a=merge

MozReview-Commit-ID: E4oXfAM1mtd
Wes Kocher 2017-07-25 19:04:37 -07:00
Parents: b9ecd6f0f8 cbbcce1fa2
Commit: 65bbd0525a
101 changed files with 1490 additions and 597 deletions


@ -35,12 +35,12 @@ add_task(async function test() {
is(ContextualIdentityService.countContainerTabs(1), 2, "2 container tabs created with id 1");
is(ContextualIdentityService.countContainerTabs(2), 1, "1 container tab created with id 2");
ContextualIdentityService.closeContainerTabs(1);
await ContextualIdentityService.closeContainerTabs(1);
is(ContextualIdentityService.countContainerTabs(), 1, "1 container tab created");
is(ContextualIdentityService.countContainerTabs(1), 0, "0 container tabs created with id 1");
is(ContextualIdentityService.countContainerTabs(2), 1, "1 container tab created with id 2");
ContextualIdentityService.closeContainerTabs();
await ContextualIdentityService.closeContainerTabs();
is(ContextualIdentityService.countContainerTabs(), 0, "0 container tabs at the end.");
is(ContextualIdentityService.countContainerTabs(1), 0, "0 container tabs at the end with id 1.");
is(ContextualIdentityService.countContainerTabs(2), 0, "0 container tabs at the end with id 2.");


@ -40,7 +40,7 @@ let gContainersPane = {
}
},
onRemoveClick(button) {
async onRemoveClick(button) {
let userContextId = parseInt(button.getAttribute("value"), 10);
let count = ContextualIdentityService.countContainerTabs(userContextId);
@ -62,7 +62,7 @@ let gContainersPane = {
return;
}
ContextualIdentityService.closeContainerTabs(userContextId);
await ContextualIdentityService.closeContainerTabs(userContextId);
}
ContextualIdentityService.remove(userContextId);


@ -7,8 +7,6 @@
Components.utils.import("resource://gre/modules/AppConstants.jsm");
Components.utils.import("resource://gre/modules/PluralForm.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "ContextualIdentityService",
"resource://gre/modules/ContextualIdentityService.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "PluralForm",
"resource://gre/modules/PluralForm.jsm");
XPCOMUtils.defineLazyModuleGetter(this, "LoginHelper",


@ -40,7 +40,7 @@ let gContainersPane = {
}
},
onRemoveClick(button) {
async onRemoveClick(button) {
let userContextId = parseInt(button.getAttribute("value"), 10);
let count = ContextualIdentityService.countContainerTabs(userContextId);
@ -62,7 +62,7 @@ let gContainersPane = {
return;
}
ContextualIdentityService.closeContainerTabs(userContextId);
await ContextualIdentityService.closeContainerTabs(userContextId);
}
ContextualIdentityService.remove(userContextId);


@ -87,6 +87,7 @@ var gPrivacyPane = {
let count = ContextualIdentityService.countContainerTabs();
if (count == 0) {
ContextualIdentityService.notifyAllContainersCleared();
Services.prefs.setBoolPref("privacy.userContext.enabled", false);
return;
}
@ -106,8 +107,10 @@ var gPrivacyPane = {
let rv = Services.prompt.confirmEx(window, title, message, buttonFlags,
okButton, cancelButton, null, null, {});
if (rv == 0) {
ContextualIdentityService.closeContainerTabs();
Services.prefs.setBoolPref("privacy.userContext.enabled", false);
ContextualIdentityService.closeContainerTabs().then(() => {
ContextualIdentityService.notifyAllContainersCleared();
});
return;
}


@ -12,16 +12,15 @@ namespace mozilla {
namespace dom {
Timeout::Timeout()
: mCleared(false),
: mTimeoutId(0),
mFiringId(TimeoutManager::InvalidFiringId),
mPopupState(openAllowed),
mReason(Reason::eTimeoutOrInterval),
mNestingLevel(0),
mCleared(false),
mRunning(false),
mIsInterval(false),
mIsTracking(false),
mReason(Reason::eTimeoutOrInterval),
mTimeoutId(0),
mInterval(0),
mFiringId(TimeoutManager::InvalidFiringId),
mNestingLevel(0),
mPopupState(openAllowed)
mIsTracking(false)
{
}


@ -35,7 +35,7 @@ public:
NS_DECL_CYCLE_COLLECTION_NATIVE_CLASS(Timeout)
NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(Timeout)
enum class Reason
enum class Reason : uint8_t
{
eTimeoutOrInterval,
eIdleCallbackTimeout,
@ -50,9 +50,52 @@ public:
// Can only be called when frozen.
const TimeDuration& TimeRemaining() const;
private:
// mWhen and mTimeRemaining can't be in a union, sadly, because they
// have constructors.
// Nominal time to run this timeout. Use only when timeouts are not
// frozen.
TimeStamp mWhen;
// Remaining time to wait. Used only when timeouts are frozen.
TimeDuration mTimeRemaining;
~Timeout() = default;
public:
// Public member variables in this section. Please don't add to this list
// or mix methods with these. The interleaved public/private sections
// are necessary as we migrate members to private while still trying to
// keep decent binary packing.
// Window for which this timeout fires
RefPtr<nsGlobalWindow> mWindow;
// The language-specific information about the callback.
nsCOMPtr<nsITimeoutHandler> mScriptHandler;
// Interval
TimeDuration mInterval;
// Returned as value of setTimeout()
uint32_t mTimeoutId;
// Identifies which firing level this Timeout is being processed in
// when sync loops trigger nested firing.
uint32_t mFiringId;
// The popup state at timeout creation time if not created from
// another timeout
PopupControlState mPopupState;
// Used to allow several reasons for setting a timeout, where each
// 'Reason' value is using a possibly overlapping set of id:s.
Reason mReason;
// Between 0 and DOM_CLAMP_TIMEOUT_NESTING_LEVEL. Currently we don't
// care about nesting levels beyond that value.
uint8_t mNestingLevel;
// True if the timeout was cleared
bool mCleared;
@ -64,41 +107,6 @@ public:
// True if this is a timeout coming from a tracking script
bool mIsTracking;
// Used to allow several reasons for setting a timeout, where each
// 'Reason' value is using a possibly overlapping set of id:s.
Reason mReason;
// Returned as value of setTimeout()
uint32_t mTimeoutId;
// Interval
TimeDuration mInterval;
// Identifies which firing level this Timeout is being processed in
// when sync loops trigger nested firing.
uint32_t mFiringId;
uint32_t mNestingLevel;
// The popup state at timeout creation time if not created from
// another timeout
PopupControlState mPopupState;
// The language-specific information about the callback.
nsCOMPtr<nsITimeoutHandler> mScriptHandler;
private:
// mWhen and mTimeRemaining can't be in a union, sadly, because they
// have constructors.
// Nominal time to run this timeout. Use only when timeouts are not
// frozen.
TimeStamp mWhen;
// Remaining time to wait. Used only when timeouts are frozen.
TimeDuration mTimeRemaining;
~Timeout() = default;
};
} // namespace dom
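A note on the Timeout reshuffle above: members are now ordered roughly from largest to smallest (pointers and TimeDurations first, then the 32-bit fields, then the uint8_t nesting level and the bool flags), and Reason is narrowed to uint8_t, so the compiler inserts far less alignment padding. Below is a minimal standalone sketch of the effect using hypothetical Loose/Packed structs, not the real Timeout layout:

#include <cstdint>
#include <cstdio>

struct Loose {          // small and large members interleaved
  bool a;               // 1 byte + 7 bytes of padding before the pointer
  void* p;
  uint8_t b;            // 1 byte + 3 bytes of padding before the uint32_t
  uint32_t c;
  bool d;               // 1 byte + 7 bytes of tail padding (8-byte alignment)
};

struct Packed {         // same five members, largest first, small ones grouped
  void* p;
  uint32_t c;
  uint8_t b;
  bool a;
  bool d;               // only 1 byte of tail padding on a typical LP64 ABI
};

int main() {
  std::printf("Loose:  %zu bytes\n", sizeof(Loose));   // typically 32
  std::printf("Packed: %zu bytes\n", sizeof(Packed));  // typically 16
}

Reordering alone halves the size in this sketch; the same reasoning is why the header keeps alternating public/private sections instead of grouping strictly by access.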


@ -309,15 +309,14 @@ TimeoutManager::IsInvalidFiringId(uint32_t aFiringId) const
// The number of nested timeouts before we start clamping. HTML5 says 1, WebKit
// uses 5.
#define DOM_CLAMP_TIMEOUT_NESTING_LEVEL 5
#define DOM_CLAMP_TIMEOUT_NESTING_LEVEL 5u
TimeDuration
TimeoutManager::CalculateDelay(Timeout* aTimeout) const {
MOZ_DIAGNOSTIC_ASSERT(aTimeout);
TimeDuration result = aTimeout->mInterval;
if (aTimeout->mIsInterval ||
aTimeout->mNestingLevel >= DOM_CLAMP_TIMEOUT_NESTING_LEVEL) {
if (aTimeout->mNestingLevel >= DOM_CLAMP_TIMEOUT_NESTING_LEVEL) {
result = TimeDuration::Max(
result, TimeDuration::FromMilliseconds(gMinClampTimeoutValue));
}
@ -533,9 +532,8 @@ TimeoutManager::SetTimeout(nsITimeoutHandler* aHandler,
return NS_OK;
}
// Disallow negative intervals. If aIsInterval also disallow 0,
// because we use that as a "don't repeat" flag.
interval = std::max(aIsInterval ? 1 : 0, interval);
// Disallow negative intervals.
interval = std::max(0, interval);
// Make sure we don't proceed with an interval larger than our timer
// code can handle. (Note: we already forced |interval| to be non-negative,
@ -592,10 +590,8 @@ TimeoutManager::SetTimeout(nsITimeoutHandler* aHandler,
break;
}
uint32_t nestingLevel = sNestingLevel + 1;
if (!aIsInterval) {
timeout->mNestingLevel = nestingLevel;
}
timeout->mNestingLevel = sNestingLevel < DOM_CLAMP_TIMEOUT_NESTING_LEVEL
? sNestingLevel + 1 : sNestingLevel;
// Now clamp the actual interval we will use for the timer based on
TimeDuration realInterval = CalculateDelay(timeout);
@ -986,6 +982,12 @@ TimeoutManager::RescheduleTimeout(Timeout* aTimeout,
return false;
}
// Automatically increase the nesting level when a setInterval()
// is rescheduled just as if it was using a chained setTimeout().
if (aTimeout->mNestingLevel < DOM_CLAMP_TIMEOUT_NESTING_LEVEL) {
aTimeout->mNestingLevel += 1;
}
// Compute time to next timeout for interval timer.
// Make sure nextInterval is at least CalculateDelay().
TimeDuration nextInterval = CalculateDelay(aTimeout);
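The net effect of the TimeoutManager changes above: clamping now depends only on the nesting level. A rescheduled setInterval() bumps the level exactly like a chained setTimeout(), and once the level reaches DOM_CLAMP_TIMEOUT_NESTING_LEVEL (5) the requested delay is raised to the minimum clamp value. A simplified standalone model of that rule follows; kMinClampMs stands in for the dom.min_timeout_value pref (typically 4 ms), and this is an illustration, not the tree's CalculateDelay():

#include <algorithm>
#include <cstdio>

constexpr unsigned kClampNestingLevel = 5;   // DOM_CLAMP_TIMEOUT_NESTING_LEVEL
constexpr double   kMinClampMs = 4.0;        // typical dom.min_timeout_value

// After this change the rule is the same for setTimeout() chains and
// setInterval(): only the nesting level decides whether we clamp.
double ClampedDelayMs(double requestedMs, unsigned nestingLevel) {
  double result = std::max(0.0, requestedMs);   // negative intervals disallowed
  if (nestingLevel >= kClampNestingLevel)
    result = std::max(result, kMinClampMs);
  return result;
}

int main() {
  // A setTimeout(fn, 0) chain: the 5th and later callbacks wait at least 4 ms.
  for (unsigned level = 1; level <= 6; ++level)
    std::printf("level %u -> %.0f ms\n", level, ClampedDelayMs(0, level));
}

The new test_timeout_clamp.html below measures exactly this: with dom.min_timeout_value raised to 10 seconds, the fifth iteration of either flavor is the first one that has to wait.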


@ -10545,7 +10545,7 @@ nsContentUtils::HtmlObjectContentTypeForMIMEType(const nsCString& aMIMEType,
return nsIObjectLoadingContent::TYPE_NULL;
}
/* static */ already_AddRefed<nsIEventTarget>
/* static */ already_AddRefed<nsISerialEventTarget>
nsContentUtils::GetEventTargetByLoadInfo(nsILoadInfo* aLoadInfo, TaskCategory aCategory)
{
if (NS_WARN_IF(!aLoadInfo)) {
@ -10555,7 +10555,7 @@ nsContentUtils::GetEventTargetByLoadInfo(nsILoadInfo* aLoadInfo, TaskCategory aC
nsCOMPtr<nsIDOMDocument> domDoc;
aLoadInfo->GetLoadingDocument(getter_AddRefs(domDoc));
nsCOMPtr<nsIDocument> doc = do_QueryInterface(domDoc);
nsCOMPtr<nsIEventTarget> target;
nsCOMPtr<nsISerialEventTarget> target;
if (doc) {
if (DocGroup* group = doc->GetDocGroup()) {
target = group->EventTargetFor(aCategory);


@ -3028,7 +3028,7 @@ public:
bool aNoFakePlugin,
nsIContent* aContent);
static already_AddRefed<nsIEventTarget>
static already_AddRefed<nsISerialEventTarget>
GetEventTargetByLoadInfo(nsILoadInfo* aLoadInfo, mozilla::TaskCategory aCategory);
/**


@ -2598,8 +2598,18 @@ SetMemoryMaxPrefChangedCallback(const char* aPrefName, void* aClosure)
{
int32_t pref = Preferences::GetInt(aPrefName, -1);
// handle overflow and negative pref values
uint32_t max = (pref <= 0 || pref >= 0x1000) ? -1 : (uint32_t)pref * 1024 * 1024;
SetGCParameter(JSGC_MAX_BYTES, max);
CheckedInt<uint32_t> max = CheckedInt<uint32_t>(pref) * 1024 * 1024;
SetGCParameter(JSGC_MAX_BYTES, max.isValid() ? max.value() : -1);
}
static void
SetMemoryNurseryMaxPrefChangedCallback(const char* aPrefName, void* aClosure)
{
int32_t pref = Preferences::GetInt(aPrefName, -1);
// handle overflow and negative pref values
CheckedInt<uint32_t> max = CheckedInt<uint32_t>(pref) * 1024;
SetGCParameter(JSGC_MAX_NURSERY_BYTES,
max.isValid() ? max.value() : JS::DefaultNurseryBytes);
}
static void
@ -2792,6 +2802,8 @@ nsJSContext::EnsureStatics()
Preferences::RegisterCallbackAndCall(SetMemoryMaxPrefChangedCallback,
"javascript.options.mem.max");
Preferences::RegisterCallbackAndCall(SetMemoryNurseryMaxPrefChangedCallback,
"javascript.options.mem.nursery.max_kb");
Preferences::RegisterCallbackAndCall(SetMemoryGCModePrefChangedCallback,
"javascript.options.mem.gc_per_zone");


@ -779,6 +779,8 @@ skip-if = e10s # Bug 1156489.
[test_text_wholeText.html]
[test_textnode_normalize_in_selection.html]
[test_textnode_split_in_selection.html]
[test_timeout_clamp.html]
skip-if = debug == true && toolkit == 'android' # Timing dependent, skip slow debug android builds
[test_timer_flood.html]
[test_title.html]
[test_treewalker_nextsibling.xml]


@ -0,0 +1,122 @@
<!DOCTYPE HTML>
<html>
<!--
https://bugzilla.mozilla.org/show_bug.cgi?id=1378586
-->
<head>
<meta charset="utf-8">
<title>Test for Bug 1378586</title>
<script type="application/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
</head>
<body>
<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=1378586">Mozilla Bug 1378586</a>
<script>
SimpleTest.waitForExplicitFinish();
// We need to clear our nesting level periodically. We do this by firing
// a postMessage() to get a runnable on the event loop without any setTimeout()
// nesting.
function clearNestingLevel() {
return new Promise(resolve => {
window.addEventListener('message', function onMessage() {
window.removeEventListener('message', onMessage);
resolve();
});
postMessage('done', '*');
});
}
function delayByTimeoutChain(iterations) {
return new Promise(resolve => {
let count = 0;
function tick() {
count += 1;
if (count >= iterations) {
resolve();
return;
}
setTimeout(tick, 0);
}
setTimeout(tick, 0);
});
}
function delayByInterval(iterations) {
return new Promise(resolve => {
let count = 0;
function tick() {
count += 1;
if (count >= iterations) {
resolve();
return;
}
}
setInterval(tick, 0);
});
}
// Use a very long clamp delay to make it easier to measure the change
// in automation. Some of our test servers are very slow and noisy.
const clampDelayMS = 10000;
// We expect that we will clamp on the 5th callback. This should
// be the same for both setTimeout() chains and setInterval().
const expectedClampIteration = 5;
async function runTests() {
// Things like pushPrefEnv() can use setTimeout() internally which may give
// us a nesting level. Clear the nesting level to start so this doesn't
// confuse the test.
await clearNestingLevel();
// Verify a setTimeout() chain clamps correctly
let start = performance.now();
await delayByTimeoutChain(expectedClampIteration);
let delta = performance.now() - start;
ok(delta >= clampDelayMS, "setTimeout() chain clamped");
ok(delta < (2*clampDelayMS), "setTimeout() chain did not clamp twice");
await clearNestingLevel();
// Verify setInterval() clamps correctly
start = performance.now();
await delayByInterval(expectedClampIteration);
delta = performance.now() - start;
ok(delta >= clampDelayMS, "setInterval() clamped");
ok(delta < (2*clampDelayMS), "setInterval() did not clamp twice");
await clearNestingLevel();
// Verify a setTimeout() chain will continue to clamp past the first
// expected iteration.
const expectedDelay = (1 + expectedClampIteration) * clampDelayMS;
start = performance.now();
await delayByTimeoutChain(2 * expectedClampIteration);
delta = performance.now() - start;
ok(delta >= expectedDelay, "setTimeout() chain continued to clamp");
await clearNestingLevel();
// Verify setInterval() will continue to clamp past the first expected
// iteration.
start = performance.now();
await delayByInterval(2 * expectedClampIteration);
delta = performance.now() - start;
ok(delta >= expectedDelay, "setInterval() continued to clamp");
SimpleTest.finish();
}
SpecialPowers.pushPrefEnv({ 'set': [["dom.min_timeout_value", clampDelayMS]]},
runTests);
</script>
</body>
</html>


@ -685,7 +685,7 @@ DefineUnforgeableAttributes(JSContext* cx, JS::Handle<JSObject*> obj,
// funToString ObjectOps member for interface objects.
JSString*
InterfaceObjectToString(JSContext* aCx, JS::Handle<JSObject*> aObject,
unsigned /* indent */)
bool /* isToSource */)
{
const js::Class* clasp = js::GetObjectClass(aObject);
MOZ_ASSERT(IsDOMIfaceAndProtoClass(clasp));


@ -41,7 +41,7 @@ codegen_dependencies := \
$(GLOBAL_DEPS) \
$(NULL)
include codegen.pp
-include codegen.pp
codegen.pp: $(codegen_dependencies)
$(call py_action,webidl,$(srcdir))


@ -2462,6 +2462,10 @@ TabChild::InternalSetDocShellIsActive(bool aIsActive, bool aPreserveLayers)
if (aIsActive) {
MakeVisible();
if (!docShell) {
return;
}
// We don't use TabChildBase::GetPresShell() here because that would create
// a content viewer if one doesn't exist yet. Creating a content viewer can
// cause JS to run, which we want to avoid. nsIDocShell::GetPresShell


@ -11,3 +11,4 @@ support-files =
[test_nosniff.html]
[test_block_script_wrong_mime.html]
[test_block_toplevel_data_navigation.html]
skip-if = toolkit == 'android' # intermittent failure


@ -143,20 +143,12 @@ ServiceWorkerContainer::Register(const nsAString& aScriptURL,
}
nsCOMPtr<nsIURI> baseURI;
nsIDocument* doc = GetEntryDocument();
if (doc) {
baseURI = doc->GetBaseURI();
nsCOMPtr<nsPIDOMWindowInner> window = GetOwner();
if (window) {
baseURI = window->GetDocBaseURI();
} else {
// XXXnsm. One of our devtools browser test calls register() from a content
// script where there is no valid entry document. Use the window to resolve
// the uri in that case.
nsCOMPtr<nsPIDOMWindowInner> window = GetOwner();
nsCOMPtr<nsPIDOMWindowOuter> outerWindow;
if (window && (outerWindow = window->GetOuterWindow()) &&
outerWindow->GetServiceWorkersTestingEnabled()) {
baseURI = window->GetDocBaseURI();
}
aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
return nullptr;
}
nsresult rv;


@ -10,6 +10,7 @@
#include "mozilla/Assertions.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/mscom/EnsureMTA.h"
#include "mozilla/SystemGroup.h"
#include "mozilla/UniquePtr.h"
#include "nsError.h"
#include "nsThreadUtils.h"
@ -41,9 +42,10 @@ struct MainThreadRelease
return;
}
DebugOnly<nsresult> rv =
NS_DispatchToMainThread(NewNonOwningRunnableMethod("mscom::MainThreadRelease",
aPtr,
&T::Release));
SystemGroup::Dispatch("mscom::MainThreadRelease",
TaskCategory::Other,
NewNonOwningRunnableMethod("mscom::MainThreadRelease",
aPtr, &T::Release));
MOZ_ASSERT(NS_SUCCEEDED(rv));
}
};


@ -436,7 +436,7 @@ typedef bool
* that object. A null return value means OOM.
*/
typedef JSString*
(* JSFunToStringOp)(JSContext* cx, JS::HandleObject obj, unsigned indent);
(* JSFunToStringOp)(JSContext* cx, JS::HandleObject obj, bool isToSource);
/**
* Resolve a lazy property named by id in obj by defining it directly in obj.


@ -65,7 +65,7 @@ namespace JS {
D(RESET) \
D(OUT_OF_NURSERY) \
D(EVICT_NURSERY) \
D(UNUSED0) \
D(DELAYED_ATOMS_GC) \
D(SHARED_MEMORY_LIMIT) \
D(UNUSED1) \
D(INCREMENTAL_TOO_SLOW) \


@ -101,10 +101,14 @@ MOZ_ALWAYS_INLINE bool IsInsideNursery(const js::gc::Cell* cell);
namespace JS {
struct Zone;
/* Default size for the generational nursery in bytes. */
/*
* Default size for the generational nursery in bytes.
* This is the initial nursery size; when running in the browser it is
* updated by JS_SetGCParameter().
*/
const uint32_t DefaultNurseryBytes = 16 * js::gc::ChunkSize;
/* Default maximum heap size in bytes to pass to JS_NewRuntime(). */
/* Default maximum heap size in bytes to pass to JS_NewContext(). */
const uint32_t DefaultHeapMaxBytes = 32 * 1024 * 1024;
namespace shadow {


@ -50,7 +50,7 @@ JS_SetICUMemoryFunctions(JS_ICUAllocFn allocFn,
/**
* Initialize SpiderMonkey, returning true only if initialization succeeded.
* Once this method has succeeded, it is safe to call JS_NewRuntime and other
* Once this method has succeeded, it is safe to call JS_NewContext and other
* JSAPI methods.
*
* This method must be called before any other JSAPI method is used on any


@ -330,7 +330,7 @@ class JS_FRIEND_API(BaseProxyHandler)
ESClass* cls) const;
virtual bool isArray(JSContext* cx, HandleObject proxy, JS::IsArrayAnswer* answer) const;
virtual const char* className(JSContext* cx, HandleObject proxy) const;
virtual JSString* fun_toString(JSContext* cx, HandleObject proxy, unsigned indent) const;
virtual JSString* fun_toString(JSContext* cx, HandleObject proxy, bool isToSource) const;
virtual RegExpShared* regexp_toShared(JSContext* cx, HandleObject proxy) const;
virtual bool boxedValue_unbox(JSContext* cx, HandleObject proxy, MutableHandleValue vp) const;
virtual void trace(JSTracer* trc, JSObject* proxy) const;


@ -140,7 +140,11 @@ class GCSchedulingTunables
*/
UnprotectedData<size_t> gcMaxBytes_;
/* Maximum nursery size for each zone group. */
/*
* Maximum nursery size for each zone group.
* Initially DefaultNurseryBytes and can be set by
* javascript.options.mem.nursery.max_kb
*/
ActiveThreadData<size_t> gcMaxNurseryBytes_;
/*
@ -709,11 +713,7 @@ class GCRuntime
bool canChangeActiveContext(JSContext* cx);
void triggerFullGCForAtoms() {
MOZ_ASSERT(fullGCForAtomsRequested_);
fullGCForAtomsRequested_ = false;
MOZ_RELEASE_ASSERT(triggerGC(JS::gcreason::ALLOC_TRIGGER));
}
void triggerFullGCForAtoms(JSContext* cx);
void runDebugGC();
void notifyRootsRemoved();


@ -646,6 +646,11 @@ js::Nursery::collect(JS::gcreason::Reason reason)
// because gcBytes >= gcMaxBytes.
if (rt->gc.usage.gcBytes() >= rt->gc.tunables.gcMaxBytes())
disable();
// Disable the nursery if the user changed the configuration setting. The
// nursery can only be re-enabled by resetting the configuration and
// restarting Firefox.
if (maxNurseryChunks_ == 0)
disable();
endProfile(ProfileKey::Total);
minorGcCount_++;
@ -924,6 +929,7 @@ js::Nursery::maybeResizeNursery(JS::gcreason::Reason reason, double promotionRat
{
static const double GrowThreshold = 0.05;
static const double ShrinkThreshold = 0.01;
unsigned newMaxNurseryChunks;
// Shrink the nursery to its minimum size if we ran out of memory or
// received a memory pressure event.
@ -932,10 +938,30 @@ js::Nursery::maybeResizeNursery(JS::gcreason::Reason reason, double promotionRat
return;
}
#ifdef JS_GC_ZEAL
// This zeal mode disables nursery resizing.
if (runtime()->hasZealMode(ZealMode::GenerationalGC))
return;
#endif
newMaxNurseryChunks = runtime()->gc.tunables.gcMaxNurseryBytes() >> ChunkShift;
if (newMaxNurseryChunks != maxNurseryChunks_) {
maxNurseryChunks_ = newMaxNurseryChunks;
/* The configured maximum nursery size is changing */
int extraChunks = numChunks() - newMaxNurseryChunks;
if (extraChunks > 0) {
/* We need to shrink the nursery */
shrinkAllocableSpace(extraChunks);
previousPromotionRate_ = promotionRate;
return;
}
}
if (promotionRate > GrowThreshold)
growAllocableSpace();
else if (promotionRate < ShrinkThreshold && previousPromotionRate_ < ShrinkThreshold)
shrinkAllocableSpace();
shrinkAllocableSpace(1);
previousPromotionRate_ = promotionRate;
}
@ -947,13 +973,13 @@ js::Nursery::growAllocableSpace()
}
void
js::Nursery::shrinkAllocableSpace()
js::Nursery::shrinkAllocableSpace(unsigned removeNumChunks)
{
#ifdef JS_GC_ZEAL
if (runtime()->hasZealMode(ZealMode::GenerationalGC))
return;
#endif
updateNumChunks(Max(numChunks() - 1, 1u));
updateNumChunks(Max(numChunks() - removeNumChunks, 1u));
}
void
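To make the chunk arithmetic above concrete: the configured maximum comes from gc.tunables.gcMaxNurseryBytes() and is turned into a whole number of chunks with a right shift, so when that maximum drops below the number of chunks currently allocated the nursery shrinks by the difference (and a maximum of zero chunks disables it entirely, as the new collect() check shows). A standalone sketch of that decision, assuming the usual 1 MiB gc::ChunkSize; the names are illustrative rather than the real Nursery API:

#include <algorithm>
#include <cstddef>
#include <cstdio>

constexpr unsigned ChunkShift = 20;                  // assumes 1 MiB chunks
constexpr size_t   ChunkSize  = size_t(1) << ChunkShift;

// Mirror of the resize decision: shrink by however many chunks we are over
// the new configured maximum, but never drop below one chunk.
unsigned ChunksAfterResize(unsigned numChunks, size_t maxNurseryBytes)
{
  unsigned newMax = unsigned(maxNurseryBytes >> ChunkShift);
  if (numChunks > newMax) {
    unsigned extra = numChunks - newMax;
    return std::max(numChunks - extra, 1u);          // shrinkAllocableSpace(extra)
  }
  return numChunks;                                  // growth is handled separately
}

int main()
{
  // 10 chunks allocated, the pref lowers the maximum to 4 MiB: 4 chunks remain.
  std::printf("%u\n", ChunksAfterResize(10, 4 * ChunkSize));
}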


@ -468,7 +468,7 @@ class Nursery
/* Change the allocable space provided by the nursery. */
void maybeResizeNursery(JS::gcreason::Reason reason, double promotionRate);
void growAllocableSpace();
void shrinkAllocableSpace();
void shrinkAllocableSpace(unsigned removeNumChunks);
void minimizeAllocableSpace();
/* Profile recording and printing. */


@ -43,6 +43,7 @@ JS::Zone::Zone(JSRuntime* rt, ZoneGroup* group)
markedAtoms_(group),
atomCache_(group),
externalStringCache_(group),
functionToStringCache_(group),
usage(&rt->gc.usage),
threshold(),
gcDelayBytes(0),


@ -107,6 +107,31 @@ class MOZ_NON_TEMPORARY_CLASS ExternalStringCache
MOZ_ALWAYS_INLINE void put(JSString* s);
};
class MOZ_NON_TEMPORARY_CLASS FunctionToStringCache
{
struct Entry {
JSScript* script;
JSString* string;
void set(JSScript* scriptArg, JSString* stringArg) {
script = scriptArg;
string = stringArg;
}
};
static const size_t NumEntries = 2;
mozilla::Array<Entry, NumEntries> entries_;
FunctionToStringCache(const FunctionToStringCache&) = delete;
void operator=(const FunctionToStringCache&) = delete;
public:
FunctionToStringCache() { purge(); }
void purge() { mozilla::PodArrayZero(entries_); }
MOZ_ALWAYS_INLINE JSString* lookup(JSScript* script) const;
MOZ_ALWAYS_INLINE void put(JSScript* script, JSString* string);
};
} // namespace js
namespace JS {
@ -210,11 +235,13 @@ struct Zone : public JS::shadow::Zone,
void scheduleGC() { MOZ_ASSERT(!CurrentThreadIsHeapBusy()); gcScheduled_ = true; }
void unscheduleGC() { gcScheduled_ = false; }
bool isGCScheduled() { return gcScheduled_ && canCollect(); }
bool isGCScheduled() { return gcScheduled_; }
void setPreservingCode(bool preserving) { gcPreserveCode_ = preserving; }
bool isPreservingCode() const { return gcPreserveCode_; }
// Whether this zone can currently be collected. This doesn't take account
// of AutoKeepAtoms for the atoms zone.
bool canCollect();
void changeGCState(GCState prev, GCState next) {
@ -443,6 +470,9 @@ struct Zone : public JS::shadow::Zone,
// Cache storing allocated external strings. Purged on GC.
js::ZoneGroupOrGCTaskData<js::ExternalStringCache> externalStringCache_;
// Cache for Function.prototype.toString. Purged on GC.
js::ZoneGroupOrGCTaskData<js::FunctionToStringCache> functionToStringCache_;
public:
js::SparseBitmap& markedAtoms() { return markedAtoms_.ref(); }
@ -450,6 +480,8 @@ struct Zone : public JS::shadow::Zone,
js::ExternalStringCache& externalStringCache() { return externalStringCache_.ref(); };
js::FunctionToStringCache& functionToStringCache() { return functionToStringCache_.ref(); }
// Track heap usage under this Zone.
js::gc::HeapUsage usage;
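The FunctionToStringCache added above is a tiny fixed-size cache from a JSScript* to its computed source string, purged on GC like the neighboring external-string cache. Its lookup/put bodies are only partially visible in this commit, so the following is a generic two-entry sketch of the idea rather than the real SpiderMonkey code (the real cache presumably keeps the most recently used entry first; this version simply overwrites slots round-robin):

#include <array>
#include <cstddef>

// A tiny N-entry cache: lookup scans the entries linearly, put overwrites
// slots in round-robin order, purge resets everything.
template <typename Key, typename Value, size_t N = 2>
class TinyCache
{
  struct Entry { Key key; Value value; };
  std::array<Entry, N> entries_{};
  size_t next_ = 0;

 public:
  Value lookup(const Key& key) const {
    for (const Entry& e : entries_) {
      if (e.key == key)
        return e.value;
    }
    return Value{};                  // miss sentinel (nullptr for pointer values)
  }
  void put(const Key& key, const Value& value) {
    entries_[next_] = Entry{key, value};
    next_ = (next_ + 1) % N;
  }
  void purge() { *this = TinyCache(); }
};

// Usage shape (with SpiderMonkey types): TinyCache<JSScript*, JSString*> cache;
// a hit in lookup() skips re-decompiling the function source on
// Function.prototype.toString.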


@ -0,0 +1,27 @@
// Exercise triggering GC of atoms zone while off-thread parsing is happening.
if (helperThreadCount() === 0)
quit();
gczeal(0);
// Reduce some GC parameters so that we can trigger a GC more easily.
gcparam('lowFrequencyHeapGrowth', 120);
gcparam('highFrequencyHeapGrowthMin', 120);
gcparam('highFrequencyHeapGrowthMax', 120);
gcparam('allocationThreshold', 1);
gc();
// Start an off-thread parse.
offThreadCompileScript("print('Finished')");
// Allocate lots of atoms, parsing occasionally.
for (let i = 0; i < 10; i++) {
print(i);
for (let j = 0; j < 10000; j++)
Symbol.for(i + 10 * j);
eval(`${i}`);
}
// Finish the off-thread parse.
runOffThreadScript();


@ -0,0 +1,20 @@
function test() {
var count = 0;
function f(x) {
"use strict";
if (x) {
Object.seal(this);
}
this[0] = 1;
}
for (var y of [1, 0, arguments, 1]) {
try {
var o = new f(y);
} catch (e) {
count++;
}
}
assertEq(count, 3);
}
test();
test();


@ -1506,6 +1506,119 @@ BaselineCacheIRCompiler::emitStoreDenseElementHole()
return true;
}
bool
BaselineCacheIRCompiler::emitArrayPush()
{
ObjOperandId objId = reader.objOperandId();
ValOperandId rhsId = reader.valOperandId();
// Allocate the fixed registers first. These need to be fixed for
// callTypeUpdateIC.
AutoScratchRegister scratch(allocator, masm, R1.scratchReg());
ValueOperand val = allocator.useFixedValueRegister(masm, rhsId, R0);
Register obj = allocator.useRegister(masm, objId);
AutoScratchRegister scratchLength(allocator, masm);
FailurePath* failure;
if (!addFailurePath(&failure))
return false;
// Load obj->elements in scratch.
masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);
masm.load32(Address(scratch, ObjectElements::offsetOfLength()), scratchLength);
BaseObjectElementIndex element(scratch, scratchLength);
Address initLength(scratch, ObjectElements::offsetOfInitializedLength());
Address elementsFlags(scratch, ObjectElements::offsetOfFlags());
// Check for copy-on-write or frozen elements.
masm.branchTest32(Assembler::NonZero, elementsFlags,
Imm32(ObjectElements::COPY_ON_WRITE |
ObjectElements::FROZEN),
failure->label());
// Fail if length != initLength.
masm.branch32(Assembler::NotEqual, initLength, scratchLength, failure->label());
// If scratchLength < capacity, we can add a dense element inline. If not we
// need to allocate more elements.
Label capacityOk;
Address capacity(scratch, ObjectElements::offsetOfCapacity());
masm.branch32(Assembler::Above, capacity, scratchLength, &capacityOk);
// Check for non-writable array length. We only have to do this if
// index >= capacity.
masm.branchTest32(Assembler::NonZero, elementsFlags,
Imm32(ObjectElements::NONWRITABLE_ARRAY_LENGTH),
failure->label());
LiveRegisterSet save(GeneralRegisterSet::Volatile(), liveVolatileFloatRegs());
save.takeUnchecked(scratch);
masm.PushRegsInMask(save);
masm.setupUnalignedABICall(scratch);
masm.loadJSContext(scratch);
masm.passABIArg(scratch);
masm.passABIArg(obj);
masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, NativeObject::addDenseElementDontReportOOM));
masm.mov(ReturnReg, scratch);
masm.PopRegsInMask(save);
masm.branchIfFalseBool(scratch, failure->label());
// Load the reallocated elements pointer.
masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);
masm.bind(&capacityOk);
// Check if we have to convert a double element.
Label noConversion;
masm.branchTest32(Assembler::Zero, elementsFlags,
Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
&noConversion);
// We need to convert int32 values being stored into doubles. Note that
// double arrays are only created by IonMonkey, so if we have no FP support
// Ion is disabled and there should be no double arrays.
if (cx_->runtime()->jitSupportsFloatingPoint) {
// It's fine to convert the value in place in Baseline. We can't do
// this in Ion.
masm.convertInt32ValueToDouble(val);
} else {
masm.assumeUnreachable("There shouldn't be double arrays when there is no FP support.");
}
masm.bind(&noConversion);
// Call the type update IC. After this everything must be infallible as we
// don't save all registers here.
LiveGeneralRegisterSet saveRegs;
saveRegs.add(obj);
saveRegs.add(val);
if (!callTypeUpdateIC(obj, val, scratch, saveRegs))
return false;
// Reload obj->elements as callTypeUpdateIC used the scratch register.
masm.loadPtr(Address(obj, NativeObject::offsetOfElements()), scratch);
// Increment initLength and length.
Address length(scratch, ObjectElements::offsetOfLength());
masm.add32(Imm32(1), initLength);
masm.load32(length, scratchLength);
masm.add32(Imm32(1), length);
// Store the value.
masm.storeValue(val, element);
emitPostBarrierElement(obj, val, scratch, scratchLength);
// Return value is new length.
masm.add32(Imm32(1), scratchLength);
masm.tagValue(JSVAL_TYPE_INT32, scratchLength, val);
return true;
}
bool
BaselineCacheIRCompiler::emitStoreTypedElement()
{
@ -2098,9 +2211,10 @@ BaselineCacheIRCompiler::init(CacheKind kind)
static const size_t MaxOptimizedCacheIRStubs = 16;
ICStub*
jit::AttachBaselineCacheIRStub(JSContext* cx, const CacheIRWriter& writer,
CacheKind kind, ICStubEngine engine, JSScript* outerScript,
ICFallbackStub* stub, bool* attached)
js::jit::AttachBaselineCacheIRStub(JSContext* cx, const CacheIRWriter& writer,
CacheKind kind, BaselineCacheIRStubKind stubKind,
ICStubEngine engine, JSScript* outerScript,
ICFallbackStub* stub, bool* attached)
{
// We shouldn't GC or report OOM (or any other exception) here.
AutoAssertNoPendingException aanpe(cx);
@ -2115,33 +2229,16 @@ jit::AttachBaselineCacheIRStub(JSContext* cx, const CacheIRWriter& writer,
// unlimited number of stubs.
MOZ_ASSERT(stub->numOptimizedStubs() < MaxOptimizedCacheIRStubs);
enum class CacheIRStubKind { Regular, Monitored, Updated };
uint32_t stubDataOffset;
CacheIRStubKind stubKind;
switch (kind) {
case CacheKind::Compare:
case CacheKind::In:
case CacheKind::HasOwn:
case CacheKind::BindName:
case CacheKind::TypeOf:
case CacheKind::GetIterator:
stubDataOffset = sizeof(ICCacheIR_Regular);
stubKind = CacheIRStubKind::Regular;
break;
case CacheKind::GetProp:
case CacheKind::GetElem:
case CacheKind::GetName:
case CacheKind::GetPropSuper:
case CacheKind::GetElemSuper:
case CacheKind::Call:
uint32_t stubDataOffset = 0;
switch (stubKind) {
case BaselineCacheIRStubKind::Monitored:
stubDataOffset = sizeof(ICCacheIR_Monitored);
stubKind = CacheIRStubKind::Monitored;
break;
case CacheKind::SetProp:
case CacheKind::SetElem:
case BaselineCacheIRStubKind::Regular:
stubDataOffset = sizeof(ICCacheIR_Regular);
break;
case BaselineCacheIRStubKind::Updated:
stubDataOffset = sizeof(ICCacheIR_Updated);
stubKind = CacheIRStubKind::Updated;
break;
}
@ -2186,7 +2283,7 @@ jit::AttachBaselineCacheIRStub(JSContext* cx, const CacheIRWriter& writer,
for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
bool updated = false;
switch (stubKind) {
case CacheIRStubKind::Regular: {
case BaselineCacheIRStubKind::Regular: {
if (!iter->isCacheIR_Regular())
continue;
auto otherStub = iter->toCacheIR_Regular();
@ -2196,7 +2293,7 @@ jit::AttachBaselineCacheIRStub(JSContext* cx, const CacheIRWriter& writer,
continue;
break;
}
case CacheIRStubKind::Monitored: {
case BaselineCacheIRStubKind::Monitored: {
if (!iter->isCacheIR_Monitored())
continue;
auto otherStub = iter->toCacheIR_Monitored();
@ -2206,7 +2303,7 @@ jit::AttachBaselineCacheIRStub(JSContext* cx, const CacheIRWriter& writer,
continue;
break;
}
case CacheIRStubKind::Updated: {
case BaselineCacheIRStubKind::Updated: {
if (!iter->isCacheIR_Updated())
continue;
auto otherStub = iter->toCacheIR_Updated();
@ -2237,14 +2334,14 @@ jit::AttachBaselineCacheIRStub(JSContext* cx, const CacheIRWriter& writer,
return nullptr;
switch (stubKind) {
case CacheIRStubKind::Regular: {
case BaselineCacheIRStubKind::Regular: {
auto newStub = new(newStubMem) ICCacheIR_Regular(code, stubInfo);
writer.copyStubData(newStub->stubDataStart());
stub->addNewStub(newStub);
*attached = true;
return newStub;
}
case CacheIRStubKind::Monitored: {
case BaselineCacheIRStubKind::Monitored: {
ICStub* monitorStub =
stub->toMonitoredFallbackStub()->fallbackMonitorStub()->firstMonitorStub();
auto newStub = new(newStubMem) ICCacheIR_Monitored(code, monitorStub, stubInfo);
@ -2253,7 +2350,7 @@ jit::AttachBaselineCacheIRStub(JSContext* cx, const CacheIRWriter& writer,
*attached = true;
return newStub;
}
case CacheIRStubKind::Updated: {
case BaselineCacheIRStubKind::Updated: {
auto newStub = new(newStubMem) ICCacheIR_Updated(code, stubInfo);
if (!newStub->initUpdatingChain(cx, stubSpace)) {
cx->recoverFromOutOfMemory();


@ -17,8 +17,11 @@ namespace jit {
class ICFallbackStub;
class ICStub;
enum class BaselineCacheIRStubKind { Regular, Monitored, Updated };
ICStub* AttachBaselineCacheIRStub(JSContext* cx, const CacheIRWriter& writer,
CacheKind kind, ICStubEngine engine, JSScript* outerScript,
CacheKind kind, BaselineCacheIRStubKind stubKind,
ICStubEngine engine, JSScript* outerScript,
ICFallbackStub* stub, bool* attached);
} // namespace jit


@ -814,6 +814,7 @@ DoGetElemFallback(JSContext* cx, BaselineFrame* frame, ICGetElem_Fallback* stub_
&isTemporarilyUnoptimizable, lhs, rhs, lhs, CanAttachGetter::Yes);
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Monitored,
engine, script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
@ -886,6 +887,7 @@ DoGetElemSuperFallback(JSContext* cx, BaselineFrame* frame, ICGetElem_Fallback*
CanAttachGetter::Yes);
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Monitored,
engine, script, stub, &attached);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
@ -1051,6 +1053,7 @@ DoSetElemFallback(JSContext* cx, BaselineFrame* frame, ICSetElem_Fallback* stub_
&isTemporarilyUnoptimizable, objv, index, rhs);
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Updated,
ICStubEngine::Baseline, frame->script(),
stub, &attached);
if (newStub) {
@ -1114,6 +1117,7 @@ DoSetElemFallback(JSContext* cx, BaselineFrame* frame, ICSetElem_Fallback* stub_
&isTemporarilyUnoptimizable, objv, index, rhs);
if (gen.tryAttachAddSlotStub(oldGroup, oldShape)) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Updated,
ICStubEngine::Baseline, frame->script(),
stub, &attached);
if (newStub) {
@ -1311,6 +1315,7 @@ DoInFallback(JSContext* cx, BaselineFrame* frame, ICIn_Fallback* stub_,
bool attached = false;
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
engine, script, stub, &attached);
if (newStub)
JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
@ -1379,6 +1384,7 @@ DoHasOwnFallback(JSContext* cx, BaselineFrame* frame, ICHasOwn_Fallback* stub_,
bool attached = false;
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
engine, script, stub, &attached);
if (newStub)
JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
@ -1450,6 +1456,7 @@ DoGetNameFallback(JSContext* cx, BaselineFrame* frame, ICGetName_Fallback* stub_
GetNameIRGenerator gen(cx, script, pc, stub->state().mode(), envChain, name);
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Monitored,
engine, script, stub, &attached);
if (newStub)
JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
@ -1529,6 +1536,7 @@ DoBindNameFallback(JSContext* cx, BaselineFrame* frame, ICBindName_Fallback* stu
BindNameIRGenerator gen(cx, script, pc, stub->state().mode(), envChain, name);
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
ICStubEngine::Baseline, script, stub,
&attached);
if (newStub)
@ -1703,6 +1711,7 @@ DoSetPropFallback(JSContext* cx, BaselineFrame* frame, ICSetProp_Fallback* stub_
&isTemporarilyUnoptimizable, lhs, idVal, rhs);
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Updated,
ICStubEngine::Baseline, frame->script(),
stub, &attached);
if (newStub) {
@ -1774,6 +1783,7 @@ DoSetPropFallback(JSContext* cx, BaselineFrame* frame, ICSetProp_Fallback* stub_
&isTemporarilyUnoptimizable, lhs, idVal, rhs);
if (gen.tryAttachAddSlotStub(oldGroup, oldShape)) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Updated,
ICStubEngine::Baseline, frame->script(),
stub, &attached);
if (newStub) {
@ -2510,16 +2520,40 @@ DoCallFallback(JSContext* cx, BaselineFrame* frame, ICCall_Fallback* stub_, uint
return false;
}
CallIRGenerator gen(cx, script, pc, stub->state().mode(), argc,
callee, callArgs.thisv(),
HandleValueArray::fromMarkedLocation(argc, vp+2));
bool optimizeAfterCall = false;
CallIRGenerator::OptStrategy optStrategy = gen.getOptStrategy(&optimizeAfterCall);
// Transition stub state to megamorphic or generic if warranted.
if (stub->state().maybeTransition())
stub->discardStubs(cx);
// Try attaching a call stub, if the CallIRGenerator has determined that this
// operation cannot be optimized after the call.
bool canAttachStub = stub->state().canAttachStub();
bool handled = false;
if (!optimizeAfterCall) {
// Only bother to try optimizing JSOP_CALL with CacheIR if the chain is still
// allowed to attach stubs.
if (canAttachStub) {
CallIRGenerator gen(cx, script, pc, stub, stub->state().mode(), argc,
callee, callArgs.thisv(),
HandleValueArray::fromMarkedLocation(argc, vp+2));
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
gen.cacheIRStubKind(),
ICStubEngine::Baseline,
script, stub, &handled);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
// If it's an updated stub, initialize it.
if (gen.cacheIRStubKind() == BaselineCacheIRStubKind::Updated)
SetUpdateStubData(newStub->toCacheIR_Updated(), gen.typeCheckInfo());
}
}
if (!handled)
stub->state().trackNotAttached();
}
// Try attaching a regular call stub, but only if the CacheIR attempt didn't add
// any stubs.
if (!handled) {
bool createSingleton = ObjectGroup::useSingletonForNewObject(cx, script, pc);
if (!TryAttachCallStub(cx, stub, script, pc, op, argc, vp, constructing, false,
createSingleton, &handled))
@ -2568,17 +2602,6 @@ DoCallFallback(JSContext* cx, BaselineFrame* frame, ICCall_Fallback* stub_, uint
if (!stub->addMonitorStubForValue(cx, frame, types, res))
return false;
if (optimizeAfterCall && !handled && optStrategy != CallIRGenerator::OptStrategy::None) {
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
ICStubEngine::Baseline, script, stub,
&handled);
if (newStub) {
JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
}
}
}
if (!handled) {
// If 'callee' is a potential Call_ConstStringSplit, try to attach an
// optimized ConstStringSplit stub. Note that vp[0] now holds the return value
@ -4172,6 +4195,7 @@ DoGetIteratorFallback(JSContext* cx, BaselineFrame* frame, ICGetIterator_Fallbac
bool attached = false;
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
engine, script, stub, &attached);
if (newStub)
JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
@ -4562,6 +4586,7 @@ DoTypeOfFallback(JSContext* cx, BaselineFrame* frame, ICTypeOf_Fallback* stub, H
bool attached = false;
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
engine, script, stub, &attached);
if (newStub)
JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");


@ -747,11 +747,11 @@ class ICCall_Fallback : public ICMonitoredFallbackStub
static const uint32_t MAX_OPTIMIZED_STUBS = 16;
static const uint32_t MAX_SCRIPTED_STUBS = 7;
static const uint32_t MAX_NATIVE_STUBS = 7;
private:
private:
explicit ICCall_Fallback(JitCode* stubCode)
: ICMonitoredFallbackStub(ICStub::Call_Fallback, stubCode)
{ }
{}
public:
void noteUnoptimizableCall() {


@ -9,6 +9,7 @@
#include "mozilla/DebugOnly.h"
#include "mozilla/FloatingPoint.h"
#include "jit/BaselineCacheIRCompiler.h"
#include "jit/BaselineIC.h"
#include "jit/CacheIRSpewer.h"
#include "jit/IonCaches.h"
@ -3747,62 +3748,34 @@ GetIteratorIRGenerator::tryAttachNativeIterator(ObjOperandId objId, HandleObject
}
CallIRGenerator::CallIRGenerator(JSContext* cx, HandleScript script, jsbytecode* pc,
ICState::Mode mode, uint32_t argc,
ICCall_Fallback* stub, ICState::Mode mode, uint32_t argc,
HandleValue callee, HandleValue thisval, HandleValueArray args)
: IRGenerator(cx, script, pc, CacheKind::Call, mode),
argc_(argc),
callee_(callee),
thisval_(thisval),
args_(args),
cachedStrategy_()
typeCheckInfo_(cx, /* needsTypeBarrier = */ true),
cacheIRStubKind_(BaselineCacheIRStubKind::Regular)
{ }
CallIRGenerator::OptStrategy
CallIRGenerator::canOptimize()
{
// Ensure callee is a function.
if (!callee_.isObject() || !callee_.toObject().is<JSFunction>())
return OptStrategy::None;
RootedFunction calleeFunc(cx_, &callee_.toObject().as<JSFunction>());
OptStrategy strategy;
if ((strategy = canOptimizeStringSplit(calleeFunc)) != OptStrategy::None) {
return strategy;
}
return OptStrategy::None;
}
CallIRGenerator::OptStrategy
CallIRGenerator::canOptimizeStringSplit(HandleFunction calleeFunc)
bool
CallIRGenerator::tryAttachStringSplit()
{
// Only optimize StringSplitString(str, str)
if (argc_ != 2 || !args_[0].isString() || !args_[1].isString())
return OptStrategy::None;
return false;
// Just for now: if they're both atoms, then do not optimize using
// CacheIR and allow the legacy "ConstStringSplit" BaselineIC optimization
// to proceed.
if (args_[0].toString()->isAtom() && args_[1].toString()->isAtom())
return OptStrategy::None;
return false;
if (!calleeFunc->isNative())
return OptStrategy::None;
if (calleeFunc->native() != js::intrinsic_StringSplitString)
return OptStrategy::None;
return OptStrategy::StringSplit;
}
bool
CallIRGenerator::tryAttachStringSplit()
{
// Get the object group to use for this location.
RootedObjectGroup group(cx_, ObjectGroupCompartment::getStringSplitStringGroup(cx_));
if (!group) {
if (!group)
return false;
}
AutoAssertNoPendingException aanpe(cx_);
Int32OperandId argcId(writer.setInputOperandId(0));
@ -3810,15 +3783,14 @@ CallIRGenerator::tryAttachStringSplit()
// Ensure argc == 1.
writer.guardSpecificInt32Immediate(argcId, 2);
// 1 argument only. Stack-layout here is (bottom to top):
// 2 arguments. Stack-layout here is (bottom to top):
//
// 3: Callee
// 2: ThisValue
// 1: Arg0
// 0: Arg1 <-- Top of stack
// Ensure callee is an object and is the function that matches the callee optimized
// against during stub generation (i.e. the String_split function object).
// Ensure callee is the |String_split| native function.
ValOperandId calleeValId = writer.loadStackValue(3);
ObjOperandId calleeObjId = writer.guardIsObject(calleeValId);
writer.guardIsNativeFunction(calleeObjId, js::intrinsic_StringSplitString);
@ -3835,42 +3807,164 @@ CallIRGenerator::tryAttachStringSplit()
writer.callStringSplitResult(arg0StrId, arg1StrId, group);
writer.typeMonitorResult();
cacheIRStubKind_ = BaselineCacheIRStubKind::Monitored;
trackAttached("StringSplitString");
TypeScript::Monitor(cx_, script_, pc_, TypeSet::ObjectType(group));
return true;
}
CallIRGenerator::OptStrategy
CallIRGenerator::getOptStrategy(bool* optimizeAfterCall)
bool
CallIRGenerator::tryAttachArrayPush()
{
if (!cachedStrategy_) {
cachedStrategy_ = mozilla::Some(canOptimize());
}
if (optimizeAfterCall != nullptr) {
MOZ_ASSERT(cachedStrategy_.isSome());
switch (cachedStrategy_.value()) {
case OptStrategy::StringSplit:
*optimizeAfterCall = true;
break;
// Only optimize on obj.push(val);
if (argc_ != 1 || !thisval_.isObject())
return false;
default:
*optimizeAfterCall = false;
}
}
return cachedStrategy_.value();
// Where |obj| is a native array.
RootedObject thisobj(cx_, &thisval_.toObject());
if (!thisobj->is<ArrayObject>())
return false;
RootedArrayObject thisarray(cx_, &thisobj->as<ArrayObject>());
// And the object group for the array is not collecting preliminary objects.
if (thisobj->group()->maybePreliminaryObjects())
return false;
// Check for other indexed properties or class hooks.
if (!CanAttachAddElement(thisobj, /* isInit = */ false))
return false;
// Can't add new elements to arrays with non-writable length.
if (!thisarray->lengthIsWritable())
return false;
// Check that array is extensible.
if (!thisarray->nonProxyIsExtensible())
return false;
MOZ_ASSERT(!thisarray->getElementsHeader()->isFrozen(),
"Extensible arrays should not have frozen elements");
MOZ_ASSERT(thisarray->lengthIsWritable());
// After this point, we can generate code fine.
// Generate code.
AutoAssertNoPendingException aanpe(cx_);
Int32OperandId argcId(writer.setInputOperandId(0));
// Ensure argc == 1.
writer.guardSpecificInt32Immediate(argcId, 1);
// 1 argument only. Stack-layout here is (bottom to top):
//
// 2: Callee
// 1: ThisValue
// 0: Arg0 <-- Top of stack.
// Guard callee is the |js::array_push| native function.
ValOperandId calleeValId = writer.loadStackValue(2);
ObjOperandId calleeObjId = writer.guardIsObject(calleeValId);
writer.guardIsNativeFunction(calleeObjId, js::array_push);
// Guard this is an array object.
ValOperandId thisValId = writer.loadStackValue(1);
ObjOperandId thisObjId = writer.guardIsObject(thisValId);
writer.guardClass(thisObjId, GuardClassKind::Array);
// This is a soft assert, documenting the fact that we pass 'true'
// for needsTypeBarrier when constructing typeCheckInfo_ for CallIRGenerator.
// Can be removed safely if the assumption becomes false.
MOZ_ASSERT(typeCheckInfo_.needsTypeBarrier());
// Guard that the group and shape matches.
if (typeCheckInfo_.needsTypeBarrier())
writer.guardGroup(thisObjId, thisobj->group());
writer.guardShape(thisObjId, thisarray->shape());
// Guard proto chain shapes.
ShapeGuardProtoChain(writer, thisobj, thisObjId);
// arr.push(x) is equivalent to arr[arr.length] = x for regular arrays.
ValOperandId argId = writer.loadStackValue(0);
writer.arrayPush(thisObjId, argId);
writer.returnFromIC();
// Set the type-check info, and the stub kind to Updated
typeCheckInfo_.set(thisobj->group(), JSID_VOID);
cacheIRStubKind_ = BaselineCacheIRStubKind::Updated;
trackAttached("ArrayPush");
return true;
}
bool
CallIRGenerator::tryAttachStub()
{
OptStrategy strategy = getOptStrategy();
// Only optimize when the mode is Specialized.
if (mode_ != ICState::Mode::Specialized)
return false;
if (strategy == OptStrategy::StringSplit) {
return tryAttachStringSplit();
// Ensure callee is a function.
if (!callee_.isObject() || !callee_.toObject().is<JSFunction>())
return false;
RootedFunction calleeFunc(cx_, &callee_.toObject().as<JSFunction>());
// Check for native-function optimizations.
if (calleeFunc->isNative()) {
if (calleeFunc->native() == js::intrinsic_StringSplitString) {
if (tryAttachStringSplit())
return true;
}
if (calleeFunc->native() == js::array_push) {
if (tryAttachArrayPush())
return true;
}
}
MOZ_ASSERT(strategy == OptStrategy::None);
return false;
}
void
CallIRGenerator::trackAttached(const char* name)
{
#ifdef JS_CACHEIR_SPEW
CacheIRSpewer& sp = CacheIRSpewer::singleton();
if (sp.enabled()) {
LockGuard<Mutex> guard(sp.lock());
sp.beginCache(guard, *this);
sp.valueProperty(guard, "callee", callee_);
sp.valueProperty(guard, "thisval", thisval_);
sp.valueProperty(guard, "argc", Int32Value(argc_));
sp.attached(guard, name);
sp.endCache(guard);
}
#endif
}
void
CallIRGenerator::trackNotAttached()
{
#ifdef JS_CACHEIR_SPEW
CacheIRSpewer& sp = CacheIRSpewer::singleton();
if (sp.enabled()) {
LockGuard<Mutex> guard(sp.lock());
sp.beginCache(guard, *this);
sp.valueProperty(guard, "callee", callee_);
sp.valueProperty(guard, "thisval", thisval_);
sp.valueProperty(guard, "argc", Int32Value(argc_));
sp.endCache(guard);
}
#endif
}
CompareIRGenerator::CompareIRGenerator(JSContext* cx, HandleScript script, jsbytecode* pc,
ICState::Mode mode, JSOp op,
HandleValue lhsVal, HandleValue rhsVal)


@ -19,6 +19,9 @@
namespace js {
namespace jit {
enum class BaselineCacheIRStubKind;
// CacheIR is an (extremely simple) linear IR language for inline caches.
// From this IR, we can generate machine code for Baseline or Ion IC stubs.
//
@ -218,6 +221,7 @@ extern const char* CacheKindNames[];
_(StoreUnboxedProperty) \
_(StoreDenseElement) \
_(StoreDenseElementHole) \
_(ArrayPush) \
_(StoreTypedElement) \
_(StoreUnboxedArrayElement) \
_(StoreUnboxedArrayElementHole) \
@ -820,6 +824,10 @@ class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter
writeOperandId(rhs);
buffer_.writeByte(handleAdd);
}
void arrayPush(ObjOperandId obj, ValOperandId rhs) {
writeOpWithOperandId(CacheOp::ArrayPush, obj);
writeOperandId(rhs);
}
void callScriptedSetter(ObjOperandId obj, JSFunction* setter, ValOperandId rhs) {
writeOpWithOperandId(CacheOp::CallScriptedSetter, obj);
addStubField(uintptr_t(setter), StubField::Type::JSObject);
@ -1460,31 +1468,35 @@ class MOZ_RAII GetIteratorIRGenerator : public IRGenerator
class MOZ_RAII CallIRGenerator : public IRGenerator
{
public:
enum class OptStrategy {
None = 0,
StringSplit
};
private:
uint32_t argc_;
HandleValue callee_;
HandleValue thisval_;
HandleValueArray args_;
PropertyTypeCheckInfo typeCheckInfo_;
BaselineCacheIRStubKind cacheIRStubKind_;
mozilla::Maybe<OptStrategy> cachedStrategy_;
OptStrategy canOptimize();
OptStrategy canOptimizeStringSplit(HandleFunction calleeFunc);
bool tryAttachStringSplit();
bool tryAttachArrayPush();
void trackAttached(const char* name);
void trackNotAttached();
public:
CallIRGenerator(JSContext* cx, HandleScript, jsbytecode* pc, ICState::Mode mode,
CallIRGenerator(JSContext* cx, HandleScript script, jsbytecode* pc,
ICCall_Fallback* stub, ICState::Mode mode,
uint32_t argc, HandleValue callee, HandleValue thisval,
HandleValueArray args);
OptStrategy getOptStrategy(bool* optimizeAfterCall = nullptr);
bool tryAttachStub();
BaselineCacheIRStubKind cacheIRStubKind() const {
return cacheIRStubKind_;
}
const PropertyTypeCheckInfo* typeCheckInfo() const {
return &typeCheckInfo_;
}
};
class MOZ_RAII CompareIRGenerator : public IRGenerator


@ -8473,10 +8473,11 @@ class OutOfLineStoreElementHole : public OutOfLineCodeBase<CodeGenerator>
{
LInstruction* ins_;
Label rejoinStore_;
bool strict_;
public:
explicit OutOfLineStoreElementHole(LInstruction* ins)
: ins_(ins)
explicit OutOfLineStoreElementHole(LInstruction* ins, bool strict)
: ins_(ins), strict_(strict)
{
MOZ_ASSERT(ins->isStoreElementHoleV() || ins->isStoreElementHoleT() ||
ins->isFallibleStoreElementV() || ins->isFallibleStoreElementT());
@ -8491,6 +8492,9 @@ class OutOfLineStoreElementHole : public OutOfLineCodeBase<CodeGenerator>
Label* rejoinStore() {
return &rejoinStore_;
}
bool strict() const {
return strict_;
}
};
void
@ -8579,7 +8583,8 @@ CodeGenerator::emitStoreElementHoleT(T* lir)
static_assert(std::is_same<T, LStoreElementHoleT>::value || std::is_same<T, LFallibleStoreElementT>::value,
"emitStoreElementHoleT called with unexpected argument type");
OutOfLineStoreElementHole* ool = new(alloc()) OutOfLineStoreElementHole(lir);
OutOfLineStoreElementHole* ool =
new(alloc()) OutOfLineStoreElementHole(lir, current->mir()->strict());
addOutOfLineCode(ool, lir->mir());
Register obj = ToRegister(lir->object());
@ -8638,7 +8643,8 @@ CodeGenerator::emitStoreElementHoleV(T* lir)
static_assert(std::is_same<T, LStoreElementHoleV>::value || std::is_same<T, LFallibleStoreElementV>::value,
"emitStoreElementHoleV called with unexpected parameter type");
OutOfLineStoreElementHole* ool = new(alloc()) OutOfLineStoreElementHole(lir);
OutOfLineStoreElementHole* ool =
new(alloc()) OutOfLineStoreElementHole(lir, current->mir()->strict());
addOutOfLineCode(ool, lir->mir());
Register obj = ToRegister(lir->object());
@ -8898,7 +8904,7 @@ CodeGenerator::visitOutOfLineStoreElementHole(OutOfLineStoreElementHole* ool)
masm.bind(&callStub);
saveLive(ins);
pushArg(Imm32(current->mir()->strict()));
pushArg(Imm32(ool->strict()));
pushArg(value);
if (index->isConstant())
pushArg(Imm32(ToInt32(index)));


@ -66,7 +66,9 @@ class ICState
size_t numOptimizedStubs() const { return numOptimizedStubs_; }
MOZ_ALWAYS_INLINE bool canAttachStub() const {
MOZ_ASSERT(numOptimizedStubs_ <= MaxOptimizedStubs);
// Note: we cannot assert that numOptimizedStubs_ <= MaxOptimizedStubs
// because old-style baseline ICs may attach more stubs than
// MaxOptimizedStubs allows.
if (mode_ == Mode::Generic || JitOptions.disableCacheIR)
return false;
return true;
@ -78,7 +80,9 @@ class ICState
// If this returns true, we transitioned to a new mode and the caller
// should discard all stubs.
MOZ_MUST_USE MOZ_ALWAYS_INLINE bool maybeTransition() {
MOZ_ASSERT(numOptimizedStubs_ <= MaxOptimizedStubs);
// Note: we cannot assert that numOptimizedStubs_ <= MaxOptimizedStubs
// because old-style baseline ICs may attach more stubs than
// MaxOptimizedStubs allows.
if (mode_ == Mode::Generic)
return false;
if (numOptimizedStubs_ < MaxOptimizedStubs && numFailures_ < maxFailures())


@ -1782,6 +1782,13 @@ IonCacheIRCompiler::emitStoreDenseElementHole()
return true;
}
bool
IonCacheIRCompiler::emitArrayPush()
{
MOZ_ASSERT_UNREACHABLE("emitArrayPush not supported for IonCaches.");
return false;
}
bool
IonCacheIRCompiler::emitStoreTypedElement()
{


@ -1462,6 +1462,7 @@ DoCompareFallback(JSContext* cx, void* payload, ICCompare_Fallback* stub_, Handl
bool attached = false;
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Regular,
engine, script, stub, &attached);
if (newStub)
JitSpew(JitSpew_BaselineIC, " Attached CacheIR stub");
@ -2071,6 +2072,7 @@ DoGetPropFallback(JSContext* cx, BaselineFrame* frame, ICGetProp_Fallback* stub_
&isTemporarilyUnoptimizable, val, idVal, val, CanAttachGetter::Yes);
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Monitored,
ICStubEngine::Baseline, script,
stub, &attached);
if (newStub) {
@ -2141,6 +2143,7 @@ DoGetPropSuperFallback(JSContext* cx, BaselineFrame* frame, ICGetProp_Fallback*
CanAttachGetter::Yes);
if (gen.tryAttachStub()) {
ICStub* newStub = AttachBaselineCacheIRStub(cx, gen.writerRef(), gen.cacheKind(),
BaselineCacheIRStubKind::Monitored,
ICStubEngine::Baseline, script,
stub, &attached);
if (newStub) {


@ -163,6 +163,10 @@ CodeGeneratorShared::generateEpilogue()
bool
CodeGeneratorShared::generateOutOfLineCode()
{
// OOL paths should not attempt to use |current| as it's the last block
// instead of the block corresponding to the OOL path.
current = nullptr;
for (size_t i = 0; i < outOfLineCode_.length(); i++) {
// Add native => bytecode mapping entries for OOL sites.
// Not enabled on wasm yet since it doesn't contain bytecode mappings.


@ -116,7 +116,7 @@ BEGIN_TEST(testXDR_source)
CHECK(script);
script = FreezeThaw(cx, script);
CHECK(script);
JSString* out = JS_DecompileScript(cx, script, "testing", 0);
JSString* out = JS_DecompileScript(cx, script);
CHECK(out);
bool equal;
CHECK(JS_StringEqualsAscii(cx, out, *s, &equal));


@ -4599,7 +4599,7 @@ JS::CompileFunction(JSContext* cx, AutoObjectVector& envChain,
}
JS_PUBLIC_API(JSString*)
JS_DecompileScript(JSContext* cx, HandleScript script, const char* name, unsigned indent)
JS_DecompileScript(JSContext* cx, HandleScript script)
{
MOZ_ASSERT(!cx->runtime()->isAtomsCompartment(cx->compartment()));
@ -4608,7 +4608,7 @@ JS_DecompileScript(JSContext* cx, HandleScript script, const char* name, unsigne
script->ensureNonLazyCanonicalFunction();
RootedFunction fun(cx, script->functionNonDelazifying());
if (fun)
return JS_DecompileFunction(cx, fun, indent);
return JS_DecompileFunction(cx, fun);
bool haveSource = script->scriptSource()->hasSourceData();
if (!haveSource && !JSScript::loadSource(cx, script->scriptSource(), &haveSource))
return nullptr;
@ -4617,13 +4617,13 @@ JS_DecompileScript(JSContext* cx, HandleScript script, const char* name, unsigne
}
JS_PUBLIC_API(JSString*)
JS_DecompileFunction(JSContext* cx, HandleFunction fun, unsigned indent)
JS_DecompileFunction(JSContext* cx, HandleFunction fun)
{
MOZ_ASSERT(!cx->runtime()->isAtomsCompartment(cx->compartment()));
AssertHeapIsIdle();
CHECK_REQUEST(cx);
assertSameCompartment(cx, fun);
return FunctionToString(cx, fun, !(indent & JS_DONT_PRETTY_PRINT));
return FunctionToString(cx, fun, /* isToSource = */ false);
}
MOZ_NEVER_INLINE static bool

Просмотреть файл

@ -999,7 +999,7 @@ JS_IsBuiltinFunctionConstructor(JSFunction* fun);
* It is important that SpiderMonkey be initialized, and the first context
* be created, in a single-threaded fashion. Otherwise the behavior of the
* library is undefined.
* See: http://developer.mozilla.org/en/docs/Category:JSAPI_Reference
* See: https://developer.mozilla.org/en-US/docs/Mozilla/Projects/SpiderMonkey/JSAPI_reference
*/
// Create a new runtime, with a single cooperative context for this thread.
@ -4359,16 +4359,10 @@ CompileFunction(JSContext* cx, AutoObjectVector& envChain,
} /* namespace JS */
extern JS_PUBLIC_API(JSString*)
JS_DecompileScript(JSContext* cx, JS::Handle<JSScript*> script, const char* name, unsigned indent);
/*
* API extension: OR this into indent to avoid pretty-printing the decompiled
* source resulting from JS_DecompileFunction.
*/
#define JS_DONT_PRETTY_PRINT ((unsigned)0x8000)
JS_DecompileScript(JSContext* cx, JS::Handle<JSScript*> script);
extern JS_PUBLIC_API(JSString*)
JS_DecompileFunction(JSContext* cx, JS::Handle<JSFunction*> fun, unsigned indent);
JS_DecompileFunction(JSContext* cx, JS::Handle<JSFunction*> fun);
/*
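For embedders, migrating to the new decompile entry points is mechanical; a minimal sketch (error handling kept to the nullptr check the API already uses):

// Before this change:
//   JSString* src  = JS_DecompileScript(cx, script, "some label", 0);
//   JSString* body = JS_DecompileFunction(cx, fun, JS_DONT_PRETTY_PRINT);
// After: the unused name/indent arguments and the JS_DONT_PRETTY_PRINT flag are gone.
JSString* src  = JS_DecompileScript(cx, script);
JSString* body = JS_DecompileFunction(cx, fun);
if (!src || !body)
    return false;   // both still return nullptr on failure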

Просмотреть файл

@ -308,6 +308,7 @@ struct JSContext : public JS::RootingContext,
void addPendingOutOfMemory();
JSRuntime* runtime() { return runtime_; }
const JSRuntime* runtime() const { return runtime_; }
static size_t offsetOfCompartment() {
return offsetof(JSContext, compartment_);
@ -567,6 +568,10 @@ struct JSContext : public JS::RootingContext,
// exclusive threads are running.
js::ThreadLocalData<unsigned> keepAtoms;
bool canCollectAtoms() const {
return !keepAtoms && !runtime()->hasHelperThreadZones();
}
private:
// Pools used for recycling name maps and vectors when parsing and
// emitting bytecode. Purged on GC when there are no active script
@ -1256,8 +1261,8 @@ class MOZ_RAII AutoKeepAtoms
JSRuntime* rt = cx->runtime();
if (!cx->helperThread()) {
if (rt->gc.fullGCForAtomsRequested() && !cx->keepAtoms)
rt->gc.triggerFullGCForAtoms();
if (rt->gc.fullGCForAtomsRequested() && cx->canCollectAtoms())
rt->gc.triggerFullGCForAtoms(cx);
}
}
};

Просмотреть файл

@ -991,39 +991,50 @@ const Class JSFunction::class_ = {
const Class* const js::FunctionClassPtr = &JSFunction::class_;
JSString*
js::FunctionToString(JSContext* cx, HandleFunction fun, bool prettyPrint)
js::FunctionToStringCache::lookup(JSScript* script) const
{
for (size_t i = 0; i < NumEntries; i++) {
if (entries_[i].script == script)
return entries_[i].string;
}
return nullptr;
}
void
js::FunctionToStringCache::put(JSScript* script, JSString* string)
{
for (size_t i = NumEntries - 1; i > 0; i--)
entries_[i] = entries_[i - 1];
entries_[0].set(script, string);
}
JSString*
js::FunctionToString(JSContext* cx, HandleFunction fun, bool isToSource)
{
if (fun->isInterpretedLazy() && !JSFunction::getOrCreateScript(cx, fun))
return nullptr;
if (IsAsmJSModule(fun))
return AsmJSModuleToString(cx, fun, !prettyPrint);
return AsmJSModuleToString(cx, fun, isToSource);
if (IsAsmJSFunction(fun))
return AsmJSFunctionToString(cx, fun);
if (IsWrappedAsyncFunction(fun)) {
RootedFunction unwrapped(cx, GetUnwrappedAsyncFunction(fun));
return FunctionToString(cx, unwrapped, prettyPrint);
return FunctionToString(cx, unwrapped, isToSource);
}
if (IsWrappedAsyncGenerator(fun)) {
RootedFunction unwrapped(cx, GetUnwrappedAsyncGenerator(fun));
return FunctionToString(cx, unwrapped, prettyPrint);
return FunctionToString(cx, unwrapped, isToSource);
}
StringBuffer out(cx);
RootedScript script(cx);
if (fun->hasScript()) {
script = fun->nonLazyScript();
if (script->isGeneratorExp()) {
if (!out.append("function genexp() {") ||
!out.append("\n [generator expression]\n") ||
!out.append("}"))
{
return nullptr;
}
return out.finishString();
}
if (MOZ_UNLIKELY(script->isGeneratorExp()))
return NewStringCopyZ<CanGC>(cx, "function genexp() {\n [generator expression]\n}");
}
// Default class constructors are self-hosted, but have their source
@ -1033,9 +1044,9 @@ js::FunctionToString(JSContext* cx, HandleFunction fun, bool prettyPrint)
bool haveSource = fun->isInterpreted() && (fun->isClassConstructor() ||
!fun->isSelfHostedBuiltin());
// If we're not in pretty mode, put parentheses around lambda functions
// so that eval returns lambda, not function statement.
bool addParentheses = haveSource && !prettyPrint && (fun->isLambda() && !fun->isArrow());
// If we're in toSource mode, put parentheses around lambda functions so
// that eval returns lambda, not function statement.
bool addParentheses = haveSource && isToSource && (fun->isLambda() && !fun->isArrow());
if (haveSource && !script->scriptSource()->hasSourceData() &&
!JSScript::loadSource(cx, script->scriptSource(), &haveSource))
@ -1043,6 +1054,24 @@ js::FunctionToString(JSContext* cx, HandleFunction fun, bool prettyPrint)
return nullptr;
}
// Fast path for the common case, to avoid StringBuffer overhead.
if (!addParentheses && haveSource) {
FunctionToStringCache& cache = cx->zone()->functionToStringCache();
if (JSString* str = cache.lookup(script))
return str;
size_t start = script->toStringStart(), end = script->toStringEnd();
JSString* str = (end - start <= ScriptSource::SourceDeflateLimit)
? script->scriptSource()->substring(cx, start, end)
: script->scriptSource()->substringDontDeflate(cx, start, end);
if (!str)
return nullptr;
cache.put(script, str);
return str;
}
StringBuffer out(cx);
if (addParentheses) {
if (!out.append('('))
return nullptr;
@ -1110,11 +1139,11 @@ js::FunctionToString(JSContext* cx, HandleFunction fun, bool prettyPrint)
}
JSString*
fun_toStringHelper(JSContext* cx, HandleObject obj, unsigned indent)
fun_toStringHelper(JSContext* cx, HandleObject obj, bool isToSource)
{
if (!obj->is<JSFunction>()) {
if (JSFunToStringOp op = obj->getOpsFunToString())
return op(cx, obj, indent);
return op(cx, obj, isToSource);
JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr,
JSMSG_INCOMPATIBLE_PROTO,
@ -1123,7 +1152,7 @@ fun_toStringHelper(JSContext* cx, HandleObject obj, unsigned indent)
}
RootedFunction fun(cx, &obj->as<JSFunction>());
return FunctionToString(cx, fun, indent != JS_DONT_PRETTY_PRINT);
return FunctionToString(cx, fun, isToSource);
}
bool
@ -1146,16 +1175,11 @@ js::fun_toString(JSContext* cx, unsigned argc, Value* vp)
CallArgs args = CallArgsFromVp(argc, vp);
MOZ_ASSERT(IsFunctionObject(args.calleev()));
uint32_t indent = 0;
if (args.length() != 0 && !ToUint32(cx, args[0], &indent))
return false;
RootedObject obj(cx, ToObject(cx, args.thisv()));
if (!obj)
return false;
RootedString str(cx, fun_toStringHelper(cx, obj, indent));
JSString* str = fun_toStringHelper(cx, obj, /* isToSource = */ false);
if (!str)
return false;
@ -1176,12 +1200,12 @@ fun_toSource(JSContext* cx, unsigned argc, Value* vp)
RootedString str(cx);
if (obj->isCallable())
str = fun_toStringHelper(cx, obj, JS_DONT_PRETTY_PRINT);
str = fun_toStringHelper(cx, obj, /* isToSource = */ true);
else
str = ObjectToSource(cx, obj);
if (!str)
return false;
args.rval().setString(str);
return true;
}
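The new cache is deliberately tiny; a sketch of its assumed shape (a per-Zone, GC-purged array of a handful of script-to-string pairs, ignoring the write barriers the real Zone-owned version needs):

class FunctionToStringCache
{
    struct Entry {
        JSScript* script;
        JSString* string;
        void set(JSScript* s, JSString* str) { script = s; string = str; }
    };
    static const size_t NumEntries = 2;           // illustrative size only
    Entry entries_[NumEntries];

  public:
    FunctionToStringCache() { mozilla::PodArrayZero(entries_); }
    void purge() { mozilla::PodArrayZero(entries_); }    // called from GC, as the jsgc.cpp hunks below show

    JSString* lookup(JSScript* script) const;     // linear scan over entries_, as above
    void put(JSScript* script, JSString* string); // shift entries down, insert at slot 0 (most recent first)
};

Only the unmodified-source fast path in FunctionToString consults it, so a hit returns the cached JSString* without touching StringBuffer at all.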

Просмотреть файл

@ -673,7 +673,7 @@ static_assert(sizeof(JSFunction) == sizeof(js::shadow::Function),
"shadow interface must match actual interface");
extern JSString*
fun_toStringHelper(JSContext* cx, js::HandleObject obj, unsigned indent);
fun_toStringHelper(JSContext* cx, js::HandleObject obj, bool isToSource);
namespace js {
@ -883,7 +883,7 @@ JSFunction::getExtendedSlot(size_t which) const
namespace js {
JSString* FunctionToString(JSContext* cx, HandleFunction fun, bool prettyPring);
JSString* FunctionToString(JSContext* cx, HandleFunction fun, bool isToSource);
template<XDRMode mode>
bool

Просмотреть файл

@ -2589,6 +2589,7 @@ GCRuntime::updateZonePointersToRelocatedCells(Zone* zone, AutoLockForExclusiveAc
comp->fixupAfterMovingGC();
zone->externalStringCache().purge();
zone->functionToStringCache().purge();
// Iterate through all cells that can contain relocatable pointers to update
// them. Since updating each cell is independent we try to parallelize this
@ -3130,6 +3131,18 @@ GCRuntime::maybeGC(Zone* zone)
}
}
void
GCRuntime::triggerFullGCForAtoms(JSContext* cx)
{
MOZ_ASSERT(fullGCForAtomsRequested_);
MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
MOZ_ASSERT(!JS::CurrentThreadIsHeapCollecting());
MOZ_ASSERT(!cx->keepAtoms);
MOZ_ASSERT(!rt->hasHelperThreadZones());
fullGCForAtomsRequested_ = false;
MOZ_RELEASE_ASSERT(triggerGC(JS::gcreason::DELAYED_ATOMS_GC));
}
// Do all possible decommit immediately from the current thread without
// releasing the GC lock or allocating any memory.
void
@ -3696,6 +3709,7 @@ GCRuntime::purgeRuntime(AutoLockForExclusiveAccess& lock)
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
zone->atomCache().clearAndShrink();
zone->externalStringCache().purge();
zone->functionToStringCache().purge();
}
for (const CooperatingContext& target : rt->cooperatingContexts()) {
@ -3855,18 +3869,40 @@ RelazifyFunctions(Zone* zone, AllocKind kind)
static bool
ShouldCollectZone(Zone* zone, JS::gcreason::Reason reason)
{
// Normally we collect all scheduled zones.
if (reason != JS::gcreason::COMPARTMENT_REVIVED)
return zone->isGCScheduled();
// If we are repeating a GC becuase we noticed dead compartments haven't
// If we are repeating a GC because we noticed dead compartments haven't
// been collected, then only collect zones containing those compartments.
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
if (comp->scheduledForDestruction)
return true;
if (reason == JS::gcreason::COMPARTMENT_REVIVED) {
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
if (comp->scheduledForDestruction)
return true;
}
return false;
}
return false;
// Otherwise we only collect scheduled zones.
if (!zone->isGCScheduled())
return false;
// If canCollectAtoms() is false then either an instance of AutoKeepAtoms is
// currently on the stack or parsing is currently happening on another
// thread. In either case we don't have information about which atoms are
// roots, so we must skip collecting atoms.
//
// Note that this only affects the first slice of an incremental GC since root
// marking is completed before we return to the mutator.
//
// Off-thread parsing is inhibited after the start of GC which prevents
// races between creating atoms during parsing and sweeping atoms on the
// active thread.
//
// Otherwise, we always schedule a GC in the atoms zone so that atoms which
// the other collected zones are using are marked, and we can update the
// set of atoms in use by the other collected zones at the end of the GC.
if (zone->isAtomsZone())
return TlsContext.get()->canCollectAtoms();
return true;
}
bool
@ -3891,10 +3927,9 @@ GCRuntime::prepareZonesForCollection(JS::gcreason::Reason reason, bool* isFullOu
for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
/* Set up which zones will be collected. */
if (ShouldCollectZone(zone, reason)) {
if (!zone->isAtomsZone()) {
any = true;
zone->changeGCState(Zone::NoGC, Zone::Mark);
}
MOZ_ASSERT(zone->canCollect());
any = true;
zone->changeGCState(Zone::NoGC, Zone::Mark);
} else {
*isFullOut = false;
}
@ -3921,30 +3956,10 @@ GCRuntime::prepareZonesForCollection(JS::gcreason::Reason reason, bool* isFullOu
}
/*
* If keepAtoms() is true then either an instance of AutoKeepAtoms is
* currently on the stack or parsing is currently happening on another
* thread. In either case we don't have information about which atoms are
* roots, so we must skip collecting atoms.
*
* Note that only affects the first slice of an incremental GC since root
* marking is completed before we return to the mutator.
*
* Off-thread parsing is inhibited after the start of GC which prevents
* races between creating atoms during parsing and sweeping atoms on the
* active thread.
*
* Otherwise, we always schedule a GC in the atoms zone so that atoms which
* the other collected zones are using are marked, and we can update the
* set of atoms in use by the other collected zones at the end of the GC.
* Check that we do collect the atoms zone if we triggered a GC for that
* purpose.
*/
if (!TlsContext.get()->keepAtoms || rt->hasHelperThreadZones()) {
Zone* atomsZone = rt->atomsCompartment(lock)->zone();
if (atomsZone->isGCScheduled()) {
MOZ_ASSERT(!atomsZone->isCollecting());
atomsZone->changeGCState(Zone::NoGC, Zone::Mark);
any = true;
}
}
MOZ_ASSERT_IF(reason == JS::gcreason::DELAYED_ATOMS_GC, atomsZone->isGCMarking());
/* Check that at least one zone is scheduled for collection. */
return any;
@ -6678,17 +6693,19 @@ GCRuntime::budgetIncrementalGC(bool nonincrementalByAPI, JS::gcreason::Reason re
bool reset = false;
for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
if (zone->usage.gcBytes() >= zone->threshold.gcTriggerBytes()) {
MOZ_ASSERT(zone->isGCScheduled());
budget.makeUnlimited();
stats().nonincremental(AbortReason::GCBytesTrigger);
}
if (isIncrementalGCInProgress() && zone->isGCScheduled() != zone->wasGCStarted())
reset = true;
if (zone->isTooMuchMalloc()) {
MOZ_ASSERT(zone->isGCScheduled());
budget.makeUnlimited();
stats().nonincremental(AbortReason::MallocBytesTrigger);
}
if (isIncrementalGCInProgress() && zone->isGCScheduled() != zone->wasGCStarted())
reset = true;
}
if (reset)
@ -6709,16 +6726,22 @@ class AutoScheduleZonesForGC
if (rt->gc.gcMode() == JSGC_MODE_GLOBAL)
zone->scheduleGC();
/* This is a heuristic to avoid resets. */
// This is a heuristic to avoid resets.
if (rt->gc.isIncrementalGCInProgress() && zone->needsIncrementalBarrier())
zone->scheduleGC();
/* This is a heuristic to reduce the total number of collections. */
// This is a heuristic to reduce the total number of collections.
if (zone->usage.gcBytes() >=
zone->threshold.allocTrigger(rt->gc.schedulingState.inHighFrequencyGCMode()))
{
zone->scheduleGC();
}
// This ensures we collect zones that have reached the malloc limit.
// TODO: Start collecting these zones earlier like we do for the GC
// bytes trigger above (bug 1384049).
if (zone->isTooMuchMalloc())
zone->scheduleGC();
}
}
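Putting the GC pieces together, the delayed atoms collection now works roughly like this (a summary of the code above, not new behaviour):

// 1. A full GC for atoms is requested while atoms cannot be collected
//    (AutoKeepAtoms on the stack, or helper-thread zones alive); the request
//    is remembered via fullGCForAtomsRequested_ instead of being dropped.
// 2. Each site that releases such a blocker re-checks and, if possible, fires:
if (rt->gc.fullGCForAtomsRequested() && cx->canCollectAtoms())
    rt->gc.triggerFullGCForAtoms(cx);    // asserts the preconditions, then
                                         // triggerGC(JS::gcreason::DELAYED_ATOMS_GC)
// 3. ShouldCollectZone() returns canCollectAtoms() for the atoms zone, so a
//    DELAYED_ATOMS_GC slice really does mark the atoms zone, which
//    prepareZonesForCollection now asserts.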

Просмотреть файл

@ -1209,7 +1209,7 @@ ToDisassemblySource(JSContext* cx, HandleValue v, JSAutoByteString* bytes)
if (obj.is<JSFunction>()) {
RootedFunction fun(cx, &obj.as<JSFunction>());
JSString* str = JS_DecompileFunction(cx, fun, JS_DONT_PRETTY_PRINT);
JSString* str = JS_DecompileFunction(cx, fun);
if (!str)
return false;
return bytes->encodeLatin1(cx, str);
@ -2729,7 +2729,7 @@ GetPCCountJSON(JSContext* cx, const ScriptAndCounts& sac, StringBuffer& buf)
if (!AppendJSONProperty(buf, "text", NO_COMMA))
return false;
JSString* str = JS_DecompileScript(cx, script, nullptr, 0);
JSString* str = JS_DecompileScript(cx, script);
if (!str || !(str = StringToSource(cx, str)))
return false;

Просмотреть файл

@ -1834,9 +1834,7 @@ ScriptSource::appendSubstring(JSContext* cx, StringBuffer& buf, size_t start, si
PinnedChars chars(cx, this, holder, start, len);
if (!chars.get())
return false;
// Sources can be large and we don't want to check "is this char Latin1"
// for each source code character, so inflate the buffer here.
if (len > 100 && !buf.ensureTwoByteChars())
if (len > SourceDeflateLimit && !buf.ensureTwoByteChars())
return false;
return buf.append(chars.get(), len);
}

Просмотреть файл

@ -502,6 +502,10 @@ class ScriptSource
void movePendingCompressedSource();
public:
// When creating a JSString* from TwoByte source characters, we don't try
// to deflate to Latin1 for longer strings, because this can be slow.
static const size_t SourceDeflateLimit = 100;
explicit ScriptSource()
: refs(0),
data(SourceType(Missing())),

Просмотреть файл

@ -115,7 +115,7 @@ class JS_FRIEND_API(Wrapper) : public BaseProxyHandler
JS::IsArrayAnswer* answer) const override;
virtual const char* className(JSContext* cx, HandleObject proxy) const override;
virtual JSString* fun_toString(JSContext* cx, HandleObject proxy,
unsigned indent) const override;
bool isToSource) const override;
virtual RegExpShared* regexp_toShared(JSContext* cx, HandleObject proxy) const override;
virtual bool boxedValue_unbox(JSContext* cx, HandleObject proxy,
MutableHandleValue vp) const override;
@ -209,7 +209,7 @@ class JS_FRIEND_API(CrossCompartmentWrapper) : public Wrapper
bool* bp) const override;
virtual const char* className(JSContext* cx, HandleObject proxy) const override;
virtual JSString* fun_toString(JSContext* cx, HandleObject wrapper,
unsigned indent) const override;
bool isToSource) const override;
virtual RegExpShared* regexp_toShared(JSContext* cx, HandleObject proxy) const override;
virtual bool boxedValue_unbox(JSContext* cx, HandleObject proxy, MutableHandleValue vp) const override;
@ -268,7 +268,8 @@ class JS_FRIEND_API(OpaqueCrossCompartmentWrapper) : public CrossCompartmentWrap
virtual bool isArray(JSContext* cx, HandleObject obj,
JS::IsArrayAnswer* answer) const override;
virtual const char* className(JSContext* cx, HandleObject wrapper) const override;
virtual JSString* fun_toString(JSContext* cx, HandleObject proxy, unsigned indent) const override;
virtual JSString* fun_toString(JSContext* cx, HandleObject proxy,
bool isToSource) const override;
static const OpaqueCrossCompartmentWrapper singleton;
};
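For any out-of-tree proxy handler the override is a one-line signature change; a hypothetical handler (constructor and other overrides omitted):

class LoggingWrapper : public js::Wrapper
{
  public:
    // Was: JSString* fun_toString(JSContext*, JS::HandleObject, unsigned indent) const override;
    JSString* fun_toString(JSContext* cx, JS::HandleObject proxy,
                           bool isToSource) const override
    {
        // isToSource is true for Function.prototype.toSource, false for toString;
        // the old indent/JS_DONT_PRETTY_PRINT encoding is gone entirely.
        return js::Wrapper::fun_toString(cx, proxy, isToSource);
    }
};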

Просмотреть файл

@ -316,7 +316,7 @@ BaseProxyHandler::className(JSContext* cx, HandleObject proxy) const
}
JSString*
BaseProxyHandler::fun_toString(JSContext* cx, HandleObject proxy, unsigned indent) const
BaseProxyHandler::fun_toString(JSContext* cx, HandleObject proxy, bool isToSource) const
{
if (proxy->isCallable())
return JS_NewStringCopyZ(cx, "function () {\n [native code]\n}");

Просмотреть файл

@ -449,12 +449,12 @@ CrossCompartmentWrapper::className(JSContext* cx, HandleObject wrapper) const
}
JSString*
CrossCompartmentWrapper::fun_toString(JSContext* cx, HandleObject wrapper, unsigned indent) const
CrossCompartmentWrapper::fun_toString(JSContext* cx, HandleObject wrapper, bool isToSource) const
{
RootedString str(cx);
{
AutoCompartment call(cx, wrappedObject(wrapper));
str = Wrapper::fun_toString(cx, wrapper, indent);
str = Wrapper::fun_toString(cx, wrapper, isToSource);
if (!str)
return nullptr;
}

Просмотреть файл

@ -153,7 +153,7 @@ DeadObjectProxy<CC>::className(JSContext* cx, HandleObject wrapper) const
template <DeadProxyIsCallableIsConstructorOption CC>
JSString*
DeadObjectProxy<CC>::fun_toString(JSContext* cx, HandleObject proxy, unsigned indent) const
DeadObjectProxy<CC>::fun_toString(JSContext* cx, HandleObject proxy, bool isToSource) const
{
ReportDead(cx);
return nullptr;

Просмотреть файл

@ -59,7 +59,8 @@ class DeadObjectProxy : public BaseProxyHandler
virtual bool getBuiltinClass(JSContext* cx, HandleObject proxy, ESClass* cls) const override;
virtual bool isArray(JSContext* cx, HandleObject proxy, JS::IsArrayAnswer* answer) const override;
virtual const char* className(JSContext* cx, HandleObject proxy) const override;
virtual JSString* fun_toString(JSContext* cx, HandleObject proxy, unsigned indent) const override;
virtual JSString* fun_toString(JSContext* cx, HandleObject proxy,
bool isToSource) const override;
virtual RegExpShared* regexp_toShared(JSContext* cx, HandleObject proxy) const override;
virtual bool isCallable(JSObject* obj) const override {

Просмотреть файл

@ -183,7 +183,7 @@ OpaqueCrossCompartmentWrapper::className(JSContext* cx,
JSString*
OpaqueCrossCompartmentWrapper::fun_toString(JSContext* cx, HandleObject proxy,
unsigned indent) const
bool isToSource) const
{
JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_INCOMPATIBLE_PROTO,
js_Function_str, js_toString_str, "object");

Просмотреть файл

@ -562,7 +562,7 @@ Proxy::className(JSContext* cx, HandleObject proxy)
}
JSString*
Proxy::fun_toString(JSContext* cx, HandleObject proxy, unsigned indent)
Proxy::fun_toString(JSContext* cx, HandleObject proxy, bool isToSource)
{
if (!CheckRecursionLimit(cx))
return nullptr;
@ -571,8 +571,8 @@ Proxy::fun_toString(JSContext* cx, HandleObject proxy, unsigned indent)
BaseProxyHandler::GET, /* mayThrow = */ false);
// Do the safe thing if the policy rejects.
if (!policy.allowed())
return handler->BaseProxyHandler::fun_toString(cx, proxy, indent);
return handler->fun_toString(cx, proxy, indent);
return handler->BaseProxyHandler::fun_toString(cx, proxy, isToSource);
return handler->fun_toString(cx, proxy, isToSource);
}
RegExpShared*

Просмотреть файл

@ -59,7 +59,7 @@ class Proxy
static bool getBuiltinClass(JSContext* cx, HandleObject proxy, ESClass* cls);
static bool isArray(JSContext* cx, HandleObject proxy, JS::IsArrayAnswer* answer);
static const char* className(JSContext* cx, HandleObject proxy);
static JSString* fun_toString(JSContext* cx, HandleObject proxy, unsigned indent);
static JSString* fun_toString(JSContext* cx, HandleObject proxy, bool isToSource);
static RegExpShared* regexp_toShared(JSContext* cx, HandleObject proxy);
static bool boxedValue_unbox(JSContext* cx, HandleObject proxy, MutableHandleValue vp);

Просмотреть файл

@ -1260,7 +1260,7 @@ ScriptedProxyHandler::className(JSContext* cx, HandleObject proxy) const
}
JSString*
ScriptedProxyHandler::fun_toString(JSContext* cx, HandleObject proxy, unsigned indent) const
ScriptedProxyHandler::fun_toString(JSContext* cx, HandleObject proxy, bool isToSource) const
{
JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_INCOMPATIBLE_PROTO,
js_Function_str, js_toString_str, "object");

Просмотреть файл

@ -68,7 +68,7 @@ class ScriptedProxyHandler : public BaseProxyHandler
JS::IsArrayAnswer* answer) const override;
virtual const char* className(JSContext* cx, HandleObject proxy) const override;
virtual JSString* fun_toString(JSContext* cx, HandleObject proxy,
unsigned indent) const override;
bool isToSource) const override;
virtual RegExpShared* regexp_toShared(JSContext* cx, HandleObject proxy) const override;
virtual bool boxedValue_unbox(JSContext* cx, HandleObject proxy,
MutableHandleValue vp) const override;

Просмотреть файл

@ -264,11 +264,11 @@ Wrapper::className(JSContext* cx, HandleObject proxy) const
}
JSString*
Wrapper::fun_toString(JSContext* cx, HandleObject proxy, unsigned indent) const
Wrapper::fun_toString(JSContext* cx, HandleObject proxy, bool isToSource) const
{
assertEnteredPolicy(cx, proxy, JSID_VOID, GET);
RootedObject target(cx, proxy->as<ProxyObject>().target());
return fun_toStringHelper(cx, target, indent);
return fun_toStringHelper(cx, target, isToSource);
}
RegExpShared*

Просмотреть файл

@ -4784,7 +4784,7 @@ DecompileFunction(JSContext* cx, unsigned argc, Value* vp)
return true;
}
RootedFunction fun(cx, &args[0].toObject().as<JSFunction>());
JSString* result = JS_DecompileFunction(cx, fun, 0);
JSString* result = JS_DecompileFunction(cx, fun);
if (!result)
return false;
args.rval().setString(result);
@ -4806,7 +4806,7 @@ DecompileThisScript(JSContext* cx, unsigned argc, Value* vp)
JSAutoCompartment ac(cx, iter.script());
RootedScript script(cx, iter.script());
JSString* result = JS_DecompileScript(cx, script, "test", 0);
JSString* result = JS_DecompileScript(cx, script);
if (!result)
return false;

Просмотреть файл

@ -887,8 +887,9 @@ JSRuntime::clearUsedByHelperThread(Zone* zone)
MOZ_ASSERT(zone->group()->usedByHelperThread);
zone->group()->usedByHelperThread = false;
numHelperThreadZones--;
if (gc.fullGCForAtomsRequested() && !TlsContext.get())
gc.triggerFullGCForAtoms();
JSContext* cx = TlsContext.get();
if (gc.fullGCForAtomsRequested() && cx->canCollectAtoms())
gc.triggerFullGCForAtoms(cx);
}
bool

Просмотреть файл

@ -8896,7 +8896,7 @@ js::IsAsmJSModuleLoadedFromCache(JSContext* cx, unsigned argc, Value* vp)
// asm.js toString/toSource support
JSString*
js::AsmJSModuleToString(JSContext* cx, HandleFunction fun, bool addParenToLambda)
js::AsmJSModuleToString(JSContext* cx, HandleFunction fun, bool isToSource)
{
MOZ_ASSERT(IsAsmJSModule(fun));
@ -8907,7 +8907,7 @@ js::AsmJSModuleToString(JSContext* cx, HandleFunction fun, bool addParenToLambda
StringBuffer out(cx);
if (addParenToLambda && fun->isLambda() && !out.append("("))
if (isToSource && fun->isLambda() && !out.append("("))
return nullptr;
bool haveSource = source->hasSourceData();
@ -8930,7 +8930,7 @@ js::AsmJSModuleToString(JSContext* cx, HandleFunction fun, bool addParenToLambda
return nullptr;
}
if (addParenToLambda && fun->isLambda() && !out.append(")"))
if (isToSource && fun->isLambda() && !out.append(")"))
return nullptr;
return out.finishString();

Просмотреть файл

@ -80,7 +80,7 @@ extern JSString*
AsmJSFunctionToString(JSContext* cx, HandleFunction fun);
extern JSString*
AsmJSModuleToString(JSContext* cx, HandleFunction fun, bool addParenToLambda);
AsmJSModuleToString(JSContext* cx, HandleFunction fun, bool isToSource);
// asm.js heap:

Просмотреть файл

@ -32,6 +32,7 @@
#include "mozilla/layers/LayerManagerComposite.h"
#include "mozilla/layers/CompositorBridgeChild.h"
#include "mozilla/layers/WebRenderLayerManager.h"
#include "mozilla/webrender/WebRenderAPI.h"
#include "ClientLayerManager.h"
#include "FrameLayerBuilder.h"
@ -49,7 +50,7 @@ typedef FrameMetrics::ViewID ViewID;
* from the nearest display item reference frame (which we assume will be inducing
* a ContainerLayer).
*/
static nsIntPoint
static LayoutDeviceIntPoint
GetContentRectLayerOffset(nsIFrame* aContainerFrame, nsDisplayListBuilder* aBuilder)
{
nscoord auPerDevPixel = aContainerFrame->PresContext()->AppUnitsPerDevPixel();
@ -61,7 +62,7 @@ GetContentRectLayerOffset(nsIFrame* aContainerFrame, nsDisplayListBuilder* aBuil
nsPoint frameOffset = aBuilder->ToReferenceFrame(aContainerFrame) +
aContainerFrame->GetContentRectRelativeToSelf().TopLeft();
return frameOffset.ToNearestPixels(auPerDevPixel);
return LayoutDeviceIntPoint::FromAppUnitsToNearest(frameOffset, auPerDevPixel);
}
// Return true iff |aManager| is a "temporary layer manager". They're
@ -160,7 +161,6 @@ already_AddRefed<Layer>
RenderFrameParent::BuildLayer(nsDisplayListBuilder* aBuilder,
nsIFrame* aFrame,
LayerManager* aManager,
const nsIntRect& aVisibleRect,
nsDisplayItem* aItem,
const ContainerLayerParameters& aContainerParameters)
{
@ -200,7 +200,7 @@ RenderFrameParent::BuildLayer(nsDisplayListBuilder* aBuilder,
return nullptr;
}
static_cast<RefLayer*>(layer.get())->SetReferentId(mLayersId);
nsIntPoint offset = GetContentRectLayerOffset(aFrame, aBuilder);
LayoutDeviceIntPoint offset = GetContentRectLayerOffset(aFrame, aBuilder);
// We can only have an offset if we're a child of an inactive
// container, but our display item is LAYER_ACTIVE_FORCE which
// forces all layers above to be active.
@ -374,12 +374,28 @@ nsDisplayRemote::BuildLayer(nsDisplayListBuilder* aBuilder,
LayerManager* aManager,
const ContainerLayerParameters& aContainerParameters)
{
int32_t appUnitsPerDevPixel = mFrame->PresContext()->AppUnitsPerDevPixel();
nsIntRect visibleRect = GetVisibleRect().ToNearestPixels(appUnitsPerDevPixel);
visibleRect += aContainerParameters.mOffset;
RefPtr<Layer> layer = mRemoteFrame->BuildLayer(aBuilder, mFrame, aManager, visibleRect, this, aContainerParameters);
RefPtr<Layer> layer = mRemoteFrame->BuildLayer(aBuilder, mFrame, aManager, this, aContainerParameters);
if (layer && layer->AsContainerLayer()) {
layer->AsContainerLayer()->SetEventRegionsOverride(mEventRegionsOverride);
}
return layer.forget();
}
bool
nsDisplayRemote::CreateWebRenderCommands(mozilla::wr::DisplayListBuilder& aBuilder,
const StackingContextHelper& aSc,
nsTArray<WebRenderParentCommand>& aParentCommands,
mozilla::layers::WebRenderLayerManager* aManager,
nsDisplayListBuilder* aDisplayListBuilder)
{
MOZ_ASSERT(aManager->IsLayersFreeTransaction());
mozilla::LayoutDeviceRect visible = mozilla::LayoutDeviceRect::FromAppUnits(
GetVisibleRect(), mFrame->PresContext()->AppUnitsPerDevPixel());
visible += mozilla::layout::GetContentRectLayerOffset(mFrame, aDisplayListBuilder);
aBuilder.PushIFrame(aSc.ToRelativeLayoutRect(visible),
mozilla::wr::AsPipelineId(mRemoteFrame->GetLayersId()));
return true;
}
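The switch from nsIntPoint/nsIntRect to the LayoutDevice* types is mainly about keeping the unit in the type; a condensed sketch of the conversion both call sites now share:

// App units -> layout-device pixels, with the unit carried by the type rather
// than by convention.  AppUnitsPerDevPixel() already folds in the device scale.
int32_t auPerDevPixel = aFrame->PresContext()->AppUnitsPerDevPixel();
LayoutDeviceIntPoint offset =
    LayoutDeviceIntPoint::FromAppUnitsToNearest(frameOffset, auPerDevPixel);
LayoutDeviceRect visible =
    LayoutDeviceRect::FromAppUnits(visibleAppUnitsRect, auPerDevPixel);

Typed units let the compiler catch mismatches that a bare nsIntRect hides, which matters once these rects are handed straight to WebRender via aSc.ToRelativeLayoutRect().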

Просмотреть файл

@ -72,7 +72,6 @@ public:
already_AddRefed<Layer> BuildLayer(nsDisplayListBuilder* aBuilder,
nsIFrame* aFrame,
LayerManager* aManager,
const nsIntRect& aVisibleRect,
nsDisplayItem* aItem,
const ContainerLayerParameters& aContainerParameters);
@ -163,6 +162,12 @@ public:
BuildLayer(nsDisplayListBuilder* aBuilder, LayerManager* aManager,
const ContainerLayerParameters& aContainerParameters) override;
virtual bool CreateWebRenderCommands(mozilla::wr::DisplayListBuilder& aBuilder,
const StackingContextHelper& aSc,
nsTArray<WebRenderParentCommand>& aParentCommands,
mozilla::layers::WebRenderLayerManager* aManager,
nsDisplayListBuilder* aDisplayListBuilder) override;
NS_DISPLAY_DECL_NAME("Remote", TYPE_REMOTE)
private:

Просмотреть файл

@ -2091,10 +2091,7 @@ already_AddRefed<LayerManager> nsDisplayList::PaintRoot(nsDisplayListBuilder* aB
nsIDocument* document = presShell->GetDocument();
if (gfxPrefs::WebRenderLayersFree() &&
layerManager->GetBackendType() == layers::LayersBackend::LAYERS_WR &&
// We don't yet support many display items used in chrome, so
// layers-free mode is only for content.
!presContext->IsChrome()) {
layerManager->GetBackendType() == layers::LayersBackend::LAYERS_WR) {
if (doBeginTransaction) {
if (aCtx) {
if (!layerManager->BeginTransactionWithTarget(aCtx)) {

Просмотреть файл

@ -1984,12 +1984,9 @@ fuzzy-if(Android,27,874) fuzzy-if(gtkWidget,14,29) == 1313772.xhtml 1313772-ref.
fuzzy(2,320000) == 1315113-1.html 1315113-1-ref.html
fuzzy(2,20000) == 1315113-2.html 1315113-2-ref.html
== 1315632-1.html 1315632-1-ref.html
fuzzy(2,40000) fuzzy-if(webrender,26,691) == 1316719-1a.html 1316719-1-ref.html
fuzzy(2,40000) fuzzy-if(webrender,26,691) == 1316719-1b.html 1316719-1-ref.html
fuzzy(2,40000) == 1316719-1c.html 1316719-1-ref.html
pref(layers.advanced.background-color,1) skip-if(!webrender) fuzzy-if(webrender,27,700) == 1316719-1a.html 1316719-1-ref.html
pref(layers.advanced.background-color,1) skip-if(!webrender) fuzzy-if(webrender,27,700) == 1316719-1b.html 1316719-1-ref.html
pref(layers.advanced.background-color,1) skip-if(!webrender) fuzzy-if(webrender,27,700) == 1316719-1c.html 1316719-1-ref.html
fuzzy(2,40000) fuzzy-if(webrender,1-2,349-349) == 1316719-1a.html 1316719-1-ref.html
fuzzy(2,40000) fuzzy-if(webrender,1-2,349-349) == 1316719-1b.html 1316719-1-ref.html
fuzzy(2,40000) fuzzy-if(webrender,1-1,323-323) == 1316719-1c.html 1316719-1-ref.html
skip-if(Android) != 1318769-1.html 1318769-1-ref.html
fails-if(styloVsGecko) == 1322512-1.html 1322512-1-ref.html
== 1330051.svg 1330051-ref.svg

Просмотреть файл

@ -37,7 +37,8 @@ test-pref(font.size.inflation.emPerLine,15) test-pref(font.size.inflation.forceE
test-pref(font.size.inflation.emPerLine,15) test-pref(font.size.inflation.forceEnabled,true) test-pref(font.size.inflation.lineThreshold,0) == textarea-3.html textarea-3-ref.html
test-pref(font.size.inflation.emPerLine,15) test-pref(font.size.inflation.forceEnabled,true) test-pref(font.size.inflation.lineThreshold,0) == css-transform-1.html css-transform-1-ref.html
test-pref(font.size.inflation.emPerLine,15) test-pref(font.size.inflation.forceEnabled,true) test-pref(font.size.inflation.lineThreshold,0) == css-transform-2.html css-transform-2-ref.html
fuzzy-if(asyncPan&&!layersGPUAccelerated,102,1764) test-pref(font.size.inflation.emPerLine,15) test-pref(font.size.inflation.forceEnabled,true) test-pref(font.size.inflation.lineThreshold,0) == container-with-clamping.html container-with-clamping-ref.html
# skipped - bug 1380830
fuzzy-if(asyncPan&&!layersGPUAccelerated,102,1764) skip test-pref(font.size.inflation.emPerLine,15) test-pref(font.size.inflation.forceEnabled,true) test-pref(font.size.inflation.lineThreshold,0) == container-with-clamping.html container-with-clamping-ref.html
test-pref(font.size.inflation.emPerLine,15) test-pref(font.size.inflation.forceEnabled,true) test-pref(font.size.inflation.lineThreshold,0) skip-if(styloVsGecko) load video-1.html
test-pref(font.size.inflation.emPerLine,15) test-pref(font.size.inflation.forceEnabled,true) test-pref(font.size.inflation.lineThreshold,0) HTTP(..) == intrinsic-min-1.html intrinsic-min-1-ref.html
test-pref(font.size.inflation.emPerLine,15) test-pref(font.size.inflation.forceEnabled,true) test-pref(font.size.inflation.lineThreshold,0) HTTP(..) == intrinsic-max-1.html intrinsic-max-1-ref.html

Просмотреть файл

@ -13,6 +13,7 @@
#define mozilla_AllocPolicy_h
#include "mozilla/Attributes.h"
#include "mozilla/Assertions.h"
#include "mozilla/TemplateLib.h"
#include <stddef.h>
@ -128,6 +129,67 @@ public:
}
};
/*
* A policy which always fails to allocate memory, returning nullptr. Methods
* which expect an existing allocation assert.
*
* This type should be used in situations where you want to use a MFBT type with
* inline storage, and don't want to allow it to allocate on the heap.
*/
class NeverAllocPolicy
{
public:
template <typename T>
T* maybe_pod_malloc(size_t aNumElems)
{
return nullptr;
}
template <typename T>
T* maybe_pod_calloc(size_t aNumElems)
{
return nullptr;
}
template <typename T>
T* maybe_pod_realloc(T* aPtr, size_t aOldSize, size_t aNewSize)
{
MOZ_CRASH("NeverAllocPolicy::maybe_pod_realloc");
}
template <typename T>
T* pod_malloc(size_t aNumElems)
{
return nullptr;
}
template <typename T>
T* pod_calloc(size_t aNumElems)
{
return nullptr;
}
template <typename T>
T* pod_realloc(T* aPtr, size_t aOldSize, size_t aNewSize)
{
MOZ_CRASH("NeverAllocPolicy::pod_realloc");
}
void free_(void* aPtr)
{
MOZ_CRASH("NeverAllocPolicy::free_");
}
void reportAllocOverflow() const
{
}
MOZ_MUST_USE bool checkSimulatedOOM() const
{
return true;
}
};
} // namespace mozilla
#endif /* mozilla_AllocPolicy_h */
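A typical use of the new policy, assuming (as the MFBT Vector growth path does) that running out of inline storage goes through pod_malloc: a vector that can never spill to the heap.

#include "mozilla/AllocPolicy.h"
#include "mozilla/Vector.h"

bool FillWithoutHeap()
{
    // Up to 8 elements live in inline storage; the 9th append fails instead of
    // allocating, because NeverAllocPolicy::pod_malloc returns nullptr.
    mozilla::Vector<int, 8, mozilla::NeverAllocPolicy> v;
    for (int i = 0; i < 8; i++) {
        if (!v.append(i))        // always fits in the inline buffer
            return false;
    }
    return !v.append(8);         // growth would need the heap, so this fails
}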

Просмотреть файл

@ -1116,6 +1116,10 @@ public class BrowserApp extends GeckoApp
// We can't show the first run experience until Gecko has finished initialization (bug 1077583).
checkFirstrun(this, intent);
if (Versions.preJB) {
conditionallyNotifyEOL();
}
if (!IntentUtils.getIsInAutomationFromEnvironment(intent)) {
DawnHelper.conditionallyNotifyDawn(this);
}
@ -3452,6 +3456,10 @@ public class BrowserApp extends GeckoApp
}
}
@Override
public void onContextMenu(GeckoView view, int screenX, int screenY,
String uri, String elementSrc) {}
@Override
public boolean onPrepareOptionsMenu(Menu aMenu) {
if (aMenu == null)

Просмотреть файл

@ -567,4 +567,8 @@ public class CustomTabsActivity extends AppCompatActivity
@Override
public void onFullScreen(GeckoView view, boolean fullScreen) {}
@Override
public void onContextMenu(GeckoView view, int screenX, int screenY,
String uri, String elementSrc) {}
}

Просмотреть файл

@ -22,6 +22,7 @@ class GeckoViewContent extends GeckoViewContentModule {
addEventListener("MozDOMFullscreen:Exit", this, false);
addEventListener("MozDOMFullscreen:Exited", this, false);
addEventListener("MozDOMFullscreen:Request", this, false);
addEventListener("contextmenu", this, { capture: true, passive: false });
this.messageManager.addMessageListener("GeckoView:DOMFullscreenEntered",
this);
@ -37,6 +38,7 @@ class GeckoViewContent extends GeckoViewContentModule {
removeEventListener("MozDOMFullscreen:Exit", this);
removeEventListener("MozDOMFullscreen:Exited", this);
removeEventListener("MozDOMFullscreen:Request", this);
removeEventListener("contextmenu", this);
this.messageManager.removeMessageListener("GeckoView:DOMFullscreenEntered",
this);
@ -66,13 +68,35 @@ class GeckoViewContent extends GeckoViewContentModule {
}
handleEvent(aEvent) {
if (aEvent.originalTarget.defaultView != content) {
return;
}
debug("handleEvent " + aEvent.type);
switch (aEvent.type) {
case "contextmenu":
function nearestParentHref(node) {
while (node && !node.href) {
node = node.parentNode;
}
return node && node.href;
}
let node = aEvent.target;
let hrefNode = nearestParentHref(node);
let isImageNode = node instanceof Ci.nsIDOMHTMLImageElement;
let isMediaNode = node instanceof Ci.nsIDOMHTMLMediaElement;
let msg = {
screenX: aEvent.screenX,
screenY: aEvent.screenY,
uri: hrefNode,
elementSrc: isImageNode || isMediaNode
? node.currentSrc || node.src
: null
};
if (hrefNode || isImageNode || isMediaNode) {
sendAsyncMessage("GeckoView:ContextMenu", msg);
aEvent.preventDefault();
}
break;
case "MozDOMFullscreen:Request":
sendAsyncMessage("GeckoView:DOMFullscreenRequest");
break;

Просмотреть файл

@ -96,6 +96,7 @@ public class GeckoView extends LayerView {
new GeckoViewHandler<ContentListener>(
"GeckoViewContent", this,
new String[]{
"GeckoView:ContextMenu",
"GeckoView:DOMTitleChanged",
"GeckoView:FullScreenEnter",
"GeckoView:FullScreenExit"
@ -107,7 +108,13 @@ public class GeckoView extends LayerView {
final GeckoBundle message,
final EventCallback callback) {
if ("GeckoView:DOMTitleChanged".equals(event)) {
if ("GeckoView:ContextMenu".equals(event)) {
listener.onContextMenu(GeckoView.this,
message.getInt("screenX"),
message.getInt("screenY"),
message.getString("uri"),
message.getString("elementSrc"));
} else if ("GeckoView:DOMTitleChanged".equals(event)) {
listener.onTitleChange(GeckoView.this,
message.getString("title"));
} else if ("GeckoView:FullScreenEnter".equals(event)) {
@ -115,7 +122,6 @@ public class GeckoView extends LayerView {
} else if ("GeckoView:FullScreenExit".equals(event)) {
listener.onFullScreen(GeckoView.this, false);
}
}
};
@ -1288,6 +1294,23 @@ public class GeckoView extends LayerView {
* @param fullScreen True if the page is in full screen mode.
*/
void onFullScreen(GeckoView view, boolean fullScreen);
/**
* A user has initiated the context menu via long-press.
* This event is fired on links, (nested) images and (nested) media
* elements.
*
* @param view The GeckoView that initiated the callback.
* @param screenX The screen coordinates of the press.
* @param screenY The screen coordinates of the press.
* @param uri The URI of the pressed link, set for links and
* image-links.
* @param elementSrc The source URI of the pressed element, set for
* (nested) images and media elements.
*/
void onContextMenu(GeckoView view, int screenX, int screenY,
String uri, String elementSrc);
}
public interface NavigationListener {

Просмотреть файл

@ -133,6 +133,14 @@ public class GeckoViewActivity extends Activity {
getActionBar().show();
}
}
@Override
public void onContextMenu(GeckoView view, int screenX, int screenY,
String uri, String elementSrc) {
Log.d(LOGTAG, "onContextMenu screenX=" + screenX +
" screenY=" + screenY + " uri=" + uri +
" elementSrc=" + elementSrc);
}
}
private class MyGeckoViewProgress implements GeckoView.ProgressListener {

Просмотреть файл

@ -38,6 +38,7 @@ class GeckoViewContent extends GeckoViewModule {
this.messageManager.addMessageListener("GeckoView:DOMFullscreenExit", this);
this.messageManager.addMessageListener("GeckoView:DOMFullscreenRequest", this);
this.messageManager.addMessageListener("GeckoView:DOMTitleChanged", this);
this.messageManager.addMessageListener("GeckoView:ContextMenu", this);
}
// Bundle event handler.
@ -59,6 +60,7 @@ class GeckoViewContent extends GeckoViewModule {
this.messageManager.removeMessageListener("GeckoView:DOMFullscreenExit", this);
this.messageManager.removeMessageListener("GeckoView:DOMFullscreenRequest", this);
this.messageManager.removeMessageListener("GeckoView:DOMTitleChanged", this);
this.messageManager.removeMessageListener("GeckoView:ContextMenu", this);
}
// DOM event handler
@ -83,6 +85,15 @@ class GeckoViewContent extends GeckoViewModule {
debug("receiveMessage " + aMsg.name);
switch (aMsg.name) {
case "GeckoView:ContextMenu":
this.eventDispatcher.sendRequest({
type: aMsg.name,
screenX: aMsg.data.screenX,
screenY: aMsg.data.screenY,
elementSrc: aMsg.data.elementSrc,
uri: aMsg.data.uri
});
break;
case "GeckoView:DOMFullscreenExit":
this.window.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIDOMWindowUtils)

Просмотреть файл

@ -1440,6 +1440,7 @@ pref("javascript.options.discardSystemSource", false);
// Comment 32 and Bug 613551.
pref("javascript.options.mem.high_water_mark", 128);
pref("javascript.options.mem.max", -1);
pref("javascript.options.mem.nursery.max_kb", -1);
pref("javascript.options.mem.gc_per_zone", true);
pref("javascript.options.mem.gc_incremental", true);
pref("javascript.options.mem.gc_incremental_slice_ms", 5);

Просмотреть файл

@ -6,7 +6,6 @@
#include "ExtensionProtocolHandler.h"
#include "mozilla/AbstractThread.h"
#include "mozilla/ClearOnShutdown.h"
#include "mozilla/ExtensionPolicyService.h"
#include "mozilla/FileUtils.h"
@ -19,6 +18,7 @@
#include "FileDescriptor.h"
#include "FileDescriptorFile.h"
#include "LoadInfo.h"
#include "nsContentUtils.h"
#include "nsServiceManagerUtils.h"
#include "nsIFile.h"
#include "nsIFileChannel.h"
@ -109,6 +109,8 @@ class ExtensionStreamGetter : public RefCounted<ExtensionStreamGetter>
{
MOZ_ASSERT(aURI);
MOZ_ASSERT(aLoadInfo);
SetupEventTarget();
}
// To use when getting an FD for a packed extension JAR file
@ -126,10 +128,21 @@ class ExtensionStreamGetter : public RefCounted<ExtensionStreamGetter>
MOZ_ASSERT(aLoadInfo);
MOZ_ASSERT(mJarChannel);
MOZ_ASSERT(aJarFile);
SetupEventTarget();
}
~ExtensionStreamGetter() {}
void SetupEventTarget()
{
mMainThreadEventTarget =
nsContentUtils::GetEventTargetByLoadInfo(mLoadInfo, TaskCategory::Other);
if (!mMainThreadEventTarget) {
mMainThreadEventTarget = GetMainThreadSerialEventTarget();
}
}
// Get an input stream or file descriptor from the parent asynchronously.
Result<Ok, nsresult> GetAsync(nsIStreamListener* aListener,
nsIChannel* aChannel);
@ -149,6 +162,7 @@ class ExtensionStreamGetter : public RefCounted<ExtensionStreamGetter>
nsCOMPtr<nsIFile> mJarFile;
nsCOMPtr<nsIStreamListener> mListener;
nsCOMPtr<nsIChannel> mChannel;
nsCOMPtr<nsISerialEventTarget> mMainThreadEventTarget;
bool mIsJarChannel;
};
@ -225,6 +239,7 @@ ExtensionStreamGetter::GetAsync(nsIStreamListener* aListener,
nsIChannel* aChannel)
{
MOZ_ASSERT(IsNeckoChild());
MOZ_ASSERT(mMainThreadEventTarget);
mListener = aListener;
mChannel = aChannel;
@ -241,7 +256,7 @@ ExtensionStreamGetter::GetAsync(nsIStreamListener* aListener,
if (mIsJarChannel) {
// Request an FD for this moz-extension URI
gNeckoChild->SendGetExtensionFD(uri, loadInfo)->Then(
AbstractThread::MainThread(),
mMainThreadEventTarget,
__func__,
[self] (const FileDescriptor& fd) {
self->OnFD(fd);
@ -255,7 +270,7 @@ ExtensionStreamGetter::GetAsync(nsIStreamListener* aListener,
// Request an input stream for this moz-extension URI
gNeckoChild->SendGetExtensionStream(uri, loadInfo)->Then(
AbstractThread::MainThread(),
mMainThreadEventTarget,
__func__,
[self] (const OptionalIPCStream& stream) {
nsCOMPtr<nsIInputStream> inputStream;
@ -277,6 +292,7 @@ ExtensionStreamGetter::OnStream(nsIInputStream* aStream)
{
MOZ_ASSERT(IsNeckoChild());
MOZ_ASSERT(mListener);
MOZ_ASSERT(mMainThreadEventTarget);
// We must keep an owning reference to the listener
// until we pass it on to AsyncRead.
@ -293,7 +309,8 @@ ExtensionStreamGetter::OnStream(nsIInputStream* aStream)
}
nsCOMPtr<nsIInputStreamPump> pump;
nsresult rv = NS_NewInputStreamPump(getter_AddRefs(pump), aStream);
nsresult rv = NS_NewInputStreamPump(getter_AddRefs(pump), aStream, -1, -1, 0,
0, false, mMainThreadEventTarget);
if (NS_FAILED(rv)) {
mChannel->Cancel(NS_BINDING_ABORTED);
return;
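The new mMainThreadEventTarget exists so the Necko replies and the stream pump run on the loading document's labeled event target instead of the generic main thread; a condensed sketch of the selection pattern, using only the calls visible above:

nsCOMPtr<nsISerialEventTarget> target =
    nsContentUtils::GetEventTargetByLoadInfo(loadInfo, mozilla::TaskCategory::Other);
if (!target)
    target = GetMainThreadSerialEventTarget();   // e.g. no associated document

// The MozPromise callbacks and the pump are then bound to |target|:
//   gNeckoChild->SendGetExtensionFD(uri, loadInfo)->Then(target, __func__, ...);
//   NS_NewInputStreamPump(getter_AddRefs(pump), stream, -1, -1, 0, 0, false, target);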

Просмотреть файл

@ -6,7 +6,6 @@
this.EXPORTED_SYMBOLS = [
"BulkKeyBundle",
"SyncKeyBundle"
];
var {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
@ -167,49 +166,3 @@ BulkKeyBundle.prototype = {
this.hmacKey = Utils.safeAtoB(value[1]);
},
};
/**
* Represents a key pair derived from a Sync Key via HKDF.
*
* Instances of this type should be considered immutable. You create an
* instance by specifying the username and 26 character "friendly" Base32
* encoded Sync Key. The Sync Key is derived at instance creation time.
*
* If the username or Sync Key is invalid, an Error will be thrown.
*/
this.SyncKeyBundle = function SyncKeyBundle(username, syncKey) {
let log = Log.repository.getLogger("Sync.SyncKeyBundle");
log.info("SyncKeyBundle being created.");
KeyBundle.call(this);
this.generateFromKey(username, syncKey);
}
SyncKeyBundle.prototype = {
__proto__: KeyBundle.prototype,
/*
* If we've got a string, hash it into keys and store them.
*/
generateFromKey: function generateFromKey(username, syncKey) {
if (!username || (typeof username != "string")) {
throw new Error("Sync Key cannot be generated from non-string username.");
}
if (!syncKey || (typeof syncKey != "string")) {
throw new Error("Sync Key cannot be generated from non-string key.");
}
if (!Utils.isPassphrase(syncKey)) {
throw new Error("Provided key is not a passphrase, cannot derive Sync " +
"Key Bundle.");
}
// Expand the base32 Sync Key to an AES 256 and 256 bit HMAC key.
let prk = Utils.decodeKeyBase32(syncKey);
let info = HMAC_INPUT + username;
let okm = Utils.hkdfExpand(prk, info, 32 * 2);
this.encryptionKey = okm.slice(0, 32);
this.hmacKey = okm.slice(32, 64);
},
};

Просмотреть файл

@ -97,7 +97,8 @@ const EHTestsCommon = {
generateCredentialsChangedFailure() {
// Make sync fail due to changed credentials. We simply re-encrypt
// the keys with a different Sync Key, without changing the local one.
let newSyncKeyBundle = new SyncKeyBundle("johndoe", "23456234562345623456234562");
let newSyncKeyBundle = new BulkKeyBundle("crypto");
newSyncKeyBundle.generateRandom();
let keys = Service.collectionKeys.asWBO();
keys.encrypt(newSyncKeyBundle);
return keys.upload(Service.resource(Service.cryptoKeysURL));

Просмотреть файл

@ -117,54 +117,6 @@ add_test(function test_repeated_hmac() {
run_next_test();
});
add_test(function test_sync_key_bundle_derivation() {
_("Ensure derivation from known values works.");
// The known values in this test were originally verified against Firefox
// Home.
let bundle = new SyncKeyBundle("st3fan", "q7ynpwq7vsc9m34hankbyi3s3i");
// These should be compared to the results from Home, as they once were.
let e = "14b8c09fa84e92729ee695160af6e0385f8f6215a25d14906e1747bdaa2de426";
let h = "370e3566245d79fe602a3adb5137e42439cd2a571235197e0469d7d541b07875";
let realE = Utils.bytesAsHex(bundle.encryptionKey);
let realH = Utils.bytesAsHex(bundle.hmacKey);
_("Real E: " + realE);
_("Real H: " + realH);
do_check_eq(realH, h);
do_check_eq(realE, e);
run_next_test();
});
add_test(function test_keymanager() {
let testKey = "ababcdefabcdefabcdefabcdef";
let username = "john@example.com";
// Decode the key here to mirror what generateEntry will do,
// but pass it encoded into the KeyBundle call below.
let sha256inputE = "" + HMAC_INPUT + username + "\x01";
let key = Utils.makeHMACKey(Utils.decodeKeyBase32(testKey));
let encryptKey = sha256HMAC(sha256inputE, key);
let sha256inputH = encryptKey + HMAC_INPUT + username + "\x02";
let hmacKey = sha256HMAC(sha256inputH, key);
// Encryption key is stored in base64 for WeaveCrypto convenience.
do_check_eq(encryptKey, new SyncKeyBundle(username, testKey).encryptionKey);
do_check_eq(hmacKey, new SyncKeyBundle(username, testKey).hmacKey);
// Test with the same KeyBundle for both.
let obj = new SyncKeyBundle(username, testKey);
do_check_eq(hmacKey, obj.hmacKey);
do_check_eq(encryptKey, obj.encryptionKey);
run_next_test();
});
add_task(async function test_ensureLoggedIn() {
let log = Log.repository.getLogger("Test");
Log.repository.rootLogger.addAppender(new Log.DumpAppender());

Просмотреть файл

@ -220,7 +220,8 @@ add_task(async function v5_upgrade() {
_("New meta/global: " + JSON.stringify(meta_global));
// Fill the keys with bad data.
let badKeys = new SyncKeyBundle("foobar", "aaaaaaaaaaaaaaaaaaaaaaaaaa");
let badKeys = new BulkKeyBundle("crypto");
badKeys.generateRandom();
await update_server_keys(badKeys, "keys", "crypto/keys"); // v4
await update_server_keys(badKeys, "bulk", "crypto/bulk"); // v5

Просмотреть файл

@ -1,5 +0,0 @@
[controller-on-reload.https.html]
type: testharness
[controller is set upon reload after registration]
expected: FAIL

Просмотреть файл

@ -1,8 +0,0 @@
[url-parsing.https.html]
type: testharness
[register should use the relevant global of the object it was called on to resolve the script URL and the default scope URL]
expected: FAIL
[register should use the relevant global of the object it was called on to resolve the script URL and the given scope URL]
expected: FAIL

Просмотреть файл

@ -1,5 +0,0 @@
[multiple-register.https.html]
type: testharness
[Subsequent registrations from a different iframe resolve to the different registration object but they refer to the same registration and workers]
expected: FAIL

Просмотреть файл

@ -1,11 +0,0 @@
[registration-iframe.https.html]
type: testharness
[register method should use the "relevant global object" to parse its scriptURL and scope - normal case]
expected: FAIL
[register method should use the "relevant global object" to parse its scriptURL and scope - error case]
expected: FAIL
[A scope url should start with the given script url]
expected: FAIL

Просмотреть файл

@ -33,6 +33,30 @@ XPCOMUtils.defineLazyModuleGetter(this, "DeferredTask",
XPCOMUtils.defineLazyModuleGetter(this, "FileUtils",
"resource://gre/modules/FileUtils.jsm");
function _TabRemovalObserver(resolver, tabParentIds) {
this._resolver = resolver;
this._tabParentIds = tabParentIds;
Services.obs.addObserver(this, "ipc:browser-destroyed");
}
_TabRemovalObserver.prototype = {
_resolver: null,
_tabParentIds: null,
QueryInterface: XPCOMUtils.generateQI([Ci.nsIObserver]),
observe(subject, topic, data) {
let tabParent = subject.QueryInterface(Ci.nsITabParent);
if (this._tabParentIds.has(tabParent.tabId)) {
this._tabParentIds.delete(tabParent.tabId);
if (this._tabParentIds.size == 0) {
Services.obs.removeObserver(this, "ipc:browser-destroyed");
this._resolver();
}
}
}
};
function _ContextualIdentityService(path) {
this.init(path);
}
@ -309,9 +333,33 @@ _ContextualIdentityService.prototype = {
},
closeContainerTabs(userContextId = 0) {
this._forEachContainerTab(function(tab, tabbrowser) {
tabbrowser.removeTab(tab);
}, userContextId);
return new Promise(resolve => {
let tabParentIds = new Set();
this._forEachContainerTab((tab, tabbrowser) => {
let frameLoader = tab.linkedBrowser.QueryInterface(Ci.nsIFrameLoaderOwner).frameLoader;
// We don't have tabParent in non-e10s mode.
if (frameLoader.tabParent) {
tabParentIds.add(frameLoader.tabParent.tabId);
}
tabbrowser.removeTab(tab);
}, userContextId);
if (tabParentIds.size == 0) {
resolve();
return;
}
new _TabRemovalObserver(resolve, tabParentIds);
});
},
notifyAllContainersCleared() {
for (let identity of this._identities) {
Services.obs.notifyObservers(null, "clear-origin-attributes-data",
JSON.stringify({ userContextId: identity.userContextId }));
}
},
_forEachContainerTab(callback, userContextId = 0) {

Просмотреть файл

@ -48,12 +48,6 @@ const MIN_SUBSESSION_LENGTH_MS = Preferences.get("toolkit.telemetry.minSubsessio
const LOGGER_NAME = "Toolkit.Telemetry";
const LOGGER_PREFIX = "TelemetrySession" + (Utils.isContentProcess ? "#content::" : "::");
const PREF_BRANCH = "toolkit.telemetry.";
const PREF_PREVIOUS_BUILDID = PREF_BRANCH + "previousBuildID";
const PREF_FHR_UPLOAD_ENABLED = "datareporting.healthreport.uploadEnabled";
const PREF_UNIFIED = PREF_BRANCH + "unified";
const PREF_SHUTDOWN_PINGSENDER = PREF_BRANCH + "shutdownPingSender.enabled";
const MESSAGE_TELEMETRY_PAYLOAD = "Telemetry:Payload";
const MESSAGE_TELEMETRY_THREAD_HANGS = "Telemetry:ChildThreadHangs";
const MESSAGE_TELEMETRY_GET_CHILD_THREAD_HANGS = "Telemetry:GetChildThreadHangs";
@ -65,7 +59,7 @@ const ABORTED_SESSION_FILE_NAME = "aborted-session-ping";
// Whether the FHR/Telemetry unification features are enabled.
// Changing this pref requires a restart.
const IS_UNIFIED_TELEMETRY = Preferences.get(PREF_UNIFIED, false);
const IS_UNIFIED_TELEMETRY = Preferences.get(TelemetryUtils.Preferences.Unified, false);
// Maximum number of content payloads that we are willing to store.
const MAX_NUM_CONTENT_PAYLOADS = 10;
@ -531,9 +525,6 @@ var TelemetryScheduler = {
this.EXPORTED_SYMBOLS = ["TelemetrySession"];
this.TelemetrySession = Object.freeze({
Constants: Object.freeze({
PREF_PREVIOUS_BUILDID,
}),
/**
* Send a ping to a test server. Used only for testing.
*/
@ -1516,12 +1507,12 @@ var Impl = {
// Record old value and update build ID preference if this is the first
// run with a new build ID.
let previousBuildId = Preferences.get(PREF_PREVIOUS_BUILDID, null);
let previousBuildId = Preferences.get(TelemetryUtils.Preferences.PreviousBuildID, null);
let thisBuildID = Services.appinfo.appBuildID;
// If there is no previousBuildId preference, we send null to the server.
if (previousBuildId != thisBuildID) {
this._previousBuildId = previousBuildId;
Preferences.set(PREF_PREVIOUS_BUILDID, thisBuildID);
Preferences.set(TelemetryUtils.Preferences.PreviousBuildID, thisBuildID);
}
this.attachEarlyObservers();
@ -1811,7 +1802,7 @@ var Impl = {
// browsing session on, to mitigate issues with "bot" profiles (see bug 1354482).
// Note: sending the "shutdown" ping using the pingsender is currently disabled
// due to a crash happening on OSX platforms. See bug 1357745 for context.
let sendWithPingsender = Preferences.get(PREF_SHUTDOWN_PINGSENDER, false) &&
let sendWithPingsender = Preferences.get(TelemetryUtils.Preferences.ShutdownPingSender, false) &&
!TelemetryReportingPolicy.isFirstRun();
let options = {

Просмотреть файл

@ -36,6 +36,7 @@ this.TelemetryUtils = {
Unified: "toolkit.telemetry.unified",
NewProfilePingEnabled: "toolkit.telemetry.newProfilePing.enabled",
NewProfilePingDelay: "toolkit.telemetry.newProfilePing.delay",
PreviousBuildID: "toolkit.telemetry.previousBuildID",
// Log Preferences
LogLevel: "toolkit.telemetry.log.level",

Просмотреть файл

@ -34,7 +34,7 @@ add_task(async function test_firstRun() {
let metadata = TelemetrySession.getMetadata();
do_check_false("previousBuildID" in metadata);
let appBuildID = getAppInfo().appBuildID;
let buildIDPref = Services.prefs.getCharPref(TelemetrySession.Constants.PREF_PREVIOUS_BUILDID);
let buildIDPref = Services.prefs.getCharPref(TelemetryUtils.Preferences.PreviousBuildID);
do_check_eq(appBuildID, buildIDPref);
});
@ -57,7 +57,7 @@ add_task(async function test_newBuild() {
await TelemetryController.testReset();
let metadata = TelemetrySession.getMetadata();
do_check_eq(metadata.previousBuildId, oldBuildID);
let buildIDPref = Services.prefs.getCharPref(TelemetrySession.Constants.PREF_PREVIOUS_BUILDID);
let buildIDPref = Services.prefs.getCharPref(TelemetryUtils.Preferences.PreviousBuildID);
do_check_eq(NEW_BUILD_ID, buildIDPref);
});

Просмотреть файл

@ -8,6 +8,8 @@
#include "ProfilerMarker.h"
using namespace mozilla;
ProfileBuffer::ProfileBuffer(int aEntrySize)
: mEntries(mozilla::MakeUnique<ProfileBufferEntry[]>(aEntrySize))
, mWritePos(0)
@ -55,24 +57,6 @@ ProfileBuffer::AddThreadIdEntry(int aThreadId, LastSample* aLS)
AddEntry(ProfileBufferEntry::ThreadId(aThreadId));
}
void
ProfileBuffer::AddDynamicStringEntry(const char* aStr)
{
size_t strLen = strlen(aStr) + 1; // +1 for the null terminator
for (size_t j = 0; j < strLen; ) {
// Store up to kNumChars characters in the entry.
char chars[ProfileBufferEntry::kNumChars];
size_t len = ProfileBufferEntry::kNumChars;
if (j + len >= strLen) {
len = strLen - j;
}
memcpy(chars, &aStr[j], len);
j += ProfileBufferEntry::kNumChars;
AddEntry(ProfileBufferEntry::DynamicStringFragment(chars));
}
}
void
ProfileBuffer::AddStoredMarker(ProfilerMarker *aStoredMarker)
{
@ -80,6 +64,51 @@ ProfileBuffer::AddStoredMarker(ProfilerMarker *aStoredMarker)
mStoredMarkers.insert(aStoredMarker);
}
void
ProfileBuffer::CollectNativeLeafAddr(void* aAddr)
{
AddEntry(ProfileBufferEntry::NativeLeafAddr(aAddr));
}
void
ProfileBuffer::CollectJitReturnAddr(void* aAddr)
{
AddEntry(ProfileBufferEntry::JitReturnAddr(aAddr));
}
void
ProfileBuffer::CollectCodeLocation(
const char* aLabel, const char* aStr, int aLineNumber,
const Maybe<js::ProfileEntry::Category>& aCategory)
{
AddEntry(ProfileBufferEntry::Label(aLabel));
if (aStr) {
// Store the string using one or more DynamicStringFragment entries.
size_t strLen = strlen(aStr) + 1; // +1 for the null terminator
for (size_t j = 0; j < strLen; ) {
// Store up to kNumChars characters in the entry.
char chars[ProfileBufferEntry::kNumChars];
size_t len = ProfileBufferEntry::kNumChars;
if (j + len >= strLen) {
len = strLen - j;
}
memcpy(chars, &aStr[j], len);
j += ProfileBufferEntry::kNumChars;
AddEntry(ProfileBufferEntry::DynamicStringFragment(chars));
}
}
if (aLineNumber != -1) {
AddEntry(ProfileBufferEntry::LineNumber(aLineNumber));
}
if (aCategory.isSome()) {
AddEntry(ProfileBufferEntry::Category(int(*aCategory)));
}
}
void
ProfileBuffer::DeleteExpiredStoredMarkers()
{

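The new CollectCodeLocation keeps the fragmentation loop that used to live in AddDynamicStringEntry: the dynamic string, including its null terminator, is copied into fixed-size DynamicStringFragment entries. A minimal standalone sketch of that chunking pattern follows; the fragment size, the FragmentString name, and the std::vector return type are assumptions made for illustration, not values or helpers taken from ProfileBufferEntry.

// Illustrative sketch of the DynamicStringFragment chunking above; the
// fragment size here is assumed, not the real ProfileBufferEntry::kNumChars.
#include <cstring>
#include <string>
#include <vector>

constexpr size_t kFragmentChars = 8;  // assumed fragment size for the example

std::vector<std::string> FragmentString(const char* aStr)
{
  std::vector<std::string> fragments;
  size_t strLen = strlen(aStr) + 1;  // +1 for the null terminator
  for (size_t j = 0; j < strLen; ) {
    char chars[kFragmentChars];
    size_t len = kFragmentChars;
    if (j + len >= strLen) {
      len = strLen - j;  // last chunk holds only the remaining bytes
    }
    memcpy(chars, &aStr[j], len);
    fragments.emplace_back(chars, len);
    j += kFragmentChars;
  }
  return fragments;
}

// A 20-character label plus its terminator therefore yields three fragments
// of 8, 8 and 5 bytes, mirroring how the entries land in the profile buffer.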
View file

@ -13,7 +13,7 @@
#include "mozilla/RefPtr.h"
#include "mozilla/RefCounted.h"
class ProfileBuffer final
class ProfileBuffer final : public ProfilerStackCollector
{
public:
explicit ProfileBuffer(int aEntrySize);
@ -42,9 +42,16 @@ public:
// record the resulting generation and index in |aLS| if it's non-null.
void AddThreadIdEntry(int aThreadId, LastSample* aLS = nullptr);
// Add to the buffer a dynamic string. It'll be spread across one or more
// DynamicStringFragment entries.
void AddDynamicStringEntry(const char* aStr);
virtual mozilla::Maybe<uint32_t> Generation() override
{
return mozilla::Some(mGeneration);
}
virtual void CollectNativeLeafAddr(void* aAddr) override;
virtual void CollectJitReturnAddr(void* aAddr) override;
virtual void CollectCodeLocation(
const char* aLabel, const char* aStr, int aLineNumber,
const mozilla::Maybe<js::ProfileEntry::Category>& aCategory) override;
// Maximum size of a frameKey string that we'll handle.
static const size_t kMaxFrameKeyLength = 512;

View file

@ -22,7 +22,7 @@
//
// - A "backtrace" sample is the simplest kind. It is done in response to an
// API call (profiler_suspend_and_sample_thread()). It involves getting a
// stack trace and passing it to a callback function; it does not write to a
// stack trace via a ProfilerStackCollector; it does not write to a
// ProfileBuffer. The sampling is done from off-thread, and so uses
// SuspendAndSampleAndResumeThread() to get the register values.
@ -54,7 +54,9 @@
#include "nsIXULRuntime.h"
#include "nsDirectoryServiceUtils.h"
#include "nsDirectoryServiceDefs.h"
#include "nsJSPrincipals.h"
#include "nsMemoryReporterManager.h"
#include "nsScriptSecurityManager.h"
#include "nsXULAppAPI.h"
#include "nsProfilerStartParams.h"
#include "ProfilerParent.h"
@ -657,17 +659,31 @@ public:
#endif
};
static void
AddPseudoEntry(PSLockRef aLock, NotNull<RacyThreadInfo*> aRacyInfo,
const js::ProfileEntry& entry, ProfileBuffer& aBuffer)
static bool
IsChromeJSScript(JSScript* aScript)
{
// WARNING: this function runs within the profiler's "critical section".
nsIScriptSecurityManager* const secman =
nsScriptSecurityManager::GetScriptSecurityManager();
NS_ENSURE_TRUE(secman, false);
JSPrincipals* const principals = JS_GetScriptPrincipals(aScript);
return secman->IsSystemPrincipal(nsJSPrincipals::get(principals));
}
static void
AddPseudoEntry(uint32_t aFeatures, NotNull<RacyThreadInfo*> aRacyInfo,
const js::ProfileEntry& entry,
ProfilerStackCollector& aCollector)
{
// WARNING: this function runs within the profiler's "critical section".
// WARNING: this function might be called while the profiler is inactive, and
// cannot rely on ActivePS.
MOZ_ASSERT(entry.kind() == js::ProfileEntry::Kind::CPP_NORMAL ||
entry.kind() == js::ProfileEntry::Kind::JS_NORMAL);
aBuffer.AddEntry(ProfileBufferEntry::Label(entry.label()));
const char* dynamicString = entry.dynamicString();
int lineno = -1;
@ -675,28 +691,30 @@ AddPseudoEntry(PSLockRef aLock, NotNull<RacyThreadInfo*> aRacyInfo,
// |dynamicString|. Perhaps it shouldn't?
if (dynamicString) {
// Adjust the dynamic string as necessary.
if (ActivePS::FeaturePrivacy(aLock)) {
dynamicString = "(private)";
} else if (strlen(dynamicString) >= ProfileBuffer::kMaxFrameKeyLength) {
dynamicString = "(too long)";
}
// Store the string using one or more DynamicStringFragment entries.
aBuffer.AddDynamicStringEntry(dynamicString);
bool isChromeJSEntry = false;
if (entry.isJs()) {
JSScript* script = entry.script();
if (script) {
// We call entry.script() repeatedly -- rather than storing the result in
// a local variable -- in order to avoid rooting hazards.
if (entry.script()) {
isChromeJSEntry = IsChromeJSScript(entry.script());
if (!entry.pc()) {
// The JIT only allows the top-most entry to have a nullptr pc.
MOZ_ASSERT(&entry == &aRacyInfo->entries[aRacyInfo->stackSize() - 1]);
} else {
lineno = JS_PCToLineNumber(script, entry.pc());
lineno = JS_PCToLineNumber(entry.script(), entry.pc());
}
}
} else {
lineno = entry.line();
}
// Adjust the dynamic string as necessary.
if (ProfilerFeature::HasPrivacy(aFeatures) && !isChromeJSEntry) {
dynamicString = "(private)";
} else if (strlen(dynamicString) >= ProfileBuffer::kMaxFrameKeyLength) {
dynamicString = "(too long)";
}
} else {
// XXX: Bug 1010578. Don't assume a CPP entry and try to get the line for
// js entries as well.
@ -705,11 +723,8 @@ AddPseudoEntry(PSLockRef aLock, NotNull<RacyThreadInfo*> aRacyInfo,
}
}
if (lineno != -1) {
aBuffer.AddEntry(ProfileBufferEntry::LineNumber(lineno));
}
aBuffer.AddEntry(ProfileBufferEntry::Category(int(entry.category())));
aCollector.CollectCodeLocation(entry.label(), dynamicString, lineno,
Some(entry.category()));
}
// Setting MAX_NATIVE_FRAMES too high risks the unwinder wasting a lot of time
@ -747,12 +762,17 @@ struct AutoWalkJSStack
}
};
// Merges the pseudo-stack, native stack, and JS stack, outputting the details
// to aCollector.
static void
MergeStacksIntoProfile(PSLockRef aLock, bool aIsSynchronous,
const ThreadInfo& aThreadInfo, const Registers& aRegs,
const NativeStack& aNativeStack, ProfileBuffer& aBuffer)
MergeStacks(uint32_t aFeatures, bool aIsSynchronous,
const ThreadInfo& aThreadInfo, const Registers& aRegs,
const NativeStack& aNativeStack,
ProfilerStackCollector& aCollector)
{
// WARNING: this function runs within the profiler's "critical section".
// WARNING: this function might be called while the profiler is inactive, and
// cannot rely on ActivePS.
NotNull<RacyThreadInfo*> racyInfo = aThreadInfo.RacyInfo();
js::ProfileEntry* pseudoEntries = racyInfo->entries;
@ -767,10 +787,10 @@ MergeStacksIntoProfile(PSLockRef aLock, bool aIsSynchronous,
// ProfilingFrameIterator to avoid incorrectly resetting the generation of
// sampled JIT entries inside the JS engine. See note below concerning 'J'
// entries.
uint32_t startBufferGen;
startBufferGen = aIsSynchronous
? UINT32_MAX
: aBuffer.mGeneration;
uint32_t startBufferGen = UINT32_MAX;
if (!aIsSynchronous && aCollector.Generation().isSome()) {
startBufferGen = *aCollector.Generation();
}
uint32_t jsCount = 0;
JS::ProfilingFrameIterator::Frame jsFrames[MAX_JS_FRAMES];
@ -882,7 +902,7 @@ MergeStacksIntoProfile(PSLockRef aLock, bool aIsSynchronous,
// Pseudo-frames with the CPP_MARKER_FOR_JS kind are just annotations and
// should not be recorded in the profile.
if (pseudoEntry.kind() != js::ProfileEntry::Kind::CPP_MARKER_FOR_JS) {
AddPseudoEntry(aLock, racyInfo, pseudoEntry, aBuffer);
AddPseudoEntry(aFeatures, racyInfo, pseudoEntry, aCollector);
}
pseudoIndex++;
continue;
@ -908,13 +928,11 @@ MergeStacksIntoProfile(PSLockRef aLock, bool aIsSynchronous,
// with stale JIT code return addresses.
if (aIsSynchronous ||
jsFrame.kind == JS::ProfilingFrameIterator::Frame_Wasm) {
aBuffer.AddEntry(ProfileBufferEntry::Label(""));
aBuffer.AddDynamicStringEntry(jsFrame.label);
aCollector.CollectCodeLocation("", jsFrame.label, -1, Nothing());
} else {
MOZ_ASSERT(jsFrame.kind == JS::ProfilingFrameIterator::Frame_Ion ||
jsFrame.kind == JS::ProfilingFrameIterator::Frame_Baseline);
aBuffer.AddEntry(
ProfileBufferEntry::JitReturnAddr(jsFrames[jsIndex].returnAddress));
aCollector.CollectJitReturnAddr(jsFrames[jsIndex].returnAddress);
}
jsIndex--;
@ -926,7 +944,7 @@ MergeStacksIntoProfile(PSLockRef aLock, bool aIsSynchronous,
if (nativeStackAddr) {
MOZ_ASSERT(nativeIndex >= 0);
void* addr = (void*)aNativeStack.mPCs[nativeIndex];
aBuffer.AddEntry(ProfileBufferEntry::NativeLeafAddr(addr));
aCollector.CollectNativeLeafAddr(addr);
}
if (nativeIndex >= 0) {
nativeIndex--;
@ -937,10 +955,11 @@ MergeStacksIntoProfile(PSLockRef aLock, bool aIsSynchronous,
//
// Do not do this for synchronous samples, which use their own
// ProfileBuffers instead of the global one in CorePS.
if (!aIsSynchronous && context) {
MOZ_ASSERT(aBuffer.mGeneration >= startBufferGen);
uint32_t lapCount = aBuffer.mGeneration - startBufferGen;
JS::UpdateJSContextProfilerSampleBufferGen(context, aBuffer.mGeneration,
if (!aIsSynchronous && context && aCollector.Generation().isSome()) {
MOZ_ASSERT(*aCollector.Generation() >= startBufferGen);
uint32_t lapCount = *aCollector.Generation() - startBufferGen;
JS::UpdateJSContextProfilerSampleBufferGen(context,
*aCollector.Generation(),
lapCount);
}
}
@ -965,6 +984,8 @@ DoNativeBacktrace(PSLockRef aLock, const ThreadInfo& aThreadInfo,
const Registers& aRegs, NativeStack& aNativeStack)
{
// WARNING: this function runs within the profiler's "critical section".
// WARNING: this function might be called while the profiler is inactive, and
// cannot rely on ActivePS.
// Start with the current function. We use 0 as the frame number here because
// the FramePointerStackWalk() and MozStackWalk() calls below will use 1..N.
@ -998,6 +1019,8 @@ DoNativeBacktrace(PSLockRef aLock, const ThreadInfo& aThreadInfo,
const Registers& aRegs, NativeStack& aNativeStack)
{
// WARNING: this function runs within the profiler's "critical section".
// WARNING: this function might be called while the profiler is inactive, and
// cannot rely on ActivePS.
const mcontext_t* mcontext = &aRegs.mContext->uc_mcontext;
mcontext_t savedContext;
@ -1077,6 +1100,8 @@ DoNativeBacktrace(PSLockRef aLock, const ThreadInfo& aThreadInfo,
const Registers& aRegs, NativeStack& aNativeStack)
{
// WARNING: this function runs within the profiler's "critical section".
// WARNING: this function might be called while the profiler is inactive, and
// cannot rely on ActivePS.
const mcontext_t* mc = &aRegs.mContext->uc_mcontext;
@ -1222,13 +1247,13 @@ DoSharedSample(PSLockRef aLock, bool aIsSynchronous,
if (ActivePS::FeatureStackWalk(aLock)) {
DoNativeBacktrace(aLock, aThreadInfo, aRegs, nativeStack);
MergeStacksIntoProfile(aLock, aIsSynchronous, aThreadInfo, aRegs,
nativeStack, aBuffer);
MergeStacks(ActivePS::Features(aLock), aIsSynchronous, aThreadInfo, aRegs,
nativeStack, aBuffer);
} else
#endif
{
MergeStacksIntoProfile(aLock, aIsSynchronous, aThreadInfo, aRegs,
nativeStack, aBuffer);
MergeStacks(ActivePS::Features(aLock), aIsSynchronous, aThreadInfo, aRegs,
nativeStack, aBuffer);
if (ActivePS::FeatureLeaf(aLock)) {
aBuffer.AddEntry(ProfileBufferEntry::NativeLeafAddr((void*)aRegs.mPC));
@ -3046,5 +3071,62 @@ profiler_suspend_and_sample_thread(
}
}
// NOTE: aCollector's methods will be called while the target thread is paused.
// Doing things in those methods like allocating -- which may try to claim
// locks -- is a surefire way to deadlock.
void
profiler_suspend_and_sample_thread(int aThreadId,
uint32_t aFeatures,
ProfilerStackCollector& aCollector,
bool aSampleNative /* = true */)
{
// Lock the profiler mutex
PSAutoLock lock(gPSMutex);
const CorePS::ThreadVector& liveThreads = CorePS::LiveThreads(lock);
for (uint32_t i = 0; i < liveThreads.size(); i++) {
ThreadInfo* info = liveThreads.at(i);
if (info->ThreadId() == aThreadId) {
if (info->IsMainThread()) {
aCollector.SetIsMainThread();
}
// Allocate the space for the native stack
NativeStack nativeStack;
// Suspend, sample, and then resume the target thread.
Sampler sampler(lock);
sampler.SuspendAndSampleAndResumeThread(lock, *info,
[&](const Registers& aRegs) {
// The target thread is now suspended. Collect a native backtrace if
// requested, and pass the merged stacks to aCollector.
bool isSynchronous = false;
#if defined(HAVE_NATIVE_UNWIND)
if (aSampleNative) {
DoNativeBacktrace(lock, *info, aRegs, nativeStack);
MergeStacks(aFeatures, isSynchronous, *info, aRegs, nativeStack,
aCollector);
} else
#endif
{
MergeStacks(aFeatures, isSynchronous, *info, aRegs, nativeStack,
aCollector);
if (ProfilerFeature::HasLeaf(aFeatures)) {
aCollector.CollectNativeLeafAddr((void*)aRegs.mPC);
}
}
});
// NOTE: Make sure to disable the sampler before it is destroyed, in case
// the profiler is running at the same time.
sampler.Disable(lock);
break;
}
}
}
// END externally visible functions
////////////////////////////////////////////////////////////////////////

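To make the new entry point concrete, here is a minimal caller-side sketch built against the interface this patch declares (ProfilerStackCollector plus the four-argument profiler_suspend_and_sample_thread). The FrameCountingCollector name and the SampleThreadOnce helper are hypothetical, introduced only for this example; the gtest added later in this commit exercises the same pattern.

// Illustrative sketch: a trivial collector that only counts frames.
class FrameCountingCollector final : public ProfilerStackCollector
{
public:
  void CollectNativeLeafAddr(void* aAddr) override { mFrames++; }
  void CollectJitReturnAddr(void* aAddr) override { mFrames++; }
  void CollectCodeLocation(
    const char* aLabel, const char* aStr, int aLineNumber,
    const mozilla::Maybe<js::ProfileEntry::Category>& aCategory) override
  {
    mFrames++;
  }

  int mFrames = 0;
};

// Hypothetical usage: sample one thread with the Leaf feature and native
// unwinding enabled; this works whether or not the profiler is active.
void SampleThreadOnce(int aThreadId)
{
  FrameCountingCollector collector;
  profiler_suspend_and_sample_thread(aThreadId, ProfilerFeature::Leaf,
                                     collector, /* aSampleNative = */ true);
  // collector.mFrames now holds the number of frames seen for this sample.
}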
View file

@ -9,9 +9,13 @@
#include "nsThreadUtils.h"
#include "nsITimer.h"
#include "mozilla/Mutex.h"
#include "mozilla/RefPtr.h"
#include "mozilla/SystemGroup.h"
using mozilla::Mutex;
using mozilla::MutexAutoLock;
using mozilla::SystemGroup;
using mozilla::TaskCategory;
using mozilla::TimeStamp;
class CheckResponsivenessTask : public mozilla::Runnable,
@ -45,7 +49,9 @@ public:
// Dispatching can fail during early startup, particularly when
// MOZ_PROFILER_STARTUP is used.
nsresult rv = NS_DispatchToMainThread(this);
nsresult rv = SystemGroup::Dispatch("CheckResponsivenessTask",
TaskCategory::Other,
do_AddRef(this));
if (NS_SUCCEEDED(rv)) {
mHasEverBeenSuccessfullyDispatched = true;
}
@ -66,6 +72,7 @@ public:
mLastTracerTime = TimeStamp::Now();
if (!mTimer) {
mTimer = do_CreateInstance("@mozilla.org/timer;1");
mTimer->SetTarget(SystemGroup::EventTargetFor(TaskCategory::Other));
}
mTimer->InitWithCallback(this, 16, nsITimer::TYPE_ONE_SHOT);
@ -74,7 +81,9 @@ public:
NS_IMETHOD Notify(nsITimer* aTimer) final
{
NS_DispatchToMainThread(this);
SystemGroup::Dispatch("CheckResponsivenessTask",
TaskCategory::Other,
do_AddRef(this));
return NS_OK;
}

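The change above routes both the re-dispatch of the task and the timer notifications through the SystemGroup's Other category instead of the anonymous main-thread queue. A small sketch of the same retargeting pattern follows; the StartLabeledTimer helper name is an assumption for the example, while the 16 ms one-shot interval mirrors the patch.

// Illustrative sketch: create a one-shot timer whose notifications run on the
// SystemGroup's "Other" event target, as CheckResponsivenessTask now does.
already_AddRefed<nsITimer> StartLabeledTimer(nsITimerCallback* aCallback)
{
  nsCOMPtr<nsITimer> timer = do_CreateInstance("@mozilla.org/timer;1");
  if (!timer) {
    return nullptr;
  }
  // Route the timer's callbacks off the anonymous main-thread queue.
  timer->SetTarget(SystemGroup::EventTargetFor(TaskCategory::Other));
  timer->InitWithCallback(aCallback, 16, nsITimer::TYPE_ONE_SHOT);
  return timer.forget();
}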
View file

@ -81,6 +81,7 @@ if CONFIG['MOZ_GECKO_PROFILER']:
]
LOCAL_INCLUDES += [
'/caps',
'/docshell/base',
'/ipc/chromium/src',
'/mozglue/linker',

View file

@ -25,6 +25,7 @@
#include "mozilla/Assertions.h"
#include "mozilla/Attributes.h"
#include "mozilla/GuardObjects.h"
#include "mozilla/Maybe.h"
#include "mozilla/Sprintf.h"
#include "mozilla/ThreadLocal.h"
#include "mozilla/UniquePtr.h"
@ -267,11 +268,52 @@ typedef void ProfilerStackCallback(void** aPCs, size_t aCount, bool aIsMainThrea
// WARNING: The target thread is suspended during the callback. Do not try to
// allocate or acquire any locks, or you could deadlock. The target thread will
// have resumed by the time this function returns.
//
// XXX: this function is in the process of being replaced with the other profiler_suspend_and_sample_thread() function.
PROFILER_FUNC_VOID(
profiler_suspend_and_sample_thread(int aThreadId,
const std::function<ProfilerStackCallback>& aCallback,
bool aSampleNative = true))
// An object of this class is passed to profiler_suspend_and_sample_thread().
// For each stack frame, one of the Collect methods will be called.
class ProfilerStackCollector
{
public:
// Some collectors need to worry about possibly overwriting previous
// generations of data. If that's not an issue, this can return Nothing,
// which is the default behaviour.
virtual mozilla::Maybe<uint32_t> Generation() { return mozilla::Nothing(); }
// This method will be called once if the thread being suspended is the main
// thread. Default behaviour is to do nothing.
virtual void SetIsMainThread() {}
// WARNING: The target thread is suspended when the Collect methods are
// called. Do not try to allocate or acquire any locks, or you could
// deadlock. The target thread will have resumed by the time
// profiler_suspend_and_sample_thread() returns.
virtual void CollectNativeLeafAddr(void* aAddr) = 0;
virtual void CollectJitReturnAddr(void* aAddr) = 0;
// aLabel is static and never null. aStr may be null. aLineNumber may be -1.
virtual void CollectCodeLocation(
const char* aLabel, const char* aStr, int aLineNumber,
const mozilla::Maybe<js::ProfileEntry::Category>& aCategory) = 0;
};
// This function suspends the thread identified by aThreadId, samples its
// pseudo-stack, JS stack, and (optionally) native stack, passing the collected
// frames into aCollector. aFeatures dictates which profiler features are used.
// |Privacy| and |Leaf| are the only relevant ones.
PROFILER_FUNC_VOID(
profiler_suspend_and_sample_thread(int aThreadId,
uint32_t aFeatures,
ProfilerStackCollector& aCollector,
bool aSampleNative = true))
struct ProfilerBacktraceDestructor
{
#ifdef MOZ_GECKO_PROFILER

View file

@ -664,3 +664,70 @@ TEST(GeckoProfiler, Bug1355807)
profiler_stop();
}
class GTestStackCollector final : public ProfilerStackCollector
{
public:
GTestStackCollector()
: mSetIsMainThread(0)
, mFrames(0)
{}
virtual void SetIsMainThread() { mSetIsMainThread++; }
virtual void CollectNativeLeafAddr(void* aAddr) { mFrames++; }
virtual void CollectJitReturnAddr(void* aAddr) { mFrames++; }
virtual void CollectCodeLocation(
const char* aLabel, const char* aStr, int aLineNumber,
const mozilla::Maybe<js::ProfileEntry::Category>& aCategory) { mFrames++; }
int mSetIsMainThread;
int mFrames;
};
void DoSuspendAndSample(int aTid, nsIThread* aThread)
{
aThread->Dispatch(
NS_NewRunnableFunction(
"GeckoProfiler_SuspendAndSample_Test::TestBody",
[&]() {
uint32_t features = ProfilerFeature::Leaf;
GTestStackCollector collector;
profiler_suspend_and_sample_thread(aTid, features, collector,
/* sampleNative = */ true);
ASSERT_TRUE(collector.mSetIsMainThread == 1);
ASSERT_TRUE(collector.mFrames > 5); // approximate lower bound; some frames must be collected
}),
NS_DISPATCH_SYNC);
}
TEST(GeckoProfiler, SuspendAndSample)
{
nsCOMPtr<nsIThread> thread;
nsresult rv = NS_NewNamedThread("GeckoProfGTest", getter_AddRefs(thread));
ASSERT_TRUE(NS_SUCCEEDED(rv));
int tid = Thread::GetCurrentId();
ASSERT_TRUE(!profiler_is_active());
// Suspend and sample while the profiler is inactive.
DoSuspendAndSample(tid, thread);
uint32_t features = ProfilerFeature::JS | ProfilerFeature::Threads;
const char* filters[] = { "GeckoMain", "Compositor" };
profiler_start(PROFILER_DEFAULT_ENTRIES, PROFILER_DEFAULT_INTERVAL,
features, filters, MOZ_ARRAY_LENGTH(filters));
ASSERT_TRUE(profiler_is_active());
// Suspend and sample while the profiler is active.
DoSuspendAndSample(tid, thread);
profiler_stop();
ASSERT_TRUE(!profiler_is_active());
}

Some files were not shown because too many files changed in this diff.