This commit is contained in:
Richard Newman 2012-02-18 23:36:35 -08:00
Родитель 64c9c39aca a4ac4954c0
Коммит 4bc82cb040
102 изменённых файлов: 3318 добавлений и 1224 удалений

Просмотреть файл

@ -1,5 +1,5 @@
<?xml version="1.0"?>
<blocklist xmlns="http://www.mozilla.org/2006/addons-blocklist" lastupdate="1328822681000">
<blocklist xmlns="http://www.mozilla.org/2006/addons-blocklist" lastupdate="1329176667000">
<emItems>
<emItem blockID="i58" id="webmaster@buzzzzvideos.info">
<versionRange minVersion="0" maxVersion="*">
@ -27,8 +27,12 @@
</targetApplication>
</versionRange>
</emItem>
<emItem blockID="i19" id="{46551EC9-40F0-4e47-8E18-8E5CF550CFB8}">
<versionRange minVersion="1.1b1" maxVersion="1.1b1">
<emItem blockID="i65" id="activity@facebook.com">
<versionRange minVersion="0" maxVersion="*">
</versionRange>
</emItem>
<emItem blockID="i66" id="youtubeer@youtuber.com">
<versionRange minVersion="0" maxVersion="*">
</versionRange>
</emItem>
<emItem blockID="i54" id="applebeegifts@mozilla.doslash.org">
@ -122,8 +126,11 @@
<versionRange minVersion="0" maxVersion="*">
</versionRange>
</emItem>
<emItem blockID="i56" id="flash@adobe.com">
<versionRange minVersion="0" maxVersion="*">
<emItem blockID="i23" id="firefox@bandoo.com">
<versionRange minVersion="5.0" maxVersion="5.0" severity="1">
<targetApplication id="{ec8030f7-c20a-464f-9b0e-13a3a9e97384}">
<versionRange minVersion="3.7a1pre" maxVersion="*" />
</targetApplication>
</versionRange>
</emItem>
<emItem blockID="i55" id="youtube@youtube7.com">
@ -178,11 +185,8 @@
<versionRange minVersion="2.2" maxVersion="2.2">
</versionRange>
</emItem>
<emItem blockID="i23" id="firefox@bandoo.com">
<versionRange minVersion="5.0" maxVersion="5.0" severity="1">
<targetApplication id="{ec8030f7-c20a-464f-9b0e-13a3a9e97384}">
<versionRange minVersion="3.7a1pre" maxVersion="*" />
</targetApplication>
<emItem blockID="i56" id="flash@adobe.com">
<versionRange minVersion="0" maxVersion="*">
</versionRange>
</emItem>
<emItem blockID="i45" id="{22119944-ED35-4ab1-910B-E619EA06A115}">
@ -192,6 +196,10 @@
</targetApplication>
</versionRange>
</emItem>
<emItem blockID="i19" id="{46551EC9-40F0-4e47-8E18-8E5CF550CFB8}">
<versionRange minVersion="1.1b1" maxVersion="1.1b1">
</versionRange>
</emItem>
<emItem blockID="i3" id="langpack-vi-VN@firefox.mozilla.org">
<versionRange minVersion="2.0" maxVersion="2.0">
</versionRange>

Просмотреть файл

@ -3551,12 +3551,14 @@
// it triggers will correctly update our URL bar.
this.tabbrowser.selectedTab = newTab;
} else {
let url = browserDragAndDrop.drop(event, { });
// Pass true to disallow dropping javascript: or data: urls
let url;
try {
url = browserDragAndDrop.drop(event, { }, true);
} catch (ex) {}
// valid urls don't contain spaces ' '; if we have a space it isn't a valid url.
// Also disallow dropping javascript: or data: urls--bail out
if (!url || !url.length || url.indexOf(" ", 0) != -1 ||
/^\s*(javascript|data):/.test(url))
if (!url || url.indexOf(" ") != -1)
return;
let bgLoad = Services.prefs.getBoolPref("browser.tabs.loadInBackground");

Просмотреть файл

@ -268,6 +268,7 @@ _BROWSER_FILES = \
browser_aboutSyncProgress.js \
browser_middleMouse_inherit.js \
redirect_bug623155.sjs \
browser_tabDrop.js \
$(NULL)
ifneq (cocoa,$(MOZ_WIDGET_TOOLKIT))

Просмотреть файл

@ -0,0 +1,71 @@
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
function test() {
waitForExplicitFinish();
let newTab = gBrowser.selectedTab = gBrowser.addTab("about:blank", {skipAnimation: true});
registerCleanupFunction(function () {
gBrowser.removeTab(newTab);
});
let scriptLoader = Cc["@mozilla.org/moz/jssubscript-loader;1"].
getService(Ci.mozIJSSubScriptLoader);
let chromeUtils = {};
scriptLoader.loadSubScript("chrome://mochikit/content/tests/SimpleTest/ChromeUtils.js", chromeUtils);
let tabContainer = gBrowser.tabContainer;
var receivedDropCount = 0;
function dropListener() {
receivedDropCount++;
if (receivedDropCount == triggeredDropCount) {
is(openedTabs, validDropCount, "correct number of tabs were opened");
executeSoon(finish);
}
}
tabContainer.addEventListener("drop", dropListener, false);
registerCleanupFunction(function () {
tabContainer.removeEventListener("drop", dropListener, false);
});
var openedTabs = 0;
function tabOpenListener(e) {
openedTabs++;
let tab = e.target;
executeSoon(function () {
gBrowser.removeTab(tab);
});
}
tabContainer.addEventListener("TabOpen", tabOpenListener, false);
registerCleanupFunction(function () {
tabContainer.removeEventListener("TabOpen", tabOpenListener, false);
});
var triggeredDropCount = 0;
var validDropCount = 0;
function drop(text, valid) {
triggeredDropCount++;
if (valid)
validDropCount++;
executeSoon(function () {
// A drop type of "link" onto an existing tab would normally trigger a
// load in that same tab, but tabbrowser code in _getDragTargetTab treats
// drops on the outer edges of a tab differently (loading a new tab
// instead). The events created by synthesizeDrop have all of their
// coordinates set to 0 (screenX/screenY), so they're treated as drops
// on the outer edge of the tab, thus they open new tabs.
chromeUtils.synthesizeDrop(newTab, newTab, [[{type: "text/plain", data: text}]], "link", window, EventUtils);
});
}
// Begin and end with valid drops to make sure we wait for all drops before
// ending the test
drop("mochi.test/first", true);
drop("javascript:'bad'");
drop("jAvascript:'bad'");
drop("space bad");
drop("mochi.test/second", true);
drop("data:text/html,bad");
drop("mochi.test/third", true);
}

Просмотреть файл

@ -109,7 +109,6 @@ _TEST_FILES = \
test_bug495300.html \
test_bug686942.html \
test_can_play_type.html \
test_closing_connections.html \
test_constants.html \
test_controls.html \
test_currentTime.html \
@ -176,6 +175,8 @@ endif
# test_mixed_principals.html
# Disabled since we don't play Wave files standalone, for now
# test_audioDocumentTitle.html
# Bug 634564:
# test_closing_connections.html \
# sample files
_TEST_FILES += \

Просмотреть файл

@ -4119,9 +4119,7 @@ nsDocShell::DisplayLoadError(nsresult aError, nsIURI *aURI,
// Display the error as a page or an alert prompt
NS_ENSURE_FALSE(messageStr.IsEmpty(), NS_ERROR_FAILURE);
// Note: For now, display an alert instead of an error page if we have no
// URI object. Missing URI objects are handled badly by session history.
if (mUseErrorPages && aURI) {
if (mUseErrorPages) {
// Display an error page
LoadErrorPage(aURI, aURL, errorPage.get(), error.get(),
messageStr.get(), cssClass.get(), aFailedChannel);
@ -4190,6 +4188,10 @@ nsDocShell::LoadErrorPage(nsIURI *aURI, const PRUnichar *aURL,
}
else if (aURL)
{
// We need a URI object to store a session history entry, so make up a URI
nsresult rv = NS_NewURI(getter_AddRefs(mFailedURI), "about:blank");
NS_ENSURE_SUCCESS(rv, rv);
CopyUTF16toUTF8(aURL, url);
}
else

Просмотреть файл

@ -2091,6 +2091,13 @@ nsDOMWindowUtils::GetFileReferences(const nsAString& aDatabaseName,
return NS_OK;
}
NS_IMETHODIMP
nsDOMWindowUtils::IsIncrementalGCEnabled(JSContext* cx, bool* aResult)
{
*aResult = js::IsIncrementalGCEnabled(JS_GetRuntime(cx));
return NS_OK;
}
NS_IMETHODIMP
nsDOMWindowUtils::StartPCCountProfiling(JSContext* cx)
{

Просмотреть файл

@ -135,6 +135,9 @@ static PRLogModuleInfo* gJSDiagnostics;
// doing the first GC.
#define NS_FIRST_GC_DELAY 10000 // ms
// Maximum amount of time that should elapse between incremental GC slices
#define NS_INTERSLICE_GC_DELAY 100 // ms
// The amount of time we wait between a request to CC (after GC ran)
// and doing the actual CC.
#define NS_CC_DELAY 5000 // ms
@ -154,6 +157,9 @@ static nsITimer *sCCTimer;
static PRTime sLastCCEndTime;
static bool sGCHasRun;
static bool sCCLockedOut;
static js::GCSliceCallback sPrevGCSliceCallback;
// The number of currently pending document loads. This count isn't
// guaranteed to always reflect reality and can't easily as we don't
@ -3274,6 +3280,11 @@ nsJSContext::CycleCollectNow(nsICycleCollectorListener *aListener,
return;
}
if (sCCLockedOut) {
// We're in the middle of an incremental GC; finish it first
nsJSContext::GarbageCollectNow(js::gcreason::CC_FORCED, nsGCNormal);
}
SAMPLE_LABEL("GC", "CycleCollectNow");
NS_TIME_FUNCTION_MIN(1.0);
@ -3357,7 +3368,7 @@ GCTimerFired(nsITimer *aTimer, void *aClosure)
NS_RELEASE(sGCTimer);
uintptr_t reason = reinterpret_cast<uintptr_t>(aClosure);
nsJSContext::GarbageCollectNow(static_cast<js::gcreason::Reason>(reason), nsGCNormal);
nsJSContext::GarbageCollectNow(static_cast<js::gcreason::Reason>(reason), nsGCIncremental);
}
void
@ -3375,6 +3386,9 @@ CCTimerFired(nsITimer *aTimer, void *aClosure)
if (sDidShutdown) {
return;
}
if (sCCLockedOut) {
return;
}
++sCCTimerFireCount;
if (sCCTimerFireCount < (NS_CC_DELAY / NS_CC_SKIPPABLE_DELAY)) {
PRUint32 suspected = nsCycleCollector_suspectedCount();
@ -3443,7 +3457,7 @@ nsJSContext::LoadEnd()
// static
void
nsJSContext::PokeGC(js::gcreason::Reason aReason)
nsJSContext::PokeGC(js::gcreason::Reason aReason, int aDelay)
{
if (sGCTimer) {
// There's already a timer for GC'ing, just return
@ -3460,9 +3474,11 @@ nsJSContext::PokeGC(js::gcreason::Reason aReason)
static bool first = true;
sGCTimer->InitWithFuncCallback(GCTimerFired, reinterpret_cast<void *>(aReason),
first
aDelay
? aDelay
: (first
? NS_FIRST_GC_DELAY
: NS_GC_DELAY,
: NS_GC_DELAY),
nsITimer::TYPE_ONE_SHOT);
first = false;
@ -3549,11 +3565,11 @@ nsJSContext::GC(js::gcreason::Reason aReason)
}
static void
DOMGCFinishedCallback(JSRuntime *rt, JSCompartment *comp, const char *status)
DOMGCSliceCallback(JSRuntime *aRt, js::GCProgress aProgress, const js::GCDescription &aDesc)
{
NS_ASSERTION(NS_IsMainThread(), "GCs must run on the main thread");
if (sPostGCEventsToConsole) {
if (aDesc.logMessage && sPostGCEventsToConsole) {
PRTime now = PR_Now();
PRTime delta = 0;
if (sFirstCollectionTime) {
@ -3565,13 +3581,30 @@ DOMGCFinishedCallback(JSRuntime *rt, JSCompartment *comp, const char *status)
NS_NAMED_LITERAL_STRING(kFmt, "GC(T+%.1f) %s");
nsString msg;
msg.Adopt(nsTextFormatter::smprintf(kFmt.get(),
double(delta) / PR_USEC_PER_SEC, status));
double(delta) / PR_USEC_PER_SEC,
aDesc.logMessage));
nsCOMPtr<nsIConsoleService> cs = do_GetService(NS_CONSOLESERVICE_CONTRACTID);
if (cs) {
cs->LogStringMessage(msg.get());
}
}
// Prevent cycle collections during incremental GC.
if (aProgress == js::GC_CYCLE_BEGIN) {
sCCLockedOut = true;
} else if (aProgress == js::GC_CYCLE_END) {
sCCLockedOut = false;
}
// The GC has more work to do, so schedule another GC slice.
if (aProgress == js::GC_SLICE_END) {
nsJSContext::KillGCTimer();
nsJSContext::KillCCTimer();
nsJSContext::PokeGC(js::gcreason::INTER_SLICE_GC, NS_INTERSLICE_GC_DELAY);
}
if (aProgress == js::GC_CYCLE_END) {
sCCollectedWaitingForGC = 0;
sCleanupSinceLastGC = false;
@ -3584,7 +3617,7 @@ DOMGCFinishedCallback(JSRuntime *rt, JSCompartment *comp, const char *status)
// result of last-ditch or MaybeGC. In both cases its
// probably a time of heavy activity and we want to delay
// the full GC, but we do want it to happen eventually.
if (comp) {
if (aDesc.isCompartment) {
nsJSContext::PokeGC(js::gcreason::POST_COMPARTMENT);
// We poked the GC, so we can kill any pending CC here.
@ -3592,20 +3625,24 @@ DOMGCFinishedCallback(JSRuntime *rt, JSCompartment *comp, const char *status)
}
} else {
// If this was a full GC, poke the CC to run soon.
if (!comp) {
if (!aDesc.isCompartment) {
sGCHasRun = true;
nsJSContext::MaybePokeCC();
}
}
// If we didn't end up scheduling a GC, make sure that we release GC buffers
// soon after canceling previous shrinking attempt
// soon after canceling previous shrinking attempt.
nsJSContext::KillShrinkGCBuffersTimer();
if (!sGCTimer) {
nsJSContext::PokeShrinkGCBuffers();
}
}
if (sPrevGCSliceCallback)
(*sPrevGCSliceCallback)(aRt, aProgress, aDesc);
}
// Script object mananagement - note duplicate implementation
// in nsJSRuntime below...
nsresult
@ -3697,6 +3734,7 @@ nsJSRuntime::Startup()
// initialize all our statics, so that we can restart XPCOM
sGCTimer = sCCTimer = nsnull;
sGCHasRun = false;
sCCLockedOut = false;
sLastCCEndTime = 0;
sPendingLoadCount = 0;
sLoadingInProgress = false;
@ -3768,10 +3806,27 @@ SetMemoryMaxPrefChangedCallback(const char* aPrefName, void* aClosure)
static int
SetMemoryGCModePrefChangedCallback(const char* aPrefName, void* aClosure)
{
bool enableCompartmentGC = Preferences::GetBool(aPrefName);
JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_MODE, enableCompartmentGC
? JSGC_MODE_COMPARTMENT
: JSGC_MODE_GLOBAL);
PRBool enableCompartmentGC = Preferences::GetBool("javascript.options.mem.gc_per_compartment");
PRBool enableIncrementalGC = Preferences::GetBool("javascript.options.mem.gc_incremental");
JSGCMode mode;
if (enableIncrementalGC) {
mode = JSGC_MODE_INCREMENTAL;
} else if (enableCompartmentGC) {
mode = JSGC_MODE_COMPARTMENT;
} else {
mode = JSGC_MODE_GLOBAL;
}
JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_MODE, mode);
return 0;
}
static int
SetMemoryGCSliceTimePrefChangedCallback(const char* aPrefName, void* aClosure)
{
PRInt32 pref = Preferences::GetInt(aPrefName, -1);
// handle overflow and negative pref values
if (pref > 0 && pref < 100000)
JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_SLICE_TIME_BUDGET, pref);
return 0;
}
@ -3858,7 +3913,7 @@ nsJSRuntime::Init()
// Let's make sure that our main thread is the same as the xpcom main thread.
NS_ASSERTION(NS_IsMainThread(), "bad");
::JS_SetGCFinishedCallback(sRuntime, DOMGCFinishedCallback);
sPrevGCSliceCallback = js::SetGCSliceCallback(sRuntime, DOMGCSliceCallback);
JSSecurityCallbacks *callbacks = JS_GetRuntimeSecurityCallbacks(sRuntime);
NS_ASSERTION(callbacks, "SecMan should have set security callbacks!");
@ -3903,6 +3958,16 @@ nsJSRuntime::Init()
SetMemoryGCModePrefChangedCallback("javascript.options.mem.gc_per_compartment",
nsnull);
Preferences::RegisterCallback(SetMemoryGCModePrefChangedCallback,
"javascript.options.mem.gc_incremental");
SetMemoryGCModePrefChangedCallback("javascript.options.mem.gc_incremental",
nsnull);
Preferences::RegisterCallback(SetMemoryGCSliceTimePrefChangedCallback,
"javascript.options.mem.gc_incremental_slice_ms");
SetMemoryGCSliceTimePrefChangedCallback("javascript.options.mem.gc_incremental_slice_ms",
nsnull);
nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
if (!obs)
return NS_ERROR_FAILURE;

Просмотреть файл

@ -188,7 +188,7 @@ public:
static void CycleCollectNow(nsICycleCollectorListener *aListener = nsnull,
PRInt32 aExtraForgetSkippableCalls = 0);
static void PokeGC(js::gcreason::Reason aReason);
static void PokeGC(js::gcreason::Reason aReason, int aDelay = 0);
static void KillGCTimer();
static void PokeShrinkGCBuffers();

Просмотреть файл

@ -70,7 +70,7 @@ interface nsIDOMFile;
interface nsIFile;
interface nsIDOMTouch;
[scriptable, uuid(ab6e9c71-8aa1-40bb-8bf9-65e16429055f)]
[scriptable, uuid(73b48170-55d5-11e1-b86c-0800200c9a66)]
interface nsIDOMWindowUtils : nsISupports {
/**
@ -992,6 +992,12 @@ interface nsIDOMWindowUtils : nsISupports {
[optional] out long aDBRefCnt,
[optional] out long aSliceRefCnt);
/**
* Return whether incremental GC has been disabled due to a binary add-on.
*/
[implicit_jscontext]
boolean isIncrementalGCEnabled();
/**
* Begin opcode-level profiling of all JavaScript execution in the window's
* runtime.

Просмотреть файл

@ -179,7 +179,7 @@ CreateNPObjectMember(NPP npp, JSContext *cx, JSObject *obj, NPObject *npobj,
static JSClass sNPObjectJSWrapperClass =
{
NPRUNTIME_JSCLASS_NAME,
JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE | JSCLASS_NEW_ENUMERATE,
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE | JSCLASS_NEW_ENUMERATE,
NPObjWrapper_AddProperty, NPObjWrapper_DelProperty,
NPObjWrapper_GetProperty, NPObjWrapper_SetProperty,
(JSEnumerateOp)NPObjWrapper_newEnumerate,

Просмотреть файл

@ -3683,10 +3683,13 @@ void nsPluginInstanceOwner::SetFrame(nsObjectFrame *aFrame)
container->SetCurrentImage(nsnull);
}
// If we had an old frame and we're not going to have a new one then
// we should unregister for some things.
#if defined(XP_MACOSX) && !defined(NP_NO_QUICKDRAW)
if (!aFrame) {
// Unregister scroll position listeners
// At this point we had a frame but it is going away and we're not getting a new one.
// Unregister for a scroll position listening, which is only required for Carbon
// event model plugins on Mac OS X. It's OK to unregister when we didn't register,
// so don't be strict about unregistering. Better to unregister when we didn't have to
// than to not unregister when we should.
for (nsIFrame* f = mObjectFrame; f; f = nsLayoutUtils::GetCrossDocParentFrame(f)) {
nsIScrollableFrame* sf = do_QueryFrame(f);
if (sf) {
@ -3694,14 +3697,19 @@ void nsPluginInstanceOwner::SetFrame(nsObjectFrame *aFrame)
}
}
}
#endif
// Make sure the old frame isn't holding a reference to us.
mObjectFrame->SetInstanceOwner(nsnull);
} else {
// Scroll position listening is only required for Carbon event model plugins on Mac OS X.
// Note that we probably have a crash bug in the way we register/unregister, bug 723190.
// Bug 723190 is mitigated by limiting registration to Carbon event model plugins.
#if defined(XP_MACOSX) && !defined(NP_NO_QUICKDRAW)
if (aFrame) {
// We didn't have an object frame before but we do now!
// We need to register a scroll position listener on every scrollable
// frame up to the top
// We didn't have an object frame before but we do now. We need to register a scroll
// position listener on every scrollable frame up to the top.
if (GetEventModel() == NPEventModelCarbon) {
for (nsIFrame* f = aFrame; f; f = nsLayoutUtils::GetCrossDocParentFrame(f)) {
nsIScrollableFrame* sf = do_QueryFrame(f);
if (sf) {
@ -3710,6 +3718,8 @@ void nsPluginInstanceOwner::SetFrame(nsObjectFrame *aFrame)
}
}
}
#endif
}
// Swap in the new frame (or no frame)
mObjectFrame = aFrame;

Просмотреть файл

@ -233,6 +233,8 @@ nsJSEventListener::HandleEvent(nsIDOMEvent* aEvent)
"JSEventListener has wrong script context?");
#endif
nsCOMPtr<nsIVariant> vrv;
xpc_UnmarkGrayObject(mScopeObject);
xpc_UnmarkGrayObject(mHandler);
rv = mContext->CallEventHandler(mTarget, mScopeObject, mHandler, iargv,
getter_AddRefs(vrv));

Просмотреть файл

@ -24,21 +24,20 @@ var ConsoleObserver = {
if (aTopic == "console-storage-cache-event") {
apiCallCount ++;
if (apiCallCount == 4) {
// remove the observer so we don't trigger this test again
Services.obs.removeObserver(this, "console-storage-cache-event");
try {
var tab = gBrowser.selectedTab;
let tab = gBrowser.selectedTab;
let browser = gBrowser.selectedBrowser;
let win = XPCNativeWrapper.unwrap(browser.contentWindow);
let windowID = getWindowId(win);
let messages = ConsoleAPIStorage.getEvents(windowID);
ok(messages.length >= 4, "Some messages found in the storage service");
ConsoleAPIStorage.clearEvents();
messages = ConsoleAPIStorage.getEvents(windowID);
ok(messages.length == 0, "Cleared Storage, no events found");
// remove the observer so we don't trigger this test again
Services.obs.removeObserver(this, "console-storage-cache-event");
is(messages.length, 0, "Cleared Storage");
// make sure a closed window's events are in fact removed from the
// storage cache
@ -52,7 +51,7 @@ var ConsoleObserver = {
executeSoon(function () {
// use the old windowID again to see if we have any stray cached messages
messages = ConsoleAPIStorage.getEvents(windowID);
ok(messages.length == 0, "0 events found, tab close is clearing the cache");
is(messages.length, 0, "tab close is clearing the cache");
finish();
});
} catch (ex) {
@ -60,17 +59,15 @@ var ConsoleObserver = {
dump(ex.stack + "\n\n\n");
}
}
}
}
};
function tearDown()
{
while (gBrowser.tabs.length > 1) {
while (gBrowser.tabs.length > 1)
gBrowser.removeCurrentTab();
}
}
function test()
{
@ -86,10 +83,7 @@ function test()
browser.addEventListener("DOMContentLoaded", function onLoad(event) {
browser.removeEventListener("DOMContentLoaded", onLoad, false);
executeSoon(function test_executeSoon() {
var contentWin = browser.contentWindow;
let win = XPCNativeWrapper.unwrap(contentWin);
let win = XPCNativeWrapper.unwrap(browser.contentWindow);
win.console.log("this", "is", "a", "log message");
win.console.info("this", "is", "a", "info message");
win.console.warn("this", "is", "a", "warn message");

Просмотреть файл

@ -107,6 +107,9 @@ struct Listener : PRCList
static void
Remove(JSContext* aCx, Listener* aListener)
{
if (js::IsIncrementalBarrierNeeded(aCx))
js::IncrementalValueBarrier(aListener->mListenerVal);
PR_REMOVE_LINK(aListener);
JS_free(aCx, aListener);
}

Просмотреть файл

@ -300,7 +300,7 @@ private:
JSClass Worker::sClass = {
"Worker",
JSCLASS_HAS_PRIVATE,
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize, NULL, NULL, NULL,
NULL, NULL, NULL, Trace, NULL
@ -415,7 +415,7 @@ private:
JSClass ChromeWorker::sClass = {
"ChromeWorker",
JSCLASS_HAS_PRIVATE,
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize, NULL, NULL, NULL,
NULL, NULL, NULL, Trace, NULL

Просмотреть файл

@ -799,7 +799,7 @@ private:
JSClass DedicatedWorkerGlobalScope::sClass = {
"DedicatedWorkerGlobalScope",
JSCLASS_GLOBAL_FLAGS | JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE,
JSCLASS_GLOBAL_FLAGS | JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE,
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, reinterpret_cast<JSResolveOp>(Resolve), JS_ConvertStub,
Finalize, NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL

Просмотреть файл

@ -220,7 +220,7 @@ private:
JSClass XMLHttpRequestUpload::sClass = {
"XMLHttpRequestUpload",
JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize,
NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL
@ -769,7 +769,7 @@ private:
JSClass XMLHttpRequest::sClass = {
"XMLHttpRequest",
JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize,
NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL

Просмотреть файл

@ -107,8 +107,8 @@
#define JSD_AUTOREG_ENTRY "JSDebugger Startup Observer"
#define JSD_STARTUP_ENTRY "JSDebugger Startup Observer"
static JSBool
jsds_GCCallbackProc (JSContext *cx, JSGCStatus status);
static void
jsds_GCSliceCallbackProc (JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc);
/*******************************************************************************
* global vars
@ -129,8 +129,8 @@ PRUint32 gFrameCount = 0;
#endif
static jsdService *gJsds = 0;
static JSGCCallback gLastGCProc = jsds_GCCallbackProc;
static JSGCStatus gGCStatus = JSGC_END;
static js::GCSliceCallback gPrevGCSliceCallback = jsds_GCSliceCallbackProc;
static bool gGCRunning = false;
static struct DeadScript {
PRCList links;
@ -460,11 +460,8 @@ jsds_FilterHook (JSDContext *jsdc, JSDThreadState *state)
*******************************************************************************/
static void
jsds_NotifyPendingDeadScripts (JSContext *cx)
jsds_NotifyPendingDeadScripts (JSRuntime *rt)
{
#ifdef CAUTIOUS_SCRIPTHOOK
JSRuntime *rt = JS_GetRuntime(cx);
#endif
jsdService *jsds = gJsds;
nsCOMPtr<jsdIScriptHook> hook;
@ -511,31 +508,23 @@ jsds_NotifyPendingDeadScripts (JSContext *cx)
}
}
static JSBool
jsds_GCCallbackProc (JSContext *cx, JSGCStatus status)
static void
jsds_GCSliceCallbackProc (JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc)
{
#ifdef DEBUG_verbose
printf ("new gc status is %i\n", status);
#endif
if (status == JSGC_END) {
/* just to guard against reentering. */
gGCStatus = JSGC_BEGIN;
if (progress == js::GC_CYCLE_END || progress == js::GC_SLICE_END) {
NS_ASSERTION(gGCRunning, "GC slice callback was missed");
while (gDeadScripts)
jsds_NotifyPendingDeadScripts (cx);
jsds_NotifyPendingDeadScripts (rt);
gGCRunning = false;
} else {
NS_ASSERTION(!gGCRunning, "should not re-enter GC");
gGCRunning = true;
}
gGCStatus = status;
if (gLastGCProc && !gLastGCProc (cx, status)) {
/*
* If gLastGCProc returns false, then the GC will abort without making
* another callback with status=JSGC_END, so set the status to JSGC_END
* here.
*/
gGCStatus = JSGC_END;
return JS_FALSE;
}
return JS_TRUE;
if (gPrevGCSliceCallback)
(*gPrevGCSliceCallback)(rt, progress, desc);
}
static uintN
@ -751,7 +740,7 @@ jsds_ScriptHookProc (JSDContext* jsdc, JSDScript* jsdscript, JSBool creating,
jsdis->Invalidate();
if (gGCStatus == JSGC_END) {
if (!gGCRunning) {
nsCOMPtr<jsdIScriptHook> hook;
gJsds->GetScriptHook(getter_AddRefs(hook));
if (!hook)
@ -2580,9 +2569,9 @@ jsdService::ActivateDebugger (JSRuntime *rt)
mRuntime = rt;
if (gLastGCProc == jsds_GCCallbackProc)
if (gPrevGCSliceCallback == jsds_GCSliceCallbackProc)
/* condition indicates that the callback proc has not been set yet */
gLastGCProc = JS_SetGCCallbackRT (rt, jsds_GCCallbackProc);
gPrevGCSliceCallback = js::SetGCSliceCallback (rt, jsds_GCSliceCallbackProc);
mCx = JSD_DebuggerOnForUser (rt, NULL, NULL);
if (!mCx)
@ -2652,19 +2641,14 @@ jsdService::Off (void)
return NS_ERROR_NOT_INITIALIZED;
if (gDeadScripts) {
if (gGCStatus != JSGC_END)
if (gGCRunning)
return NS_ERROR_NOT_AVAILABLE;
JSContext *cx = JSD_GetDefaultJSContext(mCx);
while (gDeadScripts)
jsds_NotifyPendingDeadScripts (cx);
jsds_NotifyPendingDeadScripts (JS_GetRuntime(cx));
}
/*
if (gLastGCProc != jsds_GCCallbackProc)
JS_SetGCCallbackRT (mRuntime, gLastGCProc);
*/
DeactivateDebugger();
#ifdef DEBUG
@ -3374,7 +3358,7 @@ jsdService::~jsdService()
mThrowHook = nsnull;
mTopLevelHook = nsnull;
mFunctionHook = nsnull;
gGCStatus = JSGC_END;
gGCRunning = false;
Off();
gJsds = nsnull;
}

Просмотреть файл

@ -119,7 +119,6 @@ CPPSRCS = \
jsfun.cpp \
jsgc.cpp \
jsgcmark.cpp \
jsgcstats.cpp \
jscrashreport.cpp \
jshash.cpp \
jsinfer.cpp \
@ -193,7 +192,6 @@ INSTALLED_HEADERS = \
jsfriendapi.h \
jsgc.h \
jscell.h \
jsgcstats.h \
jshash.h \
jslock.h \
json.h \

Просмотреть файл

@ -146,7 +146,7 @@ HashableValue::equals(const HashableValue &other) const
Class MapObject::class_ = {
"Map",
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_CACHED_PROTO(JSProto_Map),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
@ -297,7 +297,7 @@ js_InitMapClass(JSContext *cx, JSObject *obj)
Class SetObject::class_ = {
"Set",
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_CACHED_PROTO(JSProto_Set),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */

Просмотреть файл

@ -255,7 +255,7 @@ static JSClass sCDataProtoClass = {
static JSClass sCTypeClass = {
"CType",
JSCLASS_HAS_RESERVED_SLOTS(CTYPE_SLOTS),
JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(CTYPE_SLOTS),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CType::Finalize,
NULL, NULL, CType::ConstructData, CType::ConstructData, NULL,
@ -272,7 +272,7 @@ static JSClass sCDataClass = {
static JSClass sCClosureClass = {
"CClosure",
JSCLASS_HAS_RESERVED_SLOTS(CCLOSURE_SLOTS),
JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(CCLOSURE_SLOTS),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CClosure::Finalize,
NULL, NULL, NULL, NULL, NULL, NULL, CClosure::Trace, NULL

Просмотреть файл

@ -257,7 +257,7 @@ Parser::trace(JSTracer *trc)
{
ObjectBox *objbox = traceListHead;
while (objbox) {
MarkObjectRoot(trc, objbox->object, "parser.object");
MarkObjectRoot(trc, &objbox->object, "parser.object");
if (objbox->isFunctionBox)
static_cast<FunctionBox *>(objbox)->bindings.trace(trc);
objbox = objbox->traceLink;

Просмотреть файл

@ -266,6 +266,31 @@ HeapId::operator=(const HeapId &v)
return *this;
}
inline const Value &
ReadBarrieredValue::get() const
{
if (value.isObject())
JSObject::readBarrier(&value.toObject());
else if (value.isString())
JSString::readBarrier(value.toString());
else
JS_ASSERT(!value.isMarkable());
return value;
}
inline
ReadBarrieredValue::operator const Value &() const
{
return get();
}
inline JSObject &
ReadBarrieredValue::toObject() const
{
return get().toObject();
}
} /* namespace js */
#endif /* jsgc_barrier_inl_h___ */

Просмотреть файл

@ -406,6 +406,7 @@ class HeapId
bool operator!=(jsid id) const { return value != id; }
jsid get() const { return value; }
jsid *unsafeGet() { return &value; }
operator jsid() const { return value; }
private:
@ -456,6 +457,20 @@ class ReadBarriered
operator MarkablePtr<U>() const { return MarkablePtr<U>(value); }
};
class ReadBarrieredValue
{
Value value;
public:
ReadBarrieredValue() : value(UndefinedValue()) {}
ReadBarrieredValue(const Value &value) : value(value) {}
inline const Value &get() const;
inline operator const Value &() const;
inline JSObject &toObject() const;
};
}
#endif /* jsgc_barrier_h___ */

Просмотреть файл

@ -38,9 +38,10 @@
* ***** END LICENSE BLOCK ***** */
#include <stdio.h>
#include <ctype.h>
#include <stdarg.h>
#include "jscntxt.h"
#include "jscompartment.h"
#include "jscrashformat.h"
#include "jscrashreport.h"
#include "jsprf.h"
@ -69,78 +70,114 @@ ExplainReason(gcreason::Reason reason)
}
}
Statistics::ColumnInfo::ColumnInfo(const char *title, double t, double total)
: title(title)
void
Statistics::fmt(const char *f, ...)
{
JS_snprintf(str, sizeof(str), "%.1f", t);
JS_snprintf(totalStr, sizeof(totalStr), "%.1f", total);
width = 6;
}
va_list va;
size_t off = strlen(buffer);
Statistics::ColumnInfo::ColumnInfo(const char *title, double t)
: title(title)
{
JS_snprintf(str, sizeof(str), "%.1f", t);
strcpy(totalStr, "n/a");
width = 6;
va_start(va, f);
JS_vsnprintf(buffer + off, BUFFER_SIZE - off, f, va);
va_end(va);
}
Statistics::ColumnInfo::ColumnInfo(const char *title, unsigned int data)
: title(title)
{
JS_snprintf(str, sizeof(str), "%d", data);
strcpy(totalStr, "n/a");
width = 4;
}
Statistics::ColumnInfo::ColumnInfo(const char *title, const char *data)
: title(title)
{
JS_ASSERT(strlen(data) < sizeof(str));
strcpy(str, data);
strcpy(totalStr, "n/a ");
width = 0;
}
static const int NUM_COLUMNS = 17;
void
Statistics::makeTable(ColumnInfo *cols)
Statistics::fmtIfNonzero(const char *name, double t)
{
int i = 0;
if (t) {
if (needComma)
fmt(", ");
fmt("%s: %.1f", name, t);
needComma = true;
}
}
cols[i++] = ColumnInfo("Type", compartment ? "Comp" : "Glob");
void
Statistics::formatPhases(int64_t *times)
{
needComma = false;
fmtIfNonzero("mark", t(times[PHASE_MARK]));
fmtIfNonzero("mark-roots", t(times[PHASE_MARK_ROOTS]));
fmtIfNonzero("mark-delayed", t(times[PHASE_MARK_DELAYED]));
fmtIfNonzero("mark-other", t(times[PHASE_MARK_OTHER]));
fmtIfNonzero("sweep", t(times[PHASE_SWEEP]));
fmtIfNonzero("sweep-obj", t(times[PHASE_SWEEP_OBJECT]));
fmtIfNonzero("sweep-string", t(times[PHASE_SWEEP_STRING]));
fmtIfNonzero("sweep-script", t(times[PHASE_SWEEP_SCRIPT]));
fmtIfNonzero("sweep-shape", t(times[PHASE_SWEEP_SHAPE]));
fmtIfNonzero("discard-code", t(times[PHASE_DISCARD_CODE]));
fmtIfNonzero("discard-analysis", t(times[PHASE_DISCARD_ANALYSIS]));
fmtIfNonzero("xpconnect", t(times[PHASE_XPCONNECT]));
fmtIfNonzero("deallocate", t(times[PHASE_DESTROY]));
}
cols[i++] = ColumnInfo("Total", t(PHASE_GC), total(PHASE_GC));
cols[i++] = ColumnInfo("Wait", beginDelay(PHASE_MARK, PHASE_GC));
cols[i++] = ColumnInfo("Mark", t(PHASE_MARK), total(PHASE_MARK));
cols[i++] = ColumnInfo("Sweep", t(PHASE_SWEEP), total(PHASE_SWEEP));
cols[i++] = ColumnInfo("FinObj", t(PHASE_SWEEP_OBJECT), total(PHASE_SWEEP_OBJECT));
cols[i++] = ColumnInfo("FinStr", t(PHASE_SWEEP_STRING), total(PHASE_SWEEP_STRING));
cols[i++] = ColumnInfo("FinScr", t(PHASE_SWEEP_SCRIPT), total(PHASE_SWEEP_SCRIPT));
cols[i++] = ColumnInfo("FinShp", t(PHASE_SWEEP_SHAPE), total(PHASE_SWEEP_SHAPE));
cols[i++] = ColumnInfo("DisCod", t(PHASE_DISCARD_CODE), total(PHASE_DISCARD_CODE));
cols[i++] = ColumnInfo("DisAnl", t(PHASE_DISCARD_ANALYSIS), total(PHASE_DISCARD_ANALYSIS));
cols[i++] = ColumnInfo("XPCnct", t(PHASE_XPCONNECT), total(PHASE_XPCONNECT));
cols[i++] = ColumnInfo("Destry", t(PHASE_DESTROY), total(PHASE_DESTROY));
cols[i++] = ColumnInfo("End", endDelay(PHASE_GC, PHASE_DESTROY));
/* Except for the first and last, slices of less than 12ms are not reported. */
static const int64_t SLICE_MIN_REPORT_TIME = 12 * PRMJ_USEC_PER_MSEC;
cols[i++] = ColumnInfo("+Chu", counts[STAT_NEW_CHUNK]);
cols[i++] = ColumnInfo("-Chu", counts[STAT_DESTROY_CHUNK]);
const char *
Statistics::formatData()
{
buffer[0] = 0x00;
cols[i++] = ColumnInfo("Reason", ExplainReason(triggerReason));
int64_t total = 0, longest = 0;
JS_ASSERT(i == NUM_COLUMNS);
for (SliceData *slice = slices.begin(); slice != slices.end(); slice++) {
total += slice->duration();
if (slice->duration() > longest)
longest = slice->duration();
}
double mmu20 = computeMMU(20 * PRMJ_USEC_PER_MSEC);
double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
fmt("TotalTime: %.1fms, Type: %s", t(total), compartment ? "compartment" : "global");
fmt(", MMU(20ms): %d%%, MMU(50ms): %d%%", int(mmu20 * 100), int(mmu50 * 100));
if (slices.length() > 1)
fmt(", MaxPause: %.1f", t(longest));
else
fmt(", Reason: %s", ExplainReason(slices[0].reason));
if (wasReset)
fmt(", ***RESET***");
fmt(", +chunks: %d, -chunks: %d\n", counts[STAT_NEW_CHUNK], counts[STAT_DESTROY_CHUNK]);
if (slices.length() > 1) {
for (size_t i = 0; i < slices.length(); i++) {
int64_t width = slices[i].duration();
if (i != 0 && i != slices.length() - 1 && width < SLICE_MIN_REPORT_TIME)
continue;
fmt(" Slice %d @ %.1fms (Pause: %.1f, Reason: %s): ",
i,
t(slices[i].end - slices[0].start),
t(width),
ExplainReason(slices[i].reason));
formatPhases(slices[i].phaseTimes);
fmt("\n");
}
fmt(" Totals: ");
}
formatPhases(phaseTimes);
fmt("\n");
return buffer;
}
Statistics::Statistics(JSRuntime *rt)
: runtime(rt),
triggerReason(gcreason::NO_REASON)
startupTime(PRMJ_Now()),
fp(NULL),
fullFormat(false),
compartment(NULL),
wasReset(false),
needComma(false)
{
PodArrayZero(phaseTotals);
PodArrayZero(counts);
PodArrayZero(totals);
startupTime = PRMJ_Now();
char *env = getenv("MOZ_GCTIMER");
if (!env || strcmp(env, "none") == 0) {
@ -159,14 +196,6 @@ Statistics::Statistics(JSRuntime *rt)
fp = fopen(env, "a");
JS_ASSERT(fp);
fprintf(fp, " AppTime");
ColumnInfo cols[NUM_COLUMNS];
makeTable(cols);
for (int i = 0; i < NUM_COLUMNS; i++)
fprintf(fp, ", %*s", cols[i].width, cols[i].title);
fprintf(fp, "\n");
}
}
@ -174,13 +203,9 @@ Statistics::~Statistics()
{
if (fp) {
if (fullFormat) {
fprintf(fp, "------>TOTAL");
ColumnInfo cols[NUM_COLUMNS];
makeTable(cols);
for (int i = 0; i < NUM_COLUMNS && cols[i].totalStr[0]; i++)
fprintf(fp, ", %*s", cols[i].width, cols[i].totalStr);
fprintf(fp, "\n");
buffer[0] = 0x00;
formatPhases(phaseTotals);
fprintf(fp, "TOTALS\n%s\n\n-------\n", buffer);
}
if (fp != stdout && fp != stderr)
@ -188,120 +213,65 @@ Statistics::~Statistics()
}
}
struct GCCrashData
{
int isRegen;
int isCompartment;
};
void
Statistics::beginGC(JSCompartment *comp, gcreason::Reason reason)
{
compartment = comp;
PodArrayZero(phaseStarts);
PodArrayZero(phaseEnds);
PodArrayZero(phaseTimes);
triggerReason = reason;
beginPhase(PHASE_GC);
Probes::GCStart();
GCCrashData crashData;
crashData.isCompartment = !!compartment;
crash::SaveCrashData(crash::JS_CRASH_TAG_GC, &crashData, sizeof(crashData));
}
double
Statistics::t(Phase phase)
Statistics::t(int64_t t)
{
return double(phaseTimes[phase]) / PRMJ_USEC_PER_MSEC;
return double(t) / PRMJ_USEC_PER_MSEC;
}
double
Statistics::total(Phase phase)
int64_t
Statistics::gcDuration()
{
return double(totals[phase]) / PRMJ_USEC_PER_MSEC;
}
double
Statistics::beginDelay(Phase phase1, Phase phase2)
{
return double(phaseStarts[phase1] - phaseStarts[phase2]) / PRMJ_USEC_PER_MSEC;
}
double
Statistics::endDelay(Phase phase1, Phase phase2)
{
return double(phaseEnds[phase1] - phaseEnds[phase2]) / PRMJ_USEC_PER_MSEC;
}
void
Statistics::statsToString(char *buffer, size_t size)
{
JS_ASSERT(size);
buffer[0] = 0x00;
ColumnInfo cols[NUM_COLUMNS];
makeTable(cols);
size_t pos = 0;
for (int i = 0; i < NUM_COLUMNS; i++) {
int len = strlen(cols[i].title) + 1 + strlen(cols[i].str);
if (i > 0)
len += 2;
if (pos + len >= size)
break;
if (i > 0)
strcat(buffer, ", ");
strcat(buffer, cols[i].title);
strcat(buffer, ":");
strcat(buffer, cols[i].str);
pos += len;
}
return slices.back().end - slices[0].start;
}
void
Statistics::printStats()
{
if (fullFormat) {
fprintf(fp, "%12.0f", double(phaseStarts[PHASE_GC] - startupTime) / PRMJ_USEC_PER_MSEC);
ColumnInfo cols[NUM_COLUMNS];
makeTable(cols);
for (int i = 0; i < NUM_COLUMNS; i++)
fprintf(fp, ", %*s", cols[i].width, cols[i].str);
fprintf(fp, "\n");
fprintf(fp, "GC(T+%.3fs) %s\n",
t(slices[0].start - startupTime) / 1000.0,
formatData());
} else {
fprintf(fp, "%f %f %f\n",
t(PHASE_GC), t(PHASE_MARK), t(PHASE_SWEEP));
t(gcDuration()),
t(phaseTimes[PHASE_MARK]),
t(phaseTimes[PHASE_SWEEP]));
}
fflush(fp);
}
void
Statistics::beginGC()
{
PodArrayZero(phaseStarts);
PodArrayZero(phaseTimes);
slices.clearAndFree();
wasReset = false;
Probes::GCStart();
}
void
Statistics::endGC()
{
Probes::GCEnd();
endPhase(PHASE_GC);
crash::SnapshotGCStack();
for (int i = 0; i < PHASE_LIMIT; i++)
totals[i] += phaseTimes[i];
phaseTotals[i] += phaseTimes[i];
if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) {
(*cb)(JS_TELEMETRY_GC_REASON, triggerReason);
(*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, compartment ? 1 : 0);
(*cb)(JS_TELEMETRY_GC_MS, t(PHASE_GC));
(*cb)(JS_TELEMETRY_GC_MARK_MS, t(PHASE_MARK));
(*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(PHASE_SWEEP));
}
(*cb)(JS_TELEMETRY_GC_MS, t(gcDuration()));
(*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK]));
(*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_SWEEP]));
(*cb)(JS_TELEMETRY_GC_RESET, wasReset);
(*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gcIncrementalEnabled);
if (JSGCFinishedCallback cb = runtime->gcFinishedCallback) {
char buffer[1024];
statsToString(buffer, sizeof(buffer));
(*cb)(runtime, compartment, buffer);
double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
(*cb)(JS_TELEMETRY_GC_MMU_50, mmu50 * 100);
}
if (fp)
@ -310,6 +280,47 @@ Statistics::endGC()
PodArrayZero(counts);
}
void
Statistics::beginSlice(JSCompartment *comp, gcreason::Reason reason)
{
compartment = comp;
bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
if (first)
beginGC();
SliceData data(reason, PRMJ_Now());
(void) slices.append(data); /* Ignore any OOMs here. */
if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
(*cb)(JS_TELEMETRY_GC_REASON, reason);
if (GCSliceCallback cb = runtime->gcSliceCallback) {
GCDescription desc(NULL, !!compartment);
(*cb)(runtime, first ? GC_CYCLE_BEGIN : GC_SLICE_BEGIN, desc);
}
}
void
Statistics::endSlice()
{
slices.back().end = PRMJ_Now();
if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
(*cb)(JS_TELEMETRY_GC_SLICE_MS, t(slices.back().end - slices.back().start));
bool last = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
if (last)
endGC();
if (GCSliceCallback cb = runtime->gcSliceCallback) {
if (last)
(*cb)(runtime, GC_CYCLE_END, GCDescription(formatData(), !!compartment));
else
(*cb)(runtime, GC_SLICE_END, GCDescription(NULL, !!compartment));
}
}
void
Statistics::beginPhase(Phase phase)
{
@ -324,8 +335,10 @@ Statistics::beginPhase(Phase phase)
void
Statistics::endPhase(Phase phase)
{
phaseEnds[phase] = PRMJ_Now();
phaseTimes[phase] += phaseEnds[phase] - phaseStarts[phase];
int64_t now = PRMJ_Now();
int64_t t = now - phaseStarts[phase];
slices.back().phaseTimes[phase] += t;
phaseTimes[phase] += t;
if (phase == gcstats::PHASE_MARK)
Probes::GCEndMarkPhase();
@ -333,5 +346,44 @@ Statistics::endPhase(Phase phase)
Probes::GCEndSweepPhase();
}
/*
* MMU (minimum mutator utilization) is a measure of how much garbage collection
* is affecting the responsiveness of the system. MMU measurements are given
* with respect to a certain window size. If we report MMU(50ms) = 80%, then
* that means that, for any 50ms window of time, at least 80% of the window is
* devoted to the mutator. In other words, the GC is running for at most 20% of
* the window, or 10ms. The GC can run multiple slices during the 50ms window
* as long as the total time it spends is at most 10ms.
*/
double
Statistics::computeMMU(int64_t window)
{
JS_ASSERT(!slices.empty());
int64_t gc = slices[0].end - slices[0].start;
int64_t gcMax = gc;
if (gc >= window)
return 0.0;
int startIndex = 0;
for (size_t endIndex = 1; endIndex < slices.length(); endIndex++) {
gc += slices[endIndex].end - slices[endIndex].start;
while (slices[endIndex].end - slices[startIndex].end >= window) {
gc -= slices[startIndex].end - slices[startIndex].start;
startIndex++;
}
int64_t cur = gc;
if (slices[endIndex].end - slices[startIndex].start > window)
cur -= (slices[endIndex].end - slices[startIndex].start - window);
if (cur > gcMax)
gcMax = cur;
}
return double(window - gcMax) / window;
}
} /* namespace gcstats */
} /* namespace js */

Просмотреть файл

@ -52,8 +52,10 @@ namespace js {
namespace gcstats {
enum Phase {
PHASE_GC,
PHASE_MARK,
PHASE_MARK_ROOTS,
PHASE_MARK_DELAYED,
PHASE_MARK_OTHER,
PHASE_SWEEP,
PHASE_SWEEP_OBJECT,
PHASE_SWEEP_STRING,
@ -74,16 +76,20 @@ enum Stat {
STAT_LIMIT
};
static const size_t BUFFER_SIZE = 8192;
struct Statistics {
Statistics(JSRuntime *rt);
~Statistics();
void beginGC(JSCompartment *comp, gcreason::Reason reason);
void endGC();
void beginPhase(Phase phase);
void endPhase(Phase phase);
void beginSlice(JSCompartment *comp, gcreason::Reason reason);
void endSlice();
void reset() { wasReset = true; }
void count(Stat s) {
JS_ASSERT(s < STAT_LIMIT);
counts[s]++;
@ -92,48 +98,64 @@ struct Statistics {
private:
JSRuntime *runtime;
uint64_t startupTime;
int64_t startupTime;
FILE *fp;
bool fullFormat;
gcreason::Reason triggerReason;
JSCompartment *compartment;
bool wasReset;
uint64_t phaseStarts[PHASE_LIMIT];
uint64_t phaseEnds[PHASE_LIMIT];
uint64_t phaseTimes[PHASE_LIMIT];
uint64_t totals[PHASE_LIMIT];
struct SliceData {
SliceData(gcreason::Reason reason, int64_t start)
: reason(reason), start(start)
{
PodArrayZero(phaseTimes);
}
gcreason::Reason reason;
int64_t start, end;
int64_t phaseTimes[PHASE_LIMIT];
int64_t duration() const { return end - start; }
};
Vector<SliceData, 8, SystemAllocPolicy> slices;
/* Most recent time when the given phase started. */
int64_t phaseStarts[PHASE_LIMIT];
/* Total time in a given phase for this GC. */
int64_t phaseTimes[PHASE_LIMIT];
/* Total time in a given phase over all GCs. */
int64_t phaseTotals[PHASE_LIMIT];
/* Number of events of this type for this GC. */
unsigned int counts[STAT_LIMIT];
double t(Phase phase);
double total(Phase phase);
double beginDelay(Phase phase1, Phase phase2);
double endDelay(Phase phase1, Phase phase2);
char buffer[BUFFER_SIZE];
bool needComma;
void beginGC();
void endGC();
int64_t gcDuration();
double t(int64_t t);
void printStats();
void statsToString(char *buffer, size_t size);
void fmt(const char *f, ...);
void fmtIfNonzero(const char *name, double t);
void formatPhases(int64_t *times);
const char *formatData();
struct ColumnInfo {
const char *title;
char str[32];
char totalStr[32];
int width;
ColumnInfo() {}
ColumnInfo(const char *title, double t, double total);
ColumnInfo(const char *title, double t);
ColumnInfo(const char *title, unsigned int data);
ColumnInfo(const char *title, const char *data);
double computeMMU(int64_t resolution);
};
void makeTable(ColumnInfo *cols);
};
struct AutoGC {
AutoGC(Statistics &stats, JSCompartment *comp, gcreason::Reason reason
struct AutoGCSlice {
AutoGCSlice(Statistics &stats, JSCompartment *comp, gcreason::Reason reason
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginGC(comp, reason); }
~AutoGC() { stats.endGC(); }
: stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginSlice(comp, reason); }
~AutoGCSlice() { stats.endSlice(); }
Statistics &stats;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER

Просмотреть файл

@ -723,8 +723,6 @@ JSRuntime::JSRuntime()
gcMaxBytes(0),
gcMaxMallocBytes(0),
gcNumArenasFreeCommitted(0),
gcNumber(0),
gcIncrementalTracer(NULL),
gcVerifyData(NULL),
gcChunkAllocationSinceLastGC(false),
gcNextFullGCTime(0),
@ -733,12 +731,20 @@ JSRuntime::JSRuntime()
gcIsNeeded(0),
gcWeakMapList(NULL),
gcStats(thisFromCtor()),
gcNumber(0),
gcStartNumber(0),
gcTriggerReason(gcreason::NO_REASON),
gcTriggerCompartment(NULL),
gcCurrentCompartment(NULL),
gcCheckCompartment(NULL),
gcIncrementalState(gc::NO_INCREMENTAL),
gcCompartmentCreated(false),
gcLastMarkSlice(false),
gcInterFrameGC(0),
gcSliceBudget(SliceBudget::Unlimited),
gcIncrementalEnabled(true),
gcIncrementalCompartment(NULL),
gcPoke(false),
gcMarkAndSweep(false),
gcRunning(false),
#ifdef JS_GC_ZEAL
gcZeal_(0),
@ -747,7 +753,7 @@ JSRuntime::JSRuntime()
gcDebugCompartmentGC(false),
#endif
gcCallback(NULL),
gcFinishedCallback(NULL),
gcSliceCallback(NULL),
gcMallocBytes(0),
gcBlackRootsTraceOp(NULL),
gcBlackRootsData(NULL),
@ -814,6 +820,9 @@ JSRuntime::init(uint32_t maxbytes)
if (!js_InitGC(this, maxbytes))
return false;
if (!gcMarker.init())
return false;
if (!(atomsCompartment = this->new_<JSCompartment>(this)) ||
!atomsCompartment->init(NULL) ||
!compartments.append(atomsCompartment)) {
@ -2437,13 +2446,7 @@ JS_SetExtraGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data)
JS_PUBLIC_API(void)
JS_TracerInit(JSTracer *trc, JSContext *cx, JSTraceCallback callback)
{
trc->runtime = cx->runtime;
trc->context = cx;
trc->callback = callback;
trc->debugPrinter = NULL;
trc->debugPrintArg = NULL;
trc->debugPrintIndex = size_t(-1);
trc->eagerlyTraceWeakMaps = true;
InitTracer(trc, cx->runtime, cx, callback);
}
JS_PUBLIC_API(void)
@ -2875,8 +2878,7 @@ JS_CompartmentGC(JSContext *cx, JSCompartment *comp)
/* We cannot GC the atoms compartment alone; use a full GC instead. */
JS_ASSERT(comp != cx->runtime->atomsCompartment);
js::gc::VerifyBarriers(cx, true);
js_GC(cx, comp, GC_NORMAL, gcreason::API);
GC(cx, comp, GC_NORMAL, gcreason::API);
}
JS_PUBLIC_API(void)
@ -2914,7 +2916,6 @@ JS_PUBLIC_API(JSBool)
JS_IsAboutToBeFinalized(void *thing)
{
gc::Cell *t = static_cast<gc::Cell *>(thing);
JS_ASSERT(!t->compartment()->rt->gcIncrementalTracer);
return IsAboutToBeFinalized(t);
}
@ -2931,11 +2932,15 @@ JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32_t value)
case JSGC_MAX_MALLOC_BYTES:
rt->setGCMaxMallocBytes(value);
break;
case JSGC_SLICE_TIME_BUDGET:
rt->gcSliceBudget = SliceBudget::TimeBudget(value);
break;
default:
JS_ASSERT(key == JSGC_MODE);
rt->gcMode = JSGCMode(value);
JS_ASSERT(rt->gcMode == JSGC_MODE_GLOBAL ||
rt->gcMode == JSGC_MODE_COMPARTMENT);
rt->gcMode == JSGC_MODE_COMPARTMENT ||
rt->gcMode == JSGC_MODE_INCREMENTAL);
return;
}
}
@ -2956,9 +2961,11 @@ JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key)
return uint32_t(rt->gcChunkPool.getEmptyCount());
case JSGC_TOTAL_CHUNKS:
return uint32_t(rt->gcChunkSet.count() + rt->gcChunkPool.getEmptyCount());
case JSGC_SLICE_TIME_BUDGET:
return uint32_t(rt->gcSliceBudget > 0 ? rt->gcSliceBudget / PRMJ_USEC_PER_MSEC : 0);
default:
JS_ASSERT(key == JSGC_NUMBER);
return rt->gcNumber;
return uint32_t(rt->gcNumber);
}
}
@ -6609,7 +6616,16 @@ JS_AbortIfWrongThread(JSRuntime *rt)
JS_PUBLIC_API(void)
JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency, JSBool compartment)
{
bool schedule = zeal >= js::gc::ZealAllocThreshold && zeal < js::gc::ZealVerifierThreshold;
#ifdef JS_GC_ZEAL
const char *env = getenv("JS_GC_ZEAL");
if (env) {
zeal = atoi(env);
frequency = 1;
compartment = false;
}
#endif
bool schedule = zeal >= js::gc::ZealAllocValue;
cx->runtime->gcZeal_ = zeal;
cx->runtime->gcZealFrequency = frequency;
cx->runtime->gcNextScheduled = schedule ? frequency : 0;

Просмотреть файл

@ -1029,7 +1029,7 @@ class AutoEnumStateRooter : private AutoGCRooter
protected:
void trace(JSTracer *trc);
JSObject * const obj;
JSObject *obj;
private:
Value stateValue;
@ -1428,8 +1428,11 @@ typedef JSBool
(* JSContextCallback)(JSContext *cx, uintN contextOp);
typedef enum JSGCStatus {
/* These callbacks happen outside the GC lock. */
JSGC_BEGIN,
JSGC_END,
/* These callbacks happen within the GC lock. */
JSGC_MARK_END,
JSGC_FINALIZE_END
} JSGCStatus;
@ -3290,7 +3293,10 @@ typedef enum JSGCParamKey {
JSGC_UNUSED_CHUNKS = 7,
/* Total number of allocated GC chunks. */
JSGC_TOTAL_CHUNKS = 8
JSGC_TOTAL_CHUNKS = 8,
/* Max milliseconds to spend in an incremental GC slice. */
JSGC_SLICE_TIME_BUDGET = 9
} JSGCParamKey;
typedef enum JSGCMode {
@ -3298,7 +3304,13 @@ typedef enum JSGCMode {
JSGC_MODE_GLOBAL = 0,
/* Perform per-compartment GCs until too much garbage has accumulated. */
JSGC_MODE_COMPARTMENT = 1
JSGC_MODE_COMPARTMENT = 1,
/*
* Collect in short time slices rather than all at once. Implies
* JSGC_MODE_COMPARTMENT.
*/
JSGC_MODE_INCREMENTAL = 2
} JSGCMode;
extern JS_PUBLIC_API(void)
@ -3393,6 +3405,8 @@ struct JSClass {
object in prototype chain
passed in via *objp in/out
parameter */
#define JSCLASS_IMPLEMENTS_BARRIERS (1<<5) /* Correctly implements GC read
and write barriers */
#define JSCLASS_DOCUMENT_OBSERVER (1<<6) /* DOM document observer */
/*

Просмотреть файл

@ -386,15 +386,20 @@ js_TraceAtomState(JSTracer *trc)
JSAtomState *state = &rt->atomState;
if (rt->gcKeepAtoms) {
for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront())
MarkStringRoot(trc, r.front().asPtr(), "locked_atom");
for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
JSAtom *tmp = r.front().asPtr();
MarkStringRoot(trc, &tmp, "locked_atom");
JS_ASSERT(tmp == r.front().asPtr());
}
} else {
for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
AtomStateEntry entry = r.front();
if (!entry.isTagged())
continue;
MarkStringRoot(trc, entry.asPtr(), "interned_atom");
JSAtom *tmp = entry.asPtr();
MarkStringRoot(trc, &tmp, "interned_atom");
JS_ASSERT(tmp == entry.asPtr());
}
}
}

Просмотреть файл

@ -282,10 +282,10 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
c->clearTraps(cx);
JS_ClearAllWatchPoints(cx);
js_GC(cx, NULL, GC_NORMAL, gcreason::LAST_CONTEXT);
GC(cx, NULL, GC_NORMAL, gcreason::LAST_CONTEXT);
} else if (mode == JSDCM_FORCE_GC) {
js_GC(cx, NULL, GC_NORMAL, gcreason::DESTROY_CONTEXT);
GC(cx, NULL, GC_NORMAL, gcreason::DESTROY_CONTEXT);
} else if (mode == JSDCM_MAYBE_GC) {
JS_MaybeGC(cx);
}
@ -875,7 +875,7 @@ js_InvokeOperationCallback(JSContext *cx)
JS_ATOMIC_SET(&rt->interrupt, 0);
if (rt->gcIsNeeded)
js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL, rt->gcTriggerReason);
GCSlice(cx, rt->gcTriggerCompartment, GC_NORMAL, rt->gcTriggerReason);
#ifdef JS_THREADSAFE
/*
@ -1278,7 +1278,7 @@ JSContext::mark(JSTracer *trc)
/* Mark other roots-by-definition in the JSContext. */
if (globalObject && !hasRunOption(JSOPTION_UNROOTED_GLOBAL))
MarkObjectRoot(trc, globalObject, "global object");
MarkObjectRoot(trc, &globalObject, "global object");
if (isExceptionPending())
MarkValueRoot(trc, &exception, "exception");

Просмотреть файл

@ -304,24 +304,25 @@ struct JSRuntime : js::RuntimeFriendFields
* in MaybeGC.
*/
volatile uint32_t gcNumArenasFreeCommitted;
uint32_t gcNumber;
js::GCMarker *gcIncrementalTracer;
js::FullGCMarker gcMarker;
void *gcVerifyData;
bool gcChunkAllocationSinceLastGC;
int64_t gcNextFullGCTime;
int64_t gcJitReleaseTime;
JSGCMode gcMode;
volatile uintptr_t gcBarrierFailed;
volatile uintptr_t gcIsNeeded;
js::WeakMapBase *gcWeakMapList;
js::gcstats::Statistics gcStats;
/* Incremented on every GC slice. */
uint64_t gcNumber;
/* The gcNumber at the time of the most recent GC's first slice. */
uint64_t gcStartNumber;
/* The reason that an interrupt-triggered GC should be called. */
js::gcreason::Reason gcTriggerReason;
/* Pre-allocated space for the GC mark stack. */
uintptr_t gcMarkStackArray[js::MARK_STACK_LENGTH];
/*
* Compartment that triggered GC. If more than one Compatment need GC,
* gcTriggerCompartment is reset to NULL and a global GC is performed.
@ -337,6 +338,53 @@ struct JSRuntime : js::RuntimeFriendFields
*/
JSCompartment *gcCheckCompartment;
/*
* The current incremental GC phase. During non-incremental GC, this is
* always NO_INCREMENTAL.
*/
js::gc::State gcIncrementalState;
/* Indicates that a new compartment was created during incremental GC. */
bool gcCompartmentCreated;
/* Indicates that the last incremental slice exhausted the mark stack. */
bool gcLastMarkSlice;
/*
* Indicates that a GC slice has taken place in the middle of an animation
* frame, rather than at the beginning. In this case, the next slice will be
* delayed so that we don't get back-to-back slices.
*/
volatile uintptr_t gcInterFrameGC;
/* Default budget for incremental GC slice. See SliceBudget in jsgc.h. */
int64_t gcSliceBudget;
/*
* We disable incremental GC if we encounter a js::Class with a trace hook
* that does not implement write barriers.
*/
bool gcIncrementalEnabled;
/* Compartment that is undergoing an incremental GC. */
JSCompartment *gcIncrementalCompartment;
/*
* We save all conservative scanned roots in this vector so that
* conservative scanning can be "replayed" deterministically. In DEBUG mode,
* this allows us to run a non-incremental GC after every incremental GC to
* ensure that no objects were missed.
*/
#ifdef DEBUG
struct SavedGCRoot {
void *thing;
JSGCTraceKind kind;
SavedGCRoot(void *thing, JSGCTraceKind kind) : thing(thing), kind(kind) {}
};
js::Vector<SavedGCRoot, 0, js::SystemAllocPolicy> gcSavedRoots;
#endif
/*
* We can pack these flags as only the GC thread writes to them. Atomic
* updates to packed bytes are not guaranteed, so stores issued by one
@ -344,7 +392,6 @@ struct JSRuntime : js::RuntimeFriendFields
* other threads.
*/
bool gcPoke;
bool gcMarkAndSweep;
bool gcRunning;
/*
@ -353,7 +400,7 @@ struct JSRuntime : js::RuntimeFriendFields
* gcNextScheduled is decremented. When it reaches zero, we do either a
* full or a compartmental GC, based on gcDebugCompartmentGC.
*
* At this point, if gcZeal_ >= 2 then gcNextScheduled is reset to the
* At this point, if gcZeal_ == 2 then gcNextScheduled is reset to the
* value of gcZealFrequency. Otherwise, no additional GCs take place.
*
* You can control these values in several ways:
@ -361,9 +408,8 @@ struct JSRuntime : js::RuntimeFriendFields
* - Call gczeal() or schedulegc() from inside shell-executed JS code
* (see the help for details)
*
* Additionally, if gzZeal_ == 1 then we perform GCs in select places
* (during MaybeGC and whenever a GC poke happens). This option is mainly
* useful to embedders.
* If gzZeal_ == 1 then we perform GCs in select places (during MaybeGC and
* whenever a GC poke happens). This option is mainly useful to embedders.
*
* We use gcZeal_ == 4 to enable write barrier verification. See the comment
* in jsgc.cpp for more information about this.
@ -378,7 +424,7 @@ struct JSRuntime : js::RuntimeFriendFields
bool needZealousGC() {
if (gcNextScheduled > 0 && --gcNextScheduled == 0) {
if (gcZeal() >= js::gc::ZealAllocThreshold && gcZeal() < js::gc::ZealVerifierThreshold)
if (gcZeal() == js::gc::ZealAllocValue)
gcNextScheduled = gcZealFrequency;
return true;
}
@ -390,7 +436,7 @@ struct JSRuntime : js::RuntimeFriendFields
#endif
JSGCCallback gcCallback;
JSGCFinishedCallback gcFinishedCallback;
js::GCSliceCallback gcSliceCallback;
private:
/*

Просмотреть файл

@ -73,7 +73,6 @@ JSCompartment::JSCompartment(JSRuntime *rt)
: rt(rt),
principals(NULL),
needsBarrier_(false),
gcIncrementalTracer(NULL),
gcBytes(0),
gcTriggerBytes(0),
gcLastBytes(0),
@ -128,6 +127,9 @@ JSCompartment::init(JSContext *cx)
if (!scriptFilenameTable.init())
return false;
if (!barrierMarker_.init())
return false;
return debuggees.init();
}
@ -435,7 +437,8 @@ JSCompartment::markTypes(JSTracer *trc)
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
MarkScriptRoot(trc, script, "mark_types_script");
MarkScriptRoot(trc, &script, "mark_types_script");
JS_ASSERT(script == i.get<JSScript>());
}
for (size_t thingKind = FINALIZE_OBJECT0;
@ -443,46 +446,23 @@ JSCompartment::markTypes(JSTracer *trc)
thingKind++) {
for (CellIterUnderGC i(this, AllocKind(thingKind)); !i.done(); i.next()) {
JSObject *object = i.get<JSObject>();
if (object->hasSingletonType())
MarkObjectRoot(trc, object, "mark_types_singleton");
if (object->hasSingletonType()) {
MarkObjectRoot(trc, &object, "mark_types_singleton");
JS_ASSERT(object == i.get<JSObject>());
}
}
}
for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next())
MarkTypeObjectRoot(trc, i.get<types::TypeObject>(), "mark_types_scan");
for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next()) {
types::TypeObject *type = i.get<types::TypeObject>();
MarkTypeObjectRoot(trc, &type, "mark_types_scan");
JS_ASSERT(type == i.get<types::TypeObject>());
}
}
void
JSCompartment::sweep(JSContext *cx, bool releaseTypes)
JSCompartment::discardJitCode(JSContext *cx)
{
/* Remove dead wrappers from the table. */
for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
JS_ASSERT_IF(IsAboutToBeFinalized(e.front().key) &&
!IsAboutToBeFinalized(e.front().value),
e.front().key.isString());
if (IsAboutToBeFinalized(e.front().key) ||
IsAboutToBeFinalized(e.front().value)) {
e.removeFront();
}
}
/* Remove dead references held weakly by the compartment. */
sweepBaseShapeTable(cx);
sweepInitialShapeTable(cx);
sweepNewTypeObjectTable(cx, newTypeObjects);
sweepNewTypeObjectTable(cx, lazyTypeObjects);
if (emptyTypeObject && IsAboutToBeFinalized(emptyTypeObject))
emptyTypeObject = NULL;
newObjectCache.reset();
sweepBreakpoints(cx);
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_CODE);
/*
* Kick all frames on the stack into the interpreter, and release all JIT
* code in the compartment.
@ -504,6 +484,41 @@ JSCompartment::sweep(JSContext *cx, bool releaseTypes)
#endif
}
void
JSCompartment::sweep(JSContext *cx, bool releaseTypes)
{
/* Remove dead wrappers from the table. */
for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
JS_ASSERT_IF(IsAboutToBeFinalized(e.front().key) &&
!IsAboutToBeFinalized(e.front().value),
e.front().key.isString());
if (IsAboutToBeFinalized(e.front().key) ||
IsAboutToBeFinalized(e.front().value)) {
e.removeFront();
}
}
/* Remove dead references held weakly by the compartment. */
regExps.sweep(rt);
sweepBaseShapeTable(cx);
sweepInitialShapeTable(cx);
sweepNewTypeObjectTable(cx, newTypeObjects);
sweepNewTypeObjectTable(cx, lazyTypeObjects);
if (emptyTypeObject && IsAboutToBeFinalized(emptyTypeObject))
emptyTypeObject = NULL;
newObjectCache.reset();
sweepBreakpoints(cx);
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_CODE);
discardJitCode(cx);
}
if (!activeAnalysis) {
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_ANALYSIS);
@ -555,8 +570,6 @@ JSCompartment::sweep(JSContext *cx, bool releaseTypes)
void
JSCompartment::purge(JSContext *cx)
{
arenas.purge();
regExps.purge();
dtoaCache.purge();
/*
@ -770,13 +783,6 @@ JSCompartment::sweepBreakpoints(JSContext *cx)
}
}
GCMarker *
JSCompartment::createBarrierTracer()
{
JS_ASSERT(!gcIncrementalTracer);
return NULL;
}
size_t
JSCompartment::sizeOfShapeTable(JSMallocSizeOfFun mallocSizeOf)
{

Просмотреть файл

@ -46,7 +46,6 @@
#include "jscntxt.h"
#include "jsfun.h"
#include "jsgc.h"
#include "jsgcstats.h"
#include "jsobj.h"
#include "jsscope.h"
#include "vm/GlobalObject.h"
@ -163,6 +162,23 @@ typedef HashSet<ScriptFilenameEntry *,
ScriptFilenameHasher,
SystemAllocPolicy> ScriptFilenameTable;
/* If HashNumber grows, need to change WrapperHasher. */
JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
struct WrapperHasher
{
typedef Value Lookup;
static HashNumber hash(Value key) {
uint64_t bits = JSVAL_TO_IMPL(key).asBits;
return uint32_t(bits) ^ uint32_t(bits >> 32);
}
static bool match(const Value &l, const Value &k) { return l == k; }
};
typedef HashMap<Value, ReadBarrieredValue, WrapperHasher, SystemAllocPolicy> WrapperMap;
} /* namespace js */
namespace JS {
@ -177,7 +193,7 @@ struct JSCompartment
js::gc::ArenaLists arenas;
bool needsBarrier_;
js::GCMarker *gcIncrementalTracer;
js::BarrierGCMarker barrierMarker_;
bool needsBarrier() {
return needsBarrier_;
@ -185,9 +201,7 @@ struct JSCompartment
js::GCMarker *barrierTracer() {
JS_ASSERT(needsBarrier_);
if (gcIncrementalTracer)
return gcIncrementalTracer;
return createBarrierTracer();
return &barrierMarker_;
}
size_t gcBytes;
@ -325,10 +339,11 @@ struct JSCompartment
bool wrap(JSContext *cx, js::AutoIdVector &props);
void markTypes(JSTracer *trc);
void discardJitCode(JSContext *cx);
void sweep(JSContext *cx, bool releaseTypes);
void purge(JSContext *cx);
void setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind);
void setGCLastBytes(size_t lastBytes, js::JSGCInvocationKind gckind);
void reduceGCTriggerBytes(size_t amount);
void resetGCMallocBytes();
@ -397,8 +412,6 @@ struct JSCompartment
private:
void sweepBreakpoints(JSContext *cx);
js::GCMarker *createBarrierTracer();
public:
js::WatchpointMap *watchpointMap;
};

Просмотреть файл

@ -94,7 +94,7 @@ exn_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags,
Class js::ErrorClass = {
js_Error_str,
JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE |
JSCLASS_HAS_CACHED_PROTO(JSProto_Error),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
@ -419,14 +419,14 @@ exn_trace(JSTracer *trc, JSObject *obj)
priv = GetExnPrivate(obj);
if (priv) {
if (priv->message)
MarkString(trc, priv->message, "exception message");
MarkString(trc, &priv->message, "exception message");
if (priv->filename)
MarkString(trc, priv->filename, "exception filename");
MarkString(trc, &priv->filename, "exception filename");
elem = priv->stackElems;
for (vcount = i = 0; i != priv->stackDepth; ++i, ++elem) {
if (elem->funName)
MarkString(trc, elem->funName, "stack trace function name");
MarkString(trc, &elem->funName, "stack trace function name");
if (IS_GC_MARKING_TRACER(trc) && elem->filename)
js_MarkScriptFilename(elem->filename);
vcount += elem->argc;

Просмотреть файл

@ -132,7 +132,7 @@ JS_NewObjectWithUniqueType(JSContext *cx, JSClass *clasp, JSObject *proto, JSObj
JS_FRIEND_API(void)
js::GCForReason(JSContext *cx, gcreason::Reason reason)
{
js_GC(cx, NULL, GC_NORMAL, reason);
GC(cx, NULL, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
@ -141,13 +141,19 @@ js::CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason
/* We cannot GC the atoms compartment alone; use a full GC instead. */
JS_ASSERT(comp != cx->runtime->atomsCompartment);
js_GC(cx, comp, GC_NORMAL, reason);
GC(cx, comp, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
js::ShrinkingGC(JSContext *cx, gcreason::Reason reason)
{
js_GC(cx, NULL, GC_SHRINK, reason);
GC(cx, NULL, GC_SHRINK, reason);
}
JS_FRIEND_API(void)
js::IncrementalGC(JSContext *cx, gcreason::Reason reason)
{
GCSlice(cx, NULL, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
@ -171,7 +177,7 @@ JS_WrapPropertyDescriptor(JSContext *cx, js::PropertyDescriptor *desc)
JS_FRIEND_API(void)
JS_TraceShapeCycleCollectorChildren(JSTracer *trc, void *shape)
{
MarkCycleCollectorChildren(trc, (const Shape *)shape);
MarkCycleCollectorChildren(trc, (Shape *)shape);
}
AutoPreserveCompartment::AutoPreserveCompartment(JSContext *cx
@ -401,12 +407,6 @@ JS_SetAccumulateTelemetryCallback(JSRuntime *rt, JSAccumulateTelemetryDataCallba
rt->telemetryCallback = callback;
}
JS_FRIEND_API(void)
JS_SetGCFinishedCallback(JSRuntime *rt, JSGCFinishedCallback callback)
{
rt->gcFinishedCallback = callback;
}
#ifdef DEBUG
JS_FRIEND_API(void)
js_DumpString(JSString *str)
@ -551,39 +551,6 @@ js::DumpHeapComplete(JSContext *cx, FILE *fp)
namespace js {
JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSRuntime *rt)
{
return !!rt->gcIncrementalTracer && !rt->gcRunning;
}
JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSContext *cx)
{
return IsIncrementalBarrierNeeded(cx->runtime);
}
extern JS_FRIEND_API(void)
IncrementalReferenceBarrier(void *ptr)
{
if (!ptr)
return;
JS_ASSERT(!static_cast<gc::Cell *>(ptr)->compartment()->rt->gcRunning);
uint32_t kind = gc::GetGCThingTraceKind(ptr);
if (kind == JSTRACE_OBJECT)
JSObject::writeBarrierPre((JSObject *) ptr);
else if (kind == JSTRACE_STRING)
JSString::writeBarrierPre((JSString *) ptr);
else
JS_NOT_REACHED("invalid trace kind");
}
extern JS_FRIEND_API(void)
IncrementalValueBarrier(const Value &v)
{
HeapValue::writeBarrierPre(v);
}
/* static */ void
AutoLockGC::LockGC(JSRuntime *rt)
{
@ -719,4 +686,90 @@ SizeOfJSContext()
return sizeof(JSContext);
}
JS_FRIEND_API(GCSliceCallback)
SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback)
{
GCSliceCallback old = rt->gcSliceCallback;
rt->gcSliceCallback = callback;
return old;
}
JS_FRIEND_API(bool)
WantGCSlice(JSRuntime *rt)
{
if (rt->gcZeal() == gc::ZealFrameVerifierValue || rt->gcZeal() == gc::ZealFrameGCValue)
return true;
if (rt->gcIncrementalState != gc::NO_INCREMENTAL)
return true;
return false;
}
JS_FRIEND_API(void)
NotifyDidPaint(JSContext *cx)
{
JSRuntime *rt = cx->runtime;
if (rt->gcZeal() == gc::ZealFrameVerifierValue) {
gc::VerifyBarriers(cx);
return;
}
if (rt->gcZeal() == gc::ZealFrameGCValue) {
GCSlice(cx, NULL, GC_NORMAL, gcreason::REFRESH_FRAME);
return;
}
if (rt->gcIncrementalState != gc::NO_INCREMENTAL && !rt->gcInterFrameGC)
GCSlice(cx, rt->gcIncrementalCompartment, GC_NORMAL, gcreason::REFRESH_FRAME);
rt->gcInterFrameGC = false;
}
extern JS_FRIEND_API(bool)
IsIncrementalGCEnabled(JSRuntime *rt)
{
return rt->gcIncrementalEnabled;
}
JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSRuntime *rt)
{
return (rt->gcIncrementalState == gc::MARK && !rt->gcRunning);
}
JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSContext *cx)
{
return IsIncrementalBarrierNeeded(cx->runtime);
}
JS_FRIEND_API(bool)
IsIncrementalBarrierNeededOnObject(JSObject *obj)
{
return obj->compartment()->needsBarrier();
}
extern JS_FRIEND_API(void)
IncrementalReferenceBarrier(void *ptr)
{
if (!ptr)
return;
JS_ASSERT(!static_cast<gc::Cell *>(ptr)->compartment()->rt->gcRunning);
uint32_t kind = gc::GetGCThingTraceKind(ptr);
if (kind == JSTRACE_OBJECT)
JSObject::writeBarrierPre((JSObject *) ptr);
else if (kind == JSTRACE_STRING)
JSString::writeBarrierPre((JSString *) ptr);
else
JS_NOT_REACHED("invalid trace kind");
}
extern JS_FRIEND_API(void)
IncrementalValueBarrier(const Value &v)
{
HeapValue::writeBarrierPre(v);
}
} // namespace js

Просмотреть файл

@ -100,7 +100,11 @@ enum {
JS_TELEMETRY_GC_IS_COMPARTMENTAL,
JS_TELEMETRY_GC_MS,
JS_TELEMETRY_GC_MARK_MS,
JS_TELEMETRY_GC_SWEEP_MS
JS_TELEMETRY_GC_SWEEP_MS,
JS_TELEMETRY_GC_SLICE_MS,
JS_TELEMETRY_GC_MMU_50,
JS_TELEMETRY_GC_RESET,
JS_TELEMETRY_GC_INCREMENTAL_DISABLED
};
typedef void
@ -109,12 +113,6 @@ typedef void
extern JS_FRIEND_API(void)
JS_SetAccumulateTelemetryCallback(JSRuntime *rt, JSAccumulateTelemetryDataCallback callback);
typedef void
(* JSGCFinishedCallback)(JSRuntime *rt, JSCompartment *comp, const char *description);
extern JS_FRIEND_API(void)
JS_SetGCFinishedCallback(JSRuntime *rt, JSGCFinishedCallback callback);
extern JS_FRIEND_API(JSPrincipals *)
JS_GetCompartmentPrincipals(JSCompartment *compartment);
@ -703,12 +701,65 @@ CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason reas
extern JS_FRIEND_API(void)
ShrinkingGC(JSContext *cx, gcreason::Reason reason);
extern JS_FRIEND_API(void)
IncrementalGC(JSContext *cx, gcreason::Reason reason);
extern JS_FRIEND_API(void)
SetGCSliceTimeBudget(JSContext *cx, int64_t millis);
enum GCProgress {
/*
* During non-incremental GC, the GC is bracketed by JSGC_CYCLE_BEGIN/END
* callbacks. During an incremental GC, the sequence of callbacks is as
* follows:
* JSGC_CYCLE_BEGIN, JSGC_SLICE_END (first slice)
* JSGC_SLICE_BEGIN, JSGC_SLICE_END (second slice)
* ...
* JSGC_SLICE_BEGIN, JSGC_CYCLE_END (last slice)
*/
GC_CYCLE_BEGIN,
GC_SLICE_BEGIN,
GC_SLICE_END,
GC_CYCLE_END
};
struct GCDescription {
const char *logMessage;
bool isCompartment;
GCDescription(const char *msg, bool isCompartment)
: logMessage(msg), isCompartment(isCompartment) {}
};
typedef void
(* GCSliceCallback)(JSRuntime *rt, GCProgress progress, const GCDescription &desc);
extern JS_FRIEND_API(GCSliceCallback)
SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback);
extern JS_FRIEND_API(bool)
WantGCSlice(JSRuntime *rt);
/*
* Signals a good place to do an incremental slice, because the browser is
* drawing a frame.
*/
extern JS_FRIEND_API(void)
NotifyDidPaint(JSContext *cx);
extern JS_FRIEND_API(bool)
IsIncrementalGCEnabled(JSRuntime *rt);
extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSRuntime *rt);
extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSContext *cx);
extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeededOnObject(JSObject *obj);
extern JS_FRIEND_API(void)
IncrementalReferenceBarrier(void *ptr);

Просмотреть файл

@ -539,7 +539,7 @@ args_trace(JSTracer *trc, JSObject *obj)
#if JS_HAS_GENERATORS
StackFrame *fp = argsobj.maybeStackFrame();
if (fp && fp->isFloatingGenerator())
MarkObject(trc, js_FloatingFrameToGenerator(fp)->obj, "generator object");
MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
#endif
}
@ -551,7 +551,7 @@ args_trace(JSTracer *trc, JSObject *obj)
*/
Class js::NormalArgumentsObjectClass = {
"Arguments",
JSCLASS_NEW_RESOLVE |
JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(NormalArgumentsObject::RESERVED_SLOTS) |
JSCLASS_HAS_CACHED_PROTO(JSProto_Object) |
JSCLASS_FOR_OF_ITERATION,
@ -587,7 +587,7 @@ Class js::NormalArgumentsObjectClass = {
*/
Class js::StrictArgumentsObjectClass = {
"Arguments",
JSCLASS_NEW_RESOLVE |
JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(StrictArgumentsObject::RESERVED_SLOTS) |
JSCLASS_HAS_CACHED_PROTO(JSProto_Object) |
JSCLASS_FOR_OF_ITERATION,
@ -936,13 +936,13 @@ call_trace(JSTracer *trc, JSObject *obj)
#if JS_HAS_GENERATORS
StackFrame *fp = (StackFrame *) obj->getPrivate();
if (fp && fp->isFloatingGenerator())
MarkObject(trc, js_FloatingFrameToGenerator(fp)->obj, "generator object");
MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
#endif
}
JS_PUBLIC_DATA(Class) js::CallClass = {
"Call",
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(CallObject::RESERVED_SLOTS) |
JSCLASS_NEW_RESOLVE | JSCLASS_IS_ANONYMOUS,
JS_PropertyStub, /* addProperty */
@ -1465,7 +1465,7 @@ JSFunction::trace(JSTracer *trc)
if (isInterpreted()) {
if (script())
MarkScript(trc, script(), "script");
MarkScript(trc, &script(), "script");
if (environment())
MarkObjectUnbarriered(trc, environment(), "fun_callscope");
}
@ -1499,7 +1499,7 @@ JSFunction::sizeOfMisc(JSMallocSizeOfFun mallocSizeOf) const
*/
JS_FRIEND_DATA(Class) js::FunctionClass = {
js_Function_str,
JSCLASS_NEW_RESOLVE |
JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_CACHED_PROTO(JSProto_Function),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -55,7 +55,6 @@
#include "jslock.h"
#include "jsutil.h"
#include "jsversion.h"
#include "jsgcstats.h"
#include "jscell.h"
#include "ds/BitArray.h"
@ -82,6 +81,14 @@ struct Shape;
namespace gc {
enum State {
NO_INCREMENTAL,
MARK_ROOTS,
MARK,
SWEEP,
INVALID
};
struct Arena;
/*
@ -419,6 +426,10 @@ struct ArenaHeader {
* not present in the stack we use an extra flag to tag arenas on the
* stack.
*
* Delayed marking is also used for arenas that we allocate into during an
* incremental GC. In this case, we intend to mark all the objects in the
* arena, and it's faster to do this marking in bulk.
*
* To minimize the ArenaHeader size we record the next delayed marking
* linkage as arenaAddress() >> ArenaShift and pack it with the allocKind
* field and hasDelayedMarking flag. We use 8 bits for the allocKind, not
@ -427,7 +438,9 @@ struct ArenaHeader {
*/
public:
size_t hasDelayedMarking : 1;
size_t nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1;
size_t allocatedDuringIncremental : 1;
size_t markOverflow : 1;
size_t nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
static void staticAsserts() {
/* We must be able to fit the allockind into uint8_t. */
@ -437,7 +450,7 @@ struct ArenaHeader {
* nextDelayedMarkingpacking assumes that ArenaShift has enough bits
* to cover allocKind and hasDelayedMarking.
*/
JS_STATIC_ASSERT(ArenaShift >= 8 + 1);
JS_STATIC_ASSERT(ArenaShift >= 8 + 1 + 1 + 1);
}
inline uintptr_t address() const;
@ -450,6 +463,8 @@ struct ArenaHeader {
void init(JSCompartment *comp, AllocKind kind) {
JS_ASSERT(!allocated());
JS_ASSERT(!markOverflow);
JS_ASSERT(!allocatedDuringIncremental);
JS_ASSERT(!hasDelayedMarking);
compartment = comp;
@ -462,6 +477,8 @@ struct ArenaHeader {
void setAsNotAllocated() {
allocKind = size_t(FINALIZE_LIMIT);
markOverflow = 0;
allocatedDuringIncremental = 0;
hasDelayedMarking = 0;
nextDelayedMarking = 0;
}
@ -507,8 +524,8 @@ struct ArenaHeader {
void checkSynchronizedWithFreeList() const;
#endif
inline Arena *getNextDelayedMarking() const;
inline void setNextDelayedMarking(Arena *arena);
inline ArenaHeader *getNextDelayedMarking() const;
inline void setNextDelayedMarking(ArenaHeader *aheader);
};
struct Arena {
@ -908,25 +925,24 @@ ArenaHeader::getThingSize() const
return Arena::thingSize(getAllocKind());
}
inline Arena *
inline ArenaHeader *
ArenaHeader::getNextDelayedMarking() const
{
return reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift);
return &reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift)->aheader;
}
inline void
ArenaHeader::setNextDelayedMarking(Arena *arena)
ArenaHeader::setNextDelayedMarking(ArenaHeader *aheader)
{
JS_ASSERT(!hasDelayedMarking);
JS_ASSERT(!(uintptr_t(aheader) & ArenaMask));
hasDelayedMarking = 1;
nextDelayedMarking = arena->address() >> ArenaShift;
nextDelayedMarking = aheader->arenaAddress() >> ArenaShift;
}
JS_ALWAYS_INLINE void
ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32_t color,
uintptr_t **wordp, uintptr_t *maskp)
{
JS_ASSERT(cell->chunk() == Chunk::fromAddress(reinterpret_cast<uintptr_t>(this)));
size_t bit = (cell->address() & ChunkMask) / Cell::CellSize + color;
JS_ASSERT(bit < ArenaBitmapBits * ArenasPerChunk);
*maskp = uintptr_t(1) << (bit % JS_BITS_PER_WORD);
@ -970,21 +986,6 @@ Cell::compartment() const
return arenaHeader()->compartment;
}
/*
* Lower limit after which we limit the heap growth
*/
const size_t GC_ALLOCATION_THRESHOLD = 30 * 1024 * 1024;
/*
* A GC is triggered once the number of newly allocated arenas is
* GC_HEAP_GROWTH_FACTOR times the number of live arenas after the last GC
* starting after the lower limit of GC_ALLOCATION_THRESHOLD.
*/
const float GC_HEAP_GROWTH_FACTOR = 3.0f;
/* Perform a Full GC every 20 seconds if MaybeGC is called */
static const int64_t GC_IDLE_FULL_SPAN = 20 * 1000 * 1000;
static inline JSGCTraceKind
MapAllocToTraceKind(AllocKind thingKind)
{
@ -1168,13 +1169,14 @@ struct ArenaLists {
FreeSpan *headSpan = &freeLists[i];
if (!headSpan->isEmpty()) {
ArenaHeader *aheader = headSpan->arenaHeader();
JS_ASSERT(!aheader->hasFreeThings());
aheader->setFirstFreeSpan(headSpan);
headSpan->initAsEmpty();
}
}
}
inline void prepareForIncrementalGC(JSCompartment *comp);
/*
* Temporarily copy the free list heads to the arenas so the code can see
* the proper value in ArenaHeader::freeList when accessing the latter
@ -1309,23 +1311,6 @@ typedef js::HashMap<void *,
js::DefaultHasher<void *>,
js::SystemAllocPolicy> RootedValueMap;
/* If HashNumber grows, need to change WrapperHasher. */
JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
struct WrapperHasher
{
typedef Value Lookup;
static HashNumber hash(Value key) {
uint64_t bits = JSVAL_TO_IMPL(key).asBits;
return uint32_t(bits) ^ uint32_t(bits >> 32);
}
static bool match(const Value &l, const Value &k) { return l == k; }
};
typedef HashMap<Value, Value, WrapperHasher, SystemAllocPolicy> WrapperMap;
} /* namespace js */
extern JS_FRIEND_API(JSGCTraceKind)
@ -1376,6 +1361,9 @@ js_IsAddressableGCThing(JSRuntime *rt, uintptr_t w, js::gc::AllocKind *thingKind
namespace js {
extern void
MarkCompartmentActive(js::StackFrame *fp);
extern void
TraceRuntime(JSTracer *trc);
@ -1396,8 +1384,6 @@ MaybeGC(JSContext *cx);
extern void
ShrinkGCBuffers(JSRuntime *rt);
} /* namespace js */
/*
* Kinds of js_GC invocation.
*/
@ -1411,10 +1397,21 @@ typedef enum JSGCInvocationKind {
/* Pass NULL for |comp| to get a full GC. */
extern void
js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason r);
GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason reason);
extern void
GCSlice(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason reason);
extern void
GCDebugSlice(JSContext *cx, int64_t objCount);
} /* namespace js */
namespace js {
void
InitTracer(JSTracer *trc, JSRuntime *rt, JSContext *cx, JSTraceCallback callback);
#ifdef JS_THREADSAFE
class GCHelperThread {
@ -1572,17 +1569,56 @@ struct MarkStack {
T *tos;
T *limit;
bool push(T item) {
if (tos == limit)
T *ballast;
T *ballastLimit;
MarkStack()
: stack(NULL),
tos(NULL),
limit(NULL),
ballast(NULL),
ballastLimit(NULL) { }
~MarkStack() {
if (stack != ballast)
js_free(stack);
js_free(ballast);
}
bool init(size_t ballastcap) {
JS_ASSERT(!stack);
if (ballastcap == 0)
return true;
ballast = (T *)js_malloc(sizeof(T) * ballastcap);
if (!ballast)
return false;
ballastLimit = ballast + ballastcap;
stack = ballast;
limit = ballastLimit;
tos = stack;
return true;
}
bool push(T item) {
if (tos == limit) {
if (!enlarge())
return false;
}
JS_ASSERT(tos < limit);
*tos++ = item;
return true;
}
bool push(T item1, T item2, T item3) {
T *nextTos = tos + 3;
if (nextTos > limit)
if (nextTos > limit) {
if (!enlarge())
return false;
nextTos = tos + 3;
}
JS_ASSERT(nextTos <= limit);
tos[0] = item1;
tos[1] = item2;
tos[2] = item3;
@ -1599,61 +1635,130 @@ struct MarkStack {
return *--tos;
}
template<size_t N>
MarkStack(T (&buffer)[N])
: stack(buffer),
tos(buffer),
limit(buffer + N) { }
ptrdiff_t position() const {
return tos - stack;
}
void reset() {
if (stack != ballast) {
js_free(stack);
stack = ballast;
limit = ballastLimit;
}
tos = stack;
JS_ASSERT(limit == ballastLimit);
}
bool enlarge() {
size_t tosIndex = tos - stack;
size_t cap = limit - stack;
size_t newcap = cap * 2;
if (newcap == 0)
newcap = 32;
T *newStack;
if (stack == ballast) {
newStack = (T *)js_malloc(sizeof(T) * newcap);
if (!newStack)
return false;
for (T *src = stack, *dst = newStack; src < tos; )
*dst++ = *src++;
} else {
newStack = (T *)js_realloc(stack, sizeof(T) * newcap);
if (!newStack)
return false;
}
stack = newStack;
tos = stack + tosIndex;
limit = newStack + newcap;
return true;
}
};
/*
* This class records how much work has been done in a given GC slice, so that
* we can return before pausing for too long. Some slices are allowed to run for
* unlimited time, and others are bounded. To reduce the number of gettimeofday
* calls, we only check the time every 1000 operations.
*/
struct SliceBudget {
int64_t deadline; /* in microseconds */
intptr_t counter;
static const intptr_t CounterReset = 1000;
static const int64_t Unlimited = 0;
static int64_t TimeBudget(int64_t millis);
static int64_t WorkBudget(int64_t work);
/* Equivalent to SliceBudget(UnlimitedBudget). */
SliceBudget();
/* Instantiate as SliceBudget(Time/WorkBudget(n)). */
SliceBudget(int64_t budget);
void reset() {
deadline = INT64_MAX;
counter = INTPTR_MAX;
}
void step() {
counter--;
}
bool checkOverBudget();
bool isOverBudget() {
if (counter > 0)
return false;
return checkOverBudget();
}
};
static const size_t MARK_STACK_LENGTH = 32768;
struct GCMarker : public JSTracer {
private:
/*
* We use a common mark stack to mark GC things of different types and use
* the explicit tags to distinguish them when it cannot be deduced from
* the context of push or pop operation.
*
* Currently we need only 4 tags. However that can be extended to 8 if
* necessary as we tag only GC things.
*/
enum StackTag {
ValueArrayTag,
ObjectTag,
TypeTag,
XmlTag,
LastTag = XmlTag
SavedValueArrayTag,
LastTag = SavedValueArrayTag
};
static const uintptr_t StackTagMask = 3;
static const uintptr_t StackTagMask = 7;
static void staticAsserts() {
JS_STATIC_ASSERT(StackTagMask >= uintptr_t(LastTag));
JS_STATIC_ASSERT(StackTagMask <= gc::Cell::CellMask);
}
private:
/* The color is only applied to objects, functions and xml. */
uint32_t color;
public:
/* Pointer to the top of the stack of arenas we are delaying marking on. */
js::gc::Arena *unmarkedArenaStackTop;
/* Count of arenas that are currently in the stack. */
DebugOnly<size_t> markLaterArenas;
explicit GCMarker();
bool init(bool lazy);
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
js::gc::ConservativeGCStats conservativeStats;
Vector<void *, 0, SystemAllocPolicy> conservativeRoots;
const char *conservativeDumpFileName;
void dumpConservativeRoots();
#endif
void start(JSRuntime *rt, JSContext *cx);
void stop();
void reset();
MarkStack<uintptr_t> stack;
void pushObject(JSObject *obj) {
pushTaggedPtr(ObjectTag, obj);
}
public:
explicit GCMarker(JSContext *cx);
~GCMarker();
void pushType(types::TypeObject *type) {
pushTaggedPtr(TypeTag, type);
}
void pushXML(JSXML *xml) {
pushTaggedPtr(XmlTag, xml);
}
uint32_t getMarkColor() const {
return color;
@ -1668,43 +1773,123 @@ struct GCMarker : public JSTracer {
* objects that are still reachable.
*/
void setMarkColorGray() {
JS_ASSERT(isDrained());
JS_ASSERT(color == gc::BLACK);
color = gc::GRAY;
}
inline void delayMarkingArena(gc::ArenaHeader *aheader);
void delayMarkingChildren(const void *thing);
void markDelayedChildren(gc::ArenaHeader *aheader);
bool markDelayedChildren(SliceBudget &budget);
bool hasDelayedChildren() const {
return !!unmarkedArenaStackTop;
}
void markDelayedChildren();
bool isDrained() {
return isMarkStackEmpty() && !unmarkedArenaStackTop;
}
bool drainMarkStack(SliceBudget &budget);
/*
* Gray marking must be done after all black marking is complete. However,
* we do not have write barriers on XPConnect roots. Therefore, XPConnect
* roots must be accumulated in the first slice of incremental GC. We
* accumulate these roots in the GrayRootMarker and then mark them later,
* after black marking is complete. This accumulation can fail, but in that
* case we switch to non-incremental GC.
*/
bool hasBufferedGrayRoots() const;
void startBufferingGrayRoots();
void endBufferingGrayRoots();
void markBufferedGrayRoots();
static void GrayCallback(JSTracer *trc, void **thing, JSGCTraceKind kind);
MarkStack<uintptr_t> stack;
private:
#ifdef DEBUG
void checkCompartment(void *p);
#else
void checkCompartment(void *p) {}
#endif
void pushTaggedPtr(StackTag tag, void *ptr) {
checkCompartment(ptr);
uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
JS_ASSERT(!(addr & StackTagMask));
if (!stack.push(addr | uintptr_t(tag)))
delayMarkingChildren(ptr);
}
void pushValueArray(JSObject *obj, void *start, void *end) {
checkCompartment(obj);
if (start == end)
return;
JS_ASSERT(start <= end);
uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
/*
* Push in the reverse order so obj will be on top. If we cannot push
* the array, we trigger delay marking for the whole object.
*/
if (!stack.push(endAddr, startAddr, tagged))
delayMarkingChildren(obj);
}
bool isMarkStackEmpty() {
return stack.isEmpty();
}
void drainMarkStack();
bool restoreValueArray(JSObject *obj, void **vpp, void **endp);
void saveValueRanges();
inline void processMarkStackTop(SliceBudget &budget);
inline void processMarkStackTop();
void appendGrayRoot(void *thing, JSGCTraceKind kind);
void pushObject(JSObject *obj) {
pushTaggedPtr(ObjectTag, obj);
/* The color is only applied to objects, functions and xml. */
uint32_t color;
DebugOnly<bool> started;
/* Pointer to the top of the stack of arenas we are delaying marking on. */
js::gc::ArenaHeader *unmarkedArenaStackTop;
/* Count of arenas that are currently in the stack. */
DebugOnly<size_t> markLaterArenas;
struct GrayRoot {
void *thing;
JSGCTraceKind kind;
#ifdef DEBUG
JSTraceNamePrinter debugPrinter;
const void *debugPrintArg;
size_t debugPrintIndex;
#endif
GrayRoot(void *thing, JSGCTraceKind kind)
: thing(thing), kind(kind) {}
};
bool grayFailed;
Vector<GrayRoot, 0, SystemAllocPolicy> grayRoots;
};
struct BarrierGCMarker : public GCMarker {
bool init() {
return GCMarker::init(true);
}
};
void pushType(types::TypeObject *type) {
pushTaggedPtr(TypeTag, type);
}
void pushXML(JSXML *xml) {
pushTaggedPtr(XmlTag, xml);
}
void pushTaggedPtr(StackTag tag, void *ptr) {
uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
JS_ASSERT(!(addr & StackTagMask));
if (!stack.push(addr | uintptr_t(tag)))
delayMarkingChildren(ptr);
struct FullGCMarker : public GCMarker {
bool init() {
return GCMarker::init(false);
}
};
@ -1757,7 +1942,8 @@ js_FinalizeStringRT(JSRuntime *rt, JSString *str);
/*
* Macro to test if a traversal is the marking phase of the GC.
*/
#define IS_GC_MARKING_TRACER(trc) ((trc)->callback == NULL)
#define IS_GC_MARKING_TRACER(trc) \
((trc)->callback == NULL || (trc)->callback == GCMarker::GrayCallback)
namespace js {
namespace gc {
@ -1778,20 +1964,30 @@ inline void MaybeCheckStackRoots(JSContext *cx) { CheckStackRoots(cx); }
inline void MaybeCheckStackRoots(JSContext *cx) {}
#endif
const int ZealPokeThreshold = 1;
const int ZealAllocThreshold = 2;
const int ZealVerifierThreshold = 4;
const int ZealPokeValue = 1;
const int ZealAllocValue = 2;
const int ZealFrameGCValue = 3;
const int ZealVerifierValue = 4;
const int ZealFrameVerifierValue = 5;
#ifdef JS_GC_ZEAL
/* Check that write barriers have been used correctly. See jsgc.cpp. */
void
VerifyBarriers(JSContext *cx, bool always = false);
VerifyBarriers(JSContext *cx);
void
MaybeVerifyBarriers(JSContext *cx, bool always = false);
#else
static inline void
VerifyBarriers(JSContext *cx, bool always = false)
VerifyBarriers(JSContext *cx)
{
}
static inline void
MaybeVerifyBarriers(JSContext *cx, bool always = false)
{
}

Просмотреть файл

@ -210,7 +210,7 @@ GCPoke(JSRuntime *rt, Value oldval)
#ifdef JS_GC_ZEAL
/* Schedule a GC to happen "soon" after a GC poke. */
if (rt->gcZeal() >= js::gc::ZealPokeThreshold)
if (rt->gcZeal() == js::gc::ZealPokeValue)
rt->gcNextScheduled = 1;
#endif
}
@ -262,14 +262,25 @@ class CellIterImpl
CellIterImpl() {
}
void init(JSCompartment *comp, AllocKind kind) {
void initSpan(JSCompartment *comp, AllocKind kind) {
JS_ASSERT(comp->arenas.isSynchronizedFreeList(kind));
firstThingOffset = Arena::firstThingOffset(kind);
thingSize = Arena::thingSize(kind);
aheader = comp->arenas.getFirstArena(kind);
firstSpan.initAsEmpty();
span = &firstSpan;
thing = span->first;
}
void init(ArenaHeader *singleAheader) {
aheader = singleAheader;
initSpan(aheader->compartment, aheader->getAllocKind());
next();
aheader = NULL;
}
void init(JSCompartment *comp, AllocKind kind) {
initSpan(comp, kind);
aheader = comp->arenas.getFirstArena(kind);
next();
}
@ -311,13 +322,18 @@ class CellIterImpl
}
};
class CellIterUnderGC : public CellIterImpl {
class CellIterUnderGC : public CellIterImpl
{
public:
CellIterUnderGC(JSCompartment *comp, AllocKind kind) {
JS_ASSERT(comp->rt->gcRunning);
init(comp, kind);
}
CellIterUnderGC(ArenaHeader *aheader) {
JS_ASSERT(aheader->compartment->rt->gcRunning);
init(aheader);
}
};
/*
@ -335,7 +351,8 @@ class CellIter: public CellIterImpl
public:
CellIter(JSContext *cx, JSCompartment *comp, AllocKind kind)
: lists(&comp->arenas),
kind(kind) {
kind(kind)
{
#ifdef JS_THREADSAFE
JS_ASSERT(comp->arenas.doneBackgroundFinalize(kind));
#endif
@ -397,6 +414,9 @@ NewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
void *t = comp->arenas.allocateFromFreeList(kind, thingSize);
if (!t)
t = js::gc::ArenaLists::refillFreeList(cx, kind);
JS_ASSERT_IF(t && comp->needsBarrier(),
static_cast<T *>(t)->arenaHeader()->allocatedDuringIncremental);
return static_cast<T *>(t);
}
@ -419,6 +439,8 @@ TryNewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
#endif
void *t = cx->compartment->arenas.allocateFromFreeList(kind, thingSize);
JS_ASSERT_IF(t && cx->compartment->needsBarrier(),
static_cast<T *>(t)->arenaHeader()->allocatedDuringIncremental);
return static_cast<T *>(t);
}

Просмотреть файл

@ -61,7 +61,7 @@ static inline void
PushMarkStack(GCMarker *gcmarker, JSScript *thing);
static inline void
PushMarkStack(GCMarker *gcmarker, const Shape *thing);
PushMarkStack(GCMarker *gcmarker, Shape *thing);
static inline void
PushMarkStack(GCMarker *gcmarker, JSString *thing);
@ -103,7 +103,7 @@ MarkInternal(JSTracer *trc, T *thing)
* GC.
*/
if (!rt->gcCurrentCompartment || thing->compartment() == rt->gcCurrentCompartment) {
if (IS_GC_MARKING_TRACER(trc)) {
if (!trc->callback) {
PushMarkStack(static_cast<GCMarker *>(trc), thing);
} else {
void *tmp = (void *)thing;
@ -118,6 +118,12 @@ MarkInternal(JSTracer *trc, T *thing)
#endif
}
#define JS_ROOT_MARKING_ASSERT(trc) \
JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc), \
trc->runtime->gcIncrementalState == NO_INCREMENTAL || \
trc->runtime->gcIncrementalState == MARK_ROOTS);
template <typename T>
static void
MarkUnbarriered(JSTracer *trc, T *thing, const char *name)
@ -128,18 +134,19 @@ MarkUnbarriered(JSTracer *trc, T *thing, const char *name)
template <typename T>
static void
Mark(JSTracer *trc, const HeapPtr<T> &thing, const char *name)
Mark(JSTracer *trc, HeapPtr<T> *thing, const char *name)
{
JS_SET_TRACING_NAME(trc, name);
MarkInternal(trc, thing.get());
MarkInternal(trc, thing->get());
}
template <typename T>
static void
MarkRoot(JSTracer *trc, T *thing, const char *name)
MarkRoot(JSTracer *trc, T **thingp, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
JS_SET_TRACING_NAME(trc, name);
MarkInternal(trc, thing);
MarkInternal(trc, *thingp);
}
template <typename T>
@ -158,6 +165,7 @@ template <typename T>
static void
MarkRootRange(JSTracer *trc, size_t len, T **vec, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
for (size_t i = 0; i < len; ++i) {
JS_SET_TRACING_INDEX(trc, name, i);
MarkInternal(trc, vec[i]);
@ -166,15 +174,15 @@ MarkRootRange(JSTracer *trc, size_t len, T **vec, const char *name)
#define DeclMarkerImpl(base, type) \
void \
Mark##base(JSTracer *trc, const HeapPtr<type> &thing, const char *name) \
Mark##base(JSTracer *trc, HeapPtr<type> *thing, const char *name) \
{ \
Mark<type>(trc, thing, name); \
} \
\
void \
Mark##base##Root(JSTracer *trc, type *thing, const char *name) \
Mark##base##Root(JSTracer *trc, type **thingp, const char *name) \
{ \
MarkRoot<type>(trc, thing, name); \
MarkRoot<type>(trc, thingp, name); \
} \
\
void \
@ -246,6 +254,7 @@ MarkKind(JSTracer *trc, void *thing, JSGCTraceKind kind)
void
MarkGCThingRoot(JSTracer *trc, void *thing, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
JS_SET_TRACING_NAME(trc, name);
if (!thing)
return;
@ -255,24 +264,30 @@ MarkGCThingRoot(JSTracer *trc, void *thing, const char *name)
/*** ID Marking ***/
static inline void
MarkIdInternal(JSTracer *trc, const jsid &id)
MarkIdInternal(JSTracer *trc, jsid *id)
{
if (JSID_IS_STRING(id))
MarkInternal(trc, JSID_TO_STRING(id));
else if (JS_UNLIKELY(JSID_IS_OBJECT(id)))
MarkInternal(trc, JSID_TO_OBJECT(id));
if (JSID_IS_STRING(*id)) {
JSString *str = JSID_TO_STRING(*id);
MarkInternal(trc, str);
*id = ATOM_TO_JSID(reinterpret_cast<JSAtom *>(str));
} else if (JS_UNLIKELY(JSID_IS_OBJECT(*id))) {
JSObject *obj = JSID_TO_OBJECT(*id);
MarkInternal(trc, obj);
*id = OBJECT_TO_JSID(obj);
}
}
void
MarkId(JSTracer *trc, const HeapId &id, const char *name)
MarkId(JSTracer *trc, HeapId *id, const char *name)
{
JS_SET_TRACING_NAME(trc, name);
MarkIdInternal(trc, id);
MarkIdInternal(trc, id->unsafeGet());
}
void
MarkIdRoot(JSTracer *trc, const jsid &id, const char *name)
MarkIdRoot(JSTracer *trc, jsid *id, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
JS_SET_TRACING_NAME(trc, name);
MarkIdInternal(trc, id);
}
@ -282,16 +297,17 @@ MarkIdRange(JSTracer *trc, size_t len, HeapId *vec, const char *name)
{
for (size_t i = 0; i < len; ++i) {
JS_SET_TRACING_INDEX(trc, name, i);
MarkIdInternal(trc, vec[i]);
MarkIdInternal(trc, vec[i].unsafeGet());
}
}
void
MarkIdRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
for (size_t i = 0; i < len; ++i) {
JS_SET_TRACING_INDEX(trc, name, i);
MarkIdInternal(trc, vec[i]);
MarkIdInternal(trc, &vec[i]);
}
}
@ -316,6 +332,7 @@ MarkValue(JSTracer *trc, HeapValue *v, const char *name)
void
MarkValueRoot(JSTracer *trc, Value *v, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
JS_SET_TRACING_NAME(trc, name);
MarkValueInternal(trc, v);
}
@ -332,6 +349,7 @@ MarkValueRange(JSTracer *trc, size_t len, HeapValue *vec, const char *name)
void
MarkValueRootRange(JSTracer *trc, size_t len, Value *vec, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
for (size_t i = 0; i < len; ++i) {
JS_SET_TRACING_INDEX(trc, name, i);
MarkValueInternal(trc, &vec[i]);
@ -351,13 +369,6 @@ MarkObject(JSTracer *trc, const HeapPtr<GlobalObject, JSScript *> &thing, const
MarkInternal(trc, thing.get());
}
void
MarkShape(JSTracer *trc, const HeapPtr<const Shape> &thing, const char *name)
{
JS_SET_TRACING_NAME(trc, name);
MarkInternal(trc, const_cast<Shape *>(thing.get()));
}
void
MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name)
{
@ -374,6 +385,10 @@ MarkCrossCompartmentValue(JSTracer *trc, HeapValue *v, const char *name)
if (rt->gcCurrentCompartment && cell->compartment() != rt->gcCurrentCompartment)
return;
/* In case we're called from a write barrier. */
if (rt->gcIncrementalCompartment && cell->compartment() != rt->gcIncrementalCompartment)
return;
MarkValue(trc, v, name);
}
}
@ -443,10 +458,10 @@ PushMarkStack(GCMarker *gcmarker, JSScript *thing)
}
static void
ScanShape(GCMarker *gcmarker, const Shape *shape);
ScanShape(GCMarker *gcmarker, Shape *shape);
static void
PushMarkStack(GCMarker *gcmarker, const Shape *thing)
PushMarkStack(GCMarker *gcmarker, Shape *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
@ -469,12 +484,12 @@ PushMarkStack(GCMarker *gcmarker, BaseShape *thing)
}
static void
ScanShape(GCMarker *gcmarker, const Shape *shape)
ScanShape(GCMarker *gcmarker, Shape *shape)
{
restart:
PushMarkStack(gcmarker, shape->base());
jsid id = shape->maybePropid();
const HeapId &id = shape->propidRef();
if (JSID_IS_STRING(id))
PushMarkStack(gcmarker, JSID_TO_STRING(id));
else if (JS_UNLIKELY(JSID_IS_OBJECT(id)))
@ -543,7 +558,7 @@ ScanLinearString(GCMarker *gcmarker, JSLinearString *str)
static void
ScanRope(GCMarker *gcmarker, JSRope *rope)
{
uintptr_t *savedTos = gcmarker->stack.tos;
ptrdiff_t savedPos = gcmarker->stack.position();
for (;;) {
JS_ASSERT(GetGCThingTraceKind(rope) == JSTRACE_STRING);
JS_ASSERT(rope->JSString::isRope());
@ -575,14 +590,14 @@ ScanRope(GCMarker *gcmarker, JSRope *rope)
}
if (next) {
rope = next;
} else if (savedTos != gcmarker->stack.tos) {
JS_ASSERT(savedTos < gcmarker->stack.tos);
} else if (savedPos != gcmarker->stack.position()) {
JS_ASSERT(savedPos < gcmarker->stack.position());
rope = reinterpret_cast<JSRope *>(gcmarker->stack.pop());
} else {
break;
}
}
JS_ASSERT(savedTos == gcmarker->stack.tos);
JS_ASSERT(savedPos == gcmarker->stack.position());
}
static inline void
@ -608,28 +623,10 @@ PushMarkStack(GCMarker *gcmarker, JSString *str)
ScanString(gcmarker, str);
}
static inline void
PushValueArray(GCMarker *gcmarker, JSObject* obj, HeapValue *start, HeapValue *end)
{
JS_ASSERT(start <= end);
uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
/* Push in the reverse order so obj will be on top. */
if (!gcmarker->stack.push(endAddr, startAddr, tagged)) {
/*
* If we cannot push the array, we trigger delay marking for the whole
* object.
*/
gcmarker->delayMarkingChildren(obj);
}
}
void
MarkChildren(JSTracer *trc, JSObject *obj)
{
MarkTypeObject(trc, obj->typeFromGC(), "type");
MarkTypeObject(trc, &obj->typeFromGC(), "type");
Shape *shape = obj->lastProperty();
MarkShapeUnbarriered(trc, shape, "shape");
@ -710,12 +707,12 @@ MarkChildren(JSTracer *trc, JSScript *script)
}
static void
MarkChildren(JSTracer *trc, const Shape *shape)
MarkChildren(JSTracer *trc, Shape *shape)
{
MarkBaseShapeUnbarriered(trc, shape->base(), "base");
MarkId(trc, shape->maybePropid(), "propid");
MarkId(trc, &shape->propidRef(), "propid");
if (shape->previous())
MarkShape(trc, shape->previous(), "parent");
MarkShape(trc, &shape->previousRef(), "parent");
}
static inline void
@ -776,12 +773,12 @@ MarkCycleCollectorChildren(JSTracer *trc, BaseShape *base, JSObject **prevParent
* parent pointer will only be marked once.
*/
void
MarkCycleCollectorChildren(JSTracer *trc, const Shape *shape)
MarkCycleCollectorChildren(JSTracer *trc, Shape *shape)
{
JSObject *prevParent = NULL;
do {
MarkCycleCollectorChildren(trc, shape->base(), &prevParent);
MarkId(trc, shape->maybePropid(), "propid");
MarkId(trc, &shape->propidRef(), "propid");
shape = shape->previous();
} while (shape);
}
@ -824,23 +821,23 @@ MarkChildren(JSTracer *trc, types::TypeObject *type)
for (unsigned i = 0; i < count; i++) {
types::Property *prop = type->getProperty(i);
if (prop)
MarkId(trc, prop->id, "type_prop");
MarkId(trc, &prop->id, "type_prop");
}
}
if (type->proto)
MarkObject(trc, type->proto, "type_proto");
MarkObject(trc, &type->proto, "type_proto");
if (type->singleton && !type->lazy())
MarkObject(trc, type->singleton, "type_singleton");
MarkObject(trc, &type->singleton, "type_singleton");
if (type->newScript) {
MarkObject(trc, type->newScript->fun, "type_new_function");
MarkShape(trc, type->newScript->shape, "type_new_shape");
MarkObject(trc, &type->newScript->fun, "type_new_function");
MarkShape(trc, &type->newScript->shape, "type_new_shape");
}
if (type->interpretedFunction)
MarkObject(trc, type->interpretedFunction, "type_function");
MarkObject(trc, &type->interpretedFunction, "type_function");
}
#ifdef JS_HAS_XML_SUPPORT
@ -851,12 +848,163 @@ MarkChildren(JSTracer *trc, JSXML *xml)
}
#endif
template<typename T>
void
PushArenaTyped(GCMarker *gcmarker, ArenaHeader *aheader)
{
    /* Push every live cell of type T in this arena onto the mark stack. */
    CellIterUnderGC iter(aheader);
    while (!iter.done()) {
        PushMarkStack(gcmarker, iter.get<T>());
        iter.next();
    }
}
/*
 * Push every live cell stored in the arena onto the mark stack, dispatching
 * on the trace kind that corresponds to the arena's allocation kind.
 */
void
PushArena(GCMarker *gcmarker, ArenaHeader *aheader)
{
    switch (MapAllocToTraceKind(aheader->getAllocKind())) {
      case JSTRACE_OBJECT:
        PushArenaTyped<JSObject>(gcmarker, aheader);
        break;

      case JSTRACE_STRING:
        PushArenaTyped<JSString>(gcmarker, aheader);
        break;

      case JSTRACE_SCRIPT:
        PushArenaTyped<JSScript>(gcmarker, aheader);
        break;

      case JSTRACE_SHAPE:
        PushArenaTyped<js::Shape>(gcmarker, aheader);
        break;

      case JSTRACE_BASE_SHAPE:
        PushArenaTyped<js::BaseShape>(gcmarker, aheader);
        break;

      case JSTRACE_TYPE_OBJECT:
        PushArenaTyped<js::types::TypeObject>(gcmarker, aheader);
        break;

#if JS_HAS_XML_SUPPORT
      /* XML cells exist only when E4X support is compiled in. */
      case JSTRACE_XML:
        PushArenaTyped<JSXML>(gcmarker, aheader);
        break;
#endif
    }
}
} /* namespace gc */
using namespace js::gc;
struct ValueArrayLayout
{
    /*
     * Overlay describing a three-word value-array item on the mark stack.
     * In the normal (ValueArrayTag) form, |start| and |end| delimit the
     * HeapValue range still to be scanned. In the saved (SavedValueArrayTag)
     * form produced by GCMarker::saveValueRanges, |end| is replaced with the
     * object's class and |start| with a slot index, so the item survives slot
     * reallocation while JS runs between incremental slices.
     */
    union {
        HeapValue *end;
        js::Class *clasp;
    };
    union {
        HeapValue *start;
        uintptr_t index;
    };
    JSObject *obj;

    static void staticAsserts() {
        /* This should have the same layout as three mark stack items. */
        JS_STATIC_ASSERT(sizeof(ValueArrayLayout) == 3 * sizeof(uintptr_t));
    }
};
/*
* During incremental GC, we return from drainMarkStack without having processed
* the entire stack. At that point, JS code can run and reallocate slot arrays
* that are stored on the stack. To prevent this from happening, we replace all
* ValueArrayTag stack items with SavedValueArrayTag. In the latter, slots
* pointers are replaced with slot indexes.
*
* We also replace the slot array end pointer (which can be derived from the obj
 * pointer) with the object's class. During JS execution, array slowification
* can cause the layout of slots to change. We can observe that slowification
* happened if the class changed; in that case, we completely rescan the array.
*/
void
GCMarker::saveValueRanges()
{
    /* Walk the mark stack from top to bottom; the tag lives in the low bits
       of the topmost word of each item. */
    for (uintptr_t *p = stack.tos; p > stack.stack; ) {
        uintptr_t tag = *--p & StackTagMask;
        if (tag == ValueArrayTag) {
            /* A value array occupies three words (see ValueArrayLayout). */
            p -= 2;
            ValueArrayLayout *arr = reinterpret_cast<ValueArrayLayout *>(p);
            JSObject *obj = arr->obj;

            if (obj->getClass() == &ArrayClass) {
                /* Dense array: the range lies in the element vector. */
                HeapValue *vp = obj->getDenseArrayElements();
                JS_ASSERT(arr->start >= vp &&
                          arr->end == vp + obj->getDenseArrayInitializedLength());
                arr->index = arr->start - vp;
            } else {
                HeapValue *vp = obj->fixedSlots();
                unsigned nfixed = obj->numFixedSlots();
                if (arr->start >= vp && arr->start < vp + nfixed) {
                    /* The range lies within the fixed slots. */
                    JS_ASSERT(arr->end == vp + Min(nfixed, obj->slotSpan()));
                    arr->index = arr->start - vp;
                } else {
                    /* The range lies within the dynamic slots; indexes of
                       dynamic slots are offset by nfixed. */
                    JS_ASSERT(arr->start >= obj->slots &&
                              arr->end == obj->slots + obj->slotSpan() - nfixed);
                    arr->index = (arr->start - obj->slots) + nfixed;
                }
            }
            /*
             * Replace the (derivable) end pointer with the class so a later
             * slowification can be detected, then retag the item.
             * NOTE(review): |=| assumes SavedValueArrayTag is a bit-superset
             * of ValueArrayTag — confirm against the tag definitions.
             */
            arr->clasp = obj->getClass();
            p[2] |= SavedValueArrayTag;
        } else if (tag == SavedValueArrayTag) {
            /* Already in saved form: just skip its remaining two words. */
            p -= 2;
        }
    }
}
/*
 * Convert a SavedValueArrayTag mark-stack entry (slot index + class, pushed
 * by saveValueRanges) back into live [*vpp, *endp) HeapValue pointers for
 * |obj|. Returns false if the object's class changed (the array was
 * slowified), in which case the caller must rescan the whole object.
 */
bool
GCMarker::restoreValueArray(JSObject *obj, void **vpp, void **endp)
{
    /* Pop the saved slot index and class in the reverse order of the push. */
    uintptr_t start = stack.pop();
    js::Class *clasp = reinterpret_cast<js::Class *>(stack.pop());

    JS_ASSERT(obj->getClass() == clasp ||
              (clasp == &ArrayClass && obj->getClass() == &SlowArrayClass));

    if (clasp == &ArrayClass) {
        /* The array was slowified since it was saved: rescan from scratch. */
        if (obj->getClass() != &ArrayClass)
            return false;

        uint32_t initlen = obj->getDenseArrayInitializedLength();
        HeapValue *vp = obj->getDenseArrayElements();
        if (start < initlen) {
            *vpp = vp + start;
            *endp = vp + initlen;
        } else {
            /* The object shrunk, in which case no scanning is needed. */
            *vpp = *endp = vp;
        }
    } else {
        HeapValue *vp = obj->fixedSlots();
        unsigned nfixed = obj->numFixedSlots();
        unsigned nslots = obj->slotSpan();
        if (start < nfixed) {
            /* Resume within the fixed slots. */
            *vpp = vp + start;
            *endp = vp + Min(nfixed, nslots);
        } else if (start < nslots) {
            /* Resume within the dynamic slots (indexes offset by nfixed). */
            *vpp = obj->slots + start - nfixed;
            *endp = obj->slots + nslots - nfixed;
        } else {
            /* The object shrunk, in which case no scanning is needed. */
            *vpp = *endp = obj->slots;
        }
    }

    JS_ASSERT(*vpp <= *endp);
    return true;
}
inline void
GCMarker::processMarkStackTop()
GCMarker::processMarkStackTop(SliceBudget &budget)
{
/*
* The function uses explicit goto and implements the scanning of the
@ -885,29 +1033,46 @@ GCMarker::processMarkStackTop()
if (tag == ObjectTag) {
obj = reinterpret_cast<JSObject *>(addr);
JS_COMPARTMENT_ASSERT(runtime, obj);
goto scan_obj;
}
if (tag == TypeTag) {
ScanTypeObject(this, reinterpret_cast<types::TypeObject *>(addr));
} else if (tag == SavedValueArrayTag) {
JS_ASSERT(!(addr & Cell::CellMask));
obj = reinterpret_cast<JSObject *>(addr);
if (restoreValueArray(obj, (void **)&vp, (void **)&end))
goto scan_value_array;
else
goto scan_obj;
} else {
JS_ASSERT(tag == XmlTag);
MarkChildren(this, reinterpret_cast<JSXML *>(addr));
}
budget.step();
return;
scan_value_array:
JS_ASSERT(vp <= end);
while (vp != end) {
budget.step();
if (budget.isOverBudget()) {
pushValueArray(obj, vp, end);
return;
}
const Value &v = *vp++;
if (v.isString()) {
JSString *str = v.toString();
JS_COMPARTMENT_ASSERT_STR(runtime, str);
if (str->markIfUnmarked())
ScanString(this, str);
} else if (v.isObject()) {
JSObject *obj2 = &v.toObject();
JS_COMPARTMENT_ASSERT(runtime, obj2);
if (obj2->markIfUnmarked(getMarkColor())) {
PushValueArray(this, obj, vp, end);
pushValueArray(obj, vp, end);
obj = obj2;
goto scan_obj;
}
@ -917,6 +1082,14 @@ GCMarker::processMarkStackTop()
scan_obj:
{
JS_COMPARTMENT_ASSERT(runtime, obj);
budget.step();
if (budget.isOverBudget()) {
pushObject(obj);
return;
}
types::TypeObject *type = obj->typeFromGC();
PushMarkStack(this, type);
@ -931,6 +1104,9 @@ GCMarker::processMarkStackTop()
vp = obj->getDenseArrayElements();
end = vp + obj->getDenseArrayInitializedLength();
goto scan_value_array;
} else {
JS_ASSERT_IF(runtime->gcIncrementalState != NO_INCREMENTAL,
clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS);
}
clasp->trace(this, obj);
}
@ -943,7 +1119,7 @@ GCMarker::processMarkStackTop()
if (obj->slots) {
unsigned nfixed = obj->numFixedSlots();
if (nslots > nfixed) {
PushValueArray(this, obj, vp, vp + nfixed);
pushValueArray(obj, vp, vp + nfixed);
vp = obj->slots;
end = vp + (nslots - nfixed);
goto scan_value_array;
@ -955,15 +1131,33 @@ GCMarker::processMarkStackTop()
}
}
void
GCMarker::drainMarkStack()
bool
GCMarker::drainMarkStack(SliceBudget &budget)
{
#ifdef DEBUG
JSRuntime *rt = runtime;
rt->gcCheckCompartment = rt->gcCurrentCompartment;
struct AutoCheckCompartment {
JSRuntime *runtime;
AutoCheckCompartment(JSRuntime *rt) : runtime(rt) {
runtime->gcCheckCompartment = runtime->gcCurrentCompartment;
}
~AutoCheckCompartment() { runtime->gcCheckCompartment = NULL; }
} acc(rt);
#endif
if (budget.isOverBudget())
return false;
for (;;) {
while (!stack.isEmpty())
processMarkStackTop();
while (!stack.isEmpty()) {
processMarkStackTop(budget);
if (budget.isOverBudget()) {
saveValueRanges();
return false;
}
}
if (!hasDelayedChildren())
break;
@ -972,10 +1166,13 @@ GCMarker::drainMarkStack()
* above tracing. Don't do this until we're done with everything
* else.
*/
markDelayedChildren();
if (!markDelayedChildren(budget)) {
saveValueRanges();
return false;
}
}
rt->gcCheckCompartment = NULL;
return true;
}
void

Просмотреть файл

@ -45,8 +45,8 @@ namespace gc {
* defined for marking arrays of object pointers.
*/
#define DeclMarker(base, type) \
void Mark##base(JSTracer *trc, const HeapPtr<type> &thing, const char *name); \
void Mark##base##Root(JSTracer *trc, type *thing, const char *name); \
void Mark##base(JSTracer *trc, HeapPtr<type> *thing, const char *name); \
void Mark##base##Root(JSTracer *trc, type **thingp, const char *name); \
void Mark##base##Unbarriered(JSTracer *trc, type *thing, const char *name); \
void Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type> *thing, const char *name); \
void Mark##base##RootRange(JSTracer *trc, size_t len, type **thing, const char *name);
@ -83,10 +83,10 @@ MarkGCThingRoot(JSTracer *trc, void *thing, const char *name);
/*** ID Marking ***/
void
MarkId(JSTracer *trc, const HeapId &id, const char *name);
MarkId(JSTracer *trc, HeapId *id, const char *name);
void
MarkIdRoot(JSTracer *trc, const jsid &id, const char *name);
MarkIdRoot(JSTracer *trc, jsid *id, const char *name);
void
MarkIdRange(JSTracer *trc, size_t len, js::HeapId *vec, const char *name);
@ -116,10 +116,6 @@ MarkValueRootRange(JSTracer *trc, Value *begin, Value *end, const char *name)
/*** Special Cases ***/
/* TypeNewObject contains a HeapPtr<const Shape> that needs a unique cast. */
void
MarkShape(JSTracer *trc, const HeapPtr<const Shape> &thing, const char *name);
/* Direct value access used by the write barriers and the methodjit */
void
MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name);
@ -144,9 +140,13 @@ MarkChildren(JSTracer *trc, JSObject *obj);
* JS_TraceShapeCycleCollectorChildren.
*/
void
MarkCycleCollectorChildren(JSTracer *trc, const Shape *shape);
MarkCycleCollectorChildren(JSTracer *trc, Shape *shape);
void
PushArena(GCMarker *gcmarker, ArenaHeader *aheader);
/*** Generic ***/
/*
* The Mark() functions interface should only be used by code that must be
* templated. Other uses should use the more specific, type-named functions.
@ -159,13 +159,13 @@ Mark(JSTracer *trc, HeapValue *v, const char *name)
}
inline void
Mark(JSTracer *trc, const HeapPtr<JSObject> &o, const char *name)
Mark(JSTracer *trc, HeapPtr<JSObject> *o, const char *name)
{
MarkObject(trc, o, name);
}
inline void
Mark(JSTracer *trc, const HeapPtr<JSXML> &xml, const char *name)
Mark(JSTracer *trc, HeapPtr<JSXML> *xml, const char *name)
{
MarkXML(trc, xml, name);
}

Просмотреть файл

@ -2195,7 +2195,7 @@ TypeCompartment::nukeTypes(JSContext *cx)
#ifdef JS_THREADSAFE
AutoLockGC maybeLock;
if (!cx->runtime->gcMarkAndSweep)
if (!cx->runtime->gcRunning)
maybeLock.lock(cx->runtime);
#endif
@ -2618,7 +2618,7 @@ struct types::ObjectTableKey
typedef JSObject * Lookup;
static inline uint32_t hash(JSObject *obj) {
return (uint32_t) (JSID_BITS(obj->lastProperty()->propid()) ^
return (uint32_t) (JSID_BITS(obj->lastProperty()->propid().get()) ^
obj->slotSpan() ^ obj->numFixedSlots() ^
((uint32_t)(size_t)obj->getProto() >> 2));
}

Просмотреть файл

@ -668,7 +668,7 @@ struct TypeNewScript
* Shape to use for newly constructed objects. Reflects all definite
* properties the object will have.
*/
HeapPtr<const Shape> shape;
HeapPtrShape shape;
/*
* Order in which properties become initialized. We need this in case a

Просмотреть файл

@ -741,7 +741,7 @@ void
TypeScript::trace(JSTracer *trc)
{
if (hasScope() && global)
gc::MarkObject(trc, global, "script_global");
gc::MarkObject(trc, &global, "script_global");
/* Note: nesting does not keep anything alive. */
}
@ -1343,7 +1343,7 @@ TypeNewScript::writeBarrierPre(TypeNewScript *newScript)
JSCompartment *comp = newScript->fun->compartment();
if (comp->needsBarrier()) {
MarkObjectUnbarriered(comp->barrierTracer(), newScript->fun, "write barrier");
MarkShape(comp->barrierTracer(), newScript->shape, "write barrier");
MarkShape(comp->barrierTracer(), &newScript->shape, "write barrier");
}
#endif
}

Просмотреть файл

@ -1147,7 +1147,7 @@ js::AssertValidPropertyCacheHit(JSContext *cx,
jsbytecode *pc;
cx->stack.currentScript(&pc);
uint32_t sample = cx->runtime->gcNumber;
uint64_t sample = cx->runtime->gcNumber;
PropertyCacheEntry savedEntry = *entry;
PropertyName *name = GetNameFromBytecode(cx, pc, JSOp(*pc), js_CodeSpec[*pc]);
@ -1254,7 +1254,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
{
JSAutoResolveFlags rf(cx, RESOLVE_INFER);
gc::VerifyBarriers(cx, true);
gc::MaybeVerifyBarriers(cx, true);
JS_ASSERT(!cx->compartment->activeAnalysis);
@ -1289,7 +1289,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
# define DO_OP() JS_BEGIN_MACRO \
CHECK_PCCOUNT_INTERRUPTS(); \
js::gc::VerifyBarriers(cx); \
js::gc::MaybeVerifyBarriers(cx); \
JS_EXTENSION_(goto *jumpTable[op]); \
JS_END_MACRO
# define DO_NEXT_OP(n) JS_BEGIN_MACRO \
@ -1566,7 +1566,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
do_op:
CHECK_PCCOUNT_INTERRUPTS();
js::gc::VerifyBarriers(cx);
js::gc::MaybeVerifyBarriers(cx);
switchOp = intN(op) | switchMask;
do_switch:
switch (switchOp) {
@ -4424,6 +4424,6 @@ END_CASE(JSOP_ARRAYPUSH)
leave_on_safe_point:
#endif
gc::VerifyBarriers(cx, true);
gc::MaybeVerifyBarriers(cx, true);
return interpReturnOK;
}

Просмотреть файл

@ -89,7 +89,7 @@ static JSObject *iterator_iterator(JSContext *cx, JSObject *obj, JSBool keysonly
Class js::IteratorClass = {
"Iterator",
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_CACHED_PROTO(JSProto_Iterator),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
@ -148,9 +148,9 @@ void
NativeIterator::mark(JSTracer *trc)
{
for (HeapPtr<JSFlatString> *str = begin(); str < end(); str++)
MarkString(trc, *str, "prop");
MarkString(trc, str, "prop");
if (obj)
MarkObject(trc, obj, "obj");
MarkObject(trc, &obj, "obj");
}
static void
@ -1419,7 +1419,7 @@ generator_trace(JSTracer *trc, JSObject *obj)
Class js::GeneratorClass = {
"Generator",
JSCLASS_HAS_PRIVATE,
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */

Просмотреть файл

@ -414,8 +414,11 @@ js_TraceSharpMap(JSTracer *trc, JSSharpObjectMap *map)
* with otherwise unreachable objects. But this is way too complex
* to justify spending efforts.
*/
for (JSSharpTable::Range r = map->table.all(); !r.empty(); r.popFront())
MarkObjectRoot(trc, r.front().key, "sharp table entry");
for (JSSharpTable::Range r = map->table.all(); !r.empty(); r.popFront()) {
JSObject *tmp = r.front().key;
MarkObjectRoot(trc, &tmp, "sharp table entry");
JS_ASSERT(tmp == r.front().key);
}
}
#if JS_HAS_TOSOURCE
@ -2760,6 +2763,13 @@ NewObject(JSContext *cx, Class *clasp, types::TypeObject *type, JSObject *parent
if (!obj)
return NULL;
/*
* This will cancel an already-running incremental GC from doing any more
* slices, and it will prevent any future incremental GCs.
*/
if (clasp->trace && !(clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS))
cx->runtime->gcIncrementalEnabled = false;
Probes::createObject(cx, obj);
return obj;
}
@ -3472,7 +3482,7 @@ JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved &
a->slots = reserved.newaslots;
a->initSlotRange(0, reserved.bvals.begin(), bcap);
if (a->hasPrivate())
a->setPrivate(bpriv);
a->initPrivate(bpriv);
if (b->isNative())
b->shape_->setNumFixedSlots(reserved.newbfixed);
@ -3482,7 +3492,7 @@ JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved &
b->slots = reserved.newbslots;
b->initSlotRange(0, reserved.avals.begin(), acap);
if (b->hasPrivate())
b->setPrivate(apriv);
b->initPrivate(apriv);
/* Make sure the destructor for reserved doesn't free the slots. */
reserved.newaslots = NULL;

Просмотреть файл

@ -868,7 +868,7 @@ struct JSObject : js::gc::Cell
return type_;
}
const js::HeapPtr<js::types::TypeObject> &typeFromGC() const {
js::HeapPtr<js::types::TypeObject> &typeFromGC() {
/* Direct field access for use by GC. */
return type_;
}
@ -954,6 +954,7 @@ struct JSObject : js::gc::Cell
inline bool hasPrivate() const;
inline void *getPrivate() const;
inline void setPrivate(void *data);
inline void initPrivate(void *data);
/* Access private data for an object with a known number of fixed slots. */
inline void *getPrivate(size_t nfixed) const;
@ -1355,6 +1356,7 @@ struct JSObject : js::gc::Cell
static inline void writeBarrierPre(JSObject *obj);
static inline void writeBarrierPost(JSObject *obj, void *addr);
static inline void readBarrier(JSObject *obj);
inline void privateWriteBarrierPre(void **oldval);
inline void privateWriteBarrierPost(void **oldval);

Просмотреть файл

@ -119,6 +119,12 @@ JSObject::setPrivate(void *data)
privateWriteBarrierPost(pprivate);
}
inline void
JSObject::initPrivate(void *data)
{
    /*
     * Write the private field without running the pre/post write barriers
     * that setPrivate performs — intended for initializing writes where the
     * field holds no prior GC-visible value.
     */
    privateRef(numFixedSlots()) = data;
}
inline bool
JSObject::enumerate(JSContext *cx, JSIterateOp iterop, js::Value *statep, jsid *idp)
{
@ -602,21 +608,33 @@ JSObject::moveDenseArrayElements(uintN dstStart, uintN srcStart, uintN count)
JS_ASSERT(srcStart + count <= getDenseArrayInitializedLength());
/*
* Use a custom write barrier here since it's performance sensitive. We
* only want to barrier the elements that are being overwritten.
* Using memmove here would skip write barriers. Also, we need to consider
* an array containing [A, B, C], in the following situation:
*
* 1. Incremental GC marks slot 0 of array (i.e., A), then returns to JS code.
* 2. JS code moves slots 1..2 into slots 0..1, so it contains [B, C, C].
* 3. Incremental GC finishes by marking slots 1 and 2 (i.e., C).
*
* Since normal marking never happens on B, it is very important that the
* write barrier is invoked here on B, despite the fact that it exists in
* the array before and after the move.
*/
uintN markStart, markEnd;
if (dstStart > srcStart) {
markStart = js::Max(srcStart + count, dstStart);
markEnd = dstStart + count;
if (compartment()->needsBarrier()) {
if (dstStart < srcStart) {
js::HeapValue *dst = elements + dstStart;
js::HeapValue *src = elements + srcStart;
for (unsigned i = 0; i < count; i++, dst++, src++)
*dst = *src;
} else {
markStart = dstStart;
markEnd = js::Min(dstStart + count, srcStart);
js::HeapValue *dst = elements + dstStart + count - 1;
js::HeapValue *src = elements + srcStart + count - 1;
for (unsigned i = 0; i < count; i++, dst--, src--)
*dst = *src;
}
prepareElementRangeForOverwrite(markStart, markEnd);
} else {
memmove(elements + dstStart, elements + srcStart, count * sizeof(js::Value));
}
}
inline void
JSObject::moveDenseArrayElementsUnbarriered(uintN dstStart, uintN srcStart, uintN count)
@ -2126,6 +2144,18 @@ JSObject::writeBarrierPre(JSObject *obj)
#endif
}
inline void
JSObject::readBarrier(JSObject *obj)
{
#ifdef JSGC_INCREMENTAL
    /*
     * If the object's compartment currently needs barriers (an incremental
     * GC is in progress there), eagerly mark the object being read so it
     * cannot be missed by the ongoing marking.
     */
    JSCompartment *comp = obj->compartment();
    if (comp->needsBarrier()) {
        JS_ASSERT(!comp->rt->gcRunning);
        MarkObjectUnbarriered(comp->barrierTracer(), obj, "read barrier");
    }
#endif
}
inline void
JSObject::writeBarrierPost(JSObject *obj, void *addr)
{

Просмотреть файл

@ -282,7 +282,7 @@ PropertyCache::purge(JSContext *cx)
#ifdef JS_THREADSAFE
fprintf(fp, "thread %lu, ", (unsigned long) cx->thread->id);
#endif
fprintf(fp, "GC %u\n", cx->runtime->gcNumber);
fprintf(fp, "GC %lu\n", (unsigned long)cx->runtime->gcNumber);
# define P(mem) fprintf(fp, "%11s %10lu\n", #mem, (unsigned long)mem)
P(fills);

Просмотреть файл

@ -1311,7 +1311,7 @@ proxy_TypeOf(JSContext *cx, JSObject *proxy)
JS_FRIEND_DATA(Class) js::ObjectProxyClass = {
"Proxy",
Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(4),
Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(4),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */
@ -1367,7 +1367,7 @@ JS_FRIEND_DATA(Class) js::ObjectProxyClass = {
JS_FRIEND_DATA(Class) js::OuterWindowProxyClass = {
"Proxy",
Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(4),
Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(4),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */
@ -1445,7 +1445,7 @@ proxy_Construct(JSContext *cx, uintN argc, Value *vp)
JS_FRIEND_DATA(Class) js::FunctionProxyClass = {
"Proxy",
Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(6),
Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(6),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */

Просмотреть файл

@ -1062,7 +1062,7 @@ JSObject::replaceWithNewEquivalentShape(JSContext *cx, Shape *oldShape, Shape *n
{
JS_ASSERT_IF(oldShape != lastProperty(),
inDictionaryMode() &&
nativeLookup(cx, oldShape->maybePropid()) == oldShape);
nativeLookup(cx, oldShape->propidRef()) == oldShape);
JSObject *self = this;
@ -1086,7 +1086,7 @@ JSObject::replaceWithNewEquivalentShape(JSContext *cx, Shape *oldShape, Shape *n
PropertyTable &table = self->lastProperty()->table();
Shape **spp = oldShape->isEmptyShape()
? NULL
: table.search(oldShape->maybePropid(), false);
: table.search(oldShape->propidRef(), false);
/*
* Splice the new shape into the same position as the old shape, preserving

Просмотреть файл

@ -562,6 +562,10 @@ struct Shape : public js::gc::Cell
return parent;
}
HeapPtrShape &previousRef() {
return parent;
}
class Range {
protected:
friend struct Shape;
@ -770,8 +774,12 @@ struct Shape : public js::gc::Cell
slotInfo = slotInfo | ((count + 1) << LINEAR_SEARCHES_SHIFT);
}
jsid propid() const { JS_ASSERT(!isEmptyShape()); return maybePropid(); }
const HeapId &maybePropid() const { JS_ASSERT(!JSID_IS_VOID(propid_)); return propid_; }
const HeapId &propid() const {
JS_ASSERT(!isEmptyShape());
JS_ASSERT(!JSID_IS_VOID(propid_));
return propid_;
}
HeapId &propidRef() { JS_ASSERT(!JSID_IS_VOID(propid_)); return propid_; }
int16_t shortid() const { JS_ASSERT(hasShortID()); return maybeShortid(); }
int16_t maybeShortid() const { return shortid_; }
@ -995,7 +1003,7 @@ struct StackShape
StackShape(const Shape *shape)
: base(shape->base()->unowned()),
propid(shape->maybePropid()),
propid(const_cast<Shape *>(shape)->propidRef()),
slot_(shape->slotInfo & Shape::SLOT_MASK),
attrs(shape->attrs),
flags(shape->flags),
@ -1091,7 +1099,7 @@ Shape::search(JSContext *cx, Shape *start, jsid id, Shape ***pspp, bool adding)
}
for (Shape *shape = start; shape; shape = shape->parent) {
if (shape->maybePropid() == id)
if (shape->propidRef() == id)
return shape;
}

Просмотреть файл

@ -319,7 +319,7 @@ void
Bindings::trace(JSTracer *trc)
{
if (lastBinding)
MarkShape(trc, lastBinding, "shape");
MarkShape(trc, &lastBinding, "shape");
}
#ifdef JS_CRASH_DIAGNOSTICS

Просмотреть файл

@ -2180,6 +2180,7 @@ Class ArrayBuffer::slowClass = {
Class js::ArrayBufferClass = {
"ArrayBuffer",
JSCLASS_HAS_PRIVATE |
JSCLASS_IMPLEMENTS_BARRIERS |
Class::NON_NATIVE |
JSCLASS_HAS_RESERVED_SLOTS(ARRAYBUFFER_RESERVED_SLOTS) |
JSCLASS_HAS_CACHED_PROTO(JSProto_ArrayBuffer),
@ -2298,7 +2299,7 @@ JSFunctionSpec _typedArray::jsfuncs[] = { \
{ \
#_typedArray, \
JSCLASS_HAS_RESERVED_SLOTS(TypedArray::FIELD_MAX) | \
JSCLASS_HAS_PRIVATE | \
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | \
JSCLASS_FOR_OF_ITERATION | \
Class::NON_NATIVE, \
JS_PropertyStub, /* addProperty */ \

Просмотреть файл

@ -202,16 +202,20 @@ WatchpointMap::markIteratively(JSTracer *trc)
bool objectIsLive = !IsAboutToBeFinalized(e.key.object);
if (objectIsLive || e.value.held) {
if (!objectIsLive) {
MarkObject(trc, e.key.object, "held Watchpoint object");
HeapPtrObject tmp(e.key.object);
MarkObject(trc, &tmp, "held Watchpoint object");
JS_ASSERT(tmp == e.key.object);
marked = true;
}
const HeapId &id = e.key.id;
JS_ASSERT(JSID_IS_STRING(id) || JSID_IS_INT(id));
MarkId(trc, id, "WatchKey::id");
HeapId tmp(id.get());
MarkId(trc, &tmp, "WatchKey::id");
JS_ASSERT(tmp.get() == id.get());
if (e.value.closure && IsAboutToBeFinalized(e.value.closure)) {
MarkObject(trc, e.value.closure, "Watchpoint::closure");
MarkObject(trc, &e.value.closure, "Watchpoint::closure");
marked = true;
}
}
@ -224,13 +228,17 @@ WatchpointMap::markAll(JSTracer *trc)
{
for (Map::Range r = map.all(); !r.empty(); r.popFront()) {
Map::Entry &e = r.front();
MarkObject(trc, e.key.object, "held Watchpoint object");
HeapPtrObject tmpObj(e.key.object);
MarkObject(trc, &tmpObj, "held Watchpoint object");
JS_ASSERT(tmpObj == e.key.object);
const HeapId &id = e.key.id;
JS_ASSERT(JSID_IS_STRING(id) || JSID_IS_INT(id));
MarkId(trc, id, "WatchKey::id");
HeapId tmpId(id.get());
MarkId(trc, &tmpId, "WatchKey::id");
JS_ASSERT(tmpId.get() == id.get());
MarkObject(trc, e.value.closure, "Watchpoint::closure");
MarkObject(trc, &e.value.closure, "Watchpoint::closure");
}
}

Просмотреть файл

@ -62,7 +62,7 @@ bool
WeakMapBase::markAllIteratively(JSTracer *tracer)
{
bool markedAny = false;
JSRuntime *rt = tracer->context->runtime;
JSRuntime *rt = tracer->runtime;
for (WeakMapBase *m = rt->gcWeakMapList; m; m = m->next) {
if (m->markIteratively(tracer))
markedAny = true;
@ -73,7 +73,7 @@ WeakMapBase::markAllIteratively(JSTracer *tracer)
void
WeakMapBase::sweepAll(JSTracer *tracer)
{
JSRuntime *rt = tracer->context->runtime;
JSRuntime *rt = tracer->runtime;
for (WeakMapBase *m = rt->gcWeakMapList; m; m = m->next)
m->sweep(tracer);
}
@ -314,8 +314,16 @@ WeakMap_mark(JSTracer *trc, JSObject *obj)
static void
WeakMap_finalize(JSContext *cx, JSObject *obj)
{
ObjectValueMap *map = GetObjectMap(obj);
if (ObjectValueMap *map = GetObjectMap(obj)) {
map->check();
#ifdef DEBUG
map->~ObjectValueMap();
memset(map, 0xdc, sizeof(ObjectValueMap));
cx->free_(map);
#else
cx->delete_(map);
#endif
}
}
static JSBool
@ -331,7 +339,7 @@ WeakMap_construct(JSContext *cx, uintN argc, Value *vp)
Class js::WeakMapClass = {
"WeakMap",
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_CACHED_PROTO(JSProto_WeakMap),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */

Просмотреть файл

@ -127,7 +127,7 @@ class WeakMapBase {
// Add ourselves to the list if we are not already in the list. We can already
// be in the list if the weak map is marked more than once due to delayed marking.
if (next == WeakMapNotInList) {
JSRuntime *rt = tracer->context->runtime;
JSRuntime *rt = tracer->runtime;
next = rt->gcWeakMapList;
rt->gcWeakMapList = this;
}
@ -156,6 +156,8 @@ class WeakMapBase {
// Trace all delayed weak map bindings. Used by the cycle collector.
static void traceAllMappings(WeakMapTracer *tracer);
void check() { JS_ASSERT(next == WeakMapNotInList); }
// Remove everything from the live weak map list.
static void resetWeakMapList(JSRuntime *rt);
@ -204,7 +206,7 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, publ
void nonMarkingTrace(JSTracer *trc) {
ValueMarkPolicy vp(trc);
for (Range r = Base::all(); !r.empty(); r.popFront())
vp.mark(r.front().value);
vp.mark(&r.front().value);
}
bool markIteratively(JSTracer *trc) {
@ -216,7 +218,7 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, publ
Value &v = r.front().value;
/* If the entry is live, ensure its key and value are marked. */
if (kp.isMarked(k)) {
markedAny |= vp.mark(v);
markedAny |= vp.mark(&v);
}
JS_ASSERT_IF(kp.isMarked(k), vp.isMarked(v));
}
@ -264,10 +266,10 @@ class DefaultMarkPolicy<HeapValue> {
return !IsAboutToBeFinalized(x);
return true;
}
bool mark(HeapValue &x) {
if (isMarked(x))
bool mark(HeapValue *x) {
if (isMarked(*x))
return false;
js::gc::MarkValue(tracer, &x, "WeakMap entry");
js::gc::MarkValue(tracer, x, "WeakMap entry");
return true;
}
};
@ -281,8 +283,8 @@ class DefaultMarkPolicy<HeapPtrObject> {
bool isMarked(const HeapPtrObject &x) {
return !IsAboutToBeFinalized(x);
}
bool mark(HeapPtrObject &x) {
if (isMarked(x))
bool mark(HeapPtrObject *x) {
if (isMarked(*x))
return false;
js::gc::MarkObject(tracer, x, "WeakMap entry");
return true;
@ -298,8 +300,8 @@ class DefaultMarkPolicy<HeapPtrScript> {
bool isMarked(const HeapPtrScript &x) {
return !IsAboutToBeFinalized(x);
}
bool mark(HeapPtrScript &x) {
if (isMarked(x))
bool mark(HeapPtrScript *x) {
if (isMarked(*x))
return false;
js::gc::MarkScript(tracer, x, "WeakMap entry");
return true;

Просмотреть файл

@ -866,7 +866,7 @@ js_XMLArrayCursorTrace(JSTracer *trc, JSXMLArrayCursor<JSXML> *cursor)
{
for (; cursor; cursor = cursor->next) {
if (cursor->root)
MarkXML(trc, (const HeapPtr<JSXML> &)cursor->root, "cursor_root");
MarkXML(trc, &(HeapPtr<JSXML> &)cursor->root, "cursor_root");
}
}
@ -875,7 +875,7 @@ js_XMLArrayCursorTrace(JSTracer *trc, JSXMLArrayCursor<JSObject> *cursor)
{
for (; cursor; cursor = cursor->next) {
if (cursor->root)
MarkObject(trc, (const HeapPtr<JSObject> &)cursor->root, "cursor_root");
MarkObject(trc, &(HeapPtr<JSObject> &)cursor->root, "cursor_root");
}
}
@ -5369,7 +5369,7 @@ out:
JS_FRIEND_DATA(Class) js::XMLClass = {
js_XML_str,
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_CACHED_PROTO(JSProto_XML),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
@ -7328,15 +7328,15 @@ void
js_TraceXML(JSTracer *trc, JSXML *xml)
{
if (xml->object)
MarkObject(trc, xml->object, "object");
MarkObject(trc, &xml->object, "object");
if (xml->name)
MarkObject(trc, xml->name, "name");
MarkObject(trc, &xml->name, "name");
if (xml->parent)
MarkXML(trc, xml->parent, "xml_parent");
MarkXML(trc, &xml->parent, "xml_parent");
if (JSXML_HAS_VALUE(xml)) {
if (xml->xml_value)
MarkString(trc, xml->xml_value, "value");
MarkString(trc, &xml->xml_value, "value");
return;
}
@ -7345,9 +7345,9 @@ js_TraceXML(JSTracer *trc, JSXML *xml)
if (xml->xml_class == JSXML_CLASS_LIST) {
if (xml->xml_target)
MarkXML(trc, xml->xml_target, "target");
MarkXML(trc, &xml->xml_target, "target");
if (xml->xml_targetprop)
MarkObject(trc, xml->xml_targetprop, "targetprop");
MarkObject(trc, &xml->xml_targetprop, "targetprop");
} else {
MarkObjectRange(trc, xml->xml_namespaces.length,
xml->xml_namespaces.vector,
@ -7898,11 +7898,11 @@ xmlfilter_trace(JSTracer *trc, JSObject *obj)
return;
JS_ASSERT(filter->list);
MarkXML(trc, filter->list, "list");
MarkXML(trc, &filter->list, "list");
if (filter->result)
MarkXML(trc, filter->result, "result");
MarkXML(trc, &filter->result, "result");
if (filter->kid)
MarkXML(trc, filter->kid, "kid");
MarkXML(trc, &filter->kid, "kid");
/*
* We do not need to trace the cursor as that would be done when
@ -7922,7 +7922,7 @@ xmlfilter_finalize(JSContext *cx, JSObject *obj)
Class js_XMLFilterClass = {
"XMLFilter",
JSCLASS_HAS_PRIVATE | JSCLASS_IS_ANONYMOUS,
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_IS_ANONYMOUS,
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */

Просмотреть файл

@ -3924,7 +3924,7 @@ void
mjit::Compiler::interruptCheckHelper()
{
Jump jump;
if (cx->runtime->gcZeal() >= js::gc::ZealVerifierThreshold) {
if (cx->runtime->gcZeal() == js::gc::ZealVerifierValue) {
/* For barrier verification, always take the interrupt so we can verify. */
jump = masm.jump();
} else {
@ -6892,7 +6892,9 @@ mjit::Compiler::jsop_regexp()
!cx->typeInferenceEnabled() ||
analysis->localsAliasStack() ||
types::TypeSet::HasObjectFlags(cx, globalObj->getType(cx),
types::OBJECT_FLAG_REGEXP_FLAGS_SET)) {
types::OBJECT_FLAG_REGEXP_FLAGS_SET) ||
cx->runtime->gcIncrementalState == gc::MARK)
{
prepareStubCall(Uses(0));
masm.move(ImmPtr(obj), Registers::ArgReg1);
INLINE_STUBCALL(stubs::RegExp, REJOIN_FALLTHROUGH);
@ -6946,10 +6948,11 @@ mjit::Compiler::jsop_regexp()
}
/*
* Force creation of the RegExpShared in the script's RegExpObject
* so that we grab it in the getNewObject template copy. Note that
* JIT code is discarded on every GC, which permits us to burn in
* the pointer to the RegExpShared.
* Force creation of the RegExpShared in the script's RegExpObject so that
* we grab it in the getNewObject template copy. Note that JIT code is
* discarded on every GC, which permits us to burn in the pointer to the
* RegExpShared. We don't do this during an incremental
* GC, since we don't discard JIT code after every marking slice.
*/
if (!reobj->getShared(cx))
return false;

Просмотреть файл

@ -484,7 +484,7 @@ private:
bool hasGlobalReallocation;
bool oomInVector; // True if we have OOM'd appending to a vector.
bool overflowICSpace; // True if we added a constant pool in a reserved space.
uint32_t gcNumber;
uint64_t gcNumber;
enum { NoApplyTricks, LazyArgsObj } applyTricks;
PCLengthEntry *pcLengths;

Просмотреть файл

@ -402,7 +402,7 @@ struct RecompilationMonitor
unsigned frameExpansions;
/* If a GC occurs it may discard jit code on the stack. */
unsigned gcNumber;
uint64_t gcNumber;
RecompilationMonitor(JSContext *cx)
: cx(cx),

Просмотреть файл

@ -102,7 +102,7 @@ class PICStubCompiler : public BaseCompiler
JSScript *script;
ic::PICInfo &pic;
void *stub;
uint32_t gcNumber;
uint64_t gcNumber;
public:
bool canCallHook;

Просмотреть файл

@ -878,7 +878,7 @@ stubs::DebuggerStatement(VMFrame &f, jsbytecode *pc)
void JS_FASTCALL
stubs::Interrupt(VMFrame &f, jsbytecode *pc)
{
gc::VerifyBarriers(f.cx);
gc::MaybeVerifyBarriers(f.cx);
if (!js_HandleExecutionInterrupt(f.cx))
THROW();

Просмотреть файл

@ -1286,6 +1286,7 @@ static const struct ParamPair {
{"maxMallocBytes", JSGC_MAX_MALLOC_BYTES},
{"gcBytes", JSGC_BYTES},
{"gcNumber", JSGC_NUMBER},
{"sliceTimeBudget", JSGC_SLICE_TIME_BUDGET}
};
static JSBool
@ -1427,6 +1428,35 @@ ScheduleGC(JSContext *cx, uintN argc, jsval *vp)
*vp = JSVAL_VOID;
return JS_TRUE;
}
static JSBool
VerifyBarriers(JSContext *cx, uintN argc, jsval *vp)
{
gc::VerifyBarriers(cx);
*vp = JSVAL_VOID;
return JS_TRUE;
}
static JSBool
GCSlice(JSContext *cx, uintN argc, jsval *vp)
{
uint32_t budget;
if (argc != 1) {
JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL,
(argc < 1)
? JSSMSG_NOT_ENOUGH_ARGS
: JSSMSG_TOO_MANY_ARGS,
"gcslice");
return JS_FALSE;
}
if (!JS_ValueToECMAUint32(cx, vp[2], &budget))
return JS_FALSE;
GCDebugSlice(cx, budget);
*vp = JSVAL_VOID;
return JS_TRUE;
}
#endif /* JS_GC_ZEAL */
typedef struct JSCountHeapNode JSCountHeapNode;
@ -1473,7 +1503,7 @@ CountHeapNotify(JSTracer *trc, void **thingp, JSGCTraceKind kind)
if (node) {
countTracer->recycleList = node->next;
} else {
node = (JSCountHeapNode *) JS_malloc(trc->context, sizeof *node);
node = (JSCountHeapNode *) js_malloc(sizeof *node);
if (!node) {
countTracer->ok = JS_FALSE;
return;
@ -1575,7 +1605,7 @@ CountHeap(JSContext *cx, uintN argc, jsval *vp)
}
while ((node = countTracer.recycleList) != NULL) {
countTracer.recycleList = node->next;
JS_free(cx, node);
js_free(node);
}
JS_DHashTableFinish(&countTracer.visited);
@ -4001,6 +4031,8 @@ static JSFunctionSpec shell_functions[] = {
#ifdef JS_GC_ZEAL
JS_FN("gczeal", GCZeal, 2,0),
JS_FN("schedulegc", ScheduleGC, 1,0),
JS_FN("verifybarriers", VerifyBarriers, 0,0),
JS_FN("gcslice", GCSlice, 1,0),
#endif
JS_FN("internalConst", InternalConst, 1,0),
JS_FN("setDebug", SetDebug, 1,0),
@ -4114,6 +4146,8 @@ static const char *const shell_help_messages[] = {
" How zealous the garbage collector should be",
"schedulegc(num, [compartmentGC?])\n"
" Schedule a GC to happen after num allocations",
"verifybarriers() Start or end a run of the write barrier verifier",
"gcslice(n) Run an incremental GC slice that marks ~n objects",
#endif
"internalConst(name)\n"
" Query an internal constant for the engine. See InternalConst source for the\n"
@ -5457,7 +5491,7 @@ main(int argc, char **argv, char **envp)
if (!cx)
return 1;
JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_COMPARTMENT);
JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL);
JS_SetGCParameterForThread(cx, JSGC_MAX_CODE_CACHE_BYTES, 16 * 1024 * 1024);
/* Must be done before creating the global object */

Просмотреть файл

@ -1063,15 +1063,21 @@ Debugger::markKeysInCompartment(JSTracer *tracer)
const ObjectMap &objStorage = objects;
for (ObjectMap::Range r = objStorage.all(); !r.empty(); r.popFront()) {
const HeapPtrObject &key = r.front().key;
if (key->compartment() == comp && IsAboutToBeFinalized(key))
gc::MarkObject(tracer, key, "cross-compartment WeakMap key");
if (key->compartment() == comp && IsAboutToBeFinalized(key)) {
HeapPtrObject tmp(key);
gc::MarkObject(tracer, &tmp, "cross-compartment WeakMap key");
JS_ASSERT(tmp == key);
}
}
const ObjectMap &envStorage = environments;
for (ObjectMap::Range r = envStorage.all(); !r.empty(); r.popFront()) {
const HeapPtrObject &key = r.front().key;
if (key->compartment() == comp && IsAboutToBeFinalized(key))
js::gc::MarkObject(tracer, key, "cross-compartment WeakMap key");
if (key->compartment() == comp && IsAboutToBeFinalized(key)) {
HeapPtrObject tmp(key);
js::gc::MarkObject(tracer, &tmp, "cross-compartment WeakMap key");
JS_ASSERT(tmp == key);
}
}
typedef HashMap<HeapPtrScript, HeapPtrObject, DefaultHasher<HeapPtrScript>, RuntimeAllocPolicy>
@ -1079,8 +1085,11 @@ Debugger::markKeysInCompartment(JSTracer *tracer)
const ScriptMap &scriptStorage = scripts;
for (ScriptMap::Range r = scriptStorage.all(); !r.empty(); r.popFront()) {
const HeapPtrScript &key = r.front().key;
if (key->compartment() == comp && IsAboutToBeFinalized(key))
gc::MarkScript(tracer, key, "cross-compartment WeakMap key");
if (key->compartment() == comp && IsAboutToBeFinalized(key)) {
HeapPtrScript tmp(key);
gc::MarkScript(tracer, &tmp, "cross-compartment WeakMap key");
JS_ASSERT(tmp == key);
}
}
}
@ -1176,7 +1185,7 @@ Debugger::markAllIteratively(GCMarker *trc)
* - it isn't already marked
* - it actually has hooks that might be called
*/
const HeapPtrObject &dbgobj = dbg->toJSObject();
HeapPtrObject &dbgobj = dbg->toJSObjectRef();
if (comp && comp != dbgobj->compartment())
continue;
@ -1186,7 +1195,7 @@ Debugger::markAllIteratively(GCMarker *trc)
* obj could be reachable only via its live, enabled
* debugger hooks, which may yet be called.
*/
MarkObject(trc, dbgobj, "enabled Debugger");
MarkObject(trc, &dbgobj, "enabled Debugger");
markedAny = true;
dbgMarked = true;
}
@ -1199,9 +1208,8 @@ Debugger::markAllIteratively(GCMarker *trc)
* The debugger and the script are both live.
* Therefore the breakpoint handler is live.
*/
const HeapPtrObject &handler = bp->getHandler();
if (IsAboutToBeFinalized(handler)) {
MarkObject(trc, bp->getHandler(), "breakpoint handler");
if (IsAboutToBeFinalized(bp->getHandler())) {
MarkObject(trc, &bp->getHandlerRef(), "breakpoint handler");
markedAny = true;
}
}
@ -1224,7 +1232,7 @@ void
Debugger::trace(JSTracer *trc)
{
if (uncaughtExceptionHook)
MarkObject(trc, uncaughtExceptionHook, "hooks");
MarkObject(trc, &uncaughtExceptionHook, "hooks");
/*
* Mark Debugger.Frame objects. These are all reachable from JS, because the
@ -1235,9 +1243,9 @@ Debugger::trace(JSTracer *trc)
* frames.)
*/
for (FrameMap::Range r = frames.all(); !r.empty(); r.popFront()) {
const HeapPtrObject &frameobj = r.front().value;
HeapPtrObject &frameobj = r.front().value;
JS_ASSERT(frameobj->getPrivate());
MarkObject(trc, frameobj, "live Debugger.Frame");
MarkObject(trc, &frameobj, "live Debugger.Frame");
}
/* Trace the weak map from JSScript instances to Debugger.Script objects. */
@ -1315,7 +1323,9 @@ Debugger::finalize(JSContext *cx, JSObject *obj)
}
Class Debugger::jsclass = {
"Debugger", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUG_COUNT),
"Debugger",
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUG_COUNT),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Debugger::finalize,
NULL, /* reserved0 */
@ -1846,7 +1856,9 @@ DebuggerScript_trace(JSTracer *trc, JSObject *obj)
}
Class DebuggerScript_class = {
"Script", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGSCRIPT_COUNT),
"Script",
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGSCRIPT_COUNT),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
NULL, /* reserved0 */
@ -2948,7 +2960,9 @@ DebuggerObject_trace(JSTracer *trc, JSObject *obj)
}
Class DebuggerObject_class = {
"Object", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGOBJECT_COUNT),
"Object",
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGOBJECT_COUNT),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
NULL, /* reserved0 */
@ -3590,7 +3604,9 @@ DebuggerEnv_trace(JSTracer *trc, JSObject *obj)
}
Class DebuggerEnv_class = {
"Environment", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGENV_COUNT),
"Environment",
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGENV_COUNT),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
NULL, /* reserved0 */

Просмотреть файл

@ -236,6 +236,7 @@ class Debugger {
bool init(JSContext *cx);
inline const js::HeapPtrObject &toJSObject() const;
inline js::HeapPtrObject &toJSObjectRef();
static inline Debugger *fromJSObject(JSObject *obj);
static Debugger *fromChildJSObject(JSObject *obj);
@ -431,6 +432,7 @@ class Breakpoint {
Breakpoint *nextInDebugger();
Breakpoint *nextInSite();
const HeapPtrObject &getHandler() const { return handler; }
HeapPtrObject &getHandlerRef() { return handler; }
};
Debugger *
@ -455,6 +457,13 @@ Debugger::toJSObject() const
return object;
}
js::HeapPtrObject &
Debugger::toJSObjectRef()
{
JS_ASSERT(object);
return object;
}
Debugger *
Debugger::fromJSObject(JSObject *obj)
{

Просмотреть файл

@ -80,6 +80,14 @@ RegExpObject::getShared(JSContext *cx)
return createShared(cx);
}
inline void
RegExpObject::setShared(JSContext *cx, RegExpShared *shared)
{
if (shared)
shared->prepareForUse(cx);
JSObject::setPrivate(shared);
}
inline void
RegExpObject::setLastIndex(const Value &v)
{
@ -148,6 +156,12 @@ RegExpToShared(JSContext *cx, JSObject &obj)
return Proxy::regexp_toShared(cx, &obj);
}
inline void
RegExpShared::prepareForUse(JSContext *cx)
{
gcNumberWhenUsed = cx->runtime->gcNumber;
}
} /* namespace js */
#endif

Просмотреть файл

@ -62,7 +62,7 @@ RegExpObjectBuilder::RegExpObjectBuilder(JSContext *cx, RegExpObject *reobj)
: cx(cx), reobj_(reobj)
{
if (reobj_)
reobj_->setPrivate(NULL);
reobj_->setShared(cx, NULL);
}
bool
@ -74,7 +74,7 @@ RegExpObjectBuilder::getOrCreate()
JSObject *obj = NewBuiltinClassInstance(cx, &RegExpClass);
if (!obj)
return false;
obj->setPrivate(NULL);
obj->initPrivate(NULL);
reobj_ = &obj->asRegExp();
return true;
@ -88,7 +88,7 @@ RegExpObjectBuilder::getOrCreateClone(RegExpObject *proto)
JSObject *clone = NewObjectWithGivenProto(cx, &RegExpClass, proto, proto->getParent());
if (!clone)
return false;
clone->setPrivate(NULL);
clone->initPrivate(NULL);
reobj_ = &clone->asRegExp();
return true;
@ -103,7 +103,7 @@ RegExpObjectBuilder::build(JSAtom *source, RegExpShared &shared)
if (!reobj_->init(cx, source, shared.getFlags()))
return NULL;
reobj_->setPrivate(&shared);
reobj_->setShared(cx, &shared);
return reobj_;
}
@ -330,13 +330,18 @@ RegExpCode::execute(JSContext *cx, const jschar *chars, size_t length, size_t st
static void
regexp_trace(JSTracer *trc, JSObject *obj)
{
if (trc->runtime->gcRunning)
/*
* We have to check both conditions, since:
* 1. During TraceRuntime, gcRunning is set
* 2. When a write barrier executes, IS_GC_MARKING_TRACER is true.
*/
if (trc->runtime->gcRunning && IS_GC_MARKING_TRACER(trc))
obj->setPrivate(NULL);
}
Class js::RegExpClass = {
js_RegExp_str,
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(RegExpObject::RESERVED_SLOTS) |
JSCLASS_HAS_CACHED_PROTO(JSProto_RegExp),
JS_PropertyStub, /* addProperty */
@ -360,8 +365,8 @@ Class js::RegExpClass = {
regexp_trace
};
RegExpShared::RegExpShared(RegExpFlag flags)
: parenCount(0), flags(flags), activeUseCount(0)
RegExpShared::RegExpShared(JSRuntime *rt, RegExpFlag flags)
: parenCount(0), flags(flags), activeUseCount(0), gcNumberWhenUsed(rt->gcNumber)
{}
RegExpObject *
@ -402,7 +407,7 @@ RegExpObject::createShared(JSContext *cx)
if (!shared)
return NULL;
setPrivate(shared);
setShared(cx, shared);
return shared;
}
@ -616,11 +621,12 @@ RegExpCompartment::init(JSContext *cx)
}
void
RegExpCompartment::purge()
RegExpCompartment::sweep(JSRuntime *rt)
{
for (Map::Enum e(map_); !e.empty(); e.popFront()) {
/* See the comment on RegExpShared lifetime in RegExpObject.h. */
RegExpShared *shared = e.front().value;
if (shared->activeUseCount == 0) {
if (shared->activeUseCount == 0 && shared->gcNumberWhenUsed < rt->gcStartNumber) {
Foreground::delete_(shared);
e.removeFront();
}
@ -630,14 +636,14 @@ RegExpCompartment::purge()
inline RegExpShared *
RegExpCompartment::get(JSContext *cx, JSAtom *keyAtom, JSAtom *source, RegExpFlag flags, Type type)
{
DebugOnly<size_t> gcNumberBefore = cx->runtime->gcNumber;
DebugOnly<uint64_t> gcNumberBefore = cx->runtime->gcNumber;
Key key(keyAtom, flags, type);
Map::AddPtr p = map_.lookupForAdd(key);
if (p)
return p->value;
RegExpShared *shared = cx->runtime->new_<RegExpShared>(flags);
RegExpShared *shared = cx->runtime->new_<RegExpShared>(cx->runtime, flags);
if (!shared || !shared->compile(cx, source))
goto error;

Просмотреть файл

@ -169,6 +169,7 @@ class RegExpObject : public JSObject
inline RegExpShared &shared() const;
inline RegExpShared *maybeShared();
inline RegExpShared *getShared(JSContext *cx);
inline void setShared(JSContext *cx, RegExpShared *shared);
private:
friend class RegExpObjectBuilder;
@ -190,6 +191,9 @@ class RegExpObject : public JSObject
RegExpObject() MOZ_DELETE;
RegExpObject &operator=(const RegExpObject &reo) MOZ_DELETE;
/* Call setShared in preference to setPrivate. */
void setPrivate(void *priv) MOZ_DELETE;
};
class RegExpObjectBuilder
@ -293,7 +297,26 @@ class RegExpCode
} /* namespace detail */
/* The compiled representation of a regexp. */
/*
* A RegExpShared is the compiled representation of a regexp. A RegExpShared is
* pointed to by potentially multiple RegExpObjects. Additionally, C++ code may
* have pointers to RegExpShareds on the stack. The RegExpShareds are tracked in
* a RegExpCompartment hashtable, and most are destroyed on every GC.
*
* During a GC, the trace hook for RegExpObject clears any pointers to
* RegExpShareds so that there will be no dangling pointers when they are
* deleted. However, some RegExpShareds are not deleted:
*
* 1. Any RegExpShared with pointers from the C++ stack is not deleted.
* 2. Any RegExpShared that was installed in a RegExpObject during an
* incremental GC is not deleted. This is because the RegExpObject may have
* been traced through before the new RegExpShared was installed, in which
* case deleting the RegExpShared would turn the RegExpObject's reference
* into a dangling pointer
*
* The activeUseCount and gcNumberWhenUsed fields are used to track these two
* conditions.
*/
class RegExpShared
{
friend class RegExpCompartment;
@ -301,11 +324,12 @@ class RegExpShared
detail::RegExpCode code;
uintN parenCount;
RegExpFlag flags;
size_t activeUseCount;
size_t activeUseCount; /* See comment above. */
uint64_t gcNumberWhenUsed; /* See comment above. */
bool compile(JSContext *cx, JSAtom *source);
RegExpShared(RegExpFlag flags);
RegExpShared(JSRuntime *rt, RegExpFlag flags);
JS_DECLARE_ALLOCATION_FRIENDS_FOR_PRIVATE_CONSTRUCTOR;
public:
@ -338,6 +362,9 @@ class RegExpShared
RegExpShared &operator*() { JS_ASSERT(initialized()); return *re_; }
};
/* Called when a RegExpShared is installed into a RegExpObject. */
inline void prepareForUse(JSContext *cx);
/* Primary interface: run this regular expression on the given string. */
RegExpRunStatus
@ -388,7 +415,7 @@ class RegExpCompartment
~RegExpCompartment();
bool init(JSContext *cx);
void purge();
void sweep(JSRuntime *rt);
/* Return a regexp corresponding to the given (source, flags) pair. */
RegExpShared *get(JSContext *cx, JSAtom *source, RegExpFlag flags);

Просмотреть файл

@ -71,7 +71,7 @@ resc_trace(JSTracer *trc, JSObject *obj)
Class js::RegExpStaticsClass = {
"RegExpStatics",
JSCLASS_HAS_PRIVATE,
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */

Просмотреть файл

@ -205,11 +205,11 @@ class RegExpStatics
return get(0, 1) - get(0, 0) > 0;
}
void mark(JSTracer *trc) const {
void mark(JSTracer *trc) {
if (pendingInput)
MarkString(trc, pendingInput, "res->pendingInput");
MarkString(trc, &pendingInput, "res->pendingInput");
if (matchPairsInput)
MarkString(trc, matchPairsInput, "res->matchPairsInput");
MarkString(trc, &matchPairsInput, "res->matchPairsInput");
}
bool pairIsPresent(size_t pairNum) const {

Просмотреть файл

@ -532,6 +532,15 @@ StackSpace::mark(JSTracer *trc)
}
}
void
StackSpace::markActiveCompartments()
{
for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev())
MarkCompartmentActive(fp);
}
}
JS_FRIEND_API(bool)
StackSpace::ensureSpaceSlow(JSContext *cx, MaybeReportError report, Value *from, ptrdiff_t nvals,
JSCompartment *dest) const

Просмотреть файл

@ -1555,6 +1555,9 @@ class StackSpace
void mark(JSTracer *trc);
void markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
/* Called during GC: sets active flag on compartments with active frames. */
void markActiveCompartments();
/* We only report the committed size; uncommitted size is uninteresting. */
JS_FRIEND_API(size_t) sizeOfCommitted();
};

Просмотреть файл

@ -398,7 +398,7 @@ enum nsGCType {
};
%}
[uuid(686bb1d0-4711-11e1-b86c-0800200c9a66)]
[uuid(e92bf5e0-494c-11e1-b86c-0800200c9a66)]
interface nsIXPConnect : nsISupports
{
%{ C++
@ -734,6 +734,12 @@ interface nsIXPConnect : nsISupports
*/
void GarbageCollect(in PRUint32 reason, in PRUint32 kind);
/**
* Signals a good place to do an incremental GC slice, because the
* browser is drawing a frame.
*/
void NotifyDidPaint();
/**
* Define quick stubs on the given object, @a proto.
*

Просмотреть файл

@ -604,7 +604,8 @@ void XPCWrappedNativeTearOff::SetJSObject(JSObject* JSObj)
inline
XPCWrappedNativeTearOff::~XPCWrappedNativeTearOff()
{
NS_ASSERTION(!(GetInterface()||GetNative()||GetJSObjectPreserveColor()), "tearoff not empty in dtor");
NS_ASSERTION(!(GetInterface()||GetNative()||GetJSObjectPreserveColor()),
"tearoff not empty in dtor");
}
/***************************************************************************/

Просмотреть файл

@ -911,6 +911,8 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status)
#ifdef XPC_TRACK_DEFERRED_RELEASES
printf("XPC - End deferred Releases\n");
#endif
self->GetXPConnect()->ClearGCBeforeCC();
break;
}
default:
@ -1890,6 +1892,18 @@ AccumulateTelemetryCallback(int id, uint32_t sample)
case JS_TELEMETRY_GC_SWEEP_MS:
Telemetry::Accumulate(Telemetry::GC_SWEEP_MS, sample);
break;
case JS_TELEMETRY_GC_SLICE_MS:
Telemetry::Accumulate(Telemetry::GC_SLICE_MS, sample);
break;
case JS_TELEMETRY_GC_MMU_50:
Telemetry::Accumulate(Telemetry::GC_MMU_50, sample);
break;
case JS_TELEMETRY_GC_RESET:
Telemetry::Accumulate(Telemetry::GC_RESET, sample);
break;
case JS_TELEMETRY_GC_INCREMENTAL_DISABLED:
Telemetry::Accumulate(Telemetry::GC_INCREMENTAL_DISABLED, sample);
break;
}
}

Просмотреть файл

@ -406,8 +406,6 @@ nsXPConnect::Collect(PRUint32 reason, PRUint32 kind)
// To improve debugging, if DEBUG_CC is defined all JS objects are
// traversed.
mNeedGCBeforeCC = false;
XPCCallContext ccx(NATIVE_CALLER);
if (!ccx.IsValid())
return;
@ -424,6 +422,8 @@ nsXPConnect::Collect(PRUint32 reason, PRUint32 kind)
js::gcreason::Reason gcreason = (js::gcreason::Reason)reason;
if (kind == nsGCShrinking) {
js::ShrinkingGC(cx, gcreason);
} else if (kind == nsGCIncremental) {
js::IncrementalGC(cx, gcreason);
} else {
MOZ_ASSERT(kind == nsGCNormal);
js::GCForReason(cx, gcreason);
@ -2825,6 +2825,23 @@ nsXPConnect::GetTelemetryValue(JSContext *cx, jsval *rval)
return NS_OK;
}
NS_IMETHODIMP
nsXPConnect::NotifyDidPaint()
{
JSRuntime *rt = mRuntime->GetJSRuntime();
if (!js::WantGCSlice(rt))
return NS_OK;
XPCCallContext ccx(NATIVE_CALLER);
if (!ccx.IsValid())
return UnexpectedFailure(NS_ERROR_FAILURE);
JSContext *cx = ccx.GetJSContext();
js::NotifyDidPaint(cx);
return NS_OK;
}
/* These are here to be callable from a debugger */
JS_BEGIN_EXTERN_C
JS_EXPORT_API(void) DumpJSStack()

Просмотреть файл

@ -318,7 +318,8 @@ typedef nsDataHashtable<xpc::PtrAndPrincipalHashKey, JSCompartment *> XPCCompart
return (result || !src) ? NS_OK : NS_ERROR_OUT_OF_MEMORY
#define WRAPPER_SLOTS (JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(1))
#define WRAPPER_SLOTS (JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | \
JSCLASS_HAS_RESERVED_SLOTS(1))
#define INVALID_OBJECT ((JSObject *)1)
@ -520,6 +521,7 @@ public:
JSBool IsShuttingDown() const {return mShuttingDown;}
void EnsureGCBeforeCC() { mNeedGCBeforeCC = true; }
void ClearGCBeforeCC() { mNeedGCBeforeCC = false; }
nsresult GetInfoForIID(const nsIID * aIID, nsIInterfaceInfo** info);
nsresult GetInfoForName(const char * name, nsIInterfaceInfo** info);

Просмотреть файл

@ -75,7 +75,8 @@ xpc_CreateMTGlobalObject(JSContext *cx, JSClass *clasp,
#define XPCONNECT_GLOBAL_FLAGS \
JSCLASS_XPCONNECT_GLOBAL | JSCLASS_HAS_PRIVATE | \
JSCLASS_PRIVATE_IS_NSISUPPORTS | JSCLASS_GLOBAL_FLAGS_WITH_SLOTS(1)
JSCLASS_PRIVATE_IS_NSISUPPORTS | JSCLASS_IMPLEMENTS_BARRIERS | \
JSCLASS_GLOBAL_FLAGS_WITH_SLOTS(1)
void
TraceXPCGlobal(JSTracer *trc, JSObject *obj);
@ -182,8 +183,12 @@ xpc_UnmarkGrayObjectRecursive(JSObject* obj);
inline void
xpc_UnmarkGrayObject(JSObject *obj)
{
if (obj && xpc_IsGrayGCThing(obj))
if (obj) {
if (xpc_IsGrayGCThing(obj))
xpc_UnmarkGrayObjectRecursive(obj);
else if (js::IsIncrementalBarrierNeededOnObject(obj))
js::IncrementalReferenceBarrier(obj);
}
}
// If aVariant is an XPCVariant, this marks the object to be in aGeneration.

Просмотреть файл

@ -5428,6 +5428,24 @@ PresShell::ProcessSynthMouseMoveEvent(bool aFromScroll)
}
}
class nsAutoNotifyDidPaint
{
public:
nsAutoNotifyDidPaint(bool aWillSendDidPaint)
: mWillSendDidPaint(aWillSendDidPaint)
{
}
~nsAutoNotifyDidPaint()
{
if (!mWillSendDidPaint && nsContentUtils::XPConnect()) {
nsContentUtils::XPConnect()->NotifyDidPaint();
}
}
private:
bool mWillSendDidPaint;
};
void
PresShell::Paint(nsIView* aViewToPaint,
nsIWidget* aWidgetToPaint,
@ -5451,6 +5469,8 @@ PresShell::Paint(nsIView* aViewToPaint,
NS_ASSERTION(aViewToPaint, "null view");
NS_ASSERTION(aWidgetToPaint, "Can't paint without a widget");
nsAutoNotifyDidPaint notifyDidPaint(aWillSendDidPaint);
nsPresContext* presContext = GetPresContext();
AUTO_LAYOUT_PHASE_ENTRY_POINT(presContext, Paint);
@ -5744,6 +5764,7 @@ PresShell::RecordMouseLocation(nsGUIEvent* aEvent)
}
}
#ifdef MOZ_TOUCH
static void
EvictTouchPoint(nsCOMPtr<nsIDOMTouch>& aTouch)
{
@ -5791,6 +5812,7 @@ AppendToTouchList(const PRUint32& aKey, nsCOMPtr<nsIDOMTouch>& aData, void *aTou
touches->AppendElement(aData);
return PL_DHASH_NEXT;
}
#endif // MOZ_TOUCH
nsresult
PresShell::HandleEvent(nsIFrame *aFrame,
@ -7221,6 +7243,10 @@ PresShell::DidPaint()
if (rootPresContext == mPresContext) {
rootPresContext->UpdatePluginGeometry();
}
if (nsContentUtils::XPConnect()) {
nsContentUtils::XPConnect()->NotifyDidPaint();
}
}
bool

Просмотреть файл

@ -1939,10 +1939,11 @@ nsBlockFrame::ReflowDirtyLines(nsBlockReflowState& aState)
needToRecoverState = false;
// Update aState.mPrevChild as if we had reflowed all of the frames in
// this line. This is expensive in some cases, since it requires
// walking |GetNextSibling|.
// this line.
if (line->IsDirty())
aState.mPrevChild = line.prev()->LastChild();
NS_ASSERTION(line->mFirstChild->GetPrevSibling() ==
line.prev()->LastChild(), "unexpected line frames");
aState.mPrevChild = line->mFirstChild->GetPrevSibling();
}
// Now repair the line and update |aState.mY| by calling
@ -2132,9 +2133,11 @@ nsBlockFrame::ReflowDirtyLines(nsBlockReflowState& aState)
aState.ReconstructMarginAbove(line);
// Update aState.mPrevChild as if we had reflowed all of the frames in
// the last line. This is expensive in some cases, since it requires
// walking |GetNextSibling|.
aState.mPrevChild = line.prev()->LastChild();
// the last line.
NS_ASSERTION(line == line_end || line->mFirstChild->GetPrevSibling() ==
line.prev()->LastChild(), "unexpected line frames");
aState.mPrevChild =
line == line_end ? mFrames.LastChild() : line->mFirstChild->GetPrevSibling();
}
// Should we really have to do this?
@ -4396,7 +4399,12 @@ nsBlockFrame::PushLines(nsBlockReflowState& aState,
if (firstLine) {
mFrames.Clear();
} else {
mFrames.RemoveFramesAfter(aLineBefore->LastChild());
nsIFrame* f = overBegin->mFirstChild;
nsIFrame* lineBeforeLastFrame =
f ? f->GetPrevSibling() : aLineBefore->LastChild();
NS_ASSERTION(!f || lineBeforeLastFrame == aLineBefore->LastChild(),
"unexpected line frames");
mFrames.RemoveFramesAfter(lineBeforeLastFrame);
}
if (!overflowLines->empty()) {
// XXXbz If we switch overflow lines to nsFrameList, we should
@ -4713,7 +4721,9 @@ nsBlockFrame::AppendFrames(ChildListID aListID,
}
// Find the proper last-child for where the append should go
nsIFrame* lastKid = mLines.empty() ? nsnull : mLines.back()->LastChild();
nsIFrame* lastKid = mFrames.LastChild();
NS_ASSERTION((mLines.empty() ? nsnull : mLines.back()->LastChild()) ==
lastKid, "out-of-sync mLines / mFrames");
// Add frames after the last child
#ifdef NOISY_REFLOW_REASON
@ -5394,8 +5404,16 @@ nsBlockFrame::DoRemoveFrame(nsIFrame* aDeletedFrame, PRUint32 aFlags)
// If the frame being deleted is the last one on the line then
// optimize away the line->Contains(next-in-flow) call below.
bool isLastFrameOnLine = (1 == line->GetChildCount() ||
line->LastChild() == aDeletedFrame);
bool isLastFrameOnLine = 1 == line->GetChildCount();
if (!isLastFrameOnLine) {
line_iterator next = line.next();
nsIFrame* lastFrame = next != line_end ?
next->mFirstChild->GetPrevSibling() :
(searchingOverflowList ? line->LastChild() : mFrames.LastChild());
NS_ASSERTION(next == line_end || lastFrame == line->LastChild(),
"unexpected line frames");
isLastFrameOnLine = lastFrame == aDeletedFrame;
}
// Remove aDeletedFrame from the line
nsIFrame* nextFrame = aDeletedFrame->GetNextSibling();

Просмотреть файл

@ -366,7 +366,7 @@ nsColumnSetFrame::ChooseColumnStrategy(const nsHTMLReflowState& aReflowState)
bool isBalancing = colStyle->mColumnFill == NS_STYLE_COLUMN_FILL_BALANCE;
if (isBalancing) {
const PRUint32 MAX_NESTED_COLUMN_BALANCING = 5;
const PRUint32 MAX_NESTED_COLUMN_BALANCING = 2;
PRUint32 cnt = 1;
for (const nsHTMLReflowState* rs = aReflowState.parentReflowState;
rs && cnt < MAX_NESTED_COLUMN_BALANCING;

Просмотреть файл

@ -242,6 +242,7 @@ fails-if(Android) random-if(gtk2Widget) != text-language-01.xhtml text-language-
== text-layout-03.svg text-layout-03-ref.svg
== text-layout-04.svg text-layout-04-ref.svg
== text-layout-05.svg text-layout-05-ref.svg
== text-layout-06.svg text-layout-06-ref.svg
== text-scale-01.svg text-scale-01-ref.svg
== text-stroke-scaling-01.svg text-stroke-scaling-01-ref.svg
== stroke-dasharray-and-pathLength-01.svg pass.svg

Просмотреть файл

@ -0,0 +1,10 @@
<!--
Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/
-->
<svg xmlns="http://www.w3.org/2000/svg">
<title>Reference to check fill and stroke handling</title>
<text x="50" y="80" font-size="80" fill="blue" stroke="none">A B</text>
<text x="50" y="80" font-size="80" fill="none" stroke="yellow" stroke-width="2">A B</text>
</svg>

После

Ширина:  |  Высота:  |  Размер: 395 B

Просмотреть файл

@ -0,0 +1,9 @@
<!--
Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/
-->
<svg xmlns="http://www.w3.org/2000/svg">
<title>Testcase to check fill and stroke handling</title>
<text x="50" y="80" font-size="80" fill="blue" stroke="yellow" stroke-width="2">A B</text>
</svg>

После

Ширина:  |  Высота:  |  Размер: 320 B

Просмотреть файл

@ -374,7 +374,7 @@ nsSVGGlyphFrame::PaintSVG(nsSVGRenderState *aContext,
iter.SetInitialMatrix(gfx);
nsRefPtr<gfxPattern> strokePattern;
DrawMode drawMode = SetupCairoState(gfx, &strokePattern);
DrawMode drawMode = SetupCairoState(gfx, getter_AddRefs(strokePattern));
if (drawMode) {
DrawCharacters(&iter, gfx, drawMode, strokePattern);
@ -884,39 +884,45 @@ nsSVGGlyphFrame::GetBaselineOffset(float aMetricsScale)
}
DrawMode
nsSVGGlyphFrame::SetupCairoState(gfxContext *context, nsRefPtr<gfxPattern> *strokePattern) {
nsSVGGlyphFrame::SetupCairoState(gfxContext *aContext, gfxPattern **aStrokePattern)
{
DrawMode toDraw = DrawMode(0);
const nsStyleSVG* style = GetStyleSVG();
if (HasStroke()) {
gfxContextMatrixAutoSaveRestore matrixRestore(context);
context->IdentityMatrix();
gfxContextMatrixAutoSaveRestore matrixRestore(aContext);
aContext->IdentityMatrix();
toDraw = DrawMode(toDraw | gfxFont::GLYPH_STROKE);
SetupCairoStrokeHitGeometry(context);
SetupCairoStrokeHitGeometry(aContext);
float opacity = style->mStrokeOpacity;
nsSVGPaintServerFrame *ps = GetPaintServer(&style->mStroke,
nsSVGEffects::StrokeProperty());
nsRefPtr<gfxPattern> strokePattern;
if (ps) {
// Gradient or Pattern: can get pattern directly from frame
*strokePattern = ps->GetPaintServerPattern(this, opacity);
strokePattern = ps->GetPaintServerPattern(this, opacity);
}
NS_ASSERTION(*strokePattern, "No pattern returned from paint server");
} else {
if (!strokePattern) {
nscolor color;
nsSVGUtils::GetFallbackOrPaintColor(context, GetStyleContext(),
nsSVGUtils::GetFallbackOrPaintColor(aContext, GetStyleContext(),
&nsStyleSVG::mStroke, &opacity,
&color);
*strokePattern = new gfxPattern(gfxRGBA(NS_GET_R(color) / 255.0,
strokePattern = new gfxPattern(gfxRGBA(NS_GET_R(color) / 255.0,
NS_GET_G(color) / 255.0,
NS_GET_B(color) / 255.0,
NS_GET_A(color) / 255.0 * opacity));
}
*aStrokePattern = nsnull;
strokePattern.swap(*aStrokePattern);
}
if (SetupCairoFill(context)) {
if (SetupCairoFill(aContext)) {
toDraw = DrawMode(toDraw | gfxFont::GLYPH_FILL);
}

Просмотреть файл

@ -268,7 +268,8 @@ protected:
bool mTrimTrailingWhitespace;
private:
DrawMode SetupCairoState(gfxContext *context, nsRefPtr<gfxPattern> *strokePattern);
DrawMode SetupCairoState(gfxContext *aContext,
gfxPattern **aStrokePattern);
};
#endif

Просмотреть файл

@ -0,0 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg">
<text stroke="url(#p)">t</text>
<pattern id="p"/>
</svg>

После

Ширина:  |  Высота:  |  Размер: 102 B

Просмотреть файл

@ -123,3 +123,4 @@ load 709920-1.svg
load 709920-2.svg
load 713413-1.svg
load 722003-1.svg
load 725918-1.svg

Просмотреть файл

@ -1,5 +1,6 @@
function test() {
waitForExplicitFinish();
gBrowser.selectedTab = gBrowser.addTab();
SpecialPowers.setIntPref("ui.tooltipDelay", 0);
@ -60,8 +61,10 @@ function test() {
EventUtils.synthesizeMouseAtCenter(p1, { type: "mousemove" }, win);
}
gBrowser.selectedBrowser.addEventListener("load",
function () { setTimeout(onLoad, 0); }, true);
gBrowser.selectedBrowser.addEventListener("load", function loadListener() {
gBrowser.selectedBrowser.removeEventListener("load", loadListener, true);
setTimeout(onLoad, 0);
}, true);
content.location = "data:text/html," +
"<p id=\"p1\" title=\"tooltip is here\">This paragraph has a tooltip.</p>" +

Просмотреть файл

@ -656,6 +656,8 @@ pref("javascript.options.typeinference", true);
pref("javascript.options.mem.high_water_mark", 128);
pref("javascript.options.mem.max", -1);
pref("javascript.options.mem.gc_per_compartment", true);
pref("javascript.options.mem.gc_incremental", true);
pref("javascript.options.mem.gc_incremental_slice_ms", 10);
pref("javascript.options.mem.log", false);
pref("javascript.options.gc_on_memory_pressure", true);

Просмотреть файл

@ -4,6 +4,7 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <dlfcn.h>
#include "android/log.h"
#include "cutils/properties.h"
#include "base/basictypes.h"
@ -12,6 +13,7 @@
#include "nsStreamUtils.h"
#include "nsThreadUtils.h"
#include "nsRawStructs.h"
#include "prinit.h"
#define USE_GS2_LIBCAMERA
#define CameraHardwareInterface CameraHardwareInterface_SGS2
@ -48,11 +50,11 @@ using namespace mozilla;
class CameraHardwareInterface {
public:
typedef enum {
enum Type {
CAMERA_SGS2,
CAMERA_MAGURO,
CAMERA_DEFAULT
} Type;
};
static Type getType() {
char propValue[PROPERTY_VALUE_MAX];
@ -60,7 +62,7 @@ class CameraHardwareInterface {
if (!strcmp(propValue, "GT-I9100"))
return CAMERA_SGS2;
if (!strcmp(propValue, "MSM7627A_SKU1") || !strcmp(propValue, "MSM7627A_SKU3"))
if (!strcmp(propValue, "msm7627a_sku1") || !strcmp(propValue, "MSM7627A_SKU3"))
return CAMERA_MAGURO;
printf_stderr("CameraHardwareInterface : unsupported camera for device %s\n", propValue);
@ -89,30 +91,25 @@ class CameraHardwareInterface {
CameraHardwareInterface(PRUint32 aCamera = 0) { };
};
class DlopenWrapper {
public:
DlopenWrapper() : mHandle(nsnull) { };
// Intentionally not trying to dlclose() this handle. That's playing
// Russian roulette with security bugs.
static void* sCameraLib;
static PRCallOnceType sInitCameraLib;
DlopenWrapper(const char* aLibrary) : mHandle(nsnull) {
mHandle = dlopen(aLibrary, RTLD_LAZY);
};
static PRStatus
InitCameraLib()
{
sCameraLib = dlopen("/system/lib/libcamera.so", RTLD_LAZY);
// We might fail to open the camera lib. That's OK.
return PR_SUCCESS;
}
~DlopenWrapper() {
if (mHandle)
dlclose(mHandle);
};
bool opened() {
return mHandle != nsnull;
};
void* dlsym(const char* aFunction) {
return ::dlsym(mHandle, aFunction);
};
protected:
void* mHandle;
};
static void*
GetCameraLibHandle()
{
PR_CallOnce(&sInitCameraLib, InitCameraLib);
return sCameraLib;
}
template<class T> class CameraImpl : public CameraHardwareInterface {
public:
@ -121,14 +118,13 @@ template<class T> class CameraImpl : public CameraHardwareInterface {
typedef sp<T> (*HAL_openCameraHardware_MAGURO)(int, int);
CameraImpl(PRUint32 aCamera = 0) : mOk(false), mCamera(nsnull) {
DlopenWrapper wrapper("/system/lib/libcamera.so");
if (!wrapper.opened())
void* cameraLib = GetCameraLibHandle();
if (!cameraLib) {
printf_stderr("CameraImpl: Failed to dlopen() camera library.");
return;
}
mOk = true;
void *hal = wrapper.dlsym("HAL_openCameraHardware");
void *hal = dlsym(cameraLib, "HAL_openCameraHardware");
HAL_openCameraHardware_DEFAULT funct0;
HAL_openCameraHardware_SGS2 funct1;
HAL_openCameraHardware_MAGURO funct2;
@ -146,6 +142,11 @@ template<class T> class CameraImpl : public CameraHardwareInterface {
mCamera = funct0(aCamera);
break;
}
mOk = mCamera != nsnull;
if (!mOk) {
printf_stderr("CameraImpl: HAL_openCameraHardware() returned NULL (no camera interface).");
}
}
bool ok() {
@ -251,12 +252,11 @@ GonkCameraInputStream::DataCallback(int32_t aMsgType, const sp<IMemory>& aDataPt
PRUint32
GonkCameraInputStream::getNumberOfCameras() {
typedef int (*HAL_getNumberOfCamerasFunct)(void);
DlopenWrapper wrapper("/system/lib/libcamera.so");
if (!wrapper.opened())
void* cameraLib = GetCameraLibHandle();
if (!cameraLib)
return 0;
void *hal = wrapper.dlsym("HAL_getNumberOfCameras");
void *hal = dlsym(cameraLib, "HAL_getNumberOfCameras");
if (nsnull == hal)
return 0;

Просмотреть файл

@ -81,6 +81,10 @@ HISTOGRAM_BOOLEAN(GC_IS_COMPARTMENTAL, "Is it a compartmental GC?")
HISTOGRAM(GC_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC (ms)")
HISTOGRAM(GC_MARK_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC mark phase (ms)")
HISTOGRAM(GC_SWEEP_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC sweep phase (ms)")
HISTOGRAM(GC_SLICE_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running a JS GC slice (ms)")
HISTOGRAM(GC_MMU_50, 1, 100, 20, LINEAR, "Minimum percentage of time spent outside GC over any 50ms window")
HISTOGRAM_BOOLEAN(GC_RESET, "Was an incremental GC canceled?")
HISTOGRAM_BOOLEAN(GC_INCREMENTAL_DISABLED, "Is incremental GC permanently disabled?")
HISTOGRAM(TELEMETRY_PING, 1, 3000, 10, EXPONENTIAL, "Time taken to submit telemetry info (ms)")
HISTOGRAM_BOOLEAN(TELEMETRY_SUCCESS, "Successful telemetry submission")

Просмотреть файл

@ -116,6 +116,7 @@ window.onload = function () {
populatePreferencesSection();
populateExtensionsSection();
populateGraphicsSection();
populateJavaScriptSection();
}
function populateExtensionsSection() {
@ -382,6 +383,13 @@ function populateGraphicsSection() {
]);
}
// Fills in the "JavaScript" section of about:support.
// Queries nsIDOMWindowUtils for whether incremental GC is enabled and writes
// "1" (enabled) or "0" (disabled) into the #javascript-incremental-gc cell.
// NOTE(review): assumes the chrome window implements nsIInterfaceRequestor —
// true for about:support, but verify if this is ever reused elsewhere.
function populateJavaScriptSection() {
let enabled = window.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIDOMWindowUtils)
.isIncrementalGCEnabled();
document.getElementById("javascript-incremental-gc").textContent = enabled ? "1" : "0";
}
function getPrefValue(aName) {
let value = "";
let type = Services.prefs.getPrefType(aName);

Просмотреть файл

@ -243,6 +243,24 @@
</tbody>
</table>
<!-- - - - - - - - - - - - - - - - - - - - - -->
<h2 class="major-section">
&aboutSupport.jsTitle;
</h2>
<table>
<tbody>
<tr>
<th class="column">
&aboutSupport.jsIncrementalGC;
</th>
<td id="javascript-incremental-gc">
</td>
</tr>
</tbody>
</table>
</div>
</body>

Просмотреть файл

@ -56,6 +56,7 @@ _BROWSER_TEST_FILES = \
browser_bug594509.js \
browser_Geometry.js \
browser_save_resend_postdata.js \
browser_browserDrop.js \
browser_Services.js \
$(NULL)

Просмотреть файл

@ -0,0 +1,61 @@
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
// Browser-chrome mochitest entry point. Opens a scratch tab, overrides the
// browser's droppedLinkHandler, and synthesizes a series of plain-text drops
// to verify that javascript: and data: URLs never reach the dropped-link
// handler while ordinary URLs do.
function test() {
waitForExplicitFinish();
// Use a fresh tab so the handler override and "drop" listener below cannot
// leak into other tests; the tab is removed by the cleanup function.
let newTab = gBrowser.selectedTab = gBrowser.addTab();
registerCleanupFunction(function () {
gBrowser.removeTab(newTab);
});
// ChromeUtils.js supplies synthesizeDrop() for faking drag-and-drop.
let scriptLoader = Cc["@mozilla.org/moz/jssubscript-loader;1"].
getService(Ci.mozIJSSubScriptLoader);
let chromeUtils = {};
scriptLoader.loadSubScript("chrome://mochikit/content/tests/SimpleTest/ChromeUtils.js", chromeUtils);
let browser = gBrowser.selectedBrowser;
// Number of times the dropped-link handler actually fired.
var linkHandlerActivated = 0;
// Don't worry about clobbering the droppedLinkHandler, since we're closing
// this tab after the test anyways
browser.droppedLinkHandler = function dlh(e, url, name) {
linkHandlerActivated++;
// Any javascript: or data: URL reaching this handler is a failure.
ok(!/(javascript|data)/i.test(url), "javascript link should not be dropped");
}
var receivedDropCount = 0;
// Counts every "drop" event; once all synthesized drops have arrived,
// verifies the link handler ran only for the valid (http-like) URLs.
function dropListener() {
receivedDropCount++;
if (receivedDropCount == triggeredDropCount) {
// Wait for the browser's system-phase event handler to run.
executeSoon(function () {
is(linkHandlerActivated, validDropCount,
"link handler was called correct number of times");
finish();
})
}
}
browser.addEventListener("drop", dropListener, false);
registerCleanupFunction(function () {
browser.removeEventListener("drop", dropListener, false);
});
var triggeredDropCount = 0;
var validDropCount = 0;
// Synthesizes a plain-text drop of `text` onto the browser. `valid` marks
// whether the dropped-link handler is expected to fire for this URL.
function drop(text, valid) {
triggeredDropCount++;
if (valid)
validDropCount++;
// Schedule asynchronously so the synthesized drops arrive in order.
executeSoon(function () {
chromeUtils.synthesizeDrop(browser, browser, [[{type: "text/plain", data: text}]], "copy", window, EventUtils);
});
}
drop("mochi.test/first", true);
drop("javascript:'bad'");
// Mixed-case scheme: the filter must be case-insensitive.
drop("jAvascript:'also bad'");
drop("mochi.test/second", true);
drop("data:text/html,bad");
drop("mochi.test/third", true);
}

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше