diff --git a/content/media/test/Makefile.in b/content/media/test/Makefile.in index 3abd920a8834..c45e30c64813 100644 --- a/content/media/test/Makefile.in +++ b/content/media/test/Makefile.in @@ -109,7 +109,6 @@ _TEST_FILES = \ test_bug495300.html \ test_bug686942.html \ test_can_play_type.html \ - test_closing_connections.html \ test_constants.html \ test_controls.html \ test_currentTime.html \ @@ -176,6 +175,8 @@ endif # test_mixed_principals.html # Disabled since we don't play Wave files standalone, for now # test_audioDocumentTitle.html +# Bug 634564: +# test_closing_connections.html \ # sample files _TEST_FILES += \ diff --git a/dom/base/nsDOMWindowUtils.cpp b/dom/base/nsDOMWindowUtils.cpp index 36d1a3bf32ee..805def0a7cac 100644 --- a/dom/base/nsDOMWindowUtils.cpp +++ b/dom/base/nsDOMWindowUtils.cpp @@ -2091,6 +2091,13 @@ nsDOMWindowUtils::GetFileReferences(const nsAString& aDatabaseName, return NS_OK; } +NS_IMETHODIMP +nsDOMWindowUtils::IsIncrementalGCEnabled(JSContext* cx, bool* aResult) +{ + *aResult = js::IsIncrementalGCEnabled(JS_GetRuntime(cx)); + return NS_OK; +} + NS_IMETHODIMP nsDOMWindowUtils::StartPCCountProfiling(JSContext* cx) { diff --git a/dom/base/nsJSEnvironment.cpp b/dom/base/nsJSEnvironment.cpp index 4988e92e12a3..7e8fa870fccd 100644 --- a/dom/base/nsJSEnvironment.cpp +++ b/dom/base/nsJSEnvironment.cpp @@ -135,6 +135,9 @@ static PRLogModuleInfo* gJSDiagnostics; // doing the first GC. #define NS_FIRST_GC_DELAY 10000 // ms +// Maximum amount of time that should elapse between incremental GC slices #define NS_INTERSLICE_GC_DELAY 100 // ms + // The amount of time we wait between a request to CC (after GC ran) // and doing the actual CC. #define NS_CC_DELAY 5000 // ms @@ -154,6 +157,9 @@ static nsITimer *sCCTimer; static PRTime sLastCCEndTime; static bool sGCHasRun; +static bool sCCLockedOut; + +static js::GCSliceCallback sPrevGCSliceCallback; // The number of currently pending document loads. This count isn't // guaranteed to always reflect reality and can't easily as we don't @@ -3274,6 +3280,11 @@ nsJSContext::CycleCollectNow(nsICycleCollectorListener *aListener, return; } + if (sCCLockedOut) { + // We're in the middle of an incremental GC; finish it first. + nsJSContext::GarbageCollectNow(js::gcreason::CC_FORCED, nsGCNormal); + } + SAMPLE_LABEL("GC", "CycleCollectNow"); NS_TIME_FUNCTION_MIN(1.0); @@ -3357,7 +3368,7 @@ GCTimerFired(nsITimer *aTimer, void *aClosure) NS_RELEASE(sGCTimer); uintptr_t reason = reinterpret_cast<uintptr_t>(aClosure); - nsJSContext::GarbageCollectNow(static_cast<js::gcreason::Reason>(reason), nsGCNormal); + nsJSContext::GarbageCollectNow(static_cast<js::gcreason::Reason>(reason), nsGCIncremental); } void @@ -3375,6 +3386,9 @@ CCTimerFired(nsITimer *aTimer, void *aClosure) if (sDidShutdown) { return; } + if (sCCLockedOut) { + return; + } ++sCCTimerFireCount; if (sCCTimerFireCount < (NS_CC_DELAY / NS_CC_SKIPPABLE_DELAY)) { PRUint32 suspected = nsCycleCollector_suspectedCount(); @@ -3443,7 +3457,7 @@ nsJSContext::LoadEnd() // static void -nsJSContext::PokeGC(js::gcreason::Reason aReason) +nsJSContext::PokeGC(js::gcreason::Reason aReason, int aDelay) { if (sGCTimer) { // There's already a timer for GC'ing, just return @@ -3460,9 +3474,11 @@ nsJSContext::PokeGC(js::gcreason::Reason aReason) static bool first = true; sGCTimer->InitWithFuncCallback(GCTimerFired, reinterpret_cast<void *>(aReason), - first - ? NS_FIRST_GC_DELAY - : NS_GC_DELAY, + aDelay + ? aDelay + : (first + ? 
NS_FIRST_GC_DELAY + : NS_GC_DELAY), nsITimer::TYPE_ONE_SHOT); first = false; @@ -3549,11 +3565,11 @@ nsJSContext::GC(js::gcreason::Reason aReason) } static void -DOMGCFinishedCallback(JSRuntime *rt, JSCompartment *comp, const char *status) +DOMGCSliceCallback(JSRuntime *aRt, js::GCProgress aProgress, const js::GCDescription &aDesc) { NS_ASSERTION(NS_IsMainThread(), "GCs must run on the main thread"); - if (sPostGCEventsToConsole) { + if (aDesc.logMessage && sPostGCEventsToConsole) { PRTime now = PR_Now(); PRTime delta = 0; if (sFirstCollectionTime) { @@ -3565,45 +3581,66 @@ DOMGCFinishedCallback(JSRuntime *rt, JSCompartment *comp, const char *status) NS_NAMED_LITERAL_STRING(kFmt, "GC(T+%.1f) %s"); nsString msg; msg.Adopt(nsTextFormatter::smprintf(kFmt.get(), - double(delta) / PR_USEC_PER_SEC, status)); + double(delta) / PR_USEC_PER_SEC, + aDesc.logMessage)); nsCOMPtr<nsIConsoleService> cs = do_GetService(NS_CONSOLESERVICE_CONTRACTID); if (cs) { cs->LogStringMessage(msg.get()); } } - sCCollectedWaitingForGC = 0; - sCleanupSinceLastGC = false; + // Prevent cycle collections during incremental GC. + if (aProgress == js::GC_CYCLE_BEGIN) { + sCCLockedOut = true; + } else if (aProgress == js::GC_CYCLE_END) { + sCCLockedOut = false; + } - if (sGCTimer) { - // If we were waiting for a GC to happen, kill the timer. + // The GC has more work to do, so schedule another GC slice. + if (aProgress == js::GC_SLICE_END) { nsJSContext::KillGCTimer(); + nsJSContext::KillCCTimer(); - // If this is a compartment GC, restart it. We still want - // a full GC to happen. Compartment GCs usually happen as a - // result of last-ditch or MaybeGC. In both cases its - // probably a time of heavy activity and we want to delay - // the full GC, but we do want it to happen eventually. - if (comp) { - nsJSContext::PokeGC(js::gcreason::POST_COMPARTMENT); + nsJSContext::PokeGC(js::gcreason::INTER_SLICE_GC, NS_INTERSLICE_GC_DELAY); + } - // We poked the GC, so we can kill any pending CC here. - nsJSContext::KillCCTimer(); + if (aProgress == js::GC_CYCLE_END) { + sCCollectedWaitingForGC = 0; + sCleanupSinceLastGC = false; + + if (sGCTimer) { + // If we were waiting for a GC to happen, kill the timer. + nsJSContext::KillGCTimer(); + + // If this is a compartment GC, restart it. We still want + // a full GC to happen. Compartment GCs usually happen as a + // result of last-ditch or MaybeGC. In both cases it's + // probably a time of heavy activity and we want to delay + // the full GC, but we do want it to happen eventually. + if (aDesc.isCompartment) { + nsJSContext::PokeGC(js::gcreason::POST_COMPARTMENT); + + // We poked the GC, so we can kill any pending CC here. + nsJSContext::KillCCTimer(); + } + } else { + // If this was a full GC, poke the CC to run soon. + if (!aDesc.isCompartment) { + sGCHasRun = true; + nsJSContext::MaybePokeCC(); + } } - } else { - // If this was a full GC, poke the CC to run soon. - if (!comp) { - sGCHasRun = true; - nsJSContext::MaybePokeCC(); + + // If we didn't end up scheduling a GC, make sure that we release GC buffers + // soon after canceling previous shrinking attempt. 
+ nsJSContext::KillShrinkGCBuffersTimer(); + if (!sGCTimer) { + nsJSContext::PokeShrinkGCBuffers(); + } } - // If we didn't end up scheduling a GC, make sure that we release GC buffers - // soon after canceling previous shrinking attempt - nsJSContext::KillShrinkGCBuffersTimer(); - if (!sGCTimer) { - nsJSContext::PokeShrinkGCBuffers(); - } + if (sPrevGCSliceCallback) + (*sPrevGCSliceCallback)(aRt, aProgress, aDesc); } // Script object management - note duplicate implementation @@ -3697,6 +3734,7 @@ nsJSRuntime::Startup() // initialize all our statics, so that we can restart XPCOM sGCTimer = sCCTimer = nsnull; sGCHasRun = false; + sCCLockedOut = false; sLastCCEndTime = 0; sPendingLoadCount = 0; sLoadingInProgress = false; @@ -3768,10 +3806,27 @@ SetMemoryMaxPrefChangedCallback(const char* aPrefName, void* aClosure) static int SetMemoryGCModePrefChangedCallback(const char* aPrefName, void* aClosure) { - bool enableCompartmentGC = Preferences::GetBool(aPrefName); - JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_MODE, enableCompartmentGC - ? JSGC_MODE_COMPARTMENT - : JSGC_MODE_GLOBAL); + bool enableCompartmentGC = Preferences::GetBool("javascript.options.mem.gc_per_compartment"); + bool enableIncrementalGC = Preferences::GetBool("javascript.options.mem.gc_incremental"); + JSGCMode mode; + if (enableIncrementalGC) { + mode = JSGC_MODE_INCREMENTAL; + } else if (enableCompartmentGC) { + mode = JSGC_MODE_COMPARTMENT; + } else { + mode = JSGC_MODE_GLOBAL; + } + JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_MODE, mode); + return 0; +} + +static int +SetMemoryGCSliceTimePrefChangedCallback(const char* aPrefName, void* aClosure) +{ + PRInt32 pref = Preferences::GetInt(aPrefName, -1); + // handle overflow and negative pref values + if (pref > 0 && pref < 100000) + JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_SLICE_TIME_BUDGET, pref); return 0; } @@ -3858,7 +3913,7 @@ nsJSRuntime::Init() // Let's make sure that our main thread is the same as the xpcom main thread. 
NS_ASSERTION(NS_IsMainThread(), "bad"); - ::JS_SetGCFinishedCallback(sRuntime, DOMGCFinishedCallback); + sPrevGCSliceCallback = js::SetGCSliceCallback(sRuntime, DOMGCSliceCallback); JSSecurityCallbacks *callbacks = JS_GetRuntimeSecurityCallbacks(sRuntime); NS_ASSERTION(callbacks, "SecMan should have set security callbacks!"); @@ -3903,6 +3958,16 @@ nsJSRuntime::Init() SetMemoryGCModePrefChangedCallback("javascript.options.mem.gc_per_compartment", nsnull); + Preferences::RegisterCallback(SetMemoryGCModePrefChangedCallback, + "javascript.options.mem.gc_incremental"); + SetMemoryGCModePrefChangedCallback("javascript.options.mem.gc_incremental", + nsnull); + + Preferences::RegisterCallback(SetMemoryGCSliceTimePrefChangedCallback, + "javascript.options.mem.gc_incremental_slice_ms"); + SetMemoryGCSliceTimePrefChangedCallback("javascript.options.mem.gc_incremental_slice_ms", + nsnull); + nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService(); if (!obs) return NS_ERROR_FAILURE; diff --git a/dom/base/nsJSEnvironment.h b/dom/base/nsJSEnvironment.h index 43b503e976bd..fca6c2a918c5 100644 --- a/dom/base/nsJSEnvironment.h +++ b/dom/base/nsJSEnvironment.h @@ -188,7 +188,7 @@ public: static void CycleCollectNow(nsICycleCollectorListener *aListener = nsnull, PRInt32 aExtraForgetSkippableCalls = 0); - static void PokeGC(js::gcreason::Reason aReason); + static void PokeGC(js::gcreason::Reason aReason, int aDelay = 0); static void KillGCTimer(); static void PokeShrinkGCBuffers(); diff --git a/dom/interfaces/base/nsIDOMWindowUtils.idl b/dom/interfaces/base/nsIDOMWindowUtils.idl index 2f070969e861..bf6b7c8d9a66 100644 --- a/dom/interfaces/base/nsIDOMWindowUtils.idl +++ b/dom/interfaces/base/nsIDOMWindowUtils.idl @@ -70,7 +70,7 @@ interface nsIDOMFile; interface nsIFile; interface nsIDOMTouch; -[scriptable, uuid(ab6e9c71-8aa1-40bb-8bf9-65e16429055f)] +[scriptable, uuid(73b48170-55d5-11e1-b86c-0800200c9a66)] interface nsIDOMWindowUtils : nsISupports { /** @@ -992,6 +992,12 @@ interface nsIDOMWindowUtils : nsISupports { [optional] out long aDBRefCnt, [optional] out long aSliceRefCnt); + /** + * Return whether incremental GC is enabled. It can be disabled, e.g., when + * a binary add-on registers a JSClass that lacks write-barrier support. + */ + [implicit_jscontext] + boolean isIncrementalGCEnabled(); + /** * Begin opcode-level profiling of all JavaScript execution in the window's * runtime. 
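The hookup above — sPrevGCSliceCallback = js::SetGCSliceCallback(sRuntime, DOMGCSliceCallback) — shows the chaining pattern the new slice-callback API expects: SetGCSliceCallback returns the previously registered callback, and a well-behaved consumer forwards every notification to it so several listeners can coexist on one runtime. A minimal sketch of such a consumer, using only the js::GCSliceCallback API this patch adds to jsfriendapi.h (the My*/Install* names are illustrative, not part of the patch):

#include <stdio.h>
#include "jsfriendapi.h"

static js::GCSliceCallback gPrevGCSliceCallback = NULL;

static void
MyGCSliceCallback(JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc)
{
    // GC_CYCLE_BEGIN/GC_CYCLE_END bracket a whole collection;
    // GC_SLICE_BEGIN/GC_SLICE_END bracket the incremental slices in between.
    // desc.logMessage is only non-NULL at GC_CYCLE_END, once the statistics
    // string has been formatted.
    if (progress == js::GC_CYCLE_END && desc.logMessage)
        fprintf(stderr, "GC (%s) finished: %s\n",
                desc.isCompartment ? "compartment" : "global", desc.logMessage);

    // Forward to whoever was registered before us, exactly as
    // DOMGCSliceCallback above and jsds_GCSliceCallbackProc below do.
    if (gPrevGCSliceCallback)
        (*gPrevGCSliceCallback)(rt, progress, desc);
}

void
InstallMyGCSliceCallback(JSRuntime *rt)
{
    gPrevGCSliceCallback = js::SetGCSliceCallback(rt, MyGCSliceCallback);
}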
diff --git a/dom/plugins/base/nsJSNPRuntime.cpp b/dom/plugins/base/nsJSNPRuntime.cpp index a05a68833be2..4f81b25aec00 100644 --- a/dom/plugins/base/nsJSNPRuntime.cpp +++ b/dom/plugins/base/nsJSNPRuntime.cpp @@ -179,7 +179,7 @@ CreateNPObjectMember(NPP npp, JSContext *cx, JSObject *obj, NPObject *npobj, static JSClass sNPObjectJSWrapperClass = { NPRUNTIME_JSCLASS_NAME, - JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE | JSCLASS_NEW_ENUMERATE, + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE | JSCLASS_NEW_ENUMERATE, NPObjWrapper_AddProperty, NPObjWrapper_DelProperty, NPObjWrapper_GetProperty, NPObjWrapper_SetProperty, (JSEnumerateOp)NPObjWrapper_newEnumerate, diff --git a/dom/src/events/nsJSEventListener.cpp b/dom/src/events/nsJSEventListener.cpp index cac6b24467ee..b02dd3bf982c 100644 --- a/dom/src/events/nsJSEventListener.cpp +++ b/dom/src/events/nsJSEventListener.cpp @@ -233,6 +233,8 @@ nsJSEventListener::HandleEvent(nsIDOMEvent* aEvent) "JSEventListener has wrong script context?"); #endif nsCOMPtr<nsIVariant> vrv; + xpc_UnmarkGrayObject(mScopeObject); + xpc_UnmarkGrayObject(mHandler); rv = mContext->CallEventHandler(mTarget, mScopeObject, mHandler, iargv, getter_AddRefs(vrv)); diff --git a/dom/workers/ListenerManager.cpp b/dom/workers/ListenerManager.cpp index 94f27947854a..1cc7c930b110 100644 --- a/dom/workers/ListenerManager.cpp +++ b/dom/workers/ListenerManager.cpp @@ -107,6 +107,9 @@ struct Listener : PRCList static void Remove(JSContext* aCx, Listener* aListener) { + if (js::IsIncrementalBarrierNeeded(aCx)) + js::IncrementalValueBarrier(aListener->mListenerVal); + PR_REMOVE_LINK(aListener); JS_free(aCx, aListener); } diff --git a/dom/workers/Worker.cpp b/dom/workers/Worker.cpp index 9b589cf2cee0..e90f99f18020 100644 --- a/dom/workers/Worker.cpp +++ b/dom/workers/Worker.cpp @@ -300,7 +300,7 @@ private: JSClass Worker::sClass = { "Worker", - JSCLASS_HAS_PRIVATE, + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS, JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub, JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize, NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL @@ -415,7 +415,7 @@ private: JSClass ChromeWorker::sClass = { "ChromeWorker", - JSCLASS_HAS_PRIVATE, + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS, JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub, JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize, NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL diff --git a/dom/workers/WorkerScope.cpp b/dom/workers/WorkerScope.cpp index 864521c0e832..828c1354b1b6 100644 --- a/dom/workers/WorkerScope.cpp +++ b/dom/workers/WorkerScope.cpp @@ -799,7 +799,7 @@ private: JSClass DedicatedWorkerGlobalScope::sClass = { "DedicatedWorkerGlobalScope", - JSCLASS_GLOBAL_FLAGS | JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE, + JSCLASS_GLOBAL_FLAGS | JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE, JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub, JS_EnumerateStub, reinterpret_cast<JSResolveOp>(Resolve), JS_ConvertStub, Finalize, NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL diff --git a/dom/workers/XMLHttpRequest.cpp b/dom/workers/XMLHttpRequest.cpp index 4d1ed06f7fd0..b30bfc920380 100644 --- a/dom/workers/XMLHttpRequest.cpp +++ b/dom/workers/XMLHttpRequest.cpp @@ -220,7 +220,7 @@ private: JSClass XMLHttpRequestUpload::sClass = { "XMLHttpRequestUpload", - JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT), + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | 
JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT), JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub, JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize, NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL @@ -769,7 +769,7 @@ private: JSClass XMLHttpRequest::sClass = { "XMLHttpRequest", - JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT), + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT), JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub, JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize, NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL diff --git a/js/jsd/jsd_xpc.cpp b/js/jsd/jsd_xpc.cpp index bdd63e0f2e76..f8698fbdc39d 100644 --- a/js/jsd/jsd_xpc.cpp +++ b/js/jsd/jsd_xpc.cpp @@ -107,8 +107,8 @@ #define JSD_AUTOREG_ENTRY "JSDebugger Startup Observer" #define JSD_STARTUP_ENTRY "JSDebugger Startup Observer" -static JSBool -jsds_GCCallbackProc (JSContext *cx, JSGCStatus status); +static void +jsds_GCSliceCallbackProc (JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc); /******************************************************************************* * global vars @@ -128,9 +128,9 @@ PRUint32 gContextCount = 0; PRUint32 gFrameCount = 0; #endif -static jsdService *gJsds = 0; -static JSGCCallback gLastGCProc = jsds_GCCallbackProc; -static JSGCStatus gGCStatus = JSGC_END; +static jsdService *gJsds = 0; +static js::GCSliceCallback gPrevGCSliceCallback = jsds_GCSliceCallbackProc; +static bool gGCRunning = false; static struct DeadScript { PRCList links; @@ -460,11 +460,8 @@ jsds_FilterHook (JSDContext *jsdc, JSDThreadState *state) *******************************************************************************/ static void -jsds_NotifyPendingDeadScripts (JSContext *cx) +jsds_NotifyPendingDeadScripts (JSRuntime *rt) { -#ifdef CAUTIOUS_SCRIPTHOOK - JSRuntime *rt = JS_GetRuntime(cx); -#endif jsdService *jsds = gJsds; nsCOMPtr hook; @@ -511,31 +508,23 @@ jsds_NotifyPendingDeadScripts (JSContext *cx) } } -static JSBool -jsds_GCCallbackProc (JSContext *cx, JSGCStatus status) +static void +jsds_GCSliceCallbackProc (JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc) { -#ifdef DEBUG_verbose - printf ("new gc status is %i\n", status); -#endif - if (status == JSGC_END) { - /* just to guard against reentering. */ - gGCStatus = JSGC_BEGIN; + if (progress == js::GC_CYCLE_END || progress == js::GC_SLICE_END) { + NS_ASSERTION(gGCRunning, "GC slice callback was missed"); + while (gDeadScripts) - jsds_NotifyPendingDeadScripts (cx); + jsds_NotifyPendingDeadScripts (rt); + + gGCRunning = false; + } else { + NS_ASSERTION(!gGCRunning, "should not re-enter GC"); + gGCRunning = true; } - gGCStatus = status; - if (gLastGCProc && !gLastGCProc (cx, status)) { - /* - * If gLastGCProc returns false, then the GC will abort without making - * another callback with status=JSGC_END, so set the status to JSGC_END - * here. 
- */ - gGCStatus = JSGC_END; - return JS_FALSE; - } - - return JS_TRUE; + if (gPrevGCSliceCallback) + (*gPrevGCSliceCallback)(rt, progress, desc); } static uintN @@ -751,7 +740,7 @@ jsds_ScriptHookProc (JSDContext* jsdc, JSDScript* jsdscript, JSBool creating, jsdis->Invalidate(); - if (gGCStatus == JSGC_END) { + if (!gGCRunning) { nsCOMPtr hook; gJsds->GetScriptHook(getter_AddRefs(hook)); if (!hook) @@ -2580,9 +2569,9 @@ jsdService::ActivateDebugger (JSRuntime *rt) mRuntime = rt; - if (gLastGCProc == jsds_GCCallbackProc) + if (gPrevGCSliceCallback == jsds_GCSliceCallbackProc) /* condition indicates that the callback proc has not been set yet */ - gLastGCProc = JS_SetGCCallbackRT (rt, jsds_GCCallbackProc); + gPrevGCSliceCallback = js::SetGCSliceCallback (rt, jsds_GCSliceCallbackProc); mCx = JSD_DebuggerOnForUser (rt, NULL, NULL); if (!mCx) @@ -2652,19 +2641,14 @@ jsdService::Off (void) return NS_ERROR_NOT_INITIALIZED; if (gDeadScripts) { - if (gGCStatus != JSGC_END) + if (gGCRunning) return NS_ERROR_NOT_AVAILABLE; JSContext *cx = JSD_GetDefaultJSContext(mCx); while (gDeadScripts) - jsds_NotifyPendingDeadScripts (cx); + jsds_NotifyPendingDeadScripts (JS_GetRuntime(cx)); } - /* - if (gLastGCProc != jsds_GCCallbackProc) - JS_SetGCCallbackRT (mRuntime, gLastGCProc); - */ - DeactivateDebugger(); #ifdef DEBUG @@ -3374,7 +3358,7 @@ jsdService::~jsdService() mThrowHook = nsnull; mTopLevelHook = nsnull; mFunctionHook = nsnull; - gGCStatus = JSGC_END; + gGCRunning = false; Off(); gJsds = nsnull; } diff --git a/js/src/Makefile.in b/js/src/Makefile.in index 1abd09f1621d..86751a1131a0 100644 --- a/js/src/Makefile.in +++ b/js/src/Makefile.in @@ -119,7 +119,6 @@ CPPSRCS = \ jsfun.cpp \ jsgc.cpp \ jsgcmark.cpp \ - jsgcstats.cpp \ jscrashreport.cpp \ jshash.cpp \ jsinfer.cpp \ @@ -193,7 +192,6 @@ INSTALLED_HEADERS = \ jsfriendapi.h \ jsgc.h \ jscell.h \ - jsgcstats.h \ jshash.h \ jslock.h \ json.h \ diff --git a/js/src/builtin/MapObject.cpp b/js/src/builtin/MapObject.cpp index bf7f5f3778e7..61832a61b63e 100644 --- a/js/src/builtin/MapObject.cpp +++ b/js/src/builtin/MapObject.cpp @@ -146,7 +146,7 @@ HashableValue::equals(const HashableValue &other) const Class MapObject::class_ = { "Map", - JSCLASS_HAS_PRIVATE | + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_CACHED_PROTO(JSProto_Map), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ @@ -297,7 +297,7 @@ js_InitMapClass(JSContext *cx, JSObject *obj) Class SetObject::class_ = { "Set", - JSCLASS_HAS_PRIVATE | + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_CACHED_PROTO(JSProto_Set), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ diff --git a/js/src/ctypes/CTypes.cpp b/js/src/ctypes/CTypes.cpp index 4ef88ee463e7..4ed0cd9ef51f 100644 --- a/js/src/ctypes/CTypes.cpp +++ b/js/src/ctypes/CTypes.cpp @@ -255,7 +255,7 @@ static JSClass sCDataProtoClass = { static JSClass sCTypeClass = { "CType", - JSCLASS_HAS_RESERVED_SLOTS(CTYPE_SLOTS), + JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(CTYPE_SLOTS), JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub, JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CType::Finalize, NULL, NULL, CType::ConstructData, CType::ConstructData, NULL, @@ -272,7 +272,7 @@ static JSClass sCDataClass = { static JSClass sCClosureClass = { "CClosure", - JSCLASS_HAS_RESERVED_SLOTS(CCLOSURE_SLOTS), + JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(CCLOSURE_SLOTS), JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, 
JS_StrictPropertyStub, JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CClosure::Finalize, NULL, NULL, NULL, NULL, NULL, NULL, CClosure::Trace, NULL diff --git a/js/src/gc/Barrier-inl.h b/js/src/gc/Barrier-inl.h index 5fdcad67baf1..e8ed435b71b7 100644 --- a/js/src/gc/Barrier-inl.h +++ b/js/src/gc/Barrier-inl.h @@ -266,6 +266,31 @@ HeapId::operator=(const HeapId &v) return *this; } +inline const Value & +ReadBarrieredValue::get() const +{ + if (value.isObject()) + JSObject::readBarrier(&value.toObject()); + else if (value.isString()) + JSString::readBarrier(value.toString()); + else + JS_ASSERT(!value.isMarkable()); + + return value; +} + +inline +ReadBarrieredValue::operator const Value &() const +{ + return get(); +} + +inline JSObject & +ReadBarrieredValue::toObject() const +{ + return get().toObject(); +} + } /* namespace js */ #endif /* jsgc_barrier_inl_h___ */ diff --git a/js/src/gc/Barrier.h b/js/src/gc/Barrier.h index 1f96b29d0908..0919a9aac984 100644 --- a/js/src/gc/Barrier.h +++ b/js/src/gc/Barrier.h @@ -456,6 +456,20 @@ class ReadBarriered operator MarkablePtr() const { return MarkablePtr(value); } }; +class ReadBarrieredValue +{ + Value value; + + public: + ReadBarrieredValue() : value(UndefinedValue()) {} + ReadBarrieredValue(const Value &value) : value(value) {} + + inline const Value &get() const; + inline operator const Value &() const; + + inline JSObject &toObject() const; +}; + } #endif /* jsgc_barrier_h___ */ diff --git a/js/src/gc/Statistics.cpp b/js/src/gc/Statistics.cpp index ba17a9722464..32084e9c83c5 100644 --- a/js/src/gc/Statistics.cpp +++ b/js/src/gc/Statistics.cpp @@ -38,9 +38,10 @@ * ***** END LICENSE BLOCK ***** */ #include -#include +#include #include "jscntxt.h" +#include "jscompartment.h" #include "jscrashformat.h" #include "jscrashreport.h" #include "jsprf.h" @@ -69,78 +70,114 @@ ExplainReason(gcreason::Reason reason) } } -Statistics::ColumnInfo::ColumnInfo(const char *title, double t, double total) - : title(title) +void +Statistics::fmt(const char *f, ...) { - JS_snprintf(str, sizeof(str), "%.1f", t); - JS_snprintf(totalStr, sizeof(totalStr), "%.1f", total); - width = 6; -} + va_list va; + size_t off = strlen(buffer); -Statistics::ColumnInfo::ColumnInfo(const char *title, double t) - : title(title) -{ - JS_snprintf(str, sizeof(str), "%.1f", t); - strcpy(totalStr, "n/a"); - width = 6; + va_start(va, f); + JS_vsnprintf(buffer + off, BUFFER_SIZE - off, f, va); + va_end(va); } -Statistics::ColumnInfo::ColumnInfo(const char *title, unsigned int data) - : title(title) -{ - JS_snprintf(str, sizeof(str), "%d", data); - strcpy(totalStr, "n/a"); - width = 4; -} - -Statistics::ColumnInfo::ColumnInfo(const char *title, const char *data) - : title(title) -{ - JS_ASSERT(strlen(data) < sizeof(str)); - strcpy(str, data); - strcpy(totalStr, "n/a "); - width = 0; -} - -static const int NUM_COLUMNS = 17; - void -Statistics::makeTable(ColumnInfo *cols) +Statistics::fmtIfNonzero(const char *name, double t) { - int i = 0; + if (t) { + if (needComma) + fmt(", "); + fmt("%s: %.1f", name, t); + needComma = true; + } +} - cols[i++] = ColumnInfo("Type", compartment ? 
"Comp" : "Glob"); +void +Statistics::formatPhases(int64_t *times) +{ + needComma = false; + fmtIfNonzero("mark", t(times[PHASE_MARK])); + fmtIfNonzero("mark-roots", t(times[PHASE_MARK_ROOTS])); + fmtIfNonzero("mark-delayed", t(times[PHASE_MARK_DELAYED])); + fmtIfNonzero("mark-other", t(times[PHASE_MARK_OTHER])); + fmtIfNonzero("sweep", t(times[PHASE_SWEEP])); + fmtIfNonzero("sweep-obj", t(times[PHASE_SWEEP_OBJECT])); + fmtIfNonzero("sweep-string", t(times[PHASE_SWEEP_STRING])); + fmtIfNonzero("sweep-script", t(times[PHASE_SWEEP_SCRIPT])); + fmtIfNonzero("sweep-shape", t(times[PHASE_SWEEP_SHAPE])); + fmtIfNonzero("discard-code", t(times[PHASE_DISCARD_CODE])); + fmtIfNonzero("discard-analysis", t(times[PHASE_DISCARD_ANALYSIS])); + fmtIfNonzero("xpconnect", t(times[PHASE_XPCONNECT])); + fmtIfNonzero("deallocate", t(times[PHASE_DESTROY])); +} - cols[i++] = ColumnInfo("Total", t(PHASE_GC), total(PHASE_GC)); - cols[i++] = ColumnInfo("Wait", beginDelay(PHASE_MARK, PHASE_GC)); - cols[i++] = ColumnInfo("Mark", t(PHASE_MARK), total(PHASE_MARK)); - cols[i++] = ColumnInfo("Sweep", t(PHASE_SWEEP), total(PHASE_SWEEP)); - cols[i++] = ColumnInfo("FinObj", t(PHASE_SWEEP_OBJECT), total(PHASE_SWEEP_OBJECT)); - cols[i++] = ColumnInfo("FinStr", t(PHASE_SWEEP_STRING), total(PHASE_SWEEP_STRING)); - cols[i++] = ColumnInfo("FinScr", t(PHASE_SWEEP_SCRIPT), total(PHASE_SWEEP_SCRIPT)); - cols[i++] = ColumnInfo("FinShp", t(PHASE_SWEEP_SHAPE), total(PHASE_SWEEP_SHAPE)); - cols[i++] = ColumnInfo("DisCod", t(PHASE_DISCARD_CODE), total(PHASE_DISCARD_CODE)); - cols[i++] = ColumnInfo("DisAnl", t(PHASE_DISCARD_ANALYSIS), total(PHASE_DISCARD_ANALYSIS)); - cols[i++] = ColumnInfo("XPCnct", t(PHASE_XPCONNECT), total(PHASE_XPCONNECT)); - cols[i++] = ColumnInfo("Destry", t(PHASE_DESTROY), total(PHASE_DESTROY)); - cols[i++] = ColumnInfo("End", endDelay(PHASE_GC, PHASE_DESTROY)); +/* Except for the first and last, slices of less than 12ms are not reported. */ +static const int64_t SLICE_MIN_REPORT_TIME = 12 * PRMJ_USEC_PER_MSEC; - cols[i++] = ColumnInfo("+Chu", counts[STAT_NEW_CHUNK]); - cols[i++] = ColumnInfo("-Chu", counts[STAT_DESTROY_CHUNK]); +const char * +Statistics::formatData() +{ + buffer[0] = 0x00; - cols[i++] = ColumnInfo("Reason", ExplainReason(triggerReason)); + int64_t total = 0, longest = 0; - JS_ASSERT(i == NUM_COLUMNS); + for (SliceData *slice = slices.begin(); slice != slices.end(); slice++) { + total += slice->duration(); + if (slice->duration() > longest) + longest = slice->duration(); + } + + double mmu20 = computeMMU(20 * PRMJ_USEC_PER_MSEC); + double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC); + + fmt("TotalTime: %.1fms, Type: %s", t(total), compartment ? 
"compartment" : "global"); + fmt(", MMU(20ms): %d%%, MMU(50ms): %d%%", int(mmu20 * 100), int(mmu50 * 100)); + + if (slices.length() > 1) + fmt(", MaxPause: %.1f", t(longest)); + else + fmt(", Reason: %s", ExplainReason(slices[0].reason)); + + if (wasReset) + fmt(", ***RESET***"); + + fmt(", +chunks: %d, -chunks: %d\n", counts[STAT_NEW_CHUNK], counts[STAT_DESTROY_CHUNK]); + + if (slices.length() > 1) { + for (size_t i = 0; i < slices.length(); i++) { + int64_t width = slices[i].duration(); + if (i != 0 && i != slices.length() - 1 && width < SLICE_MIN_REPORT_TIME) + continue; + + fmt(" Slice %d @ %.1fms (Pause: %.1f, Reason: %s): ", + i, + t(slices[i].end - slices[0].start), + t(width), + ExplainReason(slices[i].reason)); + formatPhases(slices[i].phaseTimes); + fmt("\n"); + } + + fmt(" Totals: "); + } + + formatPhases(phaseTimes); + fmt("\n"); + + return buffer; } Statistics::Statistics(JSRuntime *rt) : runtime(rt), - triggerReason(gcreason::NO_REASON) + startupTime(PRMJ_Now()), + fp(NULL), + fullFormat(false), + compartment(NULL), + wasReset(false), + needComma(false) { + PodArrayZero(phaseTotals); PodArrayZero(counts); - PodArrayZero(totals); - - startupTime = PRMJ_Now(); char *env = getenv("MOZ_GCTIMER"); if (!env || strcmp(env, "none") == 0) { @@ -159,14 +196,6 @@ Statistics::Statistics(JSRuntime *rt) fp = fopen(env, "a"); JS_ASSERT(fp); - - fprintf(fp, " AppTime"); - - ColumnInfo cols[NUM_COLUMNS]; - makeTable(cols); - for (int i = 0; i < NUM_COLUMNS; i++) - fprintf(fp, ", %*s", cols[i].width, cols[i].title); - fprintf(fp, "\n"); } } @@ -174,13 +203,9 @@ Statistics::~Statistics() { if (fp) { if (fullFormat) { - fprintf(fp, "------>TOTAL"); - - ColumnInfo cols[NUM_COLUMNS]; - makeTable(cols); - for (int i = 0; i < NUM_COLUMNS && cols[i].totalStr[0]; i++) - fprintf(fp, ", %*s", cols[i].width, cols[i].totalStr); - fprintf(fp, "\n"); + buffer[0] = 0x00; + formatPhases(phaseTotals); + fprintf(fp, "TOTALS\n%s\n\n-------\n", buffer); } if (fp != stdout && fp != stderr) @@ -188,120 +213,65 @@ Statistics::~Statistics() } } -struct GCCrashData -{ - int isRegen; - int isCompartment; -}; - -void -Statistics::beginGC(JSCompartment *comp, gcreason::Reason reason) -{ - compartment = comp; - - PodArrayZero(phaseStarts); - PodArrayZero(phaseEnds); - PodArrayZero(phaseTimes); - - triggerReason = reason; - - beginPhase(PHASE_GC); - Probes::GCStart(); - - GCCrashData crashData; - crashData.isCompartment = !!compartment; - crash::SaveCrashData(crash::JS_CRASH_TAG_GC, &crashData, sizeof(crashData)); -} - double -Statistics::t(Phase phase) +Statistics::t(int64_t t) { - return double(phaseTimes[phase]) / PRMJ_USEC_PER_MSEC; + return double(t) / PRMJ_USEC_PER_MSEC; } -double -Statistics::total(Phase phase) +int64_t +Statistics::gcDuration() { - return double(totals[phase]) / PRMJ_USEC_PER_MSEC; -} - -double -Statistics::beginDelay(Phase phase1, Phase phase2) -{ - return double(phaseStarts[phase1] - phaseStarts[phase2]) / PRMJ_USEC_PER_MSEC; -} - -double -Statistics::endDelay(Phase phase1, Phase phase2) -{ - return double(phaseEnds[phase1] - phaseEnds[phase2]) / PRMJ_USEC_PER_MSEC; -} - -void -Statistics::statsToString(char *buffer, size_t size) -{ - JS_ASSERT(size); - buffer[0] = 0x00; - - ColumnInfo cols[NUM_COLUMNS]; - makeTable(cols); - - size_t pos = 0; - for (int i = 0; i < NUM_COLUMNS; i++) { - int len = strlen(cols[i].title) + 1 + strlen(cols[i].str); - if (i > 0) - len += 2; - if (pos + len >= size) - break; - if (i > 0) - strcat(buffer, ", "); - strcat(buffer, cols[i].title); - strcat(buffer, ":"); - 
strcat(buffer, cols[i].str); - pos += len; - } + return slices.back().end - slices[0].start; } void Statistics::printStats() { if (fullFormat) { - fprintf(fp, "%12.0f", double(phaseStarts[PHASE_GC] - startupTime) / PRMJ_USEC_PER_MSEC); - - ColumnInfo cols[NUM_COLUMNS]; - makeTable(cols); - for (int i = 0; i < NUM_COLUMNS; i++) - fprintf(fp, ", %*s", cols[i].width, cols[i].str); - fprintf(fp, "\n"); + fprintf(fp, "GC(T+%.3fs) %s\n", + t(slices[0].start - startupTime) / 1000.0, + formatData()); } else { fprintf(fp, "%f %f %f\n", - t(PHASE_GC), t(PHASE_MARK), t(PHASE_SWEEP)); + t(gcDuration()), + t(phaseTimes[PHASE_MARK]), + t(phaseTimes[PHASE_SWEEP])); } fflush(fp); } +void +Statistics::beginGC() +{ + PodArrayZero(phaseStarts); + PodArrayZero(phaseTimes); + + slices.clearAndFree(); + wasReset = false; + + Probes::GCStart(); +} + void Statistics::endGC() { Probes::GCEnd(); - endPhase(PHASE_GC); crash::SnapshotGCStack(); for (int i = 0; i < PHASE_LIMIT; i++) - totals[i] += phaseTimes[i]; + phaseTotals[i] += phaseTimes[i]; if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) { - (*cb)(JS_TELEMETRY_GC_REASON, triggerReason); (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, compartment ? 1 : 0); - (*cb)(JS_TELEMETRY_GC_MS, t(PHASE_GC)); - (*cb)(JS_TELEMETRY_GC_MARK_MS, t(PHASE_MARK)); - (*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(PHASE_SWEEP)); - } + (*cb)(JS_TELEMETRY_GC_MS, t(gcDuration())); + (*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK])); + (*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_SWEEP])); + (*cb)(JS_TELEMETRY_GC_RESET, wasReset); + (*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gcIncrementalEnabled); - if (JSGCFinishedCallback cb = runtime->gcFinishedCallback) { - char buffer[1024]; - statsToString(buffer, sizeof(buffer)); - (*cb)(runtime, compartment, buffer); + double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC); + (*cb)(JS_TELEMETRY_GC_MMU_50, mmu50 * 100); } if (fp) @@ -310,6 +280,47 @@ Statistics::endGC() PodArrayZero(counts); } +void +Statistics::beginSlice(JSCompartment *comp, gcreason::Reason reason) +{ + compartment = comp; + + bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL; + if (first) + beginGC(); + + SliceData data(reason, PRMJ_Now()); + (void) slices.append(data); /* Ignore any OOMs here. */ + + if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) + (*cb)(JS_TELEMETRY_GC_REASON, reason); + + if (GCSliceCallback cb = runtime->gcSliceCallback) { + GCDescription desc(NULL, !!compartment); + (*cb)(runtime, first ? 
GC_CYCLE_BEGIN : GC_SLICE_BEGIN, desc); + } +} + +void +Statistics::endSlice() +{ + slices.back().end = PRMJ_Now(); + + if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) + (*cb)(JS_TELEMETRY_GC_SLICE_MS, t(slices.back().end - slices.back().start)); + + bool last = runtime->gcIncrementalState == gc::NO_INCREMENTAL; + if (last) + endGC(); + + if (GCSliceCallback cb = runtime->gcSliceCallback) { + if (last) + (*cb)(runtime, GC_CYCLE_END, GCDescription(formatData(), !!compartment)); + else + (*cb)(runtime, GC_SLICE_END, GCDescription(NULL, !!compartment)); + } +} + void Statistics::beginPhase(Phase phase) { @@ -324,8 +335,10 @@ Statistics::beginPhase(Phase phase) void Statistics::endPhase(Phase phase) { - phaseEnds[phase] = PRMJ_Now(); - phaseTimes[phase] += phaseEnds[phase] - phaseStarts[phase]; + int64_t now = PRMJ_Now(); + int64_t t = now - phaseStarts[phase]; + slices.back().phaseTimes[phase] += t; + phaseTimes[phase] += t; if (phase == gcstats::PHASE_MARK) Probes::GCEndMarkPhase(); @@ -333,5 +346,44 @@ Statistics::endPhase(Phase phase) Probes::GCEndSweepPhase(); } +/* + * MMU (minimum mutator utilization) is a measure of how much garbage collection + * is affecting the responsiveness of the system. MMU measurements are given + * with respect to a certain window size. If we report MMU(50ms) = 80%, then + * that means that, for any 50ms window of time, at least 80% of the window is + * devoted to the mutator. In other words, the GC is running for at most 20% of + * the window, or 10ms. The GC can run multiple slices during the 50ms window + * as long as the total time it spends is at most 10ms. + */ +double +Statistics::computeMMU(int64_t window) +{ + JS_ASSERT(!slices.empty()); + + int64_t gc = slices[0].end - slices[0].start; + int64_t gcMax = gc; + + if (gc >= window) + return 0.0; + + int startIndex = 0; + for (size_t endIndex = 1; endIndex < slices.length(); endIndex++) { + gc += slices[endIndex].end - slices[endIndex].start; + + while (slices[endIndex].end - slices[startIndex].end >= window) { + gc -= slices[startIndex].end - slices[startIndex].start; + startIndex++; + } + + int64_t cur = gc; + if (slices[endIndex].end - slices[startIndex].start > window) + cur -= (slices[endIndex].end - slices[startIndex].start - window); + if (cur > gcMax) + gcMax = cur; + } + + return double(window - gcMax) / window; +} + } /* namespace gcstats */ } /* namespace js */ diff --git a/js/src/gc/Statistics.h b/js/src/gc/Statistics.h index 497efaca6072..97b8def045e4 100644 --- a/js/src/gc/Statistics.h +++ b/js/src/gc/Statistics.h @@ -52,8 +52,10 @@ namespace js { namespace gcstats { enum Phase { - PHASE_GC, PHASE_MARK, + PHASE_MARK_ROOTS, + PHASE_MARK_DELAYED, + PHASE_MARK_OTHER, PHASE_SWEEP, PHASE_SWEEP_OBJECT, PHASE_SWEEP_STRING, @@ -74,16 +76,20 @@ enum Stat { STAT_LIMIT }; +static const size_t BUFFER_SIZE = 8192; + struct Statistics { Statistics(JSRuntime *rt); ~Statistics(); - void beginGC(JSCompartment *comp, gcreason::Reason reason); - void endGC(); - void beginPhase(Phase phase); void endPhase(Phase phase); + void beginSlice(JSCompartment *comp, gcreason::Reason reason); + void endSlice(); + + void reset() { wasReset = true; } + void count(Stat s) { JS_ASSERT(s < STAT_LIMIT); counts[s]++; @@ -92,48 +98,64 @@ struct Statistics { private: JSRuntime *runtime; - uint64_t startupTime; + int64_t startupTime; FILE *fp; bool fullFormat; - gcreason::Reason triggerReason; JSCompartment *compartment; + bool wasReset; - uint64_t phaseStarts[PHASE_LIMIT]; - uint64_t 
phaseEnds[PHASE_LIMIT]; - uint64_t phaseTimes[PHASE_LIMIT]; - uint64_t totals[PHASE_LIMIT]; - unsigned int counts[STAT_LIMIT]; + struct SliceData { + SliceData(gcreason::Reason reason, int64_t start) + : reason(reason), start(start) + { + PodArrayZero(phaseTimes); + } - double t(Phase phase); - double total(Phase phase); - double beginDelay(Phase phase1, Phase phase2); - double endDelay(Phase phase1, Phase phase2); - void printStats(); - void statsToString(char *buffer, size_t size); + gcreason::Reason reason; + int64_t start, end; + int64_t phaseTimes[PHASE_LIMIT]; - struct ColumnInfo { - const char *title; - char str[32]; - char totalStr[32]; - int width; - - ColumnInfo() {} - ColumnInfo(const char *title, double t, double total); - ColumnInfo(const char *title, double t); - ColumnInfo(const char *title, unsigned int data); - ColumnInfo(const char *title, const char *data); + int64_t duration() const { return end - start; } }; - void makeTable(ColumnInfo *cols); + Vector<SliceData, 8, SystemAllocPolicy> slices; + + /* Most recent time when the given phase started. */ + int64_t phaseStarts[PHASE_LIMIT]; + + /* Total time in a given phase for this GC. */ + int64_t phaseTimes[PHASE_LIMIT]; + + /* Total time in a given phase over all GCs. */ + int64_t phaseTotals[PHASE_LIMIT]; + + /* Number of events of this type for this GC. */ + unsigned int counts[STAT_LIMIT]; + + char buffer[BUFFER_SIZE]; + bool needComma; + + void beginGC(); + void endGC(); + + int64_t gcDuration(); + double t(int64_t t); + void printStats(); + void fmt(const char *f, ...); + void fmtIfNonzero(const char *name, double t); + void formatPhases(int64_t *times); + const char *formatData(); + + double computeMMU(int64_t resolution); }; -struct AutoGC { - AutoGC(Statistics &stats, JSCompartment *comp, gcreason::Reason reason - JS_GUARD_OBJECT_NOTIFIER_PARAM) - : stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginGC(comp, reason); } - ~AutoGC() { stats.endGC(); } +struct AutoGCSlice { + AutoGCSlice(Statistics &stats, JSCompartment *comp, gcreason::Reason reason + JS_GUARD_OBJECT_NOTIFIER_PARAM) + : stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginSlice(comp, reason); } + ~AutoGCSlice() { stats.endSlice(); } Statistics &stats; JS_DECL_USE_GUARD_OBJECT_NOTIFIER diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp index 8e1bb1769ee1..c9c38331879d 100644 --- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -723,8 +723,6 @@ JSRuntime::JSRuntime() gcMaxBytes(0), gcMaxMallocBytes(0), gcNumArenasFreeCommitted(0), - gcNumber(0), - gcIncrementalTracer(NULL), gcVerifyData(NULL), gcChunkAllocationSinceLastGC(false), gcNextFullGCTime(0), @@ -733,12 +731,20 @@ JSRuntime::JSRuntime() gcIsNeeded(0), gcWeakMapList(NULL), gcStats(thisFromCtor()), + gcNumber(0), + gcStartNumber(0), gcTriggerReason(gcreason::NO_REASON), gcTriggerCompartment(NULL), gcCurrentCompartment(NULL), gcCheckCompartment(NULL), + gcIncrementalState(gc::NO_INCREMENTAL), + gcCompartmentCreated(false), + gcLastMarkSlice(false), + gcInterFrameGC(0), + gcSliceBudget(SliceBudget::Unlimited), + gcIncrementalEnabled(true), + gcIncrementalCompartment(NULL), gcPoke(false), - gcMarkAndSweep(false), gcRunning(false), #ifdef JS_GC_ZEAL gcZeal_(0), @@ -747,7 +753,7 @@ JSRuntime::JSRuntime() gcDebugCompartmentGC(false), #endif gcCallback(NULL), - gcFinishedCallback(NULL), + gcSliceCallback(NULL), gcMallocBytes(0), gcBlackRootsTraceOp(NULL), gcBlackRootsData(NULL), @@ -814,6 +820,9 @@ JSRuntime::init(uint32_t maxbytes) if (!js_InitGC(this, maxbytes)) return false; + if (!gcMarker.init()) + return false; + if 
(!(atomsCompartment = this->new_<JSCompartment>(this)) || !atomsCompartment->init(NULL) || !compartments.append(atomsCompartment)) { @@ -2437,13 +2446,7 @@ JS_SetExtraGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data) JS_PUBLIC_API(void) JS_TracerInit(JSTracer *trc, JSContext *cx, JSTraceCallback callback) { - trc->runtime = cx->runtime; - trc->context = cx; - trc->callback = callback; - trc->debugPrinter = NULL; - trc->debugPrintArg = NULL; - trc->debugPrintIndex = size_t(-1); - trc->eagerlyTraceWeakMaps = true; + InitTracer(trc, cx->runtime, cx, callback); } JS_PUBLIC_API(void) @@ -2875,8 +2878,7 @@ JS_CompartmentGC(JSContext *cx, JSCompartment *comp) /* We cannot GC the atoms compartment alone; use a full GC instead. */ JS_ASSERT(comp != cx->runtime->atomsCompartment); - js::gc::VerifyBarriers(cx, true); - js_GC(cx, comp, GC_NORMAL, gcreason::API); + GC(cx, comp, GC_NORMAL, gcreason::API); } JS_PUBLIC_API(void) @@ -2914,7 +2916,6 @@ JS_PUBLIC_API(JSBool) JS_IsAboutToBeFinalized(void *thing) { gc::Cell *t = static_cast<gc::Cell *>(thing); - JS_ASSERT(!t->compartment()->rt->gcIncrementalTracer); return IsAboutToBeFinalized(t); } @@ -2931,11 +2932,15 @@ JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32_t value) case JSGC_MAX_MALLOC_BYTES: rt->setGCMaxMallocBytes(value); break; + case JSGC_SLICE_TIME_BUDGET: + rt->gcSliceBudget = SliceBudget::TimeBudget(value); + break; default: JS_ASSERT(key == JSGC_MODE); rt->gcMode = JSGCMode(value); JS_ASSERT(rt->gcMode == JSGC_MODE_GLOBAL || - rt->gcMode == JSGC_MODE_COMPARTMENT); + rt->gcMode == JSGC_MODE_COMPARTMENT || + rt->gcMode == JSGC_MODE_INCREMENTAL); return; } } @@ -2956,9 +2961,11 @@ JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key) return uint32_t(rt->gcChunkPool.getEmptyCount()); case JSGC_TOTAL_CHUNKS: return uint32_t(rt->gcChunkSet.count() + rt->gcChunkPool.getEmptyCount()); + case JSGC_SLICE_TIME_BUDGET: + return uint32_t(rt->gcSliceBudget > 0 ? rt->gcSliceBudget / PRMJ_USEC_PER_MSEC : 0); default: JS_ASSERT(key == JSGC_NUMBER); - return rt->gcNumber; + return uint32_t(rt->gcNumber); } } @@ -6609,7 +6616,16 @@ JS_AbortIfWrongThread(JSRuntime *rt) JS_PUBLIC_API(void) JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency, JSBool compartment) { - bool schedule = zeal >= js::gc::ZealAllocThreshold && zeal < js::gc::ZealVerifierThreshold; +#ifdef JS_GC_ZEAL + const char *env = getenv("JS_GC_ZEAL"); + if (env) { + zeal = atoi(env); + frequency = 1; + compartment = false; + } +#endif + + bool schedule = zeal >= js::gc::ZealAllocValue; cx->runtime->gcZeal_ = zeal; cx->runtime->gcZealFrequency = frequency; cx->runtime->gcNextScheduled = schedule ? frequency : 0; diff --git a/js/src/jsapi.h b/js/src/jsapi.h index 50d131839af8..63a952f442c0 100644 --- a/js/src/jsapi.h +++ b/js/src/jsapi.h @@ -1428,8 +1428,11 @@ typedef JSBool (* JSContextCallback)(JSContext *cx, uintN contextOp); typedef enum JSGCStatus { + /* These callbacks happen outside the GC lock. */ JSGC_BEGIN, JSGC_END, + + /* These callbacks happen within the GC lock. */ JSGC_MARK_END, JSGC_FINALIZE_END } JSGCStatus; @@ -3290,7 +3293,10 @@ typedef enum JSGCParamKey { JSGC_UNUSED_CHUNKS = 7, /* Total number of allocated GC chunks. */ - JSGC_TOTAL_CHUNKS = 8 + JSGC_TOTAL_CHUNKS = 8, + + /* Max milliseconds to spend in an incremental GC slice. */ + JSGC_SLICE_TIME_BUDGET = 9 } JSGCParamKey; typedef enum JSGCMode { JSGC_MODE_GLOBAL = 0, /* Perform per-compartment GCs until too much garbage has accumulated. 
*/ - JSGC_MODE_COMPARTMENT = 1 + JSGC_MODE_COMPARTMENT = 1, + + /* + * Collect in short time slices rather than all at once. Implies + * JSGC_MODE_COMPARTMENT. + */ + JSGC_MODE_INCREMENTAL = 2 } JSGCMode; extern JS_PUBLIC_API(void) @@ -3393,6 +3405,8 @@ struct JSClass { object in prototype chain passed in via *objp in/out parameter */ +#define JSCLASS_IMPLEMENTS_BARRIERS (1<<5) /* Correctly implements GC read + and write barriers */ #define JSCLASS_DOCUMENT_OBSERVER (1<<6) /* DOM document observer */ /* diff --git a/js/src/jscntxt.cpp b/js/src/jscntxt.cpp index 49bc517e65a1..cf781e63a2b5 100644 --- a/js/src/jscntxt.cpp +++ b/js/src/jscntxt.cpp @@ -282,10 +282,10 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode) c->clearTraps(cx); JS_ClearAllWatchPoints(cx); - js_GC(cx, NULL, GC_NORMAL, gcreason::LAST_CONTEXT); + GC(cx, NULL, GC_NORMAL, gcreason::LAST_CONTEXT); } else if (mode == JSDCM_FORCE_GC) { - js_GC(cx, NULL, GC_NORMAL, gcreason::DESTROY_CONTEXT); + GC(cx, NULL, GC_NORMAL, gcreason::DESTROY_CONTEXT); } else if (mode == JSDCM_MAYBE_GC) { JS_MaybeGC(cx); } @@ -875,7 +875,7 @@ js_InvokeOperationCallback(JSContext *cx) JS_ATOMIC_SET(&rt->interrupt, 0); if (rt->gcIsNeeded) - js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL, rt->gcTriggerReason); + GCSlice(cx, rt->gcTriggerCompartment, GC_NORMAL, rt->gcTriggerReason); #ifdef JS_THREADSAFE /* diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index c17fd19d8b55..13847778516c 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -304,24 +304,25 @@ struct JSRuntime : js::RuntimeFriendFields * in MaybeGC. */ volatile uint32_t gcNumArenasFreeCommitted; - uint32_t gcNumber; - js::GCMarker *gcIncrementalTracer; + js::FullGCMarker gcMarker; void *gcVerifyData; bool gcChunkAllocationSinceLastGC; int64_t gcNextFullGCTime; int64_t gcJitReleaseTime; JSGCMode gcMode; - volatile uintptr_t gcBarrierFailed; volatile uintptr_t gcIsNeeded; js::WeakMapBase *gcWeakMapList; js::gcstats::Statistics gcStats; + /* Incremented on every GC slice. */ + uint64_t gcNumber; + + /* The gcNumber at the time of the most recent GC's first slice. */ + uint64_t gcStartNumber; + /* The reason that an interrupt-triggered GC should be called. */ js::gcreason::Reason gcTriggerReason; - /* Pre-allocated space for the GC mark stack. */ - uintptr_t gcMarkStackArray[js::MARK_STACK_LENGTH]; - /* * Compartment that triggered GC. If more than one Compartment needs GC, * gcTriggerCompartment is reset to NULL and a global GC is performed. @@ -337,6 +338,53 @@ struct JSRuntime : js::RuntimeFriendFields */ JSCompartment *gcCheckCompartment; + /* + * The current incremental GC phase. During non-incremental GC, this is + * always NO_INCREMENTAL. + */ + js::gc::State gcIncrementalState; + + /* Indicates that a new compartment was created during incremental GC. */ + bool gcCompartmentCreated; + + /* Indicates that the last incremental slice exhausted the mark stack. */ + bool gcLastMarkSlice; + + /* + * Indicates that a GC slice has taken place in the middle of an animation + * frame, rather than at the beginning. In this case, the next slice will be + * delayed so that we don't get back-to-back slices. + */ + volatile uintptr_t gcInterFrameGC; + + /* Default budget for incremental GC slice. See SliceBudget in jsgc.h. */ + int64_t gcSliceBudget; + + /* + * We disable incremental GC if we encounter a js::Class with a trace hook + * that does not implement write barriers. + */ + bool gcIncrementalEnabled; + + /* Compartment that is undergoing an incremental GC. 
*/ + JSCompartment *gcIncrementalCompartment; + + /* + * We save all conservative scanned roots in this vector so that + * conservative scanning can be "replayed" deterministically. In DEBUG mode, + * this allows us to run a non-incremental GC after every incremental GC to + * ensure that no objects were missed. + */ +#ifdef DEBUG + struct SavedGCRoot { + void *thing; + JSGCTraceKind kind; + + SavedGCRoot(void *thing, JSGCTraceKind kind) : thing(thing), kind(kind) {} + }; + js::Vector<SavedGCRoot, 0, js::SystemAllocPolicy> gcSavedRoots; +#endif + /* * We can pack these flags as only the GC thread writes to them. Atomic * updates to packed bytes are not guaranteed, so stores issued by one * other threads. */ bool gcPoke; - bool gcMarkAndSweep; bool gcRunning; /* * gcNextScheduled is decremented. When it reaches zero, we do either a * full or a compartmental GC, based on gcDebugCompartmentGC. * - * At this point, if gcZeal_ >= 2 then gcNextScheduled is reset to the + * At this point, if gcZeal_ == 2 then gcNextScheduled is reset to the * value of gcZealFrequency. Otherwise, no additional GCs take place. * * You can control these values in several ways: * - Call gczeal() or schedulegc() from inside shell-executed JS code * (see the help for details) * - * Additionally, if gzZeal_ == 1 then we perform GCs in select places - * (during MaybeGC and whenever a GC poke happens). This option is mainly - * useful to embedders. + * If gcZeal_ == 1 then we perform GCs in select places (during MaybeGC and + * whenever a GC poke happens). This option is mainly useful to embedders. * * We use gcZeal_ == 4 to enable write barrier verification. See the comment * in jsgc.cpp for more information about this. @@ -378,7 +424,7 @@ struct JSRuntime : js::RuntimeFriendFields bool needZealousGC() { if (gcNextScheduled > 0 && --gcNextScheduled == 0) { - if (gcZeal() >= js::gc::ZealAllocThreshold && gcZeal() < js::gc::ZealVerifierThreshold) + if (gcZeal() == js::gc::ZealAllocValue) gcNextScheduled = gcZealFrequency; return true; } @@ -390,7 +436,7 @@ struct JSRuntime : js::RuntimeFriendFields #endif JSGCCallback gcCallback; - JSGCFinishedCallback gcFinishedCallback; + js::GCSliceCallback gcSliceCallback; private: /* diff --git a/js/src/jscompartment.cpp b/js/src/jscompartment.cpp index 0388b162652a..d40a5513f11a 100644 --- a/js/src/jscompartment.cpp +++ b/js/src/jscompartment.cpp @@ -73,7 +73,6 @@ JSCompartment::JSCompartment(JSRuntime *rt) : rt(rt), principals(NULL), needsBarrier_(false), - gcIncrementalTracer(NULL), gcBytes(0), gcTriggerBytes(0), gcLastBytes(0), @@ -128,6 +127,9 @@ JSCompartment::init(JSContext *cx) if (!scriptFilenameTable.init()) return false; + if (!barrierMarker_.init()) + return false; + return debuggees.init(); } @@ -458,6 +460,30 @@ JSCompartment::markTypes(JSTracer *trc) } } +void +JSCompartment::discardJitCode(JSContext *cx) +{ + /* + * Kick all frames on the stack into the interpreter, and release all JIT + * code in the compartment. + */ +#ifdef JS_METHODJIT + mjit::ClearAllFrames(this); + + for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) { + JSScript *script = i.get<JSScript>(); + mjit::ReleaseScriptCode(cx, script); + + /* + * Use counts for scripts are reset on GC. After discarding code we + * need to let it warm back up to get information like which opcodes + * are setting array holes or accessing getter properties. 
+ */ + script->resetUseCount(); + } +#endif +} + void JSCompartment::sweep(JSContext *cx, bool releaseTypes) { @@ -474,6 +500,8 @@ JSCompartment::sweep(JSContext *cx, bool releaseTypes) /* Remove dead references held weakly by the compartment. */ + regExps.sweep(rt); + sweepBaseShapeTable(cx); sweepInitialShapeTable(cx); sweepNewTypeObjectTable(cx, newTypeObjects); @@ -488,26 +516,7 @@ JSCompartment::sweep(JSContext *cx, bool releaseTypes) { gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_CODE); - - /* - * Kick all frames on the stack into the interpreter, and release all JIT - * code in the compartment. - */ -#ifdef JS_METHODJIT - mjit::ClearAllFrames(this); - - for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) { - JSScript *script = i.get(); - mjit::ReleaseScriptCode(cx, script); - - /* - * Use counts for scripts are reset on GC. After discarding code we - * need to let it warm back up to get information like which opcodes - * are setting array holes or accessing getter properties. - */ - script->resetUseCount(); - } -#endif + discardJitCode(cx); } if (!activeAnalysis) { @@ -561,8 +570,6 @@ JSCompartment::sweep(JSContext *cx, bool releaseTypes) void JSCompartment::purge(JSContext *cx) { - arenas.purge(); - regExps.purge(); dtoaCache.purge(); /* @@ -776,13 +783,6 @@ JSCompartment::sweepBreakpoints(JSContext *cx) } } -GCMarker * -JSCompartment::createBarrierTracer() -{ - JS_ASSERT(!gcIncrementalTracer); - return NULL; -} - size_t JSCompartment::sizeOfShapeTable(JSMallocSizeOfFun mallocSizeOf) { diff --git a/js/src/jscompartment.h b/js/src/jscompartment.h index 5c0edb1f5e2b..14c25b84a3d0 100644 --- a/js/src/jscompartment.h +++ b/js/src/jscompartment.h @@ -46,7 +46,6 @@ #include "jscntxt.h" #include "jsfun.h" #include "jsgc.h" -#include "jsgcstats.h" #include "jsobj.h" #include "jsscope.h" #include "vm/GlobalObject.h" @@ -163,6 +162,23 @@ typedef HashSet ScriptFilenameTable; +/* If HashNumber grows, need to change WrapperHasher. 
*/ +JS_STATIC_ASSERT(sizeof(HashNumber) == 4); + +struct WrapperHasher +{ + typedef Value Lookup; + + static HashNumber hash(Value key) { + uint64_t bits = JSVAL_TO_IMPL(key).asBits; + return uint32_t(bits) ^ uint32_t(bits >> 32); + } + + static bool match(const Value &l, const Value &k) { return l == k; } +}; + +typedef HashMap<Value, ReadBarrieredValue, WrapperHasher, SystemAllocPolicy> WrapperMap; + } /* namespace js */ namespace JS { @@ -177,7 +193,7 @@ struct JSCompartment js::gc::ArenaLists arenas; bool needsBarrier_; - js::GCMarker *gcIncrementalTracer; + js::BarrierGCMarker barrierMarker_; bool needsBarrier() { return needsBarrier_; @@ -185,9 +201,7 @@ struct JSCompartment js::GCMarker *barrierTracer() { JS_ASSERT(needsBarrier_); - if (gcIncrementalTracer) - return gcIncrementalTracer; - return createBarrierTracer(); + return &barrierMarker_; } size_t gcBytes; @@ -325,10 +339,11 @@ struct JSCompartment bool wrap(JSContext *cx, js::AutoIdVector &props); void markTypes(JSTracer *trc); + void discardJitCode(JSContext *cx); void sweep(JSContext *cx, bool releaseTypes); void purge(JSContext *cx); - void setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind); + void setGCLastBytes(size_t lastBytes, js::JSGCInvocationKind gckind); void reduceGCTriggerBytes(size_t amount); void resetGCMallocBytes(); @@ -397,8 +412,6 @@ struct JSCompartment private: void sweepBreakpoints(JSContext *cx); - js::GCMarker *createBarrierTracer(); - public: js::WatchpointMap *watchpointMap; }; diff --git a/js/src/jsexn.cpp b/js/src/jsexn.cpp index 483253218465..ff61005b4679 100644 --- a/js/src/jsexn.cpp +++ b/js/src/jsexn.cpp @@ -94,7 +94,7 @@ exn_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags, Class js::ErrorClass = { js_Error_str, - JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE | + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE | JSCLASS_HAS_CACHED_PROTO(JSProto_Error), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ diff --git a/js/src/jsfriendapi.cpp b/js/src/jsfriendapi.cpp index 93d98bc54ed5..f67611376a1d 100644 --- a/js/src/jsfriendapi.cpp +++ b/js/src/jsfriendapi.cpp @@ -132,7 +132,7 @@ JS_NewObjectWithUniqueType(JSContext *cx, JSClass *clasp, JSObject *proto, JSObj JS_FRIEND_API(void) js::GCForReason(JSContext *cx, gcreason::Reason reason) { - js_GC(cx, NULL, GC_NORMAL, reason); + GC(cx, NULL, GC_NORMAL, reason); } JS_FRIEND_API(void) @@ -141,13 +141,19 @@ js::CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason /* We cannot GC the atoms compartment alone; use a full GC instead. 
*/ JS_ASSERT(comp != cx->runtime->atomsCompartment); - js_GC(cx, comp, GC_NORMAL, reason); + GC(cx, comp, GC_NORMAL, reason); } JS_FRIEND_API(void) js::ShrinkingGC(JSContext *cx, gcreason::Reason reason) { - js_GC(cx, NULL, GC_SHRINK, reason); + GC(cx, NULL, GC_SHRINK, reason); +} + +JS_FRIEND_API(void) +js::IncrementalGC(JSContext *cx, gcreason::Reason reason) +{ + GCSlice(cx, NULL, GC_NORMAL, reason); } JS_FRIEND_API(void) @@ -401,12 +407,6 @@ JS_SetAccumulateTelemetryCallback(JSRuntime *rt, JSAccumulateTelemetryDataCallba rt->telemetryCallback = callback; } -JS_FRIEND_API(void) -JS_SetGCFinishedCallback(JSRuntime *rt, JSGCFinishedCallback callback) -{ - rt->gcFinishedCallback = callback; -} - #ifdef DEBUG JS_FRIEND_API(void) js_DumpString(JSString *str) @@ -551,39 +551,6 @@ js::DumpHeapComplete(JSContext *cx, FILE *fp) namespace js { -JS_FRIEND_API(bool) -IsIncrementalBarrierNeeded(JSRuntime *rt) -{ - return !!rt->gcIncrementalTracer && !rt->gcRunning; -} - -JS_FRIEND_API(bool) -IsIncrementalBarrierNeeded(JSContext *cx) -{ - return IsIncrementalBarrierNeeded(cx->runtime); -} - -extern JS_FRIEND_API(void) -IncrementalReferenceBarrier(void *ptr) -{ - if (!ptr) - return; - JS_ASSERT(!static_cast(ptr)->compartment()->rt->gcRunning); - uint32_t kind = gc::GetGCThingTraceKind(ptr); - if (kind == JSTRACE_OBJECT) - JSObject::writeBarrierPre((JSObject *) ptr); - else if (kind == JSTRACE_STRING) - JSString::writeBarrierPre((JSString *) ptr); - else - JS_NOT_REACHED("invalid trace kind"); -} - -extern JS_FRIEND_API(void) -IncrementalValueBarrier(const Value &v) -{ - HeapValue::writeBarrierPre(v); -} - /* static */ void AutoLockGC::LockGC(JSRuntime *rt) { @@ -719,4 +686,90 @@ SizeOfJSContext() return sizeof(JSContext); } +JS_FRIEND_API(GCSliceCallback) +SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback) +{ + GCSliceCallback old = rt->gcSliceCallback; + rt->gcSliceCallback = callback; + return old; +} + +JS_FRIEND_API(bool) +WantGCSlice(JSRuntime *rt) +{ + if (rt->gcZeal() == gc::ZealFrameVerifierValue || rt->gcZeal() == gc::ZealFrameGCValue) + return true; + + if (rt->gcIncrementalState != gc::NO_INCREMENTAL) + return true; + + return false; +} + +JS_FRIEND_API(void) +NotifyDidPaint(JSContext *cx) +{ + JSRuntime *rt = cx->runtime; + + if (rt->gcZeal() == gc::ZealFrameVerifierValue) { + gc::VerifyBarriers(cx); + return; + } + + if (rt->gcZeal() == gc::ZealFrameGCValue) { + GCSlice(cx, NULL, GC_NORMAL, gcreason::REFRESH_FRAME); + return; + } + + if (rt->gcIncrementalState != gc::NO_INCREMENTAL && !rt->gcInterFrameGC) + GCSlice(cx, rt->gcIncrementalCompartment, GC_NORMAL, gcreason::REFRESH_FRAME); + + rt->gcInterFrameGC = false; +} + +extern JS_FRIEND_API(bool) +IsIncrementalGCEnabled(JSRuntime *rt) +{ + return rt->gcIncrementalEnabled; +} + +JS_FRIEND_API(bool) +IsIncrementalBarrierNeeded(JSRuntime *rt) +{ + return (rt->gcIncrementalState == gc::MARK && !rt->gcRunning); +} + +JS_FRIEND_API(bool) +IsIncrementalBarrierNeeded(JSContext *cx) +{ + return IsIncrementalBarrierNeeded(cx->runtime); +} + +JS_FRIEND_API(bool) +IsIncrementalBarrierNeededOnObject(JSObject *obj) +{ + return obj->compartment()->needsBarrier(); +} + +extern JS_FRIEND_API(void) +IncrementalReferenceBarrier(void *ptr) +{ + if (!ptr) + return; + JS_ASSERT(!static_cast(ptr)->compartment()->rt->gcRunning); + uint32_t kind = gc::GetGCThingTraceKind(ptr); + if (kind == JSTRACE_OBJECT) + JSObject::writeBarrierPre((JSObject *) ptr); + else if (kind == JSTRACE_STRING) + JSString::writeBarrierPre((JSString *) ptr); + else + 
JS_NOT_REACHED("invalid trace kind"); +} + +extern JS_FRIEND_API(void) +IncrementalValueBarrier(const Value &v) +{ + HeapValue::writeBarrierPre(v); +} + } // namespace js diff --git a/js/src/jsfriendapi.h b/js/src/jsfriendapi.h index 495b40068175..c81d036e8ae7 100644 --- a/js/src/jsfriendapi.h +++ b/js/src/jsfriendapi.h @@ -100,7 +100,11 @@ enum { JS_TELEMETRY_GC_IS_COMPARTMENTAL, JS_TELEMETRY_GC_MS, JS_TELEMETRY_GC_MARK_MS, - JS_TELEMETRY_GC_SWEEP_MS + JS_TELEMETRY_GC_SWEEP_MS, + JS_TELEMETRY_GC_SLICE_MS, + JS_TELEMETRY_GC_MMU_50, + JS_TELEMETRY_GC_RESET, + JS_TELEMETRY_GC_INCREMENTAL_DISABLED }; typedef void @@ -109,12 +113,6 @@ typedef void extern JS_FRIEND_API(void) JS_SetAccumulateTelemetryCallback(JSRuntime *rt, JSAccumulateTelemetryDataCallback callback); -typedef void -(* JSGCFinishedCallback)(JSRuntime *rt, JSCompartment *comp, const char *description); - -extern JS_FRIEND_API(void) -JS_SetGCFinishedCallback(JSRuntime *rt, JSGCFinishedCallback callback); - extern JS_FRIEND_API(JSPrincipals *) JS_GetCompartmentPrincipals(JSCompartment *compartment); @@ -703,12 +701,65 @@ CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason reas extern JS_FRIEND_API(void) ShrinkingGC(JSContext *cx, gcreason::Reason reason); +extern JS_FRIEND_API(void) +IncrementalGC(JSContext *cx, gcreason::Reason reason); + +extern JS_FRIEND_API(void) +SetGCSliceTimeBudget(JSContext *cx, int64_t millis); + +enum GCProgress { + /* + * During non-incremental GC, the GC is bracketed by JSGC_CYCLE_BEGIN/END + * callbacks. During an incremental GC, the sequence of callbacks is as + * follows: + * JSGC_CYCLE_BEGIN, JSGC_SLICE_END (first slice) + * JSGC_SLICE_BEGIN, JSGC_SLICE_END (second slice) + * ... + * JSGC_SLICE_BEGIN, JSGC_CYCLE_END (last slice) + */ + + GC_CYCLE_BEGIN, + GC_SLICE_BEGIN, + GC_SLICE_END, + GC_CYCLE_END +}; + +struct GCDescription { + const char *logMessage; + bool isCompartment; + + GCDescription(const char *msg, bool isCompartment) + : logMessage(msg), isCompartment(isCompartment) {} +}; + +typedef void +(* GCSliceCallback)(JSRuntime *rt, GCProgress progress, const GCDescription &desc); + +extern JS_FRIEND_API(GCSliceCallback) +SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback); + +extern JS_FRIEND_API(bool) +WantGCSlice(JSRuntime *rt); + +/* + * Signals a good place to do an incremental slice, because the browser is + * drawing a frame. 
+ */ +extern JS_FRIEND_API(void) +NotifyDidPaint(JSContext *cx); + +extern JS_FRIEND_API(bool) +IsIncrementalGCEnabled(JSRuntime *rt); + extern JS_FRIEND_API(bool) IsIncrementalBarrierNeeded(JSRuntime *rt); extern JS_FRIEND_API(bool) IsIncrementalBarrierNeeded(JSContext *cx); +extern JS_FRIEND_API(bool) +IsIncrementalBarrierNeededOnObject(JSObject *obj); + extern JS_FRIEND_API(void) IncrementalReferenceBarrier(void *ptr); diff --git a/js/src/jsfun.cpp b/js/src/jsfun.cpp index 71762a5cf2d8..ceb96391c5cc 100644 --- a/js/src/jsfun.cpp +++ b/js/src/jsfun.cpp @@ -551,7 +551,7 @@ args_trace(JSTracer *trc, JSObject *obj) */ Class js::NormalArgumentsObjectClass = { "Arguments", - JSCLASS_NEW_RESOLVE | + JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(NormalArgumentsObject::RESERVED_SLOTS) | JSCLASS_HAS_CACHED_PROTO(JSProto_Object) | JSCLASS_FOR_OF_ITERATION, @@ -587,7 +587,7 @@ Class js::NormalArgumentsObjectClass = { */ Class js::StrictArgumentsObjectClass = { "Arguments", - JSCLASS_NEW_RESOLVE | + JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(StrictArgumentsObject::RESERVED_SLOTS) | JSCLASS_HAS_CACHED_PROTO(JSProto_Object) | JSCLASS_FOR_OF_ITERATION, @@ -942,7 +942,7 @@ call_trace(JSTracer *trc, JSObject *obj) JS_PUBLIC_DATA(Class) js::CallClass = { "Call", - JSCLASS_HAS_PRIVATE | + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(CallObject::RESERVED_SLOTS) | JSCLASS_NEW_RESOLVE | JSCLASS_IS_ANONYMOUS, JS_PropertyStub, /* addProperty */ @@ -1499,7 +1499,7 @@ JSFunction::sizeOfMisc(JSMallocSizeOfFun mallocSizeOf) const */ JS_FRIEND_DATA(Class) js::FunctionClass = { js_Function_str, - JSCLASS_NEW_RESOLVE | + JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_CACHED_PROTO(JSProto_Function), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 1ca9c275a5a2..5baf96212bd0 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -44,13 +44,39 @@ #include "mozilla/Util.h" /* - * This GC allocates fixed-sized things with sizes up to GC_NBYTES_MAX (see - * jsgc.h). It allocates from a special GC arena pool with each arena allocated - * using malloc. It uses an ideally parallel array of flag bytes to hold the - * mark bit, finalizer type index, etc. + * This code implements a mark-and-sweep garbage collector. The mark phase is + * incremental. Most sweeping is done on a background thread. A GC is divided + * into slices as follows: * - * XXX swizzle page to freelist for better locality of reference + * Slice 1: Roots pushed onto the mark stack. The mark stack is processed by + * popping an element, marking it, and pushing its children. + * ... JS code runs ... + * Slice 2: More mark stack processing. + * ... JS code runs ... + * Slice n-1: More mark stack processing. + * ... JS code runs ... + * Slice n: Mark stack is completely drained. Some sweeping is done. + * ... JS code runs, remaining sweeping done on background thread ... + * + * When background sweeping finishes the GC is complete. + * + * Incremental GC requires close collaboration with the mutator (i.e., JS code): + * + * 1. During an incremental GC, if a memory location (except a root) is written + * to, then the value it previously held must be marked. Write barriers ensure + * this. + * 2. Any object that is allocated during incremental GC must start out marked. + * 3. Roots are special memory locations that don't need write + * barriers. 
However, they must be marked in the first slice. Roots are things + * like the C stack and the VM stack, since it would be too expensive to put + * barriers on them. + * + * Write barriers are handled using the compartment's barrierMarker_ + * JSTracer. This includes a per-compartment stack of GC things that have been + * write-barriered. This stack is processed in each GC slice. The barrierMarker_ + * is also used during write barrier verification (VerifyBarriers below). */ + #include #include /* for memset used when DEBUG */ @@ -118,12 +144,31 @@ using namespace js::gc; namespace js { namespace gc { +/* + * Lower limit after which we limit the heap growth + */ +const size_t GC_ALLOCATION_THRESHOLD = 30 * 1024 * 1024; + +/* + * A GC is triggered once the number of newly allocated arenas is + * GC_HEAP_GROWTH_FACTOR times the number of live arenas after the last GC + * starting after the lower limit of GC_ALLOCATION_THRESHOLD. This number is + * used for non-incremental GCs. + */ +const float GC_HEAP_GROWTH_FACTOR = 3.0f; + +/* Perform a Full GC every 20 seconds if MaybeGC is called */ +static const uint64_t GC_IDLE_FULL_SPAN = 20 * 1000 * 1000; + #ifdef JS_GC_ZEAL static void StartVerifyBarriers(JSContext *cx); static void EndVerifyBarriers(JSContext *cx); + +void +FinishVerifier(JSRuntime *rt); #endif /* This array should be const, but that doesn't link right under GCC. */ @@ -275,6 +320,8 @@ Arena::finalize(JSContext *cx, AllocKind thingKind, size_t thingSize, bool backg JS_ASSERT(thingKind == aheader.getAllocKind()); JS_ASSERT(thingSize == aheader.getThingSize()); JS_ASSERT(!aheader.hasDelayedMarking); + JS_ASSERT(!aheader.markOverflow); + JS_ASSERT(!aheader.allocatedDuringIncremental); uintptr_t thing = thingsStart(thingKind); uintptr_t lastByte = thingsEnd() - 1; @@ -850,7 +897,6 @@ IsAboutToBeFinalized(const Cell *thing) JSRuntime *rt = thingCompartment->rt; if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != thingCompartment) return false; - return !reinterpret_cast(thing)->isMarked(); } @@ -926,6 +972,18 @@ InFreeList(ArenaHeader *aheader, uintptr_t addr) } } +enum ConservativeGCTest +{ + CGCT_VALID, + CGCT_LOWBITSET, /* excluded because one of the low bits was set */ + CGCT_NOTARENA, /* not within arena range in a chunk */ + CGCT_OTHERCOMPARTMENT, /* in another compartment */ + CGCT_NOTCHUNK, /* not within a valid chunk */ + CGCT_FREEARENA, /* within arena containing only free things */ + CGCT_NOTLIVE, /* gcthing is not allocated */ + CGCT_END +}; + /* * Tests whether w is a (possibly dead) GC thing. Returns CGCT_VALID and * details about the thing if so. On failure, returns the reason for rejection. 
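For embedders, the slice-callback protocol declared in jsfriendapi.h above (GCProgress, GCDescription, SetGCSliceCallback) is enough to observe incremental cycles. The following is a hedged sketch of a consumer: the js:: names and signatures are taken from this patch, while the logging body, the gPrevCallback name, and the chaining convention are illustrative only.

#include <stdio.h>
#include "jsfriendapi.h"

static js::GCSliceCallback gPrevCallback;  /* invented name; holds the previous hook */

static void
LogGCSliceCallback(JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc)
{
    switch (progress) {
      case js::GC_CYCLE_BEGIN:
        fprintf(stderr, "GC cycle begins (%s GC)\n",
                desc.isCompartment ? "compartment" : "full");
        break;
      case js::GC_SLICE_BEGIN:
      case js::GC_SLICE_END:
        /* Per the GCProgress comment, JS code runs between a GC_SLICE_END
           and the next GC_SLICE_BEGIN of the same cycle. */
        break;
      case js::GC_CYCLE_END:
        if (desc.logMessage)
            fprintf(stderr, "GC cycle ends: %s\n", desc.logMessage);
        break;
    }
    if (gPrevCallback)
        gPrevCallback(rt, progress, desc);  /* chain to whatever was installed before */
}

/* Installed once at startup; SetGCSliceCallback returns the previous hook:
 *     gPrevCallback = js::SetGCSliceCallback(rt, LogGCSliceCallback);
 */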
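Rule 1 in the new jsgc.cpp header comment amounts to a pre-write barrier: before a heap slot is overwritten while marking is in progress, the value it previously held must reach the marker. Below is a minimal, self-contained model of that contract, not SpiderMonkey code; Cell, needsBarrier, barrierStack, and HeapPtr are invented stand-ins for the real JSObject, JSCompartment::needsBarrier_, barrierMarker_, and heap-slot machinery.

#include <vector>

struct Cell {
    bool marked;
    Cell() : marked(false) {}
};

static std::vector<Cell *> barrierStack;  /* stand-in for the compartment's barrier mark stack */
static bool needsBarrier = false;         /* stand-in for JSCompartment::needsBarrier_ */

/* Pre-barrier: capture the value a slot held *before* the store, so the
 * snapshot taken when marking began cannot lose an object that was
 * reachable at that point (rule 1 above). */
static void WriteBarrierPre(Cell *old)
{
    if (needsBarrier && old && !old->marked)
        barrierStack.push_back(old);
}

struct HeapPtr {
    Cell *value;
    HeapPtr() : value(0) {}
    void set(Cell *v) {
        WriteBarrierPre(value);  /* remember what we are about to drop... */
        value = v;               /* ...and only then publish the new referent */
    }
};

int main()
{
    Cell a, b;
    HeapPtr slot;
    slot.set(&a);
    needsBarrier = true;   /* an incremental mark phase is now in progress */
    slot.set(&b);          /* barrier pushes &a; the marker will visit it later */
    return barrierStack.size() == 1 ? 0 : 1;
}

Rule 2, that anything allocated during an incremental GC starts out marked, is what the allocatedDuringIncremental flag introduced later in this patch approximates at whole-arena granularity.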
@@ -1024,22 +1082,18 @@ MarkIfGCThingWord(JSTracer *trc, uintptr_t w) if (InFreeList(aheader, uintptr_t(thing))) return CGCT_NOTLIVE; + JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind); #ifdef DEBUG const char pattern[] = "machine_stack %p"; char nameBuf[sizeof(pattern) - 2 + sizeof(thing) * 2]; JS_snprintf(nameBuf, sizeof(nameBuf), pattern, thing); JS_SET_TRACING_NAME(trc, nameBuf); #endif - MarkKind(trc, thing, MapAllocToTraceKind(thingKind)); + MarkKind(trc, thing, traceKind); -#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS - if (IS_GC_MARKING_TRACER(trc)) { - GCMarker *marker = static_cast(trc); - if (marker->conservativeDumpFileName) - marker->conservativeRoots.append(thing); - if (uintptr_t(thing) != w) - marker->conservativeStats.unaligned++; - } +#ifdef DEBUG + if (trc->runtime->gcIncrementalState == MARK_ROOTS) + trc->runtime->gcSavedRoots.append(JSRuntime::SavedGCRoot(thing, traceKind)); #endif return CGCT_VALID; @@ -1070,8 +1124,26 @@ MarkRangeConservatively(JSTracer *trc, const uintptr_t *begin, const uintptr_t * } static JS_NEVER_INLINE void -MarkConservativeStackRoots(JSTracer *trc, JSRuntime *rt) +MarkConservativeStackRoots(JSTracer *trc, bool useSavedRoots) { + JSRuntime *rt = trc->runtime; + +#ifdef DEBUG + if (useSavedRoots) { + for (JSRuntime::SavedGCRoot *root = rt->gcSavedRoots.begin(); + root != rt->gcSavedRoots.end(); + root++) + { + JS_SET_TRACING_NAME(trc, "cstack"); + MarkKind(trc, root->thing, root->kind); + } + return; + } + + if (rt->gcIncrementalState == MARK_ROOTS) + rt->gcSavedRoots.clearAndFree(); +#endif + ConservativeGCData *cgcd = &rt->conservativeGC; if (!cgcd->hasStackToScan()) { #ifdef JS_THREADSAFE @@ -1132,6 +1204,8 @@ MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv) #endif } + + JS_NEVER_INLINE void ConservativeGCData::recordStackTop() { @@ -1191,6 +1265,11 @@ js_FinishGC(JSRuntime *rt) rt->gcHelperThread.finish(); #endif +#ifdef JS_GC_ZEAL + /* Free memory associated with GC verification. */ + FinishVerifier(rt); +#endif + /* Delete all remaining Compartments. */ for (CompartmentsIter c(rt); !c.done(); c.next()) Foreground::delete_(c.get()); @@ -1236,7 +1315,7 @@ js_AddRootRT(JSRuntime *rt, jsval *vp, const char *name) { /* * Due to the long-standing, but now removed, use of rt->gcLock across the - * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking + * bulk of js::GC, API users have come to depend on JS_AddRoot etc. locking * properly with a racing GC, without calling JS_AddRoot from a request. * We have to preserve API compatibility here, now that we avoid holding * rt->gcLock across the mark phase (including the root hashtable mark). @@ -1252,7 +1331,7 @@ js_AddGCThingRootRT(JSRuntime *rt, void **rp, const char *name) { /* * Due to the long-standing, but now removed, use of rt->gcLock across the - * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking + * bulk of js::GC, API users have come to depend on JS_AddRoot etc. locking * properly with a racing GC, without calling JS_AddRoot from a request. * We have to preserve API compatibility here, now that we avoid holding * rt->gcLock across the mark phase (including the root hashtable mark). 
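For orientation, the conservative scan that MarkIfGCThingWord performs reduces to a chain of cheap rejection tests mirroring a subset of the ConservativeGCTest reasons above. This is a standalone sketch, not the real code: the three predicates are placeholders for the actual chunk-set, arena, and free-span lookups, and the alignment mask is illustrative.

#include <stdint.h>

enum ConservativeGCTest {
    CGCT_VALID,      /* plausibly a live GC thing; mark it */
    CGCT_LOWBITSET,  /* a low bit was set; cannot be a GC pointer */
    CGCT_NOTCHUNK,   /* not inside any valid chunk */
    CGCT_NOTARENA,   /* not within a chunk's arena range */
    CGCT_FREEARENA   /* points into a free span */
};

/* Placeholder predicates; the real checks consult GC metadata. */
static bool InChunkSet(uintptr_t w)   { (void)w; return true; }
static bool InArenaRange(uintptr_t w) { (void)w; return true; }
static bool InFreeSpan(uintptr_t w)   { (void)w; return false; }

static ConservativeGCTest
ClassifyStackWord(uintptr_t w)
{
    if (w & 0x3)              /* GC things are at least word aligned */
        return CGCT_LOWBITSET;
    if (!InChunkSet(w))
        return CGCT_NOTCHUNK;
    if (!InArenaRange(w))
        return CGCT_NOTARENA;
    if (InFreeSpan(w))
        return CGCT_FREEARENA;
    return CGCT_VALID;        /* err on the side of keeping it alive */
}

A word that survives every test gets marked even if it is a stale integer that merely looks like a pointer; conservative scanning trades a little floating garbage for not having to root the C stack explicitly.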
@@ -1370,6 +1449,19 @@ JSCompartment::reduceGCTriggerBytes(size_t amount) namespace js { namespace gc { +inline void +ArenaLists::prepareForIncrementalGC(JSCompartment *comp) +{ + for (size_t i = 0; i != FINALIZE_LIMIT; ++i) { + FreeSpan *headSpan = &freeLists[i]; + if (!headSpan->isEmpty()) { + ArenaHeader *aheader = headSpan->arenaHeader(); + aheader->allocatedDuringIncremental = true; + comp->barrierMarker_.delayMarkingArena(aheader); + } + } +} + inline void * ArenaLists::allocateFromArena(JSCompartment *comp, AllocKind thingKind) { @@ -1423,6 +1515,10 @@ ArenaLists::allocateFromArena(JSCompartment *comp, AllocKind thingKind) */ freeLists[thingKind] = aheader->getFirstFreeSpan(); aheader->setAsFullyUsed(); + if (JS_UNLIKELY(comp->needsBarrier())) { + aheader->allocatedDuringIncremental = true; + comp->barrierMarker_.delayMarkingArena(aheader); + } return freeLists[thingKind].infallibleAllocate(Arena::thingSize(thingKind)); } @@ -1448,6 +1544,10 @@ ArenaLists::allocateFromArena(JSCompartment *comp, AllocKind thingKind) if (!aheader) return NULL; + if (JS_UNLIKELY(comp->needsBarrier())) { + aheader->allocatedDuringIncremental = true; + comp->barrierMarker_.delayMarkingArena(aheader); + } aheader->next = al->head; if (!al->head) { JS_ASSERT(al->cursor == &al->head); @@ -1619,7 +1719,7 @@ RunLastDitchGC(JSContext *cx) /* The last ditch GC preserves all atoms. */ AutoKeepAtoms keep(rt); - js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL, gcreason::LAST_DITCH); + GC(cx, rt->gcTriggerCompartment, GC_NORMAL, gcreason::LAST_DITCH); } /* static */ void * @@ -1631,7 +1731,7 @@ ArenaLists::refillFreeList(JSContext *cx, AllocKind thingKind) JSRuntime *rt = comp->rt; JS_ASSERT(!rt->gcRunning); - bool runGC = !!rt->gcIsNeeded; + bool runGC = rt->gcIncrementalState != NO_INCREMENTAL && comp->gcBytes > comp->gcTriggerBytes; for (;;) { if (JS_UNLIKELY(runGC)) { RunLastDitchGC(cx); @@ -1722,6 +1822,137 @@ js_UnlockGCThingRT(JSRuntime *rt, void *thing) namespace js { +void +InitTracer(JSTracer *trc, JSRuntime *rt, JSContext *cx, JSTraceCallback callback) +{ + trc->runtime = rt; + trc->context = cx; + trc->callback = callback; + trc->debugPrinter = NULL; + trc->debugPrintArg = NULL; + trc->debugPrintIndex = size_t(-1); + trc->eagerlyTraceWeakMaps = true; +} + +/* static */ int64_t +SliceBudget::TimeBudget(int64_t millis) +{ + return millis * PRMJ_USEC_PER_MSEC; +} + +/* static */ int64_t +SliceBudget::WorkBudget(int64_t work) +{ + return -work; +} + +SliceBudget::SliceBudget() + : deadline(INT64_MAX), + counter(INTPTR_MAX) +{ +} + +SliceBudget::SliceBudget(int64_t budget) +{ + if (budget == Unlimited) { + deadline = INT64_MAX; + counter = INTPTR_MAX; + } else if (budget > 0) { + deadline = PRMJ_Now() + budget; + counter = CounterReset; + } else { + deadline = 0; + counter = -budget; + } +} + +bool +SliceBudget::checkOverBudget() +{ + bool over = PRMJ_Now() > deadline; + if (!over) + counter = CounterReset; + return over; +} + +GCMarker::GCMarker() + : color(BLACK), + started(false), + unmarkedArenaStackTop(NULL), + markLaterArenas(0), + grayFailed(false) +{ +} + +bool +GCMarker::init(bool lazy) +{ + if (!stack.init(lazy ? 
0 : MARK_STACK_LENGTH)) + return false; + return true; +} + +void +GCMarker::start(JSRuntime *rt, JSContext *cx) +{ + InitTracer(this, rt, cx, NULL); + JS_ASSERT(!started); + started = true; + color = BLACK; + + JS_ASSERT(!unmarkedArenaStackTop); + JS_ASSERT(markLaterArenas == 0); + + JS_ASSERT(grayRoots.empty()); + JS_ASSERT(!grayFailed); + + /* + * The GC is recomputing the liveness of WeakMap entries, so we delay + * visiting entries. + */ + eagerlyTraceWeakMaps = JS_FALSE; +} + +void +GCMarker::stop() +{ + JS_ASSERT(isDrained()); + + JS_ASSERT(started); + started = false; + + JS_ASSERT(!unmarkedArenaStackTop); + JS_ASSERT(markLaterArenas == 0); + + JS_ASSERT(grayRoots.empty()); + grayFailed = false; +} + +void +GCMarker::reset() +{ + color = BLACK; + + stack.reset(); + JS_ASSERT(isMarkStackEmpty()); + + while (unmarkedArenaStackTop) { + ArenaHeader *aheader = unmarkedArenaStackTop; + JS_ASSERT(aheader->hasDelayedMarking); + JS_ASSERT(markLaterArenas); + unmarkedArenaStackTop = aheader->getNextDelayedMarking(); + aheader->hasDelayedMarking = 0; + aheader->markOverflow = 0; + aheader->allocatedDuringIncremental = 0; + markLaterArenas--; + } + JS_ASSERT(isDrained()); + JS_ASSERT(!markLaterArenas); + + grayRoots.clearAndFree(); + grayFailed = false; +} + /* * When the native stack is low, the GC does not call JS_TraceChildren to mark * the reachable "children" of the thing. Rather the thing is put aside and @@ -1736,63 +1967,52 @@ namespace js { * from the stack until it empties. */ -GCMarker::GCMarker(JSContext *cx) - : color(BLACK), - unmarkedArenaStackTop(NULL), - stack(cx->runtime->gcMarkStackArray) +inline void +GCMarker::delayMarkingArena(ArenaHeader *aheader) { - JS_TracerInit(this, cx, NULL); - markLaterArenas = 0; -#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS - conservativeDumpFileName = getenv("JS_DUMP_CONSERVATIVE_GC_ROOTS"); - memset(&conservativeStats, 0, sizeof(conservativeStats)); -#endif - - /* - * The GC is recomputing the liveness of WeakMap entries, so we - * delay visting entries.
- */ - eagerlyTraceWeakMaps = JS_FALSE; -} - -GCMarker::~GCMarker() -{ -#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS - dumpConservativeRoots(); -#endif + if (aheader->hasDelayedMarking) { + /* Arena already scheduled to be marked later */ + return; + } + aheader->setNextDelayedMarking(unmarkedArenaStackTop); + unmarkedArenaStackTop = aheader; + markLaterArenas++; } void GCMarker::delayMarkingChildren(const void *thing) { const Cell *cell = reinterpret_cast(thing); - ArenaHeader *aheader = cell->arenaHeader(); - if (aheader->hasDelayedMarking) { - /* Arena already scheduled to be marked later */ - return; - } - aheader->setNextDelayedMarking(unmarkedArenaStackTop); - unmarkedArenaStackTop = aheader->getArena(); - markLaterArenas++; -} - -static void -MarkDelayedChildren(GCMarker *trc, Arena *a) -{ - AllocKind allocKind = a->aheader.getAllocKind(); - JSGCTraceKind traceKind = MapAllocToTraceKind(allocKind); - size_t thingSize = Arena::thingSize(allocKind); - uintptr_t end = a->thingsEnd(); - for (uintptr_t thing = a->thingsStart(allocKind); thing != end; thing += thingSize) { - Cell *t = reinterpret_cast(thing); - if (t->isMarked()) - JS_TraceChildren(trc, t, traceKind); - } + cell->arenaHeader()->markOverflow = 1; + delayMarkingArena(cell->arenaHeader()); } void -GCMarker::markDelayedChildren() +GCMarker::markDelayedChildren(ArenaHeader *aheader) { + if (aheader->markOverflow) { + bool always = aheader->allocatedDuringIncremental; + aheader->markOverflow = 0; + + for (CellIterUnderGC i(aheader); !i.done(); i.next()) { + Cell *t = i.getCell(); + if (always || t->isMarked()) { + t->markIfUnmarked(); + JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind())); + } + } + } else { + JS_ASSERT(aheader->allocatedDuringIncremental); + PushArena(this, aheader); + } + aheader->allocatedDuringIncremental = 0; +} + +bool +GCMarker::markDelayedChildren(SliceBudget &budget) +{ + gcstats::AutoPhase ap(runtime->gcStats, gcstats::PHASE_MARK_DELAYED); + JS_ASSERT(unmarkedArenaStackTop); do { /* @@ -1800,15 +2020,101 @@ GCMarker::markDelayedChildren() * marking of its things. For that we pop arena from the stack and * clear its hasDelayedMarking flag before we begin the marking. 
*/ - Arena *a = unmarkedArenaStackTop; - JS_ASSERT(a->aheader.hasDelayedMarking); + ArenaHeader *aheader = unmarkedArenaStackTop; + JS_ASSERT(aheader->hasDelayedMarking); JS_ASSERT(markLaterArenas); - unmarkedArenaStackTop = a->aheader.getNextDelayedMarking(); - a->aheader.hasDelayedMarking = 0; + unmarkedArenaStackTop = aheader->getNextDelayedMarking(); + aheader->hasDelayedMarking = 0; markLaterArenas--; - MarkDelayedChildren(this, a); + markDelayedChildren(aheader); + + if (budget.checkOverBudget()) + return false; } while (unmarkedArenaStackTop); JS_ASSERT(!markLaterArenas); + + return true; +} + +#ifdef DEBUG +void +GCMarker::checkCompartment(void *p) +{ + JS_ASSERT(started); + + Cell *cell = static_cast(p); + if (runtime->gcRunning && runtime->gcCurrentCompartment) + JS_ASSERT(cell->compartment() == runtime->gcCurrentCompartment); + else if (runtime->gcIncrementalCompartment) + JS_ASSERT(cell->compartment() == runtime->gcIncrementalCompartment); +} +#endif + +bool +GCMarker::hasBufferedGrayRoots() const +{ + return !grayFailed; +} + +void +GCMarker::startBufferingGrayRoots() +{ + JS_ASSERT(!callback); + callback = GrayCallback; + JS_ASSERT(IS_GC_MARKING_TRACER(this)); +} + +void +GCMarker::endBufferingGrayRoots() +{ + JS_ASSERT(callback == GrayCallback); + callback = NULL; + JS_ASSERT(IS_GC_MARKING_TRACER(this)); +} + +void +GCMarker::markBufferedGrayRoots() +{ + JS_ASSERT(!grayFailed); + + for (GrayRoot *elem = grayRoots.begin(); elem != grayRoots.end(); elem++) { +#ifdef DEBUG + debugPrinter = elem->debugPrinter; + debugPrintArg = elem->debugPrintArg; + debugPrintIndex = elem->debugPrintIndex; +#endif + MarkKind(this, elem->thing, elem->kind); + } + + grayRoots.clearAndFree(); +} + +void +GCMarker::appendGrayRoot(void *thing, JSGCTraceKind kind) +{ + JS_ASSERT(started); + + if (grayFailed) + return; + + GrayRoot root(thing, kind); +#ifdef DEBUG + root.debugPrinter = debugPrinter; + root.debugPrintArg = debugPrintArg; + root.debugPrintIndex = debugPrintIndex; +#endif + + if (!grayRoots.append(root)) { + grayRoots.clearAndFree(); + grayFailed = true; + } +} + +void +GCMarker::GrayCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind) +{ + GCMarker *gcmarker = static_cast(trc); + gcmarker->appendGrayRoot(*thingp, kind); } } /* namespace js */ @@ -1865,6 +2171,17 @@ gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc) MarkGCThingRoot(trc, entry.key, "locked object"); } +namespace js { + +void +MarkCompartmentActive(StackFrame *fp) +{ + if (fp->isScriptFrame()) + fp->script()->compartment()->active = true; +} + +} /* namespace js */ + void AutoIdArray::trace(JSTracer *trc) { @@ -2004,25 +2321,19 @@ AutoGCRooter::traceAll(JSTracer *trc) namespace js { -void -MarkWeakReferences(GCMarker *gcmarker) -{ - JS_ASSERT(gcmarker->isMarkStackEmpty()); - while (WatchpointMap::markAllIteratively(gcmarker) || - WeakMapBase::markAllIteratively(gcmarker) || - Debugger::markAllIteratively(gcmarker)) { - gcmarker->drainMarkStack(); - } - JS_ASSERT(gcmarker->isMarkStackEmpty()); -} - static void -MarkRuntime(JSTracer *trc) +MarkRuntime(JSTracer *trc, bool useSavedRoots = false) { JSRuntime *rt = trc->runtime; + JS_ASSERT(trc->callback != GCMarker::GrayCallback); + if (rt->gcCurrentCompartment) { + for (CompartmentsIter c(rt); !c.done(); c.next()) + c->markCrossCompartmentWrappers(trc); + Debugger::markCrossCompartmentDebuggerObjectReferents(trc); + } if (rt->hasContexts()) - MarkConservativeStackRoots(trc, rt); + MarkConservativeStackRoots(trc, useSavedRoots); for (RootRange r = 
rt->gcRootsHash.all(); !r.empty(); r.popFront()) gc_root_traversal(trc, r.front()); @@ -2077,13 +2388,18 @@ MarkRuntime(JSTracer *trc) if (JSTraceDataOp op = rt->gcBlackRootsTraceOp) (*op)(trc, rt->gcBlackRootsData); - if (!IS_GC_MARKING_TRACER(trc)) { - /* We don't want to miss these when called from TraceRuntime. */ - if (JSTraceDataOp op = rt->gcGrayRootsTraceOp) + /* During GC, this buffers up the gray roots and doesn't mark them. */ + if (JSTraceDataOp op = rt->gcGrayRootsTraceOp) { + if (IS_GC_MARKING_TRACER(trc)) { + GCMarker *gcmarker = static_cast(trc); + gcmarker->startBufferingGrayRoots(); (*op)(trc, rt->gcGrayRootsData); + gcmarker->endBufferingGrayRoots(); + } else { + (*op)(trc, rt->gcGrayRootsData); + } } } - void TriggerGC(JSRuntime *rt, gcreason::Reason reason) { @@ -2105,12 +2421,12 @@ TriggerCompartmentGC(JSCompartment *comp, gcreason::Reason reason) JSRuntime *rt = comp->rt; JS_ASSERT(!rt->gcRunning); - if (rt->gcZeal()) { + if (rt->gcZeal() == ZealAllocValue) { TriggerGC(rt, reason); return; } - if (rt->gcMode != JSGC_MODE_COMPARTMENT || comp == rt->atomsCompartment) { + if (rt->gcMode == JSGC_MODE_GLOBAL || comp == rt->atomsCompartment) { /* We can't do a compartmental GC of the default compartment. */ TriggerGC(rt, reason); return; @@ -2139,19 +2455,23 @@ MaybeGC(JSContext *cx) JSRuntime *rt = cx->runtime; JS_ASSERT(rt->onOwnerThread()); - if (rt->gcZeal()) { - js_GC(cx, NULL, GC_NORMAL, gcreason::MAYBEGC); + if (rt->gcZeal() == ZealAllocValue || rt->gcZeal() == ZealPokeValue) { + GC(cx, NULL, GC_NORMAL, gcreason::MAYBEGC); return; } JSCompartment *comp = cx->compartment; if (rt->gcIsNeeded) { - js_GC(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL, GC_NORMAL, gcreason::MAYBEGC); + GCSlice(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL, + GC_NORMAL, gcreason::MAYBEGC); return; } - if (comp->gcBytes > 8192 && comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4)) { - js_GC(cx, (rt->gcMode == JSGC_MODE_COMPARTMENT) ? comp : NULL, GC_NORMAL, gcreason::MAYBEGC); + if (comp->gcBytes > 8192 && + comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4) && + rt->gcIncrementalState == NO_INCREMENTAL) + { + GCSlice(cx, NULL, GC_NORMAL, gcreason::MAYBEGC); return; } @@ -2165,7 +2485,7 @@ MaybeGC(JSContext *cx) if (rt->gcChunkAllocationSinceLastGC || rt->gcNumArenasFreeCommitted > FreeCommittedArenasThreshold) { - js_GC(cx, NULL, GC_SHRINK, gcreason::MAYBEGC); + GCSlice(cx, NULL, GC_SHRINK, gcreason::MAYBEGC); } else { rt->gcNextFullGCTime = now + GC_IDLE_FULL_SPAN; } @@ -2612,7 +2932,7 @@ SweepCompartments(JSContext *cx, JSGCInvocationKind gckind) } static void -BeginMarkPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind) +PurgeRuntime(JSContext *cx) { JSRuntime *rt = cx->runtime; @@ -2626,43 +2946,100 @@ BeginMarkPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind) while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) acx->purge(); } +} + +static void +BeginMarkPhase(JSContext *cx) +{ + JSRuntime *rt = cx->runtime; + GCMarker *gcmarker = &rt->gcMarker; + + rt->gcStartNumber = rt->gcNumber; + + /* Reset weak map list. */ + WeakMapBase::resetWeakMapList(rt); + + /* + * We must purge the runtime at the beginning of an incremental GC. The + * danger if we purge later is that the snapshot invariant of incremental + * GC will be broken, as follows. If some object is reachable only through + * some cache (say the dtoaCache) then it will not be part of the snapshot. 
+ * If we purge after root marking, then the mutator could obtain a pointer + * to the object and start using it. This object might never be marked, so + * a GC hazard would exist. + */ + PurgeRuntime(cx); /* * Mark phase. */ - rt->gcStats.beginPhase(gcstats::PHASE_MARK); + gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_MARK); + gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_MARK_ROOTS); for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) r.front()->bitmap.clear(); - if (rt->gcCurrentCompartment) { - for (CompartmentsIter c(rt); !c.done(); c.next()) - c->markCrossCompartmentWrappers(gcmarker); - Debugger::markCrossCompartmentDebuggerObjectReferents(gcmarker); - } - MarkRuntime(gcmarker); } +void +MarkWeakReferences(GCMarker *gcmarker) +{ + JS_ASSERT(gcmarker->isDrained()); + while (WatchpointMap::markAllIteratively(gcmarker) || + WeakMapBase::markAllIteratively(gcmarker) || + Debugger::markAllIteratively(gcmarker)) + { + SliceBudget budget; + gcmarker->drainMarkStack(budget); + } + JS_ASSERT(gcmarker->isDrained()); +} + static void -EndMarkPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind) +MarkGrayAndWeak(JSContext *cx) +{ + JSRuntime *rt = cx->runtime; + FullGCMarker *gcmarker = &rt->gcMarker; + + JS_ASSERT(gcmarker->isDrained()); + MarkWeakReferences(gcmarker); + + gcmarker->setMarkColorGray(); + if (gcmarker->hasBufferedGrayRoots()) { + gcmarker->markBufferedGrayRoots(); + } else { + if (JSTraceDataOp op = rt->gcGrayRootsTraceOp) + (*op)(gcmarker, rt->gcGrayRootsData); + } + SliceBudget budget; + gcmarker->drainMarkStack(budget); + MarkWeakReferences(gcmarker); + JS_ASSERT(gcmarker->isDrained()); +} + +#ifdef DEBUG +static void +ValidateIncrementalMarking(JSContext *cx); +#endif + +static void +EndMarkPhase(JSContext *cx) { JSRuntime *rt = cx->runtime; - JS_ASSERT(gcmarker->isMarkStackEmpty()); - MarkWeakReferences(gcmarker); - - if (JSTraceDataOp op = rt->gcGrayRootsTraceOp) { - gcmarker->setMarkColorGray(); - (*op)(gcmarker, rt->gcGrayRootsData); - gcmarker->drainMarkStack(); - MarkWeakReferences(gcmarker); + { + gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_MARK); + gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_MARK_OTHER); + MarkGrayAndWeak(cx); } - JS_ASSERT(gcmarker->isMarkStackEmpty()); - rt->gcIncrementalTracer = NULL; + JS_ASSERT(rt->gcMarker.isDrained()); - rt->gcStats.endPhase(gcstats::PHASE_MARK); +#ifdef DEBUG + if (rt->gcIncrementalState != NO_INCREMENTAL) + ValidateIncrementalMarking(cx); +#endif if (rt->gcCallback) (void) rt->gcCallback(cx, JSGC_MARK_END); @@ -2678,10 +3055,97 @@ EndMarkPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind) #endif } +#ifdef DEBUG static void -SweepPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind) +ValidateIncrementalMarking(JSContext *cx) { JSRuntime *rt = cx->runtime; + FullGCMarker *gcmarker = &rt->gcMarker; + + js::gc::State state = rt->gcIncrementalState; + rt->gcIncrementalState = NO_INCREMENTAL; + + /* As we're re-doing marking, we need to reset the weak map list. 
*/ + WeakMapBase::resetWeakMapList(rt); + + JS_ASSERT(gcmarker->isDrained()); + gcmarker->reset(); + + typedef HashMap BitmapMap; + BitmapMap map(cx); + map.init(); + + for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) { + ChunkBitmap *bitmap = &r.front()->bitmap; + uintptr_t *entry = (uintptr_t *)js_malloc(sizeof(bitmap->bitmap)); + if (entry) + memcpy(entry, bitmap->bitmap, sizeof(bitmap->bitmap)); + map.putNew(r.front(), entry); + } + + for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) + r.front()->bitmap.clear(); + + MarkRuntime(gcmarker, true); + SliceBudget budget; + rt->gcMarker.drainMarkStack(budget); + MarkGrayAndWeak(cx); + + for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) { + Chunk *chunk = r.front(); + ChunkBitmap *bitmap = &chunk->bitmap; + uintptr_t *entry = map.lookup(r.front())->value; + ChunkBitmap incBitmap; + + if (!entry) + continue; + + memcpy(incBitmap.bitmap, entry, sizeof(incBitmap.bitmap)); + js_free(entry); + + for (size_t i = 0; i < ArenasPerChunk; i++) { + Arena *arena = &chunk->arenas[i]; + if (!arena->aheader.allocated()) + continue; + if (rt->gcCurrentCompartment && arena->aheader.compartment != rt->gcCurrentCompartment) + continue; + if (arena->aheader.allocatedDuringIncremental) + continue; + + AllocKind kind = arena->aheader.getAllocKind(); + uintptr_t thing = arena->thingsStart(kind); + uintptr_t end = arena->thingsEnd(); + while (thing < end) { + Cell *cell = (Cell *)thing; + if (bitmap->isMarked(cell, BLACK) && !incBitmap.isMarked(cell, BLACK)) { + JS_DumpHeap(cx, stdout, NULL, JSGCTraceKind(0), NULL, 100000, NULL); + printf("Assertion cell: %p (%d)\n", (void *)cell, cell->getAllocKind()); + } + JS_ASSERT_IF(bitmap->isMarked(cell, BLACK), incBitmap.isMarked(cell, BLACK)); + thing += Arena::thingSize(kind); + } + } + + memcpy(bitmap->bitmap, incBitmap.bitmap, sizeof(incBitmap.bitmap)); + } + + rt->gcIncrementalState = state; +} +#endif + +static void +SweepPhase(JSContext *cx, JSGCInvocationKind gckind) +{ + JSRuntime *rt = cx->runtime; + +#ifdef JS_THREADSAFE + if (rt->hasContexts() && rt->gcHelperThread.prepareForBackgroundSweep()) + cx->gcBackgroundFree = &rt->gcHelperThread; +#endif + + /* Purge the ArenaLists before sweeping. */ + for (GCCompartmentsIter c(rt); !c.done(); c.next()) + c->arenas.purge(); /* * Sweep phase. @@ -2700,7 +3164,7 @@ SweepPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind) gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP); /* Finalize unreachable (key,value) pairs in all weak maps. */ - WeakMapBase::sweepAll(gcmarker); + WeakMapBase::sweepAll(&rt->gcMarker); js_SweepAtomState(rt); @@ -2781,6 +3245,9 @@ SweepPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind) if (rt->gcCallback) (void) rt->gcCallback(cx, JSGC_FINALIZE_END); } + + for (CompartmentsIter c(rt); !c.done(); c.next()) + c->setGCLastBytes(c->gcBytes, gckind); } /* Perform mark-and-sweep GC. If comp is set, we perform a single-compartment GC. */ @@ -2788,55 +3255,57 @@ static void MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind) { JSRuntime *rt = cx->runtime; - rt->gcNumber++; - - /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. 
*/ - rt->gcIsNeeded = false; - rt->gcTriggerCompartment = NULL; - - /* Clear gcMallocBytes for all compartments */ - JSCompartment **read = rt->compartments.begin(); - JSCompartment **end = rt->compartments.end(); - JS_ASSERT(rt->compartments.length() >= 1); - - while (read < end) { - JSCompartment *compartment = *read++; - compartment->resetGCMallocBytes(); - } - - /* Reset weak map list. */ - WeakMapBase::resetWeakMapList(rt); - - /* Reset malloc counter. */ - rt->resetGCMallocBytes(); AutoUnlockGC unlock(rt); - GCMarker gcmarker(cx); - JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker)); - JS_ASSERT(gcmarker.getMarkColor() == BLACK); - rt->gcIncrementalTracer = &gcmarker; + rt->gcMarker.start(rt, cx); + JS_ASSERT(!rt->gcMarker.callback); - BeginMarkPhase(cx, &gcmarker, gckind); - gcmarker.drainMarkStack(); - EndMarkPhase(cx, &gcmarker, gckind); - SweepPhase(cx, &gcmarker, gckind); + BeginMarkPhase(cx); + { + gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK); + SliceBudget budget; + rt->gcMarker.drainMarkStack(budget); + } + EndMarkPhase(cx); + SweepPhase(cx, gckind); + + rt->gcMarker.stop(); } -class AutoGCSession { +/* + * This class should be used by any code that needs exclusive access to the + * heap in order to trace through it... + */ +class AutoHeapSession { public: - explicit AutoGCSession(JSContext *cx); + explicit AutoHeapSession(JSContext *cx); + ~AutoHeapSession(); + + protected: + JSContext *context; + + private: + AutoHeapSession(const AutoHeapSession&) MOZ_DELETE; + void operator=(const AutoHeapSession&) MOZ_DELETE; +}; + +/* ...while this class is to be used only for garbage collection. */ +class AutoGCSession : AutoHeapSession { + public: + explicit AutoGCSession(JSContext *cx, JSCompartment *comp); ~AutoGCSession(); private: - JSContext *context; - - AutoGCSession(const AutoGCSession&) MOZ_DELETE; - void operator=(const AutoGCSession&) MOZ_DELETE; + /* + * We should not be depending on cx->compartment in the GC, so set it to + * NULL to look for violations. + */ + SwitchToCompartment switcher; }; -/* Start a new GC session. */ -AutoGCSession::AutoGCSession(JSContext *cx) +/* Start a new heap session. */ +AutoHeapSession::AutoHeapSession(JSContext *cx) : context(cx) { JS_ASSERT(!cx->runtime->noGCOrAllocationCheck); @@ -2845,144 +3314,111 @@ AutoGCSession::AutoGCSession(JSContext *cx) rt->gcRunning = true; } -AutoGCSession::~AutoGCSession() +AutoHeapSession::~AutoHeapSession() { JSRuntime *rt = context->runtime; rt->gcRunning = false; } -/* - * GC, repeatedly if necessary, until we think we have not created any new - * garbage. We disable inlining to ensure that the bottom of the stack with - * possible GC roots recorded in js_GC excludes any pointers we use during the - * marking implementation. - */ -static JS_NEVER_INLINE void -GCCycle(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind) +AutoGCSession::AutoGCSession(JSContext *cx, JSCompartment *comp) + : AutoHeapSession(cx), + switcher(cx, (JSCompartment *)NULL) { JSRuntime *rt = cx->runtime; - JS_ASSERT_IF(comp, comp != rt->atomsCompartment); - JS_ASSERT_IF(comp, rt->gcMode == JSGC_MODE_COMPARTMENT); - - /* Recursive GC is no-op. */ - if (rt->gcMarkAndSweep) - return; - - AutoGCSession gcsession(cx); - - /* Don't GC if we are reporting an OOM. */ - if (rt->inOOMReport) - return; - - /* - * We should not be depending on cx->compartment in the GC, so set it to - * NULL to look for violations.
- */ - SwitchToCompartment sc(cx, (JSCompartment *)NULL); - JS_ASSERT(!rt->gcCurrentCompartment); rt->gcCurrentCompartment = comp; - rt->gcMarkAndSweep = true; + rt->gcIsNeeded = false; + rt->gcTriggerCompartment = NULL; + rt->gcInterFrameGC = true; -#ifdef JS_THREADSAFE - /* - * As we about to purge caches and clear the mark bits we must wait for - * any background finalization to finish. We must also wait for the - * background allocation to finish so we can avoid taking the GC lock - * when manipulating the chunks during the GC. - */ - JS_ASSERT(!cx->gcBackgroundFree); - rt->gcHelperThread.waitBackgroundSweepOrAllocEnd(); - if (rt->hasContexts() && rt->gcHelperThread.prepareForBackgroundSweep()) - cx->gcBackgroundFree = &rt->gcHelperThread; -#endif + rt->gcNumber++; - MarkAndSweep(cx, gckind); - -#ifdef JS_THREADSAFE - if (cx->gcBackgroundFree) { - JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread); - cx->gcBackgroundFree = NULL; - rt->gcHelperThread.startBackgroundSweep(cx, gckind == GC_SHRINK); - } -#endif - - rt->gcMarkAndSweep = false; - rt->gcCurrentCompartment = NULL; + rt->resetGCMallocBytes(); + /* Clear gcMallocBytes for all compartments */ for (CompartmentsIter c(rt); !c.done(); c.next()) - c->setGCLastBytes(c->gcBytes, gckind); + c->resetGCMallocBytes(); } -void -js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason) +AutoGCSession::~AutoGCSession() { - JSRuntime *rt = cx->runtime; - JS_AbortIfWrongThread(rt); - -#ifdef JS_GC_ZEAL - struct AutoVerifyBarriers { - JSContext *cx; - bool inVerify; - AutoVerifyBarriers(JSContext *cx) : cx(cx), inVerify(cx->runtime->gcVerifyData) { - if (inVerify) EndVerifyBarriers(cx); - } - ~AutoVerifyBarriers() { if (inVerify) StartVerifyBarriers(cx); } - } av(cx); -#endif - - RecordNativeStackTopForGC(cx); - - gcstats::AutoGC agc(rt->gcStats, comp, reason); - - do { - /* - * Let the API user decide to defer a GC if it wants to (unless this - * is the last context). Invoke the callback regardless. Sample the - * callback in case we are freely racing with a JS_SetGCCallback{,RT} - * on another thread. - */ - if (JSGCCallback callback = rt->gcCallback) { - if (!callback(cx, JSGC_BEGIN) && rt->hasContexts()) - return; - } - - { - /* Lock out other GC allocator and collector invocations. */ - AutoLockGC lock(rt); - rt->gcPoke = false; - GCCycle(cx, comp, gckind); - } - - /* We re-sample the callback again as the finalizers can change it. */ - if (JSGCCallback callback = rt->gcCallback) - (void) callback(cx, JSGC_END); - - /* - * On shutdown, iterate until finalizers or the JSGC_END callback - * stop creating garbage. 
- */ - } while (!rt->hasContexts() && rt->gcPoke); + JSRuntime *rt = context->runtime; + rt->gcCurrentCompartment = NULL; rt->gcNextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN; - rt->gcChunkAllocationSinceLastGC = false; } -namespace js { - -void -ShrinkGCBuffers(JSRuntime *rt) +static void +ResetIncrementalGC(JSRuntime *rt) { - AutoLockGC lock(rt); - JS_ASSERT(!rt->gcRunning); -#ifndef JS_THREADSAFE - ExpireChunksAndArenas(rt, true); -#else - rt->gcHelperThread.startBackgroundShrink(); -#endif + if (rt->gcIncrementalState == NO_INCREMENTAL) + return; + + for (CompartmentsIter c(rt); !c.done(); c.next()) { + if (!rt->gcIncrementalCompartment || rt->gcIncrementalCompartment == c) { + c->needsBarrier_ = false; + c->barrierMarker_.reset(); + c->barrierMarker_.stop(); + } + JS_ASSERT(!c->needsBarrier_); + } + + rt->gcIncrementalCompartment = NULL; + rt->gcMarker.reset(); + rt->gcMarker.stop(); + rt->gcIncrementalState = NO_INCREMENTAL; + + rt->gcStats.reset(); +} + +class AutoGCSlice { + public: + AutoGCSlice(JSContext *cx); + ~AutoGCSlice(); + + private: + JSContext *context; +}; + +AutoGCSlice::AutoGCSlice(JSContext *cx) + : context(cx) +{ + JSRuntime *rt = context->runtime; + + /* + * During incremental GC, the compartment's active flag determines whether + * there are stack frames active for any of its scripts. Normally this flag + * is set at the beginning of the mark phase. During incremental GC, we also + * set it at the start of every phase. + */ + rt->stackSpace.markActiveCompartments(); + + for (GCCompartmentsIter c(rt); !c.done(); c.next()) { + /* Clear this early so we don't do any write barriers during GC. */ + if (rt->gcIncrementalState == MARK) + c->needsBarrier_ = false; + else + JS_ASSERT(!c->needsBarrier_); + } +} + +AutoGCSlice::~AutoGCSlice() +{ + JSRuntime *rt = context->runtime; + + for (GCCompartmentsIter c(rt); !c.done(); c.next()) { + if (rt->gcIncrementalState == MARK) { + c->needsBarrier_ = true; + c->arenas.prepareForIncrementalGC(c); + } else { + JS_ASSERT(rt->gcIncrementalState == NO_INCREMENTAL); + + c->needsBarrier_ = false; + } + } } class AutoCopyFreeListToArenas { @@ -3001,6 +3437,296 @@ class AutoCopyFreeListToArenas { } }; +static void +IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind) +{ + JS_ASSERT(budget != SliceBudget::Unlimited); + + JSRuntime *rt = cx->runtime; + + AutoUnlockGC unlock(rt); + AutoGCSlice slice(cx); + + gc::State initialState = rt->gcIncrementalState; + + if (rt->gcIncrementalState == NO_INCREMENTAL) { + JS_ASSERT(!rt->gcIncrementalCompartment); + rt->gcIncrementalCompartment = rt->gcCurrentCompartment; + rt->gcIncrementalState = MARK_ROOTS; + rt->gcLastMarkSlice = false; + } + + if (rt->gcIncrementalState == MARK_ROOTS) { + rt->gcMarker.start(rt, cx); + JS_ASSERT(IS_GC_MARKING_TRACER(&rt->gcMarker)); + + for (GCCompartmentsIter c(rt); !c.done(); c.next()) { + c->discardJitCode(cx); + c->barrierMarker_.start(rt, NULL); + } + + BeginMarkPhase(cx); + + rt->gcIncrementalState = MARK; + } + + if (rt->gcIncrementalState == MARK) { + gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK); + SliceBudget sliceBudget(budget); + + /* If we needed delayed marking for gray roots, then collect until done. 
*/ + if (!rt->gcMarker.hasBufferedGrayRoots()) + sliceBudget.reset(); + + bool finished = rt->gcMarker.drainMarkStack(sliceBudget); + + for (GCCompartmentsIter c(rt); !c.done(); c.next()) { + c->barrierMarker_.context = cx; + finished &= c->barrierMarker_.drainMarkStack(sliceBudget); + c->barrierMarker_.context = NULL; + } + + if (finished) { + JS_ASSERT(rt->gcMarker.isDrained()); +#ifdef DEBUG + for (GCCompartmentsIter c(rt); !c.done(); c.next()) + JS_ASSERT(c->barrierMarker_.isDrained()); +#endif + if (initialState == MARK && !rt->gcLastMarkSlice) + rt->gcLastMarkSlice = true; + else + rt->gcIncrementalState = SWEEP; + } + } + + if (rt->gcIncrementalState == SWEEP) { + EndMarkPhase(cx); + SweepPhase(cx, gckind); + + rt->gcMarker.stop(); + + /* JIT code was already discarded during sweeping. */ + for (GCCompartmentsIter c(rt); !c.done(); c.next()) + c->barrierMarker_.stop(); + + rt->gcIncrementalCompartment = NULL; + + rt->gcIncrementalState = NO_INCREMENTAL; + } +} + +static bool +IsIncrementalGCSafe(JSContext *cx) +{ + JSRuntime *rt = cx->runtime; + + if (rt->gcCompartmentCreated) { + rt->gcCompartmentCreated = false; + return false; + } + + if (rt->gcKeepAtoms) + return false; + + for (GCCompartmentsIter c(rt); !c.done(); c.next()) { + if (c->activeAnalysis) + return false; + } + + if (rt->gcIncrementalState != NO_INCREMENTAL && + rt->gcCurrentCompartment != rt->gcIncrementalCompartment) + { + return false; + } + + if (!rt->gcIncrementalEnabled) + return false; + + return true; +} + +static bool +IsIncrementalGCAllowed(JSContext *cx) +{ + JSRuntime *rt = cx->runtime; + + if (rt->gcMode != JSGC_MODE_INCREMENTAL) + return false; + +#ifdef ANDROID + /* Incremental GC is disabled on Android for now. */ + return false; +#endif + + if (!IsIncrementalGCSafe(cx)) + return false; + + for (CompartmentsIter c(rt); !c.done(); c.next()) { + if (c->gcBytes > c->gcTriggerBytes) + return false; + } + + return true; +} + +/* + * GC, repeatedly if necessary, until we think we have not created any new + * garbage. We disable inlining to ensure that the bottom of the stack with + * possible GC roots recorded in js::GC excludes any pointers we use during the + * marking implementation. + */ +static JS_NEVER_INLINE void +GCCycle(JSContext *cx, JSCompartment *comp, int64_t budget, JSGCInvocationKind gckind) +{ + JSRuntime *rt = cx->runtime; + + JS_ASSERT_IF(comp, comp != rt->atomsCompartment); + JS_ASSERT_IF(comp, rt->gcMode != JSGC_MODE_GLOBAL); + + /* Recursive GC is a no-op. */ + if (rt->gcRunning) + return; + + AutoGCSession gcsession(cx, comp); + + /* Don't GC if we are reporting an OOM. */ + if (rt->inOOMReport) + return; + +#ifdef JS_THREADSAFE + /* + * As we are about to purge caches and clear the mark bits we must wait for + * any background finalization to finish. We must also wait for the + * background allocation to finish so we can avoid taking the GC lock + * when manipulating the chunks during the GC.
+ */ + JS_ASSERT(!cx->gcBackgroundFree); + rt->gcHelperThread.waitBackgroundSweepOrAllocEnd(); +#endif + + if (budget != SliceBudget::Unlimited) { + if (!IsIncrementalGCAllowed(cx)) + budget = SliceBudget::Unlimited; + } + + if (budget == SliceBudget::Unlimited) + ResetIncrementalGC(rt); + + AutoCopyFreeListToArenas copy(rt); + + if (budget == SliceBudget::Unlimited) + MarkAndSweep(cx, gckind); + else + IncrementalGCSlice(cx, budget, gckind); + +#ifdef DEBUG + if (rt->gcIncrementalState == NO_INCREMENTAL) { + for (CompartmentsIter c(rt); !c.done(); c.next()) + JS_ASSERT(!c->needsBarrier_); + } +#endif +#ifdef JS_THREADSAFE + if (rt->gcIncrementalState == NO_INCREMENTAL) { + if (cx->gcBackgroundFree) { + JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread); + cx->gcBackgroundFree = NULL; + rt->gcHelperThread.startBackgroundSweep(cx, gckind == GC_SHRINK); + } + } +#endif +} + +static void +Collect(JSContext *cx, JSCompartment *comp, int64_t budget, + JSGCInvocationKind gckind, gcreason::Reason reason) +{ + JSRuntime *rt = cx->runtime; + JS_AbortIfWrongThread(rt); + + JS_ASSERT_IF(budget != SliceBudget::Unlimited, JSGC_INCREMENTAL); + +#ifdef JS_GC_ZEAL + struct AutoVerifyBarriers { + JSContext *cx; + bool inVerify; + AutoVerifyBarriers(JSContext *cx) : cx(cx), inVerify(cx->runtime->gcVerifyData) { + if (inVerify) EndVerifyBarriers(cx); + } + ~AutoVerifyBarriers() { if (inVerify) StartVerifyBarriers(cx); } + } av(cx); +#endif + + RecordNativeStackTopForGC(cx); + + /* This is a heuristic to avoid resets. */ + if (rt->gcIncrementalState != NO_INCREMENTAL && !rt->gcIncrementalCompartment) + comp = NULL; + + gcstats::AutoGCSlice agc(rt->gcStats, comp, reason); + + do { + /* + * Let the API user decide to defer a GC if it wants to (unless this + * is the last context). Invoke the callback regardless. + */ + if (rt->gcIncrementalState == NO_INCREMENTAL) { + if (JSGCCallback callback = rt->gcCallback) { + if (!callback(cx, JSGC_BEGIN) && rt->hasContexts()) + return; + } + } + + { + /* Lock out other GC allocator and collector invocations. */ + AutoLockGC lock(rt); + rt->gcPoke = false; + GCCycle(cx, comp, budget, gckind); + } + + if (rt->gcIncrementalState == NO_INCREMENTAL) { + if (JSGCCallback callback = rt->gcCallback) + (void) callback(cx, JSGC_END); + } + + /* + * On shutdown, iterate until finalizers or the JSGC_END callback + * stop creating garbage. 
+ */ + } while (!rt->hasContexts() && rt->gcPoke); +} + +namespace js { + +void +GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason) +{ + Collect(cx, comp, SliceBudget::Unlimited, gckind, reason); +} + +void +GCSlice(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason) +{ + Collect(cx, comp, cx->runtime->gcSliceBudget, gckind, reason); +} + +void +GCDebugSlice(JSContext *cx, int64_t objCount) +{ + Collect(cx, NULL, SliceBudget::WorkBudget(objCount), GC_NORMAL, gcreason::API); +} + +void +ShrinkGCBuffers(JSRuntime *rt) +{ + AutoLockGC lock(rt); + JS_ASSERT(!rt->gcRunning); +#ifndef JS_THREADSAFE + ExpireChunksAndArenas(rt, true); +#else + rt->gcHelperThread.startBackgroundShrink(); +#endif +} + void TraceRuntime(JSTracer *trc) { @@ -3012,7 +3738,7 @@ TraceRuntime(JSTracer *trc) JSRuntime *rt = cx->runtime; if (!rt->gcRunning) { AutoLockGC lock(rt); - AutoGCSession gcsession(cx); + AutoHeapSession session(cx); rt->gcHelperThread.waitBackgroundSweepEnd(); AutoUnlockGC unlock(rt); @@ -3073,7 +3799,7 @@ IterateCompartments(JSContext *cx, void *data, JS_ASSERT(!rt->gcRunning); AutoLockGC lock(rt); - AutoGCSession gcsession(cx); + AutoHeapSession session(cx); #ifdef JS_THREADSAFE rt->gcHelperThread.waitBackgroundSweepEnd(); #endif @@ -3097,7 +3823,7 @@ IterateCompartmentsArenasCells(JSContext *cx, void *data, JS_ASSERT(!rt->gcRunning); AutoLockGC lock(rt); - AutoGCSession gcsession(cx); + AutoHeapSession session(cx); #ifdef JS_THREADSAFE rt->gcHelperThread.waitBackgroundSweepEnd(); #endif @@ -3127,7 +3853,7 @@ IterateChunks(JSContext *cx, void *data, IterateChunkCallback chunkCallback) JS_ASSERT(!rt->gcRunning); AutoLockGC lock(rt); - AutoGCSession gcsession(cx); + AutoHeapSession session(cx); #ifdef JS_THREADSAFE rt->gcHelperThread.waitBackgroundSweepEnd(); #endif @@ -3148,7 +3874,7 @@ IterateCells(JSContext *cx, JSCompartment *compartment, AllocKind thingKind, JS_ASSERT(!rt->gcRunning); AutoLockGC lock(rt); - AutoGCSession gcsession(cx); + AutoHeapSession session(cx); #ifdef JS_THREADSAFE rt->gcHelperThread.waitBackgroundSweepEnd(); #endif @@ -3196,6 +3922,23 @@ NewCompartment(JSContext *cx, JSPrincipals *principals) */ { AutoLockGC lock(rt); + + /* + * If we're in the middle of an incremental GC, we cancel + * it. Otherwise we might fail to mark the newly created + * compartment fully. + */ + if (rt->gcIncrementalState == MARK) { + rt->gcCompartmentCreated = true; + + /* + * Start the tracer so that it's legal to stop() it when + * resetting the GC. + */ + if (!rt->gcIncrementalCompartment) + compartment->barrierMarker_.start(rt, NULL); + } + if (rt->compartments.append(compartment)) return compartment; } @@ -3239,7 +3982,7 @@ CheckStackRoot(JSTracer *trc, uintptr_t *w) if (test == CGCT_VALID) { JSContext *iter = NULL; bool matched = false; - JSRuntime *rt = trc->context->runtime; + JSRuntime *rt = trc->runtime; while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) { for (unsigned i = 0; i < THING_ROOT_COUNT; i++) { Root *rooter = acx->thingGCRooters[i]; @@ -3282,7 +4025,7 @@ CheckStackRoots(JSContext *cx) AutoCopyFreeListToArenas copy(cx->runtime); JSTracer checker; - JS_TRACER_INIT(&checker, cx, EmptyMarkCallback); + JS_TracerInit(&checker, cx, EmptyMarkCallback); ThreadData *td = JS_THREAD_DATA(cx); @@ -3360,7 +4103,7 @@ typedef HashMap NodeMap; */ struct VerifyTracer : JSTracer { /* The gcNumber when the verification began.
*/ - uint32_t number; + uint64_t number; /* This counts up to JS_VERIFIER_FREQ to decide whether to verify. */ uint32_t count; @@ -3372,10 +4115,8 @@ struct VerifyTracer : JSTracer { char *term; NodeMap nodemap; - /* A dummy marker used for the write barriers; stored in gcMarkingTracer. */ - GCMarker gcmarker; - - VerifyTracer(JSContext *cx) : nodemap(cx), gcmarker(cx) {} + VerifyTracer(JSContext *cx) : root(NULL), nodemap(cx) {} + ~VerifyTracer() { js_free(root); } }; /* @@ -3439,11 +4180,14 @@ StartVerifyBarriers(JSContext *cx) { JSRuntime *rt = cx->runtime; - if (rt->gcVerifyData) + if (rt->gcVerifyData || rt->gcIncrementalState != NO_INCREMENTAL) return; AutoLockGC lock(rt); - AutoGCSession gcsession(cx); + AutoHeapSession session(cx); + + if (!IsIncrementalGCSafe(cx)) + return; #ifdef JS_THREADSAFE rt->gcHelperThread.waitBackgroundSweepOrAllocEnd(); @@ -3457,27 +4201,10 @@ StartVerifyBarriers(JSContext *cx) for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) r.front()->bitmap.clear(); - /* - * Kick all frames on the stack into the interpreter, and release all JIT - * code in the compartment. - */ -#ifdef JS_METHODJIT - for (CompartmentsIter c(rt); !c.done(); c.next()) { - mjit::ClearAllFrames(c); + for (CompartmentsIter c(rt); !c.done(); c.next()) + c->discardJitCode(cx); - for (CellIterUnderGC i(c, FINALIZE_SCRIPT); !i.done(); i.next()) { - JSScript *script = i.get(); - mjit::ReleaseScriptCode(cx, script); - - /* - * Use counts for scripts are reset on GC. After discarding code we - * need to let it warm back up to get information like which opcodes - * are setting array holes or accessing getter properties. - */ - script->resetUseCount(); - } - } -#endif + PurgeRuntime(cx); VerifyTracer *trc = new (js_malloc(sizeof(VerifyTracer))) VerifyTracer(cx); @@ -3498,6 +4225,9 @@ StartVerifyBarriers(JSContext *cx) /* Create the root node. */ trc->curnode = MakeNode(trc, NULL, JSGCTraceKind(0)); + /* We want MarkRuntime to save the roots to gcSavedRoots. */ + rt->gcIncrementalState = MARK_ROOTS; + /* Make all the roots be edges emanating from the root node. */ MarkRuntime(trc); @@ -3522,26 +4252,35 @@ StartVerifyBarriers(JSContext *cx) } rt->gcVerifyData = trc; - rt->gcIncrementalTracer = &trc->gcmarker; + rt->gcIncrementalState = MARK; for (CompartmentsIter c(rt); !c.done(); c.next()) { - c->gcIncrementalTracer = &trc->gcmarker; c->needsBarrier_ = true; + c->barrierMarker_.start(rt, NULL); + c->arenas.prepareForIncrementalGC(c); } return; oom: - js_free(trc->root); + rt->gcIncrementalState = NO_INCREMENTAL; trc->~VerifyTracer(); js_free(trc); } static void -CheckAutorooter(JSTracer *jstrc, void **thingp, JSGCTraceKind kind) +MarkFromAutorooter(JSTracer *jstrc, void **thingp, JSGCTraceKind kind) { static_cast(*thingp)->markIfUnmarked(); } +static bool +IsMarkedOrAllocated(Cell *cell) +{ + return cell->isMarked() || cell->arenaHeader()->allocatedDuringIncremental; +} + +const static uint32_t MAX_VERIFIER_EDGES = 1000; + /* * This function is called by EndVerifyBarriers for every heap edge. If the edge * already existed in the original snapshot, we "cancel it out" by overwriting @@ -3555,6 +4294,10 @@ CheckEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind) VerifyTracer *trc = (VerifyTracer *)jstrc; VerifyNode *node = trc->curnode; + /* Avoid n^2 behavior. 
*/ + if (node->count > MAX_VERIFIER_EDGES) + return; + for (uint32_t i = 0; i < node->count; i++) { if (node->edges[i].thing == *thingp) { JS_ASSERT(node->edges[i].kind == kind); @@ -3562,6 +4305,21 @@ CheckEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind) return; } } + + /* + * Anything that is reachable now should have been reachable before, or else + * it should be marked. + */ + NodeMap::Ptr p = trc->nodemap.lookup(*thingp); + JS_ASSERT_IF(!p, IsMarkedOrAllocated(static_cast(*thingp))); +} + +static void +CheckReachable(JSTracer *jstrc, void **thingp, JSGCTraceKind kind) +{ + VerifyTracer *trc = (VerifyTracer *)jstrc; + NodeMap::Ptr p = trc->nodemap.lookup(*thingp); + JS_ASSERT_IF(!p, IsMarkedOrAllocated(static_cast(*thingp))); } static void @@ -3570,7 +4328,7 @@ EndVerifyBarriers(JSContext *cx) JSRuntime *rt = cx->runtime; AutoLockGC lock(rt); - AutoGCSession gcsession(cx); + AutoHeapSession session(cx); #ifdef JS_THREADSAFE rt->gcHelperThread.waitBackgroundSweepOrAllocEnd(); @@ -3588,18 +4346,17 @@ EndVerifyBarriers(JSContext *cx) JS_ASSERT(trc->number == rt->gcNumber); - for (CompartmentsIter c(rt); !c.done(); c.next()) { - c->gcIncrementalTracer = NULL; + /* We need to disable barriers before tracing, which may invoke barriers. */ + for (CompartmentsIter c(rt); !c.done(); c.next()) c->needsBarrier_ = false; - } - if (rt->gcIncrementalTracer->hasDelayedChildren()) - rt->gcIncrementalTracer->markDelayedChildren(); + for (CompartmentsIter c(rt); !c.done(); c.next()) + c->discardJitCode(cx); rt->gcVerifyData = NULL; - rt->gcIncrementalTracer = NULL; + rt->gcIncrementalState = NO_INCREMENTAL; - JS_TracerInit(trc, cx, CheckAutorooter); + JS_TracerInit(trc, cx, MarkFromAutorooter); JSContext *iter = NULL; while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) { @@ -3607,34 +4364,65 @@ EndVerifyBarriers(JSContext *cx) acx->autoGCRooters->traceAll(trc); } - JS_TracerInit(trc, cx, CheckEdge); + if (IsIncrementalGCSafe(cx)) { + /* + * Verify that all the current roots were reachable previously, or else + * are marked. + */ + JS_TracerInit(trc, cx, CheckReachable); + MarkRuntime(trc, true); - /* Start after the roots. */ - VerifyNode *node = NextNode(trc->root); - int count = 0; + JS_TracerInit(trc, cx, CheckEdge); - while ((char *)node < trc->edgeptr) { - trc->curnode = node; - JS_TraceChildren(trc, node->thing, node->kind); + /* Start after the roots. 
*/ + VerifyNode *node = NextNode(trc->root); + while ((char *)node < trc->edgeptr) { + trc->curnode = node; + JS_TraceChildren(trc, node->thing, node->kind); - for (uint32_t i = 0; i < node->count; i++) { - void *thing = node->edges[i].thing; - JS_ASSERT_IF(thing, static_cast(thing)->isMarked()); + if (node->count <= MAX_VERIFIER_EDGES) { + for (uint32_t i = 0; i < node->count; i++) { + void *thing = node->edges[i].thing; + JS_ASSERT_IF(thing, IsMarkedOrAllocated(static_cast(thing))); + } + } + + node = NextNode(node); } - - count++; - node = NextNode(node); } - js_free(trc->root); + for (CompartmentsIter c(rt); !c.done(); c.next()) { + c->barrierMarker_.reset(); + c->barrierMarker_.stop(); + } + trc->~VerifyTracer(); js_free(trc); } void -VerifyBarriers(JSContext *cx, bool always) +FinishVerifier(JSRuntime *rt) { - if (cx->runtime->gcZeal() < ZealVerifierThreshold) + if (VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData) { + trc->~VerifyTracer(); + js_free(trc); + } +} + +void +VerifyBarriers(JSContext *cx) +{ + JSRuntime *rt = cx->runtime; + if (rt->gcVerifyData) + EndVerifyBarriers(cx); + else + StartVerifyBarriers(cx); +} + +void +MaybeVerifyBarriers(JSContext *cx, bool always) +{ + if (cx->runtime->gcZeal() != ZealVerifierValue) return; uint32_t freq = cx->runtime->gcZealFrequency; @@ -3782,3 +4570,4 @@ js_NewGCXML(JSContext *cx) return NewGCThing(cx, js::gc::FINALIZE_XML, sizeof(JSXML)); } #endif + diff --git a/js/src/jsgc.h b/js/src/jsgc.h index 1b38a6ef640f..d9e2023a8279 100644 --- a/js/src/jsgc.h +++ b/js/src/jsgc.h @@ -55,7 +55,6 @@ #include "jslock.h" #include "jsutil.h" #include "jsversion.h" -#include "jsgcstats.h" #include "jscell.h" #include "ds/BitArray.h" @@ -82,6 +81,14 @@ struct Shape; namespace gc { +enum State { + NO_INCREMENTAL, + MARK_ROOTS, + MARK, + SWEEP, + INVALID +}; + struct Arena; /* @@ -419,6 +426,10 @@ struct ArenaHeader { * not present in the stack we use an extra flag to tag arenas on the * stack. * + * Delayed marking is also used for arenas that we allocate into during an + * incremental GC. In this case, we intend to mark all the objects in the + * arena, and it's faster to do this marking in bulk. + * * To minimize the ArenaHeader size we record the next delayed marking * linkage as arenaAddress() >> ArenaShift and pack it with the allocKind * field and hasDelayedMarking flag. We use 8 bits for the allocKind, not @@ -427,7 +438,9 @@ struct ArenaHeader { */ public: size_t hasDelayedMarking : 1; - size_t nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1; + size_t allocatedDuringIncremental : 1; + size_t markOverflow : 1; + size_t nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1 - 1 - 1; static void staticAsserts() { /* We must be able to fit the allockind into uint8_t. */ @@ -437,7 +450,7 @@ struct ArenaHeader { * nextDelayedMarkingpacking assumes that ArenaShift has enough bits * to cover allocKind and hasDelayedMarking. 
*/ - JS_STATIC_ASSERT(ArenaShift >= 8 + 1); + JS_STATIC_ASSERT(ArenaShift >= 8 + 1 + 1 + 1); } inline uintptr_t address() const; @@ -450,6 +463,8 @@ struct ArenaHeader { void init(JSCompartment *comp, AllocKind kind) { JS_ASSERT(!allocated()); + JS_ASSERT(!markOverflow); + JS_ASSERT(!allocatedDuringIncremental); JS_ASSERT(!hasDelayedMarking); compartment = comp; @@ -462,6 +477,8 @@ struct ArenaHeader { void setAsNotAllocated() { allocKind = size_t(FINALIZE_LIMIT); + markOverflow = 0; + allocatedDuringIncremental = 0; hasDelayedMarking = 0; nextDelayedMarking = 0; } @@ -507,8 +524,8 @@ struct ArenaHeader { void checkSynchronizedWithFreeList() const; #endif - inline Arena *getNextDelayedMarking() const; - inline void setNextDelayedMarking(Arena *arena); + inline ArenaHeader *getNextDelayedMarking() const; + inline void setNextDelayedMarking(ArenaHeader *aheader); }; struct Arena { @@ -908,25 +925,24 @@ ArenaHeader::getThingSize() const return Arena::thingSize(getAllocKind()); } -inline Arena * +inline ArenaHeader * ArenaHeader::getNextDelayedMarking() const { - return reinterpret_cast(nextDelayedMarking << ArenaShift); + return &reinterpret_cast(nextDelayedMarking << ArenaShift)->aheader; } inline void -ArenaHeader::setNextDelayedMarking(Arena *arena) +ArenaHeader::setNextDelayedMarking(ArenaHeader *aheader) { - JS_ASSERT(!hasDelayedMarking); + JS_ASSERT(!(uintptr_t(aheader) & ArenaMask)); hasDelayedMarking = 1; - nextDelayedMarking = arena->address() >> ArenaShift; + nextDelayedMarking = aheader->arenaAddress() >> ArenaShift; } JS_ALWAYS_INLINE void ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32_t color, uintptr_t **wordp, uintptr_t *maskp) { - JS_ASSERT(cell->chunk() == Chunk::fromAddress(reinterpret_cast(this))); size_t bit = (cell->address() & ChunkMask) / Cell::CellSize + color; JS_ASSERT(bit < ArenaBitmapBits * ArenasPerChunk); *maskp = uintptr_t(1) << (bit % JS_BITS_PER_WORD); @@ -970,21 +986,6 @@ Cell::compartment() const return arenaHeader()->compartment; } -/* - * Lower limit after which we limit the heap growth - */ -const size_t GC_ALLOCATION_THRESHOLD = 30 * 1024 * 1024; - -/* - * A GC is triggered once the number of newly allocated arenas is - * GC_HEAP_GROWTH_FACTOR times the number of live arenas after the last GC - * starting after the lower limit of GC_ALLOCATION_THRESHOLD. - */ -const float GC_HEAP_GROWTH_FACTOR = 3.0f; - -/* Perform a Full GC every 20 seconds if MaybeGC is called */ -static const int64_t GC_IDLE_FULL_SPAN = 20 * 1000 * 1000; - static inline JSGCTraceKind MapAllocToTraceKind(AllocKind thingKind) { @@ -1168,13 +1169,14 @@ struct ArenaLists { FreeSpan *headSpan = &freeLists[i]; if (!headSpan->isEmpty()) { ArenaHeader *aheader = headSpan->arenaHeader(); - JS_ASSERT(!aheader->hasFreeThings()); aheader->setFirstFreeSpan(headSpan); headSpan->initAsEmpty(); } } } + inline void prepareForIncrementalGC(JSCompartment *comp); + /* * Temporarily copy the free list heads to the arenas so the code can see * the proper value in ArenaHeader::freeList when accessing the latter @@ -1309,23 +1311,6 @@ typedef js::HashMap, js::SystemAllocPolicy> RootedValueMap; -/* If HashNumber grows, need to change WrapperHasher. 
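The shift-based packing above works because arena addresses are arena-aligned, so the low bits carry no information. A minimal self-contained sketch of the round-trip (kArenaShift's value here is an assumption for illustration, not taken from this patch):

#include <cassert>
#include <cstdint>

static const uintptr_t kArenaShift = 12;                  // assumed: 4 KB arenas
static const uintptr_t kArenaMask  = (uintptr_t(1) << kArenaShift) - 1;

int main() {
    uintptr_t arenaAddr = uintptr_t(0x2a) << kArenaShift; // arena-aligned by construction
    assert((arenaAddr & kArenaMask) == 0);                // low bits are always zero...
    uintptr_t packed   = arenaAddr >> kArenaShift;        // ...so they need not be stored
    uintptr_t restored = packed << kArenaShift;
    assert(restored == arenaAddr);                        // exact round-trip
    return 0;
}

This is why nextDelayedMarking can shrink to JS_BITS_PER_WORD - 8 - 1 - 1 - 1 bits and still reconstruct the full arena address.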
*/ -JS_STATIC_ASSERT(sizeof(HashNumber) == 4); - -struct WrapperHasher -{ - typedef Value Lookup; - - static HashNumber hash(Value key) { - uint64_t bits = JSVAL_TO_IMPL(key).asBits; - return uint32_t(bits) ^ uint32_t(bits >> 32); - } - - static bool match(const Value &l, const Value &k) { return l == k; } -}; - -typedef HashMap WrapperMap; - } /* namespace js */ extern JS_FRIEND_API(JSGCTraceKind) @@ -1376,6 +1361,9 @@ js_IsAddressableGCThing(JSRuntime *rt, uintptr_t w, js::gc::AllocKind *thingKind namespace js { +extern void +MarkCompartmentActive(js::StackFrame *fp); + extern void TraceRuntime(JSTracer *trc); @@ -1396,8 +1384,6 @@ MaybeGC(JSContext *cx); extern void ShrinkGCBuffers(JSRuntime *rt); -} /* namespace js */ - /* * Kinds of js_GC invocation. */ @@ -1411,10 +1397,21 @@ typedef enum JSGCInvocationKind { /* Pass NULL for |comp| to get a full GC. */ extern void -js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason r); +GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason reason); + +extern void +GCSlice(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason reason); + +extern void +GCDebugSlice(JSContext *cx, int64_t objCount); + +} /* namespace js */ namespace js { +void +InitTracer(JSTracer *trc, JSRuntime *rt, JSContext *cx, JSTraceCallback callback); + #ifdef JS_THREADSAFE class GCHelperThread { @@ -1572,17 +1569,56 @@ struct MarkStack { T *tos; T *limit; - bool push(T item) { - if (tos == limit) + T *ballast; + T *ballastLimit; + + MarkStack() + : stack(NULL), + tos(NULL), + limit(NULL), + ballast(NULL), + ballastLimit(NULL) { } + + ~MarkStack() { + if (stack != ballast) + js_free(stack); + js_free(ballast); + } + + bool init(size_t ballastcap) { + JS_ASSERT(!stack); + + if (ballastcap == 0) + return true; + + ballast = (T *)js_malloc(sizeof(T) * ballastcap); + if (!ballast) return false; + ballastLimit = ballast + ballastcap; + stack = ballast; + limit = ballastLimit; + tos = stack; + return true; + } + + bool push(T item) { + if (tos == limit) { + if (!enlarge()) + return false; + } + JS_ASSERT(tos < limit); *tos++ = item; return true; } bool push(T item1, T item2, T item3) { T *nextTos = tos + 3; - if (nextTos > limit) - return false; + if (nextTos > limit) { + if (!enlarge()) + return false; + nextTos = tos + 3; + } + JS_ASSERT(nextTos <= limit); tos[0] = item1; tos[1] = item2; tos[2] = item3; @@ -1599,61 +1635,130 @@ struct MarkStack { return *--tos; } - template - MarkStack(T (&buffer)[N]) - : stack(buffer), - tos(buffer), - limit(buffer + N) { } + ptrdiff_t position() const { + return tos - stack; + } + + void reset() { + if (stack != ballast) { + js_free(stack); + stack = ballast; + limit = ballastLimit; + } + tos = stack; + JS_ASSERT(limit == ballastLimit); + } + + bool enlarge() { + size_t tosIndex = tos - stack; + size_t cap = limit - stack; + size_t newcap = cap * 2; + if (newcap == 0) + newcap = 32; + + T *newStack; + if (stack == ballast) { + newStack = (T *)js_malloc(sizeof(T) * newcap); + if (!newStack) + return false; + for (T *src = stack, *dst = newStack; src < tos; ) + *dst++ = *src++; + } else { + newStack = (T *)js_realloc(stack, sizeof(T) * newcap); + if (!newStack) + return false; + } + stack = newStack; + tos = stack + tosIndex; + limit = newStack + newcap; + return true; + } +}; + +/* + * This class records how much work has been done in a given GC slice, so that + * we can return before pausing for too long. 
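A minimal stand-in for the deadline-plus-counter scheme described here, with the clock consulted only once every 1000 steps (ToySliceBudget and nowUs are invented names; the real SliceBudget is declared just below):

#include <chrono>
#include <cstdint>

struct ToySliceBudget {
    int64_t deadlineUs;                         // absolute deadline, in microseconds
    intptr_t counter;                           // steps left before consulting the clock
    static const intptr_t CounterReset = 1000;

    static int64_t nowUs() {
        using namespace std::chrono;
        return duration_cast<microseconds>(steady_clock::now().time_since_epoch()).count();
    }

    explicit ToySliceBudget(int64_t budgetMs)
      : deadlineUs(nowUs() + budgetMs * 1000), counter(CounterReset) {}

    void step() { counter--; }

    bool isOverBudget() {
        if (counter > 0)
            return false;                       // cheap path: no clock read
        counter = CounterReset;                 // look at the clock every 1000 steps
        return nowUs() >= deadlineUs;
    }
};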
Some slices are allowed to run for + * unlimited time, and others are bounded. To reduce the number of gettimeofday + * calls, we only check the time every 1000 operations. + */ +struct SliceBudget { + int64_t deadline; /* in microseconds */ + intptr_t counter; + + static const intptr_t CounterReset = 1000; + + static const int64_t Unlimited = 0; + static int64_t TimeBudget(int64_t millis); + static int64_t WorkBudget(int64_t work); + + /* Equivalent to SliceBudget(UnlimitedBudget). */ + SliceBudget(); + + /* Instantiate as SliceBudget(Time/WorkBudget(n)). */ + SliceBudget(int64_t budget); + + void reset() { + deadline = INT64_MAX; + counter = INTPTR_MAX; + } + + void step() { + counter--; + } + + bool checkOverBudget(); + + bool isOverBudget() { + if (counter > 0) + return false; + return checkOverBudget(); + } }; static const size_t MARK_STACK_LENGTH = 32768; struct GCMarker : public JSTracer { + private: /* * We use a common mark stack to mark GC things of different types and use * the explicit tags to distinguish them when it cannot be deduced from * the context of push or pop operation. - * - * Currently we need only 4 tags. However that can be extended to 8 if - * necessary as we tag only GC things. */ enum StackTag { ValueArrayTag, ObjectTag, TypeTag, XmlTag, - LastTag = XmlTag + SavedValueArrayTag, + LastTag = SavedValueArrayTag }; - static const uintptr_t StackTagMask = 3; + static const uintptr_t StackTagMask = 7; static void staticAsserts() { JS_STATIC_ASSERT(StackTagMask >= uintptr_t(LastTag)); JS_STATIC_ASSERT(StackTagMask <= gc::Cell::CellMask); } - private: - /* The color is only applied to objects, functions and xml. */ - uint32_t color; - public: - /* Pointer to the top of the stack of arenas we are delaying marking on. */ - js::gc::Arena *unmarkedArenaStackTop; - /* Count of arenas that are currently in the stack. */ - DebugOnly markLaterArenas; + explicit GCMarker(); + bool init(bool lazy); -#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS - js::gc::ConservativeGCStats conservativeStats; - Vector conservativeRoots; - const char *conservativeDumpFileName; - void dumpConservativeRoots(); -#endif + void start(JSRuntime *rt, JSContext *cx); + void stop(); + void reset(); - MarkStack stack; + void pushObject(JSObject *obj) { + pushTaggedPtr(ObjectTag, obj); + } - public: - explicit GCMarker(JSContext *cx); - ~GCMarker(); + void pushType(types::TypeObject *type) { + pushTaggedPtr(TypeTag, type); + } + + void pushXML(JSXML *xml) { + pushTaggedPtr(XmlTag, xml); + } uint32_t getMarkColor() const { return color; @@ -1668,43 +1773,123 @@ struct GCMarker : public JSTracer { * objects that are still reachable. */ void setMarkColorGray() { + JS_ASSERT(isDrained()); JS_ASSERT(color == gc::BLACK); color = gc::GRAY; } + inline void delayMarkingArena(gc::ArenaHeader *aheader); void delayMarkingChildren(const void *thing); - + void markDelayedChildren(gc::ArenaHeader *aheader); + bool markDelayedChildren(SliceBudget &budget); bool hasDelayedChildren() const { return !!unmarkedArenaStackTop; } - void markDelayedChildren(); + bool isDrained() { + return isMarkStackEmpty() && !unmarkedArenaStackTop; + } + + bool drainMarkStack(SliceBudget &budget); + + /* + * Gray marking must be done after all black marking is complete. However, + * we do not have write barriers on XPConnect roots. Therefore, XPConnect + * roots must be accumulated in the first slice of incremental GC. We + * accumulate these roots in the GrayRootMarker and then mark them later, + * after black marking is complete. 
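A sketch of the buffer-then-replay scheme this comment describes (GrayRootBuffer and the markGray callback are invented names, not the patch's API):

#include <cstddef>
#include <vector>

struct GrayRootBuffer {
    std::vector<void*> roots;
    bool failed;

    GrayRootBuffer() : failed(false) {}

    void append(void *thing) {
        if (failed)
            return;                  // once accumulation fails, it stays failed
        roots.push_back(thing);      // real code checks for OOM here and sets
                                     // failed, forcing non-incremental GC
    }

    void replay(void (*markGray)(void *thing)) {
        for (size_t i = 0; i < roots.size(); i++)
            markGray(roots[i]);      // runs only after black marking is done
    }
};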
This accumulation can fail, but in that + * case we switch to non-incremental GC. + */ + bool hasBufferedGrayRoots() const; + void startBufferingGrayRoots(); + void endBufferingGrayRoots(); + void markBufferedGrayRoots(); + + static void GrayCallback(JSTracer *trc, void **thing, JSGCTraceKind kind); + + MarkStack stack; + + private: +#ifdef DEBUG + void checkCompartment(void *p); +#else + void checkCompartment(void *p) {} +#endif + + void pushTaggedPtr(StackTag tag, void *ptr) { + checkCompartment(ptr); + uintptr_t addr = reinterpret_cast(ptr); + JS_ASSERT(!(addr & StackTagMask)); + if (!stack.push(addr | uintptr_t(tag))) + delayMarkingChildren(ptr); + } + + void pushValueArray(JSObject *obj, void *start, void *end) { + checkCompartment(obj); + + if (start == end) + return; + + JS_ASSERT(start <= end); + uintptr_t tagged = reinterpret_cast(obj) | GCMarker::ValueArrayTag; + uintptr_t startAddr = reinterpret_cast(start); + uintptr_t endAddr = reinterpret_cast(end); + + /* + * Push in the reverse order so obj will be on top. If we cannot push + * the array, we trigger delay marking for the whole object. + */ + if (!stack.push(endAddr, startAddr, tagged)) + delayMarkingChildren(obj); + } bool isMarkStackEmpty() { return stack.isEmpty(); } - void drainMarkStack(); + bool restoreValueArray(JSObject *obj, void **vpp, void **endp); + void saveValueRanges(); + inline void processMarkStackTop(SliceBudget &budget); - inline void processMarkStackTop(); + void appendGrayRoot(void *thing, JSGCTraceKind kind); - void pushObject(JSObject *obj) { - pushTaggedPtr(ObjectTag, obj); + /* The color is only applied to objects, functions and xml. */ + uint32_t color; + + DebugOnly started; + + /* Pointer to the top of the stack of arenas we are delaying marking on. */ + js::gc::ArenaHeader *unmarkedArenaStackTop; + /* Count of arenas that are currently in the stack. */ + DebugOnly markLaterArenas; + + struct GrayRoot { + void *thing; + JSGCTraceKind kind; +#ifdef DEBUG + JSTraceNamePrinter debugPrinter; + const void *debugPrintArg; + size_t debugPrintIndex; +#endif + + GrayRoot(void *thing, JSGCTraceKind kind) + : thing(thing), kind(kind) {} + }; + + bool grayFailed; + Vector grayRoots; +}; + +struct BarrierGCMarker : public GCMarker { + bool init() { + return GCMarker::init(true); } +}; - void pushType(types::TypeObject *type) { - pushTaggedPtr(TypeTag, type); - } - void pushXML(JSXML *xml) { - pushTaggedPtr(XmlTag, xml); - } - - void pushTaggedPtr(StackTag tag, void *ptr) { - uintptr_t addr = reinterpret_cast(ptr); - JS_ASSERT(!(addr & StackTagMask)); - if (!stack.push(addr | uintptr_t(tag))) - delayMarkingChildren(ptr); +struct FullGCMarker : public GCMarker { + bool init() { + return GCMarker::init(false); } }; @@ -1757,7 +1942,8 @@ js_FinalizeStringRT(JSRuntime *rt, JSString *str); /* * Macro to test if a traversal is the marking phase of the GC. 
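The wider tag range (mask 7 instead of 3) still fits because GC cells are at least 8-byte aligned; a self-contained sketch of the low-bit tagging that pushTaggedPtr relies on (names and values are illustrative):

#include <cassert>
#include <cstdint>

enum ToyTag { ToyObjectTag = 1, ToyTypeTag = 2, ToySavedValueArrayTag = 5 };
static const uintptr_t kTagMask = 7;           // counterpart of StackTagMask

int main() {
    alignas(8) static char cell[8];            // stand-in for an aligned GC cell
    uintptr_t addr = reinterpret_cast<uintptr_t>(&cell);
    assert((addr & kTagMask) == 0);            // alignment keeps the tag bits clear
    uintptr_t tagged = addr | uintptr_t(ToyObjectTag);
    assert((tagged & kTagMask) == ToyObjectTag);   // recover the tag...
    assert((tagged & ~kTagMask) == addr);          // ...and the pointer
    return 0;
}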
*/ -#define IS_GC_MARKING_TRACER(trc) ((trc)->callback == NULL) +#define IS_GC_MARKING_TRACER(trc) \ + ((trc)->callback == NULL || (trc)->callback == GCMarker::GrayCallback) namespace js { namespace gc { @@ -1778,20 +1964,30 @@ inline void MaybeCheckStackRoots(JSContext *cx) { CheckStackRoots(cx); } inline void MaybeCheckStackRoots(JSContext *cx) {} #endif -const int ZealPokeThreshold = 1; -const int ZealAllocThreshold = 2; -const int ZealVerifierThreshold = 4; +const int ZealPokeValue = 1; +const int ZealAllocValue = 2; +const int ZealFrameGCValue = 3; +const int ZealVerifierValue = 4; +const int ZealFrameVerifierValue = 5; #ifdef JS_GC_ZEAL /* Check that write barriers have been used correctly. See jsgc.cpp. */ void -VerifyBarriers(JSContext *cx, bool always = false); +VerifyBarriers(JSContext *cx); + +void +MaybeVerifyBarriers(JSContext *cx, bool always = false); #else static inline void -VerifyBarriers(JSContext *cx, bool always = false) +VerifyBarriers(JSContext *cx) +{ +} + +static inline void +MaybeVerifyBarriers(JSContext *cx, bool always = false) { } diff --git a/js/src/jsgcinlines.h b/js/src/jsgcinlines.h index 36627f396737..41000ebfe654 100644 --- a/js/src/jsgcinlines.h +++ b/js/src/jsgcinlines.h @@ -210,7 +210,7 @@ GCPoke(JSRuntime *rt, Value oldval) #ifdef JS_GC_ZEAL /* Schedule a GC to happen "soon" after a GC poke. */ - if (rt->gcZeal() >= js::gc::ZealPokeThreshold) + if (rt->gcZeal() == js::gc::ZealPokeValue) rt->gcNextScheduled = 1; #endif } @@ -262,14 +262,25 @@ class CellIterImpl CellIterImpl() { } - void init(JSCompartment *comp, AllocKind kind) { + void initSpan(JSCompartment *comp, AllocKind kind) { JS_ASSERT(comp->arenas.isSynchronizedFreeList(kind)); firstThingOffset = Arena::firstThingOffset(kind); thingSize = Arena::thingSize(kind); - aheader = comp->arenas.getFirstArena(kind); firstSpan.initAsEmpty(); span = &firstSpan; thing = span->first; + } + + void init(ArenaHeader *singleAheader) { + aheader = singleAheader; + initSpan(aheader->compartment, aheader->getAllocKind()); + next(); + aheader = NULL; + } + + void init(JSCompartment *comp, AllocKind kind) { + initSpan(comp, kind); + aheader = comp->arenas.getFirstArena(kind); next(); } @@ -311,13 +322,18 @@ class CellIterImpl } }; -class CellIterUnderGC : public CellIterImpl { - +class CellIterUnderGC : public CellIterImpl +{ public: CellIterUnderGC(JSCompartment *comp, AllocKind kind) { JS_ASSERT(comp->rt->gcRunning); init(comp, kind); } + + CellIterUnderGC(ArenaHeader *aheader) { + JS_ASSERT(aheader->compartment->rt->gcRunning); + init(aheader); + } }; /* @@ -325,7 +341,7 @@ class CellIterUnderGC : public CellIterImpl { * allocations of GC things are possible and that the background finalization * for the given thing kind is not enabled or is done. 
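A hypothetical usage sketch for the cell iterator being described (the counting helper is invented; per this comment it is only valid where no GC allocation can occur and background finalization for the kind has finished):

static size_t CountScripts(JSContext *cx, JSCompartment *comp) {
    size_t n = 0;
    for (js::gc::CellIter i(cx, comp, js::gc::FINALIZE_SCRIPT); !i.done(); i.next())
        n++;                       // visits every live script cell in the compartment
    return n;
}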
*/ -class CellIter: public CellIterImpl +class CellIter : public CellIterImpl { ArenaLists *lists; AllocKind kind; @@ -335,7 +351,8 @@ class CellIter: public CellIterImpl public: CellIter(JSContext *cx, JSCompartment *comp, AllocKind kind) : lists(&comp->arenas), - kind(kind) { + kind(kind) + { #ifdef JS_THREADSAFE JS_ASSERT(comp->arenas.doneBackgroundFinalize(kind)); #endif @@ -397,6 +414,9 @@ NewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize) void *t = comp->arenas.allocateFromFreeList(kind, thingSize); if (!t) t = js::gc::ArenaLists::refillFreeList(cx, kind); + + JS_ASSERT_IF(t && comp->needsBarrier(), + static_cast(t)->arenaHeader()->allocatedDuringIncremental); return static_cast(t); } @@ -419,6 +439,8 @@ TryNewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize) #endif void *t = cx->compartment->arenas.allocateFromFreeList(kind, thingSize); + JS_ASSERT_IF(t && cx->compartment->needsBarrier(), + static_cast(t)->arenaHeader()->allocatedDuringIncremental); return static_cast(t); } diff --git a/js/src/jsgcmark.cpp b/js/src/jsgcmark.cpp index cba2ad7a19af..96a96681e55a 100644 --- a/js/src/jsgcmark.cpp +++ b/js/src/jsgcmark.cpp @@ -103,7 +103,7 @@ MarkInternal(JSTracer *trc, T *thing) * GC. */ if (!rt->gcCurrentCompartment || thing->compartment() == rt->gcCurrentCompartment) { - if (IS_GC_MARKING_TRACER(trc)) { + if (!trc->callback) { PushMarkStack(static_cast(trc), thing); } else { void *tmp = (void *)thing; @@ -118,6 +118,12 @@ MarkInternal(JSTracer *trc, T *thing) #endif } +#define JS_ROOT_MARKING_ASSERT(trc) \ + JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc), \ + trc->runtime->gcIncrementalState == NO_INCREMENTAL || \ + trc->runtime->gcIncrementalState == MARK_ROOTS); + + template static void MarkUnbarriered(JSTracer *trc, T *thing, const char *name) @@ -138,6 +144,7 @@ template static void MarkRoot(JSTracer *trc, T **thingp, const char *name) { + JS_ROOT_MARKING_ASSERT(trc); JS_SET_TRACING_NAME(trc, name); MarkInternal(trc, *thingp); } @@ -158,6 +165,7 @@ template static void MarkRootRange(JSTracer *trc, size_t len, T **vec, const char *name) { + JS_ROOT_MARKING_ASSERT(trc); for (size_t i = 0; i < len; ++i) { JS_SET_TRACING_INDEX(trc, name, i); MarkInternal(trc, vec[i]); @@ -246,6 +254,7 @@ MarkKind(JSTracer *trc, void *thing, JSGCTraceKind kind) void MarkGCThingRoot(JSTracer *trc, void *thing, const char *name) { + JS_ROOT_MARKING_ASSERT(trc); JS_SET_TRACING_NAME(trc, name); if (!thing) return; @@ -273,6 +282,7 @@ MarkId(JSTracer *trc, const HeapId &id, const char *name) void MarkIdRoot(JSTracer *trc, const jsid &id, const char *name) { + JS_ROOT_MARKING_ASSERT(trc); JS_SET_TRACING_NAME(trc, name); MarkIdInternal(trc, id); } @@ -289,6 +299,7 @@ MarkIdRange(JSTracer *trc, size_t len, HeapId *vec, const char *name) void MarkIdRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name) { + JS_ROOT_MARKING_ASSERT(trc); for (size_t i = 0; i < len; ++i) { JS_SET_TRACING_INDEX(trc, name, i); MarkIdInternal(trc, vec[i]); @@ -316,6 +327,7 @@ MarkValue(JSTracer *trc, HeapValue *v, const char *name) void MarkValueRoot(JSTracer *trc, Value *v, const char *name) { + JS_ROOT_MARKING_ASSERT(trc); JS_SET_TRACING_NAME(trc, name); MarkValueInternal(trc, v); } @@ -332,6 +344,7 @@ MarkValueRange(JSTracer *trc, size_t len, HeapValue *vec, const char *name) void MarkValueRootRange(JSTracer *trc, size_t len, Value *vec, const char *name) { + JS_ROOT_MARKING_ASSERT(trc); for (size_t i = 0; i < len; ++i) { JS_SET_TRACING_INDEX(trc, name, i); MarkValueInternal(trc, &vec[i]); @@ 
-374,6 +387,10 @@ MarkCrossCompartmentValue(JSTracer *trc, HeapValue *v, const char *name) if (rt->gcCurrentCompartment && cell->compartment() != rt->gcCurrentCompartment) return; + /* In case we're called from a write barrier. */ + if (rt->gcIncrementalCompartment && cell->compartment() != rt->gcIncrementalCompartment) + return; + MarkValue(trc, v, name); } } @@ -543,7 +560,7 @@ ScanLinearString(GCMarker *gcmarker, JSLinearString *str) static void ScanRope(GCMarker *gcmarker, JSRope *rope) { - uintptr_t *savedTos = gcmarker->stack.tos; + ptrdiff_t savedPos = gcmarker->stack.position(); for (;;) { JS_ASSERT(GetGCThingTraceKind(rope) == JSTRACE_STRING); JS_ASSERT(rope->JSString::isRope()); @@ -575,14 +592,14 @@ ScanRope(GCMarker *gcmarker, JSRope *rope) } if (next) { rope = next; - } else if (savedTos != gcmarker->stack.tos) { - JS_ASSERT(savedTos < gcmarker->stack.tos); + } else if (savedPos != gcmarker->stack.position()) { + JS_ASSERT(savedPos < gcmarker->stack.position()); rope = reinterpret_cast(gcmarker->stack.pop()); } else { break; } } - JS_ASSERT(savedTos == gcmarker->stack.tos); + JS_ASSERT(savedPos == gcmarker->stack.position()); } static inline void @@ -608,24 +625,6 @@ PushMarkStack(GCMarker *gcmarker, JSString *str) ScanString(gcmarker, str); } -static inline void -PushValueArray(GCMarker *gcmarker, JSObject* obj, HeapValue *start, HeapValue *end) -{ - JS_ASSERT(start <= end); - uintptr_t tagged = reinterpret_cast(obj) | GCMarker::ValueArrayTag; - uintptr_t startAddr = reinterpret_cast(start); - uintptr_t endAddr = reinterpret_cast(end); - - /* Push in the reverse order so obj will be on top. */ - if (!gcmarker->stack.push(endAddr, startAddr, tagged)) { - /* - * If we cannot push the array, we trigger delay marking for the whole - * object. - */ - gcmarker->delayMarkingChildren(obj); - } -} - void MarkChildren(JSTracer *trc, JSObject *obj) { @@ -851,12 +850,163 @@ MarkChildren(JSTracer *trc, JSXML *xml) } #endif +template +void +PushArenaTyped(GCMarker *gcmarker, ArenaHeader *aheader) +{ + for (CellIterUnderGC i(aheader); !i.done(); i.next()) + PushMarkStack(gcmarker, i.get()); +} + +void +PushArena(GCMarker *gcmarker, ArenaHeader *aheader) +{ + switch (MapAllocToTraceKind(aheader->getAllocKind())) { + case JSTRACE_OBJECT: + PushArenaTyped(gcmarker, aheader); + break; + + case JSTRACE_STRING: + PushArenaTyped(gcmarker, aheader); + break; + + case JSTRACE_SCRIPT: + PushArenaTyped(gcmarker, aheader); + break; + + case JSTRACE_SHAPE: + PushArenaTyped(gcmarker, aheader); + break; + + case JSTRACE_BASE_SHAPE: + PushArenaTyped(gcmarker, aheader); + break; + + case JSTRACE_TYPE_OBJECT: + PushArenaTyped(gcmarker, aheader); + break; + +#if JS_HAS_XML_SUPPORT + case JSTRACE_XML: + PushArenaTyped(gcmarker, aheader); + break; +#endif + } +} + } /* namespace gc */ using namespace js::gc; +struct ValueArrayLayout +{ + union { + HeapValue *end; + js::Class *clasp; + }; + union { + HeapValue *start; + uintptr_t index; + }; + JSObject *obj; + + static void staticAsserts() { + /* This should have the same layout as three mark stack items. */ + JS_STATIC_ASSERT(sizeof(ValueArrayLayout) == 3 * sizeof(uintptr_t)); + } +}; + +/* + * During incremental GC, we return from drainMarkStack without having processed + * the entire stack. At that point, JS code can run and reallocate slot arrays + * that are stored on the stack. To prevent this from happening, we replace all + * ValueArrayTag stack items with SavedValueArrayTag. In the latter, slots + * pointers are replaced with slot indexes. 
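A toy version of the pointer-to-index trick just described: an index survives a reallocation of the slot buffer, where a saved raw pointer would dangle (all names here are illustrative):

#include <cassert>
#include <cstddef>

struct ToySlots {
    double *slots;
    size_t len;
};

int main() {
    double first[4] = { 0, 1, 2, 3 };
    ToySlots obj = { first, 4 };

    double *scanPos = obj.slots + 2;              // marking suspended mid-array
    size_t saved = size_t(scanPos - obj.slots);   // store an index, not the pointer

    double second[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };
    obj.slots = second;                           // JS ran and reallocated the slots
    obj.len = 8;

    scanPos = obj.slots + saved;                  // rebase against the new buffer
    assert(*scanPos == 2);
    return 0;
}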
+ * + * We also replace the slot array end pointer (which can be derived from the obj + * pointer) with the object's class. During JS execution, array slowification + * can cause the layout of slots to change. We can observe that slowification + * happened if the class changed; in that case, we completely rescan the array. + */ +void +GCMarker::saveValueRanges() +{ + for (uintptr_t *p = stack.tos; p > stack.stack; ) { + uintptr_t tag = *--p & StackTagMask; + if (tag == ValueArrayTag) { + p -= 2; + ValueArrayLayout *arr = reinterpret_cast(p); + JSObject *obj = arr->obj; + + if (obj->getClass() == &ArrayClass) { + HeapValue *vp = obj->getDenseArrayElements(); + JS_ASSERT(arr->start >= vp && + arr->end == vp + obj->getDenseArrayInitializedLength()); + arr->index = arr->start - vp; + } else { + HeapValue *vp = obj->fixedSlots(); + unsigned nfixed = obj->numFixedSlots(); + if (arr->start >= vp && arr->start < vp + nfixed) { + JS_ASSERT(arr->end == vp + Min(nfixed, obj->slotSpan())); + arr->index = arr->start - vp; + } else { + JS_ASSERT(arr->start >= obj->slots && + arr->end == obj->slots + obj->slotSpan() - nfixed); + arr->index = (arr->start - obj->slots) + nfixed; + } + } + arr->clasp = obj->getClass(); + p[2] |= SavedValueArrayTag; + } else if (tag == SavedValueArrayTag) { + p -= 2; + } + } +} + +bool +GCMarker::restoreValueArray(JSObject *obj, void **vpp, void **endp) +{ + uintptr_t start = stack.pop(); + js::Class *clasp = reinterpret_cast(stack.pop()); + + JS_ASSERT(obj->getClass() == clasp || + (clasp == &ArrayClass && obj->getClass() == &SlowArrayClass)); + + if (clasp == &ArrayClass) { + if (obj->getClass() != &ArrayClass) + return false; + + uint32_t initlen = obj->getDenseArrayInitializedLength(); + HeapValue *vp = obj->getDenseArrayElements(); + if (start < initlen) { + *vpp = vp + start; + *endp = vp + initlen; + } else { + /* The object shrunk, in which case no scanning is needed. */ + *vpp = *endp = vp; + } + } else { + HeapValue *vp = obj->fixedSlots(); + unsigned nfixed = obj->numFixedSlots(); + unsigned nslots = obj->slotSpan(); + if (start < nfixed) { + *vpp = vp + start; + *endp = vp + Min(nfixed, nslots); + } else if (start < nslots) { + *vpp = obj->slots + start - nfixed; + *endp = obj->slots + nslots - nfixed; + } else { + /* The object shrunk, in which case no scanning is needed.
*/ + *vpp = *endp = obj->slots; + } + } + + JS_ASSERT(*vpp <= *endp); + return true; +} + inline void -GCMarker::processMarkStackTop() +GCMarker::processMarkStackTop(SliceBudget &budget) { /* * The function uses explicit goto and implements the scanning of the @@ -885,29 +1035,46 @@ GCMarker::processMarkStackTop() if (tag == ObjectTag) { obj = reinterpret_cast(addr); + JS_COMPARTMENT_ASSERT(runtime, obj); goto scan_obj; } if (tag == TypeTag) { ScanTypeObject(this, reinterpret_cast(addr)); + } else if (tag == SavedValueArrayTag) { + JS_ASSERT(!(addr & Cell::CellMask)); + obj = reinterpret_cast(addr); + if (restoreValueArray(obj, (void **)&vp, (void **)&end)) + goto scan_value_array; + else + goto scan_obj; } else { JS_ASSERT(tag == XmlTag); MarkChildren(this, reinterpret_cast(addr)); } + budget.step(); return; scan_value_array: JS_ASSERT(vp <= end); while (vp != end) { + budget.step(); + if (budget.isOverBudget()) { + pushValueArray(obj, vp, end); + return; + } + const Value &v = *vp++; if (v.isString()) { JSString *str = v.toString(); + JS_COMPARTMENT_ASSERT_STR(runtime, str); if (str->markIfUnmarked()) ScanString(this, str); } else if (v.isObject()) { JSObject *obj2 = &v.toObject(); + JS_COMPARTMENT_ASSERT(runtime, obj2); if (obj2->markIfUnmarked(getMarkColor())) { - PushValueArray(this, obj, vp, end); + pushValueArray(obj, vp, end); obj = obj2; goto scan_obj; } @@ -917,6 +1084,14 @@ GCMarker::processMarkStackTop() scan_obj: { + JS_COMPARTMENT_ASSERT(runtime, obj); + + budget.step(); + if (budget.isOverBudget()) { + pushObject(obj); + return; + } + types::TypeObject *type = obj->typeFromGC(); PushMarkStack(this, type); @@ -931,6 +1106,9 @@ GCMarker::processMarkStackTop() vp = obj->getDenseArrayElements(); end = vp + obj->getDenseArrayInitializedLength(); goto scan_value_array; + } else { + JS_ASSERT_IF(runtime->gcIncrementalState != NO_INCREMENTAL, + clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS); } clasp->trace(this, obj); } @@ -943,7 +1121,7 @@ GCMarker::processMarkStackTop() if (obj->slots) { unsigned nfixed = obj->numFixedSlots(); if (nslots > nfixed) { - PushValueArray(this, obj, vp, vp + nfixed); + pushValueArray(obj, vp, vp + nfixed); vp = obj->slots; end = vp + (nslots - nfixed); goto scan_value_array; @@ -955,15 +1133,33 @@ GCMarker::processMarkStackTop() } } -void -GCMarker::drainMarkStack() +bool +GCMarker::drainMarkStack(SliceBudget &budget) { +#ifdef DEBUG JSRuntime *rt = runtime; - rt->gcCheckCompartment = rt->gcCurrentCompartment; + + struct AutoCheckCompartment { + JSRuntime *runtime; + AutoCheckCompartment(JSRuntime *rt) : runtime(rt) { + runtime->gcCheckCompartment = runtime->gcCurrentCompartment; + } + ~AutoCheckCompartment() { runtime->gcCheckCompartment = NULL; } + } acc(rt); +#endif + + if (budget.isOverBudget()) + return false; for (;;) { - while (!stack.isEmpty()) - processMarkStackTop(); + while (!stack.isEmpty()) { + processMarkStackTop(budget); + if (budget.isOverBudget()) { + saveValueRanges(); + return false; + } + } + if (!hasDelayedChildren()) break; @@ -972,10 +1168,13 @@ GCMarker::drainMarkStack() * above tracing. Don't do this until we're done with everything * else. 
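The bool return value turns marking into a resumable loop; a sketch of the caller's side, using the SliceBudget and drainMarkStack interfaces declared in jsgc.h above (runOneSlice itself is an invented helper):

// Returns true when marking completed within the budget; false means the
// budget ran out and another slice must be scheduled later.
static bool runOneSlice(js::GCMarker &marker, int64_t sliceMs) {
    js::SliceBudget budget(js::SliceBudget::TimeBudget(sliceMs));
    return marker.drainMarkStack(budget);
}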
*/ - markDelayedChildren(); + if (!markDelayedChildren(budget)) { + saveValueRanges(); + return false; + } } - rt->gcCheckCompartment = NULL; + return true; } void diff --git a/js/src/jsgcmark.h b/js/src/jsgcmark.h index 1742fbcb46ec..0003ea3573d2 100644 --- a/js/src/jsgcmark.h +++ b/js/src/jsgcmark.h @@ -146,7 +146,11 @@ MarkChildren(JSTracer *trc, JSObject *obj); void MarkCycleCollectorChildren(JSTracer *trc, const Shape *shape); +void +PushArena(GCMarker *gcmarker, ArenaHeader *aheader); + /*** Generic ***/ + /* * The Mark() functions interface should only be used by code that must be * templated. Other uses should use the more specific, type-named functions. diff --git a/js/src/jsinfer.cpp b/js/src/jsinfer.cpp index 6b7c1fb075b2..2e6756d28993 100644 --- a/js/src/jsinfer.cpp +++ b/js/src/jsinfer.cpp @@ -2195,7 +2195,7 @@ TypeCompartment::nukeTypes(JSContext *cx) #ifdef JS_THREADSAFE AutoLockGC maybeLock; - if (!cx->runtime->gcMarkAndSweep) + if (!cx->runtime->gcRunning) maybeLock.lock(cx->runtime); #endif diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index 794c480600f3..bdd2f7b711c9 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -1147,7 +1147,7 @@ js::AssertValidPropertyCacheHit(JSContext *cx, jsbytecode *pc; cx->stack.currentScript(&pc); - uint32_t sample = cx->runtime->gcNumber; + uint64_t sample = cx->runtime->gcNumber; PropertyCacheEntry savedEntry = *entry; PropertyName *name = GetNameFromBytecode(cx, pc, JSOp(*pc), js_CodeSpec[*pc]); @@ -1254,7 +1254,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode) { JSAutoResolveFlags rf(cx, RESOLVE_INFER); - gc::VerifyBarriers(cx, true); + gc::MaybeVerifyBarriers(cx, true); JS_ASSERT(!cx->compartment->activeAnalysis); @@ -1289,7 +1289,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode) # define DO_OP() JS_BEGIN_MACRO \ CHECK_PCCOUNT_INTERRUPTS(); \ - js::gc::VerifyBarriers(cx); \ + js::gc::MaybeVerifyBarriers(cx); \ JS_EXTENSION_(goto *jumpTable[op]); \ JS_END_MACRO # define DO_NEXT_OP(n) JS_BEGIN_MACRO \ @@ -1566,7 +1566,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode) do_op: CHECK_PCCOUNT_INTERRUPTS(); - js::gc::VerifyBarriers(cx); + js::gc::MaybeVerifyBarriers(cx); switchOp = intN(op) | switchMask; do_switch: switch (switchOp) { @@ -4424,6 +4424,6 @@ END_CASE(JSOP_ARRAYPUSH) leave_on_safe_point: #endif - gc::VerifyBarriers(cx, true); + gc::MaybeVerifyBarriers(cx, true); return interpReturnOK; } diff --git a/js/src/jsiter.cpp b/js/src/jsiter.cpp index 62c9de6a5df7..c476bc834726 100644 --- a/js/src/jsiter.cpp +++ b/js/src/jsiter.cpp @@ -89,7 +89,7 @@ static JSObject *iterator_iterator(JSContext *cx, JSObject *obj, JSBool keysonly Class js::IteratorClass = { "Iterator", - JSCLASS_HAS_PRIVATE | + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_CACHED_PROTO(JSProto_Iterator), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ @@ -1419,7 +1419,7 @@ generator_trace(JSTracer *trc, JSObject *obj) Class js::GeneratorClass = { "Generator", - JSCLASS_HAS_PRIVATE, + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS, JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ JS_PropertyStub, /* getProperty */ diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 4df8f65e8e82..4405f4dd6ac2 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -2763,6 +2763,13 @@ NewObject(JSContext *cx, Class *clasp, types::TypeObject *type, JSObject *parent if (!obj) return NULL; + /* + * This will cancel an 
already-running incremental GC from doing any more + * slices, and it will prevent any future incremental GCs. + */ + if (clasp->trace && !(clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS)) + cx->runtime->gcIncrementalEnabled = false; + Probes::createObject(cx, obj); return obj; } @@ -3475,7 +3482,7 @@ JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved & a->slots = reserved.newaslots; a->initSlotRange(0, reserved.bvals.begin(), bcap); if (a->hasPrivate()) - a->setPrivate(bpriv); + a->initPrivate(bpriv); if (b->isNative()) b->shape_->setNumFixedSlots(reserved.newbfixed); @@ -3485,7 +3492,7 @@ JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved & b->slots = reserved.newbslots; b->initSlotRange(0, reserved.avals.begin(), acap); if (b->hasPrivate()) - b->setPrivate(apriv); + b->initPrivate(apriv); /* Make sure the destructor for reserved doesn't free the slots. */ reserved.newaslots = NULL; diff --git a/js/src/jsobj.h b/js/src/jsobj.h index 542381473795..fa43153b6954 100644 --- a/js/src/jsobj.h +++ b/js/src/jsobj.h @@ -954,6 +954,7 @@ struct JSObject : js::gc::Cell inline bool hasPrivate() const; inline void *getPrivate() const; inline void setPrivate(void *data); + inline void initPrivate(void *data); /* Access private data for an object with a known number of fixed slots. */ inline void *getPrivate(size_t nfixed) const; @@ -1355,6 +1356,7 @@ struct JSObject : js::gc::Cell static inline void writeBarrierPre(JSObject *obj); static inline void writeBarrierPost(JSObject *obj, void *addr); + static inline void readBarrier(JSObject *obj); inline void privateWriteBarrierPre(void **oldval); inline void privateWriteBarrierPost(void **oldval); diff --git a/js/src/jsobjinlines.h b/js/src/jsobjinlines.h index 930a807d0b39..2455377c4648 100644 --- a/js/src/jsobjinlines.h +++ b/js/src/jsobjinlines.h @@ -119,6 +119,12 @@ JSObject::setPrivate(void *data) privateWriteBarrierPost(pprivate); } +inline void +JSObject::initPrivate(void *data) +{ + privateRef(numFixedSlots()) = data; +} + inline bool JSObject::enumerate(JSContext *cx, JSIterateOp iterop, js::Value *statep, jsid *idp) { @@ -602,20 +608,32 @@ JSObject::moveDenseArrayElements(uintN dstStart, uintN srcStart, uintN count) JS_ASSERT(srcStart + count <= getDenseArrayInitializedLength()); /* - * Use a custom write barrier here since it's performance sensitive. We - * only want to barrier the elements that are being overwritten. - */ - uintN markStart, markEnd; - if (dstStart > srcStart) { - markStart = js::Max(srcStart + count, dstStart); - markEnd = dstStart + count; + * Using memmove here would skip write barriers. Also, we need to consider + * an array containing [A, B, C], in the following situation: + * + * 1. Incremental GC marks slot 0 of array (i.e., A), then returns to JS code. + * 2. JS code moves slots 1..2 into slots 0..1, so it contains [B, C, C]. + * 3. Incremental GC finishes by marking slots 1 and 2 (i.e., C). + * + * Since normal marking never happens on B, it is very important that the + * write barrier is invoked here on B, despite the fact that it exists in + * the array before and after the move. 
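A toy pre-barrier showing why the copy below must go element by element through the barriered type instead of using memmove (ToyHeapValue and preBarrier are stand-ins, not the real types): each assignment first marks the slot's old value, so B, the old value of slot 1 in the example, gets marked during step 2.

struct ToyHeapValue {
    void *v;
    static void preBarrier(void *old) {
        // the real barrier marks 'old' so an in-progress GC cannot miss it
        (void) old;
    }
    ToyHeapValue &operator=(const ToyHeapValue &other) {
        preBarrier(v);              // snapshot-at-the-beginning semantics
        v = other.v;
        return *this;
    }
};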
+ */ + if (compartment()->needsBarrier()) { + if (dstStart < srcStart) { + js::HeapValue *dst = elements + dstStart; + js::HeapValue *src = elements + srcStart; + for (unsigned i = 0; i < count; i++, dst++, src++) + *dst = *src; + } else { + js::HeapValue *dst = elements + dstStart + count - 1; + js::HeapValue *src = elements + srcStart + count - 1; + for (unsigned i = 0; i < count; i++, dst--, src--) + *dst = *src; + } } else { - markStart = dstStart; - markEnd = js::Min(dstStart + count, srcStart); + memmove(elements + dstStart, elements + srcStart, count * sizeof(js::Value)); } - prepareElementRangeForOverwrite(markStart, markEnd); - - memmove(elements + dstStart, elements + srcStart, count * sizeof(js::Value)); } inline void @@ -2126,6 +2144,18 @@ JSObject::writeBarrierPre(JSObject *obj) #endif } +inline void +JSObject::readBarrier(JSObject *obj) +{ +#ifdef JSGC_INCREMENTAL + JSCompartment *comp = obj->compartment(); + if (comp->needsBarrier()) { + JS_ASSERT(!comp->rt->gcRunning); + MarkObjectUnbarriered(comp->barrierTracer(), obj, "read barrier"); + } +#endif +} + inline void JSObject::writeBarrierPost(JSObject *obj, void *addr) { diff --git a/js/src/jspropertycache.cpp b/js/src/jspropertycache.cpp index 5f76e3cbae15..3548f2ef6f79 100644 --- a/js/src/jspropertycache.cpp +++ b/js/src/jspropertycache.cpp @@ -282,7 +282,7 @@ PropertyCache::purge(JSContext *cx) #ifdef JS_THREADSAFE fprintf(fp, "thread %lu, ", (unsigned long) cx->thread->id); #endif - fprintf(fp, "GC %u\n", cx->runtime->gcNumber); + fprintf(fp, "GC %lu\n", (unsigned long)cx->runtime->gcNumber); # define P(mem) fprintf(fp, "%11s %10lu\n", #mem, (unsigned long)mem) P(fills); diff --git a/js/src/jsproxy.cpp b/js/src/jsproxy.cpp index 3ea285e4388f..f0ddde305fce 100644 --- a/js/src/jsproxy.cpp +++ b/js/src/jsproxy.cpp @@ -1311,7 +1311,7 @@ proxy_TypeOf(JSContext *cx, JSObject *proxy) JS_FRIEND_DATA(Class) js::ObjectProxyClass = { "Proxy", - Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(4), + Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(4), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ JS_PropertyStub, /* getProperty */ @@ -1367,7 +1367,7 @@ JS_FRIEND_DATA(Class) js::ObjectProxyClass = { JS_FRIEND_DATA(Class) js::OuterWindowProxyClass = { "Proxy", - Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(4), + Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(4), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ JS_PropertyStub, /* getProperty */ @@ -1445,7 +1445,7 @@ proxy_Construct(JSContext *cx, uintN argc, Value *vp) JS_FRIEND_DATA(Class) js::FunctionProxyClass = { "Proxy", - Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(6), + Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(6), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ JS_PropertyStub, /* getProperty */ diff --git a/js/src/jstypedarray.cpp b/js/src/jstypedarray.cpp index 1fa5bc56732f..166b3ff8abc9 100644 --- a/js/src/jstypedarray.cpp +++ b/js/src/jstypedarray.cpp @@ -2180,6 +2180,7 @@ Class ArrayBuffer::slowClass = { Class js::ArrayBufferClass = { "ArrayBuffer", JSCLASS_HAS_PRIVATE | + JSCLASS_IMPLEMENTS_BARRIERS | Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(ARRAYBUFFER_RESERVED_SLOTS) | JSCLASS_HAS_CACHED_PROTO(JSProto_ArrayBuffer), @@ -2298,7 +2299,7 @@ JSFunctionSpec _typedArray::jsfuncs[] = { \ { \ #_typedArray, \ JSCLASS_HAS_RESERVED_SLOTS(TypedArray::FIELD_MAX) | \ - JSCLASS_HAS_PRIVATE | \ + JSCLASS_HAS_PRIVATE | 
JSCLASS_IMPLEMENTS_BARRIERS | \ JSCLASS_FOR_OF_ITERATION | \ Class::NON_NATIVE, \ JS_PropertyStub, /* addProperty */ \ diff --git a/js/src/jsweakmap.cpp b/js/src/jsweakmap.cpp index c2a15a680a23..a5e3a3aff514 100644 --- a/js/src/jsweakmap.cpp +++ b/js/src/jsweakmap.cpp @@ -62,7 +62,7 @@ bool WeakMapBase::markAllIteratively(JSTracer *tracer) { bool markedAny = false; - JSRuntime *rt = tracer->context->runtime; + JSRuntime *rt = tracer->runtime; for (WeakMapBase *m = rt->gcWeakMapList; m; m = m->next) { if (m->markIteratively(tracer)) markedAny = true; @@ -73,7 +73,7 @@ WeakMapBase::markAllIteratively(JSTracer *tracer) void WeakMapBase::sweepAll(JSTracer *tracer) { - JSRuntime *rt = tracer->context->runtime; + JSRuntime *rt = tracer->runtime; for (WeakMapBase *m = rt->gcWeakMapList; m; m = m->next) m->sweep(tracer); } @@ -314,8 +314,16 @@ WeakMap_mark(JSTracer *trc, JSObject *obj) static void WeakMap_finalize(JSContext *cx, JSObject *obj) { - ObjectValueMap *map = GetObjectMap(obj); - cx->delete_(map); + if (ObjectValueMap *map = GetObjectMap(obj)) { + map->check(); +#ifdef DEBUG + map->~ObjectValueMap(); + memset(map, 0xdc, sizeof(ObjectValueMap)); + cx->free_(map); +#else + cx->delete_(map); +#endif + } } static JSBool @@ -331,7 +339,7 @@ WeakMap_construct(JSContext *cx, uintN argc, Value *vp) Class js::WeakMapClass = { "WeakMap", - JSCLASS_HAS_PRIVATE | + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_CACHED_PROTO(JSProto_WeakMap), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ diff --git a/js/src/jsweakmap.h b/js/src/jsweakmap.h index fa082427d214..9c1aa316bc51 100644 --- a/js/src/jsweakmap.h +++ b/js/src/jsweakmap.h @@ -127,7 +127,7 @@ class WeakMapBase { // Add ourselves to the list if we are not already in the list. We can already // be in the list if the weak map is marked more than once due delayed marking. if (next == WeakMapNotInList) { - JSRuntime *rt = tracer->context->runtime; + JSRuntime *rt = tracer->runtime; next = rt->gcWeakMapList; rt->gcWeakMapList = this; } @@ -156,6 +156,8 @@ class WeakMapBase { // Trace all delayed weak map bindings. Used by the cycle collector. static void traceAllMappings(WeakMapTracer *tracer); + void check() { JS_ASSERT(next == WeakMapNotInList); } + // Remove everything from the live weak map list. 
static void resetWeakMapList(JSRuntime *rt); diff --git a/js/src/jsxml.cpp b/js/src/jsxml.cpp index d4dd972ba926..92e09070e2da 100644 --- a/js/src/jsxml.cpp +++ b/js/src/jsxml.cpp @@ -5369,7 +5369,7 @@ out: JS_FRIEND_DATA(Class) js::XMLClass = { js_XML_str, - JSCLASS_HAS_PRIVATE | + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_CACHED_PROTO(JSProto_XML), JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ @@ -7922,7 +7922,7 @@ xmlfilter_finalize(JSContext *cx, JSObject *obj) Class js_XMLFilterClass = { "XMLFilter", - JSCLASS_HAS_PRIVATE | JSCLASS_IS_ANONYMOUS, + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_IS_ANONYMOUS, JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ JS_PropertyStub, /* getProperty */ diff --git a/js/src/methodjit/Compiler.cpp b/js/src/methodjit/Compiler.cpp index 0fa45cb53ed9..139943112df8 100644 --- a/js/src/methodjit/Compiler.cpp +++ b/js/src/methodjit/Compiler.cpp @@ -3924,7 +3924,7 @@ void mjit::Compiler::interruptCheckHelper() { Jump jump; - if (cx->runtime->gcZeal() >= js::gc::ZealVerifierThreshold) { + if (cx->runtime->gcZeal() == js::gc::ZealVerifierValue) { /* For barrier verification, always take the interrupt so we can verify. */ jump = masm.jump(); } else { @@ -6892,7 +6892,9 @@ mjit::Compiler::jsop_regexp() !cx->typeInferenceEnabled() || analysis->localsAliasStack() || types::TypeSet::HasObjectFlags(cx, globalObj->getType(cx), - types::OBJECT_FLAG_REGEXP_FLAGS_SET)) { + types::OBJECT_FLAG_REGEXP_FLAGS_SET) || + cx->runtime->gcIncrementalState == gc::MARK) + { prepareStubCall(Uses(0)); masm.move(ImmPtr(obj), Registers::ArgReg1); INLINE_STUBCALL(stubs::RegExp, REJOIN_FALLTHROUGH); @@ -6946,10 +6948,11 @@ mjit::Compiler::jsop_regexp() } /* - * Force creation of the RegExpShared in the script's RegExpObject - * so that we grab it in the getNewObject template copy. Note that - * JIT code is discarded on every GC, which permits us to burn in - * the pointer to the RegExpShared. + * Force creation of the RegExpShared in the script's RegExpObject so that + * we grab it in the getNewObject template copy. Note that JIT code is + * discarded on every GC, which permits us to burn in the pointer to the + * RegExpShared. We don't do this during an incremental + * GC, since we don't discard JIT code after every marking slice. */ if (!reobj->getShared(cx)) return false; diff --git a/js/src/methodjit/Compiler.h b/js/src/methodjit/Compiler.h index 6cb7caa1aeac..2708b1520475 100644 --- a/js/src/methodjit/Compiler.h +++ b/js/src/methodjit/Compiler.h @@ -484,7 +484,7 @@ private: bool hasGlobalReallocation; bool oomInVector; // True if we have OOM'd appending to a vector. bool overflowICSpace; // True if we added a constant pool in a reserved space. - uint32_t gcNumber; + uint64_t gcNumber; enum { NoApplyTricks, LazyArgsObj } applyTricks; PCLengthEntry *pcLengths; diff --git a/js/src/methodjit/MethodJIT.h b/js/src/methodjit/MethodJIT.h index eb23d88ad1b7..bdb7c8904446 100644 --- a/js/src/methodjit/MethodJIT.h +++ b/js/src/methodjit/MethodJIT.h @@ -402,7 +402,7 @@ struct RecompilationMonitor unsigned frameExpansions; /* If a GC occurs it may discard jit code on the stack. 
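The widened counter supports the monitor pattern sketched here (ToyMonitor is invented): snapshot the counter, run code that may GC, and treat any change as a sign that JIT code may have been discarded. With uint64_t, a wraparound that could mask an intervening GC is effectively impossible.

#include <cstdint>

struct ToyMonitor {
    const uint64_t &gcNumber;       // the runtime's live counter
    uint64_t snapshot;

    explicit ToyMonitor(const uint64_t &counter)
      : gcNumber(counter), snapshot(counter) {}

    bool codeMayBeDiscarded() const { return gcNumber != snapshot; }
};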
*/ - unsigned gcNumber; + uint64_t gcNumber; RecompilationMonitor(JSContext *cx) : cx(cx), diff --git a/js/src/methodjit/PolyIC.cpp b/js/src/methodjit/PolyIC.cpp index 85786eca7735..0441bc16a5af 100644 --- a/js/src/methodjit/PolyIC.cpp +++ b/js/src/methodjit/PolyIC.cpp @@ -102,7 +102,7 @@ class PICStubCompiler : public BaseCompiler JSScript *script; ic::PICInfo &pic; void *stub; - uint32_t gcNumber; + uint64_t gcNumber; public: bool canCallHook; diff --git a/js/src/methodjit/StubCalls.cpp b/js/src/methodjit/StubCalls.cpp index 9f8ecc40425b..3645a642dcac 100644 --- a/js/src/methodjit/StubCalls.cpp +++ b/js/src/methodjit/StubCalls.cpp @@ -878,7 +878,7 @@ stubs::DebuggerStatement(VMFrame &f, jsbytecode *pc) void JS_FASTCALL stubs::Interrupt(VMFrame &f, jsbytecode *pc) { - gc::VerifyBarriers(f.cx); + gc::MaybeVerifyBarriers(f.cx); if (!js_HandleExecutionInterrupt(f.cx)) THROW(); diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index 214c4c12774e..d93c5db1b375 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -1286,6 +1286,7 @@ static const struct ParamPair { {"maxMallocBytes", JSGC_MAX_MALLOC_BYTES}, {"gcBytes", JSGC_BYTES}, {"gcNumber", JSGC_NUMBER}, + {"sliceTimeBudget", JSGC_SLICE_TIME_BUDGET} }; static JSBool @@ -1427,6 +1428,35 @@ ScheduleGC(JSContext *cx, uintN argc, jsval *vp) *vp = JSVAL_VOID; return JS_TRUE; } + +static JSBool +VerifyBarriers(JSContext *cx, uintN argc, jsval *vp) +{ + gc::VerifyBarriers(cx); + *vp = JSVAL_VOID; + return JS_TRUE; +} + +static JSBool +GCSlice(JSContext *cx, uintN argc, jsval *vp) +{ + uint32_t budget; + + if (argc != 1) { + JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL, + (argc < 1) + ? JSSMSG_NOT_ENOUGH_ARGS + : JSSMSG_TOO_MANY_ARGS, + "gcslice"); + return JS_FALSE; + } + if (!JS_ValueToECMAUint32(cx, vp[2], &budget)) + return JS_FALSE; + + GCDebugSlice(cx, budget); + *vp = JSVAL_VOID; + return JS_TRUE; +} #endif /* JS_GC_ZEAL */ typedef struct JSCountHeapNode JSCountHeapNode; @@ -1473,7 +1503,7 @@ CountHeapNotify(JSTracer *trc, void **thingp, JSGCTraceKind kind) if (node) { countTracer->recycleList = node->next; } else { - node = (JSCountHeapNode *) JS_malloc(trc->context, sizeof *node); + node = (JSCountHeapNode *) js_malloc(sizeof *node); if (!node) { countTracer->ok = JS_FALSE; return; @@ -1575,7 +1605,7 @@ CountHeap(JSContext *cx, uintN argc, jsval *vp) } while ((node = countTracer.recycleList) != NULL) { countTracer.recycleList = node->next; - JS_free(cx, node); + js_free(node); } JS_DHashTableFinish(&countTracer.visited); @@ -4001,6 +4031,8 @@ static JSFunctionSpec shell_functions[] = { #ifdef JS_GC_ZEAL JS_FN("gczeal", GCZeal, 2,0), JS_FN("schedulegc", ScheduleGC, 1,0), + JS_FN("verifybarriers", VerifyBarriers, 0,0), + JS_FN("gcslice", GCSlice, 1,0), #endif JS_FN("internalConst", InternalConst, 1,0), JS_FN("setDebug", SetDebug, 1,0), @@ -4114,6 +4146,8 @@ static const char *const shell_help_messages[] = { " How zealous the garbage collector should be", "schedulegc(num, [compartmentGC?])\n" " Schedule a GC to happen after num allocations", +"verifybarriers() Start or end a run of the write barrier verifier", +"gcslice(n) Run an incremental GC slice that marks ~n objects", #endif "internalConst(name)\n" " Query an internal constant for the engine. 
See InternalConst source for the\n" @@ -5457,7 +5491,7 @@ main(int argc, char **argv, char **envp) if (!cx) return 1; - JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_COMPARTMENT); + JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL); JS_SetGCParameterForThread(cx, JSGC_MAX_CODE_CACHE_BYTES, 16 * 1024 * 1024); /* Must be done before creating the global object */ diff --git a/js/src/vm/Debugger.cpp b/js/src/vm/Debugger.cpp index 14ee3b5bb3c3..07497632200c 100644 --- a/js/src/vm/Debugger.cpp +++ b/js/src/vm/Debugger.cpp @@ -1323,7 +1323,9 @@ Debugger::finalize(JSContext *cx, JSObject *obj) } Class Debugger::jsclass = { - "Debugger", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUG_COUNT), + "Debugger", + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | + JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUG_COUNT), JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub, JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Debugger::finalize, NULL, /* reserved0 */ @@ -1854,7 +1856,9 @@ DebuggerScript_trace(JSTracer *trc, JSObject *obj) } Class DebuggerScript_class = { - "Script", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGSCRIPT_COUNT), + "Script", + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | + JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGSCRIPT_COUNT), JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub, JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL, NULL, /* reserved0 */ @@ -2956,7 +2960,9 @@ DebuggerObject_trace(JSTracer *trc, JSObject *obj) } Class DebuggerObject_class = { - "Object", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGOBJECT_COUNT), + "Object", + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | + JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGOBJECT_COUNT), JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub, JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL, NULL, /* reserved0 */ @@ -3598,7 +3604,9 @@ DebuggerEnv_trace(JSTracer *trc, JSObject *obj) } Class DebuggerEnv_class = { - "Environment", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGENV_COUNT), + "Environment", + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | + JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGENV_COUNT), JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub, JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL, NULL, /* reserved0 */ diff --git a/js/src/vm/RegExpObject-inl.h b/js/src/vm/RegExpObject-inl.h index 23bd372a64b5..fbcad568ebbc 100644 --- a/js/src/vm/RegExpObject-inl.h +++ b/js/src/vm/RegExpObject-inl.h @@ -80,6 +80,14 @@ RegExpObject::getShared(JSContext *cx) return createShared(cx); } +inline void +RegExpObject::setShared(JSContext *cx, RegExpShared *shared) +{ + if (shared) + shared->prepareForUse(cx); + JSObject::setPrivate(shared); +} + inline void RegExpObject::setLastIndex(const Value &v) { @@ -148,6 +156,12 @@ RegExpToShared(JSContext *cx, JSObject &obj) return Proxy::regexp_toShared(cx, &obj); } +inline void +RegExpShared::prepareForUse(JSContext *cx) +{ + gcNumberWhenUsed = cx->runtime->gcNumber; +} + } /* namespace js */ #endif diff --git a/js/src/vm/RegExpObject.cpp b/js/src/vm/RegExpObject.cpp index c276695b6a52..e837a3fef83a 100644 --- a/js/src/vm/RegExpObject.cpp +++ b/js/src/vm/RegExpObject.cpp @@ -62,7 +62,7 @@ RegExpObjectBuilder::RegExpObjectBuilder(JSContext *cx, RegExpObject *reobj) : cx(cx), reobj_(reobj) { if (reobj_) - reobj_->setPrivate(NULL); + reobj_->setShared(cx, NULL); } bool @@ -74,7 +74,7 @@ RegExpObjectBuilder::getOrCreate() 
JSObject *obj = NewBuiltinClassInstance(cx, &RegExpClass); if (!obj) return false; - obj->setPrivate(NULL); + obj->initPrivate(NULL); reobj_ = &obj->asRegExp(); return true; @@ -88,7 +88,7 @@ RegExpObjectBuilder::getOrCreateClone(RegExpObject *proto) JSObject *clone = NewObjectWithGivenProto(cx, &RegExpClass, proto, proto->getParent()); if (!clone) return false; - clone->setPrivate(NULL); + clone->initPrivate(NULL); reobj_ = &clone->asRegExp(); return true; @@ -103,7 +103,7 @@ RegExpObjectBuilder::build(JSAtom *source, RegExpShared &shared) if (!reobj_->init(cx, source, shared.getFlags())) return NULL; - reobj_->setPrivate(&shared); + reobj_->setShared(cx, &shared); return reobj_; } @@ -330,13 +330,18 @@ RegExpCode::execute(JSContext *cx, const jschar *chars, size_t length, size_t st static void regexp_trace(JSTracer *trc, JSObject *obj) { - if (trc->runtime->gcRunning) + /* + * We have to check both conditions, since: + * 1. During TraceRuntime, gcRunning is set + * 2. When a write barrier executes, IS_GC_MARKING_TRACER is true. + */ + if (trc->runtime->gcRunning && IS_GC_MARKING_TRACER(trc)) obj->setPrivate(NULL); } Class js::RegExpClass = { js_RegExp_str, - JSCLASS_HAS_PRIVATE | + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(RegExpObject::RESERVED_SLOTS) | JSCLASS_HAS_CACHED_PROTO(JSProto_RegExp), JS_PropertyStub, /* addProperty */ @@ -360,8 +365,8 @@ Class js::RegExpClass = { regexp_trace }; -RegExpShared::RegExpShared(RegExpFlag flags) - : parenCount(0), flags(flags), activeUseCount(0) +RegExpShared::RegExpShared(JSRuntime *rt, RegExpFlag flags) + : parenCount(0), flags(flags), activeUseCount(0), gcNumberWhenUsed(rt->gcNumber) {} RegExpObject * @@ -402,7 +407,7 @@ RegExpObject::createShared(JSContext *cx) if (!shared) return NULL; - setPrivate(shared); + setShared(cx, shared); return shared; } @@ -616,11 +621,12 @@ RegExpCompartment::init(JSContext *cx) } void -RegExpCompartment::purge() +RegExpCompartment::sweep(JSRuntime *rt) { for (Map::Enum e(map_); !e.empty(); e.popFront()) { + /* See the comment on RegExpShared lifetime in RegExpObject.h. */ RegExpShared *shared = e.front().value; - if (shared->activeUseCount == 0) { + if (shared->activeUseCount == 0 && shared->gcNumberWhenUsed < rt->gcStartNumber) { Foreground::delete_(shared); e.removeFront(); } @@ -630,14 +636,14 @@ RegExpCompartment::purge() inline RegExpShared * RegExpCompartment::get(JSContext *cx, JSAtom *keyAtom, JSAtom *source, RegExpFlag flags, Type type) { - DebugOnly gcNumberBefore = cx->runtime->gcNumber; + DebugOnly gcNumberBefore = cx->runtime->gcNumber; Key key(keyAtom, flags, type); Map::AddPtr p = map_.lookupForAdd(key); if (p) return p->value; - RegExpShared *shared = cx->runtime->new_(flags); + RegExpShared *shared = cx->runtime->new_(cx->runtime, flags); if (!shared || !shared->compile(cx, source)) goto error; diff --git a/js/src/vm/RegExpObject.h b/js/src/vm/RegExpObject.h index e5a8d2ed0a3c..df52548e03fd 100644 --- a/js/src/vm/RegExpObject.h +++ b/js/src/vm/RegExpObject.h @@ -169,6 +169,7 @@ class RegExpObject : public JSObject inline RegExpShared &shared() const; inline RegExpShared *maybeShared(); inline RegExpShared *getShared(JSContext *cx); + inline void setShared(JSContext *cx, RegExpShared *shared); private: friend class RegExpObjectBuilder; @@ -190,6 +191,9 @@ class RegExpObject : public JSObject RegExpObject() MOZ_DELETE; RegExpObject &operator=(const RegExpObject &reo) MOZ_DELETE; + + /* Call setShared in preference to setPrivate. 
*/ + void setPrivate(void *priv) MOZ_DELETE; }; class RegExpObjectBuilder @@ -293,7 +297,26 @@ class RegExpCode } /* namespace detail */ -/* The compiled representation of a regexp. */ +/* + * A RegExpShared is the compiled representation of a regexp. A RegExpShared is + * pointed to by potentially multiple RegExpObjects. Additionally, C++ code may + * have pointers to RegExpShareds on the stack. The RegExpShareds are tracked in + * a RegExpCompartment hashtable, and most are destroyed on every GC. + * + * During a GC, the trace hook for RegExpObject clears any pointers to + * RegExpShareds so that there will be no dangling pointers when they are + * deleted. However, some RegExpShareds are not deleted: + * + * 1. Any RegExpShared with pointers from the C++ stack is not deleted. + * 2. Any RegExpShared that was installed in a RegExpObject during an + * incremental GC is not deleted. This is because the RegExpObject may have + * been traced through before the new RegExpShared was installed, in which + * case deleting the RegExpShared would turn the RegExpObject's reference + * into a dangling pointer + * + * The activeUseCount and gcNumberWhenUsed fields are used to track these two + * conditions. + */ class RegExpShared { friend class RegExpCompartment; @@ -301,11 +324,12 @@ class RegExpShared detail::RegExpCode code; uintN parenCount; RegExpFlag flags; - size_t activeUseCount; + size_t activeUseCount; /* See comment above. */ + uint64_t gcNumberWhenUsed; /* See comment above. */ bool compile(JSContext *cx, JSAtom *source); - RegExpShared(RegExpFlag flags); + RegExpShared(JSRuntime *rt, RegExpFlag flags); JS_DECLARE_ALLOCATION_FRIENDS_FOR_PRIVATE_CONSTRUCTOR; public: @@ -338,6 +362,9 @@ class RegExpShared RegExpShared &operator*() { JS_ASSERT(initialized()); return *re_; } }; + /* Called when a RegExpShared is installed into a RegExpObject. */ + inline void prepareForUse(JSContext *cx); + /* Primary interface: run this regular expression on the given string. */ RegExpRunStatus @@ -388,7 +415,7 @@ class RegExpCompartment ~RegExpCompartment(); bool init(JSContext *cx); - void purge(); + void sweep(JSRuntime *rt); /* Return a regexp corresponding to the given (source, flags) pair. 
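The two survival rules in the lifetime comment above reduce to a single sweep predicate, mirrored here as a sketch (the free function is invented; the real test lives in RegExpCompartment::sweep):

#include <cstddef>
#include <cstdint>

// Destroy a shared regexp only if no C++ stack reference holds it and it
// was not installed during the GC that is currently running.
static bool CanDestroyRegExpShared(size_t activeUseCount,
                                   uint64_t gcNumberWhenUsed,
                                   uint64_t gcStartNumber) {
    return activeUseCount == 0 && gcNumberWhenUsed < gcStartNumber;
}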
*/ RegExpShared *get(JSContext *cx, JSAtom *source, RegExpFlag flags); diff --git a/js/src/vm/RegExpStatics.cpp b/js/src/vm/RegExpStatics.cpp index b34d07769317..f80bf30bc222 100644 --- a/js/src/vm/RegExpStatics.cpp +++ b/js/src/vm/RegExpStatics.cpp @@ -71,7 +71,7 @@ resc_trace(JSTracer *trc, JSObject *obj) Class js::RegExpStaticsClass = { "RegExpStatics", - JSCLASS_HAS_PRIVATE, + JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS, JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* delProperty */ JS_PropertyStub, /* getProperty */ diff --git a/js/src/vm/Stack.cpp b/js/src/vm/Stack.cpp index 720e7bda6bba..cf0f8be1a59a 100644 --- a/js/src/vm/Stack.cpp +++ b/js/src/vm/Stack.cpp @@ -532,6 +532,15 @@ StackSpace::mark(JSTracer *trc) } } +void +StackSpace::markActiveCompartments() +{ + for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) { + for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) + MarkCompartmentActive(fp); + } +} + JS_FRIEND_API(bool) StackSpace::ensureSpaceSlow(JSContext *cx, MaybeReportError report, Value *from, ptrdiff_t nvals, JSCompartment *dest) const diff --git a/js/src/vm/Stack.h b/js/src/vm/Stack.h index c2690d87af84..274789b9c716 100644 --- a/js/src/vm/Stack.h +++ b/js/src/vm/Stack.h @@ -1555,6 +1555,9 @@ class StackSpace void mark(JSTracer *trc); void markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc); + /* Called during GC: sets active flag on compartments with active frames. */ + void markActiveCompartments(); + /* We only report the committed size; uncommitted size is uninteresting. */ JS_FRIEND_API(size_t) sizeOfCommitted(); }; diff --git a/js/xpconnect/idl/nsIXPConnect.idl b/js/xpconnect/idl/nsIXPConnect.idl index 4de0430fc80f..686f5517a3d9 100644 --- a/js/xpconnect/idl/nsIXPConnect.idl +++ b/js/xpconnect/idl/nsIXPConnect.idl @@ -398,7 +398,7 @@ enum nsGCType { }; %} -[uuid(686bb1d0-4711-11e1-b86c-0800200c9a66)] +[uuid(e92bf5e0-494c-11e1-b86c-0800200c9a66)] interface nsIXPConnect : nsISupports { %{ C++ @@ -734,6 +734,12 @@ interface nsIXPConnect : nsISupports */ void GarbageCollect(in PRUint32 reason, in PRUint32 kind); + /** + * Signals a good place to do an incremental GC slice, because the + * browser is drawing a frame. + */ + void NotifyDidPaint(); + /** * Define quick stubs on the given object, @a proto. 
* diff --git a/js/xpconnect/src/XPCInlines.h b/js/xpconnect/src/XPCInlines.h index 27a4d53d1c74..4d4af6e01d43 100644 --- a/js/xpconnect/src/XPCInlines.h +++ b/js/xpconnect/src/XPCInlines.h @@ -604,7 +604,8 @@ void XPCWrappedNativeTearOff::SetJSObject(JSObject* JSObj) inline XPCWrappedNativeTearOff::~XPCWrappedNativeTearOff() { - NS_ASSERTION(!(GetInterface()||GetNative()||GetJSObjectPreserveColor()), "tearoff not empty in dtor"); + NS_ASSERTION(!(GetInterface()||GetNative()||GetJSObjectPreserveColor()), + "tearoff not empty in dtor"); } /***************************************************************************/ diff --git a/js/xpconnect/src/XPCJSRuntime.cpp b/js/xpconnect/src/XPCJSRuntime.cpp index 3bc55abf66d8..9d649c4b8c10 100644 --- a/js/xpconnect/src/XPCJSRuntime.cpp +++ b/js/xpconnect/src/XPCJSRuntime.cpp @@ -911,6 +911,8 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status) #ifdef XPC_TRACK_DEFERRED_RELEASES printf("XPC - End deferred Releases\n"); #endif + + self->GetXPConnect()->ClearGCBeforeCC(); break; } default: @@ -1890,6 +1892,18 @@ AccumulateTelemetryCallback(int id, uint32_t sample) case JS_TELEMETRY_GC_SWEEP_MS: Telemetry::Accumulate(Telemetry::GC_SWEEP_MS, sample); break; + case JS_TELEMETRY_GC_SLICE_MS: + Telemetry::Accumulate(Telemetry::GC_SLICE_MS, sample); + break; + case JS_TELEMETRY_GC_MMU_50: + Telemetry::Accumulate(Telemetry::GC_MMU_50, sample); + break; + case JS_TELEMETRY_GC_RESET: + Telemetry::Accumulate(Telemetry::GC_RESET, sample); + break; + case JS_TELEMETRY_GC_INCREMENTAL_DISABLED: + Telemetry::Accumulate(Telemetry::GC_INCREMENTAL_DISABLED, sample); + break; } } diff --git a/js/xpconnect/src/nsXPConnect.cpp b/js/xpconnect/src/nsXPConnect.cpp index d506d95af8bf..e1cf2c327c77 100644 --- a/js/xpconnect/src/nsXPConnect.cpp +++ b/js/xpconnect/src/nsXPConnect.cpp @@ -406,8 +406,6 @@ nsXPConnect::Collect(PRUint32 reason, PRUint32 kind) // To improve debugging, if DEBUG_CC is defined all JS objects are // traversed. - mNeedGCBeforeCC = false; - XPCCallContext ccx(NATIVE_CALLER); if (!ccx.IsValid()) return; @@ -424,6 +422,8 @@ nsXPConnect::Collect(PRUint32 reason, PRUint32 kind) js::gcreason::Reason gcreason = (js::gcreason::Reason)reason; if (kind == nsGCShrinking) { js::ShrinkingGC(cx, gcreason); + } else if (kind == nsGCIncremental) { + js::IncrementalGC(cx, gcreason); } else { MOZ_ASSERT(kind == nsGCNormal); js::GCForReason(cx, gcreason); @@ -2825,6 +2825,23 @@ nsXPConnect::GetTelemetryValue(JSContext *cx, jsval *rval) return NS_OK; } +NS_IMETHODIMP +nsXPConnect::NotifyDidPaint() +{ + JSRuntime *rt = mRuntime->GetJSRuntime(); + if (!js::WantGCSlice(rt)) + return NS_OK; + + XPCCallContext ccx(NATIVE_CALLER); + if (!ccx.IsValid()) + return UnexpectedFailure(NS_ERROR_FAILURE); + + JSContext *cx = ccx.GetJSContext(); + + js::NotifyDidPaint(cx); + return NS_OK; +} + /* These are here to be callable from a debugger */ JS_BEGIN_EXTERN_C JS_EXPORT_API(void) DumpJSStack() diff --git a/js/xpconnect/src/xpcprivate.h b/js/xpconnect/src/xpcprivate.h index 3f97e1d1a021..8f1c9d5753b7 100644 --- a/js/xpconnect/src/xpcprivate.h +++ b/js/xpconnect/src/xpcprivate.h @@ -318,7 +318,8 @@ typedef nsDataHashtable XPCCompart return (result || !src) ? 
NS_OK : NS_ERROR_OUT_OF_MEMORY -#define WRAPPER_SLOTS (JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(1)) +#define WRAPPER_SLOTS (JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | \ + JSCLASS_HAS_RESERVED_SLOTS(1)) #define INVALID_OBJECT ((JSObject *)1) @@ -520,6 +521,7 @@ public: JSBool IsShuttingDown() const {return mShuttingDown;} void EnsureGCBeforeCC() { mNeedGCBeforeCC = true; } + void ClearGCBeforeCC() { mNeedGCBeforeCC = false; } nsresult GetInfoForIID(const nsIID * aIID, nsIInterfaceInfo** info); nsresult GetInfoForName(const char * name, nsIInterfaceInfo** info); diff --git a/js/xpconnect/src/xpcpublic.h b/js/xpconnect/src/xpcpublic.h index 6aabd0d885c6..709a6198ecf0 100644 --- a/js/xpconnect/src/xpcpublic.h +++ b/js/xpconnect/src/xpcpublic.h @@ -75,7 +75,8 @@ xpc_CreateMTGlobalObject(JSContext *cx, JSClass *clasp, #define XPCONNECT_GLOBAL_FLAGS \ JSCLASS_XPCONNECT_GLOBAL | JSCLASS_HAS_PRIVATE | \ - JSCLASS_PRIVATE_IS_NSISUPPORTS | JSCLASS_GLOBAL_FLAGS_WITH_SLOTS(1) + JSCLASS_PRIVATE_IS_NSISUPPORTS | JSCLASS_IMPLEMENTS_BARRIERS | \ + JSCLASS_GLOBAL_FLAGS_WITH_SLOTS(1) void TraceXPCGlobal(JSTracer *trc, JSObject *obj); @@ -182,8 +183,12 @@ xpc_UnmarkGrayObjectRecursive(JSObject* obj); inline void xpc_UnmarkGrayObject(JSObject *obj) { - if (obj && xpc_IsGrayGCThing(obj)) - xpc_UnmarkGrayObjectRecursive(obj); + if (obj) { + if (xpc_IsGrayGCThing(obj)) + xpc_UnmarkGrayObjectRecursive(obj); + else if (js::IsIncrementalBarrierNeededOnObject(obj)) + js::IncrementalReferenceBarrier(obj); + } } // If aVariant is an XPCVariant, this marks the object to be in aGeneration. diff --git a/layout/base/nsPresShell.cpp b/layout/base/nsPresShell.cpp index ffe4bc85d19b..6a3e7a8e2532 100644 --- a/layout/base/nsPresShell.cpp +++ b/layout/base/nsPresShell.cpp @@ -5428,6 +5428,24 @@ PresShell::ProcessSynthMouseMoveEvent(bool aFromScroll) } } +class nsAutoNotifyDidPaint +{ +public: + explicit nsAutoNotifyDidPaint(bool aWillSendDidPaint) + : mWillSendDidPaint(aWillSendDidPaint) + { + } + ~nsAutoNotifyDidPaint() + { + if (!mWillSendDidPaint && nsContentUtils::XPConnect()) { + nsContentUtils::XPConnect()->NotifyDidPaint(); + } + } + +private: + bool mWillSendDidPaint; +}; + void PresShell::Paint(nsIView* aViewToPaint, nsIWidget* aWidgetToPaint, @@ -5451,6 +5469,8 @@ PresShell::Paint(nsIView* aViewToPaint, NS_ASSERTION(aViewToPaint, "null view"); NS_ASSERTION(aWidgetToPaint, "Can't paint without a widget"); + nsAutoNotifyDidPaint notifyDidPaint(aWillSendDidPaint); + nsPresContext* presContext = GetPresContext(); AUTO_LAYOUT_PHASE_ENTRY_POINT(presContext, Paint); @@ -7221,6 +7241,10 @@ PresShell::DidPaint() if (rootPresContext == mPresContext) { rootPresContext->UpdatePluginGeometry(); } + + if (nsContentUtils::XPConnect()) { + nsContentUtils::XPConnect()->NotifyDidPaint(); + } } bool diff --git a/modules/libpref/src/init/all.js b/modules/libpref/src/init/all.js index e13be5299f99..7e11e1480f1e 100644 --- a/modules/libpref/src/init/all.js +++ b/modules/libpref/src/init/all.js @@ -656,6 +656,8 @@ pref("javascript.options.typeinference", true); pref("javascript.options.mem.high_water_mark", 128); pref("javascript.options.mem.max", -1); pref("javascript.options.mem.gc_per_compartment", true); +pref("javascript.options.mem.gc_incremental", true); +pref("javascript.options.mem.gc_incremental_slice_ms", 10); pref("javascript.options.mem.log", false); pref("javascript.options.gc_on_memory_pressure", true);
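Together, the two new prefs above gate incremental collection and bound each slice to roughly 10 ms. A sketch of how an embedder might read them through libpref's static helpers (mozilla::Preferences::GetBool and GetInt are the real API; the function and globals here are invented for illustration):

    #include "mozilla/Preferences.h"

    using mozilla::Preferences;

    // Illustrative globals; the patch plumbs these values elsewhere.
    static bool    gGCIncrementalEnabled = true;
    static int32_t gGCSliceTimeBudgetMs  = 10;

    static void ReadIncrementalGCPrefs()
    {
        gGCIncrementalEnabled =
            Preferences::GetBool("javascript.options.mem.gc_incremental", true);
        gGCSliceTimeBudgetMs =
            Preferences::GetInt("javascript.options.mem.gc_incremental_slice_ms", 10);
    }
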
diff --git a/toolkit/components/telemetry/TelemetryHistograms.h b/toolkit/components/telemetry/TelemetryHistograms.h index 360ac7d4a96d..ba98645a5375 100644 --- a/toolkit/components/telemetry/TelemetryHistograms.h +++ b/toolkit/components/telemetry/TelemetryHistograms.h @@ -81,6 +81,10 @@ HISTOGRAM_BOOLEAN(GC_IS_COMPARTMENTAL, "Is it a compartmental GC?") HISTOGRAM(GC_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC (ms)") HISTOGRAM(GC_MARK_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC mark phase (ms)") HISTOGRAM(GC_SWEEP_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC sweep phase (ms)") +HISTOGRAM(GC_SLICE_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running a JS GC slice (ms)") +HISTOGRAM(GC_MMU_50, 1, 100, 20, LINEAR, "Minimum percentage of time spent outside GC over any 50ms window") +HISTOGRAM_BOOLEAN(GC_RESET, "Was an incremental GC canceled?") +HISTOGRAM_BOOLEAN(GC_INCREMENTAL_DISABLED, "Is incremental GC permanently disabled?") HISTOGRAM(TELEMETRY_PING, 1, 3000, 10, EXPONENTIAL, "Time taken to submit telemetry info (ms)") HISTOGRAM_BOOLEAN(TELEMETRY_SUCCESS, "Successful telemetry submission")
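The GC_MMU_50 probe is worth a worked definition: minimum mutator utilisation is computed by sliding a 50 ms window over the cycle's slice intervals, finding the window that contains the most GC time, and reporting the remaining fraction as a percentage. A standalone sketch of that computation (the patch computes this inside the engine's GC statistics code; this version is illustrative):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // A GC slice as a half-open interval [start, end) in microseconds.
    // Slices are assumed sorted and non-overlapping.
    struct Slice { int64_t start, end; };

    // Minimum mutator utilisation: 1 minus the largest fraction of any
    // window-sized interval spent inside GC slices. Windows ending at a
    // slice end are sufficient to find the maximum GC density.
    double ComputeMMU(const std::vector<Slice> &slices, int64_t window)
    {
        int64_t gcMax = 0;        // most GC time observed in any window
        int64_t gcInWindow = 0;   // GC time of slices[tail..head]
        size_t tail = 0;
        for (size_t head = 0; head < slices.size(); head++) {
            gcInWindow += slices[head].end - slices[head].start;
            int64_t windowStart = slices[head].end - window;
            // Evict slices that ended before the window opened.
            while (slices[tail].end <= windowStart) {
                gcInWindow -= slices[tail].end - slices[tail].start;
                tail++;
            }
            // The oldest remaining slice may straddle the window boundary.
            int64_t overhang =
                std::max<int64_t>(0, windowStart - slices[tail].start);
            gcMax = std::max(gcMax, gcInWindow - overhang);
        }
        return std::max(0.0, 1.0 - double(gcMax) / double(window));
    }

For example, two 10 ms slices falling inside one 50 ms window yield an MMU of 60%: the mutator ran for at least 30 of every 50 ms.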

diff --git a/toolkit/content/aboutSupport.js b/toolkit/content/aboutSupport.js index 33a9bb8671d6..1e648dad65da 100644 --- a/toolkit/content/aboutSupport.js +++ b/toolkit/content/aboutSupport.js @@ -116,6 +116,7 @@ window.onload = function () { populatePreferencesSection(); populateExtensionsSection(); populateGraphicsSection(); + populateJavaScriptSection(); } function populateExtensionsSection() { @@ -382,6 +383,13 @@ function populateGraphicsSection() { ]); } +function populateJavaScriptSection() { + let enabled = window.QueryInterface(Ci.nsIInterfaceRequestor) + .getInterface(Ci.nsIDOMWindowUtils) + .isIncrementalGCEnabled(); + document.getElementById("javascript-incremental-gc").textContent = enabled ? "1" : "0"; +} + function getPrefValue(aName) { let value = ""; let type = Services.prefs.getPrefType(aName); diff --git a/toolkit/content/aboutSupport.xhtml b/toolkit/content/aboutSupport.xhtml index fd02b55cae6b..e92194bdcc63 100644 --- a/toolkit/content/aboutSupport.xhtml +++ b/toolkit/content/aboutSupport.xhtml @@ -243,6 +243,24 @@ + <!-- - - - - - - - - - - - - - - - - - - - - - - - --> + + <h2 class="major-section"> + &aboutSupport.jsTitle; + </h2> + + <table> + <tbody> + <tr> + <th class="column"> + &aboutSupport.jsIncrementalGC; + </th> + <td id="javascript-incremental-gc"> + </td> + </tr> + </tbody> + </table> + diff --git a/toolkit/locales/en-US/chrome/global/aboutSupport.dtd b/toolkit/locales/en-US/chrome/global/aboutSupport.dtd index da50829c6e2a..bfe1095f8132 100644 --- a/toolkit/locales/en-US/chrome/global/aboutSupport.dtd +++ b/toolkit/locales/en-US/chrome/global/aboutSupport.dtd @@ -44,6 +44,9 @@ variant of aboutSupport.showDir.label. --> + +<!ENTITY aboutSupport.jsTitle "JavaScript"> +<!ENTITY aboutSupport.jsIncrementalGC "Incremental GC">
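One subtlety from earlier in the patch bears spelling out: the new else-branch in xpc_UnmarkGrayObject applies js::IncrementalReferenceBarrier when an incremental GC is in progress. Once marking is interleaved with the mutator, a reference XPConnect hands out may belong to an object the marker has already visited, so it must be re-marked before it escapes. A minimal sketch of this read-barrier pattern, with stand-ins for the engine hooks:

    // Stand-ins for engine state; the real checks are
    // js::IsIncrementalBarrierNeededOnObject / js::IncrementalReferenceBarrier.
    static bool gIncrementalGCInProgress = false;

    static void MarkObjectBlack(void *obj)
    {
        // Hypothetical: push obj onto the collector's mark stack as black.
        (void) obj;
    }

    // Read barrier: before an object escapes to running C++/JS, make sure
    // the in-progress incremental mark cannot miss it. Without this, a
    // reference copied out of an already-marked object could survive the
    // cycle unmarked and be freed while still reachable.
    static void ExposeObject(void *obj)
    {
        if (obj && gIncrementalGCInProgress)
            MarkObjectBlack(obj);
    }

This is the usual snapshot-style discipline for incremental collectors: the barrier may keep some garbage alive one cycle longer, but it guarantees that nothing reachable is ever missed.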