Bug 641025 - Incremental GC (r=igor,smaug,roc,cdleary,gregor)

This commit is contained in:
Bill McCloskey 2012-02-17 14:35:20 -08:00
Parent ddb9b610b8
Commit d15702e36f
71 changed files: 2806 additions and 966 deletions

Просмотреть файл

@ -109,7 +109,6 @@ _TEST_FILES = \
test_bug495300.html \
test_bug686942.html \
test_can_play_type.html \
test_closing_connections.html \
test_constants.html \
test_controls.html \
test_currentTime.html \
@ -176,6 +175,8 @@ endif
# test_mixed_principals.html
# Disabled since we don't play Wave files standalone, for now
# test_audioDocumentTitle.html
# Bug 634564:
# test_closing_connections.html \
# sample files
_TEST_FILES += \

Просмотреть файл

@ -2091,6 +2091,13 @@ nsDOMWindowUtils::GetFileReferences(const nsAString& aDatabaseName,
return NS_OK;
}
// Report whether incremental GC is currently enabled on the JS runtime
// backing |cx|. The answer is written to *aResult; this call cannot fail.
NS_IMETHODIMP
nsDOMWindowUtils::IsIncrementalGCEnabled(JSContext* cx, bool* aResult)
{
*aResult = js::IsIncrementalGCEnabled(JS_GetRuntime(cx));
return NS_OK;
}
NS_IMETHODIMP
nsDOMWindowUtils::StartPCCountProfiling(JSContext* cx)
{

Просмотреть файл

@ -135,6 +135,9 @@ static PRLogModuleInfo* gJSDiagnostics;
// doing the first GC.
#define NS_FIRST_GC_DELAY 10000 // ms
// Maximum amount of time that should elapse between incremental GC slices
#define NS_INTERSLICE_GC_DELAY 100 // ms
// The amount of time we wait between a request to CC (after GC ran)
// and doing the actual CC.
#define NS_CC_DELAY 5000 // ms
@ -154,6 +157,9 @@ static nsITimer *sCCTimer;
static PRTime sLastCCEndTime;
static bool sGCHasRun;
static bool sCCLockedOut;
static js::GCSliceCallback sPrevGCSliceCallback;
// The number of currently pending document loads. This count isn't
// guaranteed to always reflect reality and can't easily as we don't
@ -3274,6 +3280,11 @@ nsJSContext::CycleCollectNow(nsICycleCollectorListener *aListener,
return;
}
if (sCCLockedOut) {
// We're in the middle of an incremental GC; finish it first
nsJSContext::GarbageCollectNow(js::gcreason::CC_FORCED, nsGCNormal);
}
SAMPLE_LABEL("GC", "CycleCollectNow");
NS_TIME_FUNCTION_MIN(1.0);
@ -3357,7 +3368,7 @@ GCTimerFired(nsITimer *aTimer, void *aClosure)
NS_RELEASE(sGCTimer);
uintptr_t reason = reinterpret_cast<uintptr_t>(aClosure);
nsJSContext::GarbageCollectNow(static_cast<js::gcreason::Reason>(reason), nsGCNormal);
nsJSContext::GarbageCollectNow(static_cast<js::gcreason::Reason>(reason), nsGCIncremental);
}
void
@ -3375,6 +3386,9 @@ CCTimerFired(nsITimer *aTimer, void *aClosure)
if (sDidShutdown) {
return;
}
if (sCCLockedOut) {
return;
}
++sCCTimerFireCount;
if (sCCTimerFireCount < (NS_CC_DELAY / NS_CC_SKIPPABLE_DELAY)) {
PRUint32 suspected = nsCycleCollector_suspectedCount();
@ -3443,7 +3457,7 @@ nsJSContext::LoadEnd()
// static
void
nsJSContext::PokeGC(js::gcreason::Reason aReason)
nsJSContext::PokeGC(js::gcreason::Reason aReason, int aDelay)
{
if (sGCTimer) {
// There's already a timer for GC'ing, just return
@ -3460,9 +3474,11 @@ nsJSContext::PokeGC(js::gcreason::Reason aReason)
static bool first = true;
sGCTimer->InitWithFuncCallback(GCTimerFired, reinterpret_cast<void *>(aReason),
first
? NS_FIRST_GC_DELAY
: NS_GC_DELAY,
aDelay
? aDelay
: (first
? NS_FIRST_GC_DELAY
: NS_GC_DELAY),
nsITimer::TYPE_ONE_SHOT);
first = false;
@ -3549,11 +3565,11 @@ nsJSContext::GC(js::gcreason::Reason aReason)
}
static void
DOMGCFinishedCallback(JSRuntime *rt, JSCompartment *comp, const char *status)
DOMGCSliceCallback(JSRuntime *aRt, js::GCProgress aProgress, const js::GCDescription &aDesc)
{
NS_ASSERTION(NS_IsMainThread(), "GCs must run on the main thread");
if (sPostGCEventsToConsole) {
if (aDesc.logMessage && sPostGCEventsToConsole) {
PRTime now = PR_Now();
PRTime delta = 0;
if (sFirstCollectionTime) {
@ -3565,45 +3581,66 @@ DOMGCFinishedCallback(JSRuntime *rt, JSCompartment *comp, const char *status)
NS_NAMED_LITERAL_STRING(kFmt, "GC(T+%.1f) %s");
nsString msg;
msg.Adopt(nsTextFormatter::smprintf(kFmt.get(),
double(delta) / PR_USEC_PER_SEC, status));
double(delta) / PR_USEC_PER_SEC,
aDesc.logMessage));
nsCOMPtr<nsIConsoleService> cs = do_GetService(NS_CONSOLESERVICE_CONTRACTID);
if (cs) {
cs->LogStringMessage(msg.get());
}
}
sCCollectedWaitingForGC = 0;
sCleanupSinceLastGC = false;
// Prevent cycle collections during incremental GC.
if (aProgress == js::GC_CYCLE_BEGIN) {
sCCLockedOut = true;
} else if (aProgress == js::GC_CYCLE_END) {
sCCLockedOut = false;
}
if (sGCTimer) {
// If we were waiting for a GC to happen, kill the timer.
// The GC has more work to do, so schedule another GC slice.
if (aProgress == js::GC_SLICE_END) {
nsJSContext::KillGCTimer();
nsJSContext::KillCCTimer();
// If this is a compartment GC, restart it. We still want
// a full GC to happen. Compartment GCs usually happen as a
// result of last-ditch or MaybeGC. In both cases its
// probably a time of heavy activity and we want to delay
// the full GC, but we do want it to happen eventually.
if (comp) {
nsJSContext::PokeGC(js::gcreason::POST_COMPARTMENT);
nsJSContext::PokeGC(js::gcreason::INTER_SLICE_GC, NS_INTERSLICE_GC_DELAY);
}
// We poked the GC, so we can kill any pending CC here.
nsJSContext::KillCCTimer();
if (aProgress == js::GC_CYCLE_END) {
sCCollectedWaitingForGC = 0;
sCleanupSinceLastGC = false;
if (sGCTimer) {
// If we were waiting for a GC to happen, kill the timer.
nsJSContext::KillGCTimer();
// If this is a compartment GC, restart it. We still want
// a full GC to happen. Compartment GCs usually happen as a
// result of last-ditch or MaybeGC. In both cases its
// probably a time of heavy activity and we want to delay
// the full GC, but we do want it to happen eventually.
if (aDesc.isCompartment) {
nsJSContext::PokeGC(js::gcreason::POST_COMPARTMENT);
// We poked the GC, so we can kill any pending CC here.
nsJSContext::KillCCTimer();
}
} else {
// If this was a full GC, poke the CC to run soon.
if (!aDesc.isCompartment) {
sGCHasRun = true;
nsJSContext::MaybePokeCC();
}
}
} else {
// If this was a full GC, poke the CC to run soon.
if (!comp) {
sGCHasRun = true;
nsJSContext::MaybePokeCC();
// If we didn't end up scheduling a GC, make sure that we release GC buffers
// soon after canceling previous shrinking attempt.
nsJSContext::KillShrinkGCBuffersTimer();
if (!sGCTimer) {
nsJSContext::PokeShrinkGCBuffers();
}
}
// If we didn't end up scheduling a GC, make sure that we release GC buffers
// soon after canceling previous shrinking attempt
nsJSContext::KillShrinkGCBuffersTimer();
if (!sGCTimer) {
nsJSContext::PokeShrinkGCBuffers();
}
if (sPrevGCSliceCallback)
(*sPrevGCSliceCallback)(aRt, aProgress, aDesc);
}
// Script object mananagement - note duplicate implementation
@ -3697,6 +3734,7 @@ nsJSRuntime::Startup()
// initialize all our statics, so that we can restart XPCOM
sGCTimer = sCCTimer = nsnull;
sGCHasRun = false;
sCCLockedOut = false;
sLastCCEndTime = 0;
sPendingLoadCount = 0;
sLoadingInProgress = false;
@ -3768,10 +3806,27 @@ SetMemoryMaxPrefChangedCallback(const char* aPrefName, void* aClosure)
static int
SetMemoryGCModePrefChangedCallback(const char* aPrefName, void* aClosure)
{
bool enableCompartmentGC = Preferences::GetBool(aPrefName);
JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_MODE, enableCompartmentGC
? JSGC_MODE_COMPARTMENT
: JSGC_MODE_GLOBAL);
PRBool enableCompartmentGC = Preferences::GetBool("javascript.options.mem.gc_per_compartment");
PRBool enableIncrementalGC = Preferences::GetBool("javascript.options.mem.gc_incremental");
JSGCMode mode;
if (enableIncrementalGC) {
mode = JSGC_MODE_INCREMENTAL;
} else if (enableCompartmentGC) {
mode = JSGC_MODE_COMPARTMENT;
} else {
mode = JSGC_MODE_GLOBAL;
}
JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_MODE, mode);
return 0;
}
// Pref-change callback: propagate the incremental GC slice time budget (ms)
// into the JS engine. Values outside (0, 100000) — including the -1 default
// returned when the pref is unset — are ignored.
static int
SetMemoryGCSliceTimePrefChangedCallback(const char* aPrefName, void* aClosure)
{
PRInt32 pref = Preferences::GetInt(aPrefName, -1);
// handle overflow and negative pref values
if (pref > 0 && pref < 100000)
JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_SLICE_TIME_BUDGET, pref);
return 0;
}
@ -3858,7 +3913,7 @@ nsJSRuntime::Init()
// Let's make sure that our main thread is the same as the xpcom main thread.
NS_ASSERTION(NS_IsMainThread(), "bad");
::JS_SetGCFinishedCallback(sRuntime, DOMGCFinishedCallback);
sPrevGCSliceCallback = js::SetGCSliceCallback(sRuntime, DOMGCSliceCallback);
JSSecurityCallbacks *callbacks = JS_GetRuntimeSecurityCallbacks(sRuntime);
NS_ASSERTION(callbacks, "SecMan should have set security callbacks!");
@ -3903,6 +3958,16 @@ nsJSRuntime::Init()
SetMemoryGCModePrefChangedCallback("javascript.options.mem.gc_per_compartment",
nsnull);
Preferences::RegisterCallback(SetMemoryGCModePrefChangedCallback,
"javascript.options.mem.gc_incremental");
SetMemoryGCModePrefChangedCallback("javascript.options.mem.gc_incremental",
nsnull);
Preferences::RegisterCallback(SetMemoryGCSliceTimePrefChangedCallback,
"javascript.options.mem.gc_incremental_slice_ms");
SetMemoryGCSliceTimePrefChangedCallback("javascript.options.mem.gc_incremental_slice_ms",
nsnull);
nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
if (!obs)
return NS_ERROR_FAILURE;

Просмотреть файл

@ -188,7 +188,7 @@ public:
static void CycleCollectNow(nsICycleCollectorListener *aListener = nsnull,
PRInt32 aExtraForgetSkippableCalls = 0);
static void PokeGC(js::gcreason::Reason aReason);
static void PokeGC(js::gcreason::Reason aReason, int aDelay = 0);
static void KillGCTimer();
static void PokeShrinkGCBuffers();

Просмотреть файл

@ -70,7 +70,7 @@ interface nsIDOMFile;
interface nsIFile;
interface nsIDOMTouch;
[scriptable, uuid(ab6e9c71-8aa1-40bb-8bf9-65e16429055f)]
[scriptable, uuid(73b48170-55d5-11e1-b86c-0800200c9a66)]
interface nsIDOMWindowUtils : nsISupports {
/**
@ -992,6 +992,12 @@ interface nsIDOMWindowUtils : nsISupports {
[optional] out long aDBRefCnt,
[optional] out long aSliceRefCnt);
/**
* Return whether incremental GC has been disabled due to a binary add-on.
*/
[implicit_jscontext]
boolean isIncrementalGCEnabled();
/**
* Begin opcode-level profiling of all JavaScript execution in the window's
* runtime.

Просмотреть файл

@ -179,7 +179,7 @@ CreateNPObjectMember(NPP npp, JSContext *cx, JSObject *obj, NPObject *npobj,
static JSClass sNPObjectJSWrapperClass =
{
NPRUNTIME_JSCLASS_NAME,
JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE | JSCLASS_NEW_ENUMERATE,
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE | JSCLASS_NEW_ENUMERATE,
NPObjWrapper_AddProperty, NPObjWrapper_DelProperty,
NPObjWrapper_GetProperty, NPObjWrapper_SetProperty,
(JSEnumerateOp)NPObjWrapper_newEnumerate,

Просмотреть файл

@ -233,6 +233,8 @@ nsJSEventListener::HandleEvent(nsIDOMEvent* aEvent)
"JSEventListener has wrong script context?");
#endif
nsCOMPtr<nsIVariant> vrv;
xpc_UnmarkGrayObject(mScopeObject);
xpc_UnmarkGrayObject(mHandler);
rv = mContext->CallEventHandler(mTarget, mScopeObject, mHandler, iargv,
getter_AddRefs(vrv));

Просмотреть файл

@ -107,6 +107,9 @@ struct Listener : PRCList
// Unlink and free |aListener|. If an incremental GC is in progress, fire a
// pre-write barrier on the stored listener value first so the collector
// still sees the reference that is about to be dropped.
static void
Remove(JSContext* aCx, Listener* aListener)
{
if (js::IsIncrementalBarrierNeeded(aCx))
js::IncrementalValueBarrier(aListener->mListenerVal);
PR_REMOVE_LINK(aListener);
JS_free(aCx, aListener);
}

Просмотреть файл

@ -300,7 +300,7 @@ private:
JSClass Worker::sClass = {
"Worker",
JSCLASS_HAS_PRIVATE,
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize, NULL, NULL, NULL,
NULL, NULL, NULL, Trace, NULL
@ -415,7 +415,7 @@ private:
JSClass ChromeWorker::sClass = {
"ChromeWorker",
JSCLASS_HAS_PRIVATE,
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize, NULL, NULL, NULL,
NULL, NULL, NULL, Trace, NULL

Просмотреть файл

@ -799,7 +799,7 @@ private:
JSClass DedicatedWorkerGlobalScope::sClass = {
"DedicatedWorkerGlobalScope",
JSCLASS_GLOBAL_FLAGS | JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE,
JSCLASS_GLOBAL_FLAGS | JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE,
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, reinterpret_cast<JSResolveOp>(Resolve), JS_ConvertStub,
Finalize, NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL

Просмотреть файл

@ -220,7 +220,7 @@ private:
JSClass XMLHttpRequestUpload::sClass = {
"XMLHttpRequestUpload",
JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize,
NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL
@ -769,7 +769,7 @@ private:
JSClass XMLHttpRequest::sClass = {
"XMLHttpRequest",
JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize,
NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL

Просмотреть файл

@ -107,8 +107,8 @@
#define JSD_AUTOREG_ENTRY "JSDebugger Startup Observer"
#define JSD_STARTUP_ENTRY "JSDebugger Startup Observer"
static JSBool
jsds_GCCallbackProc (JSContext *cx, JSGCStatus status);
static void
jsds_GCSliceCallbackProc (JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc);
/*******************************************************************************
* global vars
@ -128,9 +128,9 @@ PRUint32 gContextCount = 0;
PRUint32 gFrameCount = 0;
#endif
static jsdService *gJsds = 0;
static JSGCCallback gLastGCProc = jsds_GCCallbackProc;
static JSGCStatus gGCStatus = JSGC_END;
static jsdService *gJsds = 0;
static js::GCSliceCallback gPrevGCSliceCallback = jsds_GCSliceCallbackProc;
static bool gGCRunning = false;
static struct DeadScript {
PRCList links;
@ -460,11 +460,8 @@ jsds_FilterHook (JSDContext *jsdc, JSDThreadState *state)
*******************************************************************************/
static void
jsds_NotifyPendingDeadScripts (JSContext *cx)
jsds_NotifyPendingDeadScripts (JSRuntime *rt)
{
#ifdef CAUTIOUS_SCRIPTHOOK
JSRuntime *rt = JS_GetRuntime(cx);
#endif
jsdService *jsds = gJsds;
nsCOMPtr<jsdIScriptHook> hook;
@ -511,31 +508,23 @@ jsds_NotifyPendingDeadScripts (JSContext *cx)
}
}
static JSBool
jsds_GCCallbackProc (JSContext *cx, JSGCStatus status)
static void
jsds_GCSliceCallbackProc (JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc)
{
#ifdef DEBUG_verbose
printf ("new gc status is %i\n", status);
#endif
if (status == JSGC_END) {
/* just to guard against reentering. */
gGCStatus = JSGC_BEGIN;
if (progress == js::GC_CYCLE_END || progress == js::GC_SLICE_END) {
NS_ASSERTION(gGCRunning, "GC slice callback was missed");
while (gDeadScripts)
jsds_NotifyPendingDeadScripts (cx);
jsds_NotifyPendingDeadScripts (rt);
gGCRunning = false;
} else {
NS_ASSERTION(!gGCRunning, "should not re-enter GC");
gGCRunning = true;
}
gGCStatus = status;
if (gLastGCProc && !gLastGCProc (cx, status)) {
/*
* If gLastGCProc returns false, then the GC will abort without making
* another callback with status=JSGC_END, so set the status to JSGC_END
* here.
*/
gGCStatus = JSGC_END;
return JS_FALSE;
}
return JS_TRUE;
if (gPrevGCSliceCallback)
(*gPrevGCSliceCallback)(rt, progress, desc);
}
static uintN
@ -751,7 +740,7 @@ jsds_ScriptHookProc (JSDContext* jsdc, JSDScript* jsdscript, JSBool creating,
jsdis->Invalidate();
if (gGCStatus == JSGC_END) {
if (!gGCRunning) {
nsCOMPtr<jsdIScriptHook> hook;
gJsds->GetScriptHook(getter_AddRefs(hook));
if (!hook)
@ -2580,9 +2569,9 @@ jsdService::ActivateDebugger (JSRuntime *rt)
mRuntime = rt;
if (gLastGCProc == jsds_GCCallbackProc)
if (gPrevGCSliceCallback == jsds_GCSliceCallbackProc)
/* condition indicates that the callback proc has not been set yet */
gLastGCProc = JS_SetGCCallbackRT (rt, jsds_GCCallbackProc);
gPrevGCSliceCallback = js::SetGCSliceCallback (rt, jsds_GCSliceCallbackProc);
mCx = JSD_DebuggerOnForUser (rt, NULL, NULL);
if (!mCx)
@ -2652,19 +2641,14 @@ jsdService::Off (void)
return NS_ERROR_NOT_INITIALIZED;
if (gDeadScripts) {
if (gGCStatus != JSGC_END)
if (gGCRunning)
return NS_ERROR_NOT_AVAILABLE;
JSContext *cx = JSD_GetDefaultJSContext(mCx);
while (gDeadScripts)
jsds_NotifyPendingDeadScripts (cx);
jsds_NotifyPendingDeadScripts (JS_GetRuntime(cx));
}
/*
if (gLastGCProc != jsds_GCCallbackProc)
JS_SetGCCallbackRT (mRuntime, gLastGCProc);
*/
DeactivateDebugger();
#ifdef DEBUG
@ -3374,7 +3358,7 @@ jsdService::~jsdService()
mThrowHook = nsnull;
mTopLevelHook = nsnull;
mFunctionHook = nsnull;
gGCStatus = JSGC_END;
gGCRunning = false;
Off();
gJsds = nsnull;
}

Просмотреть файл

@ -119,7 +119,6 @@ CPPSRCS = \
jsfun.cpp \
jsgc.cpp \
jsgcmark.cpp \
jsgcstats.cpp \
jscrashreport.cpp \
jshash.cpp \
jsinfer.cpp \
@ -193,7 +192,6 @@ INSTALLED_HEADERS = \
jsfriendapi.h \
jsgc.h \
jscell.h \
jsgcstats.h \
jshash.h \
jslock.h \
json.h \

Просмотреть файл

@ -146,7 +146,7 @@ HashableValue::equals(const HashableValue &other) const
Class MapObject::class_ = {
"Map",
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_CACHED_PROTO(JSProto_Map),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
@ -297,7 +297,7 @@ js_InitMapClass(JSContext *cx, JSObject *obj)
Class SetObject::class_ = {
"Set",
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_CACHED_PROTO(JSProto_Set),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */

Просмотреть файл

@ -255,7 +255,7 @@ static JSClass sCDataProtoClass = {
static JSClass sCTypeClass = {
"CType",
JSCLASS_HAS_RESERVED_SLOTS(CTYPE_SLOTS),
JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(CTYPE_SLOTS),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CType::Finalize,
NULL, NULL, CType::ConstructData, CType::ConstructData, NULL,
@ -272,7 +272,7 @@ static JSClass sCDataClass = {
static JSClass sCClosureClass = {
"CClosure",
JSCLASS_HAS_RESERVED_SLOTS(CCLOSURE_SLOTS),
JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(CCLOSURE_SLOTS),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CClosure::Finalize,
NULL, NULL, NULL, NULL, NULL, NULL, CClosure::Trace, NULL

Просмотреть файл

@ -266,6 +266,31 @@ HeapId::operator=(const HeapId &v)
return *this;
}
// Return the wrapped value, first triggering a read barrier on the GC thing
// it refers to (object or string) so an in-progress incremental GC marks it
// before the caller uses it. Non-markable values need no barrier.
inline const Value &
ReadBarrieredValue::get() const
{
if (value.isObject())
JSObject::readBarrier(&value.toObject());
else if (value.isString())
JSString::readBarrier(value.toString());
else
JS_ASSERT(!value.isMarkable());
return value;
}
// Implicit conversion; equivalent to get(), including the read barrier.
inline
ReadBarrieredValue::operator const Value &() const
{
return get();
}
// Barriered convenience accessor; the stored value is expected to hold an
// object (the conversion goes through Value::toObject).
inline JSObject &
ReadBarrieredValue::toObject() const
{
return get().toObject();
}
} /* namespace js */
#endif /* jsgc_barrier_inl_h___ */

Просмотреть файл

@ -456,6 +456,20 @@ class ReadBarriered
operator MarkablePtr<U>() const { return MarkablePtr<U>(value); }
};
// A Value wrapper whose accessors perform an incremental-GC read barrier on
// every access; accessor bodies are defined in the barrier -inl header.
class ReadBarrieredValue
{
// Backing value; defaults to undefined.
Value value;
public:
ReadBarrieredValue() : value(UndefinedValue()) {}
ReadBarrieredValue(const Value &value) : value(value) {}
inline const Value &get() const;
inline operator const Value &() const;
inline JSObject &toObject() const;
};
}
#endif /* jsgc_barrier_h___ */

Просмотреть файл

@ -38,9 +38,10 @@
* ***** END LICENSE BLOCK ***** */
#include <stdio.h>
#include <ctype.h>
#include <stdarg.h>
#include "jscntxt.h"
#include "jscompartment.h"
#include "jscrashformat.h"
#include "jscrashreport.h"
#include "jsprf.h"
@ -69,78 +70,114 @@ ExplainReason(gcreason::Reason reason)
}
}
Statistics::ColumnInfo::ColumnInfo(const char *title, double t, double total)
: title(title)
void
Statistics::fmt(const char *f, ...)
{
JS_snprintf(str, sizeof(str), "%.1f", t);
JS_snprintf(totalStr, sizeof(totalStr), "%.1f", total);
width = 6;
}
va_list va;
size_t off = strlen(buffer);
Statistics::ColumnInfo::ColumnInfo(const char *title, double t)
: title(title)
{
JS_snprintf(str, sizeof(str), "%.1f", t);
strcpy(totalStr, "n/a");
width = 6;
va_start(va, f);
JS_vsnprintf(buffer + off, BUFFER_SIZE - off, f, va);
va_end(va);
}
Statistics::ColumnInfo::ColumnInfo(const char *title, unsigned int data)
: title(title)
{
JS_snprintf(str, sizeof(str), "%d", data);
strcpy(totalStr, "n/a");
width = 4;
}
Statistics::ColumnInfo::ColumnInfo(const char *title, const char *data)
: title(title)
{
JS_ASSERT(strlen(data) < sizeof(str));
strcpy(str, data);
strcpy(totalStr, "n/a ");
width = 0;
}
static const int NUM_COLUMNS = 17;
void
Statistics::makeTable(ColumnInfo *cols)
Statistics::fmtIfNonzero(const char *name, double t)
{
int i = 0;
if (t) {
if (needComma)
fmt(", ");
fmt("%s: %.1f", name, t);
needComma = true;
}
}
cols[i++] = ColumnInfo("Type", compartment ? "Comp" : "Glob");
// Append a comma-separated "name: ms" entry to the output buffer for each GC
// phase in |times| with a nonzero duration; zero-duration phases are omitted
// entirely (see fmtIfNonzero).
void
Statistics::formatPhases(int64_t *times)
{
needComma = false;
fmtIfNonzero("mark", t(times[PHASE_MARK]));
fmtIfNonzero("mark-roots", t(times[PHASE_MARK_ROOTS]));
fmtIfNonzero("mark-delayed", t(times[PHASE_MARK_DELAYED]));
fmtIfNonzero("mark-other", t(times[PHASE_MARK_OTHER]));
fmtIfNonzero("sweep", t(times[PHASE_SWEEP]));
fmtIfNonzero("sweep-obj", t(times[PHASE_SWEEP_OBJECT]));
fmtIfNonzero("sweep-string", t(times[PHASE_SWEEP_STRING]));
fmtIfNonzero("sweep-script", t(times[PHASE_SWEEP_SCRIPT]));
fmtIfNonzero("sweep-shape", t(times[PHASE_SWEEP_SHAPE]));
fmtIfNonzero("discard-code", t(times[PHASE_DISCARD_CODE]));
fmtIfNonzero("discard-analysis", t(times[PHASE_DISCARD_ANALYSIS]));
fmtIfNonzero("xpconnect", t(times[PHASE_XPCONNECT]));
fmtIfNonzero("deallocate", t(times[PHASE_DESTROY]));
}
cols[i++] = ColumnInfo("Total", t(PHASE_GC), total(PHASE_GC));
cols[i++] = ColumnInfo("Wait", beginDelay(PHASE_MARK, PHASE_GC));
cols[i++] = ColumnInfo("Mark", t(PHASE_MARK), total(PHASE_MARK));
cols[i++] = ColumnInfo("Sweep", t(PHASE_SWEEP), total(PHASE_SWEEP));
cols[i++] = ColumnInfo("FinObj", t(PHASE_SWEEP_OBJECT), total(PHASE_SWEEP_OBJECT));
cols[i++] = ColumnInfo("FinStr", t(PHASE_SWEEP_STRING), total(PHASE_SWEEP_STRING));
cols[i++] = ColumnInfo("FinScr", t(PHASE_SWEEP_SCRIPT), total(PHASE_SWEEP_SCRIPT));
cols[i++] = ColumnInfo("FinShp", t(PHASE_SWEEP_SHAPE), total(PHASE_SWEEP_SHAPE));
cols[i++] = ColumnInfo("DisCod", t(PHASE_DISCARD_CODE), total(PHASE_DISCARD_CODE));
cols[i++] = ColumnInfo("DisAnl", t(PHASE_DISCARD_ANALYSIS), total(PHASE_DISCARD_ANALYSIS));
cols[i++] = ColumnInfo("XPCnct", t(PHASE_XPCONNECT), total(PHASE_XPCONNECT));
cols[i++] = ColumnInfo("Destry", t(PHASE_DESTROY), total(PHASE_DESTROY));
cols[i++] = ColumnInfo("End", endDelay(PHASE_GC, PHASE_DESTROY));
/* Except for the first and last, slices of less than 12ms are not reported. */
static const int64_t SLICE_MIN_REPORT_TIME = 12 * PRMJ_USEC_PER_MSEC;
cols[i++] = ColumnInfo("+Chu", counts[STAT_NEW_CHUNK]);
cols[i++] = ColumnInfo("-Chu", counts[STAT_DESTROY_CHUNK]);
const char *
Statistics::formatData()
{
buffer[0] = 0x00;
cols[i++] = ColumnInfo("Reason", ExplainReason(triggerReason));
int64_t total = 0, longest = 0;
JS_ASSERT(i == NUM_COLUMNS);
for (SliceData *slice = slices.begin(); slice != slices.end(); slice++) {
total += slice->duration();
if (slice->duration() > longest)
longest = slice->duration();
}
double mmu20 = computeMMU(20 * PRMJ_USEC_PER_MSEC);
double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
fmt("TotalTime: %.1fms, Type: %s", t(total), compartment ? "compartment" : "global");
fmt(", MMU(20ms): %d%%, MMU(50ms): %d%%", int(mmu20 * 100), int(mmu50 * 100));
if (slices.length() > 1)
fmt(", MaxPause: %.1f", t(longest));
else
fmt(", Reason: %s", ExplainReason(slices[0].reason));
if (wasReset)
fmt(", ***RESET***");
fmt(", +chunks: %d, -chunks: %d\n", counts[STAT_NEW_CHUNK], counts[STAT_DESTROY_CHUNK]);
if (slices.length() > 1) {
for (size_t i = 0; i < slices.length(); i++) {
int64_t width = slices[i].duration();
if (i != 0 && i != slices.length() - 1 && width < SLICE_MIN_REPORT_TIME)
continue;
fmt(" Slice %d @ %.1fms (Pause: %.1f, Reason: %s): ",
i,
t(slices[i].end - slices[0].start),
t(width),
ExplainReason(slices[i].reason));
formatPhases(slices[i].phaseTimes);
fmt("\n");
}
fmt(" Totals: ");
}
formatPhases(phaseTimes);
fmt("\n");
return buffer;
}
Statistics::Statistics(JSRuntime *rt)
: runtime(rt),
triggerReason(gcreason::NO_REASON)
startupTime(PRMJ_Now()),
fp(NULL),
fullFormat(false),
compartment(NULL),
wasReset(false),
needComma(false)
{
PodArrayZero(phaseTotals);
PodArrayZero(counts);
PodArrayZero(totals);
startupTime = PRMJ_Now();
char *env = getenv("MOZ_GCTIMER");
if (!env || strcmp(env, "none") == 0) {
@ -159,14 +196,6 @@ Statistics::Statistics(JSRuntime *rt)
fp = fopen(env, "a");
JS_ASSERT(fp);
fprintf(fp, " AppTime");
ColumnInfo cols[NUM_COLUMNS];
makeTable(cols);
for (int i = 0; i < NUM_COLUMNS; i++)
fprintf(fp, ", %*s", cols[i].width, cols[i].title);
fprintf(fp, "\n");
}
}
@ -174,13 +203,9 @@ Statistics::~Statistics()
{
if (fp) {
if (fullFormat) {
fprintf(fp, "------>TOTAL");
ColumnInfo cols[NUM_COLUMNS];
makeTable(cols);
for (int i = 0; i < NUM_COLUMNS && cols[i].totalStr[0]; i++)
fprintf(fp, ", %*s", cols[i].width, cols[i].totalStr);
fprintf(fp, "\n");
buffer[0] = 0x00;
formatPhases(phaseTotals);
fprintf(fp, "TOTALS\n%s\n\n-------\n", buffer);
}
if (fp != stdout && fp != stderr)
@ -188,120 +213,65 @@ Statistics::~Statistics()
}
}
// Minimal GC state snapshot saved into crash-report data at GC start
// (see crash::SaveCrashData usage). NOTE(review): isRegen is not set in the
// code visible here — confirm its producer before relying on it.
struct GCCrashData
{
int isRegen;
int isCompartment;
};
void
Statistics::beginGC(JSCompartment *comp, gcreason::Reason reason)
{
compartment = comp;
PodArrayZero(phaseStarts);
PodArrayZero(phaseEnds);
PodArrayZero(phaseTimes);
triggerReason = reason;
beginPhase(PHASE_GC);
Probes::GCStart();
GCCrashData crashData;
crashData.isCompartment = !!compartment;
crash::SaveCrashData(crash::JS_CRASH_TAG_GC, &crashData, sizeof(crashData));
}
double
Statistics::t(Phase phase)
Statistics::t(int64_t t)
{
return double(phaseTimes[phase]) / PRMJ_USEC_PER_MSEC;
return double(t) / PRMJ_USEC_PER_MSEC;
}
double
Statistics::total(Phase phase)
int64_t
Statistics::gcDuration()
{
return double(totals[phase]) / PRMJ_USEC_PER_MSEC;
}
double
Statistics::beginDelay(Phase phase1, Phase phase2)
{
return double(phaseStarts[phase1] - phaseStarts[phase2]) / PRMJ_USEC_PER_MSEC;
}
double
Statistics::endDelay(Phase phase1, Phase phase2)
{
return double(phaseEnds[phase1] - phaseEnds[phase2]) / PRMJ_USEC_PER_MSEC;
}
void
Statistics::statsToString(char *buffer, size_t size)
{
JS_ASSERT(size);
buffer[0] = 0x00;
ColumnInfo cols[NUM_COLUMNS];
makeTable(cols);
size_t pos = 0;
for (int i = 0; i < NUM_COLUMNS; i++) {
int len = strlen(cols[i].title) + 1 + strlen(cols[i].str);
if (i > 0)
len += 2;
if (pos + len >= size)
break;
if (i > 0)
strcat(buffer, ", ");
strcat(buffer, cols[i].title);
strcat(buffer, ":");
strcat(buffer, cols[i].str);
pos += len;
}
return slices.back().end - slices[0].start;
}
void
Statistics::printStats()
{
if (fullFormat) {
fprintf(fp, "%12.0f", double(phaseStarts[PHASE_GC] - startupTime) / PRMJ_USEC_PER_MSEC);
ColumnInfo cols[NUM_COLUMNS];
makeTable(cols);
for (int i = 0; i < NUM_COLUMNS; i++)
fprintf(fp, ", %*s", cols[i].width, cols[i].str);
fprintf(fp, "\n");
fprintf(fp, "GC(T+%.3fs) %s\n",
t(slices[0].start - startupTime) / 1000.0,
formatData());
} else {
fprintf(fp, "%f %f %f\n",
t(PHASE_GC), t(PHASE_MARK), t(PHASE_SWEEP));
t(gcDuration()),
t(phaseTimes[PHASE_MARK]),
t(phaseTimes[PHASE_SWEEP]));
}
fflush(fp);
}
// Reset per-cycle statistics at the start of a new GC cycle. Called from
// beginSlice() when the first slice of a cycle begins.
void
Statistics::beginGC()
{
PodArrayZero(phaseStarts);
PodArrayZero(phaseTimes);
slices.clearAndFree();
wasReset = false;
Probes::GCStart();
}
void
Statistics::endGC()
{
Probes::GCEnd();
endPhase(PHASE_GC);
crash::SnapshotGCStack();
for (int i = 0; i < PHASE_LIMIT; i++)
totals[i] += phaseTimes[i];
phaseTotals[i] += phaseTimes[i];
if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) {
(*cb)(JS_TELEMETRY_GC_REASON, triggerReason);
(*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, compartment ? 1 : 0);
(*cb)(JS_TELEMETRY_GC_MS, t(PHASE_GC));
(*cb)(JS_TELEMETRY_GC_MARK_MS, t(PHASE_MARK));
(*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(PHASE_SWEEP));
}
(*cb)(JS_TELEMETRY_GC_MS, t(gcDuration()));
(*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK]));
(*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_SWEEP]));
(*cb)(JS_TELEMETRY_GC_RESET, wasReset);
(*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gcIncrementalEnabled);
if (JSGCFinishedCallback cb = runtime->gcFinishedCallback) {
char buffer[1024];
statsToString(buffer, sizeof(buffer));
(*cb)(runtime, compartment, buffer);
double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
(*cb)(JS_TELEMETRY_GC_MMU_50, mmu50 * 100);
}
if (fp)
@ -310,6 +280,47 @@ Statistics::endGC()
PodArrayZero(counts);
}
// Record the start of a GC slice. If the runtime is not already mid-way
// through an incremental collection, this is the first slice of a new cycle:
// aggregate stats are reset (beginGC) and the registered slice callback is
// notified with GC_CYCLE_BEGIN instead of GC_SLICE_BEGIN.
void
Statistics::beginSlice(JSCompartment *comp, gcreason::Reason reason)
{
compartment = comp;
// First slice of a cycle iff no incremental GC is currently in progress.
bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
if (first)
beginGC();
SliceData data(reason, PRMJ_Now());
(void) slices.append(data); /* Ignore any OOMs here. */
if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
(*cb)(JS_TELEMETRY_GC_REASON, reason);
if (GCSliceCallback cb = runtime->gcSliceCallback) {
// No log message at slice start; only GC_CYCLE_END carries one.
GCDescription desc(NULL, !!compartment);
(*cb)(runtime, first ? GC_CYCLE_BEGIN : GC_SLICE_BEGIN, desc);
}
}
// Record the end of the current GC slice. If this slice completed the cycle,
// finalize the cycle stats (endGC) and pass the formatted summary to the
// slice callback via GC_CYCLE_END; otherwise report a bare GC_SLICE_END with
// no log message.
void
Statistics::endSlice()
{
slices.back().end = PRMJ_Now();
if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
(*cb)(JS_TELEMETRY_GC_SLICE_MS, t(slices.back().end - slices.back().start));
// Cycle is over iff the runtime has returned to NO_INCREMENTAL.
bool last = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
if (last)
endGC();
if (GCSliceCallback cb = runtime->gcSliceCallback) {
if (last)
(*cb)(runtime, GC_CYCLE_END, GCDescription(formatData(), !!compartment));
else
(*cb)(runtime, GC_SLICE_END, GCDescription(NULL, !!compartment));
}
}
void
Statistics::beginPhase(Phase phase)
{
@ -324,8 +335,10 @@ Statistics::beginPhase(Phase phase)
void
Statistics::endPhase(Phase phase)
{
phaseEnds[phase] = PRMJ_Now();
phaseTimes[phase] += phaseEnds[phase] - phaseStarts[phase];
int64_t now = PRMJ_Now();
int64_t t = now - phaseStarts[phase];
slices.back().phaseTimes[phase] += t;
phaseTimes[phase] += t;
if (phase == gcstats::PHASE_MARK)
Probes::GCEndMarkPhase();
@ -333,5 +346,44 @@ Statistics::endPhase(Phase phase)
Probes::GCEndSweepPhase();
}
/*
* MMU (minimum mutator utilization) is a measure of how much garbage collection
* is affecting the responsiveness of the system. MMU measurements are given
* with respect to a certain window size. If we report MMU(50ms) = 80%, then
* that means that, for any 50ms window of time, at least 80% of the window is
* devoted to the mutator. In other words, the GC is running for at most 20% of
* the window, or 10ms. The GC can run multiple slices during the 50ms window
* as long as the total time it spends is at most 10ms.
*/
// Compute MMU for |window| (in usec — callers pass N * PRMJ_USEC_PER_MSEC)
// over the recorded slices with a sliding window: track the maximum GC time
// contained in any window position, and return the fraction of that worst
// window left to the mutator, in [0, 1]. Returns 0 if a single slice alone
// fills the window.
double
Statistics::computeMMU(int64_t window)
{
JS_ASSERT(!slices.empty());
int64_t gc = slices[0].end - slices[0].start;
int64_t gcMax = gc;
if (gc >= window)
return 0.0;
int startIndex = 0;
for (size_t endIndex = 1; endIndex < slices.length(); endIndex++) {
gc += slices[endIndex].end - slices[endIndex].start;
// Advance the window start past slices that no longer overlap the
// window ending at the current slice.
while (slices[endIndex].end - slices[startIndex].end >= window) {
gc -= slices[startIndex].end - slices[startIndex].start;
startIndex++;
}
int64_t cur = gc;
// Clip GC time falling outside the window.
if (slices[endIndex].end - slices[startIndex].start > window)
cur -= (slices[endIndex].end - slices[startIndex].start - window);
if (cur > gcMax)
gcMax = cur;
}
return double(window - gcMax) / window;
}
} /* namespace gcstats */
} /* namespace js */

Просмотреть файл

@ -52,8 +52,10 @@ namespace js {
namespace gcstats {
enum Phase {
PHASE_GC,
PHASE_MARK,
PHASE_MARK_ROOTS,
PHASE_MARK_DELAYED,
PHASE_MARK_OTHER,
PHASE_SWEEP,
PHASE_SWEEP_OBJECT,
PHASE_SWEEP_STRING,
@ -74,16 +76,20 @@ enum Stat {
STAT_LIMIT
};
static const size_t BUFFER_SIZE = 8192;
struct Statistics {
Statistics(JSRuntime *rt);
~Statistics();
void beginGC(JSCompartment *comp, gcreason::Reason reason);
void endGC();
void beginPhase(Phase phase);
void endPhase(Phase phase);
void beginSlice(JSCompartment *comp, gcreason::Reason reason);
void endSlice();
void reset() { wasReset = true; }
void count(Stat s) {
JS_ASSERT(s < STAT_LIMIT);
counts[s]++;
@ -92,48 +98,64 @@ struct Statistics {
private:
JSRuntime *runtime;
uint64_t startupTime;
int64_t startupTime;
FILE *fp;
bool fullFormat;
gcreason::Reason triggerReason;
JSCompartment *compartment;
bool wasReset;
uint64_t phaseStarts[PHASE_LIMIT];
uint64_t phaseEnds[PHASE_LIMIT];
uint64_t phaseTimes[PHASE_LIMIT];
uint64_t totals[PHASE_LIMIT];
unsigned int counts[STAT_LIMIT];
struct SliceData {
SliceData(gcreason::Reason reason, int64_t start)
: reason(reason), start(start)
{
PodArrayZero(phaseTimes);
}
double t(Phase phase);
double total(Phase phase);
double beginDelay(Phase phase1, Phase phase2);
double endDelay(Phase phase1, Phase phase2);
void printStats();
void statsToString(char *buffer, size_t size);
gcreason::Reason reason;
int64_t start, end;
int64_t phaseTimes[PHASE_LIMIT];
struct ColumnInfo {
const char *title;
char str[32];
char totalStr[32];
int width;
ColumnInfo() {}
ColumnInfo(const char *title, double t, double total);
ColumnInfo(const char *title, double t);
ColumnInfo(const char *title, unsigned int data);
ColumnInfo(const char *title, const char *data);
int64_t duration() const { return end - start; }
};
void makeTable(ColumnInfo *cols);
Vector<SliceData, 8, SystemAllocPolicy> slices;
/* Most recent time when the given phase started. */
int64_t phaseStarts[PHASE_LIMIT];
/* Total time in a given phase for this GC. */
int64_t phaseTimes[PHASE_LIMIT];
/* Total time in a given phase over all GCs. */
int64_t phaseTotals[PHASE_LIMIT];
/* Number of events of this type for this GC. */
unsigned int counts[STAT_LIMIT];
char buffer[BUFFER_SIZE];
bool needComma;
void beginGC();
void endGC();
int64_t gcDuration();
double t(int64_t t);
void printStats();
void fmt(const char *f, ...);
void fmtIfNonzero(const char *name, double t);
void formatPhases(int64_t *times);
const char *formatData();
double computeMMU(int64_t resolution);
};
struct AutoGC {
AutoGC(Statistics &stats, JSCompartment *comp, gcreason::Reason reason
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginGC(comp, reason); }
~AutoGC() { stats.endGC(); }
struct AutoGCSlice {
AutoGCSlice(Statistics &stats, JSCompartment *comp, gcreason::Reason reason
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginSlice(comp, reason); }
~AutoGCSlice() { stats.endSlice(); }
Statistics &stats;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER

Просмотреть файл

@ -723,8 +723,6 @@ JSRuntime::JSRuntime()
gcMaxBytes(0),
gcMaxMallocBytes(0),
gcNumArenasFreeCommitted(0),
gcNumber(0),
gcIncrementalTracer(NULL),
gcVerifyData(NULL),
gcChunkAllocationSinceLastGC(false),
gcNextFullGCTime(0),
@ -733,12 +731,20 @@ JSRuntime::JSRuntime()
gcIsNeeded(0),
gcWeakMapList(NULL),
gcStats(thisFromCtor()),
gcNumber(0),
gcStartNumber(0),
gcTriggerReason(gcreason::NO_REASON),
gcTriggerCompartment(NULL),
gcCurrentCompartment(NULL),
gcCheckCompartment(NULL),
gcIncrementalState(gc::NO_INCREMENTAL),
gcCompartmentCreated(false),
gcLastMarkSlice(false),
gcInterFrameGC(0),
gcSliceBudget(SliceBudget::Unlimited),
gcIncrementalEnabled(true),
gcIncrementalCompartment(NULL),
gcPoke(false),
gcMarkAndSweep(false),
gcRunning(false),
#ifdef JS_GC_ZEAL
gcZeal_(0),
@ -747,7 +753,7 @@ JSRuntime::JSRuntime()
gcDebugCompartmentGC(false),
#endif
gcCallback(NULL),
gcFinishedCallback(NULL),
gcSliceCallback(NULL),
gcMallocBytes(0),
gcBlackRootsTraceOp(NULL),
gcBlackRootsData(NULL),
@ -814,6 +820,9 @@ JSRuntime::init(uint32_t maxbytes)
if (!js_InitGC(this, maxbytes))
return false;
if (!gcMarker.init())
return false;
if (!(atomsCompartment = this->new_<JSCompartment>(this)) ||
!atomsCompartment->init(NULL) ||
!compartments.append(atomsCompartment)) {
@ -2437,13 +2446,7 @@ JS_SetExtraGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data)
JS_PUBLIC_API(void)
JS_TracerInit(JSTracer *trc, JSContext *cx, JSTraceCallback callback)
{
trc->runtime = cx->runtime;
trc->context = cx;
trc->callback = callback;
trc->debugPrinter = NULL;
trc->debugPrintArg = NULL;
trc->debugPrintIndex = size_t(-1);
trc->eagerlyTraceWeakMaps = true;
InitTracer(trc, cx->runtime, cx, callback);
}
JS_PUBLIC_API(void)
@ -2875,8 +2878,7 @@ JS_CompartmentGC(JSContext *cx, JSCompartment *comp)
/* We cannot GC the atoms compartment alone; use a full GC instead. */
JS_ASSERT(comp != cx->runtime->atomsCompartment);
js::gc::VerifyBarriers(cx, true);
js_GC(cx, comp, GC_NORMAL, gcreason::API);
GC(cx, comp, GC_NORMAL, gcreason::API);
}
JS_PUBLIC_API(void)
@ -2914,7 +2916,6 @@ JS_PUBLIC_API(JSBool)
JS_IsAboutToBeFinalized(void *thing)
{
gc::Cell *t = static_cast<gc::Cell *>(thing);
JS_ASSERT(!t->compartment()->rt->gcIncrementalTracer);
return IsAboutToBeFinalized(t);
}
@ -2931,11 +2932,15 @@ JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32_t value)
case JSGC_MAX_MALLOC_BYTES:
rt->setGCMaxMallocBytes(value);
break;
case JSGC_SLICE_TIME_BUDGET:
rt->gcSliceBudget = SliceBudget::TimeBudget(value);
break;
default:
JS_ASSERT(key == JSGC_MODE);
rt->gcMode = JSGCMode(value);
JS_ASSERT(rt->gcMode == JSGC_MODE_GLOBAL ||
rt->gcMode == JSGC_MODE_COMPARTMENT);
rt->gcMode == JSGC_MODE_COMPARTMENT ||
rt->gcMode == JSGC_MODE_INCREMENTAL);
return;
}
}
@ -2956,9 +2961,11 @@ JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key)
return uint32_t(rt->gcChunkPool.getEmptyCount());
case JSGC_TOTAL_CHUNKS:
return uint32_t(rt->gcChunkSet.count() + rt->gcChunkPool.getEmptyCount());
case JSGC_SLICE_TIME_BUDGET:
return uint32_t(rt->gcSliceBudget > 0 ? rt->gcSliceBudget / PRMJ_USEC_PER_MSEC : 0);
default:
JS_ASSERT(key == JSGC_NUMBER);
return rt->gcNumber;
return uint32_t(rt->gcNumber);
}
}
@ -6609,7 +6616,16 @@ JS_AbortIfWrongThread(JSRuntime *rt)
JS_PUBLIC_API(void)
JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency, JSBool compartment)
{
bool schedule = zeal >= js::gc::ZealAllocThreshold && zeal < js::gc::ZealVerifierThreshold;
#ifdef JS_GC_ZEAL
const char *env = getenv("JS_GC_ZEAL");
if (env) {
zeal = atoi(env);
frequency = 1;
compartment = false;
}
#endif
bool schedule = zeal >= js::gc::ZealAllocValue;
cx->runtime->gcZeal_ = zeal;
cx->runtime->gcZealFrequency = frequency;
cx->runtime->gcNextScheduled = schedule ? frequency : 0;

Просмотреть файл

@ -1428,8 +1428,11 @@ typedef JSBool
(* JSContextCallback)(JSContext *cx, uintN contextOp);
typedef enum JSGCStatus {
/* These callbacks happen outside the GC lock. */
JSGC_BEGIN,
JSGC_END,
/* These callbacks happen within the GC lock. */
JSGC_MARK_END,
JSGC_FINALIZE_END
} JSGCStatus;
@ -3290,7 +3293,10 @@ typedef enum JSGCParamKey {
JSGC_UNUSED_CHUNKS = 7,
/* Total number of allocated GC chunks. */
JSGC_TOTAL_CHUNKS = 8
JSGC_TOTAL_CHUNKS = 8,
/* Max milliseconds to spend in an incremental GC slice. */
JSGC_SLICE_TIME_BUDGET = 9
} JSGCParamKey;
typedef enum JSGCMode {
@ -3298,7 +3304,13 @@ typedef enum JSGCMode {
JSGC_MODE_GLOBAL = 0,
/* Perform per-compartment GCs until too much garbage has accumulated. */
JSGC_MODE_COMPARTMENT = 1
JSGC_MODE_COMPARTMENT = 1,
/*
* Collect in short time slices rather than all at once. Implies
* JSGC_MODE_COMPARTMENT.
*/
JSGC_MODE_INCREMENTAL = 2
} JSGCMode;
extern JS_PUBLIC_API(void)
@ -3393,6 +3405,8 @@ struct JSClass {
object in prototype chain
passed in via *objp in/out
parameter */
#define JSCLASS_IMPLEMENTS_BARRIERS (1<<5) /* Correctly implements GC read
and write barriers */
#define JSCLASS_DOCUMENT_OBSERVER (1<<6) /* DOM document observer */
/*

Просмотреть файл

@ -282,10 +282,10 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
c->clearTraps(cx);
JS_ClearAllWatchPoints(cx);
js_GC(cx, NULL, GC_NORMAL, gcreason::LAST_CONTEXT);
GC(cx, NULL, GC_NORMAL, gcreason::LAST_CONTEXT);
} else if (mode == JSDCM_FORCE_GC) {
js_GC(cx, NULL, GC_NORMAL, gcreason::DESTROY_CONTEXT);
GC(cx, NULL, GC_NORMAL, gcreason::DESTROY_CONTEXT);
} else if (mode == JSDCM_MAYBE_GC) {
JS_MaybeGC(cx);
}
@ -875,7 +875,7 @@ js_InvokeOperationCallback(JSContext *cx)
JS_ATOMIC_SET(&rt->interrupt, 0);
if (rt->gcIsNeeded)
js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL, rt->gcTriggerReason);
GCSlice(cx, rt->gcTriggerCompartment, GC_NORMAL, rt->gcTriggerReason);
#ifdef JS_THREADSAFE
/*

Просмотреть файл

@ -304,24 +304,25 @@ struct JSRuntime : js::RuntimeFriendFields
* in MaybeGC.
*/
volatile uint32_t gcNumArenasFreeCommitted;
uint32_t gcNumber;
js::GCMarker *gcIncrementalTracer;
js::FullGCMarker gcMarker;
void *gcVerifyData;
bool gcChunkAllocationSinceLastGC;
int64_t gcNextFullGCTime;
int64_t gcJitReleaseTime;
JSGCMode gcMode;
volatile uintptr_t gcBarrierFailed;
volatile uintptr_t gcIsNeeded;
js::WeakMapBase *gcWeakMapList;
js::gcstats::Statistics gcStats;
/* Incremented on every GC slice. */
uint64_t gcNumber;
/* The gcNumber at the time of the most recent GC's first slice. */
uint64_t gcStartNumber;
/* The reason that an interrupt-triggered GC should be called. */
js::gcreason::Reason gcTriggerReason;
/* Pre-allocated space for the GC mark stack. */
uintptr_t gcMarkStackArray[js::MARK_STACK_LENGTH];
/*
* Compartment that triggered GC. If more than one Compatment need GC,
* gcTriggerCompartment is reset to NULL and a global GC is performed.
@ -337,6 +338,53 @@ struct JSRuntime : js::RuntimeFriendFields
*/
JSCompartment *gcCheckCompartment;
/*
* The current incremental GC phase. During non-incremental GC, this is
* always NO_INCREMENTAL.
*/
js::gc::State gcIncrementalState;
/* Indicates that a new compartment was created during incremental GC. */
bool gcCompartmentCreated;
/* Indicates that the last incremental slice exhausted the mark stack. */
bool gcLastMarkSlice;
/*
* Indicates that a GC slice has taken place in the middle of an animation
* frame, rather than at the beginning. In this case, the next slice will be
* delayed so that we don't get back-to-back slices.
*/
volatile uintptr_t gcInterFrameGC;
/* Default budget for incremental GC slice. See SliceBudget in jsgc.h. */
int64_t gcSliceBudget;
/*
* We disable incremental GC if we encounter a js::Class with a trace hook
* that does not implement write barriers.
*/
bool gcIncrementalEnabled;
/* Compartment that is undergoing an incremental GC. */
JSCompartment *gcIncrementalCompartment;
/*
* We save all conservative scanned roots in this vector so that
* conservative scanning can be "replayed" deterministically. In DEBUG mode,
* this allows us to run a non-incremental GC after every incremental GC to
* ensure that no objects were missed.
*/
#ifdef DEBUG
struct SavedGCRoot {
void *thing;
JSGCTraceKind kind;
SavedGCRoot(void *thing, JSGCTraceKind kind) : thing(thing), kind(kind) {}
};
js::Vector<SavedGCRoot, 0, js::SystemAllocPolicy> gcSavedRoots;
#endif
/*
* We can pack these flags as only the GC thread writes to them. Atomic
* updates to packed bytes are not guaranteed, so stores issued by one
@ -344,7 +392,6 @@ struct JSRuntime : js::RuntimeFriendFields
* other threads.
*/
bool gcPoke;
bool gcMarkAndSweep;
bool gcRunning;
/*
@ -353,7 +400,7 @@ struct JSRuntime : js::RuntimeFriendFields
* gcNextScheduled is decremented. When it reaches zero, we do either a
* full or a compartmental GC, based on gcDebugCompartmentGC.
*
* At this point, if gcZeal_ >= 2 then gcNextScheduled is reset to the
* At this point, if gcZeal_ == 2 then gcNextScheduled is reset to the
* value of gcZealFrequency. Otherwise, no additional GCs take place.
*
* You can control these values in several ways:
@ -361,9 +408,8 @@ struct JSRuntime : js::RuntimeFriendFields
* - Call gczeal() or schedulegc() from inside shell-executed JS code
* (see the help for details)
*
* Additionally, if gzZeal_ == 1 then we perform GCs in select places
* (during MaybeGC and whenever a GC poke happens). This option is mainly
* useful to embedders.
* If gzZeal_ == 1 then we perform GCs in select places (during MaybeGC and
* whenever a GC poke happens). This option is mainly useful to embedders.
*
* We use gcZeal_ == 4 to enable write barrier verification. See the comment
* in jsgc.cpp for more information about this.
@ -378,7 +424,7 @@ struct JSRuntime : js::RuntimeFriendFields
bool needZealousGC() {
if (gcNextScheduled > 0 && --gcNextScheduled == 0) {
if (gcZeal() >= js::gc::ZealAllocThreshold && gcZeal() < js::gc::ZealVerifierThreshold)
if (gcZeal() == js::gc::ZealAllocValue)
gcNextScheduled = gcZealFrequency;
return true;
}
@ -390,7 +436,7 @@ struct JSRuntime : js::RuntimeFriendFields
#endif
JSGCCallback gcCallback;
JSGCFinishedCallback gcFinishedCallback;
js::GCSliceCallback gcSliceCallback;
private:
/*

Просмотреть файл

@ -73,7 +73,6 @@ JSCompartment::JSCompartment(JSRuntime *rt)
: rt(rt),
principals(NULL),
needsBarrier_(false),
gcIncrementalTracer(NULL),
gcBytes(0),
gcTriggerBytes(0),
gcLastBytes(0),
@ -128,6 +127,9 @@ JSCompartment::init(JSContext *cx)
if (!scriptFilenameTable.init())
return false;
if (!barrierMarker_.init())
return false;
return debuggees.init();
}
@ -458,6 +460,30 @@ JSCompartment::markTypes(JSTracer *trc)
}
}
void
JSCompartment::discardJitCode(JSContext *cx)
{
/*
* Kick all frames on the stack into the interpreter, and release all JIT
* code in the compartment.
*/
#ifdef JS_METHODJIT
mjit::ClearAllFrames(this);
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
mjit::ReleaseScriptCode(cx, script);
/*
* Use counts for scripts are reset on GC. After discarding code we
* need to let it warm back up to get information like which opcodes
* are setting array holes or accessing getter properties.
*/
script->resetUseCount();
}
#endif
}
void
JSCompartment::sweep(JSContext *cx, bool releaseTypes)
{
@ -474,6 +500,8 @@ JSCompartment::sweep(JSContext *cx, bool releaseTypes)
/* Remove dead references held weakly by the compartment. */
regExps.sweep(rt);
sweepBaseShapeTable(cx);
sweepInitialShapeTable(cx);
sweepNewTypeObjectTable(cx, newTypeObjects);
@ -488,26 +516,7 @@ JSCompartment::sweep(JSContext *cx, bool releaseTypes)
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_CODE);
/*
* Kick all frames on the stack into the interpreter, and release all JIT
* code in the compartment.
*/
#ifdef JS_METHODJIT
mjit::ClearAllFrames(this);
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
mjit::ReleaseScriptCode(cx, script);
/*
* Use counts for scripts are reset on GC. After discarding code we
* need to let it warm back up to get information like which opcodes
* are setting array holes or accessing getter properties.
*/
script->resetUseCount();
}
#endif
discardJitCode(cx);
}
if (!activeAnalysis) {
@ -561,8 +570,6 @@ JSCompartment::sweep(JSContext *cx, bool releaseTypes)
void
JSCompartment::purge(JSContext *cx)
{
arenas.purge();
regExps.purge();
dtoaCache.purge();
/*
@ -776,13 +783,6 @@ JSCompartment::sweepBreakpoints(JSContext *cx)
}
}
GCMarker *
JSCompartment::createBarrierTracer()
{
JS_ASSERT(!gcIncrementalTracer);
return NULL;
}
size_t
JSCompartment::sizeOfShapeTable(JSMallocSizeOfFun mallocSizeOf)
{

Просмотреть файл

@ -46,7 +46,6 @@
#include "jscntxt.h"
#include "jsfun.h"
#include "jsgc.h"
#include "jsgcstats.h"
#include "jsobj.h"
#include "jsscope.h"
#include "vm/GlobalObject.h"
@ -163,6 +162,23 @@ typedef HashSet<ScriptFilenameEntry *,
ScriptFilenameHasher,
SystemAllocPolicy> ScriptFilenameTable;
/* If HashNumber grows, need to change WrapperHasher. */
JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
struct WrapperHasher
{
typedef Value Lookup;
static HashNumber hash(Value key) {
uint64_t bits = JSVAL_TO_IMPL(key).asBits;
return uint32_t(bits) ^ uint32_t(bits >> 32);
}
static bool match(const Value &l, const Value &k) { return l == k; }
};
typedef HashMap<Value, ReadBarrieredValue, WrapperHasher, SystemAllocPolicy> WrapperMap;
} /* namespace js */
namespace JS {
@ -177,7 +193,7 @@ struct JSCompartment
js::gc::ArenaLists arenas;
bool needsBarrier_;
js::GCMarker *gcIncrementalTracer;
js::BarrierGCMarker barrierMarker_;
bool needsBarrier() {
return needsBarrier_;
@ -185,9 +201,7 @@ struct JSCompartment
js::GCMarker *barrierTracer() {
JS_ASSERT(needsBarrier_);
if (gcIncrementalTracer)
return gcIncrementalTracer;
return createBarrierTracer();
return &barrierMarker_;
}
size_t gcBytes;
@ -325,10 +339,11 @@ struct JSCompartment
bool wrap(JSContext *cx, js::AutoIdVector &props);
void markTypes(JSTracer *trc);
void discardJitCode(JSContext *cx);
void sweep(JSContext *cx, bool releaseTypes);
void purge(JSContext *cx);
void setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind);
void setGCLastBytes(size_t lastBytes, js::JSGCInvocationKind gckind);
void reduceGCTriggerBytes(size_t amount);
void resetGCMallocBytes();
@ -397,8 +412,6 @@ struct JSCompartment
private:
void sweepBreakpoints(JSContext *cx);
js::GCMarker *createBarrierTracer();
public:
js::WatchpointMap *watchpointMap;
};

Просмотреть файл

@ -94,7 +94,7 @@ exn_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags,
Class js::ErrorClass = {
js_Error_str,
JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE |
JSCLASS_HAS_CACHED_PROTO(JSProto_Error),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */

Просмотреть файл

@ -132,7 +132,7 @@ JS_NewObjectWithUniqueType(JSContext *cx, JSClass *clasp, JSObject *proto, JSObj
JS_FRIEND_API(void)
js::GCForReason(JSContext *cx, gcreason::Reason reason)
{
js_GC(cx, NULL, GC_NORMAL, reason);
GC(cx, NULL, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
@ -141,13 +141,19 @@ js::CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason
/* We cannot GC the atoms compartment alone; use a full GC instead. */
JS_ASSERT(comp != cx->runtime->atomsCompartment);
js_GC(cx, comp, GC_NORMAL, reason);
GC(cx, comp, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
js::ShrinkingGC(JSContext *cx, gcreason::Reason reason)
{
js_GC(cx, NULL, GC_SHRINK, reason);
GC(cx, NULL, GC_SHRINK, reason);
}
JS_FRIEND_API(void)
js::IncrementalGC(JSContext *cx, gcreason::Reason reason)
{
GCSlice(cx, NULL, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
@ -401,12 +407,6 @@ JS_SetAccumulateTelemetryCallback(JSRuntime *rt, JSAccumulateTelemetryDataCallba
rt->telemetryCallback = callback;
}
JS_FRIEND_API(void)
JS_SetGCFinishedCallback(JSRuntime *rt, JSGCFinishedCallback callback)
{
rt->gcFinishedCallback = callback;
}
#ifdef DEBUG
JS_FRIEND_API(void)
js_DumpString(JSString *str)
@ -551,39 +551,6 @@ js::DumpHeapComplete(JSContext *cx, FILE *fp)
namespace js {
JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSRuntime *rt)
{
return !!rt->gcIncrementalTracer && !rt->gcRunning;
}
JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSContext *cx)
{
return IsIncrementalBarrierNeeded(cx->runtime);
}
extern JS_FRIEND_API(void)
IncrementalReferenceBarrier(void *ptr)
{
if (!ptr)
return;
JS_ASSERT(!static_cast<gc::Cell *>(ptr)->compartment()->rt->gcRunning);
uint32_t kind = gc::GetGCThingTraceKind(ptr);
if (kind == JSTRACE_OBJECT)
JSObject::writeBarrierPre((JSObject *) ptr);
else if (kind == JSTRACE_STRING)
JSString::writeBarrierPre((JSString *) ptr);
else
JS_NOT_REACHED("invalid trace kind");
}
extern JS_FRIEND_API(void)
IncrementalValueBarrier(const Value &v)
{
HeapValue::writeBarrierPre(v);
}
/* static */ void
AutoLockGC::LockGC(JSRuntime *rt)
{
@ -719,4 +686,90 @@ SizeOfJSContext()
return sizeof(JSContext);
}
JS_FRIEND_API(GCSliceCallback)
SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback)
{
GCSliceCallback old = rt->gcSliceCallback;
rt->gcSliceCallback = callback;
return old;
}
JS_FRIEND_API(bool)
WantGCSlice(JSRuntime *rt)
{
if (rt->gcZeal() == gc::ZealFrameVerifierValue || rt->gcZeal() == gc::ZealFrameGCValue)
return true;
if (rt->gcIncrementalState != gc::NO_INCREMENTAL)
return true;
return false;
}
JS_FRIEND_API(void)
NotifyDidPaint(JSContext *cx)
{
JSRuntime *rt = cx->runtime;
if (rt->gcZeal() == gc::ZealFrameVerifierValue) {
gc::VerifyBarriers(cx);
return;
}
if (rt->gcZeal() == gc::ZealFrameGCValue) {
GCSlice(cx, NULL, GC_NORMAL, gcreason::REFRESH_FRAME);
return;
}
if (rt->gcIncrementalState != gc::NO_INCREMENTAL && !rt->gcInterFrameGC)
GCSlice(cx, rt->gcIncrementalCompartment, GC_NORMAL, gcreason::REFRESH_FRAME);
rt->gcInterFrameGC = false;
}
extern JS_FRIEND_API(bool)
IsIncrementalGCEnabled(JSRuntime *rt)
{
return rt->gcIncrementalEnabled;
}
JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSRuntime *rt)
{
return (rt->gcIncrementalState == gc::MARK && !rt->gcRunning);
}
JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSContext *cx)
{
return IsIncrementalBarrierNeeded(cx->runtime);
}
JS_FRIEND_API(bool)
IsIncrementalBarrierNeededOnObject(JSObject *obj)
{
return obj->compartment()->needsBarrier();
}
extern JS_FRIEND_API(void)
IncrementalReferenceBarrier(void *ptr)
{
if (!ptr)
return;
JS_ASSERT(!static_cast<gc::Cell *>(ptr)->compartment()->rt->gcRunning);
uint32_t kind = gc::GetGCThingTraceKind(ptr);
if (kind == JSTRACE_OBJECT)
JSObject::writeBarrierPre((JSObject *) ptr);
else if (kind == JSTRACE_STRING)
JSString::writeBarrierPre((JSString *) ptr);
else
JS_NOT_REACHED("invalid trace kind");
}
extern JS_FRIEND_API(void)
IncrementalValueBarrier(const Value &v)
{
HeapValue::writeBarrierPre(v);
}
} // namespace js

Просмотреть файл

@ -100,7 +100,11 @@ enum {
JS_TELEMETRY_GC_IS_COMPARTMENTAL,
JS_TELEMETRY_GC_MS,
JS_TELEMETRY_GC_MARK_MS,
JS_TELEMETRY_GC_SWEEP_MS
JS_TELEMETRY_GC_SWEEP_MS,
JS_TELEMETRY_GC_SLICE_MS,
JS_TELEMETRY_GC_MMU_50,
JS_TELEMETRY_GC_RESET,
JS_TELEMETRY_GC_INCREMENTAL_DISABLED
};
typedef void
@ -109,12 +113,6 @@ typedef void
extern JS_FRIEND_API(void)
JS_SetAccumulateTelemetryCallback(JSRuntime *rt, JSAccumulateTelemetryDataCallback callback);
typedef void
(* JSGCFinishedCallback)(JSRuntime *rt, JSCompartment *comp, const char *description);
extern JS_FRIEND_API(void)
JS_SetGCFinishedCallback(JSRuntime *rt, JSGCFinishedCallback callback);
extern JS_FRIEND_API(JSPrincipals *)
JS_GetCompartmentPrincipals(JSCompartment *compartment);
@ -703,12 +701,65 @@ CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason reas
extern JS_FRIEND_API(void)
ShrinkingGC(JSContext *cx, gcreason::Reason reason);
extern JS_FRIEND_API(void)
IncrementalGC(JSContext *cx, gcreason::Reason reason);
extern JS_FRIEND_API(void)
SetGCSliceTimeBudget(JSContext *cx, int64_t millis);
enum GCProgress {
/*
* During non-incremental GC, the GC is bracketed by JSGC_CYCLE_BEGIN/END
* callbacks. During an incremental GC, the sequence of callbacks is as
* follows:
* JSGC_CYCLE_BEGIN, JSGC_SLICE_END (first slice)
* JSGC_SLICE_BEGIN, JSGC_SLICE_END (second slice)
* ...
* JSGC_SLICE_BEGIN, JSGC_CYCLE_END (last slice)
*/
GC_CYCLE_BEGIN,
GC_SLICE_BEGIN,
GC_SLICE_END,
GC_CYCLE_END
};
struct GCDescription {
const char *logMessage;
bool isCompartment;
GCDescription(const char *msg, bool isCompartment)
: logMessage(msg), isCompartment(isCompartment) {}
};
typedef void
(* GCSliceCallback)(JSRuntime *rt, GCProgress progress, const GCDescription &desc);
extern JS_FRIEND_API(GCSliceCallback)
SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback);
extern JS_FRIEND_API(bool)
WantGCSlice(JSRuntime *rt);
/*
* Signals a good place to do an incremental slice, because the browser is
* drawing a frame.
*/
extern JS_FRIEND_API(void)
NotifyDidPaint(JSContext *cx);
extern JS_FRIEND_API(bool)
IsIncrementalGCEnabled(JSRuntime *rt);
extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSRuntime *rt);
extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSContext *cx);
extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeededOnObject(JSObject *obj);
extern JS_FRIEND_API(void)
IncrementalReferenceBarrier(void *ptr);

Просмотреть файл

@ -551,7 +551,7 @@ args_trace(JSTracer *trc, JSObject *obj)
*/
Class js::NormalArgumentsObjectClass = {
"Arguments",
JSCLASS_NEW_RESOLVE |
JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(NormalArgumentsObject::RESERVED_SLOTS) |
JSCLASS_HAS_CACHED_PROTO(JSProto_Object) |
JSCLASS_FOR_OF_ITERATION,
@ -587,7 +587,7 @@ Class js::NormalArgumentsObjectClass = {
*/
Class js::StrictArgumentsObjectClass = {
"Arguments",
JSCLASS_NEW_RESOLVE |
JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(StrictArgumentsObject::RESERVED_SLOTS) |
JSCLASS_HAS_CACHED_PROTO(JSProto_Object) |
JSCLASS_FOR_OF_ITERATION,
@ -942,7 +942,7 @@ call_trace(JSTracer *trc, JSObject *obj)
JS_PUBLIC_DATA(Class) js::CallClass = {
"Call",
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(CallObject::RESERVED_SLOTS) |
JSCLASS_NEW_RESOLVE | JSCLASS_IS_ANONYMOUS,
JS_PropertyStub, /* addProperty */
@ -1499,7 +1499,7 @@ JSFunction::sizeOfMisc(JSMallocSizeOfFun mallocSizeOf) const
*/
JS_FRIEND_DATA(Class) js::FunctionClass = {
js_Function_str,
JSCLASS_NEW_RESOLVE |
JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_CACHED_PROTO(JSProto_Function),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -55,7 +55,6 @@
#include "jslock.h"
#include "jsutil.h"
#include "jsversion.h"
#include "jsgcstats.h"
#include "jscell.h"
#include "ds/BitArray.h"
@ -82,6 +81,14 @@ struct Shape;
namespace gc {
enum State {
NO_INCREMENTAL,
MARK_ROOTS,
MARK,
SWEEP,
INVALID
};
struct Arena;
/*
@ -419,6 +426,10 @@ struct ArenaHeader {
* not present in the stack we use an extra flag to tag arenas on the
* stack.
*
* Delayed marking is also used for arenas that we allocate into during an
* incremental GC. In this case, we intend to mark all the objects in the
* arena, and it's faster to do this marking in bulk.
*
* To minimize the ArenaHeader size we record the next delayed marking
* linkage as arenaAddress() >> ArenaShift and pack it with the allocKind
* field and hasDelayedMarking flag. We use 8 bits for the allocKind, not
@ -427,7 +438,9 @@ struct ArenaHeader {
*/
public:
size_t hasDelayedMarking : 1;
size_t nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1;
size_t allocatedDuringIncremental : 1;
size_t markOverflow : 1;
size_t nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
static void staticAsserts() {
/* We must be able to fit the allockind into uint8_t. */
@ -437,7 +450,7 @@ struct ArenaHeader {
* nextDelayedMarkingpacking assumes that ArenaShift has enough bits
* to cover allocKind and hasDelayedMarking.
*/
JS_STATIC_ASSERT(ArenaShift >= 8 + 1);
JS_STATIC_ASSERT(ArenaShift >= 8 + 1 + 1 + 1);
}
inline uintptr_t address() const;
@ -450,6 +463,8 @@ struct ArenaHeader {
void init(JSCompartment *comp, AllocKind kind) {
JS_ASSERT(!allocated());
JS_ASSERT(!markOverflow);
JS_ASSERT(!allocatedDuringIncremental);
JS_ASSERT(!hasDelayedMarking);
compartment = comp;
@ -462,6 +477,8 @@ struct ArenaHeader {
void setAsNotAllocated() {
allocKind = size_t(FINALIZE_LIMIT);
markOverflow = 0;
allocatedDuringIncremental = 0;
hasDelayedMarking = 0;
nextDelayedMarking = 0;
}
@ -507,8 +524,8 @@ struct ArenaHeader {
void checkSynchronizedWithFreeList() const;
#endif
inline Arena *getNextDelayedMarking() const;
inline void setNextDelayedMarking(Arena *arena);
inline ArenaHeader *getNextDelayedMarking() const;
inline void setNextDelayedMarking(ArenaHeader *aheader);
};
struct Arena {
@ -908,25 +925,24 @@ ArenaHeader::getThingSize() const
return Arena::thingSize(getAllocKind());
}
inline Arena *
inline ArenaHeader *
ArenaHeader::getNextDelayedMarking() const
{
return reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift);
return &reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift)->aheader;
}
inline void
ArenaHeader::setNextDelayedMarking(Arena *arena)
ArenaHeader::setNextDelayedMarking(ArenaHeader *aheader)
{
JS_ASSERT(!hasDelayedMarking);
JS_ASSERT(!(uintptr_t(aheader) & ArenaMask));
hasDelayedMarking = 1;
nextDelayedMarking = arena->address() >> ArenaShift;
nextDelayedMarking = aheader->arenaAddress() >> ArenaShift;
}
JS_ALWAYS_INLINE void
ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32_t color,
uintptr_t **wordp, uintptr_t *maskp)
{
JS_ASSERT(cell->chunk() == Chunk::fromAddress(reinterpret_cast<uintptr_t>(this)));
size_t bit = (cell->address() & ChunkMask) / Cell::CellSize + color;
JS_ASSERT(bit < ArenaBitmapBits * ArenasPerChunk);
*maskp = uintptr_t(1) << (bit % JS_BITS_PER_WORD);
@ -970,21 +986,6 @@ Cell::compartment() const
return arenaHeader()->compartment;
}
/*
* Lower limit after which we limit the heap growth
*/
const size_t GC_ALLOCATION_THRESHOLD = 30 * 1024 * 1024;
/*
* A GC is triggered once the number of newly allocated arenas is
* GC_HEAP_GROWTH_FACTOR times the number of live arenas after the last GC
* starting after the lower limit of GC_ALLOCATION_THRESHOLD.
*/
const float GC_HEAP_GROWTH_FACTOR = 3.0f;
/* Perform a Full GC every 20 seconds if MaybeGC is called */
static const int64_t GC_IDLE_FULL_SPAN = 20 * 1000 * 1000;
static inline JSGCTraceKind
MapAllocToTraceKind(AllocKind thingKind)
{
@ -1168,13 +1169,14 @@ struct ArenaLists {
FreeSpan *headSpan = &freeLists[i];
if (!headSpan->isEmpty()) {
ArenaHeader *aheader = headSpan->arenaHeader();
JS_ASSERT(!aheader->hasFreeThings());
aheader->setFirstFreeSpan(headSpan);
headSpan->initAsEmpty();
}
}
}
inline void prepareForIncrementalGC(JSCompartment *comp);
/*
* Temporarily copy the free list heads to the arenas so the code can see
* the proper value in ArenaHeader::freeList when accessing the latter
@ -1309,23 +1311,6 @@ typedef js::HashMap<void *,
js::DefaultHasher<void *>,
js::SystemAllocPolicy> RootedValueMap;
/* If HashNumber grows, need to change WrapperHasher. */
JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
struct WrapperHasher
{
typedef Value Lookup;
static HashNumber hash(Value key) {
uint64_t bits = JSVAL_TO_IMPL(key).asBits;
return uint32_t(bits) ^ uint32_t(bits >> 32);
}
static bool match(const Value &l, const Value &k) { return l == k; }
};
typedef HashMap<Value, Value, WrapperHasher, SystemAllocPolicy> WrapperMap;
} /* namespace js */
extern JS_FRIEND_API(JSGCTraceKind)
@ -1376,6 +1361,9 @@ js_IsAddressableGCThing(JSRuntime *rt, uintptr_t w, js::gc::AllocKind *thingKind
namespace js {
extern void
MarkCompartmentActive(js::StackFrame *fp);
extern void
TraceRuntime(JSTracer *trc);
@ -1396,8 +1384,6 @@ MaybeGC(JSContext *cx);
extern void
ShrinkGCBuffers(JSRuntime *rt);
} /* namespace js */
/*
* Kinds of js_GC invocation.
*/
@ -1411,10 +1397,21 @@ typedef enum JSGCInvocationKind {
/* Pass NULL for |comp| to get a full GC. */
extern void
js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason r);
GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason reason);
extern void
GCSlice(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason reason);
extern void
GCDebugSlice(JSContext *cx, int64_t objCount);
} /* namespace js */
namespace js {
void
InitTracer(JSTracer *trc, JSRuntime *rt, JSContext *cx, JSTraceCallback callback);
#ifdef JS_THREADSAFE
class GCHelperThread {
@ -1572,17 +1569,56 @@ struct MarkStack {
T *tos;
T *limit;
bool push(T item) {
if (tos == limit)
T *ballast;
T *ballastLimit;
MarkStack()
: stack(NULL),
tos(NULL),
limit(NULL),
ballast(NULL),
ballastLimit(NULL) { }
~MarkStack() {
if (stack != ballast)
js_free(stack);
js_free(ballast);
}
bool init(size_t ballastcap) {
JS_ASSERT(!stack);
if (ballastcap == 0)
return true;
ballast = (T *)js_malloc(sizeof(T) * ballastcap);
if (!ballast)
return false;
ballastLimit = ballast + ballastcap;
stack = ballast;
limit = ballastLimit;
tos = stack;
return true;
}
bool push(T item) {
if (tos == limit) {
if (!enlarge())
return false;
}
JS_ASSERT(tos < limit);
*tos++ = item;
return true;
}
bool push(T item1, T item2, T item3) {
T *nextTos = tos + 3;
if (nextTos > limit)
return false;
if (nextTos > limit) {
if (!enlarge())
return false;
nextTos = tos + 3;
}
JS_ASSERT(nextTos <= limit);
tos[0] = item1;
tos[1] = item2;
tos[2] = item3;
@ -1599,61 +1635,130 @@ struct MarkStack {
return *--tos;
}
template<size_t N>
MarkStack(T (&buffer)[N])
: stack(buffer),
tos(buffer),
limit(buffer + N) { }
ptrdiff_t position() const {
return tos - stack;
}
/*
 * Empty the stack. Any heap buffer obtained through enlarge() is freed and
 * the stack falls back to the preallocated ballast storage.
 */
void reset() {
if (stack != ballast) {
js_free(stack);
stack = ballast;
limit = ballastLimit;
}
tos = stack;
JS_ASSERT(limit == ballastLimit);
}
/*
 * Double the stack capacity (starting at 32 entries when the current
 * capacity is 0), preserving the pushed contents. Returns false on OOM,
 * leaving the stack unchanged.
 */
bool enlarge() {
size_t tosIndex = tos - stack;
size_t cap = limit - stack;
size_t newcap = cap * 2;
if (newcap == 0)
newcap = 32;
T *newStack;
if (stack == ballast) {
/*
 * The ballast buffer must stay alive so reset() can return to it and
 * the destructor can free it, so we cannot realloc it: allocate a
 * fresh buffer and copy the live entries over by hand.
 */
newStack = (T *)js_malloc(sizeof(T) * newcap);
if (!newStack)
return false;
for (T *src = stack, *dst = newStack; src < tos; )
*dst++ = *src++;
} else {
/* Already on a heap buffer we own exclusively; realloc in place. */
newStack = (T *)js_realloc(stack, sizeof(T) * newcap);
if (!newStack)
return false;
}
stack = newStack;
tos = stack + tosIndex;
limit = newStack + newcap;
return true;
}
};
/*
 * This class records how much work has been done in a given GC slice, so that
 * we can return before pausing for too long. Some slices are allowed to run for
 * unlimited time, and others are bounded. To reduce the number of gettimeofday
 * calls, we only check the time every 1000 operations.
 */
struct SliceBudget {
int64_t deadline; /* in microseconds */
intptr_t counter;  /* ops remaining before we recheck the deadline */
/* Number of step() calls between deadline checks. */
static const intptr_t CounterReset = 1000;
static const int64_t Unlimited = 0;
/* Encode a budget of |millis| milliseconds / |work| work items. */
static int64_t TimeBudget(int64_t millis);
static int64_t WorkBudget(int64_t work);
/* Equivalent to SliceBudget(UnlimitedBudget). */
SliceBudget();
/* Instantiate as SliceBudget(Time/WorkBudget(n)). */
SliceBudget(int64_t budget);
/* Make this budget effectively unlimited. */
void reset() {
deadline = INT64_MAX;
counter = INTPTR_MAX;
}
/* Record one unit of work. */
void step() {
counter--;
}
bool checkOverBudget();
/*
 * Fast-path check: only falls through to the (slow, clock-reading)
 * checkOverBudget() once the counter has ticked down to zero.
 */
bool isOverBudget() {
if (counter > 0)
return false;
return checkOverBudget();
}
};
static const size_t MARK_STACK_LENGTH = 32768;
struct GCMarker : public JSTracer {
private:
/*
* We use a common mark stack to mark GC things of different types and use
* the explicit tags to distinguish them when it cannot be deduced from
* the context of push or pop operation.
*
* Currently we need only 4 tags. However that can be extended to 8 if
* necessary as we tag only GC things.
*/
enum StackTag {
ValueArrayTag,
ObjectTag,
TypeTag,
XmlTag,
LastTag = XmlTag
SavedValueArrayTag,
LastTag = SavedValueArrayTag
};
static const uintptr_t StackTagMask = 3;
static const uintptr_t StackTagMask = 7;
static void staticAsserts() {
JS_STATIC_ASSERT(StackTagMask >= uintptr_t(LastTag));
JS_STATIC_ASSERT(StackTagMask <= gc::Cell::CellMask);
}
private:
/* The color is only applied to objects, functions and xml. */
uint32_t color;
public:
/* Pointer to the top of the stack of arenas we are delaying marking on. */
js::gc::Arena *unmarkedArenaStackTop;
/* Count of arenas that are currently in the stack. */
DebugOnly<size_t> markLaterArenas;
explicit GCMarker();
bool init(bool lazy);
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
js::gc::ConservativeGCStats conservativeStats;
Vector<void *, 0, SystemAllocPolicy> conservativeRoots;
const char *conservativeDumpFileName;
void dumpConservativeRoots();
#endif
void start(JSRuntime *rt, JSContext *cx);
void stop();
void reset();
MarkStack<uintptr_t> stack;
void pushObject(JSObject *obj) {
pushTaggedPtr(ObjectTag, obj);
}
public:
explicit GCMarker(JSContext *cx);
~GCMarker();
void pushType(types::TypeObject *type) {
pushTaggedPtr(TypeTag, type);
}
void pushXML(JSXML *xml) {
pushTaggedPtr(XmlTag, xml);
}
uint32_t getMarkColor() const {
return color;
@ -1668,43 +1773,123 @@ struct GCMarker : public JSTracer {
* objects that are still reachable.
*/
void setMarkColorGray() {
JS_ASSERT(isDrained());
JS_ASSERT(color == gc::BLACK);
color = gc::GRAY;
}
inline void delayMarkingArena(gc::ArenaHeader *aheader);
void delayMarkingChildren(const void *thing);
void markDelayedChildren(gc::ArenaHeader *aheader);
bool markDelayedChildren(SliceBudget &budget);
bool hasDelayedChildren() const {
return !!unmarkedArenaStackTop;
}
void markDelayedChildren();
bool isDrained() {
return isMarkStackEmpty() && !unmarkedArenaStackTop;
}
bool drainMarkStack(SliceBudget &budget);
/*
* Gray marking must be done after all black marking is complete. However,
* we do not have write barriers on XPConnect roots. Therefore, XPConnect
* roots must be accumulated in the first slice of incremental GC. We
* accumulate these roots in the GrayRootMarker and then mark them later,
* after black marking is complete. This accumulation can fail, but in that
* case we switch to non-incremental GC.
*/
bool hasBufferedGrayRoots() const;
void startBufferingGrayRoots();
void endBufferingGrayRoots();
void markBufferedGrayRoots();
static void GrayCallback(JSTracer *trc, void **thing, JSGCTraceKind kind);
MarkStack<uintptr_t> stack;
private:
#ifdef DEBUG
void checkCompartment(void *p);
#else
void checkCompartment(void *p) {}
#endif
void pushTaggedPtr(StackTag tag, void *ptr) {
checkCompartment(ptr);
uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
JS_ASSERT(!(addr & StackTagMask));
if (!stack.push(addr | uintptr_t(tag)))
delayMarkingChildren(ptr);
}
void pushValueArray(JSObject *obj, void *start, void *end) {
checkCompartment(obj);
if (start == end)
return;
JS_ASSERT(start <= end);
uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
/*
* Push in the reverse order so obj will be on top. If we cannot push
* the array, we trigger delay marking for the whole object.
*/
if (!stack.push(endAddr, startAddr, tagged))
delayMarkingChildren(obj);
}
bool isMarkStackEmpty() {
return stack.isEmpty();
}
void drainMarkStack();
bool restoreValueArray(JSObject *obj, void **vpp, void **endp);
void saveValueRanges();
inline void processMarkStackTop(SliceBudget &budget);
inline void processMarkStackTop();
void appendGrayRoot(void *thing, JSGCTraceKind kind);
void pushObject(JSObject *obj) {
pushTaggedPtr(ObjectTag, obj);
/* The color is only applied to objects, functions and xml. */
uint32_t color;
DebugOnly<bool> started;
/* Pointer to the top of the stack of arenas we are delaying marking on. */
js::gc::ArenaHeader *unmarkedArenaStackTop;
/* Count of arenas that are currently in the stack. */
DebugOnly<size_t> markLaterArenas;
struct GrayRoot {
void *thing;
JSGCTraceKind kind;
#ifdef DEBUG
JSTraceNamePrinter debugPrinter;
const void *debugPrintArg;
size_t debugPrintIndex;
#endif
GrayRoot(void *thing, JSGCTraceKind kind)
: thing(thing), kind(kind) {}
};
bool grayFailed;
Vector<GrayRoot, 0, SystemAllocPolicy> grayRoots;
};
struct BarrierGCMarker : public GCMarker {
bool init() {
return GCMarker::init(true);
}
};
void pushType(types::TypeObject *type) {
pushTaggedPtr(TypeTag, type);
}
void pushXML(JSXML *xml) {
pushTaggedPtr(XmlTag, xml);
}
void pushTaggedPtr(StackTag tag, void *ptr) {
uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
JS_ASSERT(!(addr & StackTagMask));
if (!stack.push(addr | uintptr_t(tag)))
delayMarkingChildren(ptr);
struct FullGCMarker : public GCMarker {
bool init() {
return GCMarker::init(false);
}
};
@ -1757,7 +1942,8 @@ js_FinalizeStringRT(JSRuntime *rt, JSString *str);
/*
* Macro to test if a traversal is the marking phase of the GC.
*/
#define IS_GC_MARKING_TRACER(trc) ((trc)->callback == NULL)
#define IS_GC_MARKING_TRACER(trc) \
((trc)->callback == NULL || (trc)->callback == GCMarker::GrayCallback)
namespace js {
namespace gc {
@ -1778,20 +1964,30 @@ inline void MaybeCheckStackRoots(JSContext *cx) { CheckStackRoots(cx); }
inline void MaybeCheckStackRoots(JSContext *cx) {}
#endif
const int ZealPokeThreshold = 1;
const int ZealAllocThreshold = 2;
const int ZealVerifierThreshold = 4;
const int ZealPokeValue = 1;
const int ZealAllocValue = 2;
const int ZealFrameGCValue = 3;
const int ZealVerifierValue = 4;
const int ZealFrameVerifierValue = 5;
#ifdef JS_GC_ZEAL
/* Check that write barriers have been used correctly. See jsgc.cpp. */
void
VerifyBarriers(JSContext *cx, bool always = false);
VerifyBarriers(JSContext *cx);
void
MaybeVerifyBarriers(JSContext *cx, bool always = false);
#else
static inline void
VerifyBarriers(JSContext *cx, bool always = false)
VerifyBarriers(JSContext *cx)
{
}
static inline void
MaybeVerifyBarriers(JSContext *cx, bool always = false)
{
}

Просмотреть файл

@ -210,7 +210,7 @@ GCPoke(JSRuntime *rt, Value oldval)
#ifdef JS_GC_ZEAL
/* Schedule a GC to happen "soon" after a GC poke. */
if (rt->gcZeal() >= js::gc::ZealPokeThreshold)
if (rt->gcZeal() == js::gc::ZealPokeValue)
rt->gcNextScheduled = 1;
#endif
}
@ -262,14 +262,25 @@ class CellIterImpl
CellIterImpl() {
}
void init(JSCompartment *comp, AllocKind kind) {
void initSpan(JSCompartment *comp, AllocKind kind) {
JS_ASSERT(comp->arenas.isSynchronizedFreeList(kind));
firstThingOffset = Arena::firstThingOffset(kind);
thingSize = Arena::thingSize(kind);
aheader = comp->arenas.getFirstArena(kind);
firstSpan.initAsEmpty();
span = &firstSpan;
thing = span->first;
}
void init(ArenaHeader *singleAheader) {
aheader = singleAheader;
initSpan(aheader->compartment, aheader->getAllocKind());
next();
aheader = NULL;
}
void init(JSCompartment *comp, AllocKind kind) {
initSpan(comp, kind);
aheader = comp->arenas.getFirstArena(kind);
next();
}
@ -311,13 +322,18 @@ class CellIterImpl
}
};
class CellIterUnderGC : public CellIterImpl {
class CellIterUnderGC : public CellIterImpl
{
public:
CellIterUnderGC(JSCompartment *comp, AllocKind kind) {
JS_ASSERT(comp->rt->gcRunning);
init(comp, kind);
}
CellIterUnderGC(ArenaHeader *aheader) {
JS_ASSERT(aheader->compartment->rt->gcRunning);
init(aheader);
}
};
/*
@ -325,7 +341,7 @@ class CellIterUnderGC : public CellIterImpl {
* allocations of GC things are possible and that the background finalization
* for the given thing kind is not enabled or is done.
*/
class CellIter: public CellIterImpl
class CellIter : public CellIterImpl
{
ArenaLists *lists;
AllocKind kind;
@ -335,7 +351,8 @@ class CellIter: public CellIterImpl
public:
CellIter(JSContext *cx, JSCompartment *comp, AllocKind kind)
: lists(&comp->arenas),
kind(kind) {
kind(kind)
{
#ifdef JS_THREADSAFE
JS_ASSERT(comp->arenas.doneBackgroundFinalize(kind));
#endif
@ -397,6 +414,9 @@ NewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
void *t = comp->arenas.allocateFromFreeList(kind, thingSize);
if (!t)
t = js::gc::ArenaLists::refillFreeList(cx, kind);
JS_ASSERT_IF(t && comp->needsBarrier(),
static_cast<T *>(t)->arenaHeader()->allocatedDuringIncremental);
return static_cast<T *>(t);
}
@ -419,6 +439,8 @@ TryNewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
#endif
void *t = cx->compartment->arenas.allocateFromFreeList(kind, thingSize);
JS_ASSERT_IF(t && cx->compartment->needsBarrier(),
static_cast<T *>(t)->arenaHeader()->allocatedDuringIncremental);
return static_cast<T *>(t);
}

Просмотреть файл

@ -103,7 +103,7 @@ MarkInternal(JSTracer *trc, T *thing)
* GC.
*/
if (!rt->gcCurrentCompartment || thing->compartment() == rt->gcCurrentCompartment) {
if (IS_GC_MARKING_TRACER(trc)) {
if (!trc->callback) {
PushMarkStack(static_cast<GCMarker *>(trc), thing);
} else {
void *tmp = (void *)thing;
@ -118,6 +118,12 @@ MarkInternal(JSTracer *trc, T *thing)
#endif
}
#define JS_ROOT_MARKING_ASSERT(trc) \
JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc), \
trc->runtime->gcIncrementalState == NO_INCREMENTAL || \
trc->runtime->gcIncrementalState == MARK_ROOTS);
template <typename T>
static void
MarkUnbarriered(JSTracer *trc, T *thing, const char *name)
@ -138,6 +144,7 @@ template <typename T>
static void
MarkRoot(JSTracer *trc, T **thingp, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
JS_SET_TRACING_NAME(trc, name);
MarkInternal(trc, *thingp);
}
@ -158,6 +165,7 @@ template <typename T>
static void
MarkRootRange(JSTracer *trc, size_t len, T **vec, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
for (size_t i = 0; i < len; ++i) {
JS_SET_TRACING_INDEX(trc, name, i);
MarkInternal(trc, vec[i]);
@ -246,6 +254,7 @@ MarkKind(JSTracer *trc, void *thing, JSGCTraceKind kind)
void
MarkGCThingRoot(JSTracer *trc, void *thing, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
JS_SET_TRACING_NAME(trc, name);
if (!thing)
return;
@ -273,6 +282,7 @@ MarkId(JSTracer *trc, const HeapId &id, const char *name)
void
MarkIdRoot(JSTracer *trc, const jsid &id, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
JS_SET_TRACING_NAME(trc, name);
MarkIdInternal(trc, id);
}
@ -289,6 +299,7 @@ MarkIdRange(JSTracer *trc, size_t len, HeapId *vec, const char *name)
void
MarkIdRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
for (size_t i = 0; i < len; ++i) {
JS_SET_TRACING_INDEX(trc, name, i);
MarkIdInternal(trc, vec[i]);
@ -316,6 +327,7 @@ MarkValue(JSTracer *trc, HeapValue *v, const char *name)
void
MarkValueRoot(JSTracer *trc, Value *v, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
JS_SET_TRACING_NAME(trc, name);
MarkValueInternal(trc, v);
}
@ -332,6 +344,7 @@ MarkValueRange(JSTracer *trc, size_t len, HeapValue *vec, const char *name)
void
MarkValueRootRange(JSTracer *trc, size_t len, Value *vec, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
for (size_t i = 0; i < len; ++i) {
JS_SET_TRACING_INDEX(trc, name, i);
MarkValueInternal(trc, &vec[i]);
@ -374,6 +387,10 @@ MarkCrossCompartmentValue(JSTracer *trc, HeapValue *v, const char *name)
if (rt->gcCurrentCompartment && cell->compartment() != rt->gcCurrentCompartment)
return;
/* In case we're called from a write barrier. */
if (rt->gcIncrementalCompartment && cell->compartment() != rt->gcIncrementalCompartment)
return;
MarkValue(trc, v, name);
}
}
@ -543,7 +560,7 @@ ScanLinearString(GCMarker *gcmarker, JSLinearString *str)
static void
ScanRope(GCMarker *gcmarker, JSRope *rope)
{
uintptr_t *savedTos = gcmarker->stack.tos;
ptrdiff_t savedPos = gcmarker->stack.position();
for (;;) {
JS_ASSERT(GetGCThingTraceKind(rope) == JSTRACE_STRING);
JS_ASSERT(rope->JSString::isRope());
@ -575,14 +592,14 @@ ScanRope(GCMarker *gcmarker, JSRope *rope)
}
if (next) {
rope = next;
} else if (savedTos != gcmarker->stack.tos) {
JS_ASSERT(savedTos < gcmarker->stack.tos);
} else if (savedPos != gcmarker->stack.position()) {
JS_ASSERT(savedPos < gcmarker->stack.position());
rope = reinterpret_cast<JSRope *>(gcmarker->stack.pop());
} else {
break;
}
}
JS_ASSERT(savedTos == gcmarker->stack.tos);
JS_ASSERT(savedPos == gcmarker->stack.position());
}
static inline void
@ -608,24 +625,6 @@ PushMarkStack(GCMarker *gcmarker, JSString *str)
ScanString(gcmarker, str);
}
static inline void
PushValueArray(GCMarker *gcmarker, JSObject* obj, HeapValue *start, HeapValue *end)
{
JS_ASSERT(start <= end);
uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
/* Push in the reverse order so obj will be on top. */
if (!gcmarker->stack.push(endAddr, startAddr, tagged)) {
/*
* If we cannot push the array, we trigger delay marking for the whole
* object.
*/
gcmarker->delayMarkingChildren(obj);
}
}
void
MarkChildren(JSTracer *trc, JSObject *obj)
{
@ -851,12 +850,163 @@ MarkChildren(JSTracer *trc, JSXML *xml)
}
#endif
/*
 * Push every cell in |aheader|'s arena onto the mark stack, iterating the
 * arena's cells as type T.
 */
template<typename T>
void
PushArenaTyped(GCMarker *gcmarker, ArenaHeader *aheader)
{
for (CellIterUnderGC i(aheader); !i.done(); i.next())
PushMarkStack(gcmarker, i.get<T>());
}
/*
 * Push an entire arena's cells onto the mark stack, dispatching on the
 * arena's alloc kind to pick the concrete cell type.
 */
void
PushArena(GCMarker *gcmarker, ArenaHeader *aheader)
{
switch (MapAllocToTraceKind(aheader->getAllocKind())) {
case JSTRACE_OBJECT:
PushArenaTyped<JSObject>(gcmarker, aheader);
break;
case JSTRACE_STRING:
PushArenaTyped<JSString>(gcmarker, aheader);
break;
case JSTRACE_SCRIPT:
PushArenaTyped<JSScript>(gcmarker, aheader);
break;
case JSTRACE_SHAPE:
PushArenaTyped<js::Shape>(gcmarker, aheader);
break;
case JSTRACE_BASE_SHAPE:
PushArenaTyped<js::BaseShape>(gcmarker, aheader);
break;
case JSTRACE_TYPE_OBJECT:
PushArenaTyped<js::types::TypeObject>(gcmarker, aheader);
break;
#if JS_HAS_XML_SUPPORT
case JSTRACE_XML:
PushArenaTyped<JSXML>(gcmarker, aheader);
break;
#endif
}
}
} /* namespace gc */
using namespace js::gc;
/*
 * Overlay describing the three mark-stack words of a value-array entry.
 * For a live ValueArrayTag entry the fields are (end, start, obj); after
 * saveValueRanges() converts it to a SavedValueArrayTag entry, the same
 * words hold (clasp, index, obj) — see the unions below.
 */
struct ValueArrayLayout
{
union {
HeapValue *end;    /* one past the last slot to scan (ValueArrayTag) */
js::Class *clasp;  /* object's class at save time (SavedValueArrayTag) */
};
union {
HeapValue *start;  /* first slot to scan (ValueArrayTag) */
uintptr_t index;   /* slot index of |start| (SavedValueArrayTag) */
};
JSObject *obj;     /* tagged object pointer; top word on the mark stack */
static void staticAsserts() {
/* This should have the same layout as three mark stack items. */
JS_STATIC_ASSERT(sizeof(ValueArrayLayout) == 3 * sizeof(uintptr_t));
}
};
/*
 * During incremental GC, we return from drainMarkStack without having processed
 * the entire stack. At that point, JS code can run and reallocate slot arrays
 * that are stored on the stack. To prevent this from happening, we replace all
 * ValueArrayTag stack items with SavedValueArrayTag. In the latter, slot
 * pointers are replaced with slot indexes.
 *
 * We also replace the slot array end pointer (which can be derived from the obj
 * pointer) with the object's class. During JS execution, array slowification
 * can cause the layout of slots to change. We can observe that slowification
 * happened if the class changed; in that case, we completely rescan the array.
 */
void
GCMarker::saveValueRanges()
{
/* Walk the stack from top to bottom, one tagged word at a time. */
for (uintptr_t *p = stack.tos; p > stack.stack; ) {
uintptr_t tag = *--p & StackTagMask;
if (tag == ValueArrayTag) {
/*
 * A value-array entry occupies three words (end, start, tagged obj,
 * pushed in that order); step back to its base and overlay it.
 */
p -= 2;
ValueArrayLayout *arr = reinterpret_cast<ValueArrayLayout *>(p);
JSObject *obj = arr->obj;
if (obj->getClass() == &ArrayClass) {
/* Dense array: slots live in the element vector. */
HeapValue *vp = obj->getDenseArrayElements();
JS_ASSERT(arr->start >= vp &&
arr->end == vp + obj->getDenseArrayInitializedLength());
arr->index = arr->start - vp;
} else {
/*
 * Native object: the range lies either in the fixed slots or in
 * the dynamic |slots| array; indexes for the latter are biased by
 * the fixed-slot count so restore can tell them apart.
 */
HeapValue *vp = obj->fixedSlots();
unsigned nfixed = obj->numFixedSlots();
if (arr->start >= vp && arr->start < vp + nfixed) {
JS_ASSERT(arr->end == vp + Min(nfixed, obj->slotSpan()));
arr->index = arr->start - vp;
} else {
JS_ASSERT(arr->start >= obj->slots &&
arr->end == obj->slots + obj->slotSpan() - nfixed);
arr->index = (arr->start - obj->slots) + nfixed;
}
}
arr->clasp = obj->getClass();
/* Retag in place (ValueArrayTag is the zero tag, so OR suffices). */
p[2] |= SavedValueArrayTag;
} else if (tag == SavedValueArrayTag) {
/* Already-saved entry: skip its remaining two words. */
p -= 2;
}
}
}
/*
 * Inverse of saveValueRanges() for a single entry: the caller has already
 * popped the tagged |obj| word; pop the saved slot index and class and
 * reconstruct the (*vpp, *endp) scan range from the object's current layout.
 * Returns false if the object's class changed (e.g. dense-array
 * slowification), in which case the caller must rescan the whole object.
 */
bool
GCMarker::restoreValueArray(JSObject *obj, void **vpp, void **endp)
{
uintptr_t start = stack.pop();
js::Class *clasp = reinterpret_cast<js::Class *>(stack.pop());
JS_ASSERT(obj->getClass() == clasp ||
(clasp == &ArrayClass && obj->getClass() == &SlowArrayClass));
if (clasp == &ArrayClass) {
if (obj->getClass() != &ArrayClass)
return false;
uint32_t initlen = obj->getDenseArrayInitializedLength();
HeapValue *vp = obj->getDenseArrayElements();
if (start < initlen) {
*vpp = vp + start;
*endp = vp + initlen;
} else {
/* The object shrunk, in which case no scanning is needed. */
*vpp = *endp = vp;
}
} else {
/*
 * Native object: indexes below nfixed refer to fixed slots; larger
 * ones are biased into the dynamic |slots| array (see saveValueRanges).
 */
HeapValue *vp = obj->fixedSlots();
unsigned nfixed = obj->numFixedSlots();
unsigned nslots = obj->slotSpan();
if (start < nfixed) {
*vpp = vp + start;
*endp = vp + Min(nfixed, nslots);
} else if (start < nslots) {
*vpp = obj->slots + start - nfixed;
*endp = obj->slots + nslots - nfixed;
} else {
/* The object shrunk, in which case no scanning is needed. */
*vpp = *endp = obj->slots;
}
}
JS_ASSERT(*vpp <= *endp);
return true;
}
inline void
GCMarker::processMarkStackTop()
GCMarker::processMarkStackTop(SliceBudget &budget)
{
/*
* The function uses explicit goto and implements the scanning of the
@ -885,29 +1035,46 @@ GCMarker::processMarkStackTop()
if (tag == ObjectTag) {
obj = reinterpret_cast<JSObject *>(addr);
JS_COMPARTMENT_ASSERT(runtime, obj);
goto scan_obj;
}
if (tag == TypeTag) {
ScanTypeObject(this, reinterpret_cast<types::TypeObject *>(addr));
} else if (tag == SavedValueArrayTag) {
JS_ASSERT(!(addr & Cell::CellMask));
obj = reinterpret_cast<JSObject *>(addr);
if (restoreValueArray(obj, (void **)&vp, (void **)&end))
goto scan_value_array;
else
goto scan_obj;
} else {
JS_ASSERT(tag == XmlTag);
MarkChildren(this, reinterpret_cast<JSXML *>(addr));
}
budget.step();
return;
scan_value_array:
JS_ASSERT(vp <= end);
while (vp != end) {
budget.step();
if (budget.isOverBudget()) {
pushValueArray(obj, vp, end);
return;
}
const Value &v = *vp++;
if (v.isString()) {
JSString *str = v.toString();
JS_COMPARTMENT_ASSERT_STR(runtime, str);
if (str->markIfUnmarked())
ScanString(this, str);
} else if (v.isObject()) {
JSObject *obj2 = &v.toObject();
JS_COMPARTMENT_ASSERT(runtime, obj2);
if (obj2->markIfUnmarked(getMarkColor())) {
PushValueArray(this, obj, vp, end);
pushValueArray(obj, vp, end);
obj = obj2;
goto scan_obj;
}
@ -917,6 +1084,14 @@ GCMarker::processMarkStackTop()
scan_obj:
{
JS_COMPARTMENT_ASSERT(runtime, obj);
budget.step();
if (budget.isOverBudget()) {
pushObject(obj);
return;
}
types::TypeObject *type = obj->typeFromGC();
PushMarkStack(this, type);
@ -931,6 +1106,9 @@ GCMarker::processMarkStackTop()
vp = obj->getDenseArrayElements();
end = vp + obj->getDenseArrayInitializedLength();
goto scan_value_array;
} else {
JS_ASSERT_IF(runtime->gcIncrementalState != NO_INCREMENTAL,
clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS);
}
clasp->trace(this, obj);
}
@ -943,7 +1121,7 @@ GCMarker::processMarkStackTop()
if (obj->slots) {
unsigned nfixed = obj->numFixedSlots();
if (nslots > nfixed) {
PushValueArray(this, obj, vp, vp + nfixed);
pushValueArray(obj, vp, vp + nfixed);
vp = obj->slots;
end = vp + (nslots - nfixed);
goto scan_value_array;
@ -955,15 +1133,33 @@ GCMarker::processMarkStackTop()
}
}
void
GCMarker::drainMarkStack()
bool
GCMarker::drainMarkStack(SliceBudget &budget)
{
#ifdef DEBUG
JSRuntime *rt = runtime;
rt->gcCheckCompartment = rt->gcCurrentCompartment;
struct AutoCheckCompartment {
JSRuntime *runtime;
AutoCheckCompartment(JSRuntime *rt) : runtime(rt) {
runtime->gcCheckCompartment = runtime->gcCurrentCompartment;
}
~AutoCheckCompartment() { runtime->gcCheckCompartment = NULL; }
} acc(rt);
#endif
if (budget.isOverBudget())
return false;
for (;;) {
while (!stack.isEmpty())
processMarkStackTop();
while (!stack.isEmpty()) {
processMarkStackTop(budget);
if (budget.isOverBudget()) {
saveValueRanges();
return false;
}
}
if (!hasDelayedChildren())
break;
@ -972,10 +1168,13 @@ GCMarker::drainMarkStack()
* above tracing. Don't do this until we're done with everything
* else.
*/
markDelayedChildren();
if (!markDelayedChildren(budget)) {
saveValueRanges();
return false;
}
}
rt->gcCheckCompartment = NULL;
return true;
}
void

Просмотреть файл

@ -146,7 +146,11 @@ MarkChildren(JSTracer *trc, JSObject *obj);
void
MarkCycleCollectorChildren(JSTracer *trc, const Shape *shape);
void
PushArena(GCMarker *gcmarker, ArenaHeader *aheader);
/*** Generic ***/
/*
* The Mark() functions interface should only be used by code that must be
* templated. Other uses should use the more specific, type-named functions.

Просмотреть файл

@ -2195,7 +2195,7 @@ TypeCompartment::nukeTypes(JSContext *cx)
#ifdef JS_THREADSAFE
AutoLockGC maybeLock;
if (!cx->runtime->gcMarkAndSweep)
if (!cx->runtime->gcRunning)
maybeLock.lock(cx->runtime);
#endif

Просмотреть файл

@ -1147,7 +1147,7 @@ js::AssertValidPropertyCacheHit(JSContext *cx,
jsbytecode *pc;
cx->stack.currentScript(&pc);
uint32_t sample = cx->runtime->gcNumber;
uint64_t sample = cx->runtime->gcNumber;
PropertyCacheEntry savedEntry = *entry;
PropertyName *name = GetNameFromBytecode(cx, pc, JSOp(*pc), js_CodeSpec[*pc]);
@ -1254,7 +1254,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
{
JSAutoResolveFlags rf(cx, RESOLVE_INFER);
gc::VerifyBarriers(cx, true);
gc::MaybeVerifyBarriers(cx, true);
JS_ASSERT(!cx->compartment->activeAnalysis);
@ -1289,7 +1289,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
# define DO_OP() JS_BEGIN_MACRO \
CHECK_PCCOUNT_INTERRUPTS(); \
js::gc::VerifyBarriers(cx); \
js::gc::MaybeVerifyBarriers(cx); \
JS_EXTENSION_(goto *jumpTable[op]); \
JS_END_MACRO
# define DO_NEXT_OP(n) JS_BEGIN_MACRO \
@ -1566,7 +1566,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
do_op:
CHECK_PCCOUNT_INTERRUPTS();
js::gc::VerifyBarriers(cx);
js::gc::MaybeVerifyBarriers(cx);
switchOp = intN(op) | switchMask;
do_switch:
switch (switchOp) {
@ -4424,6 +4424,6 @@ END_CASE(JSOP_ARRAYPUSH)
leave_on_safe_point:
#endif
gc::VerifyBarriers(cx, true);
gc::MaybeVerifyBarriers(cx, true);
return interpReturnOK;
}

Просмотреть файл

@ -89,7 +89,7 @@ static JSObject *iterator_iterator(JSContext *cx, JSObject *obj, JSBool keysonly
Class js::IteratorClass = {
"Iterator",
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_CACHED_PROTO(JSProto_Iterator),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
@ -1419,7 +1419,7 @@ generator_trace(JSTracer *trc, JSObject *obj)
Class js::GeneratorClass = {
"Generator",
JSCLASS_HAS_PRIVATE,
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */

Просмотреть файл

@ -2763,6 +2763,13 @@ NewObject(JSContext *cx, Class *clasp, types::TypeObject *type, JSObject *parent
if (!obj)
return NULL;
/*
* This will cancel an already-running incremental GC from doing any more
* slices, and it will prevent any future incremental GCs.
*/
if (clasp->trace && !(clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS))
cx->runtime->gcIncrementalEnabled = false;
Probes::createObject(cx, obj);
return obj;
}
@ -3475,7 +3482,7 @@ JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved &
a->slots = reserved.newaslots;
a->initSlotRange(0, reserved.bvals.begin(), bcap);
if (a->hasPrivate())
a->setPrivate(bpriv);
a->initPrivate(bpriv);
if (b->isNative())
b->shape_->setNumFixedSlots(reserved.newbfixed);
@ -3485,7 +3492,7 @@ JSObject::TradeGuts(JSContext *cx, JSObject *a, JSObject *b, TradeGutsReserved &
b->slots = reserved.newbslots;
b->initSlotRange(0, reserved.avals.begin(), acap);
if (b->hasPrivate())
b->setPrivate(apriv);
b->initPrivate(apriv);
/* Make sure the destructor for reserved doesn't free the slots. */
reserved.newaslots = NULL;

Просмотреть файл

@ -954,6 +954,7 @@ struct JSObject : js::gc::Cell
inline bool hasPrivate() const;
inline void *getPrivate() const;
inline void setPrivate(void *data);
inline void initPrivate(void *data);
/* Access private data for an object with a known number of fixed slots. */
inline void *getPrivate(size_t nfixed) const;
@ -1355,6 +1356,7 @@ struct JSObject : js::gc::Cell
static inline void writeBarrierPre(JSObject *obj);
static inline void writeBarrierPost(JSObject *obj, void *addr);
static inline void readBarrier(JSObject *obj);
inline void privateWriteBarrierPre(void **oldval);
inline void privateWriteBarrierPost(void **oldval);

Просмотреть файл

@ -119,6 +119,12 @@ JSObject::setPrivate(void *data)
privateWriteBarrierPost(pprivate);
}
/*
 * Store the private pointer without invoking write barriers (contrast
 * setPrivate, which fires them). Intended for initialization of storage
 * that holds no prior value a pre-barrier would need to mark.
 */
inline void
JSObject::initPrivate(void *data)
{
privateRef(numFixedSlots()) = data;
}
inline bool
JSObject::enumerate(JSContext *cx, JSIterateOp iterop, js::Value *statep, jsid *idp)
{
@ -602,20 +608,32 @@ JSObject::moveDenseArrayElements(uintN dstStart, uintN srcStart, uintN count)
JS_ASSERT(srcStart + count <= getDenseArrayInitializedLength());
/*
* Use a custom write barrier here since it's performance sensitive. We
* only want to barrier the elements that are being overwritten.
*/
uintN markStart, markEnd;
if (dstStart > srcStart) {
markStart = js::Max(srcStart + count, dstStart);
markEnd = dstStart + count;
* Using memmove here would skip write barriers. Also, we need to consider
* an array containing [A, B, C], in the following situation:
*
* 1. Incremental GC marks slot 0 of array (i.e., A), then returns to JS code.
* 2. JS code moves slots 1..2 into slots 0..1, so it contains [B, C, C].
* 3. Incremental GC finishes by marking slots 1 and 2 (i.e., C).
*
* Since normal marking never happens on B, it is very important that the
* write barrier is invoked here on B, despite the fact that it exists in
* the array before and after the move.
*/
if (compartment()->needsBarrier()) {
if (dstStart < srcStart) {
js::HeapValue *dst = elements + dstStart;
js::HeapValue *src = elements + srcStart;
for (unsigned i = 0; i < count; i++, dst++, src++)
*dst = *src;
} else {
js::HeapValue *dst = elements + dstStart + count - 1;
js::HeapValue *src = elements + srcStart + count - 1;
for (unsigned i = 0; i < count; i++, dst--, src--)
*dst = *src;
}
} else {
markStart = dstStart;
markEnd = js::Min(dstStart + count, srcStart);
memmove(elements + dstStart, elements + srcStart, count * sizeof(js::Value));
}
prepareElementRangeForOverwrite(markStart, markEnd);
memmove(elements + dstStart, elements + srcStart, count * sizeof(js::Value));
}
inline void
@ -2126,6 +2144,18 @@ JSObject::writeBarrierPre(JSObject *obj)
#endif
}
/*
 * Incremental-GC read barrier: if the object's compartment is currently in
 * an incremental mark phase (needsBarrier()), mark the object so reads
 * cannot smuggle an unmarked object past the collector. Compiled out when
 * JSGC_INCREMENTAL is not defined.
 */
inline void
JSObject::readBarrier(JSObject *obj)
{
#ifdef JSGC_INCREMENTAL
JSCompartment *comp = obj->compartment();
if (comp->needsBarrier()) {
/* Barriers only run between slices, never inside a GC. */
JS_ASSERT(!comp->rt->gcRunning);
MarkObjectUnbarriered(comp->barrierTracer(), obj, "read barrier");
}
#endif
}
inline void
JSObject::writeBarrierPost(JSObject *obj, void *addr)
{

Просмотреть файл

@ -282,7 +282,7 @@ PropertyCache::purge(JSContext *cx)
#ifdef JS_THREADSAFE
fprintf(fp, "thread %lu, ", (unsigned long) cx->thread->id);
#endif
fprintf(fp, "GC %u\n", cx->runtime->gcNumber);
fprintf(fp, "GC %lu\n", (unsigned long)cx->runtime->gcNumber);
# define P(mem) fprintf(fp, "%11s %10lu\n", #mem, (unsigned long)mem)
P(fills);

Просмотреть файл

@ -1311,7 +1311,7 @@ proxy_TypeOf(JSContext *cx, JSObject *proxy)
JS_FRIEND_DATA(Class) js::ObjectProxyClass = {
"Proxy",
Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(4),
Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(4),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */
@ -1367,7 +1367,7 @@ JS_FRIEND_DATA(Class) js::ObjectProxyClass = {
JS_FRIEND_DATA(Class) js::OuterWindowProxyClass = {
"Proxy",
Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(4),
Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(4),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */
@ -1445,7 +1445,7 @@ proxy_Construct(JSContext *cx, uintN argc, Value *vp)
JS_FRIEND_DATA(Class) js::FunctionProxyClass = {
"Proxy",
Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(6),
Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(6),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */

Просмотреть файл

@ -2180,6 +2180,7 @@ Class ArrayBuffer::slowClass = {
Class js::ArrayBufferClass = {
"ArrayBuffer",
JSCLASS_HAS_PRIVATE |
JSCLASS_IMPLEMENTS_BARRIERS |
Class::NON_NATIVE |
JSCLASS_HAS_RESERVED_SLOTS(ARRAYBUFFER_RESERVED_SLOTS) |
JSCLASS_HAS_CACHED_PROTO(JSProto_ArrayBuffer),
@ -2298,7 +2299,7 @@ JSFunctionSpec _typedArray::jsfuncs[] = { \
{ \
#_typedArray, \
JSCLASS_HAS_RESERVED_SLOTS(TypedArray::FIELD_MAX) | \
JSCLASS_HAS_PRIVATE | \
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | \
JSCLASS_FOR_OF_ITERATION | \
Class::NON_NATIVE, \
JS_PropertyStub, /* addProperty */ \

Просмотреть файл

@ -62,7 +62,7 @@ bool
WeakMapBase::markAllIteratively(JSTracer *tracer)
{
bool markedAny = false;
JSRuntime *rt = tracer->context->runtime;
JSRuntime *rt = tracer->runtime;
for (WeakMapBase *m = rt->gcWeakMapList; m; m = m->next) {
if (m->markIteratively(tracer))
markedAny = true;
@ -73,7 +73,7 @@ WeakMapBase::markAllIteratively(JSTracer *tracer)
void
WeakMapBase::sweepAll(JSTracer *tracer)
{
JSRuntime *rt = tracer->context->runtime;
JSRuntime *rt = tracer->runtime;
for (WeakMapBase *m = rt->gcWeakMapList; m; m = m->next)
m->sweep(tracer);
}
@ -314,8 +314,16 @@ WeakMap_mark(JSTracer *trc, JSObject *obj)
static void
WeakMap_finalize(JSContext *cx, JSObject *obj)
{
ObjectValueMap *map = GetObjectMap(obj);
cx->delete_(map);
if (ObjectValueMap *map = GetObjectMap(obj)) {
map->check();
#ifdef DEBUG
map->~ObjectValueMap();
memset(map, 0xdc, sizeof(ObjectValueMap));
cx->free_(map);
#else
cx->delete_(map);
#endif
}
}
static JSBool
@ -331,7 +339,7 @@ WeakMap_construct(JSContext *cx, uintN argc, Value *vp)
Class js::WeakMapClass = {
"WeakMap",
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_CACHED_PROTO(JSProto_WeakMap),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */

Просмотреть файл

@ -127,7 +127,7 @@ class WeakMapBase {
// Add ourselves to the list if we are not already in the list. We can already
// be in the list if the weak map is marked more than once due to delayed marking.
if (next == WeakMapNotInList) {
JSRuntime *rt = tracer->context->runtime;
JSRuntime *rt = tracer->runtime;
next = rt->gcWeakMapList;
rt->gcWeakMapList = this;
}
@ -156,6 +156,8 @@ class WeakMapBase {
// Trace all delayed weak map bindings. Used by the cycle collector.
static void traceAllMappings(WeakMapTracer *tracer);
void check() { JS_ASSERT(next == WeakMapNotInList); }
// Remove everything from the live weak map list.
static void resetWeakMapList(JSRuntime *rt);

Просмотреть файл

@ -5369,7 +5369,7 @@ out:
JS_FRIEND_DATA(Class) js::XMLClass = {
js_XML_str,
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_CACHED_PROTO(JSProto_XML),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
@ -7922,7 +7922,7 @@ xmlfilter_finalize(JSContext *cx, JSObject *obj)
Class js_XMLFilterClass = {
"XMLFilter",
JSCLASS_HAS_PRIVATE | JSCLASS_IS_ANONYMOUS,
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_IS_ANONYMOUS,
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */

Просмотреть файл

@ -3924,7 +3924,7 @@ void
mjit::Compiler::interruptCheckHelper()
{
Jump jump;
if (cx->runtime->gcZeal() >= js::gc::ZealVerifierThreshold) {
if (cx->runtime->gcZeal() == js::gc::ZealVerifierValue) {
/* For barrier verification, always take the interrupt so we can verify. */
jump = masm.jump();
} else {
@ -6892,7 +6892,9 @@ mjit::Compiler::jsop_regexp()
!cx->typeInferenceEnabled() ||
analysis->localsAliasStack() ||
types::TypeSet::HasObjectFlags(cx, globalObj->getType(cx),
types::OBJECT_FLAG_REGEXP_FLAGS_SET)) {
types::OBJECT_FLAG_REGEXP_FLAGS_SET) ||
cx->runtime->gcIncrementalState == gc::MARK)
{
prepareStubCall(Uses(0));
masm.move(ImmPtr(obj), Registers::ArgReg1);
INLINE_STUBCALL(stubs::RegExp, REJOIN_FALLTHROUGH);
@ -6946,10 +6948,11 @@ mjit::Compiler::jsop_regexp()
}
/*
* Force creation of the RegExpShared in the script's RegExpObject
* so that we grab it in the getNewObject template copy. Note that
* JIT code is discarded on every GC, which permits us to burn in
* the pointer to the RegExpShared.
* Force creation of the RegExpShared in the script's RegExpObject so that
* we grab it in the getNewObject template copy. Note that JIT code is
* discarded on every GC, which permits us to burn in the pointer to the
* RegExpShared. We don't do this during an incremental
* GC, since we don't discard JIT code after every marking slice.
*/
if (!reobj->getShared(cx))
return false;

Просмотреть файл

@ -484,7 +484,7 @@ private:
bool hasGlobalReallocation;
bool oomInVector; // True if we have OOM'd appending to a vector.
bool overflowICSpace; // True if we added a constant pool in a reserved space.
uint32_t gcNumber;
uint64_t gcNumber;
enum { NoApplyTricks, LazyArgsObj } applyTricks;
PCLengthEntry *pcLengths;

Просмотреть файл

@ -402,7 +402,7 @@ struct RecompilationMonitor
unsigned frameExpansions;
/* If a GC occurs it may discard jit code on the stack. */
unsigned gcNumber;
uint64_t gcNumber;
RecompilationMonitor(JSContext *cx)
: cx(cx),

Просмотреть файл

@ -102,7 +102,7 @@ class PICStubCompiler : public BaseCompiler
JSScript *script;
ic::PICInfo &pic;
void *stub;
uint32_t gcNumber;
uint64_t gcNumber;
public:
bool canCallHook;

Просмотреть файл

@ -878,7 +878,7 @@ stubs::DebuggerStatement(VMFrame &f, jsbytecode *pc)
void JS_FASTCALL
stubs::Interrupt(VMFrame &f, jsbytecode *pc)
{
gc::VerifyBarriers(f.cx);
gc::MaybeVerifyBarriers(f.cx);
if (!js_HandleExecutionInterrupt(f.cx))
THROW();

Просмотреть файл

@ -1286,6 +1286,7 @@ static const struct ParamPair {
{"maxMallocBytes", JSGC_MAX_MALLOC_BYTES},
{"gcBytes", JSGC_BYTES},
{"gcNumber", JSGC_NUMBER},
{"sliceTimeBudget", JSGC_SLICE_TIME_BUDGET}
};
static JSBool
@ -1427,6 +1428,35 @@ ScheduleGC(JSContext *cx, uintN argc, jsval *vp)
*vp = JSVAL_VOID;
return JS_TRUE;
}
/*
 * verifybarriers() shell builtin: starts a run of the write-barrier
 * verifier, or ends the run that is currently active. Returns undefined.
 */
static JSBool
VerifyBarriers(JSContext *cx, uintN argc, jsval *vp)
{
    /* The builtin never produces a value; fill the rval slot up front. */
    *vp = JSVAL_VOID;
    gc::VerifyBarriers(cx);
    return JS_TRUE;
}
/*
 * gcslice(n) shell builtin: runs one incremental GC slice whose work budget
 * is roughly n objects. Requires exactly one argument; reports a shell
 * usage error otherwise. Returns undefined.
 */
static JSBool
GCSlice(JSContext *cx, uintN argc, jsval *vp)
{
    if (argc != 1) {
        /* Pick the message matching whether we got too few or too many args. */
        uintN errnum = (argc < 1) ? JSSMSG_NOT_ENOUGH_ARGS : JSSMSG_TOO_MANY_ARGS;
        JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL, errnum, "gcslice");
        return JS_FALSE;
    }

    /* vp[2] is the first actual argument in the old-style JSNative layout. */
    uint32_t limit;
    if (!JS_ValueToECMAUint32(cx, vp[2], &limit))
        return JS_FALSE;

    GCDebugSlice(cx, limit);
    *vp = JSVAL_VOID;
    return JS_TRUE;
}
#endif /* JS_GC_ZEAL */
typedef struct JSCountHeapNode JSCountHeapNode;
@ -1473,7 +1503,7 @@ CountHeapNotify(JSTracer *trc, void **thingp, JSGCTraceKind kind)
if (node) {
countTracer->recycleList = node->next;
} else {
node = (JSCountHeapNode *) JS_malloc(trc->context, sizeof *node);
node = (JSCountHeapNode *) js_malloc(sizeof *node);
if (!node) {
countTracer->ok = JS_FALSE;
return;
@ -1575,7 +1605,7 @@ CountHeap(JSContext *cx, uintN argc, jsval *vp)
}
while ((node = countTracer.recycleList) != NULL) {
countTracer.recycleList = node->next;
JS_free(cx, node);
js_free(node);
}
JS_DHashTableFinish(&countTracer.visited);
@ -4001,6 +4031,8 @@ static JSFunctionSpec shell_functions[] = {
#ifdef JS_GC_ZEAL
JS_FN("gczeal", GCZeal, 2,0),
JS_FN("schedulegc", ScheduleGC, 1,0),
JS_FN("verifybarriers", VerifyBarriers, 0,0),
JS_FN("gcslice", GCSlice, 1,0),
#endif
JS_FN("internalConst", InternalConst, 1,0),
JS_FN("setDebug", SetDebug, 1,0),
@ -4114,6 +4146,8 @@ static const char *const shell_help_messages[] = {
" How zealous the garbage collector should be",
"schedulegc(num, [compartmentGC?])\n"
" Schedule a GC to happen after num allocations",
"verifybarriers() Start or end a run of the write barrier verifier",
"gcslice(n) Run an incremental GC slice that marks ~n objects",
#endif
"internalConst(name)\n"
" Query an internal constant for the engine. See InternalConst source for the\n"
@ -5457,7 +5491,7 @@ main(int argc, char **argv, char **envp)
if (!cx)
return 1;
JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_COMPARTMENT);
JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL);
JS_SetGCParameterForThread(cx, JSGC_MAX_CODE_CACHE_BYTES, 16 * 1024 * 1024);
/* Must be done before creating the global object */

Просмотреть файл

@ -1323,7 +1323,9 @@ Debugger::finalize(JSContext *cx, JSObject *obj)
}
Class Debugger::jsclass = {
"Debugger", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUG_COUNT),
"Debugger",
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUG_COUNT),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Debugger::finalize,
NULL, /* reserved0 */
@ -1854,7 +1856,9 @@ DebuggerScript_trace(JSTracer *trc, JSObject *obj)
}
Class DebuggerScript_class = {
"Script", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGSCRIPT_COUNT),
"Script",
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGSCRIPT_COUNT),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
NULL, /* reserved0 */
@ -2956,7 +2960,9 @@ DebuggerObject_trace(JSTracer *trc, JSObject *obj)
}
Class DebuggerObject_class = {
"Object", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGOBJECT_COUNT),
"Object",
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGOBJECT_COUNT),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
NULL, /* reserved0 */
@ -3598,7 +3604,9 @@ DebuggerEnv_trace(JSTracer *trc, JSObject *obj)
}
Class DebuggerEnv_class = {
"Environment", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGENV_COUNT),
"Environment",
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGENV_COUNT),
JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
NULL, /* reserved0 */

Просмотреть файл

@ -80,6 +80,14 @@ RegExpObject::getShared(JSContext *cx)
return createShared(cx);
}
/*
 * Install |shared| (which may be NULL) as this regexp's compiled
 * representation. A non-null RegExpShared is stamped with the runtime's
 * current GC number first (via prepareForUse) so the compartment sweep
 * will not free it out from under this object.
 */
inline void
RegExpObject::setShared(JSContext *cx, RegExpShared *shared)
{
    if (shared != NULL)
        shared->prepareForUse(cx);
    JSObject::setPrivate(shared);
}
inline void
RegExpObject::setLastIndex(const Value &v)
{
@ -148,6 +156,12 @@ RegExpToShared(JSContext *cx, JSObject &obj)
return Proxy::regexp_toShared(cx, &obj);
}
/*
 * Record that this RegExpShared is being installed into a RegExpObject:
 * remember the runtime's current GC number. The compartment sweep compares
 * this against gcStartNumber to decide whether the shared may be deleted
 * (see the RegExpShared lifetime comment in RegExpObject.h).
 */
inline void
RegExpShared::prepareForUse(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    gcNumberWhenUsed = rt->gcNumber;
}
} /* namespace js */
#endif

Просмотреть файл

@ -62,7 +62,7 @@ RegExpObjectBuilder::RegExpObjectBuilder(JSContext *cx, RegExpObject *reobj)
: cx(cx), reobj_(reobj)
{
if (reobj_)
reobj_->setPrivate(NULL);
reobj_->setShared(cx, NULL);
}
bool
@ -74,7 +74,7 @@ RegExpObjectBuilder::getOrCreate()
JSObject *obj = NewBuiltinClassInstance(cx, &RegExpClass);
if (!obj)
return false;
obj->setPrivate(NULL);
obj->initPrivate(NULL);
reobj_ = &obj->asRegExp();
return true;
@ -88,7 +88,7 @@ RegExpObjectBuilder::getOrCreateClone(RegExpObject *proto)
JSObject *clone = NewObjectWithGivenProto(cx, &RegExpClass, proto, proto->getParent());
if (!clone)
return false;
clone->setPrivate(NULL);
clone->initPrivate(NULL);
reobj_ = &clone->asRegExp();
return true;
@ -103,7 +103,7 @@ RegExpObjectBuilder::build(JSAtom *source, RegExpShared &shared)
if (!reobj_->init(cx, source, shared.getFlags()))
return NULL;
reobj_->setPrivate(&shared);
reobj_->setShared(cx, &shared);
return reobj_;
}
@ -330,13 +330,18 @@ RegExpCode::execute(JSContext *cx, const jschar *chars, size_t length, size_t st
static void
regexp_trace(JSTracer *trc, JSObject *obj)
{
if (trc->runtime->gcRunning)
/*
* We have to check both conditions, since:
* 1. During TraceRuntime, gcRunning is set
* 2. When a write barrier executes, IS_GC_MARKING_TRACER is true.
*/
if (trc->runtime->gcRunning && IS_GC_MARKING_TRACER(trc))
obj->setPrivate(NULL);
}
Class js::RegExpClass = {
js_RegExp_str,
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(RegExpObject::RESERVED_SLOTS) |
JSCLASS_HAS_CACHED_PROTO(JSProto_RegExp),
JS_PropertyStub, /* addProperty */
@ -360,8 +365,8 @@ Class js::RegExpClass = {
regexp_trace
};
RegExpShared::RegExpShared(RegExpFlag flags)
: parenCount(0), flags(flags), activeUseCount(0)
RegExpShared::RegExpShared(JSRuntime *rt, RegExpFlag flags)
: parenCount(0), flags(flags), activeUseCount(0), gcNumberWhenUsed(rt->gcNumber)
{}
RegExpObject *
@ -402,7 +407,7 @@ RegExpObject::createShared(JSContext *cx)
if (!shared)
return NULL;
setPrivate(shared);
setShared(cx, shared);
return shared;
}
@ -616,11 +621,12 @@ RegExpCompartment::init(JSContext *cx)
}
void
RegExpCompartment::purge()
RegExpCompartment::sweep(JSRuntime *rt)
{
for (Map::Enum e(map_); !e.empty(); e.popFront()) {
/* See the comment on RegExpShared lifetime in RegExpObject.h. */
RegExpShared *shared = e.front().value;
if (shared->activeUseCount == 0) {
if (shared->activeUseCount == 0 && shared->gcNumberWhenUsed < rt->gcStartNumber) {
Foreground::delete_(shared);
e.removeFront();
}
@ -630,14 +636,14 @@ RegExpCompartment::purge()
inline RegExpShared *
RegExpCompartment::get(JSContext *cx, JSAtom *keyAtom, JSAtom *source, RegExpFlag flags, Type type)
{
DebugOnly<size_t> gcNumberBefore = cx->runtime->gcNumber;
DebugOnly<uint64_t> gcNumberBefore = cx->runtime->gcNumber;
Key key(keyAtom, flags, type);
Map::AddPtr p = map_.lookupForAdd(key);
if (p)
return p->value;
RegExpShared *shared = cx->runtime->new_<RegExpShared>(flags);
RegExpShared *shared = cx->runtime->new_<RegExpShared>(cx->runtime, flags);
if (!shared || !shared->compile(cx, source))
goto error;

Просмотреть файл

@ -169,6 +169,7 @@ class RegExpObject : public JSObject
inline RegExpShared &shared() const;
inline RegExpShared *maybeShared();
inline RegExpShared *getShared(JSContext *cx);
inline void setShared(JSContext *cx, RegExpShared *shared);
private:
friend class RegExpObjectBuilder;
@ -190,6 +191,9 @@ class RegExpObject : public JSObject
RegExpObject() MOZ_DELETE;
RegExpObject &operator=(const RegExpObject &reo) MOZ_DELETE;
/* Call setShared in preference to setPrivate. */
void setPrivate(void *priv) MOZ_DELETE;
};
class RegExpObjectBuilder
@ -293,7 +297,26 @@ class RegExpCode
} /* namespace detail */
/* The compiled representation of a regexp. */
/*
* A RegExpShared is the compiled representation of a regexp. A RegExpShared is
* pointed to by potentially multiple RegExpObjects. Additionally, C++ code may
* have pointers to RegExpShareds on the stack. The RegExpShareds are tracked in
* a RegExpCompartment hashtable, and most are destroyed on every GC.
*
* During a GC, the trace hook for RegExpObject clears any pointers to
* RegExpShareds so that there will be no dangling pointers when they are
* deleted. However, some RegExpShareds are not deleted:
*
* 1. Any RegExpShared with pointers from the C++ stack is not deleted.
* 2. Any RegExpShared that was installed in a RegExpObject during an
* incremental GC is not deleted. This is because the RegExpObject may have
* been traced through before the new RegExpShared was installed, in which
* case deleting the RegExpShared would turn the RegExpObject's reference
* into a dangling pointer.
*
* The activeUseCount and gcNumberWhenUsed fields are used to track these two
* conditions.
*/
class RegExpShared
{
friend class RegExpCompartment;
@ -301,11 +324,12 @@ class RegExpShared
detail::RegExpCode code;
uintN parenCount;
RegExpFlag flags;
size_t activeUseCount;
size_t activeUseCount; /* See comment above. */
uint64_t gcNumberWhenUsed; /* See comment above. */
bool compile(JSContext *cx, JSAtom *source);
RegExpShared(RegExpFlag flags);
RegExpShared(JSRuntime *rt, RegExpFlag flags);
JS_DECLARE_ALLOCATION_FRIENDS_FOR_PRIVATE_CONSTRUCTOR;
public:
@ -338,6 +362,9 @@ class RegExpShared
RegExpShared &operator*() { JS_ASSERT(initialized()); return *re_; }
};
/* Called when a RegExpShared is installed into a RegExpObject. */
inline void prepareForUse(JSContext *cx);
/* Primary interface: run this regular expression on the given string. */
RegExpRunStatus
@ -388,7 +415,7 @@ class RegExpCompartment
~RegExpCompartment();
bool init(JSContext *cx);
void purge();
void sweep(JSRuntime *rt);
/* Return a regexp corresponding to the given (source, flags) pair. */
RegExpShared *get(JSContext *cx, JSAtom *source, RegExpFlag flags);

Просмотреть файл

@ -71,7 +71,7 @@ resc_trace(JSTracer *trc, JSObject *obj)
Class js::RegExpStaticsClass = {
"RegExpStatics",
JSCLASS_HAS_PRIVATE,
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */

Просмотреть файл

@ -532,6 +532,15 @@ StackSpace::mark(JSTracer *trc)
}
}
void
StackSpace::markActiveCompartments()
{
for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev())
MarkCompartmentActive(fp);
}
}
JS_FRIEND_API(bool)
StackSpace::ensureSpaceSlow(JSContext *cx, MaybeReportError report, Value *from, ptrdiff_t nvals,
JSCompartment *dest) const

Просмотреть файл

@ -1555,6 +1555,9 @@ class StackSpace
void mark(JSTracer *trc);
void markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
/* Called during GC: sets active flag on compartments with active frames. */
void markActiveCompartments();
/* We only report the committed size; uncommitted size is uninteresting. */
JS_FRIEND_API(size_t) sizeOfCommitted();
};

Просмотреть файл

@ -398,7 +398,7 @@ enum nsGCType {
};
%}
[uuid(686bb1d0-4711-11e1-b86c-0800200c9a66)]
[uuid(e92bf5e0-494c-11e1-b86c-0800200c9a66)]
interface nsIXPConnect : nsISupports
{
%{ C++
@ -734,6 +734,12 @@ interface nsIXPConnect : nsISupports
*/
void GarbageCollect(in PRUint32 reason, in PRUint32 kind);
/**
* Signals a good place to do an incremental GC slice, because the
* browser is drawing a frame.
*/
void NotifyDidPaint();
/**
* Define quick stubs on the given object, @a proto.
*

Просмотреть файл

@ -604,7 +604,8 @@ void XPCWrappedNativeTearOff::SetJSObject(JSObject* JSObj)
inline
XPCWrappedNativeTearOff::~XPCWrappedNativeTearOff()
{
NS_ASSERTION(!(GetInterface()||GetNative()||GetJSObjectPreserveColor()), "tearoff not empty in dtor");
NS_ASSERTION(!(GetInterface()||GetNative()||GetJSObjectPreserveColor()),
"tearoff not empty in dtor");
}
/***************************************************************************/

Просмотреть файл

@ -911,6 +911,8 @@ JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status)
#ifdef XPC_TRACK_DEFERRED_RELEASES
printf("XPC - End deferred Releases\n");
#endif
self->GetXPConnect()->ClearGCBeforeCC();
break;
}
default:
@ -1890,6 +1892,18 @@ AccumulateTelemetryCallback(int id, uint32_t sample)
case JS_TELEMETRY_GC_SWEEP_MS:
Telemetry::Accumulate(Telemetry::GC_SWEEP_MS, sample);
break;
case JS_TELEMETRY_GC_SLICE_MS:
Telemetry::Accumulate(Telemetry::GC_SLICE_MS, sample);
break;
case JS_TELEMETRY_GC_MMU_50:
Telemetry::Accumulate(Telemetry::GC_MMU_50, sample);
break;
case JS_TELEMETRY_GC_RESET:
Telemetry::Accumulate(Telemetry::GC_RESET, sample);
break;
case JS_TELEMETRY_GC_INCREMENTAL_DISABLED:
Telemetry::Accumulate(Telemetry::GC_INCREMENTAL_DISABLED, sample);
break;
}
}

Просмотреть файл

@ -406,8 +406,6 @@ nsXPConnect::Collect(PRUint32 reason, PRUint32 kind)
// To improve debugging, if DEBUG_CC is defined all JS objects are
// traversed.
mNeedGCBeforeCC = false;
XPCCallContext ccx(NATIVE_CALLER);
if (!ccx.IsValid())
return;
@ -424,6 +422,8 @@ nsXPConnect::Collect(PRUint32 reason, PRUint32 kind)
js::gcreason::Reason gcreason = (js::gcreason::Reason)reason;
if (kind == nsGCShrinking) {
js::ShrinkingGC(cx, gcreason);
} else if (kind == nsGCIncremental) {
js::IncrementalGC(cx, gcreason);
} else {
MOZ_ASSERT(kind == nsGCNormal);
js::GCForReason(cx, gcreason);
@ -2825,6 +2825,23 @@ nsXPConnect::GetTelemetryValue(JSContext *cx, jsval *rval)
return NS_OK;
}
/*
 * nsIXPConnect::NotifyDidPaint: the browser just drew a frame, which is a
 * good moment to run an incremental GC slice. Bails out cheaply when the
 * engine does not want a slice, so the paint path stays inexpensive.
 */
NS_IMETHODIMP
nsXPConnect::NotifyDidPaint()
{
    /* Cheap early-out: only build a call context if a slice is wanted. */
    if (!js::WantGCSlice(mRuntime->GetJSRuntime()))
        return NS_OK;

    XPCCallContext ccx(NATIVE_CALLER);
    if (!ccx.IsValid())
        return UnexpectedFailure(NS_ERROR_FAILURE);

    js::NotifyDidPaint(ccx.GetJSContext());
    return NS_OK;
}
/* These are here to be callable from a debugger */
JS_BEGIN_EXTERN_C
JS_EXPORT_API(void) DumpJSStack()

Просмотреть файл

@ -318,7 +318,8 @@ typedef nsDataHashtable<xpc::PtrAndPrincipalHashKey, JSCompartment *> XPCCompart
return (result || !src) ? NS_OK : NS_ERROR_OUT_OF_MEMORY
#define WRAPPER_SLOTS (JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(1))
#define WRAPPER_SLOTS (JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | \
JSCLASS_HAS_RESERVED_SLOTS(1))
#define INVALID_OBJECT ((JSObject *)1)
@ -520,6 +521,7 @@ public:
JSBool IsShuttingDown() const {return mShuttingDown;}
void EnsureGCBeforeCC() { mNeedGCBeforeCC = true; }
void ClearGCBeforeCC() { mNeedGCBeforeCC = false; }
nsresult GetInfoForIID(const nsIID * aIID, nsIInterfaceInfo** info);
nsresult GetInfoForName(const char * name, nsIInterfaceInfo** info);

Просмотреть файл

@ -75,7 +75,8 @@ xpc_CreateMTGlobalObject(JSContext *cx, JSClass *clasp,
#define XPCONNECT_GLOBAL_FLAGS \
JSCLASS_XPCONNECT_GLOBAL | JSCLASS_HAS_PRIVATE | \
JSCLASS_PRIVATE_IS_NSISUPPORTS | JSCLASS_GLOBAL_FLAGS_WITH_SLOTS(1)
JSCLASS_PRIVATE_IS_NSISUPPORTS | JSCLASS_IMPLEMENTS_BARRIERS | \
JSCLASS_GLOBAL_FLAGS_WITH_SLOTS(1)
void
TraceXPCGlobal(JSTracer *trc, JSObject *obj);
@ -182,8 +183,12 @@ xpc_UnmarkGrayObjectRecursive(JSObject* obj);
inline void
xpc_UnmarkGrayObject(JSObject *obj)
{
if (obj && xpc_IsGrayGCThing(obj))
xpc_UnmarkGrayObjectRecursive(obj);
if (obj) {
if (xpc_IsGrayGCThing(obj))
xpc_UnmarkGrayObjectRecursive(obj);
else if (js::IsIncrementalBarrierNeededOnObject(obj))
js::IncrementalReferenceBarrier(obj);
}
}
// If aVariant is an XPCVariant, this marks the object to be in aGeneration.

Просмотреть файл

@ -5428,6 +5428,24 @@ PresShell::ProcessSynthMouseMoveEvent(bool aFromScroll)
}
}
/**
 * RAII helper for PresShell::Paint: when no separate DidPaint notification
 * will be sent, tell XPConnect about the paint as this object goes out of
 * scope, giving the JS engine a chance to run an incremental GC slice.
 */
class nsAutoNotifyDidPaint
{
public:
  // aWillSendDidPaint: true when a DidPaint() callback will follow, in
  // which case this helper stays silent and leaves notification to it.
  // explicit: prevent accidental implicit conversion from bool.
  explicit nsAutoNotifyDidPaint(bool aWillSendDidPaint)
    : mWillSendDidPaint(aWillSendDidPaint)
  {
  }

  ~nsAutoNotifyDidPaint()
  {
    // nsContentUtils::XPConnect() can return null; guard before calling.
    if (!mWillSendDidPaint && nsContentUtils::XPConnect()) {
      nsContentUtils::XPConnect()->NotifyDidPaint();
    }
  }

private:
  bool mWillSendDidPaint;
};
void
PresShell::Paint(nsIView* aViewToPaint,
nsIWidget* aWidgetToPaint,
@ -5451,6 +5469,8 @@ PresShell::Paint(nsIView* aViewToPaint,
NS_ASSERTION(aViewToPaint, "null view");
NS_ASSERTION(aWidgetToPaint, "Can't paint without a widget");
nsAutoNotifyDidPaint notifyDidPaint(aWillSendDidPaint);
nsPresContext* presContext = GetPresContext();
AUTO_LAYOUT_PHASE_ENTRY_POINT(presContext, Paint);
@ -7221,6 +7241,10 @@ PresShell::DidPaint()
if (rootPresContext == mPresContext) {
rootPresContext->UpdatePluginGeometry();
}
if (nsContentUtils::XPConnect()) {
nsContentUtils::XPConnect()->NotifyDidPaint();
}
}
bool

Просмотреть файл

@ -656,6 +656,8 @@ pref("javascript.options.typeinference", true);
pref("javascript.options.mem.high_water_mark", 128);
pref("javascript.options.mem.max", -1);
pref("javascript.options.mem.gc_per_compartment", true);
pref("javascript.options.mem.gc_incremental", true);
pref("javascript.options.mem.gc_incremental_slice_ms", 10);
pref("javascript.options.mem.log", false);
pref("javascript.options.gc_on_memory_pressure", true);

Просмотреть файл

@ -81,6 +81,10 @@ HISTOGRAM_BOOLEAN(GC_IS_COMPARTMENTAL, "Is it a compartmental GC?")
HISTOGRAM(GC_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC (ms)")
HISTOGRAM(GC_MARK_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC mark phase (ms)")
HISTOGRAM(GC_SWEEP_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC sweep phase (ms)")
HISTOGRAM(GC_SLICE_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running a JS GC slice (ms)")
HISTOGRAM(GC_MMU_50, 1, 100, 20, LINEAR, "Minimum percentage of time spent outside GC over any 50ms window")
HISTOGRAM_BOOLEAN(GC_RESET, "Was an incremental GC canceled?")
HISTOGRAM_BOOLEAN(GC_INCREMENTAL_DISABLED, "Is incremental GC permanently disabled?")
HISTOGRAM(TELEMETRY_PING, 1, 3000, 10, EXPONENTIAL, "Time taken to submit telemetry info (ms)")
HISTOGRAM_BOOLEAN(TELEMETRY_SUCCESS, "Successful telemetry submission")

Просмотреть файл

@ -116,6 +116,7 @@ window.onload = function () {
populatePreferencesSection();
populateExtensionsSection();
populateGraphicsSection();
populateJavaScriptSection();
}
function populateExtensionsSection() {
@ -382,6 +383,13 @@ function populateGraphicsSection() {
]);
}
// Fill in the "JavaScript" section of the about:support page: report
// whether incremental GC is enabled, rendered as "1" or "0".
function populateJavaScriptSection() {
  let winUtils = window.QueryInterface(Ci.nsIInterfaceRequestor)
                       .getInterface(Ci.nsIDOMWindowUtils);
  let text = winUtils.isIncrementalGCEnabled() ? "1" : "0";
  document.getElementById("javascript-incremental-gc").textContent = text;
}
function getPrefValue(aName) {
let value = "";
let type = Services.prefs.getPrefType(aName);

Просмотреть файл

@ -243,6 +243,24 @@
</tbody>
</table>
<!-- - - - - - - - - - - - - - - - - - - - - -->
<h2 class="major-section">
&aboutSupport.jsTitle;
</h2>
<table>
<tbody>
<tr>
<th class="column">
&aboutSupport.jsIncrementalGC;
</th>
<td id="javascript-incremental-gc">
</td>
</tr>
</tbody>
</table>
</div>
</body>

Просмотреть файл

@ -44,6 +44,9 @@ variant of aboutSupport.showDir.label. -->
<!ENTITY aboutSupport.graphicsTitle "Graphics">
<!ENTITY aboutSupport.jsTitle "JavaScript">
<!ENTITY aboutSupport.jsIncrementalGC "Incremental GC">
<!ENTITY aboutSupport.installationHistoryTitle "Installation History">
<!ENTITY aboutSupport.updateHistoryTitle "Update History">