Enable per-compartment garbage collection (bug 605662, r=gal). CLOSED TREE

This commit is contained in:
Gregor Wagner 2011-01-07 23:44:57 -08:00
Parent c1800dcccc
Commit 5992bb7f55
20 changed files with 470 additions and 162 deletions

View file

@@ -885,20 +885,6 @@ DumpString(const nsAString &str)
}
#endif
static void
MaybeGC(JSContext *cx)
{
size_t bytes = cx->runtime->gcBytes;
size_t lastBytes = cx->runtime->gcLastBytes;
if ((bytes > 8192 && bytes > lastBytes * 16)
#ifdef DEBUG
|| cx->runtime->gcZeal > 0
#endif
) {
JS_GC(cx);
}
}
static already_AddRefed<nsIPrompt>
GetPromptFromContext(nsJSContext* ctx)
{
@@ -937,7 +923,7 @@ nsJSContext::DOMOperationCallback(JSContext *cx)
PRTime callbackTime = ctx->mOperationCallbackTime;
PRTime modalStateTime = ctx->mModalStateTime;
MaybeGC(cx);
JS_MaybeGC(cx);
// Now restore the callback time and count, in case they got reset.
ctx->mOperationCallbackTime = callbackTime;
@@ -3541,12 +3527,12 @@ nsJSContext::ScriptEvaluated(PRBool aTerminated)
#ifdef JS_GC_ZEAL
if (mContext->runtime->gcZeal >= 2) {
MaybeGC(mContext);
JS_MaybeGC(mContext);
} else
#endif
if (mNumEvaluations > 20) {
mNumEvaluations = 0;
MaybeGC(mContext);
JS_MaybeGC(mContext);
}
if (aTerminated) {

View file

@@ -2544,78 +2544,19 @@ JS_GC(JSContext *cx)
/* Don't nuke active arenas if executing or compiling. */
if (cx->tempPool.current == &cx->tempPool.first)
JS_FinishArenaPool(&cx->tempPool);
js_GC(cx, GC_NORMAL);
js_GC(cx, NULL, GC_NORMAL);
}
JS_PUBLIC_API(void)
JS_MaybeGC(JSContext *cx)
{
JSRuntime *rt;
uint32 bytes, lastBytes;
LeaveTrace(cx);
rt = cx->runtime;
/* Don't nuke active arenas if executing or compiling. */
if (cx->tempPool.current == &cx->tempPool.first)
JS_FinishArenaPool(&cx->tempPool);
#ifdef JS_GC_ZEAL
if (rt->gcZeal > 0) {
JS_GC(cx);
return;
}
#endif
bytes = rt->gcBytes;
lastBytes = rt->gcLastBytes;
/*
* We run the GC if we used all available free GC cells and had to
* allocate extra 1/3 of GC arenas since the last run of GC, or if
* we have malloc'd more bytes through JS_malloc than we were told
* to allocate by JS_NewRuntime.
*
* The reason for
* bytes > 4/3 lastBytes
* condition is the following. Bug 312238 changed bytes and lastBytes
* to mean the total amount of memory that the GC uses now and right
* after the last GC.
*
* Before the bug the variables meant the size of allocated GC things
* now and right after the last GC. That size did not include the
* memory taken by free GC cells and the condition was
* bytes > 3/2 lastBytes.
* That is, we run the GC if we have half again as many bytes of
* GC-things as the last time we GC'd. To be compatible we need to
* express that condition through the new meaning of bytes and
* lastBytes.
*
* We write the original condition as
* B*(1-F) > 3/2 Bl*(1-Fl)
* where B is the total memory size allocated by GC and F is the free
* cell density currently and Sl and Fl are the size and the density
* right after GC. The density by definition is memory taken by free
* cells divided by total amount of memory. In other words, B and Bl
* are bytes and lastBytes with the new meaning and B*(1-F) and
* Bl*(1-Fl) are bytes and lastBytes with the original meaning.
*
* Our task is to exclude F and Fl from the last statement. According
* to the stats from bug 331966 comment 23, Fl is about 10-25% for a
* typical run of the browser. It means that the original condition
* implied that we did not run GC unless we exhausted the pool of
* free cells. Indeed if we still have free cells, then B == Bl since
* we did not yet allocated any new arenas and the condition means
* 1 - F > 3/2 (1-Fl) or 3/2Fl > 1/2 + F
* That implies 3/2 Fl > 1/2 or Fl > 1/3. That cannot be fulfilled
* for the state described by the stats. So we can write the original
* condition as:
* F == 0 && B > 3/2 Bl(1-Fl)
* Again using the stats we see that Fl is about 11% when the browser
* starts up and when we are far from hitting rt->gcMaxBytes. With
* this F we have
* F == 0 && B > 3/2 Bl(1-0.11)
* or approximately F == 0 && B > 4/3 Bl.
*/
if ((bytes > 8192 && bytes > lastBytes + lastBytes / 3) ||
rt->isGCMallocLimitReached()) {
JS_GC(cx);
}
MaybeGC(cx);
}
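The long comment removed above derived the old JS_MaybeGC trigger. For reference, the retired heuristic reduces to the following minimal sketch, using only the two byte counters the comment names (illustrative, not part of the patch):

static bool
OldMaybeGCHeuristic(size_t bytes, size_t lastBytes, bool mallocLimitReached)
{
    /*
     * Collect once the heap is non-trivial (> 8 KB) and has grown by more
     * than a third since the last GC (B > 4/3 Bl), or once the JS_malloc
     * quota has been exhausted.
     */
    return (bytes > 8192 && bytes > lastBytes + lastBytes / 3) ||
           mallocLimitReached;
}

After this patch the same trigger lives in js::MaybeGC (see jsgc.cpp below), extended with a per-compartment variant.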
JS_PUBLIC_API(JSGCCallback)
@@ -2640,7 +2581,7 @@ JS_IsAboutToBeFinalized(JSContext *cx, void *thing)
{
JS_ASSERT(thing);
JS_ASSERT(!cx->runtime->gcMarkingTracer);
return IsAboutToBeFinalized(thing);
return IsAboutToBeFinalized(cx, thing);
}
JS_PUBLIC_API(void)

View file

@@ -450,8 +450,8 @@ js_SweepAtomState(JSContext *cx)
AtomEntryType entry = e.front();
if (AtomEntryFlags(entry) & (ATOM_PINNED | ATOM_INTERNED)) {
/* Pinned or interned key cannot be finalized. */
JS_ASSERT(!IsAboutToBeFinalized(AtomEntryToKey(entry)));
} else if (IsAboutToBeFinalized(AtomEntryToKey(entry))) {
JS_ASSERT(!IsAboutToBeFinalized(cx, AtomEntryToKey(entry)));
} else if (IsAboutToBeFinalized(cx, AtomEntryToKey(entry))) {
e.removeFront();
}
}

View file

@@ -1072,7 +1072,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
#endif
if (last) {
js_GC(cx, GC_LAST_CONTEXT);
js_GC(cx, NULL, GC_LAST_CONTEXT);
DUMP_EVAL_CACHE_METER(cx);
DUMP_FUNCTION_METER(cx);
@@ -1082,7 +1082,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
JS_NOTIFY_ALL_CONDVAR(rt->stateChange);
} else {
if (mode == JSDCM_FORCE_GC)
js_GC(cx, GC_NORMAL);
js_GC(cx, NULL, GC_NORMAL);
else if (mode == JSDCM_MAYBE_GC)
JS_MaybeGC(cx);
JS_LOCK_GC(rt);
@@ -1821,7 +1821,7 @@ js_InvokeOperationCallback(JSContext *cx)
JS_UNLOCK_GC(rt);
if (rt->gcIsNeeded) {
js_GC(cx, GC_NORMAL);
js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL);
/*
* On trace we can exceed the GC quota, see comments in NewGCArena. So

View file

@@ -1131,7 +1131,16 @@ struct JSRuntime {
js::GCMarker *gcMarkingTracer;
uint32 gcTriggerFactor;
int64 gcJitReleaseTime;
volatile JSBool gcIsNeeded;
volatile bool gcIsNeeded;
/*
* Compartment that triggered GC. If more than one compartment needs GC,
* gcTriggerCompartment is reset to NULL and a global GC is performed.
*/
JSCompartment *gcTriggerCompartment;
/* Compartment that is currently involved in per-compartment GC */
JSCompartment *gcCurrentCompartment;
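A hedged sketch of how the two new fields encode the pending-GC state; the helper below is hypothetical, not part of the patch:

static JSCompartment *
PendingGCScope(JSRuntime *rt)
{
    /*
     * !gcIsNeeded                          -> no GC requested
     * gcIsNeeded && gcTriggerCompartment   -> per-compartment GC requested
     * gcIsNeeded && !gcTriggerCompartment  -> full (all-compartments) GC
     */
    return rt->gcIsNeeded ? rt->gcTriggerCompartment : NULL;
}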
/*
* We can pack these flags as only the GC thread writes to them. Atomic

View file

@@ -57,6 +57,9 @@ using namespace js::gc;
JSCompartment::JSCompartment(JSRuntime *rt)
: rt(rt),
principals(NULL),
gcBytes(0),
gcTriggerBytes(0),
gcLastBytes(0),
data(NULL),
marked(false),
active(false),
@@ -384,23 +387,30 @@ ScriptPoolDestroyed(JSContext *cx, mjit::JITScript *jit,
}
#endif
void
JSCompartment::mark(JSTracer *trc)
{
for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront())
MarkValue(trc, e.front().key, "cross-compartment wrapper");
}
void
JSCompartment::sweep(JSContext *cx, uint32 releaseInterval)
{
chunk = NULL;
/* Remove dead wrappers from the table. */
for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
JS_ASSERT_IF(IsAboutToBeFinalized(e.front().key.toGCThing()) &&
!IsAboutToBeFinalized(e.front().value.toGCThing()),
JS_ASSERT_IF(IsAboutToBeFinalized(cx, e.front().key.toGCThing()) &&
!IsAboutToBeFinalized(cx, e.front().value.toGCThing()),
e.front().key.isString());
if (IsAboutToBeFinalized(e.front().key.toGCThing()) ||
IsAboutToBeFinalized(e.front().value.toGCThing())) {
if (IsAboutToBeFinalized(cx, e.front().key.toGCThing()) ||
IsAboutToBeFinalized(cx, e.front().value.toGCThing())) {
e.removeFront();
}
}
#ifdef JS_TRACER
traceMonitor.sweep();
traceMonitor.sweep(cx);
#endif
#if defined JS_METHODJIT && defined JS_MONOIC
@@ -418,7 +428,7 @@ JSCompartment::sweep(JSContext *cx, uint32 releaseInterval)
for (JSCList *cursor = scripts.next; cursor != &scripts; cursor = cursor->next) {
JSScript *script = reinterpret_cast<JSScript *>(cursor);
if (script->hasJITCode()) {
mjit::ic::SweepCallICs(script, discardScripts);
mjit::ic::SweepCallICs(cx, script, discardScripts);
if (discardScripts) {
if (script->jitNormal &&
ScriptPoolDestroyed(cx, script->jitNormal, releaseInterval, counter)) {

View file

@@ -201,7 +201,7 @@ struct TraceMonitor {
void flush();
/* Sweep any cache entry pointing to dead GC things. */
void sweep();
void sweep(JSContext *cx);
bool outOfMemory() const;
};
@@ -273,6 +273,10 @@ struct JS_FRIEND_API(JSCompartment) {
js::gc::ArenaList arenas[js::gc::FINALIZE_LIMIT];
js::gc::FreeLists freeLists;
size_t gcBytes;
size_t gcTriggerBytes;
size_t gcLastBytes;
#ifdef JS_GCMETER
js::gc::JSGCArenaStats compartmentStats[js::gc::FINALIZE_LIMIT];
#endif
@@ -320,6 +324,7 @@ struct JS_FRIEND_API(JSCompartment) {
bool init();
void mark(JSTracer *trc);
bool wrap(JSContext *cx, js::Value *vp);
bool wrap(JSContext *cx, JSString **strp);
bool wrap(JSContext *cx, JSObject **objp);
@@ -331,8 +336,12 @@ struct JS_FRIEND_API(JSCompartment) {
void sweep(JSContext *cx, uint32 releaseInterval);
void purge(JSContext *cx);
void finishArenaLists();
void finalizeObjectArenaLists(JSContext *cx);
void finalizeStringArenaLists(JSContext *cx);
bool arenaListsAreEmpty();
void setGCLastBytes(size_t lastBytes);
private:
js::MathCache *mathCache;

View file

@@ -654,7 +654,7 @@ js_SweepWatchPoints(JSContext *cx)
&wp->links != &rt->watchPointList;
wp = next) {
next = (JSWatchPoint *)wp->links.next;
if (IsAboutToBeFinalized(wp->object)) {
if (IsAboutToBeFinalized(cx, wp->object)) {
sample = rt->debuggerMutations;
/* Ignore failures. */

View file

@@ -253,6 +253,15 @@ checkArenaListsForThing(JSCompartment *comp, void *thing) {
return false;
}
bool
checkArenaListAllUnmarked(JSCompartment *comp) {
for (unsigned i = 0; i < FINALIZE_LIMIT; i++) {
if (comp->arenas[i].markedThingsInArenaList())
return false;
}
return true;
}
#endif
} /* namespace gc */
@@ -323,8 +332,9 @@ Chunk::allocateArena(JSCompartment *comp, unsigned thingKind)
JSRuntime *rt = info.runtime;
rt->gcBytes += sizeof(Arena<T>);
if (rt->gcBytes >= rt->gcTriggerBytes)
TriggerGC(rt);
comp->gcBytes += sizeof(Arena<T>);
if (comp->gcBytes >= comp->gcTriggerBytes)
TriggerCompartmentGC(comp);
METER(rt->gcStats.nallarenas++);
return arena;
}
@@ -334,12 +344,15 @@ void
Chunk::releaseArena(Arena<T> *arena)
{
JSRuntime *rt = info.runtime;
JSCompartment *comp = arena->header()->compartment;
METER(rt->gcStats.afree++);
JS_ASSERT(rt->gcStats.nallarenas != 0);
METER(rt->gcStats.nallarenas--);
JS_ASSERT(rt->gcBytes >= sizeof(Arena<T>));
JS_ASSERT(comp->gcBytes >= sizeof(Arena<T>));
rt->gcBytes -= sizeof(Arena<T>);
comp->gcBytes -= sizeof(Arena<T>);
info.emptyArenaLists.insert((Arena<Cell> *)arena);
arena->header()->isUsed = false;
++info.numFree;
@@ -476,10 +489,17 @@ AllocateArena(JSContext *cx, unsigned thingKind)
}
JS_FRIEND_API(bool)
IsAboutToBeFinalized(void *thing)
IsAboutToBeFinalized(JSContext *cx, void *thing)
{
if (JSString::isStatic(thing))
return false;
JS_ASSERT(cx);
JSCompartment *thingCompartment = reinterpret_cast<Cell *>(thing)->compartment();
JSRuntime *rt = cx->runtime;
if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != thingCompartment)
return false;
return !reinterpret_cast<Cell *>(thing)->isMarked();
}
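The new cx parameter lets the predicate consult rt->gcCurrentCompartment: during a per-compartment GC, things in other compartments were never marked, so their clear mark bits must not be read as "dead". A usage sketch with a hypothetical weak-cache sweeper (not part of the patch):

static void
SweepWeakCacheEntry(JSContext *cx, void *thing, bool *removep)
{
    /*
     * For things outside rt->gcCurrentCompartment this now answers false
     * even though their mark bits were never set during this cycle.
     */
    *removep = IsAboutToBeFinalized(cx, thing);
}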
@@ -540,6 +560,8 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes)
rt->gcTriggerFactor = uint32(100.0f * GC_HEAP_GROWTH_FACTOR);
rt->defaultCompartment->setGCLastBytes(8192);
/*
* The assigned value prevents GC from running when GC memory is too low
* (during JS engine start).
@@ -1001,6 +1023,11 @@ JSRuntime::setGCTriggerFactor(uint32 factor)
gcTriggerFactor = factor;
setGCLastBytes(gcLastBytes);
for (JSCompartment **c = compartments.begin(); c != compartments.end(); ++c) {
(*c)->setGCLastBytes(gcLastBytes);
}
defaultCompartment->setGCLastBytes(gcLastBytes);
}
void
@@ -1016,6 +1043,19 @@ JSRuntime::setGCLastBytes(size_t lastBytes)
gcTriggerBytes = (float(gcMaxBytes) < maxtriger) ? gcMaxBytes : size_t(maxtriger);
}
void
JSCompartment::setGCLastBytes(size_t lastBytes)
{
gcLastBytes = lastBytes;
/* FIXME bug 603916 - we should unify the triggers here. */
float trigger1 = float(lastBytes) * float(rt->gcTriggerFactor) / 100.0f;
float trigger2 = float(Max(lastBytes, GC_ARENA_ALLOCATION_TRIGGER)) *
GC_HEAP_GROWTH_FACTOR;
float maxtriger = Max(trigger1, trigger2);
gcTriggerBytes = (float(rt->gcMaxBytes) < maxtriger) ? rt->gcMaxBytes : size_t(maxtriger);
}
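A worked example of the computation above, assuming for illustration gcTriggerFactor = 300 (3x growth) and GC_ARENA_ALLOCATION_TRIGGER well above lastBytes; the real constant values live in the engine and may differ:

/*
 * lastBytes      = 8192 (the seed value new compartments receive)
 * trigger1       = 8192 * 300 / 100 = 24576
 * trigger2       = max(8192, GC_ARENA_ALLOCATION_TRIGGER) * GC_HEAP_GROWTH_FACTOR
 * gcTriggerBytes = min(rt->gcMaxBytes, max(trigger1, trigger2))
 */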
void
FreeLists::purge()
{
@@ -1055,7 +1095,7 @@ NeedLastDitchGC(JSContext *cx)
if (rt->gcZeal >= 1)
return true;
#endif
return !!rt->gcIsNeeded;
return rt->gcIsNeeded;
}
/*
@@ -1074,7 +1114,7 @@ RunLastDitchGC(JSContext *cx)
#endif
/* The last ditch GC preserves all atoms. */
AutoKeepAtoms keep(rt);
js_GC(cx, GC_NORMAL);
js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL);
return rt->gcBytes < rt->gcMaxBytes;
}
@@ -1615,7 +1655,7 @@ MarkRuntime(JSTracer *trc)
* builds for now, see bug 574313.
*/
JSContext *iter;
#if 1
#if 0
iter = NULL;
while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) {
for (AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down) {
@@ -1736,9 +1776,73 @@ TriggerGC(JSRuntime *rt)
* thread.
*/
rt->gcIsNeeded = true;
rt->gcTriggerCompartment = NULL;
TriggerAllOperationCallbacks(rt);
}
void
TriggerCompartmentGC(JSCompartment *comp)
{
JSRuntime *rt = comp->rt;
JS_ASSERT(!rt->gcRunning);
#ifdef JS_GC_ZEAL
if (rt->gcZeal >= 1) {
TriggerGC(rt);
return;
}
#endif
if (comp == rt->defaultCompartment) {
/* We can't do a compartmental GC of the default compartment. */
TriggerGC(rt);
return;
}
if (rt->gcIsNeeded) {
/* If we need to GC more than one compartment, run a full GC. */
if (rt->gcTriggerCompartment != comp)
rt->gcTriggerCompartment = NULL;
return;
}
if (rt->gcBytes > 8192 && rt->gcBytes >= 3 * (rt->gcTriggerBytes / 2)) {
/* If we're using significantly more than our quota, do a full GC. */
TriggerGC(rt);
return;
}
/*
* Trigger the GC when it is safe to call an operation callback on any
* thread.
*/
rt->gcIsNeeded = true;
rt->gcTriggerCompartment = comp;
TriggerAllOperationCallbacks(comp->rt);
}
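The escalation rules above reduce to a small decision helper; the sketch below mirrors them (gcZeal omitted, debug-only) and is illustrative rather than part of the patch:

static bool
EscalatesToFullGC(JSRuntime *rt, JSCompartment *comp)
{
    return comp == rt->defaultCompartment ||                  /* shared data */
           (rt->gcIsNeeded && rt->gcTriggerCompartment != comp) ||
           rt->gcBytes >= 3 * (rt->gcTriggerBytes / 2);       /* 150% of quota */
}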
void
MaybeGC(JSContext *cx)
{
JSRuntime *rt = cx->runtime;
#ifdef JS_GC_ZEAL
if (rt->gcZeal > 0) {
js_GC(cx, NULL, GC_NORMAL);
return;
}
#endif
JSCompartment *comp = cx->compartment;
if (rt->gcIsNeeded) {
js_GC(cx, comp == rt->gcTriggerCompartment ? comp : NULL, GC_NORMAL);
return;
}
if (comp->gcBytes > 8192 && comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4))
js_GC(cx, comp, GC_NORMAL);
}
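Note the two per-compartment thresholds now in play: js::MaybeGC fires at roughly 75% of the same gcTriggerBytes limit that Chunk::allocateArena enforces at 100%, so idle collection tends to run before allocation forces a trigger. A side-by-side sketch (illustrative only):

/*
 * Chunk::allocateArena: comp->gcBytes >= comp->gcTriggerBytes           (100%)
 * js::MaybeGC:          comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4) (~75%)
 *                       ...and only once comp->gcBytes > 8192.
 */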
} /* namespace js */
void
@@ -1902,6 +2006,29 @@ FinalizeArenaList(JSCompartment *comp, JSContext *cx, unsigned thingKind)
METER(UpdateCompartmentStats(comp, thingKind, nlivearenas, nkilledarenas, nthings));
}
void
JSCompartment::finalizeObjectArenaLists(JSContext *cx)
{
FinalizeArenaList<JSObject>(this, cx, FINALIZE_OBJECT0);
FinalizeArenaList<JSObject_Slots2>(this, cx, FINALIZE_OBJECT2);
FinalizeArenaList<JSObject_Slots4>(this, cx, FINALIZE_OBJECT4);
FinalizeArenaList<JSObject_Slots8>(this, cx, FINALIZE_OBJECT8);
FinalizeArenaList<JSObject_Slots12>(this, cx, FINALIZE_OBJECT12);
FinalizeArenaList<JSObject_Slots16>(this, cx, FINALIZE_OBJECT16);
FinalizeArenaList<JSFunction>(this, cx, FINALIZE_FUNCTION);
#if JS_HAS_XML_SUPPORT
FinalizeArenaList<JSXML>(this, cx, FINALIZE_XML);
#endif
}
void
JSCompartment::finalizeStringArenaLists(JSContext *cx)
{
FinalizeArenaList<JSShortString>(this, cx, FINALIZE_SHORT_STRING);
FinalizeArenaList<JSString>(this, cx, FINALIZE_STRING);
FinalizeArenaList<JSExternalString>(this, cx, FINALIZE_EXTERNAL_STRING);
}
#ifdef JS_THREADSAFE
namespace js {
@@ -2091,7 +2218,8 @@ PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
JSRuntime *rt = cx->runtime;
/* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
rt->gcIsNeeded = JS_FALSE;
rt->gcIsNeeded = false;
rt->gcTriggerCompartment = NULL;
/* Reset malloc counter. */
rt->resetGCMallocBytes();
@@ -2117,8 +2245,13 @@ PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
rt->shapeGen = Shape::LAST_RESERVED_SHAPE;
rt->protoHazardShape = 0;
}
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
(*c)->purge(cx);
if (rt->gcCurrentCompartment) {
rt->gcCurrentCompartment->purge(cx);
} else {
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
(*c)->purge(cx);
}
js_PurgeThreads(cx);
{
@@ -2128,6 +2261,116 @@ PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
}
}
static void
MarkAndSweepCompartment(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIMER_PARAM)
{
JSRuntime *rt = cx->runtime;
rt->gcNumber++;
JS_ASSERT(!rt->gcRegenShapes);
JS_ASSERT(gckind != GC_LAST_CONTEXT);
/*
* Mark phase.
*/
GCMarker gcmarker(cx);
JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
JS_ASSERT(gcmarker.getMarkColor() == BLACK);
rt->gcMarkingTracer = &gcmarker;
gcmarker.stackLimit = cx->stackLimit;
for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
r.front()->clearMarkBitmap();
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
(*c)->mark(&gcmarker);
MarkRuntime(&gcmarker);
/*
* Mark children of things that caused too deep recursion during the above
* tracing.
*/
gcmarker.markDelayedChildren();
rt->gcMarkingTracer = NULL;
if (rt->gcCallback)
(void) rt->gcCallback(cx, JSGC_MARK_END);
#ifdef JS_THREADSAFE
/*
* cx->gcBackgroundFree is set if we need several mark-and-sweep loops to
* finish the GC.
*/
if (!cx->gcBackgroundFree) {
/* Wait until the sweeping from the previous GC finishes. */
rt->gcHelperThread.waitBackgroundSweepEnd(rt);
cx->gcBackgroundFree = &rt->gcHelperThread;
}
#endif
#ifdef DEBUG
/* Make sure that we didn't mark an object in another compartment */
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
JS_ASSERT_IF(*c != comp, checkArenaListAllUnmarked(*c));
#endif
/*
* Sweep phase.
*
* Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
* so that any attempt to allocate a GC-thing from a finalizer will fail,
* rather than nest badly and leave the unmarked newborn to be swept.
*
* We first sweep atom state so we can use js_IsAboutToBeFinalized on
* JSString held in a hashtable to check if the hashtable entry can be
* freed. Note that even after the entry is freed, JSObject finalizers can
* continue to access the corresponding JSString* assuming that they are
* unique. This works since the atomization API must not be called during
* the GC.
*/
TIMESTAMP(startSweep);
js_SweepAtomState(cx);
/* Finalize watch points associated with unreachable objects. */
js_SweepWatchPoints(cx);
#ifdef DEBUG
/* Save the pre-sweep count of scope-mapped properties. */
rt->liveObjectPropsPreSweep = rt->liveObjectProps;
#endif
/*
* We finalize iterators before other objects so the iterator can use the
* object whose properties it enumerates over to finalize the enumeration
* state. We finalize objects before other GC things to ensure that
* object's finalizer can access them even if they will be freed.
*/
comp->finalizeObjectArenaLists(cx);
TIMESTAMP(sweepObjectEnd);
comp->finalizeStringArenaLists(cx);
TIMESTAMP(sweepStringEnd);
comp->sweep(cx, 0);
/*
* Unmark the runtime's property trees because we don't
* sweep them.
*/
js::PropertyTree::unmarkShapes(cx);
/*
* Destroy arenas after we finished the sweeping so finalizers can safely
* use js_IsAboutToBeFinalized().
*/
ExpireGCChunks(rt);
TIMESTAMP(sweepDestroyEnd);
if (rt->gcCallback)
(void) rt->gcCallback(cx, JSGC_FINALIZE_END);
}
/*
* Perform mark-and-sweep GC.
*
@@ -2211,25 +2454,13 @@ MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
* object's finalizer can access them even if they will be freed.
*/
for (JSCompartment **comp = rt->compartments.begin(); comp != rt->compartments.end(); comp++) {
FinalizeArenaList<JSObject>(*comp, cx, FINALIZE_OBJECT0);
FinalizeArenaList<JSObject_Slots2>(*comp, cx, FINALIZE_OBJECT2);
FinalizeArenaList<JSObject_Slots4>(*comp, cx, FINALIZE_OBJECT4);
FinalizeArenaList<JSObject_Slots8>(*comp, cx, FINALIZE_OBJECT8);
FinalizeArenaList<JSObject_Slots12>(*comp, cx, FINALIZE_OBJECT12);
FinalizeArenaList<JSObject_Slots16>(*comp, cx, FINALIZE_OBJECT16);
FinalizeArenaList<JSFunction>(*comp, cx, FINALIZE_FUNCTION);
#if JS_HAS_XML_SUPPORT
FinalizeArenaList<JSXML>(*comp, cx, FINALIZE_XML);
#endif
}
for (JSCompartment **comp = rt->compartments.begin(); comp != rt->compartments.end(); comp++)
(*comp)->finalizeObjectArenaLists(cx);
TIMESTAMP(sweepObjectEnd);
for (JSCompartment **comp = rt->compartments.begin(); comp != rt->compartments.end(); comp++) {
FinalizeArenaList<JSShortString>(*comp, cx, FINALIZE_SHORT_STRING);
FinalizeArenaList<JSString>(*comp, cx, FINALIZE_STRING);
FinalizeArenaList<JSExternalString>(*comp, cx, FINALIZE_EXTERNAL_STRING);
}
for (JSCompartment **comp = rt->compartments.begin(); comp != rt->compartments.end(); comp++)
(*comp)->finalizeStringArenaLists(cx);
TIMESTAMP(sweepStringEnd);
@@ -2444,7 +2675,7 @@ AutoGCSession::~AutoGCSession()
* garbage and no other threads are demanding more GC.
*/
static void
GCUntilDone(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
GCUntilDone(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIMER_PARAM)
{
if (JS_ON_TRACE(cx))
return;
@@ -2466,6 +2697,9 @@ GCUntilDone(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
AutoGCSession gcsession(cx);
JS_ASSERT(!rt->gcCurrentCompartment);
rt->gcCurrentCompartment = comp;
METER(rt->gcStats.poke++);
bool firstRun = true;
@@ -2482,7 +2716,11 @@ GCUntilDone(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
TIMESTAMP(startMark);
firstRun = false;
}
MarkAndSweep(cx, gckind GCTIMER_ARG);
if (comp)
MarkAndSweepCompartment(cx, comp, gckind GCTIMER_ARG);
else
MarkAndSweep(cx, gckind GCTIMER_ARG);
// GC again if:
// - another thread, not in a request, called js_GC
@@ -2499,10 +2737,16 @@ GCUntilDone(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
rt->gcMarkAndSweep = false;
rt->gcRegenShapes = false;
rt->setGCLastBytes(rt->gcBytes);
rt->gcCurrentCompartment = NULL;
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
(*c)->setGCLastBytes((*c)->gcBytes);
(*c)->marked = false;
}
}
void
js_GC(JSContext *cx, JSGCInvocationKind gckind)
js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind)
{
JSRuntime *rt = cx->runtime;
@@ -2547,7 +2791,7 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind)
/* Lock out other GC allocator and collector invocations. */
AutoLockGC lock(rt);
GCUntilDone(cx, gckind GCTIMER_ARG);
GCUntilDone(cx, comp, gckind GCTIMER_ARG);
}
/* We re-sample the callback again as the finalizers can change it. */
@@ -2621,6 +2865,8 @@ NewCompartment(JSContext *cx, JSPrincipals *principals)
JSPRINCIPALS_HOLD(cx, principals);
}
compartment->setGCLastBytes(8192);
{
AutoLockGC lock(rt);

View file

@@ -70,6 +70,9 @@ js_TraceXML(JSTracer *trc, JSXML* thing);
#endif
namespace js {
struct Shape;
namespace gc {
/*
@@ -201,6 +204,16 @@ struct ArenaBitmap {
}
return true;
}
#ifdef DEBUG
bool noBitsSet() {
for (unsigned i = 0; i < BitWords; i++) {
if (bitmap[i] != uintptr_t(0))
return false;
}
return true;
}
#endif
};
/* Ensure that bitmap covers the whole arena. */
@@ -574,6 +587,14 @@ struct ArenaList {
}
return false;
}
bool markedThingsInArenaList() {
for (Arena<FreeCell> *a = (Arena<FreeCell> *) head; a; a = (Arena<FreeCell> *) a->header()->next) {
if (!a->bitmap()->noBitsSet())
return true;
}
return false;
}
#endif
inline void insert(Arena<FreeCell> *a) {
@@ -768,7 +789,7 @@ extern void
js_UnlockGCThingRT(JSRuntime *rt, void *thing);
extern JS_FRIEND_API(bool)
IsAboutToBeFinalized(void *thing);
IsAboutToBeFinalized(JSContext *cx, void *thing);
extern JS_FRIEND_API(bool)
js_GCThingIsMarked(void *thing, uint32 color);
@@ -791,6 +812,13 @@ MarkContext(JSTracer *trc, JSContext *acx);
extern void
TriggerGC(JSRuntime *rt);
/* Must be called with GC lock taken. */
extern void
TriggerCompartmentGC(JSCompartment *comp);
extern void
MaybeGC(JSContext *cx);
} /* namespace js */
/*
@@ -807,8 +835,9 @@ typedef enum JSGCInvocationKind {
GC_LAST_CONTEXT = 1
} JSGCInvocationKind;
/* Pass NULL for |comp| to get a full GC. */
extern void
js_GC(JSContext *cx, JSGCInvocationKind gckind);
js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind);
#ifdef JS_THREADSAFE
/*

View file

@@ -190,6 +190,14 @@ Mark(JSTracer *trc, T *thing)
JS_ASSERT(JS_IS_VALID_TRACE_KIND(GetGCThingTraceKind(thing)));
JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
/* Per-compartment GC happens only with a GCMarker, never with a custom JSTracer. */
JS_ASSERT_IF(trc->context->runtime->gcCurrentCompartment, IS_GC_MARKING_TRACER(trc));
JSRuntime *rt = trc->context->runtime;
/* Don't mark things outside a compartment if we are in a per-compartment GC */
if (rt->gcCurrentCompartment && thing->asCell()->compartment() != rt->gcCurrentCompartment)
goto out;
if (!IS_GC_MARKING_TRACER(trc)) {
uint32 kind = GetGCThingTraceKind(thing);
trc->callback(trc, thing, kind);
@@ -197,7 +205,7 @@ Mark(JSTracer *trc, T *thing)
}
TypedMarker(trc, thing);
out:
#ifdef DEBUG
trc->debugPrinter = NULL;
@@ -330,7 +338,7 @@ TypedMarker(JSTracer *trc, JSFunction *thing)
{
JS_ASSERT(thing);
JS_ASSERT(JSTRACE_OBJECT == GetFinalizableTraceKind(thing->asCell()->arena()->header()->thingKind));
GCMarker *gcmarker = static_cast<GCMarker *>(trc);
if (!thing->markIfUnmarked(gcmarker->getMarkColor()))
return;
@@ -378,14 +386,36 @@ Untag(JSString *str)
}
static JS_ALWAYS_INLINE void
NonRopeTypedMarker(JSString *str)
NonRopeTypedMarker(JSRuntime *rt, JSString *str)
{
/* N.B. The base of a dependent string is not necessarily flat. */
JS_ASSERT(!str->isRope());
while (!JSString::isStatic(str) &&
str->asCell()->markIfUnmarked() &&
str->isDependent()) {
str = str->dependentBase();
if (rt->gcCurrentCompartment) {
for (;;) {
if (JSString::isStatic(str))
break;
/*
* If we are performing a single-compartment GC, don't mark strings outside the current compartment.
* Dependent strings are not shared between compartments and cannot be in the defaultCompartment.
*/
if (str->asCell()->compartment() != rt->gcCurrentCompartment) {
JS_ASSERT(str->asCell()->compartment() == rt->defaultCompartment);
break;
}
if (!str->asCell()->markIfUnmarked())
break;
if (!str->isDependent())
break;
str = str->dependentBase();
}
} else {
while (!JSString::isStatic(str) &&
str->asCell()->markIfUnmarked() &&
str->isDependent()) {
str = str->dependentBase();
}
}
}
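The compartment-aware branch above stops the dependent-string walk at a compartment boundary; per the assertion, the only way the chain can leave the current compartment is into the shared default compartment, which a later full GC will handle. A sketch of the stop conditions (illustrative only):

/*
 * The walk above ends when the current string:
 *   - is static (static strings carry no mark bits), or
 *   - lives outside rt->gcCurrentCompartment (then it must be in
 *     rt->defaultCompartment and is left for a full GC), or
 *   - was already marked, or
 *   - is not dependent (the end of the chain).
 */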
@@ -397,9 +427,13 @@ static JS_ALWAYS_INLINE void
TypedMarker(JSTracer *trc, JSString *str)
{
using namespace detail;
JSRuntime *rt = trc->context->runtime;
JS_ASSERT(!JSString::isStatic(str));
#ifdef DEBUG
JSCompartment *strComp = str->asCell()->compartment();
#endif
if (!str->isRope()) {
NonRopeTypedMarker(str);
NonRopeTypedMarker(rt, str);
return;
}
@@ -411,6 +445,8 @@ TypedMarker(JSTracer *trc, JSString *str)
*/
JSString *parent = NULL;
first_visit_node: {
JS_ASSERT(strComp == str->asCell()->compartment() || str->asCell()->compartment() == rt->defaultCompartment);
JS_ASSERT(!JSString::isStatic(str));
if (!str->asCell()->markIfUnmarked())
goto finish_node;
JSString *left = str->ropeLeft();
@@ -421,7 +457,10 @@ TypedMarker(JSTracer *trc, JSString *str)
str = left;
goto first_visit_node;
}
NonRopeTypedMarker(left);
JS_ASSERT_IF(!JSString::isStatic(left),
strComp == left->asCell()->compartment()
|| left->asCell()->compartment() == rt->defaultCompartment);
NonRopeTypedMarker(rt, left);
}
visit_right_child: {
JSString *right = str->ropeRight();
@@ -432,7 +471,10 @@ TypedMarker(JSTracer *trc, JSString *str)
str = right;
goto first_visit_node;
}
NonRopeTypedMarker(right);
JS_ASSERT_IF(!JSString::isStatic(right),
strComp == right->asCell()->compartment()
|| right->asCell()->compartment() == rt->defaultCompartment);
NonRopeTypedMarker(rt, right);
}
finish_node: {
if (!parent)
@@ -460,7 +502,9 @@ MarkAtomRange(JSTracer *trc, size_t len, JSAtom **vec, const char *name)
for (uint32 i = 0; i < len; i++) {
if (JSAtom *atom = vec[i]) {
JS_SET_TRACING_INDEX(trc, name, i);
Mark(trc, ATOM_TO_STRING(atom));
JSString *str = ATOM_TO_STRING(atom);
if (!JSString::isStatic(str))
Mark(trc, str);
}
}
}
@@ -479,8 +523,11 @@ MarkObjectRange(JSTracer *trc, size_t len, JSObject **vec, const char *name)
static inline void
MarkId(JSTracer *trc, jsid id)
{
if (JSID_IS_STRING(id))
Mark(trc, JSID_TO_STRING(id));
if (JSID_IS_STRING(id)) {
JSString *str = JSID_TO_STRING(id);
if (!JSString::isStatic(str))
Mark(trc, str);
}
else if (JS_UNLIKELY(JSID_IS_OBJECT(id)))
Mark(trc, JSID_TO_OBJECT(id));
}

View file

@@ -895,3 +895,22 @@ js::PropertyTree::sweepShapes(JSContext *cx)
}
#endif /* DEBUG */
}
void
js::PropertyTree::unmarkShapes(JSContext *cx)
{
JSArena **ap = &JS_PROPERTY_TREE(cx).arenaPool.first.next;
while (JSArena *a = *ap) {
Shape *limit = (Shape *) a->avail;
for (Shape *shape = (Shape *) a->base; shape < limit; shape++) {
/* If the id is null, shape is already on the freelist. */
if (JSID_IS_VOID(shape->id))
continue;
if (shape->marked())
shape->clearMark();
}
ap = &a->next;
}
}
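unmarkShapes exists because a per-compartment GC marks reachable shapes but never runs the shape sweep; clearing the bits afterwards keeps stale marks from skewing the next collection. A sketch of where it sits in the cycle, mirroring MarkAndSweepCompartment above (illustrative only):

/*
 * Per-compartment GC cycle (sketch):
 *   1. mark the compartment and the runtime's roots  -> shapes get marked
 *   2. sweep only the compartment's arenas           -> shapes are not swept
 *   3. js::PropertyTree::unmarkShapes(cx)            -> drop stale mark bits
 */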

View file

@@ -144,6 +144,7 @@ class PropertyTree
static void orphanKids(JSContext *cx, js::Shape *shape);
static void sweepShapes(JSContext *cx);
static void unmarkShapes(JSContext *cx);
#ifdef DEBUG
static void meter(JSBasicStats *bs, js::Shape *node);
#endif

View file

@@ -296,7 +296,7 @@ struct Shape : public JSObjectMap
friend struct ::JSFunction;
friend class js::PropertyTree;
friend class js::Bindings;
friend bool HasUnreachableGCThings(TreeFragment *f);
friend bool IsShapeAboutToBeFinalized(JSContext *cx, const js::Shape *shape);
protected:
mutable uint32 numSearches; /* Only updated until it reaches HASH_MIN_SEARCHES. */

View file

@@ -2771,33 +2771,43 @@ TraceMonitor::flush()
}
inline bool
HasUnreachableGCThings(TreeFragment *f)
IsShapeAboutToBeFinalized(JSContext *cx, const js::Shape *shape)
{
JSRuntime *rt = cx->runtime;
if (rt->gcCurrentCompartment != NULL)
return false;
return !shape->marked();
}
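Shapes live in the runtime-wide property tree, which a per-compartment GC does not sweep (see PropertyTree::unmarkShapes above), so while gcCurrentCompartment is set no shape may be reported as dead. A sketch of the invariant the helper encodes (illustrative only):

/*
 * rt->gcCurrentCompartment != NULL -> compartment GC: shapes are not swept,
 *                                     so never treat one as finalizable.
 * rt->gcCurrentCompartment == NULL -> full GC: a shape is dead iff its
 *                                     mark bit is clear.
 */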
inline bool
HasUnreachableGCThings(JSContext *cx, TreeFragment *f)
{
/*
* We do not check here for dead scripts as JSScript is not a GC thing.
* Instead PurgeScriptFragments is used to remove dead script fragments.
* See bug 584860.
*/
if (IsAboutToBeFinalized(f->globalObj))
if (IsAboutToBeFinalized(cx, f->globalObj))
return true;
Value* vp = f->gcthings.data();
for (unsigned len = f->gcthings.length(); len; --len) {
Value &v = *vp++;
JS_ASSERT(v.isMarkable());
if (IsAboutToBeFinalized(v.toGCThing()))
if (IsAboutToBeFinalized(cx, v.toGCThing()))
return true;
}
const Shape** shapep = f->shapes.data();
for (unsigned len = f->shapes.length(); len; --len) {
const Shape* shape = *shapep++;
if (!shape->marked())
if (IsShapeAboutToBeFinalized(cx, shape))
return true;
}
return false;
}
void
TraceMonitor::sweep()
TraceMonitor::sweep(JSContext *cx)
{
JS_ASSERT(!ontrace());
debug_only_print0(LC_TMTracer, "Purging fragments with dead things");
@@ -2806,7 +2816,7 @@
TreeFragment *recorderTree = NULL;
if (recorder) {
recorderTree = recorder->getTree();
shouldAbortRecording = HasUnreachableGCThings(recorderTree);
shouldAbortRecording = HasUnreachableGCThings(cx, recorderTree);
}
for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
@@ -2814,7 +2824,7 @@
while (TreeFragment* frag = *fragp) {
TreeFragment* peer = frag;
do {
if (HasUnreachableGCThings(peer))
if (HasUnreachableGCThings(cx, peer))
break;
peer = peer->peer;
} while (peer);

View file

@@ -1580,7 +1580,7 @@ class TraceRecorder
bool *blacklist);
friend AbortResult AbortRecording(JSContext*, const char*);
friend class BoxArg;
friend void TraceMonitor::sweep();
friend void TraceMonitor::sweep(JSContext *cx);
public:
static bool JS_REQUIRES_STACK

View file

@@ -351,7 +351,7 @@ struct JITScript {
}
void nukeScriptDependentICs();
void sweepCallICs(bool purgeAll);
void sweepCallICs(JSContext *cx, bool purgeAll);
void purgeMICs();
void purgePICs();

View file

@@ -1118,7 +1118,7 @@ JITScript::nukeScriptDependentICs()
}
void
JITScript::sweepCallICs(bool purgeAll)
JITScript::sweepCallICs(JSContext *cx, bool purgeAll)
{
Repatcher repatcher(this);
@@ -1137,9 +1137,9 @@
* precisely GC call ICs while keeping the identity guard safe.
*/
bool fastFunDead = ic.fastGuardedObject &&
(purgeAll || IsAboutToBeFinalized(ic.fastGuardedObject));
(purgeAll || IsAboutToBeFinalized(cx, ic.fastGuardedObject));
bool nativeDead = ic.fastGuardedNative &&
(purgeAll || IsAboutToBeFinalized(ic.fastGuardedNative));
(purgeAll || IsAboutToBeFinalized(cx, ic.fastGuardedNative));
if (fastFunDead) {
repatcher.repatch(ic.funGuard, NULL);
@@ -1189,12 +1189,12 @@
}
void
ic::SweepCallICs(JSScript *script, bool purgeAll)
ic::SweepCallICs(JSContext *cx, JSScript *script, bool purgeAll)
{
if (script->jitNormal)
script->jitNormal->sweepCallICs(purgeAll);
script->jitNormal->sweepCallICs(cx, purgeAll);
if (script->jitCtor)
script->jitCtor->sweepCallICs(purgeAll);
script->jitCtor->sweepCallICs(cx, purgeAll);
}
#endif /* JS_MONOIC */

View file

@@ -266,7 +266,7 @@ void JS_FASTCALL NativeCall(VMFrame &f, ic::CallICInfo *ic);
JSBool JS_FASTCALL SplatApplyArgs(VMFrame &f);
void PurgeMICs(JSContext *cx, JSScript *script);
void SweepCallICs(JSScript *script, bool purgeAll);
void SweepCallICs(JSContext *cx, JSScript *script, bool purgeAll);
} /* namespace ic */
} /* namespace mjit */

View file

@@ -555,9 +555,10 @@ static JSDHashOperator
SweepWaiverWrappers(JSDHashTable *table, JSDHashEntryHdr *hdr,
uint32 number, void *arg)
{
JSContext *cx = (JSContext *)arg;
JSObject *key = ((JSObject2JSObjectMap::Entry *)hdr)->key;
JSObject *value = ((JSObject2JSObjectMap::Entry *)hdr)->value;
if(IsAboutToBeFinalized(key) || IsAboutToBeFinalized(value))
if(IsAboutToBeFinalized(cx, key) || IsAboutToBeFinalized(cx, value))
return JS_DHASH_REMOVE;
return JS_DHASH_NEXT;
}
@@ -568,7 +569,7 @@ SweepCompartment(nsCStringHashKey& aKey, JSCompartment *compartment, void *aClos
xpc::CompartmentPrivate *priv = (xpc::CompartmentPrivate *)
JS_GetCompartmentPrivate((JSContext *)aClosure, compartment);
if (priv->waiverWrapperMap)
priv->waiverWrapperMap->Enumerate(SweepWaiverWrappers, nsnull);
priv->waiverWrapperMap->Enumerate(SweepWaiverWrappers, (JSContext *)aClosure);
return PL_DHASH_NEXT;
}