Bug 716142 - Multi-compartment GC (r=igor,jorendorff)

Bill McCloskey 2012-04-02 17:02:25 -07:00
Parent 73c890014f
Commit b4eb5cc658
17 changed files with 342 additions and 351 deletions

View file

@ -328,7 +328,7 @@ Statistics::formatData(StatisticsSerializer &ss, uint64_t timestamp)
if (ss.isJSON())
ss.appendNumber("Timestamp", "%llu", "", (unsigned long long)timestamp);
ss.appendNumber("Total Time", "%.1f", "ms", t(total));
ss.appendString("Type", compartment ? "compartment" : "global");
ss.appendString("Type", wasFullGC ? "global" : "compartment");
ss.appendNumber("MMU (20ms)", "%d", "%", int(mmu20 * 100));
ss.appendNumber("MMU (50ms)", "%d", "%", int(mmu50 * 100));
if (slices.length() > 1 || ss.isJSON())
@ -398,7 +398,7 @@ Statistics::Statistics(JSRuntime *rt)
startupTime(PRMJ_Now()),
fp(NULL),
fullFormat(false),
compartment(NULL),
wasFullGC(false),
nonincrementalReason(NULL)
{
PodArrayZero(phaseTotals);
@ -492,7 +492,7 @@ Statistics::endGC()
phaseTotals[i] += phaseTimes[i];
if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) {
(*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, compartment ? 1 : 0);
(*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, wasFullGC ? 0 : 1);
(*cb)(JS_TELEMETRY_GC_MS, t(gcDuration()));
(*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK]));
(*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_SWEEP]));
@ -508,9 +508,9 @@ Statistics::endGC()
}
void
Statistics::beginSlice(JSCompartment *comp, gcreason::Reason reason)
Statistics::beginSlice(bool full, gcreason::Reason reason)
{
compartment = comp;
wasFullGC = full;
bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
if (first)
@ -523,7 +523,7 @@ Statistics::beginSlice(JSCompartment *comp, gcreason::Reason reason)
(*cb)(JS_TELEMETRY_GC_REASON, reason);
if (GCSliceCallback cb = runtime->gcSliceCallback)
(*cb)(runtime, first ? GC_CYCLE_BEGIN : GC_SLICE_BEGIN, GCDescription(!!compartment));
(*cb)(runtime, first ? GC_CYCLE_BEGIN : GC_SLICE_BEGIN, GCDescription(!wasFullGC));
}
void
@ -542,9 +542,9 @@ Statistics::endSlice()
if (GCSliceCallback cb = runtime->gcSliceCallback) {
if (last)
(*cb)(runtime, GC_CYCLE_END, GCDescription(!!compartment));
(*cb)(runtime, GC_CYCLE_END, GCDescription(!wasFullGC));
else
(*cb)(runtime, GC_SLICE_END, GCDescription(!!compartment));
(*cb)(runtime, GC_SLICE_END, GCDescription(!wasFullGC));
}
/* Do this after the slice callback since it uses these values. */

View file

@ -94,7 +94,7 @@ struct Statistics {
void beginPhase(Phase phase);
void endPhase(Phase phase);
void beginSlice(JSCompartment *comp, gcreason::Reason reason);
void beginSlice(bool full, gcreason::Reason reason);
void endSlice();
void reset(const char *reason) { slices.back().resetReason = reason; }
@ -116,7 +116,7 @@ struct Statistics {
FILE *fp;
bool fullFormat;
JSCompartment *compartment;
bool wasFullGC;
const char *nonincrementalReason;
struct SliceData {
@ -162,9 +162,9 @@ struct Statistics {
};
struct AutoGCSlice {
AutoGCSlice(Statistics &stats, JSCompartment *comp, gcreason::Reason reason
AutoGCSlice(Statistics &stats, bool full, gcreason::Reason reason
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginSlice(comp, reason); }
: stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginSlice(full, reason); }
~AutoGCSlice() { stats.endSlice(); }
Statistics &stats;

View file

@ -730,21 +730,21 @@ JSRuntime::JSRuntime()
gcJitReleaseTime(0),
gcMode(JSGC_MODE_GLOBAL),
gcIsNeeded(0),
gcFullIsNeeded(0),
gcWeakMapList(NULL),
gcStats(thisFromCtor()),
gcNumber(0),
gcStartNumber(0),
gcTriggerReason(gcreason::NO_REASON),
gcTriggerCompartment(NULL),
gcCurrentCompartment(NULL),
gcCheckCompartment(NULL),
gcIsFull(false),
gcStrictCompartmentChecking(false),
gcIncrementalState(gc::NO_INCREMENTAL),
gcCompartmentCreated(false),
gcLastMarkSlice(false),
gcIncrementalIsFull(false),
gcInterFrameGC(0),
gcSliceBudget(SliceBudget::Unlimited),
gcIncrementalEnabled(true),
gcIncrementalCompartment(NULL),
gcPoke(false),
gcRunning(false),
#ifdef JS_GC_ZEAL
@ -2829,7 +2829,7 @@ JS_IsGCMarkingTracer(JSTracer *trc)
return IS_GC_MARKING_TRACER(trc);
}
JS_PUBLIC_API(void)
extern JS_PUBLIC_API(void)
JS_CompartmentGC(JSContext *cx, JSCompartment *comp)
{
AssertNoGC(cx);
@ -2837,7 +2837,12 @@ JS_CompartmentGC(JSContext *cx, JSCompartment *comp)
/* We cannot GC the atoms compartment alone; use a full GC instead. */
JS_ASSERT(comp != cx->runtime->atomsCompartment);
GC(cx, comp, GC_NORMAL, gcreason::API);
if (comp) {
PrepareCompartmentForGC(comp);
GC(cx, false, GC_NORMAL, gcreason::API);
} else {
GC(cx, true, GC_NORMAL, gcreason::API);
}
}
JS_PUBLIC_API(void)

View file

@ -261,7 +261,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
rt->gcHelperThread.waitBackgroundSweepEnd();
}
#endif
/*
* Dump remaining type inference results first. This printing
* depends on atoms still existing.
@ -271,16 +271,16 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
/* Unpin all common atoms before final GC. */
js_FinishCommonAtoms(cx);
/* Clear debugging state to remove GC roots. */
for (CompartmentsIter c(rt); !c.done(); c.next())
c->clearTraps(cx);
JS_ClearAllWatchPoints(cx);
GC(cx, NULL, GC_NORMAL, gcreason::LAST_CONTEXT);
GC(cx, true, GC_NORMAL, gcreason::LAST_CONTEXT);
} else if (mode == JSDCM_FORCE_GC) {
JS_ASSERT(!rt->gcRunning);
GC(cx, NULL, GC_NORMAL, gcreason::DESTROY_CONTEXT);
GC(cx, true, GC_NORMAL, gcreason::DESTROY_CONTEXT);
} else if (mode == JSDCM_MAYBE_GC) {
JS_ASSERT(!rt->gcRunning);
JS_MaybeGC(cx);
@ -883,7 +883,7 @@ js_InvokeOperationCallback(JSContext *cx)
JS_ATOMIC_SET(&rt->interrupt, 0);
if (rt->gcIsNeeded)
GCSlice(cx, rt->gcTriggerCompartment, GC_NORMAL, rt->gcTriggerReason);
GCSlice(cx, rt->gcFullIsNeeded, GC_NORMAL, rt->gcTriggerReason);
#ifdef JS_THREADSAFE
/*

View file

@ -306,7 +306,15 @@ struct JSRuntime : js::RuntimeFriendFields
int64_t gcNextFullGCTime;
int64_t gcJitReleaseTime;
JSGCMode gcMode;
/*
* These flags must be kept separate so that a thread requesting a
* compartment GC doesn't cancel another thread's concurrent request for a
* full GC.
*/
volatile uintptr_t gcIsNeeded;
volatile uintptr_t gcFullIsNeeded;
js::WeakMapBase *gcWeakMapList;
js::gcstats::Statistics gcStats;
@ -319,20 +327,14 @@ struct JSRuntime : js::RuntimeFriendFields
/* The reason that an interrupt-triggered GC should be called. */
js::gcreason::Reason gcTriggerReason;
/*
* Compartment that triggered GC. If more than one compartment needs GC,
* gcTriggerCompartment is reset to NULL and a global GC is performed.
*/
JSCompartment *gcTriggerCompartment;
/* Compartment that is currently involved in per-compartment GC */
JSCompartment *gcCurrentCompartment;
/* Is the currently running GC a full GC or a compartmental GC? */
bool gcIsFull;
/*
* If this is non-NULL, all marked objects must belong to this compartment.
* This is used to look for compartment bugs.
* If this is true, all marked objects must belong to a compartment being
* GCed. This is used to look for compartment bugs.
*/
JSCompartment *gcCheckCompartment;
bool gcStrictCompartmentChecking;
/*
* The current incremental GC phase. During non-incremental GC, this is
@ -346,6 +348,9 @@ struct JSRuntime : js::RuntimeFriendFields
/* Indicates that the last incremental slice exhausted the mark stack. */
bool gcLastMarkSlice;
/* Is there a full incremental GC in progress? */
bool gcIncrementalIsFull;
/*
* Indicates that a GC slice has taken place in the middle of an animation
* frame, rather than at the beginning. In this case, the next slice will be
@ -362,9 +367,6 @@ struct JSRuntime : js::RuntimeFriendFields
*/
bool gcIncrementalEnabled;
/* Compartment that is undergoing an incremental GC. */
JSCompartment *gcIncrementalCompartment;
/*
* We save all conservative scanned roots in this vector so that
* conservative scanning can be "replayed" deterministically. In DEBUG mode,
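In short, the runtime no longer records which single compartment to collect; it records whether the collection is full, and per-compartment scheduling covers the rest. A rough sketch of the new request path, using names from this patch (the two compartments are hypothetical; this is an illustration, not code from the commit):

// Old model: a second compartment request widened gcTriggerCompartment to
// NULL, forcing a full GC. New model: schedule each compartment separately.
PrepareCompartmentForGC(comp1);   // comp1: NoGCScheduled -> GCScheduled
PrepareCompartmentForGC(comp2);   // comp2 likewise; no widening to a full GC
GCSlice(cx, /* full = */ false, GC_NORMAL, reason);   // collects both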

View file

@ -73,6 +73,7 @@ JSCompartment::JSCompartment(JSRuntime *rt)
: rt(rt),
principals(NULL),
needsBarrier_(false),
gcState(NoGCScheduled),
gcBytes(0),
gcTriggerBytes(0),
hold(false),
@ -420,7 +421,7 @@ JSCompartment::wrap(JSContext *cx, AutoIdVector &props)
void
JSCompartment::markCrossCompartmentWrappers(JSTracer *trc)
{
JS_ASSERT(trc->runtime->gcCurrentCompartment);
JS_ASSERT(!isCollecting());
for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
Value tmp = e.front().key;

View file

@ -194,7 +194,7 @@ struct JSCompartment
bool needsBarrier_;
bool needsBarrier() {
bool needsBarrier() const {
return needsBarrier_;
}
@ -203,6 +203,52 @@ struct JSCompartment
return &rt->gcMarker;
}
private:
enum CompartmentGCState {
NoGCScheduled,
GCScheduled,
GCRunning
};
CompartmentGCState gcState;
public:
bool isCollecting() const {
/* Allow this if we're in the middle of an incremental GC. */
if (rt->gcRunning) {
return gcState == GCRunning;
} else {
JS_ASSERT(gcState != GCRunning);
return needsBarrier();
}
}
/*
* If this returns true, all object tracing must be done with a GC marking
* tracer.
*/
bool requireGCTracer() const {
return gcState == GCRunning;
}
void setCollecting(bool collecting) {
JS_ASSERT(rt->gcRunning);
if (collecting)
gcState = GCRunning;
else
gcState = NoGCScheduled;
}
void scheduleGC() {
JS_ASSERT(!rt->gcRunning);
JS_ASSERT(gcState != GCRunning);
gcState = GCScheduled;
}
bool isGCScheduled() const {
return gcState == GCScheduled;
}
size_t gcBytes;
size_t gcTriggerBytes;
size_t gcMaxMallocBytes;
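The three states above have a fixed lifecycle: scheduleGC() on the mutator side, setCollecting() inside a GC session. A minimal walkthrough assuming the methods just shown (illustrative):

comp->scheduleGC();           // NoGCScheduled -> GCScheduled (asserts !rt->gcRunning)
// AutoGCSession (jsgc.cpp, later in this patch) promotes scheduled compartments:
comp->setCollecting(true);    // GCScheduled -> GCRunning; isCollecting() is now true
// and its destructor returns them to idle when the session ends:
comp->setCollecting(false);   // GCRunning -> NoGCScheduled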

View file

@ -134,7 +134,7 @@ JS_NewObjectWithUniqueType(JSContext *cx, JSClass *clasp, JSObject *proto, JSObj
JS_FRIEND_API(void)
js::GCForReason(JSContext *cx, gcreason::Reason reason)
{
GC(cx, NULL, GC_NORMAL, reason);
GC(cx, true, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
@ -143,19 +143,20 @@ js::CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason
/* We cannot GC the atoms compartment alone; use a full GC instead. */
JS_ASSERT(comp != cx->runtime->atomsCompartment);
GC(cx, comp, GC_NORMAL, reason);
PrepareCompartmentForGC(comp);
GC(cx, false, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
js::ShrinkingGC(JSContext *cx, gcreason::Reason reason)
{
GC(cx, NULL, GC_SHRINK, reason);
GC(cx, true, GC_SHRINK, reason);
}
JS_FRIEND_API(void)
js::IncrementalGC(JSContext *cx, gcreason::Reason reason)
{
GCSlice(cx, NULL, GC_NORMAL, reason);
GCSlice(cx, true, GC_NORMAL, reason);
}
JS_FRIEND_API(void)
@ -752,12 +753,17 @@ NotifyDidPaint(JSContext *cx)
}
if (rt->gcZeal() == gc::ZealFrameGCValue) {
GCSlice(cx, NULL, GC_NORMAL, gcreason::REFRESH_FRAME);
GCSlice(cx, true, GC_NORMAL, gcreason::REFRESH_FRAME);
return;
}
if (rt->gcIncrementalState != gc::NO_INCREMENTAL && !rt->gcInterFrameGC)
GCSlice(cx, rt->gcIncrementalCompartment, GC_NORMAL, gcreason::REFRESH_FRAME);
if (rt->gcIncrementalState != gc::NO_INCREMENTAL && !rt->gcInterFrameGC) {
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (c->needsBarrier())
PrepareCompartmentForGC(c);
}
GCSlice(cx, rt->gcIncrementalIsFull, GC_NORMAL, gcreason::REFRESH_FRAME);
}
rt->gcInterFrameGC = false;
}

View file

@ -231,37 +231,6 @@ const uint32_t Arena::FirstThingOffsets[] = {
#undef OFFSET
class GCCompartmentsIter {
private:
JSCompartment **it, **end;
public:
GCCompartmentsIter(JSRuntime *rt) {
if (rt->gcCurrentCompartment) {
it = &rt->gcCurrentCompartment;
end = &rt->gcCurrentCompartment + 1;
} else {
it = rt->compartments.begin();
end = rt->compartments.end();
}
}
bool done() const { return it == end; }
void next() {
JS_ASSERT(!done());
it++;
}
JSCompartment *get() const {
JS_ASSERT(!done());
return *it;
}
operator JSCompartment *() const { return get(); }
JSCompartment *operator->() const { return get(); }
};
#ifdef DEBUG
void
ArenaHeader::checkSynchronizedWithFreeList() const
@ -889,8 +858,7 @@ JS_FRIEND_API(bool)
IsAboutToBeFinalized(const Cell *thing)
{
JSCompartment *thingCompartment = reinterpret_cast<const Cell *>(thing)->compartment();
JSRuntime *rt = thingCompartment->rt;
if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != thingCompartment)
if (!thingCompartment->isCollecting())
return false;
return !reinterpret_cast<const Cell *>(thing)->isMarked();
}
@ -1032,8 +1000,7 @@ IsAddressableGCThing(JSRuntime *rt, uintptr_t w,
if (!aheader->allocated())
return CGCT_FREEARENA;
JSCompartment *curComp = rt->gcCurrentCompartment;
if (curComp && curComp != aheader->compartment)
if (rt->gcRunning && !aheader->compartment->isCollecting())
return CGCT_OTHERCOMPARTMENT;
AllocKind thingKind = aheader->getAllocKind();
@ -1168,24 +1135,6 @@ MarkConservativeStackRoots(JSTracer *trc, bool useSavedRoots)
void
MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
{
/*
* Normally, the drainMarkStack phase of marking will never trace outside
* of the compartment currently being collected. However, conservative
* scanning during drainMarkStack (as is done for generators) can break
* this invariant. So we disable the compartment assertions in this
* situation.
*/
struct AutoSkipChecking {
JSRuntime *runtime;
JSCompartment *savedCompartment;
AutoSkipChecking(JSRuntime *rt)
: runtime(rt), savedCompartment(rt->gcCheckCompartment) {
rt->gcCheckCompartment = NULL;
}
~AutoSkipChecking() { runtime->gcCheckCompartment = savedCompartment; }
} as(trc->runtime);
const uintptr_t *begin = beginv->payloadWord();
const uintptr_t *end = endv->payloadWord();
#ifdef JS_NUNBOX32
@ -1691,13 +1640,13 @@ ArenaLists::finalizeScripts(JSContext *cx)
}
static void
RunLastDitchGC(JSContext *cx, gcreason::Reason reason)
RunLastDitchGC(JSContext *cx, gcreason::Reason reason, bool full)
{
JSRuntime *rt = cx->runtime;
/* The last ditch GC preserves all atoms. */
AutoKeepAtoms keep(rt);
GC(cx, rt->gcTriggerCompartment, GC_NORMAL, reason);
GC(cx, full, GC_NORMAL, reason);
}
/* static */ void *
@ -1712,7 +1661,8 @@ ArenaLists::refillFreeList(JSContext *cx, AllocKind thingKind)
bool runGC = rt->gcIncrementalState != NO_INCREMENTAL && comp->gcBytes > comp->gcTriggerBytes;
for (;;) {
if (JS_UNLIKELY(runGC)) {
RunLastDitchGC(cx, gcreason::LAST_DITCH);
PrepareCompartmentForGC(comp);
RunLastDitchGC(cx, gcreason::LAST_DITCH, rt->gcFullIsNeeded);
/*
* The JSGC_END callback can legitimately allocate new GC
@ -2017,12 +1967,7 @@ void
GCMarker::checkCompartment(void *p)
{
JS_ASSERT(started);
Cell *cell = static_cast<Cell *>(p);
if (runtime->gcRunning && runtime->gcCurrentCompartment)
JS_ASSERT(cell->compartment() == runtime->gcCurrentCompartment);
else if (runtime->gcIncrementalCompartment)
JS_ASSERT(cell->compartment() == runtime->gcIncrementalCompartment);
JS_ASSERT(static_cast<Cell *>(p)->compartment()->isCollecting());
}
#endif
@ -2111,44 +2056,9 @@ SetMarkStackLimit(JSRuntime *rt, size_t limit)
} /* namespace js */
#ifdef DEBUG
static void
EmptyMarkCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
}
#endif
static void
gc_root_traversal(JSTracer *trc, const RootEntry &entry)
{
#ifdef DEBUG
void *ptr;
if (entry.value.type == JS_GC_ROOT_GCTHING_PTR) {
ptr = *reinterpret_cast<void **>(entry.key);
} else {
Value *vp = reinterpret_cast<Value *>(entry.key);
ptr = vp->isGCThing() ? vp->toGCThing() : NULL;
}
if (ptr && !trc->runtime->gcCurrentCompartment) {
/*
* Use conservative machinery to find if ptr is a valid GC thing.
* We only do this during global GCs, to preserve the invariant
* that mark callbacks are not in place during compartment GCs.
*/
JSTracer checker;
JS_TracerInit(&checker, trc->runtime, EmptyMarkCallback);
ConservativeGCTest test = MarkIfGCThingWord(&checker, reinterpret_cast<uintptr_t>(ptr));
if (test != CGCT_VALID && entry.value.name) {
fprintf(stderr,
"JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n"
"invalid gcthing. This is usually caused by a missing call to JS_RemoveRoot.\n"
"The root's name is \"%s\".\n",
entry.value.name);
}
JS_ASSERT(test == CGCT_VALID);
}
#endif
const char *name = entry.value.name ? entry.value.name : "root";
if (entry.value.type == JS_GC_ROOT_GCTHING_PTR)
MarkGCThingRoot(trc, reinterpret_cast<void **>(entry.key), name);
@ -2327,9 +2237,12 @@ MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
{
JSRuntime *rt = trc->runtime;
JS_ASSERT(trc->callback != GCMarker::GrayCallback);
if (rt->gcCurrentCompartment) {
for (CompartmentsIter c(rt); !c.done(); c.next())
c->markCrossCompartmentWrappers(trc);
if (IS_GC_MARKING_TRACER(trc) && !rt->gcIsFull) {
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (!c->isCollecting())
c->markCrossCompartmentWrappers(trc);
}
Debugger::markCrossCompartmentDebuggerObjectReferents(trc);
}
@ -2356,7 +2269,11 @@ MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
for (ContextIter acx(rt); !acx.done(); acx.next())
acx->mark(trc);
for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
/* We can't use GCCompartmentsIter if we're called from TraceRuntime. */
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (IS_GC_MARKING_TRACER(trc) && !c->isCollecting())
continue;
if (c->activeAnalysis)
c->markTypes(trc);
@ -2413,7 +2330,7 @@ TriggerGC(JSRuntime *rt, gcreason::Reason reason)
/* Trigger the GC when it is safe to call an operation callback. */
rt->gcIsNeeded = true;
rt->gcTriggerCompartment = NULL;
rt->gcFullIsNeeded = true;
rt->gcTriggerReason = reason;
rt->triggerOperationCallback();
}
@ -2435,19 +2352,16 @@ TriggerCompartmentGC(JSCompartment *comp, gcreason::Reason reason)
return;
}
if (rt->gcIsNeeded) {
/* If we need to GC more than one compartment, run a full GC. */
if (rt->gcTriggerCompartment != comp)
rt->gcTriggerCompartment = NULL;
PrepareCompartmentForGC(comp);
if (rt->gcIsNeeded)
return;
}
/*
* Trigger the GC when it is safe to call an operation callback on any
* thread.
*/
rt->gcIsNeeded = true;
rt->gcTriggerCompartment = comp;
rt->gcTriggerReason = reason;
rt->triggerOperationCallback();
}
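The rewritten trigger keeps the two request flags independent, as the new JSRuntime comment promises: a compartment request that arrives while a full GC is pending only schedules the compartment. A hypothetical interleaving (the functions are real, the scenario is illustrative):

TriggerGC(rt, reason1);               // gcIsNeeded = true, gcFullIsNeeded = true
TriggerCompartmentGC(comp, reason2);  // comp -> GCScheduled; gcIsNeeded is already
                                      // set, so it returns early and gcFullIsNeeded
                                      // stays true: the pending GC remains full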
@ -2459,14 +2373,13 @@ MaybeGC(JSContext *cx)
JS_ASSERT(rt->onOwnerThread());
if (rt->gcZeal() == ZealAllocValue || rt->gcZeal() == ZealPokeValue) {
GC(cx, NULL, GC_NORMAL, gcreason::MAYBEGC);
GC(cx, true, GC_NORMAL, gcreason::MAYBEGC);
return;
}
JSCompartment *comp = cx->compartment;
if (rt->gcIsNeeded) {
GCSlice(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL,
GC_NORMAL, gcreason::MAYBEGC);
GCSlice(cx, rt->gcFullIsNeeded, GC_NORMAL, gcreason::MAYBEGC);
return;
}
@ -2474,12 +2387,14 @@ MaybeGC(JSContext *cx)
comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4) &&
rt->gcIncrementalState == NO_INCREMENTAL)
{
GCSlice(cx, NULL, GC_NORMAL, gcreason::MAYBEGC);
PrepareCompartmentForGC(comp);
GCSlice(cx, false, GC_NORMAL, gcreason::MAYBEGC);
return;
}
if (comp->gcMallocAndFreeBytes > comp->gcTriggerMallocAndFreeBytes) {
GCSlice(cx, comp, GC_NORMAL, gcreason::MAYBEGC);
PrepareCompartmentForGC(comp);
GCSlice(cx, false, GC_NORMAL, gcreason::MAYBEGC);
return;
}
@ -2493,7 +2408,7 @@ MaybeGC(JSContext *cx)
if (rt->gcChunkAllocationSinceLastGC ||
rt->gcNumArenasFreeCommitted > FreeCommittedArenasThreshold)
{
GCSlice(cx, NULL, GC_SHRINK, gcreason::MAYBEGC);
GCSlice(cx, true, GC_SHRINK, gcreason::MAYBEGC);
} else {
rt->gcNextFullGCTime = now + GC_IDLE_FULL_SPAN;
}
@ -2890,6 +2805,12 @@ GCHelperThread::doSweep()
#endif /* JS_THREADSAFE */
void
PrepareCompartmentForGC(JSCompartment *comp)
{
comp->scheduleGC();
}
} /* namespace js */
static bool
@ -2938,10 +2859,15 @@ SweepCompartments(JSContext *cx, JSGCInvocationKind gckind)
}
static void
PurgeRuntime(JSRuntime *rt)
PurgeRuntime(JSTracer *trc)
{
for (GCCompartmentsIter c(rt); !c.done(); c.next())
c->purge();
JSRuntime *rt = trc->runtime;
for (CompartmentsIter c(rt); !c.done(); c.next()) {
/* We can be called from StartVerifyBarriers with a non-GC marker. */
if (c->isCollecting() || !IS_GC_MARKING_TRACER(trc))
c->purge();
}
rt->tempLifoAlloc.freeUnused();
rt->gsnCache.purge();
@ -2974,7 +2900,7 @@ BeginMarkPhase(JSRuntime *rt)
*/
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_PURGE);
PurgeRuntime(rt);
PurgeRuntime(gcmarker);
}
/*
@ -3049,9 +2975,9 @@ EndMarkPhase(JSContext *cx)
#ifdef DEBUG
/* Make sure that we didn't mark an object in another compartment */
if (rt->gcCurrentCompartment) {
if (!rt->gcIsFull) {
for (CompartmentsIter c(rt); !c.done(); c.next()) {
JS_ASSERT_IF(c != rt->gcCurrentCompartment && c != rt->atomsCompartment,
JS_ASSERT_IF(!c->isCollecting() && c != rt->atomsCompartment,
c->arenas.checkArenaListAllUnmarked());
}
}
@ -3121,10 +3047,12 @@ ValidateIncrementalMarking(JSContext *cx)
js_free(entry);
for (size_t i = 0; i < ArenasPerChunk; i++) {
if (chunk->decommittedArenas.get(i))
continue;
Arena *arena = &chunk->arenas[i];
if (!arena->aheader.allocated())
continue;
if (rt->gcCurrentCompartment && arena->aheader.compartment != rt->gcCurrentCompartment)
if (!arena->aheader.compartment->isCollecting())
continue;
if (arena->aheader.allocatedDuringIncremental)
continue;
@ -3198,13 +3126,13 @@ SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
/* Collect watch points associated with unreachable objects. */
WatchpointMap::sweepAll(rt);
if (!rt->gcCurrentCompartment)
Debugger::sweepAll(cx);
/* Detach unreachable debuggers and global objects from each other. */
Debugger::sweepAll(cx);
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_COMPARTMENTS);
bool releaseTypes = !rt->gcCurrentCompartment && ReleaseObservedTypes(rt);
bool releaseTypes = rt->gcIsFull && ReleaseObservedTypes(rt);
for (GCCompartmentsIter c(rt); !c.done(); c.next())
c->sweep(cx, releaseTypes);
}
@ -3258,7 +3186,7 @@ SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
* This removes compartments from rt->compartment, so we do it last to make
* sure we don't miss sweeping any compartments.
*/
if (!rt->gcCurrentCompartment)
if (rt->gcIsFull)
SweepCompartments(cx, gckind);
#ifndef JS_THREADSAFE
@ -3324,7 +3252,7 @@ class AutoHeapSession {
/* ...while this class is to be used only for garbage collection. */
class AutoGCSession : AutoHeapSession {
public:
explicit AutoGCSession(JSRuntime *rt, JSCompartment *comp);
explicit AutoGCSession(JSRuntime *rt, bool full);
~AutoGCSession();
};
@ -3343,14 +3271,21 @@ AutoHeapSession::~AutoHeapSession()
runtime->gcRunning = false;
}
AutoGCSession::AutoGCSession(JSRuntime *rt, JSCompartment *comp)
AutoGCSession::AutoGCSession(JSRuntime *rt, bool full)
: AutoHeapSession(rt)
{
JS_ASSERT(!runtime->gcCurrentCompartment);
runtime->gcCurrentCompartment = comp;
rt->gcIsFull = full;
DebugOnly<bool> any = false;
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (full || c->isGCScheduled()) {
c->setCollecting(true);
any = true;
}
}
JS_ASSERT(any);
runtime->gcIsNeeded = false;
runtime->gcTriggerCompartment = NULL;
runtime->gcFullIsNeeded = false;
runtime->gcInterFrameGC = true;
runtime->gcNumber++;
@ -3364,7 +3299,10 @@ AutoGCSession::AutoGCSession(JSRuntime *rt, JSCompartment *comp)
AutoGCSession::~AutoGCSession()
{
runtime->gcCurrentCompartment = NULL;
runtime->gcIsFull = false;
for (GCCompartmentsIter c(runtime); !c.done(); c.next())
c->setCollecting(false);
runtime->gcNextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
runtime->gcChunkAllocationSinceLastGC = false;
}
@ -3375,18 +3313,16 @@ ResetIncrementalGC(JSRuntime *rt, const char *reason)
if (rt->gcIncrementalState == NO_INCREMENTAL)
return;
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (!rt->gcIncrementalCompartment || rt->gcIncrementalCompartment == c)
c->needsBarrier_ = false;
for (CompartmentsIter c(rt); !c.done(); c.next())
c->needsBarrier_ = false;
JS_ASSERT(!c->needsBarrier_);
}
rt->gcIncrementalCompartment = NULL;
rt->gcIncrementalIsFull = false;
rt->gcMarker.reset();
rt->gcMarker.stop();
rt->gcIncrementalState = NO_INCREMENTAL;
JS_ASSERT(!rt->gcStrictCompartmentChecking);
rt->gcStats.reset(reason);
}
@ -3464,8 +3400,8 @@ IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
gc::State initialState = rt->gcIncrementalState;
if (rt->gcIncrementalState == NO_INCREMENTAL) {
JS_ASSERT(!rt->gcIncrementalCompartment);
rt->gcIncrementalCompartment = rt->gcCurrentCompartment;
JS_ASSERT(!rt->gcIncrementalIsFull);
rt->gcIncrementalIsFull = rt->gcIsFull;
rt->gcIncrementalState = MARK_ROOTS;
rt->gcLastMarkSlice = false;
}
@ -3511,7 +3447,7 @@ IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
/* JIT code was already discarded during sweeping. */
rt->gcIncrementalCompartment = NULL;
rt->gcIncrementalIsFull = false;
rt->gcIncrementalState = NO_INCREMENTAL;
}
@ -3551,7 +3487,7 @@ IsIncrementalGCSafe(JSRuntime *rt)
if (rt->gcKeepAtoms)
return IncrementalSafety::Unsafe("gcKeepAtoms set");
for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (c->activeAnalysis)
return IncrementalSafety::Unsafe("activeAnalysis set");
}
@ -3587,19 +3523,17 @@ BudgetIncrementalGC(JSRuntime *rt, int64_t *budget)
return;
#endif
if (rt->gcIncrementalState != NO_INCREMENTAL &&
rt->gcCurrentCompartment != rt->gcIncrementalCompartment)
{
ResetIncrementalGC(rt, "compartment change");
return;
}
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (c->gcBytes > c->gcTriggerBytes) {
*budget = SliceBudget::Unlimited;
rt->gcStats.nonincremental("allocation trigger");
return;
}
if (c->isCollecting() != c->needsBarrier()) {
ResetIncrementalGC(rt, "compartment change");
return;
}
}
}
@ -3610,18 +3544,18 @@ BudgetIncrementalGC(JSRuntime *rt, int64_t *budget)
* marking implementation.
*/
static JS_NEVER_INLINE void
GCCycle(JSContext *cx, JSCompartment *comp, int64_t budget, JSGCInvocationKind gckind)
GCCycle(JSContext *cx, bool full, int64_t budget, JSGCInvocationKind gckind)
{
JSRuntime *rt = cx->runtime;
JS_ASSERT_IF(comp, comp != rt->atomsCompartment);
JS_ASSERT_IF(comp, rt->gcMode != JSGC_MODE_GLOBAL);
JS_ASSERT_IF(!full, !rt->atomsCompartment->isCollecting());
JS_ASSERT_IF(!full, rt->gcMode != JSGC_MODE_GLOBAL);
/* Recursive GC is no-op. */
if (rt->gcRunning)
return;
AutoGCSession gcsession(rt, comp);
AutoGCSession gcsession(rt, full);
/* Don't GC if we are reporting an OOM. */
if (rt->inOOMReport)
@ -3689,7 +3623,7 @@ IsDeterministicGCReason(gcreason::Reason reason)
#endif
static void
Collect(JSContext *cx, JSCompartment *comp, int64_t budget,
Collect(JSContext *cx, bool full, int64_t budget,
JSGCInvocationKind gckind, gcreason::Reason reason)
{
JSRuntime *rt = cx->runtime;
@ -3726,13 +3660,13 @@ Collect(JSContext *cx, JSCompartment *comp, int64_t budget,
RecordNativeStackTopForGC(rt);
if (rt->gcMode == JSGC_MODE_GLOBAL)
comp = NULL;
full = true;
/* This is a heuristic to avoid resets. */
if (rt->gcIncrementalState != NO_INCREMENTAL && !rt->gcIncrementalCompartment)
comp = NULL;
if (rt->gcIncrementalState != NO_INCREMENTAL && rt->gcIncrementalIsFull)
full = true;
gcstats::AutoGCSlice agc(rt->gcStats, comp, reason);
gcstats::AutoGCSlice agc(rt->gcStats, full, reason);
do {
/*
@ -3749,7 +3683,7 @@ Collect(JSContext *cx, JSCompartment *comp, int64_t budget,
/* Lock out other GC allocator and collector invocations. */
AutoLockGC lock(rt);
rt->gcPoke = false;
GCCycle(cx, comp, budget, gckind);
GCCycle(cx, full, budget, gckind);
}
if (rt->gcIncrementalState == NO_INCREMENTAL) {
@ -3768,15 +3702,15 @@ Collect(JSContext *cx, JSCompartment *comp, int64_t budget,
namespace js {
void
GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason)
GC(JSContext *cx, bool full, JSGCInvocationKind gckind, gcreason::Reason reason)
{
Collect(cx, comp, SliceBudget::Unlimited, gckind, reason);
Collect(cx, full, SliceBudget::Unlimited, gckind, reason);
}
void
GCSlice(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason)
GCSlice(JSContext *cx, bool full, JSGCInvocationKind gckind, gcreason::Reason reason)
{
Collect(cx, comp, cx->runtime->gcSliceBudget, gckind, reason);
Collect(cx, full, cx->runtime->gcSliceBudget, gckind, reason);
}
void
@ -3989,11 +3923,9 @@ RunDebugGC(JSContext *cx)
* If rt->gcDebugCompartmentGC is true, only GC the current
* compartment. But don't GC the atoms compartment.
*/
rt->gcTriggerCompartment = rt->gcDebugCompartmentGC ? cx->compartment : NULL;
if (rt->gcTriggerCompartment == rt->atomsCompartment)
rt->gcTriggerCompartment = NULL;
RunLastDitchGC(cx, gcreason::DEBUG_GC);
if (rt->gcDebugCompartmentGC)
PrepareCompartmentForGC(cx->compartment);
RunLastDitchGC(cx, gcreason::DEBUG_GC, !rt->gcDebugCompartmentGC);
#endif
}
@ -4241,8 +4173,6 @@ StartVerifyBarriers(JSContext *cx)
for (CompartmentsIter c(rt); !c.done(); c.next())
c->discardJitCode(cx);
PurgeRuntime(rt);
VerifyTracer *trc = new (js_malloc(sizeof(VerifyTracer))) VerifyTracer;
rt->gcNumber++;
@ -4251,6 +4181,8 @@ StartVerifyBarriers(JSContext *cx)
JS_TracerInit(trc, rt, AccumulateEdge);
PurgeRuntime(trc);
const size_t size = 64 * 1024 * 1024;
trc->root = (VerifyNode *)js_malloc(size);
JS_ASSERT(trc->root);

View file

@ -1383,6 +1383,9 @@ MaybeGC(JSContext *cx);
extern void
ShrinkGCBuffers(JSRuntime *rt);
extern void
PrepareCompartmentForGC(JSCompartment *comp);
/*
* Kinds of js_GC invocation.
*/
@ -1396,10 +1399,10 @@ typedef enum JSGCInvocationKind {
/* Pass true for |full| to get a full GC. */
extern void
GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason reason);
GC(JSContext *cx, bool full, JSGCInvocationKind gckind, js::gcreason::Reason reason);
extern void
GCSlice(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason reason);
GCSlice(JSContext *cx, bool full, JSGCInvocationKind gckind, js::gcreason::Reason reason);
extern void
GCDebugSlice(JSContext *cx, int64_t objCount);

View file

@ -388,6 +388,38 @@ class CellIter : public CellIterImpl
inline void EmptyArenaOp(Arena *arena) {}
inline void EmptyCellOp(Cell *t) {}
class GCCompartmentsIter {
private:
JSCompartment **it, **end;
public:
GCCompartmentsIter(JSRuntime *rt) {
JS_ASSERT(rt->gcRunning);
it = rt->compartments.begin();
end = rt->compartments.end();
if (!(*it)->isCollecting())
next();
JS_ASSERT(it < end);
}
bool done() const { return it == end; }
void next() {
JS_ASSERT(!done());
do {
it++;
} while (it != end && !(*it)->isCollecting());
}
JSCompartment *get() const {
JS_ASSERT(!done());
return *it;
}
operator JSCompartment *() const { return get(); }
JSCompartment *operator->() const { return get(); }
};
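Callers look the same as with the old jsgc.cpp iterator, but the filter is now per-compartment state instead of gcCurrentCompartment; for example, the sweep loop elsewhere in this patch:

for (GCCompartmentsIter c(rt); !c.done(); c.next())
    c->sweep(cx, releaseTypes);   // visits only compartments that are collecting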
/*
* Allocates a new GC thing. After a successful allocation the caller must
* fully initialize the thing before calling any function that can potentially

View file

@ -77,13 +77,20 @@ CheckMarkedThing(JSTracer *trc, T *thing)
{
JS_ASSERT(trc);
JS_ASSERT(thing);
JS_ASSERT(thing->compartment());
JS_ASSERT(thing->compartment()->rt == trc->runtime);
JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
JS_ASSERT_IF(trc->runtime->gcCurrentCompartment, IS_GC_MARKING_TRACER(trc));
DebugOnly<JSRuntime *> rt = trc->runtime;
JS_ASSERT_IF(rt->gcIsFull, IS_GC_MARKING_TRACER(trc));
JS_ASSERT_IF(thing->compartment()->requireGCTracer(), IS_GC_MARKING_TRACER(trc));
JS_ASSERT(thing->isAligned());
JS_ASSERT(thing->compartment());
JS_ASSERT(thing->compartment()->rt == trc->runtime);
JS_ASSERT_IF(rt->gcStrictCompartmentChecking,
thing->compartment()->isCollecting() ||
thing->compartment() == rt->atomsCompartment);
}
template<typename T>
@ -95,22 +102,15 @@ MarkInternal(JSTracer *trc, T **thingp)
CheckMarkedThing(trc, thing);
JSRuntime *rt = trc->runtime;
JS_ASSERT_IF(rt->gcCheckCompartment,
thing->compartment() == rt->gcCheckCompartment ||
thing->compartment() == rt->atomsCompartment);
/*
* Don't mark things outside a compartment if we are in a per-compartment
* GC.
*/
if (!rt->gcCurrentCompartment || thing->compartment() == rt->gcCurrentCompartment) {
if (!trc->callback) {
if (!trc->callback) {
if (thing->compartment()->isCollecting())
PushMarkStack(static_cast<GCMarker *>(trc), thing);
} else {
trc->callback(trc, (void **)thingp, GetGCThingTraceKind(thing));
}
} else {
trc->callback(trc, (void **)thingp, GetGCThingTraceKind(thing));
}
#ifdef DEBUG
@ -393,17 +393,30 @@ MarkObjectSlots(JSTracer *trc, JSObject *obj, uint32_t start, uint32_t nslots)
}
}
void
MarkCrossCompartmentObjectUnbarriered(JSTracer *trc, JSObject **obj, const char *name)
{
if (IS_GC_MARKING_TRACER(trc) && !(*obj)->compartment()->isCollecting())
return;
MarkObjectUnbarriered(trc, obj, name);
}
void
MarkCrossCompartmentScriptUnbarriered(JSTracer *trc, JSScript **script, const char *name)
{
if (IS_GC_MARKING_TRACER(trc) && !(*script)->compartment()->isCollecting())
return;
MarkScriptUnbarriered(trc, script, name);
}
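Both helpers share one guard: when the GC marker is tracing, cross-compartment edges into non-collecting compartments are skipped, while any other tracer (heap dumper, barrier verifier) still sees every edge. The Debugger trace hooks later in this patch are the intended callers, e.g.:

/* From DebuggerScript_trace below. */
if (JSScript *script = GetScriptReferent(obj)) {
    MarkCrossCompartmentScriptUnbarriered(trc, &script, "Debugger.Script referent");
    obj->setPrivateUnbarriered(script);
}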
void
MarkCrossCompartmentSlot(JSTracer *trc, HeapSlot *s, const char *name)
{
if (s->isMarkable()) {
Cell *cell = (Cell *)s->toGCThing();
JSRuntime *rt = trc->runtime;
if (rt->gcCurrentCompartment && cell->compartment() != rt->gcCurrentCompartment)
return;
/* In case we're called from a write barrier. */
if (rt->gcIncrementalCompartment && cell->compartment() != rt->gcIncrementalCompartment)
if (IS_GC_MARKING_TRACER(trc) && !cell->compartment()->isCollecting())
return;
MarkSlot(trc, s, name);
@ -428,14 +441,12 @@ MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name)
/*** Push Mark Stack ***/
#define JS_COMPARTMENT_ASSERT(rt, thing) \
JS_ASSERT_IF((rt)->gcCurrentCompartment, \
(thing)->compartment() == (rt)->gcCurrentCompartment);
#define JS_COMPARTMENT_ASSERT(rt, thing) \
JS_ASSERT((thing)->compartment()->isCollecting())
#define JS_COMPARTMENT_ASSERT_STR(rt, thing) \
JS_ASSERT_IF((rt)->gcCurrentCompartment, \
(thing)->compartment() == (rt)->gcCurrentCompartment || \
(thing)->compartment() == (rt)->atomsCompartment);
#define JS_COMPARTMENT_ASSERT_STR(rt, thing) \
JS_ASSERT((thing)->compartment()->isCollecting() || \
(thing)->compartment() == (rt)->atomsCompartment);
static void
PushMarkStack(GCMarker *gcmarker, JSXML *thing)
@ -1105,9 +1116,10 @@ GCMarker::drainMarkStack(SliceBudget &budget)
struct AutoCheckCompartment {
JSRuntime *runtime;
AutoCheckCompartment(JSRuntime *rt) : runtime(rt) {
runtime->gcCheckCompartment = runtime->gcCurrentCompartment;
JS_ASSERT(!rt->gcStrictCompartmentChecking);
runtime->gcStrictCompartmentChecking = true;
}
~AutoCheckCompartment() { runtime->gcCheckCompartment = NULL; }
~AutoCheckCompartment() { runtime->gcStrictCompartmentChecking = false; }
} acc(rt);
#endif

View file

@ -126,6 +126,12 @@ MarkArraySlots(JSTracer *trc, size_t len, HeapSlot *vec, const char *name);
void
MarkObjectSlots(JSTracer *trc, JSObject *obj, uint32_t start, uint32_t nslots);
void
MarkCrossCompartmentObjectUnbarriered(JSTracer *trc, JSObject **obj, const char *name);
void
MarkCrossCompartmentScriptUnbarriered(JSTracer *trc, JSScript **script, const char *name);
/*
* Mark a value that may be in a different compartment from the compartment
* being GC'd. (Although it won't be marked if it's in the wrong compartment.)

View file

@ -47,6 +47,7 @@
#include "jspropertytree.h"
#include "jsscope.h"
#include "jsgcinlines.h"
#include "jsobjinlines.h"
#include "jsscopeinlines.h"
@ -354,10 +355,7 @@ js::PropertyTree::dumpShapes(JSContext *cx)
JSRuntime *rt = cx->runtime;
fprintf(dumpfp, "rt->gcNumber = %lu", (unsigned long)rt->gcNumber);
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != c)
continue;
for (gc::GCCompartmentsIter c(rt); !c.done(); c.next()) {
fprintf(dumpfp, "*** Compartment %p ***\n", (void *)c.get());
/*

View file

@ -1828,8 +1828,7 @@ JSScript::clearTraps(JSContext *cx)
void
JSScript::markChildren(JSTracer *trc)
{
JS_ASSERT_IF(trc->runtime->gcCheckCompartment,
compartment() == trc->runtime->gcCheckCompartment);
JS_ASSERT_IF(trc->runtime->gcStrictCompartmentChecking, compartment()->isCollecting());
for (uint32_t i = 0; i < natoms; ++i) {
if (atoms[i])

View file

@ -162,13 +162,8 @@ bool
WatchpointMap::markAllIteratively(JSTracer *trc)
{
JSRuntime *rt = trc->runtime;
if (rt->gcCurrentCompartment) {
WatchpointMap *wpmap = rt->gcCurrentCompartment->watchpointMap;
return wpmap && wpmap->markIteratively(trc);
}
bool mutated = false;
for (CompartmentsIter c(rt); !c.done(); c.next()) {
for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
if (c->watchpointMap)
mutated |= c->watchpointMap->markIteratively(trc);
}
@ -227,14 +222,9 @@ WatchpointMap::markAll(JSTracer *trc)
void
WatchpointMap::sweepAll(JSRuntime *rt)
{
if (rt->gcCurrentCompartment) {
if (WatchpointMap *wpmap = rt->gcCurrentCompartment->watchpointMap)
for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
if (WatchpointMap *wpmap = c->watchpointMap)
wpmap->sweep();
} else {
for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (WatchpointMap *wpmap = c->watchpointMap)
wpmap->sweep();
}
}
}

View file

@ -1275,9 +1275,6 @@ Debugger::onSingleStep(JSContext *cx, Value *vp)
void
Debugger::markKeysInCompartment(JSTracer *tracer)
{
JSCompartment *comp = tracer->runtime->gcCurrentCompartment;
JS_ASSERT(comp);
/*
* WeakMap::Range is deliberately private, to discourage C++ code from
* enumerating WeakMap keys. However in this case we need access, so we
@ -1288,21 +1285,17 @@ Debugger::markKeysInCompartment(JSTracer *tracer)
const ObjectMap &objStorage = objects;
for (ObjectMap::Range r = objStorage.all(); !r.empty(); r.popFront()) {
const HeapPtrObject &key = r.front().key;
if (key->compartment() == comp && IsAboutToBeFinalized(key)) {
HeapPtrObject tmp(key);
gc::MarkObject(tracer, &tmp, "cross-compartment WeakMap key");
JS_ASSERT(tmp == key);
}
HeapPtrObject tmp(key);
gc::MarkObject(tracer, &tmp, "cross-compartment WeakMap key");
JS_ASSERT(tmp == key);
}
const ObjectMap &envStorage = environments;
for (ObjectMap::Range r = envStorage.all(); !r.empty(); r.popFront()) {
const HeapPtrObject &key = r.front().key;
if (key->compartment() == comp && IsAboutToBeFinalized(key)) {
HeapPtrObject tmp(key);
js::gc::MarkObject(tracer, &tmp, "cross-compartment WeakMap key");
JS_ASSERT(tmp == key);
}
HeapPtrObject tmp(key);
js::gc::MarkObject(tracer, &tmp, "cross-compartment WeakMap key");
JS_ASSERT(tmp == key);
}
typedef HashMap<HeapPtrScript, HeapPtrObject, DefaultHasher<HeapPtrScript>, RuntimeAllocPolicy>
@ -1310,11 +1303,9 @@ Debugger::markKeysInCompartment(JSTracer *tracer)
const ScriptMap &scriptStorage = scripts;
for (ScriptMap::Range r = scriptStorage.all(); !r.empty(); r.popFront()) {
const HeapPtrScript &key = r.front().key;
if (key->compartment() == comp && IsAboutToBeFinalized(key)) {
HeapPtrScript tmp(key);
gc::MarkScript(tracer, &tmp, "cross-compartment WeakMap key");
JS_ASSERT(tmp == key);
}
HeapPtrScript tmp(key);
gc::MarkScript(tracer, &tmp, "cross-compartment WeakMap key");
JS_ASSERT(tmp == key);
}
}
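Dropping the per-key compartment filter appears safe because MarkInternal (changed earlier in this patch) already ignores things whose compartment is not collecting, so keys in non-collecting compartments stay unmarked. The guard it relies on, quoted from the jsgcmark.cpp hunk above:

if (!trc->callback) {
    if (thing->compartment()->isCollecting())
        PushMarkStack(static_cast<GCMarker *>(trc), thing);
}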
@ -1323,19 +1314,17 @@ Debugger::markKeysInCompartment(JSTracer *tracer)
* discovered that the WeakMap was live; that is, some object containing the
* WeakMap was marked during mark phase.
*
* However, during single-compartment GC, we have to do something about
* cross-compartment WeakMaps in other compartments. Since those compartments
* aren't being GC'd, the WeakMaps definitely will not be found during mark
* phase. If their keys and values might need to be marked, we have to do it
* manually.
* However, during compartment GC, we have to do something about
* cross-compartment WeakMaps in non-GC'd compartments. If their keys and values
* might need to be marked, we have to do it manually.
*
* Each Debugger object keeps two cross-compartment WeakMaps: objects and
* scripts. Both have the nice property that all their values are in the
* same compartment as the Debugger object, so we only need to mark the
* keys. We must simply mark all keys that are in the compartment being GC'd.
* Each Debugger object keeps three cross-compartment WeakMaps: objects, scripts,
* and environments. They have the nice property that all their values are in
* the same compartment as the Debugger object, so we only need to mark the
* keys. We must simply mark all keys that are in a compartment being GC'd.
*
* We must scan all Debugger objects regardless of whether they *currently*
* have any debuggees in the compartment being GC'd, because the WeakMap
* have any debuggees in a compartment being GC'd, because the WeakMap
* entries persist even when debuggees are removed.
*
* This happens during the initial mark phase, not iterative marking, because
@ -1345,7 +1334,6 @@ void
Debugger::markCrossCompartmentDebuggerObjectReferents(JSTracer *tracer)
{
JSRuntime *rt = tracer->runtime;
JSCompartment *comp = rt->gcCurrentCompartment;
/*
* Mark all objects in comp that are referents of Debugger.Objects in other
@ -1353,7 +1341,7 @@ Debugger::markCrossCompartmentDebuggerObjectReferents(JSTracer *tracer)
*/
for (JSCList *p = &rt->debuggerList; (p = JS_NEXT_LINK(p)) != &rt->debuggerList;) {
Debugger *dbg = Debugger::fromLinks(p);
if (dbg->object->compartment() != comp)
if (!dbg->object->compartment()->isCollecting())
dbg->markKeysInCompartment(tracer);
}
}
@ -1378,18 +1366,8 @@ Debugger::markAllIteratively(GCMarker *trc)
* convoluted since the easiest way to find them is via their debuggees.
*/
JSRuntime *rt = trc->runtime;
JSCompartment *comp = rt->gcCurrentCompartment;
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++) {
JSCompartment *dc = *c;
/*
* If this is a single-compartment GC, no compartment can debug itself, so skip
* |comp|. If it's a global GC, then search every compartment.
*/
if (comp && dc == comp)
continue;
const GlobalObjectSet &debuggees = dc->getDebuggees();
for (CompartmentsIter c(rt); !c.done(); c.next()) {
const GlobalObjectSet &debuggees = c->getDebuggees();
for (GlobalObjectSet::Range r = debuggees.all(); !r.empty(); r.popFront()) {
GlobalObject *global = r.front();
if (IsAboutToBeFinalized(global))
@ -1411,7 +1389,7 @@ Debugger::markAllIteratively(GCMarker *trc)
* - it actually has hooks that might be called
*/
HeapPtrObject &dbgobj = dbg->toJSObjectRef();
if (comp && comp != dbgobj->compartment())
if (!dbgobj->compartment()->isCollecting())
continue;
bool dbgMarked = !IsAboutToBeFinalized(dbgobj);
@ -1487,20 +1465,15 @@ void
Debugger::sweepAll(JSContext *cx)
{
JSRuntime *rt = cx->runtime;
JS_ASSERT(!rt->gcCurrentCompartment);
for (JSCList *p = &rt->debuggerList; (p = JS_NEXT_LINK(p)) != &rt->debuggerList;) {
Debugger *dbg = Debugger::fromLinks(p);
if (IsAboutToBeFinalized(dbg->object)) {
/*
* dbg is being GC'd. Detach it from its debuggees. In the case of
* runtime-wide GC, the debuggee might be GC'd too. Since detaching
* requires access to both objects, this must be done before
* finalize time. However, in a per-compartment GC, it is
* impossible for both objects to be GC'd (since they are in
* different compartments), so in that case we just wait for
* Debugger::finalize.
* dbg is being GC'd. Detach it from its debuggees. The debuggee
* might be GC'd too. Since detaching requires access to both
* objects, this must be done before finalize time.
*/
for (GlobalObjectSet::Enum e(dbg->debuggees); !e.empty(); e.popFront())
dbg->removeDebuggeeGlobal(cx, e.front(), NULL, &e);
@ -1535,15 +1508,7 @@ Debugger::finalize(JSContext *cx, JSObject *obj)
Debugger *dbg = fromJSObject(obj);
if (!dbg)
return;
if (!dbg->debuggees.empty()) {
/*
* This happens only during per-compartment GC. See comment in
* Debugger::sweepAll.
*/
JS_ASSERT(cx->runtime->gcCurrentCompartment == dbg->object->compartment());
for (GlobalObjectSet::Enum e(dbg->debuggees); !e.empty(); e.popFront())
dbg->removeDebuggeeGlobal(cx, e.front(), NULL, &e);
}
JS_ASSERT(dbg->debuggees.empty());
cx->delete_(dbg);
}
@ -2183,12 +2148,10 @@ SetScriptReferent(JSObject *obj, JSScript *script)
static void
DebuggerScript_trace(JSTracer *trc, JSObject *obj)
{
if (!trc->runtime->gcCurrentCompartment) {
/* This comes from a private pointer, so no barrier needed. */
if (JSScript *script = GetScriptReferent(obj)) {
MarkScriptUnbarriered(trc, &script, "Debugger.Script referent");
obj->setPrivateUnbarriered(script);
}
/* This comes from a private pointer, so no barrier needed. */
if (JSScript *script = GetScriptReferent(obj)) {
MarkCrossCompartmentScriptUnbarriered(trc, &script, "Debugger.Script referent");
obj->setPrivateUnbarriered(script);
}
}
@ -3315,15 +3278,13 @@ static JSFunctionSpec DebuggerFrame_methods[] = {
static void
DebuggerObject_trace(JSTracer *trc, JSObject *obj)
{
if (!trc->runtime->gcCurrentCompartment) {
/*
* There is a barrier on private pointers, so the Unbarriered marking
* is okay.
*/
if (JSObject *referent = (JSObject *) obj->getPrivate()) {
MarkObjectUnbarriered(trc, &referent, "Debugger.Object referent");
obj->setPrivateUnbarriered(referent);
}
/*
* There is a barrier on private pointers, so the Unbarriered marking
* is okay.
*/
if (JSObject *referent = (JSObject *) obj->getPrivate()) {
MarkCrossCompartmentObjectUnbarriered(trc, &referent, "Debugger.Object referent");
obj->setPrivateUnbarriered(referent);
}
}
@ -3959,15 +3920,13 @@ static JSFunctionSpec DebuggerObject_methods[] = {
static void
DebuggerEnv_trace(JSTracer *trc, JSObject *obj)
{
if (!trc->runtime->gcCurrentCompartment) {
/*
* There is a barrier on private pointers, so the Unbarriered marking
* is okay.
*/
if (Env *referent = (JSObject *) obj->getPrivate()) {
MarkObjectUnbarriered(trc, &referent, "Debugger.Environment referent");
obj->setPrivateUnbarriered(referent);
}
/*
* There is a barrier on private pointers, so the Unbarriered marking
* is okay.
*/
if (Env *referent = (JSObject *) obj->getPrivate()) {
MarkCrossCompartmentObjectUnbarriered(trc, &referent, "Debugger.Environment referent");
obj->setPrivateUnbarriered(referent);
}
}