Bug 782318 - GC: Simplify compartment GC state r=billm

Jon Coppeard 2012-08-14 11:10:50 +01:00
Parent ca7c98775e
Commit dd5c10cc90
4 changed files with 54 additions and 71 deletions
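
For orientation before the hunks: a condensed sketch of the compartment GC state this patch arrives at. The member and method names are taken from the diff itself; the compacted struct is illustrative only and omits everything else in the real JSCompartment.

    // Sketch: the old three-valued gcState (NoGCScheduled / GCScheduled /
    // GCRunning) plus a separate gcStarted flag collapse into one
    // scheduling bool and a two-valued collecting state.
    struct CompartmentGCStateSketch {
        enum CompartmentGCState { NoGC, Collecting };

        bool gcScheduled;            // the request, set between GCs
        CompartmentGCState gcState;  // set only while a GC is in progress

        bool isGCScheduled() const { return gcScheduled; }
        bool wasGCStarted() const { return gcState != NoGC; }
    };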

View file

@@ -643,6 +643,8 @@ struct JSRuntime : js::RuntimeFriendFields
 
     bool isHeapBusy() { return heapState != Idle; }
 
+    bool isHeapCollecting() { return heapState == Collecting; }
+
     /*
      * These options control the zealousness of the GC. The fundamental values
      * are gcNextScheduled and gcDebugCompartmentGC. At every allocation,
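
The new isHeapCollecting() is deliberately narrower than isHeapBusy(): heapState is non-Idle during any trace session, but equals Collecting only inside an AutoGCSession, which (see the jsgc.cpp hunks below) constructs its AutoTraceSession with JSRuntime::Collecting. A sketch of why the distinction matters, based on the jscompartment.h hunk below:

    // During a non-GC trace session isHeapBusy() is true but
    // isHeapCollecting() is false, so isCollecting() falls through to
    // the barrier check instead of reading transient GC state.
    bool isCollecting() const {
        if (rt->isHeapCollecting())
            return gcState != NoGC;    // inside a GC: trust gcState
        else
            return needsBarrier();     // otherwise: incremental barrier state
    }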

View file

@@ -45,9 +45,9 @@ JSCompartment::JSCompartment(JSRuntime *rt)
     gcStoreBuffer(&gcNursery),
 #endif
     needsBarrier_(false),
-    gcState(NoGCScheduled),
+    gcScheduled(false),
+    gcState(NoGC),
     gcPreserveCode(false),
-    gcStarted(false),
     gcBytes(0),
     gcTriggerBytes(0),
     gcHeapGrowthFactor(3.0),

View file

@@ -164,22 +164,19 @@ struct JSCompartment
   private:
     enum CompartmentGCState {
-        NoGCScheduled,
-        GCScheduled,
-        GCRunning
+        NoGC,
+        Collecting
     };
 
+    bool gcScheduled;
     CompartmentGCState gcState;
     bool gcPreserveCode;
-    bool gcStarted;
 
   public:
     bool isCollecting() const {
-        /* Allow this if we're in the middle of an incremental GC. */
-        if (rt->isHeapBusy()) {
-            return gcState == GCRunning;
+        if (rt->isHeapCollecting()) {
+            return gcState != NoGC;
         } else {
-            JS_ASSERT(gcState != GCRunning);
             return needsBarrier();
         }
     }
@@ -193,31 +190,25 @@ struct JSCompartment
      * tracer.
      */
     bool requireGCTracer() const {
-        return gcState == GCRunning;
+        return rt->isHeapCollecting() && gcState != NoGC;
     }
 
     void setCollecting(bool collecting) {
         JS_ASSERT(rt->isHeapBusy());
-        if (collecting)
-            gcState = GCRunning;
-        else
-            gcState = NoGCScheduled;
+        gcState = collecting ? Collecting : NoGC;
     }
 
     void scheduleGC() {
         JS_ASSERT(!rt->isHeapBusy());
-        JS_ASSERT(gcState != GCRunning);
-        gcState = GCScheduled;
+        gcScheduled = true;
     }
 
     void unscheduleGC() {
         JS_ASSERT(!rt->isHeapBusy());
-        JS_ASSERT(gcState != GCRunning);
-        gcState = NoGCScheduled;
+        gcScheduled = false;
     }
 
     bool isGCScheduled() const {
-        return gcState == GCScheduled;
+        return gcScheduled;
     }
 
     void setPreservingCode(bool preserving) {
@@ -225,16 +216,11 @@ struct JSCompartment
     }
 
     bool wasGCStarted() const {
-        return gcStarted;
-    }
-
-    void setGCStarted(bool started) {
-        JS_ASSERT(rt->isHeapBusy());
-        gcStarted = started;
+        return gcState != NoGC;
     }
 
     bool isGCSweeping() {
-        return wasGCStarted() && rt->gcIncrementalState == js::gc::SWEEP;
+        return gcState != NoGC && rt->gcIncrementalState == js::gc::SWEEP;
     }
 
     size_t gcBytes;
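
Taken together, the accessors above give each compartment a simple lifecycle across one GC. A sketch of the transitions, assembled from calls that all appear in the jsgc.cpp hunks below (the ordering is illustrative, not a literal code path):

    // c is a JSCompartment *; every call below appears in this patch.
    c->scheduleGC();          // mutator: gcScheduled = true
                              // BeginMarkPhase: scheduled compartments join the GC
    c->setCollecting(true);   // gcState = Collecting; wasGCStarted() is now true
                              // EndSweepPhase / ResetIncrementalGC: leave the GC
    c->setCollecting(false);  // gcState = NoGC
                              // ~AutoGCSession: clear the request for next time
    c->unscheduleGC();        // gcScheduled = false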

View file

@@ -3184,6 +3184,38 @@ BeginMarkPhase(JSRuntime *rt)
 {
     int64_t currentTime = PRMJ_Now();
 
+    rt->gcIsFull = true;
+    DebugOnly<bool> any = false;
+    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+        /* Assert that compartment state is as we expect */
+        JS_ASSERT(!c->isCollecting());
+        for (unsigned i = 0; i < FINALIZE_LIMIT; ++i)
+            JS_ASSERT(!c->arenas.arenaListsToSweep[i]);
+
+        /* Set up which compartments will be collected. */
+        if (c->isGCScheduled()) {
+            any = true;
+            if (c.get() != rt->atomsCompartment)
+                c->setCollecting(true);
+        } else {
+            rt->gcIsFull = false;
+        }
+
+        c->setPreservingCode(ShouldPreserveJITCode(c, currentTime));
+    }
+
+    /* Check that at least one compartment is scheduled for collection. */
+    JS_ASSERT(any);
+
+    /*
+     * Atoms are not in the cross-compartment map. So if there are any
+     * compartments that are not being collected, we are not allowed to collect
+     * atoms. Otherwise, the non-collected compartments could contain pointers
+     * to atoms that we would miss.
+     */
+    if (rt->atomsCompartment->isGCScheduled() && rt->gcIsFull && !rt->gcKeepAtoms)
+        rt->atomsCompartment->setCollecting(true);
+
     /*
      * At the end of each incremental slice, we call prepareForIncrementalGC,
      * which marks objects in all arenas that we're currently allocating
@@ -3196,20 +3228,6 @@ BeginMarkPhase(JSRuntime *rt)
         c->arenas.purge();
     }
 
-    rt->gcIsFull = true;
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        JS_ASSERT(!c->wasGCStarted());
-        for (unsigned i = 0 ; i < FINALIZE_LIMIT ; ++i)
-            JS_ASSERT(!c->arenas.arenaListsToSweep[i]);
-
-        if (c->isCollecting())
-            c->setGCStarted(true);
-        else
-            rt->gcIsFull = false;
-
-        c->setPreservingCode(ShouldPreserveJITCode(c, currentTime));
-    }
-
     rt->gcMarker.start(rt);
     JS_ASSERT(!rt->gcMarker.callback);
     JS_ASSERT(IS_GC_MARKING_TRACER(&rt->gcMarker));
@@ -3646,9 +3664,8 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, gcreason::Reason gcReason)
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         c->setGCLastBytes(c->gcBytes, c->gcMallocAndFreeBytes, gckind);
-        if (c->wasGCStarted())
-            c->setGCStarted(false);
+        c->setCollecting(false);
 
         JS_ASSERT(!c->wasGCStarted());
         for (unsigned i = 0 ; i < FINALIZE_LIMIT ; ++i)
             JS_ASSERT(!c->arenas.arenaListsToSweep[i]);
     }
@@ -3699,27 +3716,6 @@ AutoTraceSession::~AutoTraceSession()
 
 AutoGCSession::AutoGCSession(JSRuntime *rt)
   : AutoTraceSession(rt, JSRuntime::Collecting)
 {
-    bool all = true;
-    DebugOnly<bool> any = false;
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        if (c->isGCScheduled()) {
-            c->setCollecting(true);
-            any = true;
-        } else {
-            all = false;
-        }
-    }
-    JS_ASSERT(any);
-
-    /*
-     * Atoms are not in the cross-compartment map. So if there are any
-     * compartments that are not being collected, we are not allowed to collect
-     * atoms. Otherwise, the non-collected compartments could contain pointers
-     * to atoms that we would miss.
-     */
-    if (rt->gcKeepAtoms || !all)
-        rt->atomsCompartment->setCollecting(false);
-
     runtime->gcIsNeeded = false;
     runtime->gcInterFrameGC = true;
@@ -3728,9 +3724,6 @@ AutoGCSession::AutoGCSession(JSRuntime *rt)
 
 AutoGCSession::~AutoGCSession()
 {
-    for (GCCompartmentsIter c(runtime); !c.done(); c.next())
-        c->setCollecting(false);
-
 #ifndef JS_MORE_DETERMINISTIC
     runtime->gcNextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
 #endif
@@ -3743,8 +3736,10 @@ AutoGCSession::~AutoGCSession()
 #endif
 
     /* Clear gcMallocBytes for all compartments */
-    for (CompartmentsIter c(runtime); !c.done(); c.next())
+    for (CompartmentsIter c(runtime); !c.done(); c.next()) {
         c->resetGCMallocBytes();
+        c->unscheduleGC();
+    }
 
     runtime->resetGCMallocBytes();
 }
@@ -3773,7 +3768,7 @@ ResetIncrementalGC(JSRuntime *rt, const char *reason)
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         c->setNeedsBarrier(false);
-        c->setGCStarted(false);
+        c->setCollecting(false);
 
         for (unsigned i = 0 ; i < FINALIZE_LIMIT ; ++i)
             JS_ASSERT(!c->arenas.arenaListsToSweep[i]);
     }
@@ -4070,7 +4065,7 @@ BudgetIncrementalGC(JSRuntime *rt, int64_t *budget)
         }
 
         if (rt->gcIncrementalState != NO_INCREMENTAL &&
-            c->isCollecting() != c->wasGCStarted()) {
+            c->isGCScheduled() != c->wasGCStarted()) {
             reset = true;
         }
     }
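
One observation on the final hunk: with scheduling split from collecting state, BudgetIncrementalGC can compare the current schedule (isGCScheduled()) directly against the set of compartments the in-progress incremental GC actually started with (wasGCStarted(), now derived from gcState). If a compartment has been scheduled or unscheduled between slices, the two disagree and the incremental GC is reset rather than continued against a stale compartment set.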