Bug 1125101 - Incrementalise compacting GC by zones r=terrence

Jon Coppeard 2015-02-26 12:35:59 +00:00
Parent 3a63575b80
Commit e207ec92b0
4 changed files with 173 additions and 127 deletions
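In outline, this patch splits the old all-at-once compactPhase() into three steps driven from incrementalCollectSlice(): beginCompactPhase() snapshots every zone eligible for relocation into the new zonesToMaybeCompact list, compactPhase() relocates zones from the front of that list and reports NotFinished while work remains, and endCompactPhase() releases (or, in debug builds, page-protects) the relocated arenas and clears startedCompacting. A toy model of that control flow, not SpiderMonkey code (all types here are stand-ins, and it yields after every zone purely to show where a slice boundary can fall):

#include <cassert>
#include <deque>
#include <vector>

enum IncrementalProgress { NotFinished, Finished };

struct Zone { bool relocated = false; };

struct ToyCompactor {
    std::deque<Zone*> zonesToMaybeCompact; // persists across slices, like the new GCRuntime field
    bool startedCompacting = false;

    // beginCompactPhase(): pick the zones that can be relocated, once.
    void beginCompactPhase(std::vector<Zone>& zones) {
        assert(zonesToMaybeCompact.empty());
        for (Zone& zone : zones)
            zonesToMaybeCompact.push_back(&zone); // stand-in for the CanRelocateZone() filter
        startedCompacting = true;
    }

    // compactPhase(): do some compaction work; NotFinished means "yield here
    // and resume in the next slice". One zone per call is this sketch's choice.
    IncrementalProgress compactPhase() {
        assert(startedCompacting);
        if (!zonesToMaybeCompact.empty()) {
            Zone* zone = zonesToMaybeCompact.front();
            zone->relocated = true; // stand-in for relocateArenas() and
                                    // updatePointersToRelocatedCells()
            zonesToMaybeCompact.pop_front();
        }
        return zonesToMaybeCompact.empty() ? Finished : NotFinished;
    }

    // endCompactPhase(): release the relocated arenas, reset the flag.
    void endCompactPhase() { startedCompacting = false; }
};

int main() {
    std::vector<Zone> zones(3);
    ToyCompactor gc;
    gc.beginCompactPhase(zones);
    int slices = 1;
    while (gc.compactPhase() == NotFinished) // each iteration models one GC slice
        ++slices;
    gc.endCompactPhase();
    assert(slices == 3); // three zones compacted across three slices
    for (Zone& zone : zones)
        assert(zone.relocated);
    return 0;
}

Keeping the pending-zone list as persistent state is what makes the phase resumable: aborting mid-way only requires clearing the list and running one final slice, which is exactly what the resetIncrementalGC() hunk below does.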

View file

@@ -923,18 +923,20 @@ class GCRuntime
     void sweepBackgroundThings(ZoneList &zones, LifoAlloc &freeBlocks, ThreadType threadType);
     void assertBackgroundSweepingFinished();
     bool shouldCompact();
+    IncrementalProgress beginCompactPhase();
     IncrementalProgress compactPhase(JS::gcreason::Reason reason);
+    void endCompactPhase(JS::gcreason::Reason reason);
     void sweepTypesAfterCompacting(Zone *zone);
     void sweepZoneAfterCompacting(Zone *zone);
-    ArenaHeader *relocateArenas(JS::gcreason::Reason reason);
-    void updateAllCellPointersParallel(MovingTracer *trc);
-    void updateAllCellPointersSerial(MovingTracer *trc);
-    void updatePointersToRelocatedCells();
-    void releaseRelocatedArenas(ArenaHeader *relocatedList);
-    void releaseRelocatedArenasWithoutUnlocking(ArenaHeader *relocatedList, const AutoLockGC& lock);
+    bool relocateArenas(Zone *zone, JS::gcreason::Reason reason);
+    void updateAllCellPointersParallel(MovingTracer *trc, Zone *zone);
+    void updateAllCellPointersSerial(MovingTracer *trc, Zone *zone);
+    void updatePointersToRelocatedCells(Zone *zone);
+    void releaseRelocatedArenas();
+    void releaseRelocatedArenasWithoutUnlocking(const AutoLockGC& lock);
 #ifdef DEBUG
-    void protectRelocatedArenas(ArenaHeader *relocatedList);
-    void unprotectRelocatedArenas(ArenaHeader *relocatedList);
+    void protectRelocatedArenas();
+    void unprotectRelocatedArenas();
 #endif
     void finishCollection(JS::gcreason::Reason reason);
@@ -1074,6 +1076,7 @@ class GCRuntime
     /* Singly linked list of zones to be swept in the background. */
     ZoneList backgroundSweepZones;
+
     /*
     * Free LIFO blocks are transferred to this allocator before being freed on
     * the background GC thread.
@@ -1105,6 +1108,13 @@ class GCRuntime
     */
    js::gc::ArenaHeader *arenasAllocatedDuringSweep;
 
+    /*
+     * Incremental compacting state.
+     */
+    bool startedCompacting;
+    js::gc::ZoneList zonesToMaybeCompact;
+    ArenaHeader* relocatedArenasToRelease;
+
 #ifdef JS_GC_MARKING_VALIDATION
    js::gc::MarkingValidator *markingValidator;
 #endif
@@ -1239,9 +1249,6 @@ class GCRuntime
     int inUnsafeRegion;
 
     size_t noGCOrAllocationCheck;
-
-    ArenaHeader* relocatedArenasToRelease;
-
 #endif
 
     /* Synchronize GC heap access between main thread and GCHelperState. */

View file

@@ -384,3 +384,10 @@ ZoneList::removeFront()
     front->listNext_ = Zone::NotOnList;
 }
+
+void
+ZoneList::clear()
+{
+    while (!isEmpty())
+        removeFront();
+}
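ZoneList is intrusive: each Zone carries its own listNext_ field, and the distinct Zone::NotOnList sentinel marks a zone that is on no list. That is why the new clear() drains the list through removeFront() instead of just forgetting the head: every zone's link must be reset. A minimal sketch of the same structure (the head/tail layout and the integer sentinel value are assumptions, not the real class):

#include <cassert>

struct Zone;

// A distinct non-null sentinel meaning "on no list"; nullptr marks the tail
// of a list. (The real Zone::NotOnList plays the same role.)
static Zone* const NotOnList = reinterpret_cast<Zone*>(1);

struct Zone {
    Zone* listNext_ = NotOnList;
    bool isOnList() const { return listNext_ != NotOnList; }
};

struct ZoneList {
    Zone* head = nullptr;
    Zone* tail = nullptr;

    bool isEmpty() const { return head == nullptr; }
    Zone* front() const { assert(!isEmpty()); return head; }

    void append(Zone* zone) {
        assert(!zone->isOnList());
        zone->listNext_ = nullptr;
        if (tail)
            tail->listNext_ = zone;
        else
            head = zone;
        tail = zone;
    }

    void removeFront() {
        assert(!isEmpty());
        Zone* zone = head;
        head = zone->listNext_;
        if (!head)
            tail = nullptr;
        zone->listNext_ = NotOnList; // reset the link, as removeFront() above does
    }

    // The operation this commit adds: emptying the list one zone at a time.
    void clear() {
        while (!isEmpty())
            removeFront();
    }
};

int main() {
    Zone a, b;
    ZoneList list;
    list.append(&a);
    list.append(&b);
    assert(list.front() == &a);
    list.clear();
    assert(list.isEmpty() && !a.isOnList() && !b.isOnList());
    return 0;
}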

View file

@@ -1085,6 +1085,8 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
     sweepKindIndex(0),
     abortSweepAfterCurrentGroup(false),
     arenasAllocatedDuringSweep(nullptr),
+    startedCompacting(false),
+    relocatedArenasToRelease(nullptr),
 #ifdef JS_GC_MARKING_VALIDATION
     markingValidator(nullptr),
 #endif
@@ -1113,7 +1115,6 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
 #ifdef DEBUG
     inUnsafeRegion(0),
     noGCOrAllocationCheck(0),
-    relocatedArenasToRelease(nullptr),
 #endif
     lock(nullptr),
     lockOwner(nullptr),
@@ -2016,6 +2017,16 @@ RelocateArena(ArenaHeader *aheader)
             CrashAtUnhandlableOOM("Could not allocate new arena while compacting");
         }
     }
+
+#ifdef DEBUG
+    for (ArenaCellIterUnderFinalize i(aheader); !i.done(); i.next()) {
+        TenuredCell *src = i.getCell();
+        MOZ_ASSERT(RelocationOverlay::isCellForwarded(src));
+        TenuredCell *dest = Forwarded(src);
+        MOZ_ASSERT(src->isMarked(BLACK) == dest->isMarked(BLACK));
+        MOZ_ASSERT(src->isMarked(GRAY) == dest->isMarked(GRAY));
+    }
+#endif
 }
 
 /*
@@ -2114,26 +2125,38 @@ ArenaLists::relocateArenas(ArenaHeader *&relocatedListOut, JS::gcreason::Reason
     return true;
 }
 
-ArenaHeader *
-GCRuntime::relocateArenas(JS::gcreason::Reason reason)
+bool
+GCRuntime::relocateArenas(Zone *zone, JS::gcreason::Reason reason)
 {
     gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT_MOVE);
 
-    ArenaHeader *relocatedList = nullptr;
-    for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
-        MOZ_ASSERT(zone->isGCFinished());
-        MOZ_ASSERT(!zone->isPreservingCode());
+    MOZ_ASSERT(!zone->isPreservingCode());
+    MOZ_ASSERT(CanRelocateZone(rt, zone));
 
-        if (CanRelocateZone(rt, zone)) {
-            jit::StopAllOffThreadCompilations(zone);
-            if (zone->arenas.relocateArenas(relocatedList, reason, stats))
-                zone->setGCState(Zone::Compact);
-        }
-    }
+    jit::StopAllOffThreadCompilations(zone);
 
-    return relocatedList;
+    if (!zone->arenas.relocateArenas(relocatedArenasToRelease, reason, stats))
+        return false;
+
+#ifdef DEBUG
+    // Check that we did as much compaction as we should have. There
+    // should always be less than one arena's worth of free cells.
+    for (ALL_ALLOC_KINDS(i)) {
+        size_t thingsPerArena = Arena::thingsPerArena(Arena::thingSize(i));
+        if (CanRelocateAllocKind(i)) {
+            ArenaList &al = zone->arenas.arenaLists[i];
+            size_t freeCells = 0;
+            for (ArenaHeader *arena = al.arenaAfterCursor(); arena; arena = arena->next)
+                freeCells += arena->countFreeCells();
+            MOZ_ASSERT(freeCells < thingsPerArena);
+        }
+    }
+#endif
+
+    return true;
 }
 
 void
 MovingTracer::Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
 {
@@ -2267,7 +2290,7 @@ struct ArenasToUpdate
         BACKGROUND = 2,
         ALL = FOREGROUND | BACKGROUND
     };
-    ArenasToUpdate(JSRuntime *rt, KindsToUpdate kinds);
+    ArenasToUpdate(Zone *zone, KindsToUpdate kinds);
     bool done() { return initialized && arena == nullptr; }
     ArenaHeader* next(AutoLockHelperThreadState& lock);
     ArenaHeader *getArenasToUpdate(AutoLockHelperThreadState& lock, unsigned max);
@@ -2275,7 +2298,7 @@ struct ArenasToUpdate
   private:
     bool initialized;
     KindsToUpdate kinds;
-    GCZonesIter zone;    // Current zone to process, unless zone.done()
+    Zone *zone;          // Zone to process
     unsigned kind;       // Current alloc kind to process
     ArenaHeader *arena;  // Next arena to process
@@ -2299,9 +2322,10 @@ bool ArenasToUpdate::shouldProcessKind(AllocKind kind)
         return (kinds & FOREGROUND) != 0;
 }
 
-ArenasToUpdate::ArenasToUpdate(JSRuntime *rt, KindsToUpdate kinds)
-  : initialized(false), kinds(kinds), zone(rt, SkipAtoms)
+ArenasToUpdate::ArenasToUpdate(Zone *zone, KindsToUpdate kinds)
+  : initialized(false), kinds(kinds), zone(zone)
 {
+    MOZ_ASSERT(zone->isGCCompacting());
     MOZ_ASSERT(kinds && !(kinds & ~ALL));
 }
@@ -2320,33 +2344,30 @@ ArenasToUpdate::next(AutoLockHelperThreadState& lock)
     if (initialized) {
         MOZ_ASSERT(arena);
         MOZ_ASSERT(shouldProcessKind(AllocKind(kind)));
-        MOZ_ASSERT(!zone.done());
+        MOZ_ASSERT(zone);
         goto resumePoint;
     }
 
     initialized = true;
-    for (; !zone.done(); zone.next()) {
-        if (zone->isGCCompacting()) {
-            for (kind = 0; kind < size_t(AllocKind::LIMIT); ++kind) {
-                if (shouldProcessKind(AllocKind(kind))) {
-                    for (arena = zone.get()->arenas.getFirstArena(AllocKind(kind));
-                         arena;
-                         arena = arena->next)
-                    {
-                        return arena;
-                      resumePoint:;
-                    }
-                }
-            }
+    for (kind = 0; kind < size_t(AllocKind::LIMIT); ++kind) {
+        if (shouldProcessKind(AllocKind(kind))) {
+            for (arena = zone->arenas.getFirstArena(AllocKind(kind));
+                 arena;
+                 arena = arena->next)
+            {
+                return arena;
+              resumePoint:;
             }
         }
     }
+    zone = nullptr;
     return nullptr;
 }
 
 ArenaHeader *
 ArenasToUpdate::getArenasToUpdate(AutoLockHelperThreadState& lock, unsigned count)
 {
-    if (zone.done())
+    if (!zone)
         return nullptr;
 
     ArenaHeader *head = nullptr;
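The rewritten ArenasToUpdate::next() keeps the goto resumePoint device: the loop indices live in the object rather than in locals, so each call re-enters the nested loops exactly where the previous call returned, giving a hand-rolled coroutine that getArenasToUpdate() can drain in batches under the helper-thread lock. A self-contained sketch of the same pattern, with a vector of vectors standing in for the per-kind arena lists:

#include <cassert>
#include <vector>

struct ResumableIter {
    const std::vector<std::vector<int>>* lists = nullptr;
    bool initialized = false;
    size_t kind = 0;   // outer loop position, preserved between calls
    size_t index = 0;  // inner loop position, preserved between calls

    const int* next() {
        if (initialized) {
            assert(lists);
            goto resumePoint; // re-enter the loops where we left off
        }
        initialized = true;
        for (kind = 0; kind < lists->size(); ++kind) {
            for (index = 0; index < (*lists)[kind].size(); ++index) {
                return &(*lists)[kind][index];
              resumePoint:;
            }
        }
        return nullptr; // traversal exhausted
    }
};

int main() {
    std::vector<std::vector<int>> lists = {{1, 2}, {}, {3}};
    ResumableIter it;
    it.lists = &lists;
    int sum = 0;
    for (const int* p = it.next(); p; p = it.next())
        sum += *p;
    assert(sum == 6);
    return 0;
}

The jump into the loop bodies is legal here precisely because no local variables are declared inside them; all iteration state is member data, which is the same property the real code relies on.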
@@ -2432,7 +2453,7 @@ UpdateCellPointersTask::run()
 } // namespace js
 
 void
-GCRuntime::updateAllCellPointersParallel(MovingTracer *trc)
+GCRuntime::updateAllCellPointersParallel(MovingTracer *trc, Zone *zone)
 {
     AutoDisableProxyCheck noProxyCheck(rt); // These checks assert when run in parallel.
@@ -2443,8 +2464,8 @@ GCRuntime::updateAllCellPointersParallel(MovingTracer *trc)
     UpdateCellPointersTask bgTasks[maxTasks];
     UpdateCellPointersTask fgTask;
 
-    ArenasToUpdate fgArenas(rt, ArenasToUpdate::FOREGROUND);
-    ArenasToUpdate bgArenas(rt, ArenasToUpdate::BACKGROUND);
+    ArenasToUpdate fgArenas(zone, ArenasToUpdate::FOREGROUND);
+    ArenasToUpdate bgArenas(zone, ArenasToUpdate::BACKGROUND);
 
     unsigned tasksStarted = 0;
     {
@@ -2469,12 +2490,12 @@ GCRuntime::updateAllCellPointersParallel(MovingTracer *trc)
 }
 
 void
-GCRuntime::updateAllCellPointersSerial(MovingTracer *trc)
+GCRuntime::updateAllCellPointersSerial(MovingTracer *trc, Zone *zone)
 {
     UpdateCellPointersTask task;
     {
         AutoLockHelperThreadState lock;
-        ArenasToUpdate allArenas(rt, ArenasToUpdate::ALL);
+        ArenasToUpdate allArenas(zone, ArenasToUpdate::ALL);
         task.init(rt, &allArenas, lock);
     }
     task.runFromMainThread(rt);
@@ -2487,20 +2508,24 @@ GCRuntime::updateAllCellPointersSerial(MovingTracer *trc)
  * part of the traversal.
  */
 void
-GCRuntime::updatePointersToRelocatedCells()
+GCRuntime::updatePointersToRelocatedCells(Zone *zone)
 {
+    MOZ_ASSERT(zone->isGCCompacting());
     MOZ_ASSERT(rt->currentThreadHasExclusiveAccess());
 
     gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT_UPDATE);
     MovingTracer trc(rt);
 
     // Fixup compartment global pointers as these get accessed during marking.
-    for (GCCompartmentsIter comp(rt); !comp.done(); comp.next())
+    for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
         comp->fixupAfterMovingGC();
 
     // Fixup cross compartment wrappers as we assert the existence of wrappers in the map.
     for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next()) {
+        // Sweep the wrapper map to update its pointers.
         comp->sweepCrossCompartmentWrappers();
+        // Mark the contents of the map to update each wrapper's cross compartment pointer.
         comp->markCrossCompartmentWrappers(&trc);
     }
@@ -2508,9 +2533,9 @@ GCRuntime::updatePointersToRelocatedCells()
     // them. Since updating each cell is independent we try to parallelize this
     // as much as possible.
     if (CanUseExtraThreads())
-        updateAllCellPointersParallel(&trc);
+        updateAllCellPointersParallel(&trc, zone);
     else
-        updateAllCellPointersSerial(&trc);
+        updateAllCellPointersSerial(&trc, zone);
 
     // Mark roots to update them.
     {
@@ -2520,7 +2545,7 @@ GCRuntime::updatePointersToRelocatedCells()
         Debugger::markAll(&trc);
         Debugger::markIncomingCrossCompartmentEdges(&trc);
 
-        for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+        for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
             WeakMapBase::markAll(c, &trc);
             if (c->watchpointMap)
                 c->watchpointMap->markAll(&trc);
@@ -2536,10 +2561,7 @@ GCRuntime::updatePointersToRelocatedCells()
     WatchpointMap::sweepAll(rt);
     Debugger::sweepAll(rt->defaultFreeOp());
     jit::JitRuntime::SweepJitcodeGlobalTable(rt);
-    for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
-        if (zone->isGCCompacting())
-            rt->gc.sweepZoneAfterCompacting(zone);
-    }
+    rt->gc.sweepZoneAfterCompacting(zone);
 
     // Type inference may put more blocks here to free.
     freeLifoAlloc.freeAll();
@@ -2555,9 +2577,9 @@ GCRuntime::updatePointersToRelocatedCells()
 
 #ifdef DEBUG
 void
-GCRuntime::protectRelocatedArenas(ArenaHeader *relocatedList)
+GCRuntime::protectRelocatedArenas()
 {
-    for (ArenaHeader* arena = relocatedList, *next; arena; arena = next) {
+    for (ArenaHeader* arena = relocatedArenasToRelease, *next; arena; arena = next) {
         next = arena->next;
 #if defined(XP_WIN)
         DWORD oldProtect;
@@ -2571,9 +2593,9 @@ GCRuntime::protectRelocatedArenas(ArenaHeader *relocatedList)
 }
 
 void
-GCRuntime::unprotectRelocatedArenas(ArenaHeader *relocatedList)
+GCRuntime::unprotectRelocatedArenas()
 {
-    for (ArenaHeader* arena = relocatedList; arena; arena = arena->next) {
+    for (ArenaHeader* arena = relocatedArenasToRelease; arena; arena = arena->next) {
 #if defined(XP_WIN)
         DWORD oldProtect;
         if (!VirtualProtect(arena, ArenaSize, PAGE_READWRITE, &oldProtect))
@@ -2587,21 +2609,21 @@ GCRuntime::unprotectRelocatedArenas(ArenaHeader *relocatedList)
 #endif
 
 void
-GCRuntime::releaseRelocatedArenas(ArenaHeader *relocatedList)
+GCRuntime::releaseRelocatedArenas()
 {
     AutoLockGC lock(rt);
-    releaseRelocatedArenasWithoutUnlocking(relocatedList, lock);
+    releaseRelocatedArenasWithoutUnlocking(lock);
     expireChunksAndArenas(true, lock);
 }
 
 void
-GCRuntime::releaseRelocatedArenasWithoutUnlocking(ArenaHeader *relocatedList, const AutoLockGC &lock)
+GCRuntime::releaseRelocatedArenasWithoutUnlocking(const AutoLockGC &lock)
 {
     // Release the relocated arenas, now containing only forwarding pointers
     unsigned count = 0;
-    while (relocatedList) {
-        ArenaHeader *aheader = relocatedList;
-        relocatedList = relocatedList->next;
+    while (relocatedArenasToRelease) {
+        ArenaHeader *aheader = relocatedArenasToRelease;
+        relocatedArenasToRelease = relocatedArenasToRelease->next;
 
         // Clear the mark bits
         aheader->unmarkAll();
@@ -2631,9 +2653,8 @@ GCRuntime::releaseHeldRelocatedArenas()
     // In debug mode we don't release relocated arenas straight away. Instead
     // we protect them and hold onto them until the next GC sweep phase to catch
     // any pointers to them that didn't get forwarded.
-    unprotectRelocatedArenas(relocatedArenasToRelease);
-    releaseRelocatedArenas(relocatedArenasToRelease);
-    relocatedArenasToRelease = nullptr;
+    unprotectRelocatedArenas();
+    releaseRelocatedArenas();
 #endif
 }
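protectRelocatedArenas() and unprotectRelocatedArenas() implement a debug-only use-after-move detector: instead of freeing relocated arenas immediately, the collector keeps them until the next GC with their pages made inaccessible, so a stale pointer that was never forwarded faults immediately rather than silently reading old data. A standalone sketch of that technique (the PAGE_NOACCESS/PROT_NONE protect side and the 4 KiB arena size are assumptions; the hunks above only show the PAGE_READWRITE restore path):

#include <cassert>
#include <cstddef>
#if defined(_WIN32)
# include <windows.h>
#else
# include <sys/mman.h>
#endif

static const size_t ArenaSize = 4096; // stand-in; assumes page-sized arenas

// Make the pages unreadable and unwritable so any stale access faults.
static void ProtectPages(void* p, size_t len) {
#if defined(_WIN32)
    DWORD oldProtect;
    BOOL ok = VirtualProtect(p, len, PAGE_NOACCESS, &oldProtect);
    assert(ok); (void)ok;
#else
    int rc = mprotect(p, len, PROT_NONE);
    assert(rc == 0); (void)rc;
#endif
}

// Restore access before the arenas are actually released.
static void UnprotectPages(void* p, size_t len) {
#if defined(_WIN32)
    DWORD oldProtect;
    BOOL ok = VirtualProtect(p, len, PAGE_READWRITE, &oldProtect);
    assert(ok); (void)ok;
#else
    int rc = mprotect(p, len, PROT_READ | PROT_WRITE);
    assert(rc == 0); (void)rc;
#endif
}

int main() {
#if defined(_WIN32)
    void* arena = VirtualAlloc(nullptr, ArenaSize, MEM_RESERVE | MEM_COMMIT, PAGE_READWRITE);
    assert(arena != nullptr);
#else
    void* arena = mmap(nullptr, ArenaSize, PROT_READ | PROT_WRITE,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    assert(arena != MAP_FAILED);
#endif
    ProtectPages(arena, ArenaSize);   // held across the window; any access now crashes
    UnprotectPages(arena, ArenaSize); // at the next GC, unprotect and then free
#if defined(_WIN32)
    VirtualFree(arena, 0, MEM_RELEASE);
#else
    munmap(arena, ArenaSize);
#endif
    return 0;
}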
@@ -3191,7 +3212,6 @@ GCRuntime::assertBackgroundSweepingFinished()
 #ifdef DEBUG
     MOZ_ASSERT(backgroundSweepZones.isEmpty());
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
-        MOZ_ASSERT(!zone->isOnList());
         for (ALL_ALLOC_KINDS(i)) {
             MOZ_ASSERT(!zone->arenas.arenaListsToSweep[i]);
             MOZ_ASSERT(zone->arenas.doneBackgroundFinalize(i));
@@ -5266,7 +5286,7 @@ GCRuntime::endSweepPhase(bool lastGC)
 }
 
 GCRuntime::IncrementalProgress
-GCRuntime::compactPhase(JS::gcreason::Reason reason)
+GCRuntime::beginCompactPhase()
 {
     gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT);
@@ -5279,63 +5299,60 @@ GCRuntime::compactPhase(JS::gcreason::Reason reason)
         waitBackgroundSweepEnd();
     }
 
+    MOZ_ASSERT(zonesToMaybeCompact.isEmpty());
+    for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
+        if (CanRelocateZone(rt, zone))
+            zonesToMaybeCompact.append(zone);
+    }
+
+    MOZ_ASSERT(!relocatedArenasToRelease);
+    startedCompacting = true;
+    return Finished;
+}
+
+GCRuntime::IncrementalProgress
+GCRuntime::compactPhase(JS::gcreason::Reason reason)
+{
     MOZ_ASSERT(rt->gc.nursery.isEmpty());
     assertBackgroundSweepingFinished();
+    MOZ_ASSERT(startedCompacting);
 
-    ArenaHeader *relocatedList = relocateArenas(reason);
-    if (relocatedList)
-        updatePointersToRelocatedCells();
+    gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT);
 
-#ifdef DEBUG
-    for (ArenaHeader *arena = relocatedList; arena; arena = arena->next) {
-        for (ArenaCellIterUnderFinalize i(arena); !i.done(); i.next()) {
-            TenuredCell *src = i.getCell();
-            MOZ_ASSERT(RelocationOverlay::isCellForwarded(src));
-            TenuredCell *dest = Forwarded(src);
-            MOZ_ASSERT(src->isMarked(BLACK) == dest->isMarked(BLACK));
-            MOZ_ASSERT(src->isMarked(GRAY) == dest->isMarked(GRAY));
+    while (!zonesToMaybeCompact.isEmpty()) {
+        Zone *zone = zonesToMaybeCompact.front();
+        MOZ_ASSERT(zone->isGCFinished());
+        if (relocateArenas(zone, reason)) {
+            zone->setGCState(Zone::Compact);
+            updatePointersToRelocatedCells(zone);
+            zone->setGCState(Zone::Finished);
         }
+        zonesToMaybeCompact.removeFront();
     }
-#endif
-
-    // Release the relocated arenas, or in debug builds queue them to be
-    // released until the start of the next GC unless this is the last GC or we
-    // are doing a last ditch GC.
-#ifndef DEBUG
-    releaseRelocatedArenas(relocatedList);
-#else
-    if (reason != JS::gcreason::DEBUG_GC) {
-        releaseRelocatedArenas(relocatedList);
-    } else {
-        MOZ_ASSERT(!relocatedArenasToRelease);
-        protectRelocatedArenas(relocatedList);
-        relocatedArenasToRelease = relocatedList;
-    }
-#endif
 
 #ifdef DEBUG
     CheckHashTablesAfterMovingGC(rt);
-    for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
-        if (zone->isGCCompacting()) {
-            MOZ_ASSERT(!zone->isPreservingCode());
-            zone->arenas.checkEmptyFreeLists();
-
-            // Check that we did as much compaction as we should have. There
-            // should always be less than one arena's worth of free cells.
-            for (ALL_ALLOC_KINDS(i)) {
-                size_t thingsPerArena = Arena::thingsPerArena(Arena::thingSize(i));
-                if (CanRelocateAllocKind(i)) {
-                    ArenaList &al = zone->arenas.arenaLists[i];
-                    size_t freeCells = 0;
-                    for (ArenaHeader *arena = al.arenaAfterCursor(); arena; arena = arena->next)
-                        freeCells += arena->countFreeCells();
-                    MOZ_ASSERT(freeCells < thingsPerArena);
-                }
-            }
-        }
-    }
 #endif
 
-    return Finished;
+    return zonesToMaybeCompact.isEmpty() ? Finished : NotFinished;
+}
+
+void
+GCRuntime::endCompactPhase(JS::gcreason::Reason reason)
+{
+    // Release the relocated arenas, or in debug builds queue them to be
+    // released at the start of the next GC unless this is the last GC or we are
+    // doing a last ditch GC.
+#ifndef DEBUG
+    releaseRelocatedArenas();
+#else
+    if (reason != JS::gcreason::DEBUG_GC)
+        releaseRelocatedArenas();
+    else
+        protectRelocatedArenas();
+#endif
+
+    startedCompacting = false;
 }
 
 void
@@ -5349,7 +5366,7 @@ GCRuntime::finishCollection(JS::gcreason::Reason reason)
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (zone->isCollecting()) {
-            MOZ_ASSERT(zone->isGCFinished() || zone->isGCCompacting());
+            MOZ_ASSERT(zone->isGCFinished());
             zone->setGCState(Zone::NoGC);
             zone->active = false;
         }
@@ -5358,6 +5375,8 @@ GCRuntime::finishCollection(JS::gcreason::Reason reason)
         MOZ_ASSERT(!zone->wasGCStarted());
     }
 
+    MOZ_ASSERT(zonesToMaybeCompact.isEmpty());
+
     if (invocationKind == GC_SHRINK) {
         // Ensure excess chunks are returned to the system and free arenas
         // decommitted.
@@ -5517,7 +5536,10 @@ GCRuntime::resetIncrementalGC(const char *reason)
     }
 
     bool wasCompacting = isCompacting;
-    isCompacting = false;
+
+    isCompacting = true;
+    startedCompacting = true;
+    zonesToMaybeCompact.clear();
 
     SliceBudget budget;
     incrementalCollectSlice(budget, JS::gcreason::RESET);
@@ -5533,11 +5555,13 @@ GCRuntime::resetIncrementalGC(const char *reason)
     stats.reset(reason);
 
 #ifdef DEBUG
+    assertBackgroundSweepingFinished();
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
+        MOZ_ASSERT(!zone->isCollecting());
         MOZ_ASSERT(!zone->needsIncrementalBarrier());
-        for (ALL_ALLOC_KINDS(i))
-            MOZ_ASSERT(!zone->arenas.arenaListsToSweep[i]);
+        MOZ_ASSERT(!zone->isOnList());
     }
+    MOZ_ASSERT(zonesToMaybeCompact.isEmpty());
 #endif
 }
@@ -5735,14 +5759,22 @@ GCRuntime::incrementalCollectSlice(SliceBudget &budget, JS::gcreason::Reason rea
         endSweepPhase(lastGC);
 
         incrementalState = COMPACT;
+        MOZ_ASSERT(!startedCompacting);
 
         /* Yield before compacting since it is not incremental. */
         if (isCompacting && isIncremental)
             break;
 
       case COMPACT:
-        if (isCompacting && compactPhase(reason) == NotFinished)
-            break;
+        if (isCompacting) {
+            if (!startedCompacting && beginCompactPhase() == NotFinished)
+                break;
+
+            if (compactPhase(reason) == NotFinished)
+                break;
+
+            endCompactPhase(reason);
+        }
 
         finishCollection(reason);
@@ -6205,12 +6237,11 @@ GCRuntime::onOutOfMallocMemory()
 void
 GCRuntime::onOutOfMallocMemory(const AutoLockGC &lock)
 {
+#ifdef DEBUG
     // Release any relocated arenas we may be holding on to, without releasing
     // the GC lock.
-#ifdef DEBUG
-    unprotectRelocatedArenas(relocatedArenasToRelease);
-    releaseRelocatedArenasWithoutUnlocking(relocatedArenasToRelease, lock);
-    relocatedArenasToRelease = nullptr;
+    unprotectRelocatedArenas();
+    releaseRelocatedArenasWithoutUnlocking(lock);
 #endif
 
     // Throw away any excess chunks we have lying around.

View file

@@ -1382,6 +1382,7 @@ class ZoneList
     void append(Zone *zone);
     void transferFrom(ZoneList &other);
     void removeFront();
+    void clear();
 
   private:
     explicit ZoneList(Zone *singleZone);