Bug 650161 - Implement compacting GC for JSObjects r=terrence

Jon Coppeard 2014-08-14 11:52:24 +01:00
Parent eec4a0fd36
Commit 01b56566aa
10 changed files with 560 additions and 47 deletions

View file

@@ -43,6 +43,7 @@ namespace js {}
#define JS_SWEPT_NURSERY_PATTERN 0x2B
#define JS_ALLOCATED_NURSERY_PATTERN 0x2D
#define JS_FRESH_TENURED_PATTERN 0x4F
#define JS_MOVED_TENURED_PATTERN 0x49
#define JS_SWEPT_TENURED_PATTERN 0x4B
#define JS_ALLOCATED_TENURED_PATTERN 0x4D
#define JS_SWEPT_CODE_PATTERN 0x3b

View file

@@ -2062,6 +2062,7 @@ static const JSFunctionSpecWithHelp TestingFunctions[] = {
" 11: Verify post write barriers between instructions\n"
" 12: Verify post write barriers between paints\n"
" 13: Check internal hashtables on minor GC\n"
" 14: Always compact arenas after GC\n"
" Period specifies that collection happens every n allocations.\n"), " Period specifies that collection happens every n allocations.\n"),
JS_FN_HELP("schedulegc", ScheduleGC, 1, 0, JS_FN_HELP("schedulegc", ScheduleGC, 1, 0,

View file

@@ -293,7 +293,17 @@ class GCRuntime
void runDebugGC();
inline void poke();
-void markRuntime(JSTracer *trc, bool useSavedRoots = false);
enum TraceOrMarkRuntime {
TraceRuntime,
MarkRuntime
};
enum TraceRootsOrUsedSaved {
TraceRoots,
UseSavedRoots
};
void markRuntime(JSTracer *trc,
TraceOrMarkRuntime traceOrMark = TraceRuntime,
TraceRootsOrUsedSaved rootsSource = TraceRoots);
void notifyDidPaint();
void shrinkBuffers();
@@ -491,6 +501,9 @@ class GCRuntime
void markWeakReferencesInCurrentGroup(gcstats::Phase phase);
template <class ZoneIterT, class CompartmentIterT> void markGrayReferences();
void markGrayReferencesInCurrentGroup();
void markAllWeakReferences(gcstats::Phase phase);
void markAllGrayReferences();
void beginSweepPhase(bool lastGC);
void findZoneGroups();
bool findZoneEdgesForWeakMaps();
@@ -507,6 +520,13 @@ class GCRuntime
void expireChunksAndArenas(bool shouldShrink);
void sweepBackgroundThings(bool onBackgroundThread);
void assertBackgroundSweepingFinished();
bool shouldCompact();
#ifdef JSGC_COMPACTING
void compactPhase();
void updatePointersToRelocatedCells();
void releaseRelocatedArenas(ArenaHeader *relocatedList);
#endif
void finishCollection();
void computeNonIncrementalMarkingForValidation();
void validateIncrementalMarking();
@@ -516,8 +536,6 @@ class GCRuntime
#ifdef DEBUG
void checkForCompartmentMismatches();
-void markAllWeakReferences(gcstats::Phase phase);
-void markAllGrayReferences();
#endif
public:
@@ -839,7 +857,8 @@ GCRuntime::needZealousGC() {
if (zealMode == ZealAllocValue ||
zealMode == ZealGenerationalGCValue ||
(zealMode >= ZealIncrementalRootsThenFinish &&
-zealMode <= ZealIncrementalMultipleSlices))
zealMode <= ZealIncrementalMultipleSlices) ||
zealMode == ZealCompactValue)
{
nextScheduled = zealFrequency;
}

View file

@@ -102,6 +102,7 @@ struct Cell
MOZ_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const;
MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const Cell *src);
inline JSRuntime *runtimeFromMainThread() const;
inline JS::shadow::Runtime *shadowRuntimeFromMainThread() const;
@@ -761,6 +762,12 @@ struct ChunkBitmap
*word &= ~mask;
}
MOZ_ALWAYS_INLINE void copyMarkBit(Cell *dst, const Cell *src, uint32_t color) {
uintptr_t *word, mask;
getMarkWordAndMask(dst, color, &word, &mask);
*word = (*word & ~mask) | (src->isMarked(color) ? mask : 0);
}
void clear() {
memset((void *)bitmap, 0, sizeof(bitmap));
}
@@ -1112,6 +1119,16 @@ Cell::unmark(uint32_t color) const
chunk()->bitmap.unmark(this, color);
}
void
Cell::copyMarkBitsFrom(const Cell *src)
{
JS_ASSERT(isTenured());
JS_ASSERT(src->isTenured());
ChunkBitmap &bitmap = chunk()->bitmap;
bitmap.copyMarkBit(this, src, BLACK);
bitmap.copyMarkBit(this, src, GRAY);
}
JS::Zone *
Cell::tenuredZone() const
{

View file

@@ -707,13 +707,17 @@ js::gc::MarkForkJoinStack(ForkJoinNurseryCollectionTracer *trc)
#endif // JSGC_FJGENERATIONAL
void
-js::gc::GCRuntime::markRuntime(JSTracer *trc, bool useSavedRoots)
js::gc::GCRuntime::markRuntime(JSTracer *trc,
TraceOrMarkRuntime traceOrMark,
TraceRootsOrUsedSaved rootsSource)
{
JS_ASSERT(trc->callback != GCMarker::GrayCallback);
JS_ASSERT(traceOrMark == TraceRuntime || traceOrMark == MarkRuntime);
JS_ASSERT(rootsSource == TraceRoots || rootsSource == UseSavedRoots);
JS_ASSERT(!rt->mainThread.suppressGC);
-if (IS_GC_MARKING_TRACER(trc)) {
if (traceOrMark == MarkRuntime) {
for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
if (!c->zone()->isCollecting())
c->markCrossCompartmentWrappers(trc);
@@ -727,7 +731,7 @@ js::gc::GCRuntime::markRuntime(JSTracer *trc, bool useSavedRoots)
#ifdef JSGC_USE_EXACT_ROOTING
MarkExactStackRoots(rt, trc);
#else
-markConservativeStackRoots(trc, useSavedRoots);
markConservativeStackRoots(trc, rootsSource == UseSavedRoots);
#endif
rt->markSelfHostingGlobal(trc);
}
@@ -760,7 +764,7 @@ js::gc::GCRuntime::markRuntime(JSTracer *trc, bool useSavedRoots)
}
if (!rt->isBeingDestroyed() && !trc->runtime()->isHeapMinorCollecting()) {
-if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment()->zone()->isCollecting()) {
if (traceOrMark == TraceRuntime || rt->atomsCompartment()->zone()->isCollecting()) {
MarkPermanentAtoms(trc);
MarkAtoms(trc);
MarkWellKnownSymbols(trc);
@@ -772,7 +776,7 @@ js::gc::GCRuntime::markRuntime(JSTracer *trc, bool useSavedRoots)
acx->mark(trc);
for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
-if (IS_GC_MARKING_TRACER(trc) && !zone->isCollecting())
if (traceOrMark == MarkRuntime && !zone->isCollecting())
continue;
/* Do not discard scripts with counts while profiling. */
@@ -792,11 +796,11 @@ js::gc::GCRuntime::markRuntime(JSTracer *trc, bool useSavedRoots)
if (trc->runtime()->isHeapMinorCollecting())
c->globalWriteBarriered = false;
-if (IS_GC_MARKING_TRACER(trc) && !c->zone()->isCollecting())
if (traceOrMark == MarkRuntime && !c->zone()->isCollecting())
continue;
/* During a GC, these are treated as weak pointers. */
-if (!IS_GC_MARKING_TRACER(trc)) {
if (traceOrMark == TraceRuntime) {
if (c->watchpointMap)
c->watchpointMap->markAll(trc);
}
@@ -812,9 +816,9 @@ js::gc::GCRuntime::markRuntime(JSTracer *trc, bool useSavedRoots)
if (!isHeapMinorCollecting()) {
/*
-* All JSCompartment::mark does is mark the globals for compartments
-* which have been entered. Globals aren't nursery allocated so there's
-* no need to do this for minor GCs.
* All JSCompartment::markRoots() does is mark the globals for
* compartments which have been entered. Globals aren't nursery
* allocated so there's no need to do this for minor GCs.
*/
for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
c->markRoots(trc);
@@ -833,7 +837,7 @@ js::gc::GCRuntime::markRuntime(JSTracer *trc, bool useSavedRoots)
/* During GC, we don't mark gray roots at this stage. */
if (JSTraceDataOp op = grayRootTracer.op) {
-if (!IS_GC_MARKING_TRACER(trc))
if (traceOrMark == TraceRuntime)
(*op)(trc, grayRootTracer.data);
}
}

View file

@@ -632,7 +632,7 @@ void
GCMarker::markBufferedGrayRoots(JS::Zone *zone)
{
JS_ASSERT(grayBufferState == GRAY_BUFFER_OK);
-JS_ASSERT(zone->isGCMarkingGray());
JS_ASSERT(zone->isGCMarkingGray() || zone->isGCCompacting());
for (GrayRoot *elem = zone->gcGrayRoots.begin(); elem != zone->gcGrayRoots.end(); elem++) {
#ifdef DEBUG

View file

@@ -120,8 +120,6 @@ Zone::sweep(FreeOp *fop, bool releaseTypes, bool *oom)
if (!fop->runtime()->debuggerList.isEmpty())
sweepBreakpoints(fop);
-active = false;
}
void

View file

@@ -111,7 +111,7 @@ BEGIN_TEST(testWeakMap_keyDelegates)
CHECK(map->zone()->lastZoneGroupIndex() == delegate->zone()->lastZoneGroupIndex());
#endif
-/* Check that when the delegate becomes unreacable the entry is removed. */
/* Check that when the delegate becomes unreachable the entry is removed. */
delegate = nullptr;
JS_GC(rt);
CHECK(checkSize(map, 0));

View file

@@ -169,6 +169,16 @@
* the mark state, this just stops marking, but if we have started sweeping
* already, we continue until we have swept the current zone group. Following a
* reset, a new non-incremental collection is started.
*
* Compacting GC
* -------------
*
* Compacting GC happens at the end of a major GC as part of the last slice.
* There are three parts:
*
* - Arenas are selected for compaction.
* - The contents of those arenas are moved to new arenas.
* - All references to moved things are updated.
*/
#include "jsgcinlines.h"
@@ -916,7 +926,10 @@ Chunk::allocateArena(Zone *zone, AllocKind thingKind)
JS_ASSERT(hasAvailableArenas());
JSRuntime *rt = zone->runtimeFromAnyThread();
-if (!rt->isHeapMinorCollecting() && rt->gc.usage.gcBytes() >= rt->gc.tunables.gcMaxBytes()) {
if (!rt->isHeapMinorCollecting() &&
!rt->isHeapCompacting() &&
rt->gc.usage.gcBytes() >= rt->gc.tunables.gcMaxBytes())
{
#ifdef JSGC_FJGENERATIONAL
// This is an approximation to the best test, which would check that
// this thread is currently promoting into the tenured area. I doubt
@@ -937,7 +950,7 @@ Chunk::allocateArena(Zone *zone, AllocKind thingKind)
zone->usage.addGCArena();
-if (zone->usage.gcBytes() >= zone->threshold.gcTriggerBytes()) {
if (!rt->isHeapCompacting() && zone->usage.gcBytes() >= zone->threshold.gcTriggerBytes()) {
AutoUnlockGC unlock(rt);
rt->gc.triggerZoneGC(zone, JS::gcreason::ALLOC_TRIGGER);
}
@@ -1985,6 +1998,18 @@ ArenaLists::wipeDuringParallelExecution(JSRuntime *rt)
}
}
/* Compacting GC */
bool
GCRuntime::shouldCompact()
{
#ifdef JSGC_COMPACTING
return invocationKind == GC_SHRINK;
#else
return false;
#endif
}
#ifdef JSGC_COMPACTING
static void
@@ -2002,8 +2027,381 @@ ForwardCell(Cell *dest, Cell *src)
ptr[1] = ForwardedCellMagicValue; // Moved!
}
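ForwardCell (its tail is the context above) overwrites a moved cell with a forwarding record: ptr[1] holds ForwardedCellMagicValue and the destination address is assumed to sit in ptr[0]. The matching read-side helpers IsForwarded, Forwarded and MaybeForwarded are used throughout the new code but are not part of this diff; a plausible sketch, under that two-word assumption (the *Sketch names are illustrative, not the real helpers):

    // Sketch only: assumed shape of the forwarding-pointer accessors used by
    // the relocation code below. A relocated cell is assumed to store
    //   ptr[0] = address of the new cell, ptr[1] = ForwardedCellMagicValue.
    static inline bool
    IsForwardedSketch(const Cell *cell)
    {
        const uintptr_t *ptr = reinterpret_cast<const uintptr_t *>(cell);
        return ptr[1] == ForwardedCellMagicValue;
    }

    static inline Cell *
    ForwardedSketch(const Cell *cell)
    {
        JS_ASSERT(IsForwardedSketch(cell));
        const uintptr_t *ptr = reinterpret_cast<const uintptr_t *>(cell);
        return reinterpret_cast<Cell *>(ptr[0]);
    }

    template <typename T>
    static inline T *
    MaybeForwardedSketch(T *t)
    {
        // Return the new location if |t| has been moved, otherwise |t| itself.
        Cell *cell = reinterpret_cast<Cell *>(t);
        return IsForwardedSketch(cell) ? reinterpret_cast<T *>(ForwardedSketch(cell)) : t;
    }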
static bool
ArenaContainsGlobal(ArenaHeader *arena)
{
if (arena->getAllocKind() > FINALIZE_OBJECT_LAST)
return false;
for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
JSObject *obj = static_cast<JSObject *>(i.getCell());
if (obj->is<GlobalObject>())
return true;
}
return false;
}
static bool
CanRelocateArena(ArenaHeader *arena)
{
/*
* We can't currently move global objects because their address is baked
* into compiled code. We therefore skip moving the contents of any arena
* containing a global.
*/
return arena->getAllocKind() <= FINALIZE_OBJECT_LAST && !ArenaContainsGlobal(arena);
}
static bool
ShouldRelocateArena(ArenaHeader *arena)
{
#ifdef JS_GC_ZEAL
if (arena->zone->runtimeFromMainThread()->gc.zeal() == ZealCompactValue)
return true;
#endif
/*
* Eventually, this will be based on brilliant heuristics that look at fill
* percentage and fragmentation and... stuff.
*/
return arena->hasFreeThings();
}
/*
* Choose some arenas to relocate all cells out of and remove them from the
* arena list. Return the head of the list of arenas to relocate.
*/
ArenaHeader *
ArenaList::pickArenasToRelocate()
{
check();
ArenaHeader *head = nullptr;
ArenaHeader **tailp = &head;
// TODO: Only scan through the arenas with space available.
ArenaHeader **arenap = &head_;
while (*arenap) {
ArenaHeader *arena = *arenap;
JS_ASSERT(arena);
if (CanRelocateArena(arena) && ShouldRelocateArena(arena)) {
// Remove from arena list
if (cursorp_ == &arena->next)
cursorp_ = arenap;
*arenap = arena->next;
arena->next = nullptr;
// Append to relocation list
*tailp = arena;
tailp = &arena->next;
} else {
arenap = &arena->next;
}
}
check();
return head;
}
static bool
RelocateCell(Zone *zone, Cell *src, AllocKind thingKind, size_t thingSize)
{
// Allocate a new cell.
void *dst = zone->allocator.arenas.allocateFromFreeList(thingKind, thingSize);
if (!dst)
dst = js::gc::ArenaLists::refillFreeListInGC(zone, thingKind);
if (!dst)
return false;
// Copy source cell contents to destination.
memcpy(dst, src, thingSize);
// Mark source cell as forwarded and leave a pointer to the destination.
ForwardCell(static_cast<Cell *>(dst), src);
// Fixup the pointer to inline object elements if necessary.
if (thingKind <= FINALIZE_OBJECT_LAST) {
JSObject *srcObj = static_cast<JSObject *>(src);
JSObject *dstObj = static_cast<JSObject *>(dst);
if (srcObj->hasFixedElements())
dstObj->setFixedElements();
JS_ASSERT(
uintptr_t((HeapSlot*)dstObj->getElementsHeader()) - uintptr_t(srcObj) >= thingSize);
}
// Copy the mark bits.
static_cast<Cell *>(dst)->copyMarkBitsFrom(src);
return true;
}
static bool
RelocateArena(ArenaHeader *aheader)
{
JS_ASSERT(aheader->allocated());
JS_ASSERT(!aheader->hasDelayedMarking);
JS_ASSERT(!aheader->markOverflow);
JS_ASSERT(!aheader->allocatedDuringIncremental);
Zone *zone = aheader->zone;
AllocKind thingKind = aheader->getAllocKind();
size_t thingSize = aheader->getThingSize();
for (ArenaCellIterUnderFinalize i(aheader); !i.done(); i.next()) {
if (!RelocateCell(zone, i.getCell(), thingKind, thingSize)) {
MOZ_CRASH(); // TODO: Handle failure here.
return false;
}
}
return true;
}
/*
* Relocate all arenas identified by pickArenasToRelocate: for each arena,
* relocate each cell within it, then tack it onto a list of relocated arenas.
* Currently, we allow the relocation to fail, in which case the arena will be
* moved back onto the list of arenas with space available. (I did this
* originally to test my list manipulation before implementing the actual
* moving, with half a thought to allowing pinning (moving only a portion of
* the cells in an arena), but now it's probably just dead weight. FIXME)
*/
ArenaHeader *
ArenaList::relocateArenas(ArenaHeader *toRelocate, ArenaHeader *relocated)
{
check();
while (ArenaHeader *arena = toRelocate) {
toRelocate = arena->next;
if (RelocateArena(arena)) {
// Prepend to list of relocated arenas
arena->next = relocated;
relocated = arena;
} else {
// For some reason, the arena did not end up empty. Prepend it to
// the portion of the list that the cursor is pointing to (the
// arenas with space available) so that it will be used for future
// allocations.
JS_ASSERT(arena->hasFreeThings());
insertAtCursor(arena);
}
}
check();
return relocated;
}
ArenaHeader *
ArenaLists::relocateArenas(ArenaHeader *relocatedList)
{
// Flush all the freeLists back into the arena headers
purge();
checkEmptyFreeLists();
for (size_t i = 0; i < FINALIZE_LIMIT; i++) {
ArenaList &al = arenaLists[i];
ArenaHeader *toRelocate = al.pickArenasToRelocate();
if (toRelocate)
relocatedList = al.relocateArenas(toRelocate, relocatedList);
}
/*
* When we allocate new locations for cells, we use
* allocateFromFreeList(). Reset the free list again so that
* AutoCopyFreeListToArenasForGC doesn't complain that the free lists
* are different now.
*/
purge();
checkEmptyFreeLists();
return relocatedList;
}
struct MovingTracer : JSTracer {
MovingTracer(JSRuntime *rt) : JSTracer(rt, Visit, TraceWeakMapValues) {}
static void Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind);
static void Sweep(JSTracer *jstrc);
};
void
MovingTracer::Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
{
Cell *thing = static_cast<Cell *>(*thingp);
if (!thing->tenuredZone()->isGCCompacting()) {
JS_ASSERT(!IsForwarded(thing));
return;
}
if (IsForwarded(thing)) {
Cell *dst = Forwarded(thing);
*thingp = dst;
}
}
void
MovingTracer::Sweep(JSTracer *jstrc)
{
JSRuntime *rt = jstrc->runtime();
FreeOp *fop = rt->defaultFreeOp();
WatchpointMap::sweepAll(rt);
Debugger::sweepAll(fop);
for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
if (zone->isCollecting()) {
gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP_COMPARTMENTS);
bool oom = false;
zone->sweep(fop, false, &oom);
JS_ASSERT(!oom);
for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
c->sweep(fop, false);
ArrayBufferObject::sweep(c);
}
} else {
/* Update cross compartment wrappers into moved zones. */
for (CompartmentsInZoneIter c(zone); !c.done(); c.next())
c->sweepCrossCompartmentWrappers();
}
}
/* Type inference may put more blocks here to free. */
rt->freeLifoAlloc.freeAll();
}
/*
* Update the internal pointers in a single cell.
*/
static void
UpdateCellPointers(MovingTracer *trc, Cell *cell, JSGCTraceKind traceKind) {
TraceChildren(trc, cell, traceKind);
if (traceKind == JSTRACE_SHAPE) {
Shape *shape = static_cast<Shape *>(cell);
shape->fixupAfterMovingGC();
} else if (traceKind == JSTRACE_BASE_SHAPE) {
BaseShape *base = static_cast<BaseShape *>(cell);
base->fixupAfterMovingGC();
}
}
/*
* Update pointers to relocated cells by doing a full heap traversal and sweep.
*
* The latter is necessary to update weak references which are not marked as
* part of the traversal.
*/
void
GCRuntime::updatePointersToRelocatedCells()
{
JS_ASSERT(rt->currentThreadHasExclusiveAccess());
MovingTracer trc(rt);
{
// TODO: Maybe give compaction its own set of phases.
gcstats::AutoPhase ap(stats, gcstats::PHASE_MARK);
// TODO: We may need to fix up other weak pointers here.
// Fixup compartment global pointers as these get accessed during marking.
for (GCCompartmentsIter comp(rt); !comp.done(); comp.next())
comp->fixupAfterMovingGC();
// Fixup cross compartment wrappers as we assert the existence of wrappers in the map.
for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next())
comp->fixupCrossCompartmentWrappers(&trc);
// Fixup generators as these are not normally traced.
for (ContextIter i(rt); !i.done(); i.next()) {
for (JSGenerator *gen = i.get()->innermostGenerator(); gen; gen = gen->prevGenerator)
gen->obj = MaybeForwarded(gen->obj.get());
}
// Iterate through all allocated cells to update internal pointers.
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
ArenaLists &al = zone->allocator.arenas;
for (unsigned i = 0; i < FINALIZE_LIMIT; ++i) {
AllocKind thingKind = static_cast<AllocKind>(i);
JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
for (ArenaHeader *arena = al.getFirstArena(thingKind); arena; arena = arena->next) {
for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
UpdateCellPointers(&trc, i.getCell(), traceKind);
}
}
}
}
// Mark roots to update them.
markRuntime(&trc, MarkRuntime);
Debugger::markAll(&trc);
Debugger::markCrossCompartmentDebuggerObjectReferents(&trc);
for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
if (c->watchpointMap)
c->watchpointMap->markAll(&trc);
}
}
{
gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_SWEEP);
markAllGrayReferences();
MovingTracer::Sweep(&trc);
}
}
void
GCRuntime::releaseRelocatedArenas(ArenaHeader *relocatedList)
{
// Release the relocated arenas, now containing only forwarding pointers
#ifdef DEBUG
for (ArenaHeader *arena = relocatedList; arena; arena = arena->next) {
for (ArenaCellIterUnderFinalize i(arena); !i.done(); i.next()) {
Cell *src = i.getCell();
JS_ASSERT(IsForwarded(src));
Cell *dest = Forwarded(src);
JS_ASSERT(src->isMarked(BLACK) == dest->isMarked(BLACK));
JS_ASSERT(src->isMarked(GRAY) == dest->isMarked(GRAY));
}
}
#endif
unsigned count = 0;
while (relocatedList) {
ArenaHeader *aheader = relocatedList;
relocatedList = relocatedList->next;
// Mark arena as empty
AllocKind thingKind = aheader->getAllocKind();
size_t thingSize = aheader->getThingSize();
Arena *arena = aheader->getArena();
FreeSpan fullSpan;
fullSpan.initFinal(arena->thingsStart(thingKind), arena->thingsEnd() - thingSize, thingSize);
aheader->setFirstFreeSpan(&fullSpan);
#if defined(JS_CRASH_DIAGNOSTICS) || defined(JS_GC_ZEAL)
JS_POISON(reinterpret_cast<void *>(arena->thingsStart(thingKind)),
JS_MOVED_TENURED_PATTERN, Arena::thingsSpan(thingSize));
#endif
aheader->chunk()->releaseArena(aheader);
++count;
}
AutoLockGC lock(rt);
expireChunksAndArenas(true);
}
#endif // JSGC_COMPACTING
void
ArenaLists::finalizeNow(FreeOp *fop, AllocKind thingKind)
{
@@ -2290,6 +2688,22 @@ ArenaLists::refillFreeList<NoGC>(ThreadSafeContext *cx, AllocKind thingKind);
template void *
ArenaLists::refillFreeList<CanGC>(ThreadSafeContext *cx, AllocKind thingKind);
/* static */ void *
ArenaLists::refillFreeListInGC(Zone *zone, AllocKind thingKind)
{
/*
* Called by compacting GC to refill a free list while we are in a GC.
*/
Allocator &allocator = zone->allocator;
JS_ASSERT(allocator.arenas.freeLists[thingKind].isEmpty());
JSRuntime *rt = zone->runtimeFromMainThread();
JS_ASSERT(rt->isHeapMajorCollecting());
JS_ASSERT(!rt->gc.isBackgroundSweeping());
return allocator.arenas.allocateFromArena(zone, thingKind);
}
/* static */ int64_t
SliceBudget::TimeBudget(int64_t millis)
{
@@ -3256,7 +3670,7 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason)
if (isFull)
UnmarkScriptData(rt);
-markRuntime(gcmarker);
markRuntime(gcmarker, MarkRuntime);
if (isIncremental)
bufferGrayRoots();
@@ -3387,8 +3801,6 @@ GCRuntime::markGrayReferencesInCurrentGroup()
markGrayReferences<GCZoneGroupIter, GCCompartmentGroupIter>();
}
-#ifdef DEBUG
void
GCRuntime::markAllWeakReferences(gcstats::Phase phase)
{
@@ -3401,6 +3813,8 @@ GCRuntime::markAllGrayReferences()
markGrayReferences<GCZonesIter, GCCompartmentsIter>();
}
#ifdef DEBUG
class js::gc::MarkingValidator
{
public:
@@ -3505,7 +3919,7 @@ js::gc::MarkingValidator::nonIncrementalMark()
{
gcstats::AutoPhase ap1(gc->stats, gcstats::PHASE_MARK);
gcstats::AutoPhase ap2(gc->stats, gcstats::PHASE_MARK_ROOTS);
-gc->markRuntime(gcmarker, true);
gc->markRuntime(gcmarker, GCRuntime::MarkRuntime, GCRuntime::UseSavedRoots);
}
{
@@ -4267,7 +4681,8 @@ GCRuntime::beginSweepPhase(bool lastGC)
gcstats::AutoPhase ap(stats, gcstats::PHASE_SWEEP);
-sweepOnBackgroundThread = !lastGC && !TraceEnabled() && CanUseExtraThreads();
sweepOnBackgroundThread =
!lastGC && !TraceEnabled() && CanUseExtraThreads() && !shouldCompact();
releaseObservedTypes = shouldReleaseObservedTypes();
@@ -4395,9 +4810,6 @@ GCRuntime::endSweepPhase(bool lastGC)
JS_ASSERT_IF(lastGC, !sweepOnBackgroundThread);
-JS_ASSERT(marker.isDrained());
-marker.stop();
/*
* Recalculate whether GC was full or not as this may have changed due to
* newly created zones. Can only change from full to not full.
@@ -4498,30 +4910,17 @@ GCRuntime::endSweepPhase(bool lastGC)
sweepZones(&fop, lastGC);
}
-uint64_t currentTime = PRMJ_Now();
finishMarkingValidation();
-schedulingState.updateHighFrequencyMode(lastGCTime, currentTime, tunables);
-for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
-zone->threshold.updateAfterGC(zone->usage.gcBytes(), invocationKind, tunables,
-schedulingState);
-if (zone->isCollecting()) {
-JS_ASSERT(zone->isGCFinished());
-zone->setGCState(Zone::NoGC);
-}
#ifdef DEBUG
-JS_ASSERT(!zone->isCollecting());
for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
-JS_ASSERT(!zone->wasGCStarted());
for (unsigned i = 0 ; i < FINALIZE_LIMIT ; ++i) {
JS_ASSERT_IF(!IsBackgroundFinalized(AllocKind(i)) ||
!sweepOnBackgroundThread,
!zone->allocator.arenas.arenaListsToSweep[i]);
}
-#endif
}
-#ifdef DEBUG
for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
JS_ASSERT(!c->gcIncomingGrayPointers);
JS_ASSERT(c->gcLiveArrayBuffers.empty());
@@ -4532,8 +4931,61 @@ GCRuntime::endSweepPhase(bool lastGC)
}
}
#endif
}
-finishMarkingValidation();
#ifdef JSGC_COMPACTING
void
GCRuntime::compactPhase()
{
JS_ASSERT(rt->gc.nursery.isEmpty());
JS_ASSERT(!sweepOnBackgroundThread);
ArenaHeader *relocatedList = nullptr;
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
JS_ASSERT(zone->isGCFinished());
JS_ASSERT(!zone->isPreservingCode());
// We cannot move atoms as we depend on their addresses being constant.
if (!rt->isAtomsZone(zone)) {
zone->setGCState(Zone::Compact);
relocatedList = zone->allocator.arenas.relocateArenas(relocatedList);
}
}
updatePointersToRelocatedCells();
releaseRelocatedArenas(relocatedList);
#ifdef DEBUG
CheckHashTablesAfterMovingGC(rt);
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
if (!rt->isAtomsZone(zone) && !zone->isPreservingCode())
zone->allocator.arenas.checkEmptyFreeLists();
}
#endif
}
#endif // JSGC_COMPACTING
void
GCRuntime::finishCollection()
{
JS_ASSERT(marker.isDrained());
marker.stop();
uint64_t currentTime = PRMJ_Now();
schedulingState.updateHighFrequencyMode(lastGCTime, currentTime, tunables);
for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
zone->threshold.updateAfterGC(zone->usage.gcBytes(), invocationKind, tunables,
schedulingState);
if (zone->isCollecting()) {
JS_ASSERT(zone->isGCFinished() || zone->isGCCompacting());
zone->setGCState(Zone::NoGC);
zone->active = false;
}
JS_ASSERT(!zone->isCollecting());
JS_ASSERT(!zone->wasGCStarted());
}
lastGCTime = currentTime;
}
@@ -4870,6 +5322,14 @@ GCRuntime::incrementalCollectSlice(int64_t budget,
if (sweepOnBackgroundThread)
helperState.startBackgroundSweep(invocationKind == GC_SHRINK);
#ifdef JSGC_COMPACTING
if (shouldCompact()) {
incrementalState = COMPACT;
compactPhase();
}
#endif
finishCollection();
incrementalState = NO_INCREMENTAL;
break;
}
@@ -5542,6 +6002,8 @@ GCRuntime::runDebugGC()
{
incrementalLimit = zealFrequency / 2;
}
} else if (type == ZealCompactValue) {
collect(false, SliceBudget::Unlimited, GC_SHRINK, JS::gcreason::DEBUG_GC);
} else {
collect(false, SliceBudget::Unlimited, GC_NORMAL, JS::gcreason::DEBUG_GC);
}

View file

@@ -523,6 +523,11 @@ class ArenaList {
check();
return *this;
}
#ifdef JSGC_COMPACTING
ArenaHeader *pickArenasToRelocate();
ArenaHeader *relocateArenas(ArenaHeader *toRelocate, ArenaHeader *relocated);
#endif
};
/*
@@ -799,7 +804,6 @@ class ArenaLists
clearFreeListInArena(AllocKind(i));
}
void clearFreeListInArena(AllocKind kind) {
FreeList *freeList = &freeLists[kind];
if (!freeList->isEmpty()) {
@@ -845,6 +849,8 @@ class ArenaLists
template <AllowGC allowGC>
static void *refillFreeList(ThreadSafeContext *cx, AllocKind thingKind);
static void *refillFreeListInGC(Zone *zone, AllocKind thingKind);
/*
* Moves all arenas from |fromArenaLists| into |this|. In
* parallel blocks, we temporarily create one ArenaLists per
@@ -868,6 +874,10 @@ class ArenaLists
JS_ASSERT(freeLists[kind].isEmpty());
}
#ifdef JSGC_COMPACTING
ArenaHeader *relocateArenas(ArenaHeader *relocatedList);
#endif
void queueObjectsForSweep(FreeOp *fop);
void queueStringsAndSymbolsForSweep(FreeOp *fop);
void queueShapesForSweep(FreeOp *fop);
@@ -1320,7 +1330,8 @@ const int ZealIncrementalMultipleSlices = 10;
const int ZealVerifierPostValue = 11;
const int ZealFrameVerifierPostValue = 12;
const int ZealCheckHashTablesOnMinorGC = 13;
-const int ZealLimit = 13;
const int ZealCompactValue = 14;
const int ZealLimit = 14;
enum VerifierType {
PreBarrierVerifier,