Bug 1159540 - Organize and comment the marking paths; r=sfink

Terrence Cole 2015-04-29 10:23:24 -07:00
Parent 492e64bca9
Commit a044cbc08e
16 changed files with 1357 additions and 1329 deletions

View file

@@ -102,17 +102,19 @@ class JS_PUBLIC_API(JSTracer)
MarkingTracer,
CallbackTracer
};
bool isMarkingTracer() const { return tag == MarkingTracer; }
bool isCallbackTracer() const { return tag == CallbackTracer; }
bool isMarkingTracer() const { return tag_ == MarkingTracer; }
bool isCallbackTracer() const { return tag_ == CallbackTracer; }
inline JS::CallbackTracer* asCallbackTracer();
protected:
JSTracer(JSRuntime* rt, TracerKindTag tag,
WeakMapTraceKind weakTraceKind = TraceWeakMapValues);
WeakMapTraceKind weakTraceKind = TraceWeakMapValues)
: runtime_(rt), tag_(tag), eagerlyTraceWeakMaps_(weakTraceKind)
{}
private:
JSRuntime* runtime_;
TracerKindTag tag;
TracerKindTag tag_;
WeakMapTraceKind eagerlyTraceWeakMaps_;
};
@@ -134,7 +136,9 @@ class JS_PUBLIC_API(CallbackTracer) : public JSTracer
{}
// Update the trace callback.
void setTraceCallback(JSTraceCallback traceCallback);
void setTraceCallback(JSTraceCallback traceCallback) {
callback = traceCallback;
}
// Test if the given callback is the same as our callback.
bool hasCallback(JSTraceCallback maybeCallback) const {

View file

@@ -11,9 +11,9 @@
#include "gc/Heap.h"
#include "gc/StoreBuffer.h"
#include "js/HashTable.h"
#include "js/Id.h"
#include "js/RootingAPI.h"
#include "js/Value.h"
/*
* A write barrier is a mechanism used by incremental or generational GCs to
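As an aside for readers of this hunk (the comment continues in the file), here is a minimal standalone sketch of the pre-write-barrier idea; gcIsIncremental and markFromBarrier are hypothetical stand-ins, not the classes this header defines:

template <typename T> static void markFromBarrier(T*) {}  // hypothetical hook
static bool gcIsIncremental() { return true; }            // hypothetical hook

// Before an edge is overwritten, mark the old target so an in-progress
// incremental mark cannot lose a reachable object through this slot.
template <typename T>
class PreBarrieredSketch {
    T* ptr_ = nullptr;
  public:
    void set(T* newPtr) {
        if (gcIsIncremental() && ptr_)
            markFromBarrier(ptr_);
        ptr_ = newPtr;
    }
    T* get() const { return ptr_; }
};

int main() {
    int v = 0;
    PreBarrieredSketch<int> edge;
    edge.set(&v);       // first store: no old target to mark
    edge.set(nullptr);  // old target (&v) is marked before the overwrite
    return 0;
}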

View file

@@ -53,6 +53,9 @@ CurrentThreadIsIonCompiling();
extern bool
UnmarkGrayCellRecursively(gc::Cell* cell, JSGCTraceKind kind);
extern void
TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, gc::Cell** thingp, const char* name);
namespace gc {
struct Arena;
@@ -61,9 +64,6 @@ class SortedArenaList;
struct ArenaHeader;
struct Chunk;
extern void
TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp, const char* name);
/*
* This flag allows an allocation site to request a specific heap based upon the
* estimated lifetime or lifetime requirements of objects allocated from that

File diff not shown because of its large size.

View file

@@ -7,87 +7,308 @@
#ifndef gc_Marking_h
#define gc_Marking_h
#include "mozilla/DebugOnly.h"
#include "jsfriendapi.h"
#include "gc/Barrier.h"
#include "gc/Heap.h"
#include "gc/Tracer.h"
#include "js/GCAPI.h"
#include "js/SliceBudget.h"
#include "js/TracingAPI.h"
class JSLinearString;
class JSRope;
namespace js {
/*** Tracing ***/
// Trace through an edge in the live object graph on behalf of tracing. The
// effect of tracing the edge depends on the JSTracer being used.
template <typename T>
void
TraceEdge(JSTracer* trc, BarrieredBase<T>* thingp, const char* name);
// Trace through a "root" edge. These edges are the initial edges in the object
// graph traversal. Root edges are asserted to only be traversed in the initial
// phase of a GC.
template <typename T>
void
TraceRoot(JSTracer* trc, T* thingp, const char* name);
// Like TraceEdge, but for edges that do not use one of the automatic barrier
// classes and, thus, must be treated specially for moving GC. This method is
// separate from TraceEdge to make accidental use of such edges more obvious.
template <typename T>
void
TraceManuallyBarrieredEdge(JSTracer* trc, T* thingp, const char* name);
// Trace all edges contained in the given array.
template <typename T>
void
TraceRange(JSTracer* trc, size_t len, BarrieredBase<T>* vec, const char* name);
// Trace all root edges in the given array.
template <typename T>
void
TraceRootRange(JSTracer* trc, size_t len, T* vec, const char* name);
// Trace an edge that crosses compartment boundaries. If the compartment of the
// destination thing is not being GC'd, then the edge will not be traced.
template <typename T>
void
TraceCrossCompartmentEdge(JSTracer* trc, JSObject* src, BarrieredBase<T>* dst,
const char* name);
// As above but with manual barriers.
template <typename T>
void
TraceManuallyBarrieredCrossCompartmentEdge(JSTracer* trc, JSObject* src, T* dst,
const char* name);
// Permanent atoms and well-known symbols are shared between runtimes and must
// use a separate marking path so that we can filter them out of normal heap
// tracing.
template <typename T>
void
TraceProcessGlobalRoot(JSTracer* trc, T* thing, const char* name);
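A hedged usage sketch of the entry points above; MyThing and its members are hypothetical, and HeapPtr is assumed to be one of the automatic barrier classes deriving from BarrieredBase:

struct MyThing {
    HeapPtr<JSObject*> barrieredEdge;  // automatic barrier class
    JSObject* rawEdge;                 // raw member, no barrier wrapper

    void trace(JSTracer* trc) {
        // Barriered members go through TraceEdge...
        TraceEdge(trc, &barrieredEdge, "barrieredEdge");
        // ...raw members must use the deliberately louder manual entry point.
        TraceManuallyBarrieredEdge(trc, &rawEdge, "rawEdge");
    }
};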
class BaseShape;
class GCMarker;
class LazyScript;
class NativeObject;
class ObjectGroup;
namespace gc {
/* Return true if the pointer is nullptr, or if it is a tagged pointer to
* nullptr.
*/
MOZ_ALWAYS_INLINE bool
IsNullTaggedPointer(void* p)
{
return uintptr_t(p) < 32;
struct ArenaHeader;
}
namespace jit {
class JitCode;
}
/*** Externally Typed Marking ***/
static const size_t NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY = 4096;
static const size_t INCREMENTAL_MARK_STACK_BASE_CAPACITY = 32768;
/*
* When the native stack is low, the GC does not call JS_TraceChildren to mark
* the reachable "children" of the thing. Rather the thing is put aside and
* JS_TraceChildren is called later with more space on the C stack.
*
* To implement such delayed marking of the children with minimal overhead for
* the normal case of sufficient native stack, the code adds a field per arena.
* The field markingDelay->link links all arenas with delayed things into a
* stack list with the pointer to stack top in GCMarker::unmarkedArenaStackTop.
* GCMarker::delayMarkingChildren adds arenas to the stack as necessary while
* markDelayedChildren pops the arenas from the stack until it empties.
*/
class MarkStack
{
friend class GCMarker;
uintptr_t* stack_;
uintptr_t* tos_;
uintptr_t* end_;
// The capacity we start with and reset() to.
size_t baseCapacity_;
size_t maxCapacity_;
public:
explicit MarkStack(size_t maxCapacity)
: stack_(nullptr),
tos_(nullptr),
end_(nullptr),
baseCapacity_(0),
maxCapacity_(maxCapacity)
{}
~MarkStack() {
js_free(stack_);
}
size_t capacity() { return end_ - stack_; }
ptrdiff_t position() const { return tos_ - stack_; }
void setStack(uintptr_t* stack, size_t tosIndex, size_t capacity) {
stack_ = stack;
tos_ = stack + tosIndex;
end_ = stack + capacity;
}
bool init(JSGCMode gcMode);
void setBaseCapacity(JSGCMode mode);
size_t maxCapacity() const { return maxCapacity_; }
void setMaxCapacity(size_t maxCapacity);
bool push(uintptr_t item) {
if (tos_ == end_) {
if (!enlarge(1))
return false;
}
MOZ_ASSERT(tos_ < end_);
*tos_++ = item;
return true;
}
bool push(uintptr_t item1, uintptr_t item2, uintptr_t item3) {
uintptr_t* nextTos = tos_ + 3;
if (nextTos > end_) {
if (!enlarge(3))
return false;
nextTos = tos_ + 3;
}
MOZ_ASSERT(nextTos <= end_);
tos_[0] = item1;
tos_[1] = item2;
tos_[2] = item3;
tos_ = nextTos;
return true;
}
bool isEmpty() const {
return tos_ == stack_;
}
uintptr_t pop() {
MOZ_ASSERT(!isEmpty());
return *--tos_;
}
void reset();
/* Grow the stack, ensuring there is space for at least count elements. */
bool enlarge(unsigned count);
void setGCMode(JSGCMode gcMode);
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
};
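A standalone sketch (using std::vector rather than the class above) of the pop order that pushValueArray below relies on: pushing end, start, then the tagged object word leaves the tag word on top, so a pop sees the entry kind first.

#include <cassert>
#include <cstdint>
#include <vector>

int main() {
    std::vector<uintptr_t> stack;
    uintptr_t endAddr = 0x2000, startAddr = 0x1000, tagged = 0x3000;  // 0x3000 | ValueArrayTag(0)
    stack.push_back(endAddr);
    stack.push_back(startAddr);
    stack.push_back(tagged);

    assert(stack.back() == tagged);    // popped first: identifies the entry
    stack.pop_back();
    assert(stack.back() == startAddr); // then the array bounds follow
    stack.pop_back();
    assert(stack.back() == endAddr);
    return 0;
}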
class GCMarker : public JSTracer
{
public:
explicit GCMarker(JSRuntime* rt);
bool init(JSGCMode gcMode);
void setMaxCapacity(size_t maxCap) { stack.setMaxCapacity(maxCap); }
size_t maxCapacity() const { return stack.maxCapacity(); }
void start();
void stop();
void reset();
// Mark the given GC thing and traverse its children at some point.
template <typename T> void traverse(T thing);
// Calls traverse on target after making additional assertions.
template <typename S, typename T> void traverse(S source, T target);
// C++ requires explicit declarations of partial template instantiations.
template <typename S> void traverse(S source, jsid target);
/*
* Care must be taken changing the mark color from gray to black. The cycle
* collector depends on the invariant that there are no black to gray edges
* in the GC heap. This invariant lets the CC not trace through black
* objects. If this invariant is violated, the cycle collector may free
* objects that are still reachable.
*/
void setMarkColorGray() {
MOZ_ASSERT(isDrained());
MOZ_ASSERT(color == gc::BLACK);
color = gc::GRAY;
}
void setMarkColorBlack() {
MOZ_ASSERT(isDrained());
MOZ_ASSERT(color == gc::GRAY);
color = gc::BLACK;
}
uint32_t markColor() const { return color; }
void delayMarkingArena(gc::ArenaHeader* aheader);
void delayMarkingChildren(const void* thing);
void markDelayedChildren(gc::ArenaHeader* aheader);
bool markDelayedChildren(SliceBudget& budget);
bool hasDelayedChildren() const {
return !!unmarkedArenaStackTop;
}
bool isDrained() {
return isMarkStackEmpty() && !unmarkedArenaStackTop;
}
bool drainMarkStack(SliceBudget& budget);
void setGCMode(JSGCMode mode) { stack.setGCMode(mode); }
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
#ifdef DEBUG
bool shouldCheckCompartments() { return strictCompartmentChecking; }
#endif
/* This is public exclusively for ScanRope. */
MarkStack stack;
private:
#ifdef DEBUG
void checkZone(void* p);
#else
void checkZone(void* p) {}
#endif
/*
* We use a common mark stack to mark GC things of different types and use
* the explicit tags to distinguish them when it cannot be deduced from
* the context of push or pop operation.
*/
enum StackTag {
ValueArrayTag,
ObjectTag,
GroupTag,
SavedValueArrayTag,
JitCodeTag,
LastTag = JitCodeTag
};
static const uintptr_t StackTagMask = 7;
static_assert(StackTagMask >= uintptr_t(LastTag), "The tag mask must subsume the tags.");
static_assert(StackTagMask <= gc::CellMask, "The tag mask must be embeddable in a Cell*.");
// Push an object onto the stack for later tracing and assert that it has
// already been marked.
void repush(JSObject* obj) {
MOZ_ASSERT(gc::TenuredCell::fromPointer(obj)->isMarked(markColor()));
pushTaggedPtr(ObjectTag, obj);
}
template <typename T> void markAndTraceChildren(T* thing);
template <typename T> void markAndPush(StackTag tag, T* thing);
template <typename T> void markAndScan(T* thing);
void eagerlyMarkChildren(JSLinearString* str);
void eagerlyMarkChildren(JSRope* rope);
void eagerlyMarkChildren(JSString* str);
void eagerlyMarkChildren(LazyScript *thing);
void eagerlyMarkChildren(Shape* shape);
void lazilyMarkChildren(ObjectGroup* group);
// We may not have concrete types yet, so this has to be out of the header.
template <typename T>
void dispatchToTraceChildren(T* thing);
// Mark the given GC thing, but do not trace its children. Return true
// if the thing became marked.
template <typename T>
bool mark(T* thing);
void pushTaggedPtr(StackTag tag, void* ptr) {
checkZone(ptr);
uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
MOZ_ASSERT(!(addr & StackTagMask));
if (!stack.push(addr | uintptr_t(tag)))
delayMarkingChildren(ptr);
}
void pushValueArray(JSObject* obj, void* start, void* end) {
checkZone(obj);
MOZ_ASSERT(start <= end);
uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
/*
* Push in the reverse order so obj will be on top. If we cannot push
* the array, we trigger delay marking for the whole object.
*/
if (!stack.push(endAddr, startAddr, tagged))
delayMarkingChildren(obj);
}
bool isMarkStackEmpty() {
return stack.isEmpty();
}
bool restoreValueArray(NativeObject* obj, void** vpp, void** endp);
void saveValueRanges();
inline void processMarkStackTop(SliceBudget& budget);
/* The color is only applied to objects and functions. */
uint32_t color;
/* Pointer to the top of the stack of arenas we are delaying marking on. */
js::gc::ArenaHeader* unmarkedArenaStackTop;
/* Count of arenas that are currently in the stack. */
mozilla::DebugOnly<size_t> markLaterArenas;
/* Assert that start and stop are called with correct ordering. */
mozilla::DebugOnly<bool> started;
/*
* If this is true, all marked objects must belong to a compartment being
* GCed. This is used to look for compartment bugs.
*/
mozilla::DebugOnly<bool> strictCompartmentChecking;
};
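The StackTag scheme above works because GC cells are at least 8-byte aligned, leaving the low three bits of every pointer free. A self-contained sketch of the packing that pushTaggedPtr performs, with the alignment assumption made explicit:

#include <cassert>
#include <cstdint>

enum StackTag : uintptr_t { ValueArrayTag, ObjectTag, GroupTag };
static const uintptr_t StackTagMask = 7;  // low 3 bits

static uintptr_t tagPtr(void* p, StackTag tag) {
    uintptr_t addr = reinterpret_cast<uintptr_t>(p);
    assert(!(addr & StackTagMask));  // alignment keeps the tag bits clear
    return addr | uintptr_t(tag);
}

static StackTag tagOf(uintptr_t word) { return StackTag(word & StackTagMask); }
static void* ptrOf(uintptr_t word) { return reinterpret_cast<void*>(word & ~StackTagMask); }

int main() {
    alignas(8) static int cell;
    uintptr_t word = tagPtr(&cell, ObjectTag);
    assert(tagOf(word) == ObjectTag);
    assert(ptrOf(word) == &cell);
    return 0;
}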
void
TraceGenericPointerRoot(JSTracer* trc, Cell** thingp, const char* name);
SetMarkStackLimit(JSRuntime* rt, size_t limit);
void
TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, Cell** thingp, const char* name);
bool
IsBufferingGrayRoots(JSTracer* trc);
/*** Slot Marking ***/
void
MarkObjectSlots(JSTracer* trc, NativeObject* obj, uint32_t start, uint32_t nslots);
} /* namespace js */
namespace js {
namespace gc {
/*** Special Cases ***/
@@ -103,7 +324,7 @@ MarkCycleCollectorChildren(JSTracer* trc, ObjectGroup* group);
void
PushArena(GCMarker* gcmarker, ArenaHeader* aheader);
/*** Generic ***/
/*** Liveness ***/
template <typename T>
bool
@@ -143,10 +364,16 @@ ToMarkable(Cell* cell)
return cell;
}
/*
* HashKeyRef represents a reference to a HashMap key. This should normally
* be used through the HashTableWriteBarrierPost function.
*/
// Return true if the pointer is nullptr, or if it is a tagged pointer to
// nullptr.
MOZ_ALWAYS_INLINE bool
IsNullTaggedPointer(void* p)
{
return uintptr_t(p) < 32;
}
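A quick illustration of what the predicate accepts; the local copy below mirrors the definition above so the example stands alone:

#include <cassert>
#include <cstdint>

static bool IsNullTaggedPointer(void* p) { return uintptr_t(p) < 32; }

int main() {
    assert(IsNullTaggedPointer(nullptr));                           // plain null
    assert(IsNullTaggedPointer(reinterpret_cast<void*>(0x4)));      // tagged null
    assert(!IsNullTaggedPointer(reinterpret_cast<void*>(0x1000)));  // real pointer
    return 0;
}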
// HashKeyRef represents a reference to a HashMap key. This should normally
// be used through the HashTableWriteBarrierPost function.
template <typename Map, typename Key>
class HashKeyRef : public BufferableRef
{
@@ -167,6 +394,8 @@ class HashKeyRef : public BufferableRef
}
};
// Wrap a GC thing pointer into a new Value or jsid. The type system enforces
// that the thing pointer is a wrappable type.
template <typename S, typename T>
struct RewrapValueOrId {};
#define DECLARE_REWRAP(S, T, method, prefix) \
@@ -181,9 +410,6 @@ DECLARE_REWRAP(jsid, JS::Symbol*, SYMBOL_TO_JSID, );
} /* namespace gc */
void
TraceChildren(JSTracer* trc, void* thing, JSGCTraceKind kind);
bool
UnmarkGrayShapeRecursively(Shape* shape);

View file

@@ -552,6 +552,28 @@ js::gc::GCRuntime::markRuntime(JSTracer* trc,
}
}
// Append traced things to a buffer on the zone for use later in the GC.
// See the comment in GCRuntime.h above grayBufferState for details.
class BufferGrayRootsTracer : public JS::CallbackTracer
{
// Set to true if we OOM while buffering gray roots.
bool bufferingGrayRootsFailed;
void appendGrayRoot(gc::TenuredCell* thing, JSGCTraceKind kind);
public:
explicit BufferGrayRootsTracer(JSRuntime* rt)
: JS::CallbackTracer(rt, grayTraceCallback), bufferingGrayRootsFailed(false)
{}
static void grayTraceCallback(JS::CallbackTracer* trc, void** thingp, JSGCTraceKind kind) {
auto tracer = static_cast<BufferGrayRootsTracer*>(trc);
tracer->appendGrayRoot(gc::TenuredCell::fromPointer(*thingp), kind);
}
bool failed() const { return bufferingGrayRootsFailed; }
};
void
js::gc::GCRuntime::bufferGrayRoots()
{
@@ -621,3 +643,13 @@ GCRuntime::resetBufferedGrayRoots() const
for (GCZonesIter zone(rt); !zone.done(); zone.next())
zone->gcGrayRoots.clearAndFree();
}
// Return true if this trace is happening on behalf of gray buffering during
// the marking phase of incremental GC.
bool
js::IsBufferingGrayRoots(JSTracer* trc)
{
return trc->isCallbackTracer() &&
trc->asCallbackTracer()->hasCallback(BufferGrayRootsTracer::grayTraceCallback);
}

View file

@@ -42,7 +42,7 @@ StoreBuffer::SlotsEdge::mark(JSTracer* trc) const
int32_t start = Min(uint32_t(start_), obj->slotSpan());
int32_t end = Min(uint32_t(start_) + count_, obj->slotSpan());
MOZ_ASSERT(end >= start);
MarkObjectSlots(trc, obj, start, end - start);
TraceObjectSlots(trc, obj, start, end - start);
}
}

View file

@@ -15,7 +15,6 @@
#include "ds/LifoAlloc.h"
#include "gc/Nursery.h"
#include "gc/Tracer.h"
#include "js/MemoryMetrics.h"
namespace js {

View file

@@ -21,6 +21,7 @@
#include "gc/Marking.h"
#include "gc/Zone.h"
#include "vm/Shape.h"
#include "vm/Symbol.h"
#include "jsgcinlines.h"
@@ -35,6 +36,9 @@ void
CheckTracedThing(JSTracer* trc, T thing);
} // namespace js
/*** Callback Tracer Dispatch ********************************************************************/
template <typename T>
T
DoCallback(JS::CallbackTracer* trc, T* thingp, const char* name)
@@ -73,6 +77,24 @@ DoCallback<jsid>(JS::CallbackTracer* trc, jsid* idp, const char* name)
return *idp;
}
void
JS::CallbackTracer::getTracingEdgeName(char* buffer, size_t bufferSize)
{
MOZ_ASSERT(bufferSize > 0);
if (contextFunctor_) {
(*contextFunctor_)(this, buffer, bufferSize);
return;
}
if (contextIndex_ != InvalidIndex) {
JS_snprintf(buffer, bufferSize, "%s[%lu]", contextName_, contextIndex_);
return;
}
JS_snprintf(buffer, bufferSize, "%s", contextName_);
}
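A standalone sketch of the fallback formatting path above (names, types, and buffer size here are arbitrary; the real method prefers the functor when one is installed):

#include <cstddef>
#include <cstdio>

static const size_t InvalidIndex = size_t(-1);

static void formatEdgeName(char* buf, size_t size, const char* name, size_t index) {
    if (index != InvalidIndex)
        snprintf(buf, size, "%s[%zu]", name, index);  // e.g. "objectSlots[3]"
    else
        snprintf(buf, size, "%s", name);
}

int main() {
    char buf[64];
    formatEdgeName(buf, sizeof buf, "objectSlots", 3);
    puts(buf);  // prints "objectSlots[3]"
    return 0;
}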
/*** Public Tracing API **************************************************************************/
JS_PUBLIC_API(void)
JS_CallUnbarrieredValueTracer(JSTracer* trc, Value* valuep, const char* name)
{
@@ -158,6 +180,21 @@ JS_TraceChildren(JSTracer* trc, void* thing, JSGCTraceKind kind)
js::TraceChildren(trc, thing, kind);
}
struct TraceChildrenFunctor {
template <typename T>
void operator()(JSTracer* trc, void* thing) {
static_cast<T*>(thing)->traceChildren(trc);
}
};
void
js::TraceChildren(JSTracer* trc, void* thing, JSGCTraceKind kind)
{
MOZ_ASSERT(thing);
TraceChildrenFunctor f;
CallTyped(f, kind, trc, thing);
}
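CallTyped itself is not shown in this hunk; the self-contained sketch below reproduces the assumed dispatch pattern: the runtime trace kind selects a static type, and the functor's templated operator() is invoked with it.

#include <cstdio>

struct Object { void traceChildren() { std::puts("Object children"); } };
struct String { void traceChildren() { std::puts("String children"); } };

enum class Kind { Object, String };

struct TraceChildrenSketch {
    template <typename T>
    void operator()(void* thing) { static_cast<T*>(thing)->traceChildren(); }
};

// Analogue of CallTyped: recover the static type from the runtime kind.
template <typename F>
void CallTypedSketch(F&& f, Kind kind, void* thing) {
    switch (kind) {
      case Kind::Object: f.template operator()<Object>(thing); break;
      case Kind::String: f.template operator()<String>(thing); break;
    }
}

int main() {
    Object o;
    String s;
    CallTypedSketch(TraceChildrenSketch(), Kind::Object, &o);
    CallTypedSketch(TraceChildrenSketch(), Kind::String, &s);
    return 0;
}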
JS_PUBLIC_API(void)
JS_TraceRuntime(JSTracer* trc)
{
@@ -219,6 +256,9 @@ JS_TraceIncomingCCWs(JSTracer* trc, const JS::ZoneSet& zones)
}
}
/*** Traced Edge Printer *************************************************************************/
static size_t
CountDecimalDigits(size_t num)
{
@@ -368,277 +408,3 @@ JS_GetTraceThingInfo(char* buf, size_t bufsize, JSTracer* trc, void* thing,
}
buf[bufsize - 1] = '\0';
}
JSTracer::JSTracer(JSRuntime* rt, TracerKindTag kindTag,
WeakMapTraceKind weakTraceKind /* = TraceWeakMapValues */)
: runtime_(rt)
, tag(kindTag)
, eagerlyTraceWeakMaps_(weakTraceKind)
{
}
void
JS::CallbackTracer::getTracingEdgeName(char* buffer, size_t bufferSize)
{
MOZ_ASSERT(bufferSize > 0);
if (contextFunctor_) {
(*contextFunctor_)(this, buffer, bufferSize);
return;
}
if (contextIndex_ != InvalidIndex) {
JS_snprintf(buffer, bufferSize, "%s[%lu]", contextName_, contextIndex_);
return;
}
JS_snprintf(buffer, bufferSize, "%s", contextName_);
}
void
JS::CallbackTracer::setTraceCallback(JSTraceCallback traceCallback)
{
callback = traceCallback;
}
bool
MarkStack::init(JSGCMode gcMode)
{
setBaseCapacity(gcMode);
MOZ_ASSERT(!stack_);
uintptr_t* newStack = js_pod_malloc<uintptr_t>(baseCapacity_);
if (!newStack)
return false;
setStack(newStack, 0, baseCapacity_);
return true;
}
void
MarkStack::setBaseCapacity(JSGCMode mode)
{
switch (mode) {
case JSGC_MODE_GLOBAL:
case JSGC_MODE_COMPARTMENT:
baseCapacity_ = NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY;
break;
case JSGC_MODE_INCREMENTAL:
baseCapacity_ = INCREMENTAL_MARK_STACK_BASE_CAPACITY;
break;
default:
MOZ_CRASH("bad gc mode");
}
if (baseCapacity_ > maxCapacity_)
baseCapacity_ = maxCapacity_;
}
void
MarkStack::setMaxCapacity(size_t maxCapacity)
{
MOZ_ASSERT(isEmpty());
maxCapacity_ = maxCapacity;
if (baseCapacity_ > maxCapacity_)
baseCapacity_ = maxCapacity_;
reset();
}
void
MarkStack::reset()
{
if (capacity() == baseCapacity_) {
// No size change; keep the current stack.
setStack(stack_, 0, baseCapacity_);
return;
}
uintptr_t* newStack = (uintptr_t*)js_realloc(stack_, sizeof(uintptr_t) * baseCapacity_);
if (!newStack) {
// If the realloc fails, just keep using the existing stack; it's
// not ideal but better than failing.
newStack = stack_;
baseCapacity_ = capacity();
}
setStack(newStack, 0, baseCapacity_);
}
bool
MarkStack::enlarge(unsigned count)
{
size_t newCapacity = Min(maxCapacity_, capacity() * 2);
if (newCapacity < capacity() + count)
return false;
size_t tosIndex = position();
uintptr_t* newStack = (uintptr_t*)js_realloc(stack_, sizeof(uintptr_t) * newCapacity);
if (!newStack)
return false;
setStack(newStack, tosIndex, newCapacity);
return true;
}
void
MarkStack::setGCMode(JSGCMode gcMode)
{
// The mark stack won't be resized until the next call to reset(), but
// that will happen at the end of the next GC.
setBaseCapacity(gcMode);
}
size_t
MarkStack::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
{
return mallocSizeOf(stack_);
}
/*
* DoNotTraceWeakMaps: the GC is recomputing the liveness of WeakMap entries,
* so we delay visiting entries.
*/
GCMarker::GCMarker(JSRuntime* rt)
: JSTracer(rt, JSTracer::MarkingTracer, DoNotTraceWeakMaps),
stack(size_t(-1)),
color(BLACK),
unmarkedArenaStackTop(nullptr),
markLaterArenas(0),
started(false),
strictCompartmentChecking(false)
{
}
bool
GCMarker::init(JSGCMode gcMode)
{
return stack.init(gcMode);
}
void
GCMarker::start()
{
MOZ_ASSERT(!started);
started = true;
color = BLACK;
MOZ_ASSERT(!unmarkedArenaStackTop);
MOZ_ASSERT(markLaterArenas == 0);
}
void
GCMarker::stop()
{
MOZ_ASSERT(isDrained());
MOZ_ASSERT(started);
started = false;
MOZ_ASSERT(!unmarkedArenaStackTop);
MOZ_ASSERT(markLaterArenas == 0);
/* Free non-ballast stack memory. */
stack.reset();
}
void
GCMarker::reset()
{
color = BLACK;
stack.reset();
MOZ_ASSERT(isMarkStackEmpty());
while (unmarkedArenaStackTop) {
ArenaHeader* aheader = unmarkedArenaStackTop;
MOZ_ASSERT(aheader->hasDelayedMarking);
MOZ_ASSERT(markLaterArenas);
unmarkedArenaStackTop = aheader->getNextDelayedMarking();
aheader->unsetDelayedMarking();
aheader->markOverflow = 0;
aheader->allocatedDuringIncremental = 0;
markLaterArenas--;
}
MOZ_ASSERT(isDrained());
MOZ_ASSERT(!markLaterArenas);
}
void
GCMarker::markDelayedChildren(ArenaHeader* aheader)
{
if (aheader->markOverflow) {
bool always = aheader->allocatedDuringIncremental;
aheader->markOverflow = 0;
for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next()) {
TenuredCell* t = i.getCell();
if (always || t->isMarked()) {
t->markIfUnmarked();
JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
}
}
} else {
MOZ_ASSERT(aheader->allocatedDuringIncremental);
PushArena(this, aheader);
}
aheader->allocatedDuringIncremental = 0;
/*
* Note that during an incremental GC we may still be allocating into
* aheader. However, prepareForIncrementalGC sets the
* allocatedDuringIncremental flag if we continue marking.
*/
}
bool
GCMarker::markDelayedChildren(SliceBudget& budget)
{
GCRuntime& gc = runtime()->gc;
gcstats::AutoPhase ap(gc.stats, gc.state() == MARK, gcstats::PHASE_MARK_DELAYED);
MOZ_ASSERT(unmarkedArenaStackTop);
do {
/*
* If marking gets delayed at the same arena again, we must repeat
* marking of its things. For that we pop arena from the stack and
* clear its hasDelayedMarking flag before we begin the marking.
*/
ArenaHeader* aheader = unmarkedArenaStackTop;
MOZ_ASSERT(aheader->hasDelayedMarking);
MOZ_ASSERT(markLaterArenas);
unmarkedArenaStackTop = aheader->getNextDelayedMarking();
aheader->unsetDelayedMarking();
markLaterArenas--;
markDelayedChildren(aheader);
budget.step(150);
if (budget.isOverBudget())
return false;
} while (unmarkedArenaStackTop);
MOZ_ASSERT(!markLaterArenas);
return true;
}
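A self-contained sketch of the slice-budget loop above (the real js::SliceBudget is richer; the 150-unit charge per arena matches the step() call above). Work is charged per item, and the loop yields once the slice is spent, leaving the remainder queued for the next slice:

#include <cstdint>
#include <cstdio>

struct SliceBudget {
    int64_t counter;
    explicit SliceBudget(int64_t n) : counter(n) {}
    void step(int64_t n) { counter -= n; }
    bool isOverBudget() const { return counter <= 0; }
};

struct Arena { Arena* next; };

static bool markDelayed(Arena*& top, SliceBudget& budget) {
    while (top) {
        top = top->next;   // pop one arena and (conceptually) mark its things
        budget.step(150);
        if (budget.isOverBudget())
            return false;  // over budget: yield with work still queued
    }
    return true;           // drained everything within this slice
}

int main() {
    Arena arenas[5] = {{&arenas[1]}, {&arenas[2]}, {&arenas[3]}, {&arenas[4]}, {nullptr}};
    Arena* top = &arenas[0];
    SliceBudget budget(300);  // only enough for two arenas
    std::printf("drained: %d\n", markDelayed(top, budget));  // drained: 0
    return 0;
}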
#ifdef DEBUG
void
GCMarker::checkZone(void* p)
{
MOZ_ASSERT(started);
DebugOnly<Cell*> cell = static_cast<Cell*>(p);
MOZ_ASSERT_IF(cell->isTenured(), cell->asTenured().zone()->isCollecting());
}
#endif
size_t
GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
{
size_t size = stack.sizeOfExcludingThis(mallocSizeOf);
for (ZonesIter zone(runtime(), WithAtoms); !zone.done(); zone.next())
size += zone->gcGrayRoots.sizeOfExcludingThis(mallocSizeOf);
return size;
}
void
js::SetMarkStackLimit(JSRuntime* rt, size_t limit)
{
rt->gc.setMarkStackLimit(limit);
}

View file

@@ -7,347 +7,117 @@
#ifndef js_Tracer_h
#define js_Tracer_h
#include "mozilla/DebugOnly.h"
#include "jsfriendapi.h"
#include "gc/Heap.h"
#include "js/GCAPI.h"
#include "js/SliceBudget.h"
#include "js/TracingAPI.h"
#include "gc/Barrier.h"
class JSLinearString;
class JSRope;
namespace js {
class BaseShape;
class GCMarker;
class LazyScript;
class NativeObject;
class ObjectGroup;
namespace gc {
struct ArenaHeader;
}
namespace jit {
class JitCode;
}
static const size_t NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY = 4096;
static const size_t INCREMENTAL_MARK_STACK_BASE_CAPACITY = 32768;
/*
* When the native stack is low, the GC does not call JS_TraceChildren to mark
* the reachable "children" of the thing. Rather the thing is put aside and
* JS_TraceChildren is called later with more space on the C stack.
*
* To implement such delayed marking of the children with minimal overhead for
* the normal case of sufficient native stack, the code adds a field per arena.
* The field markingDelay->link links all arenas with delayed things into a
* stack list with the pointer to stack top in GCMarker::unmarkedArenaStackTop.
* GCMarker::delayMarkingChildren adds arenas to the stack as necessary while
* markDelayedChildren pops the arenas from the stack until it empties.
*/
class MarkStack
{
friend class GCMarker;
uintptr_t* stack_;
uintptr_t* tos_;
uintptr_t* end_;
// The capacity we start with and reset() to.
size_t baseCapacity_;
size_t maxCapacity_;
public:
explicit MarkStack(size_t maxCapacity)
: stack_(nullptr),
tos_(nullptr),
end_(nullptr),
baseCapacity_(0),
maxCapacity_(maxCapacity)
{}
~MarkStack() {
js_free(stack_);
}
size_t capacity() { return end_ - stack_; }
ptrdiff_t position() const { return tos_ - stack_; }
void setStack(uintptr_t* stack, size_t tosIndex, size_t capacity) {
stack_ = stack;
tos_ = stack + tosIndex;
end_ = stack + capacity;
}
bool init(JSGCMode gcMode);
void setBaseCapacity(JSGCMode mode);
size_t maxCapacity() const { return maxCapacity_; }
void setMaxCapacity(size_t maxCapacity);
bool push(uintptr_t item) {
if (tos_ == end_) {
if (!enlarge(1))
return false;
}
MOZ_ASSERT(tos_ < end_);
*tos_++ = item;
return true;
}
bool push(uintptr_t item1, uintptr_t item2, uintptr_t item3) {
uintptr_t* nextTos = tos_ + 3;
if (nextTos > end_) {
if (!enlarge(3))
return false;
nextTos = tos_ + 3;
}
MOZ_ASSERT(nextTos <= end_);
tos_[0] = item1;
tos_[1] = item2;
tos_[2] = item3;
tos_ = nextTos;
return true;
}
bool isEmpty() const {
return tos_ == stack_;
}
uintptr_t pop() {
MOZ_ASSERT(!isEmpty());
return *--tos_;
}
void reset();
/* Grow the stack, ensuring there is space for at least count elements. */
bool enlarge(unsigned count);
void setGCMode(JSGCMode gcMode);
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
};
#ifdef DEBUG
namespace gc {
// Internal Tracing API
//
// Tracing is an abstract visitation of each edge in a JS heap graph.[1] The
// most common (and performance sensitive) use of this infrastructure is for GC
// "marking" as part of the mark-and-sweep collector; however, this
// infrastructure is much more general than that and is used for many other
// purposes as well.
//
// One commonly misunderstood subtlety of the tracing architecture is the role
// of graph vertices versus graph edges. Graph vertices are the heap
// allocations -- GC things -- that are returned by Allocate. Graph edges are
// pointers -- including tagged pointers like Value and jsid -- that link the
// allocations into a complex heap. The tracing API deals *only* with edges.
// Any action taken on the target of a graph edge is independent of the tracing
// itself.
//
// Another common misunderstanding relates to the role of the JSTracer. The
// JSTracer instance determines what tracing does when visiting an edge; it
// does not itself participate in the tracing process, other than to be passed
// through as opaque data. It works like a closure in that respect.
//
// Tracing implementations internal to SpiderMonkey should use these interfaces
// instead of the public interfaces in js/TracingAPI.h. Unlike the public
// tracing methods, these work on internal types and avoid an external call.
//
// Note that the implementations for these methods are, surprisingly, in
// js/src/gc/Marking.cpp. This is so that the compiler can inline as much as
// possible in the common, marking pathways. Conceptually, however, they remain
// as part of the generic "tracing" architecture, rather than the more specific
// marking implementation of tracing.
//
// 1 - In SpiderMonkey, we call this concept tracing rather than visiting
// because "visiting" is already used by the compiler. Also, it's been
// called "tracing" forever and changing it would be extremely difficult at
// this point.
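A toy illustration of the vertex/edge distinction described above (not SpiderMonkey API): the visitor is handed the slot (Node**), never just the target, which is precisely what lets a tracer update edges on behalf of a moving GC.

#include <cstdio>

struct Node { Node* left; Node* right; };

using EdgeVisitor = void (*)(Node** edge, const char* name);

// The vertex (Node) exposes its outgoing edges; actions on the targets are
// entirely up to the visitor.
static void traceChildren(Node* n, EdgeVisitor visit) {
    if (n->left)  visit(&n->left,  "left");
    if (n->right) visit(&n->right, "right");
}

int main() {
    Node a{nullptr, nullptr}, b{nullptr, nullptr}, root{&a, &b};
    traceChildren(&root, [](Node** edge, const char* name) {
        std::printf("edge %s -> %p\n", name, static_cast<void*>(*edge));
    });
    return 0;
}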
// Trace through an edge in the live object graph on behalf of tracing. The
// effect of tracing the edge depends on the JSTracer being used.
template <typename T>
extern bool
ZoneIsGCMarking(T* thing);
template <typename T>
extern bool
ZoneIsAtomsZoneForString(JSRuntime* rt, T* thing);
} /* namespace gc */
#endif
#define JS_COMPARTMENT_ASSERT(rt, thing) \
MOZ_ASSERT(gc::ZoneIsGCMarking((thing)) || gc::ZoneIsAtomsZoneForString((rt), (thing)))
class GCMarker : public JSTracer
{
public:
explicit GCMarker(JSRuntime* rt);
bool init(JSGCMode gcMode);
void setMaxCapacity(size_t maxCap) { stack.setMaxCapacity(maxCap); }
size_t maxCapacity() const { return stack.maxCapacity(); }
void start();
void stop();
void reset();
// Mark the given GC thing and traverse its children at some point.
template <typename T> void traverse(T thing);
// Calls traverse on target after making additional assertions.
template <typename S, typename T> void traverse(S source, T target);
// C++ requires explicit declarations of partial template instantiations.
template <typename S> void traverse(S source, jsid target);
/*
* Care must be taken changing the mark color from gray to black. The cycle
* collector depends on the invariant that there are no black to gray edges
* in the GC heap. This invariant lets the CC not trace through black
* objects. If this invariant is violated, the cycle collector may free
* objects that are still reachable.
*/
void setMarkColorGray() {
MOZ_ASSERT(isDrained());
MOZ_ASSERT(color == gc::BLACK);
color = gc::GRAY;
}
void setMarkColorBlack() {
MOZ_ASSERT(isDrained());
MOZ_ASSERT(color == gc::GRAY);
color = gc::BLACK;
}
uint32_t markColor() const { return color; }
void delayMarkingArena(gc::ArenaHeader* aheader);
void delayMarkingChildren(const void* thing);
void markDelayedChildren(gc::ArenaHeader* aheader);
bool markDelayedChildren(SliceBudget& budget);
bool hasDelayedChildren() const {
return !!unmarkedArenaStackTop;
}
bool isDrained() {
return isMarkStackEmpty() && !unmarkedArenaStackTop;
}
bool drainMarkStack(SliceBudget& budget);
void setGCMode(JSGCMode mode) { stack.setGCMode(mode); }
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
#ifdef DEBUG
bool shouldCheckCompartments() { return strictCompartmentChecking; }
#endif
/* This is public exclusively for ScanRope. */
MarkStack stack;
private:
#ifdef DEBUG
void checkZone(void* p);
#else
void checkZone(void* p) {}
#endif
/*
* We use a common mark stack to mark GC things of different types and use
* the explicit tags to distinguish them when it cannot be deduced from
* the context of push or pop operation.
*/
enum StackTag {
ValueArrayTag,
ObjectTag,
GroupTag,
SavedValueArrayTag,
JitCodeTag,
LastTag = JitCodeTag
};
static const uintptr_t StackTagMask = 7;
static_assert(StackTagMask >= uintptr_t(LastTag), "The tag mask must subsume the tags.");
static_assert(StackTagMask <= gc::CellMask, "The tag mask must be embeddable in a Cell*.");
// Push an object onto the stack for later tracing and assert that it has
// already been marked.
void repush(JSObject* obj) {
MOZ_ASSERT(gc::TenuredCell::fromPointer(obj)->isMarked(markColor()));
pushTaggedPtr(ObjectTag, obj);
}
template <typename T> void markAndTraceChildren(T* thing);
template <typename T> void markAndPush(StackTag tag, T* thing);
template <typename T> void markAndScan(T* thing);
void eagerlyMarkChildren(JSLinearString* str);
void eagerlyMarkChildren(JSRope* rope);
void eagerlyMarkChildren(JSString* str);
void eagerlyMarkChildren(LazyScript *thing);
void eagerlyMarkChildren(Shape* shape);
void lazilyMarkChildren(ObjectGroup* group);
// We may not have concrete types yet, so this has to be out of the header.
template <typename T>
void dispatchToTraceChildren(T* thing);
// Mark the given GC thing, but do not trace its children. Return true
// if the thing became marked.
template <typename T>
bool mark(T* thing);
void pushTaggedPtr(StackTag tag, void* ptr) {
checkZone(ptr);
uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
MOZ_ASSERT(!(addr & StackTagMask));
if (!stack.push(addr | uintptr_t(tag)))
delayMarkingChildren(ptr);
}
void pushValueArray(JSObject* obj, void* start, void* end) {
checkZone(obj);
MOZ_ASSERT(start <= end);
uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
/*
* Push in the reverse order so obj will be on top. If we cannot push
* the array, we trigger delay marking for the whole object.
*/
if (!stack.push(endAddr, startAddr, tagged))
delayMarkingChildren(obj);
}
bool isMarkStackEmpty() {
return stack.isEmpty();
}
bool restoreValueArray(NativeObject* obj, void** vpp, void** endp);
void saveValueRanges();
inline void processMarkStackTop(SliceBudget& budget);
/* The color is only applied to objects and functions. */
uint32_t color;
/* Pointer to the top of the stack of arenas we are delaying marking on. */
js::gc::ArenaHeader* unmarkedArenaStackTop;
/* Count of arenas that are currently in the stack. */
mozilla::DebugOnly<size_t> markLaterArenas;
/* Assert that start and stop are called with correct ordering. */
mozilla::DebugOnly<bool> started;
/*
* If this is true, all marked objects must belong to a compartment being
* GCed. This is used to look for compartment bugs.
*/
mozilla::DebugOnly<bool> strictCompartmentChecking;
};
// Append traced things to a buffer on the zone for use later in the GC.
// See the comment in GCRuntime.h above grayBufferState for details.
class BufferGrayRootsTracer : public JS::CallbackTracer
{
// Set to true if we OOM while buffering gray roots.
bool bufferingGrayRootsFailed;
void appendGrayRoot(gc::TenuredCell* thing, JSGCTraceKind kind);
public:
explicit BufferGrayRootsTracer(JSRuntime* rt)
: JS::CallbackTracer(rt, grayTraceCallback), bufferingGrayRootsFailed(false)
{}
static void grayTraceCallback(JS::CallbackTracer* trc, void** thingp, JSGCTraceKind kind) {
auto tracer = static_cast<BufferGrayRootsTracer*>(trc);
tracer->appendGrayRoot(gc::TenuredCell::fromPointer(*thingp), kind);
}
bool failed() const { return bufferingGrayRootsFailed; }
};
void
SetMarkStackLimit(JSRuntime* rt, size_t limit);
TraceEdge(JSTracer* trc, BarrieredBase<T>* thingp, const char* name);
// Return true if this trace is happening on behalf of gray buffering during
// the marking phase of incremental GC.
inline bool
IsBufferingGrayRoots(JSTracer* trc)
{
return trc->isCallbackTracer() &&
trc->asCallbackTracer()->hasCallback(BufferGrayRootsTracer::grayTraceCallback);
}
// Trace through a "root" edge. These edges are the initial edges in the object
// graph traversal. Root edges are asserted to only be traversed in the initial
// phase of a GC.
template <typename T>
void
TraceRoot(JSTracer* trc, T* thingp, const char* name);
} /* namespace js */
// Like TraceEdge, but for edges that do not use one of the automatic barrier
// classes and, thus, must be treated specially for moving GC. This method is
// separate from TraceEdge to make accidental use of such edges more obvious.
template <typename T>
void
TraceManuallyBarrieredEdge(JSTracer* trc, T* thingp, const char* name);
// Trace all edges contained in the given array.
template <typename T>
void
TraceRange(JSTracer* trc, size_t len, BarrieredBase<T>* vec, const char* name);
// Trace all root edges in the given array.
template <typename T>
void
TraceRootRange(JSTracer* trc, size_t len, T* vec, const char* name);
// Trace an edge that crosses compartment boundaries. If the compartment of the
// destination thing is not being GC'd, then the edge will not be traced.
template <typename T>
void
TraceCrossCompartmentEdge(JSTracer* trc, JSObject* src, BarrieredBase<T>* dst,
const char* name);
// As above but with manual barriers.
template <typename T>
void
TraceManuallyBarrieredCrossCompartmentEdge(JSTracer* trc, JSObject* src, T* dst,
const char* name);
// Permanent atoms and well-known symbols are shared between runtimes and must
// use a separate marking path so that we can filter them out of normal heap
// tracing.
template <typename T>
void
TraceProcessGlobalRoot(JSTracer* trc, T* thing, const char* name);
// Trace a root edge that uses the base GC thing type, instead of a more
// specific type.
void
TraceGenericPointerRoot(JSTracer* trc, gc::Cell** thingp, const char* name);
// Trace a non-root edge that uses the base GC thing type, instead of a more
// specific type.
void
TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc, gc::Cell** thingp, const char* name);
// Object slots are not stored as a contiguous vector, so marking them as such
// will lead to the wrong indices if such are requested when tracing.
void
TraceObjectSlots(JSTracer* trc, NativeObject* obj, uint32_t start, uint32_t nslots);
// Deprecated. Please use one of the strongly typed variants above.
void
TraceChildren(JSTracer* trc, void* thing, JSGCTraceKind kind);
} // namespace js
#endif /* js_Tracer_h */

View file

@@ -1094,7 +1094,7 @@ MarkIonJSFrame(JSTracer* trc, const JitFrameIterator& frame)
for (GeneralRegisterBackwardIterator iter(safepoint.allGprSpills()); iter.more(); iter++) {
--spill;
if (gcRegs.has(*iter))
gc::TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(spill), "ion-gc-spill");
TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(spill), "ion-gc-spill");
else if (valueRegs.has(*iter))
TraceRoot(trc, reinterpret_cast<Value*>(spill), "ion-value-spill");
}
@@ -1422,7 +1422,7 @@ MarkJitExitFrame(JSTracer* trc, const JitFrameIterator& frame)
TraceRoot(trc, reinterpret_cast<Value*>(argBase), "ion-vm-args");
break;
case VMFunction::RootCell:
gc::TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(argBase), "ion-vm-args");
TraceGenericPointerRoot(trc, reinterpret_cast<gc::Cell**>(argBase), "ion-vm-args");
break;
}
@@ -1456,7 +1456,7 @@ MarkJitExitFrame(JSTracer* trc, const JitFrameIterator& frame)
TraceRoot(trc, footer->outParam<Value>(), "ion-vm-outvp");
break;
case VMFunction::RootCell:
gc::TraceGenericPointerRoot(trc, footer->outParam<gc::Cell*>(), "ion-vm-out");
TraceGenericPointerRoot(trc, footer->outParam<gc::Cell*>(), "ion-vm-out");
break;
}
}

View file

@@ -812,8 +812,8 @@ TraceOneDataRelocation(JSTracer* trc, Iter* iter)
MOZ_ASSERT(!(uintptr_t(ptr) & 0x1));
// No barrier needed since these are constants.
gc::TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(&ptr),
"ion-masm-ptr");
TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(&ptr),
"ion-masm-ptr");
if (ptr != prior) {
MacroAssemblerARM::ma_mov_patch(Imm32(int32_t(ptr)), dest, Assembler::Always, rs, ins);

View file

@@ -295,7 +295,7 @@ TraceOneDataRelocation(JSTracer* trc, Instruction* inst)
MOZ_ASSERT(!(uintptr_t(ptr) & 0x1));
// No barrier needed since these are constants.
gc::TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(&ptr),
TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(&ptr),
"ion-masm-ptr");
if (ptr != prior) {
Assembler::UpdateLuiOriValue(inst, inst->next(), uint32_t(ptr));

View file

@@ -74,8 +74,8 @@ TraceDataRelocations(JSTracer* trc, uint8_t* buffer, CompactBufferReader& reader
MOZ_ASSERT(!(*reinterpret_cast<uintptr_t*>(ptr) & 0x1));
// No barrier needed since these are constants.
gc::TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(ptr),
"ion-masm-ptr");
TraceManuallyBarrieredGenericPointerEdge(trc, reinterpret_cast<gc::Cell**>(ptr),
"ion-masm-ptr");
}
}

View file

@@ -4087,7 +4087,7 @@ JSObject::traceChildren(JSTracer* trc)
{
GetObjectSlotNameFunctor func(nobj);
JS::AutoTracingDetails ctx(trc, func);
MarkObjectSlots(trc, nobj, 0, nobj->slotSpan());
TraceObjectSlots(trc, nobj, 0, nobj->slotSpan());
}
do {

View file

@@ -933,7 +933,7 @@ class Shape : public gc::TenuredCell
static inline ThingRootKind rootKind() { return THING_ROOT_SHAPE; }
inline void traceChildren(JSTracer* trc);
void traceChildren(JSTracer* trc);
inline Shape* search(ExclusiveContext* cx, jsid id);
inline Shape* searchLinear(jsid id);