Bug 1169086 - Use virtual dispatch to implement callback tracer; r=jonco, r=mccr8

--HG--
extra : rebase_source : 8da1af998621319f7a0112f1870080dd8835063e
This commit is contained in:
Terrence Cole 2015-05-28 16:35:08 -07:00
Parent 0d430ff1b0
Commit 5dd32db8cf
17 changed files with 173 additions and 219 deletions
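The heart of the patch: JS::CallbackTracer previously took a JSTraceCallback function pointer at construction and invoked it for every traced edge; now subclasses override a pure-virtual trace() method and the engine dispatches virtually. A minimal sketch of the two styles, using a hypothetical CountingTracer that is not part of this patch:

    #include "js/TracingAPI.h"

    // Before: a free-function callback plus a manual downcast.
    struct OldCountingTracer : JS::CallbackTracer {
        size_t edges;
        explicit OldCountingTracer(JSRuntime* rt)
          : JS::CallbackTracer(rt, Count), edges(0) {}
        static void Count(JS::CallbackTracer* trc, void** thingp, JS::TraceKind kind) {
            static_cast<OldCountingTracer*>(trc)->edges++;
        }
    };

    // After: virtual dispatch makes the subclass's state directly available.
    struct NewCountingTracer : JS::CallbackTracer {
        size_t edges;
        explicit NewCountingTracer(JSRuntime* rt) : JS::CallbackTracer(rt), edges(0) {}
        void trace(void** thingp, JS::TraceKind kind) override { edges++; }
    };

Every file below repeats this mechanical transformation: delete the trampoline and the static_cast, fold the callback body into a trace() override.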

View file

@@ -2408,16 +2408,6 @@ IsInCertifiedApp(JSContext* aCx, JSObject* aObj)
          Preferences::GetBool("dom.ignore_webidl_scope_checks", false);
 }
-#ifdef DEBUG
-void
-VerifyTraceProtoAndIfaceCacheCalled(JS::CallbackTracer *trc, void **thingp,
-                                    JS::TraceKind kind)
-{
-  // We don't do anything here, we only want to verify that
-  // TraceProtoAndIfaceCache was called.
-}
-#endif
 void
 FinalizeGlobal(JSFreeOp* aFreeOp, JSObject* aObj)
 {

View file

@@ -526,17 +526,20 @@ AllocateProtoAndIfaceCache(JSObject* obj, ProtoAndIfaceCache::Kind aKind)
 }
 #ifdef DEBUG
-void
-VerifyTraceProtoAndIfaceCacheCalled(JS::CallbackTracer *trc, void **thingp,
-                                    JS::TraceKind kind);
 struct VerifyTraceProtoAndIfaceCacheCalledTracer : public JS::CallbackTracer
 {
   bool ok;
-  explicit VerifyTraceProtoAndIfaceCacheCalledTracer(JSRuntime *rt)
-    : JS::CallbackTracer(rt, VerifyTraceProtoAndIfaceCacheCalled), ok(false)
-  {}
+  explicit VerifyTraceProtoAndIfaceCacheCalledTracer(JSRuntime *rt)
+    : JS::CallbackTracer(rt), ok(false)
+  {}
+  void trace(void** thingp, JS::TraceKind kind) {
+    // We don't do anything here, we only want to verify that
+    // TraceProtoAndIfaceCache was called.
+  }
+  TracerKind getTracerKind() const override { return TracerKind::VerifyTraceProtoAndIface; }
 };
 #endif
@@ -547,8 +550,8 @@ TraceProtoAndIfaceCache(JSTracer* trc, JSObject* obj)
 #ifdef DEBUG
   if (trc->isCallbackTracer() &&
-      trc->asCallbackTracer()->hasCallback(
-        VerifyTraceProtoAndIfaceCacheCalled)) {
+      (trc->asCallbackTracer()->getTracerKind() ==
+       JS::CallbackTracer::TracerKind::VerifyTraceProtoAndIface)) {
     // We don't do anything here, we only want to verify that
     // TraceProtoAndIfaceCache was called.
     static_cast<VerifyTraceProtoAndIfaceCacheCalledTracer*>(trc)->ok = true;
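The getTracerKind() comparison above is the replacement for the deleted hasCallback() test: tracer identity is now a DEBUG-only kind enum rather than a function-pointer address. A sketch of how the verifier might be driven (the helper name is hypothetical; the real assertion site lives elsewhere in the tree):

    #ifdef DEBUG
    static void
    AssertGlobalTracesProtoAndIfaceCache(JSRuntime* rt, JSObject* global)
    {
      // Tracing the global must reach TraceProtoAndIfaceCache, which
      // recognizes this tracer by its kind and sets |ok| (see above).
      VerifyTraceProtoAndIfaceCacheCalledTracer trc(rt);
      JS_TraceChildren(&trc, global, JS::TraceKind::Object);
      MOZ_ASSERT(trc.ok, "trace hook failed to call TraceProtoAndIfaceCache");
    }
    #endif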

View file

@@ -127,21 +127,13 @@ class AutoTracingCallback;
 class JS_PUBLIC_API(CallbackTracer) : public JSTracer
 {
   public:
-    CallbackTracer(JSRuntime* rt, JSTraceCallback traceCallback,
-                   WeakMapTraceKind weakTraceKind = TraceWeakMapValues)
-      : JSTracer(rt, JSTracer::TracerKindTag::Callback, weakTraceKind), callback(traceCallback),
+    CallbackTracer(JSRuntime* rt, WeakMapTraceKind weakTraceKind = TraceWeakMapValues)
+      : JSTracer(rt, JSTracer::TracerKindTag::Callback, weakTraceKind),
         contextName_(nullptr), contextIndex_(InvalidIndex), contextFunctor_(nullptr)
     {}
-    // Test if the given callback is the same as our callback.
-    bool hasCallback(JSTraceCallback maybeCallback) const {
-        return maybeCallback == callback;
-    }
-    // Call the callback.
-    void invoke(void** thing, JS::TraceKind kind) {
-        callback(this, thing, kind);
-    }
+    // Override this method to receive notification when an edge is visited.
+    virtual void trace(void** thing, JS::TraceKind kind) = 0;
     // Access to the tracing context:
     // When tracing with a JS::CallbackTracer, we invoke the callback with the
@@ -188,11 +180,12 @@ class JS_PUBLIC_API(CallbackTracer) : public JSTracer
         virtual void operator()(CallbackTracer* trc, char* buf, size_t bufsize) = 0;
     };
-  private:
-    // Exposed publicly for several callers that need to check if the tracer
-    // calling them is of the right type.
-    JSTraceCallback callback;
+#ifdef DEBUG
+    enum class TracerKind { DoNotCare, Moving, GrayBuffering, VerifyTraceProtoAndIface };
+    virtual TracerKind getTracerKind() const { return TracerKind::DoNotCare; }
+#endif
+  private:
     friend class AutoTracingName;
     const char* contextName_;
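The context members kept at the bottom of the class (contextName_, contextIndex_, contextFunctor_) let a trace() override ask which edge it is currently standing on. A sketch of a subclass consuming them through getTracingEdgeName(), the helper that DumpHeapTracer and TraversalTracer below rely on (EdgePrintingTracer itself is hypothetical):

    struct EdgePrintingTracer : JS::CallbackTracer {
        explicit EdgePrintingTracer(JSRuntime* rt) : JS::CallbackTracer(rt) {}
        void trace(void** thingp, JS::TraceKind kind) override {
            // Formats the current edge's context, e.g. "objectElements[3]",
            // into the buffer (or runs the context functor if one is set).
            char buf[256];
            getTracingEdgeName(buf, sizeof(buf));
            printf("edge %s -> %p\n", buf, *thingp);
        }
    };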

View file

@@ -808,18 +808,14 @@ class HasChildTracer : public JS::CallbackTracer
     RootedValue child_;
     bool found_;
-    void onEdge(void** thingp, JS::TraceKind kind) {
+    void trace(void** thingp, JS::TraceKind kind) {
         if (*thingp == child_.toGCThing())
             found_ = true;
     }
-    static void trampoline(JS::CallbackTracer* trc, void** thingp, JS::TraceKind kind) {
-        static_cast<HasChildTracer*>(trc)->onEdge(thingp, kind);
-    }
   public:
     HasChildTracer(JSRuntime* rt, HandleValue child)
-      : JS::CallbackTracer(rt, trampoline, TraceWeakMapKeysValues), child_(rt, child), found_(false)
+      : JS::CallbackTracer(rt, TraceWeakMapKeysValues), child_(rt, child), found_(false)
     {}
     bool found() const { return found_; }
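HasChildTracer is the one-shot query pattern in its simplest form: run the tracer over a single cell's edges, then read the latched result. A sketch of the driving code (hypothetical wrapper; the real caller is the testing function surrounding this hunk):

    static bool
    GCThingHasChild(JSRuntime* rt, void* parent, JS::TraceKind parentKind,
                    JS::HandleValue child)
    {
        HasChildTracer trc(rt, child);
        JS_TraceChildren(&trc, parent, parentKind);  // trace() fires once per edge
        return trc.found();
    }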

View file

@@ -57,7 +57,6 @@ function indirectCallCannotGC(fullCaller, fullVariable)
 // Ignore calls through functions pointers with these types
 var ignoreClasses = {
-    "JS::CallbackTracer" : true,
     "JSStringFinalizer" : true,
     "SprintfState" : true,
     "SprintfStateStr" : true,
@@ -330,7 +329,10 @@ function isOverridableField(initialCSU, csu, field)
         if (field == 'GetWindowProxy' || field == 'GetWindowProxyPreserveColor')
             return false;
     }
+    if (initialCSU == 'nsICycleCollectorListener' && field == 'NoteWeakMapEntry')
+        return false;
+    if (initialCSU == 'nsICycleCollectorListener' && field == 'NoteEdge')
+        return false;
     return true;
 }

View file

@@ -139,12 +139,11 @@ CheckHashTablesAfterMovingGC(JSRuntime* rt);
 #endif
 struct MovingTracer : JS::CallbackTracer {
-    explicit MovingTracer(JSRuntime* rt) : CallbackTracer(rt, Visit, TraceWeakMapKeysValues) {}
-    static void Visit(JS::CallbackTracer* jstrc, void** thingp, JS::TraceKind kind);
-    static bool IsMovingTracer(JSTracer* trc) {
-        return trc->isCallbackTracer() && trc->asCallbackTracer()->hasCallback(Visit);
-    }
+    explicit MovingTracer(JSRuntime* rt) : CallbackTracer(rt, TraceWeakMapKeysValues) {}
+    void trace(void** thingp, JS::TraceKind kind) override;
+#ifdef DEBUG
+    TracerKind getTracerKind() const override { return TracerKind::Moving; }
+#endif
 };
 class AutoMaybeStartBackgroundAllocation

View file

@@ -25,6 +25,7 @@
 namespace js {
 class AutoLockGC;
+class VerifyPreTracer;
 namespace gc {
@@ -1005,7 +1006,7 @@ class GCRuntime
      * Number of the committed arenas in all GC chunks including empty chunks.
      */
     mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> numArenasFreeCommitted;
-    void* verifyPreData;
+    VerifyPreTracer* verifyPreData;
     bool chunkAllocationSinceLastGC;
     int64_t nextFullGCTime;
     int64_t lastGCTime;

View file

@@ -137,6 +137,13 @@ IsThingPoisoned(T* thing)
     }
     return false;
 }
+static bool
+IsMovingTracer(JSTracer *trc)
+{
+    return trc->isCallbackTracer() &&
+           trc->asCallbackTracer()->getTracerKind() == JS::CallbackTracer::TracerKind::Moving;
+}
 #endif
 template <typename T> bool ThingIsPermanentAtomOrWellKnownSymbol(T* thing) { return false; }
@@ -173,8 +180,7 @@ js::CheckTracedThing(JSTracer* trc, T thing)
     if (IsInsideNursery(thing))
         return;
-    MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc) && !trc->isTenuringTracer(),
-                  !IsForwarded(thing));
+    MOZ_ASSERT_IF(!IsMovingTracer(trc) && !trc->isTenuringTracer(), !IsForwarded(thing));
     /*
      * Permanent atoms are not associated with this runtime, but will be
@@ -186,8 +192,8 @@ js::CheckTracedThing(JSTracer* trc, T thing)
     Zone* zone = thing->zoneFromAnyThread();
     JSRuntime* rt = trc->runtime();
-    MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc), CurrentThreadCanAccessZone(zone));
-    MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc), CurrentThreadCanAccessRuntime(rt));
+    MOZ_ASSERT_IF(!IsMovingTracer(trc), CurrentThreadCanAccessZone(zone));
+    MOZ_ASSERT_IF(!IsMovingTracer(trc), CurrentThreadCanAccessRuntime(rt));
     MOZ_ASSERT(zone->runtimeFromAnyThread() == trc->runtime());
@@ -201,7 +207,7 @@ js::CheckTracedThing(JSTracer* trc, T thing)
      */
     bool isGcMarkingTracer = trc->isMarkingTracer();
-    MOZ_ASSERT_IF(zone->requireGCTracer(), isGcMarkingTracer || IsBufferingGrayRoots(trc));
+    MOZ_ASSERT_IF(zone->requireGCTracer(), isGcMarkingTracer || IsBufferGrayRootsTracer(trc));
     if (isGcMarkingTracer) {
         GCMarker* gcMarker = static_cast<GCMarker*>(trc);
@@ -2266,17 +2272,15 @@ TypeSet::MarkTypeUnbarriered(JSTracer* trc, TypeSet::Type* v, const char* name)
 /*** Cycle Collector Barrier Implementation *******************************************************/
 #ifdef DEBUG
-static void
-AssertNonGrayGCThing(JS::CallbackTracer* trc, void** thingp, JS::TraceKind kind)
-{
-    DebugOnly<Cell*> thing(static_cast<Cell*>(*thingp));
-    MOZ_ASSERT_IF(thing->isTenured(), !thing->asTenured().isMarked(js::gc::GRAY));
-}
+struct AssertNonGrayTracer : public JS::CallbackTracer {
+    explicit AssertNonGrayTracer(JSRuntime* rt) : JS::CallbackTracer(rt) {}
+    void trace(void** thingp, JS::TraceKind kind) override {
+        DebugOnly<Cell*> thing(static_cast<Cell*>(*thingp));
+        MOZ_ASSERT_IF(thing->isTenured(), !thing->asTenured().isMarked(js::gc::GRAY));
+    }
+};
 #endif
-static void
-UnmarkGrayChildren(JS::CallbackTracer* trc, void** thingp, JS::TraceKind kind);
 struct UnmarkGrayTracer : public JS::CallbackTracer
 {
     /*
@@ -2284,19 +2288,21 @@ struct UnmarkGrayTracer : public JS::CallbackTracer
      * up any color mismatches involving weakmaps when it runs.
      */
     explicit UnmarkGrayTracer(JSRuntime* rt)
-      : JS::CallbackTracer(rt, UnmarkGrayChildren, DoNotTraceWeakMaps),
+      : JS::CallbackTracer(rt, DoNotTraceWeakMaps),
         tracingShape(false),
         previousShape(nullptr),
         unmarkedAny(false)
     {}
     UnmarkGrayTracer(JSTracer* trc, bool tracingShape)
-      : JS::CallbackTracer(trc->runtime(), UnmarkGrayChildren, DoNotTraceWeakMaps),
+      : JS::CallbackTracer(trc->runtime(), DoNotTraceWeakMaps),
         tracingShape(tracingShape),
         previousShape(nullptr),
         unmarkedAny(false)
     {}
+    void trace(void** thingp, JS::TraceKind kind) override;
     /* True iff we are tracing the immediate children of a shape. */
     bool tracingShape;
@@ -2337,18 +2343,18 @@ struct UnmarkGrayTracer : public JS::CallbackTracer
  * of the containers, we must add unmark-graying read barriers to these
  * containers.
  */
-static void
-UnmarkGrayChildren(JS::CallbackTracer* trc, void** thingp, JS::TraceKind kind)
+void
+UnmarkGrayTracer::trace(void** thingp, JS::TraceKind kind)
 {
     int stackDummy;
-    if (!JS_CHECK_STACK_SIZE(trc->runtime()->mainThread.nativeStackLimit[StackForSystemCode],
+    if (!JS_CHECK_STACK_SIZE(runtime()->mainThread.nativeStackLimit[StackForSystemCode],
                              &stackDummy))
     {
         /*
         * If we run out of stack, we take a more drastic measure: require that
         * we GC again before the next CC.
         */
-        trc->runtime()->gc.setGrayBitsInvalid();
+        runtime()->gc.setGrayBitsInvalid();
         return;
     }
@@ -2358,7 +2364,7 @@ UnmarkGrayChildren(JS::CallbackTracer* trc, void** thingp, JS::TraceKind kind)
     // to only black edges.
     if (!cell->isTenured()) {
 #ifdef DEBUG
-        JS::CallbackTracer nongray(trc->runtime(), AssertNonGrayGCThing);
+        AssertNonGrayTracer nongray(runtime());
         TraceChildren(&nongray, cell, kind);
 #endif
         return;
@@ -2369,28 +2375,27 @@ UnmarkGrayChildren(JS::CallbackTracer* trc, void** thingp, JS::TraceKind kind)
         return;
     tenured.unmark(js::gc::GRAY);
-    UnmarkGrayTracer* tracer = static_cast<UnmarkGrayTracer*>(trc);
-    tracer->unmarkedAny = true;
+    unmarkedAny = true;
     // Trace children of |tenured|. If |tenured| and its parent are both
     // shapes, |tenured| will get saved to mPreviousShape without being traced.
     // The parent will later trace |tenured|. This is done to avoid increasing
     // the stack depth during shape tracing. It is safe to do because a shape
     // can only have one child that is a shape.
-    UnmarkGrayTracer childTracer(tracer, kind == JS::TraceKind::Shape);
+    UnmarkGrayTracer childTracer(this, kind == JS::TraceKind::Shape);
     if (kind != JS::TraceKind::Shape) {
         TraceChildren(&childTracer, &tenured, kind);
         MOZ_ASSERT(!childTracer.previousShape);
-        tracer->unmarkedAny |= childTracer.unmarkedAny;
+        unmarkedAny |= childTracer.unmarkedAny;
         return;
     }
     MOZ_ASSERT(kind == JS::TraceKind::Shape);
     Shape* shape = static_cast<Shape*>(&tenured);
-    if (tracer->tracingShape) {
-        MOZ_ASSERT(!tracer->previousShape);
-        tracer->previousShape = shape;
+    if (tracingShape) {
+        MOZ_ASSERT(!previousShape);
+        previousShape = shape;
         return;
     }
@@ -2400,7 +2405,7 @@ UnmarkGrayChildren(JS::CallbackTracer* trc, void** thingp, JS::TraceKind kind)
         shape = childTracer.previousShape;
         childTracer.previousShape = nullptr;
     } while (shape);
-    tracer->unmarkedAny |= childTracer.unmarkedAny;
+    unmarkedAny |= childTracer.unmarkedAny;
 }
 bool
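The previousShape handoff above turns recursion along a shape chain into iteration; the do/while whose tail is visible in the final hunk reads, in full (reconstructed for clarity from the visible lines, so treat it as a sketch):

    do {
        // Trace one shape; if its own child is a shape, childTracer.trace()
        // parks it in previousShape instead of recursing into it.
        MOZ_ASSERT(!childTracer.previousShape);
        TraceChildren(&childTracer, shape, JS::TraceKind::Shape);
        shape = childTracer.previousShape;
        childTracer.previousShape = nullptr;
    } while (shape);

Because a shape has at most one child that is a shape, the chain is a singly linked list and unmarking it needs only constant stack depth.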

View file

@@ -300,8 +300,12 @@ class GCMarker : public JSTracer
     mozilla::DebugOnly<bool> strictCompartmentChecking;
 };
+#ifdef DEBUG
+// Return true if this trace is happening on behalf of gray buffering during
+// the marking phase of incremental GC.
 bool
-IsBufferingGrayRoots(JSTracer* trc);
+IsBufferGrayRootsTracer(JSTracer* trc);
+#endif
 namespace gc {

View file

@@ -552,21 +552,31 @@ class BufferGrayRootsTracer : public JS::CallbackTracer
     // Set to false if we OOM while buffering gray roots.
     bool bufferingGrayRootsFailed;
-    void appendGrayRoot(gc::TenuredCell* thing, JS::TraceKind kind);
+    void trace(void** thingp, JS::TraceKind kind) override;
   public:
     explicit BufferGrayRootsTracer(JSRuntime* rt)
-      : JS::CallbackTracer(rt, grayTraceCallback), bufferingGrayRootsFailed(false)
+      : JS::CallbackTracer(rt), bufferingGrayRootsFailed(false)
     {}
-    static void grayTraceCallback(JS::CallbackTracer* trc, void** thingp, JS::TraceKind kind) {
-        auto tracer = static_cast<BufferGrayRootsTracer*>(trc);
-        tracer->appendGrayRoot(gc::TenuredCell::fromPointer(*thingp), kind);
-    }
     bool failed() const { return bufferingGrayRootsFailed; }
+#ifdef DEBUG
+    TracerKind getTracerKind() const override { return TracerKind::GrayBuffering; }
+#endif
 };
+#ifdef DEBUG
+// Return true if this trace is happening on behalf of gray buffering during
+// the marking phase of incremental GC.
+bool
+js::IsBufferGrayRootsTracer(JSTracer* trc)
+{
+    return trc->isCallbackTracer() &&
+           trc->asCallbackTracer()->getTracerKind() == JS::CallbackTracer::TracerKind::GrayBuffering;
+}
+#endif
 void
 js::gc::GCRuntime::bufferGrayRoots()
 {
@@ -595,13 +605,15 @@ struct SetMaybeAliveFunctor {
 };
 void
-BufferGrayRootsTracer::appendGrayRoot(TenuredCell* thing, JS::TraceKind kind)
+BufferGrayRootsTracer::trace(void** thingp, JS::TraceKind kind)
 {
     MOZ_ASSERT(runtime()->isHeapBusy());
     if (bufferingGrayRootsFailed)
         return;
+    gc::TenuredCell* thing = gc::TenuredCell::fromPointer(*thingp);
     Zone* zone = thing->zone();
     if (zone->isCollecting()) {
         // See the comment on SetMaybeAliveFlag to see why we only do this for
@@ -634,12 +646,3 @@ GCRuntime::resetBufferedGrayRoots() const
         zone->gcGrayRoots.clearAndFree();
 }
-// Return true if this trace is happening on behalf of gray buffering during
-// the marking phase of incremental GC.
-bool
-js::IsBufferingGrayRoots(JSTracer* trc)
-{
-    return trc->isCallbackTracer() &&
-           trc->asCallbackTracer()->hasCallback(BufferGrayRootsTracer::grayTraceCallback);
-}

View file

@@ -48,7 +48,7 @@ DoCallback(JS::CallbackTracer* trc, T* thingp, const char* name)
     CheckTracedThing(trc, *thingp);
     JS::TraceKind kind = MapTypeToTraceKind<typename mozilla::RemovePointer<T>::Type>::kind;
     JS::AutoTracingName ctx(trc, name);
-    trc->invoke((void**)thingp, kind);
+    trc->trace(reinterpret_cast<void**>(thingp), kind);
     return *thingp;
 }
 #define INSTANTIATE_ALL_VALID_TRACE_FUNCTIONS(name, type, _) \
@@ -318,27 +318,25 @@ TraceObjectGroupCycleCollectorChildrenCallback(JS::CallbackTracer* trc,
 struct ObjectGroupCycleCollectorTracer : public JS::CallbackTracer
 {
     explicit ObjectGroupCycleCollectorTracer(JS::CallbackTracer* innerTracer)
-      : JS::CallbackTracer(innerTracer->runtime(),
-                           TraceObjectGroupCycleCollectorChildrenCallback,
-                           DoNotTraceWeakMaps),
+      : JS::CallbackTracer(innerTracer->runtime(), DoNotTraceWeakMaps),
         innerTracer(innerTracer)
     {}
+    void trace(void** thingp, JS::TraceKind kind) override;
     JS::CallbackTracer* innerTracer;
     Vector<ObjectGroup*, 4, SystemAllocPolicy> seen, worklist;
 };
 void
-TraceObjectGroupCycleCollectorChildrenCallback(JS::CallbackTracer* trcArg,
-                                               void** thingp, JS::TraceKind kind)
+ObjectGroupCycleCollectorTracer::trace(void** thingp, JS::TraceKind kind)
 {
-    ObjectGroupCycleCollectorTracer* trc = static_cast<ObjectGroupCycleCollectorTracer*>(trcArg);
     JS::GCCellPtr thing(*thingp, kind);
     if (thing.isObject() || thing.isScript()) {
         // Invoke the inner cycle collector callback on this child. It will not
         // recurse back into TraceChildren.
-        trc->innerTracer->invoke(thingp, kind);
+        innerTracer->trace(thingp, kind);
         return;
     }
@@ -347,11 +345,11 @@ TraceObjectGroupCycleCollectorChildrenCallback(JS::CallbackTracer* trcArg,
     // via the provided worklist rather than continuing to recurse.
     ObjectGroup* group = static_cast<ObjectGroup*>(thing.asCell());
     if (group->maybeUnboxedLayout()) {
-        for (size_t i = 0; i < trc->seen.length(); i++) {
-            if (trc->seen[i] == group)
+        for (size_t i = 0; i < seen.length(); i++) {
+            if (seen[i] == group)
                 return;
         }
-        if (trc->seen.append(group) && trc->worklist.append(group)) {
+        if (seen.append(group) && worklist.append(group)) {
             return;
         } else {
             // If append fails, keep tracing normally. The worst that will
@@ -360,7 +358,7 @@ TraceObjectGroupCycleCollectorChildrenCallback(JS::CallbackTracer* trcArg,
         }
     }
-    TraceChildren(trc, thing.asCell(), thing.kind());
+    TraceChildren(this, thing.asCell(), thing.kind());
 }
 void

View file

@@ -64,9 +64,6 @@ struct VerifyNode
 typedef HashMap<void*, VerifyNode*, DefaultHasher<void*>, SystemAllocPolicy> NodeMap;
-static void
-AccumulateEdge(JS::CallbackTracer* jstrc, void** thingp, JS::TraceKind kind);
 /*
  * The verifier data structures are simple. The entire graph is stored in a
  * single block of memory. At the beginning is a VerifyNode for the root
@@ -80,10 +77,13 @@ AccumulateEdge(JS::CallbackTracer* jstrc, void** thingp, JS::TraceKind kind);
  * The nodemap field is a hashtable that maps from the address of the GC thing
  * to the VerifyNode that represents it.
  */
-struct VerifyPreTracer : JS::CallbackTracer
+class js::VerifyPreTracer : public JS::CallbackTracer
 {
     JS::AutoDisableGenerationalGC noggc;
+    void trace(void** thingp, JS::TraceKind kind) override;
+  public:
     /* The gcNumber when the verification began. */
     uint64_t number;
@@ -98,8 +98,7 @@ struct VerifyPreTracer : JS::CallbackTracer
     NodeMap nodemap;
     explicit VerifyPreTracer(JSRuntime* rt)
-      : JS::CallbackTracer(rt, AccumulateEdge), noggc(rt), number(rt->gc.gcNumber()), count(0),
-        root(nullptr)
+      : JS::CallbackTracer(rt), noggc(rt), number(rt->gc.gcNumber()), count(0), root(nullptr)
     {}
     ~VerifyPreTracer() {
~VerifyPreTracer() {
@@ -111,25 +110,23 @@ struct VerifyPreTracer : JS::CallbackTracer
  * This function builds up the heap snapshot by adding edges to the current
  * node.
  */
-static void
-AccumulateEdge(JS::CallbackTracer* jstrc, void** thingp, JS::TraceKind kind)
+void
+VerifyPreTracer::trace(void** thingp, JS::TraceKind kind)
 {
-    VerifyPreTracer* trc = static_cast<VerifyPreTracer*>(jstrc);
     MOZ_ASSERT(!IsInsideNursery(*reinterpret_cast<Cell**>(thingp)));
-    trc->edgeptr += sizeof(EdgeValue);
-    if (trc->edgeptr >= trc->term) {
-        trc->edgeptr = trc->term;
+    edgeptr += sizeof(EdgeValue);
+    if (edgeptr >= term) {
+        edgeptr = term;
         return;
     }
-    VerifyNode* node = trc->curnode;
+    VerifyNode* node = curnode;
     uint32_t i = node->count;
     node->edges[i].thing = *thingp;
     node->edges[i].kind = kind;
-    node->edges[i].label = trc->contextName();
+    node->edges[i].label = contextName();
     node->count++;
 }
@@ -251,12 +248,10 @@ IsMarkedOrAllocated(TenuredCell* cell)
     return cell->isMarked() || cell->arenaHeader()->allocatedDuringIncremental;
 }
-static void
-CheckEdge(JS::CallbackTracer* jstrc, void** thingp, JS::TraceKind kind);
 struct CheckEdgeTracer : public JS::CallbackTracer {
     VerifyNode* node;
-    explicit CheckEdgeTracer(JSRuntime* rt) : JS::CallbackTracer(rt, CheckEdge), node(nullptr) {}
+    explicit CheckEdgeTracer(JSRuntime* rt) : JS::CallbackTracer(rt), node(nullptr) {}
+    void trace(void** thingp, JS::TraceKind kind) override;
 };
 static const uint32_t MAX_VERIFIER_EDGES = 1000;
@@ -268,12 +263,9 @@ static const uint32_t MAX_VERIFIER_EDGES = 1000;
  * non-nullptr edges (i.e., the ones from the original snapshot that must have
  * been modified) must point to marked objects.
  */
-static void
-CheckEdge(JS::CallbackTracer* jstrc, void** thingp, JS::TraceKind kind)
+void
+CheckEdgeTracer::trace(void** thingp, JS::TraceKind kind)
 {
-    CheckEdgeTracer* trc = static_cast<CheckEdgeTracer*>(jstrc);
-    VerifyNode* node = trc->node;
     /* Avoid n^2 behavior. */
     if (node->count > MAX_VERIFIER_EDGES)
         return;
@@ -308,7 +300,7 @@ AssertMarkedOrAllocated(const EdgeValue& edge)
 bool
 gc::GCRuntime::endVerifyPreBarriers()
 {
-    VerifyPreTracer* trc = static_cast<VerifyPreTracer*>(verifyPreData);
+    VerifyPreTracer* trc = verifyPreData;
     if (!trc)
         return false;
@@ -390,8 +382,8 @@ gc::GCRuntime::maybeVerifyPreBarriers(bool always)
     if (rt->mainThread.suppressGC)
         return;
-    if (VerifyPreTracer* trc = static_cast<VerifyPreTracer*>(verifyPreData)) {
-        if (++trc->count < zealFrequency && !always)
+    if (verifyPreData) {
+        if (++verifyPreData->count < zealFrequency && !always)
             return;
         endVerifyPreBarriers();
@@ -410,8 +402,8 @@ js::gc::MaybeVerifyBarriers(JSContext* cx, bool always)
 void
 js::gc::GCRuntime::finishVerifier()
 {
-    if (VerifyPreTracer* trc = static_cast<VerifyPreTracer*>(verifyPreData)) {
-        js_delete(trc);
+    if (verifyPreData) {
+        js_delete(verifyPreData);
         verifyPreData = nullptr;
     }
 }
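VerifyPreTracer::trace appends fixed-size edge records into one flat snapshot buffer, bumping edgeptr and clamping it at term so an oversized graph degrades gracefully instead of writing out of bounds. The record layout implied by the field accesses above looks roughly like this (a sketch; the real structs are defined near the top of this file):

    struct EdgeValue {
        void* thing;          // target of the edge
        JS::TraceKind kind;   // what kind of GC thing it points at
        const char* label;    // edge name captured from the tracing context
    };

    struct VerifyNode {
        void* thing;
        JS::TraceKind kind;
        uint32_t count;       // edges recorded for this node so far
        EdgeValue edges[1];   // variable-length tail inside the big buffer
    };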

View file

@@ -8,11 +8,7 @@
 #include "jsapi-tests/tests.h"
 class CCWTestTracer : public JS::CallbackTracer {
-    static void staticCallback(JS::CallbackTracer* trc, void** thingp, JS::TraceKind kind) {
-        static_cast<CCWTestTracer*>(trc)->callback(thingp, kind);
-    }
-    void callback(void** thingp, JS::TraceKind kind) {
+    void trace(void** thingp, JS::TraceKind kind) {
         numberOfThingsTraced++;
         printf("*thingp = %p\n", *thingp);
@@ -32,7 +28,7 @@ class CCWTestTracer : public JS::CallbackTracer {
     JS::TraceKind expectedKind;
     CCWTestTracer(JSContext* cx, void** expectedThingp, JS::TraceKind expectedKind)
-      : JS::CallbackTracer(JS_GetRuntime(cx), staticCallback),
+      : JS::CallbackTracer(JS_GetRuntime(cx)),
         okay(true),
         numberOfThingsTraced(0),
         expectedThingp(expectedThingp),

View file

@@ -889,8 +889,8 @@ struct DumpHeapTracer : public JS::CallbackTracer, public WeakMapTracer
     const char* prefix;
     FILE* output;
-    DumpHeapTracer(FILE* fp, JSRuntime* rt, JSTraceCallback callback)
-      : JS::CallbackTracer(rt, callback, DoNotTraceWeakMaps),
+    DumpHeapTracer(FILE* fp, JSRuntime* rt)
+      : JS::CallbackTracer(rt, DoNotTraceWeakMaps),
         js::WeakMapTracer(rt), prefix(""), output(fp)
     {}
@@ -903,6 +903,8 @@ struct DumpHeapTracer : public JS::CallbackTracer, public WeakMapTracer
         fprintf(output, "WeakMapEntry map=%p key=%p keyDelegate=%p value=%p\n",
                 map, key.asCell(), kdelegate, value.asCell());
     }
+    void trace(void** thingp, JS::TraceKind kind) override;
 };
 static char
@@ -955,16 +957,15 @@ DumpHeapVisitCell(JSRuntime* rt, void* data, void* thing,
     JS_TraceChildren(dtrc, thing, traceKind);
 }
-static void
-DumpHeapVisitGCThing(JS::CallbackTracer* trc, void** thingp, JS::TraceKind kind)
+void
+DumpHeapTracer::trace(void** thingp, JS::TraceKind kind)
 {
     if (gc::IsInsideNursery((js::gc::Cell*)*thingp))
         return;
-    DumpHeapTracer* dtrc = static_cast<DumpHeapTracer*>(trc);
     char buffer[1024];
-    dtrc->getTracingEdgeName(buffer, sizeof(buffer));
-    fprintf(dtrc->output, "%s%p %c %s\n", dtrc->prefix, *thingp, MarkDescriptor(*thingp), buffer);
+    getTracingEdgeName(buffer, sizeof(buffer));
+    fprintf(output, "%s%p %c %s\n", prefix, *thingp, MarkDescriptor(*thingp), buffer);
 }
 void
@@ -973,7 +974,7 @@ js::DumpHeap(JSRuntime* rt, FILE* fp, js::DumpHeapNurseryBehaviour nurseryBehaviour)
     if (nurseryBehaviour == js::CollectNurseryBeforeDump)
         rt->gc.evictNursery(JS::gcreason::API);
-    DumpHeapTracer dtrc(fp, rt, DumpHeapVisitGCThing);
+    DumpHeapTracer dtrc(fp, rt);
     fprintf(dtrc.output, "# Roots.\n");
     TraceRuntime(&dtrc);
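Since DumpHeapTracer now carries its trace() override with it, js::DumpHeap no longer threads a callback through. Callers stay a one-liner; a usage sketch:

    #include <cstdio>
    #include "jsfriendapi.h"

    void DumpMyHeap(JSRuntime* rt) {
        // Evict the nursery first so every dumped cell has a stable address.
        js::DumpHeap(rt, stderr, js::CollectNurseryBeforeDump);
    }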

View file

@@ -2206,9 +2206,8 @@ GCRuntime::relocateArenas(Zone* zone, JS::gcreason::Reason reason, SliceBudget&
     return true;
 }
 void
-MovingTracer::Visit(JS::CallbackTracer* jstrc, void** thingp, JS::TraceKind kind)
+MovingTracer::trace(void** thingp, JS::TraceKind kind)
 {
     TenuredCell* thing = TenuredCell::fromPointer(*thingp);
@@ -3678,10 +3677,10 @@ GCRuntime::shouldPreserveJITCode(JSCompartment* comp, int64_t currentTime,
 #ifdef DEBUG
 class CompartmentCheckTracer : public JS::CallbackTracer
 {
+    void trace(void** thingp, JS::TraceKind kind) override;
   public:
-    CompartmentCheckTracer(JSRuntime* rt, JSTraceCallback callback)
-      : JS::CallbackTracer(rt, callback)
-    {}
+    explicit CompartmentCheckTracer(JSRuntime* rt) : JS::CallbackTracer(rt) {}
     Cell* src;
     JS::TraceKind srcKind;
@@ -3714,31 +3713,23 @@ InCrossCompartmentMap(JSObject* src, Cell* dst, JS::TraceKind dstKind)
     return false;
 }
-static void
-CheckCompartment(CompartmentCheckTracer* trc, JSCompartment* thingCompartment,
-                 Cell* thing, JS::TraceKind kind)
-{
-    MOZ_ASSERT(thingCompartment == trc->compartment ||
-               trc->runtime()->isAtomsCompartment(thingCompartment) ||
-               (trc->srcKind == JS::TraceKind::Object &&
-                InCrossCompartmentMap((JSObject*)trc->src, thing, kind)));
-}
 struct MaybeCompartmentFunctor {
     template <typename T> JSCompartment* operator()(T* t) { return t->maybeCompartment(); }
 };
-static void
-CheckCompartmentCallback(JS::CallbackTracer* trcArg, void** thingp, JS::TraceKind kind)
+void
+CompartmentCheckTracer::trace(void** thingp, JS::TraceKind kind)
 {
-    CompartmentCheckTracer* trc = static_cast<CompartmentCheckTracer*>(trcArg);
     TenuredCell* thing = TenuredCell::fromPointer(*thingp);
     JSCompartment* comp = CallTyped(MaybeCompartmentFunctor(), thing, kind);
-    if (comp && trc->compartment)
-        CheckCompartment(trc, comp, thing, kind);
-    else
-        MOZ_ASSERT(thing->zone() == trc->zone || thing->zone()->isAtomsZone());
+    if (comp && compartment) {
+        MOZ_ASSERT(comp == compartment || runtime()->isAtomsCompartment(comp) ||
+                   (srcKind == JS::TraceKind::Object &&
+                    InCrossCompartmentMap(static_cast<JSObject*>(src), thing, kind)));
+    } else {
+        MOZ_ASSERT(thing->zone() == zone || thing->zone()->isAtomsZone());
+    }
 }
void
@@ -3747,7 +3738,7 @@ GCRuntime::checkForCompartmentMismatches()
     if (disableStrictProxyCheckingCount)
         return;
-    CompartmentCheckTracer trc(rt, CheckCompartmentCallback);
+    CompartmentCheckTracer trc(rt);
     for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
         trc.zone = zone;
         for (auto thingKind : AllAllocKinds()) {

View file

@@ -111,11 +111,7 @@ class SimpleEdgeVectorTracer : public JS::CallbackTracer {
     // True if we should populate the edge's names.
     bool wantNames;
-    static void staticCallback(JS::CallbackTracer* trc, void** thingp, JS::TraceKind kind) {
-        static_cast<SimpleEdgeVectorTracer*>(trc)->callback(thingp, kind);
-    }
-    void callback(void** thingp, JS::TraceKind kind) {
+    void trace(void** thingp, JS::TraceKind kind) {
         if (!okay)
             return;
@@ -154,7 +150,7 @@ class SimpleEdgeVectorTracer : public JS::CallbackTracer {
     bool okay;
     SimpleEdgeVectorTracer(JSContext* cx, SimpleEdgeVector* vec, bool wantNames)
-      : JS::CallbackTracer(JS_GetRuntime(cx), staticCallback),
+      : JS::CallbackTracer(JS_GetRuntime(cx)),
         vec(vec),
         wantNames(wantNames),
         okay(true)

View file

@@ -120,19 +120,15 @@ public:
 } // namespace mozilla
-static void
-TraceWeakMappingChild(JS::CallbackTracer* aTrc, void** aThingp,
-                      JS::TraceKind aKind);
 struct NoteWeakMapChildrenTracer : public JS::CallbackTracer
 {
   NoteWeakMapChildrenTracer(JSRuntime* aRt,
                             nsCycleCollectionNoteRootCallback& aCb)
-    : JS::CallbackTracer(aRt, TraceWeakMappingChild), mCb(aCb),
-      mTracedAny(false), mMap(nullptr), mKey(nullptr),
-      mKeyDelegate(nullptr)
+    : JS::CallbackTracer(aRt), mCb(aCb), mTracedAny(false), mMap(nullptr),
+      mKey(nullptr), mKeyDelegate(nullptr)
   {
   }
+  void trace(void** aThingp, JS::TraceKind aKind) override;
   nsCycleCollectionNoteRootCallback& mCb;
   bool mTracedAny;
   JSObject* mMap;
@@ -140,29 +136,24 @@ struct NoteWeakMapChildrenTracer : public JS::CallbackTracer
   JSObject* mKeyDelegate;
 };
-static void
-TraceWeakMappingChild(JS::CallbackTracer* aTrc, void** aThingp,
-                      JS::TraceKind aKind)
+void
+NoteWeakMapChildrenTracer::trace(void** aThingp, JS::TraceKind aKind)
 {
-  MOZ_ASSERT(aTrc->hasCallback(TraceWeakMappingChild));
-  NoteWeakMapChildrenTracer* tracer =
-    static_cast<NoteWeakMapChildrenTracer*>(aTrc);
   JS::GCCellPtr thing(*aThingp, aKind);
   if (thing.isString()) {
     return;
   }
-  if (!JS::GCThingIsMarkedGray(thing) && !tracer->mCb.WantAllTraces()) {
+  if (!JS::GCThingIsMarkedGray(thing) && !mCb.WantAllTraces()) {
     return;
   }
   if (AddToCCKind(thing.kind())) {
-    tracer->mCb.NoteWeakMapping(tracer->mMap, tracer->mKey,
-                                tracer->mKeyDelegate, thing);
-    tracer->mTracedAny = true;
+    mCb.NoteWeakMapping(mMap, mKey, mKeyDelegate, thing);
+    mTracedAny = true;
   } else {
-    JS_TraceChildren(aTrc, thing.asCell(), thing.kind());
+    JS_TraceChildren(this, thing.asCell(), thing.kind());
   }
 }
@@ -362,27 +353,21 @@ JSZoneParticipant::Traverse(void* aPtr, nsCycleCollectionTraversalCallback& aCb)
   return NS_OK;
 }
-static void
-NoteJSChildTracerShim(JS::CallbackTracer* aTrc, void** aThingp,
-                      JS::TraceKind aTraceKind);
 struct TraversalTracer : public JS::CallbackTracer
 {
   TraversalTracer(JSRuntime* aRt, nsCycleCollectionTraversalCallback& aCb)
-    : JS::CallbackTracer(aRt, NoteJSChildTracerShim, DoNotTraceWeakMaps),
-      mCb(aCb)
+    : JS::CallbackTracer(aRt, DoNotTraceWeakMaps), mCb(aCb)
   {
   }
+  void trace(void** aThingp, JS::TraceKind aTraceKind) override;
   nsCycleCollectionTraversalCallback& mCb;
 };
 static void
-NoteJSChild(JS::CallbackTracer* aTrc, JS::GCCellPtr aThing)
+NoteJSChild(TraversalTracer* aTrc, JS::GCCellPtr aThing)
 {
-  TraversalTracer* tracer = static_cast<TraversalTracer*>(aTrc);
   // Don't traverse non-gray objects, unless we want all traces.
-  if (!JS::GCThingIsMarkedGray(aThing) && !tracer->mCb.WantAllTraces()) {
+  if (!JS::GCThingIsMarkedGray(aThing) && !aTrc->mCb.WantAllTraces()) {
     return;
   }
@@ -394,15 +379,15 @@ NoteJSChild(JS::CallbackTracer* aTrc, JS::GCCellPtr aThing)
    * use special APIs to handle such chains iteratively.
    */
   if (AddToCCKind(aThing.kind())) {
-    if (MOZ_UNLIKELY(tracer->mCb.WantDebugInfo())) {
+    if (MOZ_UNLIKELY(aTrc->mCb.WantDebugInfo())) {
       char buffer[200];
-      tracer->getTracingEdgeName(buffer, sizeof(buffer));
-      tracer->mCb.NoteNextEdgeName(buffer);
+      aTrc->getTracingEdgeName(buffer, sizeof(buffer));
+      aTrc->mCb.NoteNextEdgeName(buffer);
     }
     if (aThing.isObject()) {
-      tracer->mCb.NoteJSObject(aThing.toObject());
+      aTrc->mCb.NoteJSObject(aThing.toObject());
     } else {
-      tracer->mCb.NoteJSScript(aThing.toScript());
+      aTrc->mCb.NoteJSScript(aThing.toScript());
     }
   } else if (aThing.isShape()) {
     // The maximum depth of traversal when tracing a Shape is unbounded, due to
@@ -418,12 +403,11 @@ NoteJSChild(JS::CallbackTracer* aTrc, JS::GCCellPtr aThing)
   }
 }
-static void
-NoteJSChildTracerShim(JS::CallbackTracer* aTrc, void** aThingp,
-                      JS::TraceKind aTraceKind)
+void
+TraversalTracer::trace(void** aThingp, JS::TraceKind aTraceKind)
 {
   JS::GCCellPtr thing(*aThingp, aTraceKind);
-  NoteJSChild(aTrc, thing);
+  NoteJSChild(this, thing);
 }
 static void