Back out 2778f515ed5a (bug 723286) on suspicion of causing OSX64 crashes

Matt Brubeck 2012-02-29 16:18:22 -08:00
Parent 3de59e6211
Commit 85888b116e
17 changed files with 242 additions and 212 deletions
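The patch being backed out (bug 723286) had split SpiderMonkey's single GC callback into a runtime-based JSGCCallback plus a separate context-based JSFinalizeCallback; this backout restores the one context-based, JSBool-returning callback. As a reference point for the hunks below, a minimal sketch of the restored API; ExampleGCCallback and InstallExampleCallback are illustrative names, not part of the patch:

static JSBool
ExampleGCCallback(JSContext *cx, JSGCStatus status)
{
    switch (status) {
      case JSGC_BEGIN:        /* outside the GC lock; JS_FALSE here defers the GC */ break;
      case JSGC_MARK_END:     /* within the GC lock, after marking */ break;
      case JSGC_FINALIZE_END: /* within the GC lock, after finalization */ break;
      case JSGC_END:          /* outside the GC lock; collection finished */ break;
    }
    return JS_TRUE;
}

static void
InstallExampleCallback(JSContext *cx, JSRuntime *rt)   /* hypothetical setup hook */
{
    /* Both setters return the previously installed callback (see the jsapi.cpp hunk below). */
    JSGCCallback prev = JS_SetGCCallback(cx, ExampleGCCallback);    /* via a context */
    prev = JS_SetGCCallbackRT(rt, ExampleGCCallback);               /* via the runtime */
    (void) prev;
}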

View file

@ -220,8 +220,8 @@ static JSClass sNPObjectMemberClass =
static void
OnWrapperDestroyed();
static void
DelayedReleaseGCCallback(JSRuntime* rt, JSGCStatus status)
static JSBool
DelayedReleaseGCCallback(JSContext* cx, JSGCStatus status)
{
if (JSGC_END == status) {
// Take ownership of sDelayedReleases and null it out now. The
@ -238,6 +238,7 @@ DelayedReleaseGCCallback(JSRuntime* rt, JSGCStatus status)
}
}
}
return JS_TRUE;
}
static void

View file

@ -507,12 +507,10 @@ DumpHeap(JSContext *cx,
}
}
ok = JS_DumpHeap(JS_GetRuntime(cx), dumpFile, startThing, startTraceKind, thingToFind,
ok = JS_DumpHeap(cx, dumpFile, startThing, startTraceKind, thingToFind,
maxDepth, thingToIgnore);
if (dumpFile != stdout)
fclose(dumpFile);
if (!ok)
JS_ReportOutOfMemory(cx);
return ok;
not_traceable_arg:

View file

@ -2835,7 +2835,7 @@ jsdService::DumpHeap(const nsACString &fileName)
rv = NS_ERROR_FAILURE;
} else {
JSContext *cx = JSD_GetDefaultJSContext (mCx);
if (!JS_DumpHeap(JS_GetRuntime(cx), file, NULL, JSTRACE_OBJECT, NULL, (size_t)-1, NULL))
if (!JS_DumpHeap(cx, file, NULL, JSTRACE_OBJECT, NULL, (size_t)-1, NULL))
rv = NS_ERROR_FAILURE;
if (file != stdout)
fclose(file);
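Both JS_DumpHeap shapes appear in this backout: the JSRuntime-taking form being removed and the JSContext-taking form being restored. A hedged wrapper showing the restored call shape; the argument readings (NULL startThing as the whole heap, (size_t)-1 as an effectively unlimited depth) are inferred from the jsapi.h comment excerpted further down, not stated in this hunk:

static JSBool
DumpWholeHeap(JSContext *cx)   /* hypothetical wrapper around the call above */
{
    return JS_DumpHeap(cx, stdout, NULL /* startThing: whole heap */, JSTRACE_OBJECT,
                       NULL /* thingToFind */, (size_t)-1 /* maxDepth */,
                       NULL /* thingToIgnore */);
}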

View file

@ -23,11 +23,12 @@ struct StringWrapper
bool strOk;
} sw;
void
FinalizeCallback(JSContext *cx, JSFinalizeStatus status)
JSBool
GCCallback(JSContext *cx, JSGCStatus status)
{
if (status == JSFINALIZE_START)
if (status == JSGC_MARK_END)
sw.strOk = !JS_IsAboutToBeFinalized(sw.str);
return true;
}
BEGIN_TEST(testInternAcrossGC)
@ -35,7 +36,7 @@ BEGIN_TEST(testInternAcrossGC)
sw.str = JS_InternString(cx, "wrapped chars that another test shouldn't be using");
sw.strOk = false;
CHECK(sw.str);
JS_SetFinalizeCallback(rt, FinalizeCallback);
JS_SetGCCallback(cx, GCCallback);
JS_GC(cx);
CHECK(sw.strOk);
return true;

View file

@ -755,7 +755,6 @@ JSRuntime::JSRuntime()
#endif
gcCallback(NULL),
gcSliceCallback(NULL),
gcFinalizeCallback(NULL),
gcMallocBytes(0),
gcBlackRootsTraceOp(NULL),
gcBlackRootsData(NULL),
@ -2429,9 +2428,9 @@ JS_SetExtraGCRootsTracer(JSRuntime *rt, JSTraceDataOp traceOp, void *data)
}
JS_PUBLIC_API(void)
JS_TracerInit(JSTracer *trc, JSRuntime *rt, JSTraceCallback callback)
JS_TracerInit(JSTracer *trc, JSContext *cx, JSTraceCallback callback)
{
InitTracer(trc, rt, callback);
InitTracer(trc, cx->runtime, cx, callback);
}
JS_PUBLIC_API(void)
@ -2606,7 +2605,7 @@ struct JSHeapDumpNode {
typedef struct JSDumpingTracer {
JSTracer base;
JSDHashTable visited;
bool ok;
JSBool ok;
void *startThing;
void *thingToFind;
void *thingToIgnore;
@ -2620,6 +2619,7 @@ DumpNotify(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
void *thing = *thingp;
JSDumpingTracer *dtrc;
JSContext *cx;
JSDHashEntryStub *entry;
JS_ASSERT(trc->callback == DumpNotify);
@ -2628,6 +2628,8 @@ DumpNotify(JSTracer *trc, void **thingp, JSGCTraceKind kind)
if (!dtrc->ok || thing == dtrc->thingToIgnore)
return;
cx = trc->context;
/*
* Check if we have already seen thing unless it is thingToFind to include
* it to the graph each time we reach it and print all live things that
@ -2648,7 +2650,8 @@ DumpNotify(JSTracer *trc, void **thingp, JSGCTraceKind kind)
entry = (JSDHashEntryStub *)
JS_DHashTableOperate(&dtrc->visited, thing, JS_DHASH_ADD);
if (!entry) {
dtrc->ok = false;
JS_ReportOutOfMemory(cx);
dtrc->ok = JS_FALSE;
return;
}
if (entry->key)
@ -2661,7 +2664,7 @@ DumpNotify(JSTracer *trc, void **thingp, JSGCTraceKind kind)
size_t bytes = offsetof(JSHeapDumpNode, edgeName) + edgeNameSize;
JSHeapDumpNode *node = (JSHeapDumpNode *) OffTheBooks::malloc_(bytes);
if (!node) {
dtrc->ok = false;
dtrc->ok = JS_FALSE;
return;
}
@ -2682,6 +2685,7 @@ DumpNode(JSDumpingTracer *dtrc, FILE* fp, JSHeapDumpNode *node)
{
JSHeapDumpNode *prev, *following;
size_t chainLimit;
JSBool ok;
enum { MAX_PARENTS_TO_PRINT = 10 };
JS_PrintTraceThingInfo(dtrc->buffer, sizeof dtrc->buffer,
@ -2714,21 +2718,21 @@ DumpNode(JSDumpingTracer *dtrc, FILE* fp, JSHeapDumpNode *node)
node = prev;
prev = following;
bool ok = true;
ok = JS_TRUE;
do {
/* Loop must continue even when !ok to restore the parent chain. */
if (ok) {
if (!prev) {
/* Print edge from some runtime root or startThing. */
if (fputs(node->edgeName, fp) < 0)
ok = false;
ok = JS_FALSE;
} else {
JS_PrintTraceThingInfo(dtrc->buffer, sizeof dtrc->buffer,
&dtrc->base, prev->thing, prev->kind,
JS_FALSE);
if (fprintf(fp, "(%p %s).%s",
prev->thing, dtrc->buffer, node->edgeName) < 0) {
ok = false;
ok = JS_FALSE;
}
}
}
@ -2742,7 +2746,7 @@ DumpNode(JSDumpingTracer *dtrc, FILE* fp, JSHeapDumpNode *node)
}
JS_PUBLIC_API(JSBool)
JS_DumpHeap(JSRuntime *rt, FILE *fp, void* startThing, JSGCTraceKind startKind,
JS_DumpHeap(JSContext *cx, FILE *fp, void* startThing, JSGCTraceKind startKind,
void *thingToFind, size_t maxDepth, void *thingToIgnore)
{
JSDumpingTracer dtrc;
@ -2753,11 +2757,12 @@ JS_DumpHeap(JSRuntime *rt, FILE *fp, void* startThing, JSGCTraceKind startKind,
if (maxDepth == 0)
return JS_TRUE;
JS_TracerInit(&dtrc.base, rt, DumpNotify);
JS_TracerInit(&dtrc.base, cx, DumpNotify);
if (!JS_DHashTableInit(&dtrc.visited, JS_DHashGetStubOps(),
NULL, sizeof(JSDHashEntryStub),
JS_DHASH_DEFAULT_CAPACITY(100))) {
return false;
JS_ReportOutOfMemory(cx);
return JS_FALSE;
}
dtrc.ok = JS_TRUE;
dtrc.startThing = startThing;
@ -2858,18 +2863,23 @@ JS_MaybeGC(JSContext *cx)
MaybeGC(cx);
}
JS_PUBLIC_API(void)
JS_SetGCCallback(JSRuntime *rt, JSGCCallback cb)
JS_PUBLIC_API(JSGCCallback)
JS_SetGCCallback(JSContext *cx, JSGCCallback cb)
{
AssertNoGC(rt);
rt->gcCallback = cb;
AssertNoGC(cx);
CHECK_REQUEST(cx);
return JS_SetGCCallbackRT(cx->runtime, cb);
}
JS_PUBLIC_API(void)
JS_SetFinalizeCallback(JSRuntime *rt, JSFinalizeCallback cb)
JS_PUBLIC_API(JSGCCallback)
JS_SetGCCallbackRT(JSRuntime *rt, JSGCCallback cb)
{
JSGCCallback oldcb;
AssertNoGC(rt);
rt->gcFinalizeCallback = cb;
oldcb = rt->gcCallback;
rt->gcCallback = cb;
return oldcb;
}
JS_PUBLIC_API(JSBool)
@ -3342,12 +3352,6 @@ JS_IsNative(JSObject *obj)
return obj->isNative();
}
JS_PUBLIC_API(JSRuntime *)
JS_GetObjectRuntime(JSObject *obj)
{
return obj->compartment()->rt;
}
JS_PUBLIC_API(JSBool)
JS_FreezeObject(JSContext *cx, JSObject *obj)
{

View file

@ -1426,20 +1426,17 @@ typedef JSBool
(* JSContextCallback)(JSContext *cx, unsigned contextOp);
typedef enum JSGCStatus {
/* These callbacks happen outside the GC lock. */
JSGC_BEGIN,
JSGC_END
JSGC_END,
/* These callbacks happen within the GC lock. */
JSGC_MARK_END,
JSGC_FINALIZE_END
} JSGCStatus;
typedef void
(* JSGCCallback)(JSRuntime *rt, JSGCStatus status);
typedef enum JSFinalizeStatus {
JSFINALIZE_START,
JSFINALIZE_END
} JSFinalizeStatus;
typedef void
(* JSFinalizeCallback)(JSContext *cx, JSFinalizeStatus status);
typedef JSBool
(* JSGCCallback)(JSContext *cx, JSGCStatus status);
/*
* Generic trace operation that calls JS_CallTracer on each traceable thing
@ -3122,6 +3119,7 @@ typedef void
struct JSTracer {
JSRuntime *runtime;
JSContext *context;
JSTraceCallback callback;
JSTraceNamePrinter debugPrinter;
const void *debugPrintArg;
@ -3220,7 +3218,7 @@ JS_CallTracer(JSTracer *trc, void *thing, JSGCTraceKind kind);
* API for JSTraceCallback implementations.
*/
extern JS_PUBLIC_API(void)
JS_TracerInit(JSTracer *trc, JSRuntime *rt, JSTraceCallback callback);
JS_TracerInit(JSTracer *trc, JSContext *cx, JSTraceCallback callback);
extern JS_PUBLIC_API(void)
JS_TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind);
@ -3253,7 +3251,7 @@ JS_GetTraceEdgeName(JSTracer *trc, char *buffer, int bufferSize);
* thingToIgnore: thing to ignore during the graph traversal when non-null.
*/
extern JS_PUBLIC_API(JSBool)
JS_DumpHeap(JSRuntime *rt, FILE *fp, void* startThing, JSGCTraceKind kind,
JS_DumpHeap(JSContext *cx, FILE *fp, void* startThing, JSGCTraceKind kind,
void *thingToFind, size_t maxDepth, void *thingToIgnore);
#endif
@ -3270,11 +3268,11 @@ JS_CompartmentGC(JSContext *cx, JSCompartment *comp);
extern JS_PUBLIC_API(void)
JS_MaybeGC(JSContext *cx);
extern JS_PUBLIC_API(void)
JS_SetGCCallback(JSRuntime *rt, JSGCCallback cb);
extern JS_PUBLIC_API(JSGCCallback)
JS_SetGCCallback(JSContext *cx, JSGCCallback cb);
extern JS_PUBLIC_API(void)
JS_SetFinalizeCallback(JSRuntime *rt, JSFinalizeCallback cb);
extern JS_PUBLIC_API(JSGCCallback)
JS_SetGCCallbackRT(JSRuntime *rt, JSGCCallback cb);
extern JS_PUBLIC_API(JSBool)
JS_IsGCMarkingTracer(JSTracer *trc);
@ -3710,9 +3708,6 @@ JS_IsExtensible(JSObject *obj);
extern JS_PUBLIC_API(JSBool)
JS_IsNative(JSObject *obj);
extern JS_PUBLIC_API(JSRuntime *)
JS_GetObjectRuntime(JSObject *obj);
/*
* Unlike JS_NewObject, JS_NewObjectWithGivenProto does not compute a default
* proto if proto's actual parameter value is null.
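
The jsapi.h hunks above also restore the context field on JSTracer and the JSContext-taking JS_TracerInit. A minimal caller sketch under the restored declarations; DumpEdgeCallback and TraceObjectEdges are hypothetical, not from this patch:

static void
DumpEdgeCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
    /* A real callback would record or follow the edge to *thingp here. */
}

static void
TraceObjectEdges(JSContext *cx, JSObject *obj)
{
    JSTracer trc;
    JS_TracerInit(&trc, cx, DumpEdgeCallback);    /* cx-based again; sets trc.context */
    JS_TraceChildren(&trc, obj, JSTRACE_OBJECT);  /* visit obj's outgoing edges */
}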

View file

@ -437,7 +437,6 @@ struct JSRuntime : js::RuntimeFriendFields
JSGCCallback gcCallback;
js::GCSliceCallback gcSliceCallback;
JSFinalizeCallback gcFinalizeCallback;
private:
/*

View file

@ -468,17 +468,17 @@ struct DumpingChildInfo {
{}
};
typedef HashSet<void *, DefaultHasher<void *>, SystemAllocPolicy> PtrSet;
typedef HashSet<void *, DefaultHasher<void *>, ContextAllocPolicy> PtrSet;
struct JSDumpHeapTracer : public JSTracer {
PtrSet visited;
FILE *output;
Vector<DumpingChildInfo, 0, SystemAllocPolicy> nodes;
Vector<DumpingChildInfo, 0, ContextAllocPolicy> nodes;
char buffer[200];
bool rootTracing;
JSDumpHeapTracer(FILE *fp)
: output(fp)
JSDumpHeapTracer(JSContext *cx, FILE *fp)
: visited(cx), output(fp), nodes(cx)
{}
};
@ -520,10 +520,10 @@ DumpHeapVisitChild(JSTracer *trc, void **thingp, JSGCTraceKind kind)
}
void
js::DumpHeapComplete(JSRuntime *rt, FILE *fp)
js::DumpHeapComplete(JSContext *cx, FILE *fp)
{
JSDumpHeapTracer dtrc(fp);
JS_TracerInit(&dtrc, rt, DumpHeapPushIfNew);
JSDumpHeapTracer dtrc(cx, fp);
JS_TracerInit(&dtrc, cx, DumpHeapPushIfNew);
if (!dtrc.visited.init(10000))
return;

View file

@ -221,7 +221,7 @@ typedef bool
* fp is the file for the dump output.
*/
extern JS_FRIEND_API(void)
DumpHeapComplete(JSRuntime *rt, FILE *fp);
DumpHeapComplete(JSContext *cx, FILE *fp);
#endif
@ -519,15 +519,15 @@ IsObjectInContextCompartment(const JSObject *obj, const JSContext *cx);
#define JSITER_FOR_OF 0x20 /* harmony for-of loop */
inline uintptr_t
GetNativeStackLimit(const JSRuntime *rt)
GetContextStackLimit(const JSContext *cx)
{
return RuntimeFriendFields::get(rt)->nativeStackLimit;
return RuntimeFriendFields::get(GetRuntime(cx))->nativeStackLimit;
}
#define JS_CHECK_RECURSION(cx, onerror) \
JS_BEGIN_MACRO \
int stackDummy_; \
if (!JS_CHECK_STACK_SIZE(js::GetNativeStackLimit(js::GetRuntime(cx)), &stackDummy_)) { \
if (!JS_CHECK_STACK_SIZE(js::GetContextStackLimit(cx), &stackDummy_)) { \
js_ReportOverRecursed(cx); \
onerror; \
} \

View file

@ -1823,9 +1823,10 @@ js_UnlockGCThingRT(JSRuntime *rt, void *thing)
namespace js {
void
InitTracer(JSTracer *trc, JSRuntime *rt, JSTraceCallback callback)
InitTracer(JSTracer *trc, JSRuntime *rt, JSContext *cx, JSTraceCallback callback)
{
trc->runtime = rt;
trc->context = cx;
trc->callback = callback;
trc->debugPrinter = NULL;
trc->debugPrintArg = NULL;
@ -1893,9 +1894,9 @@ GCMarker::init(bool lazy)
}
void
GCMarker::start(JSRuntime *rt)
GCMarker::start(JSRuntime *rt, JSContext *cx)
{
InitTracer(this, rt, NULL);
InitTracer(this, rt, cx, NULL);
JS_ASSERT(!started);
started = true;
color = BLACK;
@ -2161,7 +2162,8 @@ gc_root_traversal(JSTracer *trc, const RootEntry &entry)
* that mark callbacks are not in place during compartment GCs.
*/
JSTracer checker;
JS_TracerInit(&checker, trc->runtime, EmptyMarkCallback);
JS_ASSERT(trc->runtime == trc->context->runtime);
JS_TracerInit(&checker, trc->context, EmptyMarkCallback);
ConservativeGCTest test = MarkIfGCThingWord(&checker, reinterpret_cast<uintptr_t>(ptr));
if (test != CGCT_VALID && entry.value.name) {
fprintf(stderr,
@ -2967,9 +2969,6 @@ BeginMarkPhase(JSRuntime *rt)
{
GCMarker *gcmarker = &rt->gcMarker;
gcmarker->start(rt);
JS_ASSERT(IS_GC_MARKING_TRACER(gcmarker));
rt->gcStartNumber = rt->gcNumber;
/* Reset weak map list. */
@ -3035,12 +3034,14 @@ MarkGrayAndWeak(JSRuntime *rt)
#ifdef DEBUG
static void
ValidateIncrementalMarking(JSRuntime *rt);
ValidateIncrementalMarking(JSContext *cx);
#endif
static void
EndMarkPhase(JSRuntime *rt)
EndMarkPhase(JSContext *cx)
{
JSRuntime *rt = cx->runtime;
{
gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_MARK);
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_MARK_OTHER);
@ -3051,9 +3052,12 @@ EndMarkPhase(JSRuntime *rt)
#ifdef DEBUG
if (rt->gcIncrementalState != NO_INCREMENTAL)
ValidateIncrementalMarking(rt);
ValidateIncrementalMarking(cx);
#endif
if (rt->gcCallback)
(void) rt->gcCallback(cx, JSGC_MARK_END);
#ifdef DEBUG
/* Make sure that we didn't mark an object in another compartment */
if (rt->gcCurrentCompartment) {
@ -3063,18 +3067,18 @@ EndMarkPhase(JSRuntime *rt)
}
}
#endif
rt->gcMarker.stop();
}
#ifdef DEBUG
static void
ValidateIncrementalMarking(JSRuntime *rt)
ValidateIncrementalMarking(JSContext *cx)
{
typedef HashMap<Chunk *, uintptr_t *, GCChunkHasher, SystemAllocPolicy> BitmapMap;
BitmapMap map;
if (!map.init())
return;
JSRuntime *rt = cx->runtime;
FullGCMarker *gcmarker = &rt->gcMarker;
/* Save existing mark bits. */
@ -3142,7 +3146,7 @@ ValidateIncrementalMarking(JSRuntime *rt)
while (thing < end) {
Cell *cell = (Cell *)thing;
if (bitmap->isMarked(cell, BLACK) && !incBitmap.isMarked(cell, BLACK)) {
JS_DumpHeap(rt, stdout, NULL, JSGCTraceKind(0), NULL, 100000, NULL);
JS_DumpHeap(cx, stdout, NULL, JSGCTraceKind(0), NULL, 100000, NULL);
printf("Assertion cell: %p (%d)\n", (void *)cell, cell->getAllocKind());
}
JS_ASSERT_IF(bitmap->isMarked(cell, BLACK), incBitmap.isMarked(cell, BLACK));
@ -3191,9 +3195,6 @@ SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
*/
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP);
if (rt->gcFinalizeCallback)
rt->gcFinalizeCallback(cx, JSFINALIZE_START);
/* Finalize unreachable (key,value) pairs in all weak maps. */
WeakMapBase::sweepAll(&rt->gcMarker);
@ -3273,21 +3274,12 @@ SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_XPCONNECT);
if (rt->gcFinalizeCallback)
rt->gcFinalizeCallback(cx, JSFINALIZE_END);
if (rt->gcCallback)
(void) rt->gcCallback(cx, JSGC_FINALIZE_END);
}
for (CompartmentsIter c(rt); !c.done(); c.next())
c->setGCLastBytes(c->gcBytes, gckind);
#ifdef JS_THREADSAFE
if (cx->gcBackgroundFree) {
JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
cx->gcBackgroundFree = NULL;
AutoLockGC lock(rt);
rt->gcHelperThread.startBackgroundSweep(cx, gckind == GC_SHRINK);
}
#endif
}
/* Perform mark-and-sweep GC. If comp is set, we perform a single-compartment GC. */
@ -3298,15 +3290,19 @@ MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind)
AutoUnlockGC unlock(rt);
rt->gcMarker.start(rt, cx);
JS_ASSERT(!rt->gcMarker.callback);
BeginMarkPhase(rt);
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK);
SliceBudget budget;
rt->gcMarker.drainMarkStack(budget);
}
EndMarkPhase(rt);
EndMarkPhase(cx);
SweepPhase(cx, gckind);
rt->gcMarker.stop();
}
/*
@ -3329,8 +3325,15 @@ class AutoHeapSession {
/* ...while this class is to be used only for garbage collection. */
class AutoGCSession : AutoHeapSession {
public:
explicit AutoGCSession(JSRuntime *rt, JSCompartment *comp);
explicit AutoGCSession(JSContext *cx, JSCompartment *comp);
~AutoGCSession();
private:
/*
* We should not be depending on cx->compartment in the GC, so set it to
* NULL to look for violations.
*/
SwitchToCompartment switcher;
};
/* Start a new heap session. */
@ -3348,8 +3351,9 @@ AutoHeapSession::~AutoHeapSession()
runtime->gcRunning = false;
}
AutoGCSession::AutoGCSession(JSRuntime *rt, JSCompartment *comp)
: AutoHeapSession(rt)
AutoGCSession::AutoGCSession(JSContext *cx, JSCompartment *comp)
: AutoHeapSession(cx->runtime),
switcher(cx, (JSCompartment *)NULL)
{
JS_ASSERT(!runtime->gcCurrentCompartment);
runtime->gcCurrentCompartment = comp;
@ -3478,9 +3482,12 @@ IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
}
if (rt->gcIncrementalState == MARK_ROOTS) {
rt->gcMarker.start(rt, cx);
JS_ASSERT(IS_GC_MARKING_TRACER(&rt->gcMarker));
for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
c->discardJitCode(cx);
c->barrierMarker_.start(rt);
c->barrierMarker_.start(rt, NULL);
}
BeginMarkPhase(rt);
@ -3496,10 +3503,14 @@ IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
if (!rt->gcMarker.hasBufferedGrayRoots())
sliceBudget.reset();
rt->gcMarker.context = cx;
bool finished = rt->gcMarker.drainMarkStack(sliceBudget);
for (GCCompartmentsIter c(rt); !c.done(); c.next())
for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
c->barrierMarker_.context = cx;
finished &= c->barrierMarker_.drainMarkStack(sliceBudget);
c->barrierMarker_.context = NULL;
}
if (finished) {
JS_ASSERT(rt->gcMarker.isDrained());
@ -3515,10 +3526,11 @@ IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
}
if (rt->gcIncrementalState == SWEEP) {
EndMarkPhase(rt);
EndMarkPhase(cx);
SweepPhase(cx, gckind);
rt->gcMarker.stop();
/* JIT code was already discarded during sweeping. */
for (GCCompartmentsIter c(rt); !c.done(); c.next())
c->barrierMarker_.stop();
@ -3633,7 +3645,7 @@ GCCycle(JSContext *cx, JSCompartment *comp, int64_t budget, JSGCInvocationKind g
if (rt->gcRunning)
return;
AutoGCSession gcsession(rt, comp);
AutoGCSession gcsession(cx, comp);
/* Don't GC if we are reporting an OOM. */
if (rt->inOOMReport)
@ -3671,6 +3683,15 @@ GCCycle(JSContext *cx, JSCompartment *comp, int64_t budget, JSGCInvocationKind g
JS_ASSERT(!c->needsBarrier_);
}
#endif
#ifdef JS_THREADSAFE
if (rt->gcIncrementalState == NO_INCREMENTAL) {
if (cx->gcBackgroundFree) {
JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
cx->gcBackgroundFree = NULL;
rt->gcHelperThread.startBackgroundSweep(cx, gckind == GC_SHRINK);
}
}
#endif
}
static void
@ -3702,9 +3723,15 @@ Collect(JSContext *cx, JSCompartment *comp, int64_t budget,
gcstats::AutoGCSlice agc(rt->gcStats, comp, reason);
do {
/*
* Let the API user decide to defer a GC if it wants to (unless this
* is the last context). Invoke the callback regardless.
*/
if (rt->gcIncrementalState == NO_INCREMENTAL) {
if (JSGCCallback callback = rt->gcCallback)
callback(rt, JSGC_BEGIN);
if (JSGCCallback callback = rt->gcCallback) {
if (!callback(cx, JSGC_BEGIN) && rt->hasContexts())
return;
}
}
{
@ -3716,7 +3743,7 @@ Collect(JSContext *cx, JSCompartment *comp, int64_t budget,
if (rt->gcIncrementalState == NO_INCREMENTAL) {
if (JSGCCallback callback = rt->gcCallback)
callback(rt, JSGC_END);
(void) callback(cx, JSGC_END);
}
/*
@ -3935,7 +3962,7 @@ NewCompartment(JSContext *cx, JSPrincipals *principals)
* resetting the GC.
*/
if (!rt->gcIncrementalCompartment)
compartment->barrierMarker_.start(rt);
compartment->barrierMarker_.start(rt, NULL);
}
if (rt->compartments.append(compartment))
@ -4083,7 +4110,7 @@ struct VerifyNode
EdgeValue edges[1];
};
typedef HashMap<void *, VerifyNode *, DefaultHasher<void *>, SystemAllocPolicy> NodeMap;
typedef HashMap<void *, VerifyNode *> NodeMap;
/*
* The verifier data structures are simple. The entire graph is stored in a
@ -4112,11 +4139,8 @@ struct VerifyTracer : JSTracer {
char *term;
NodeMap nodemap;
VerifyTracer()
: root(NULL) {}
~VerifyTracer() {
js_free(root);
}
VerifyTracer(JSContext *cx) : root(NULL), nodemap(cx) {}
~VerifyTracer() { js_free(root); }
};
/*
@ -4206,13 +4230,13 @@ StartVerifyBarriers(JSContext *cx)
PurgeRuntime(rt);
VerifyTracer *trc = new (js_malloc(sizeof(VerifyTracer))) VerifyTracer;
VerifyTracer *trc = new (js_malloc(sizeof(VerifyTracer))) VerifyTracer(cx);
rt->gcNumber++;
trc->number = rt->gcNumber;
trc->count = 0;
JS_TracerInit(trc, rt, AccumulateEdge);
JS_TracerInit(trc, cx, AccumulateEdge);
const size_t size = 64 * 1024 * 1024;
trc->root = (VerifyNode *)js_malloc(size);
@ -4255,7 +4279,7 @@ StartVerifyBarriers(JSContext *cx)
rt->gcIncrementalState = MARK;
for (CompartmentsIter c(rt); !c.done(); c.next()) {
c->needsBarrier_ = true;
c->barrierMarker_.start(rt);
c->barrierMarker_.start(rt, NULL);
c->arenas.prepareForIncrementalGC(c);
}
@ -4361,7 +4385,7 @@ EndVerifyBarriers(JSContext *cx)
rt->gcVerifyData = NULL;
rt->gcIncrementalState = NO_INCREMENTAL;
JS_TracerInit(trc, rt, MarkFromAutorooter);
JS_TracerInit(trc, cx, MarkFromAutorooter);
AutoGCRooter::traceAll(trc);
@ -4370,10 +4394,10 @@ EndVerifyBarriers(JSContext *cx)
* Verify that all the current roots were reachable previously, or else
* are marked.
*/
JS_TracerInit(trc, rt, CheckReachable);
JS_TracerInit(trc, cx, CheckReachable);
MarkRuntime(trc, true);
JS_TracerInit(trc, rt, CheckEdge);
JS_TracerInit(trc, cx, CheckEdge);
/* Start after the roots. */
VerifyNode *node = NextNode(trc->root);

View file

@ -1410,7 +1410,7 @@ GCDebugSlice(JSContext *cx, int64_t objCount);
namespace js {
void
InitTracer(JSTracer *trc, JSRuntime *rt, JSTraceCallback callback);
InitTracer(JSTracer *trc, JSRuntime *rt, JSContext *cx, JSTraceCallback callback);
#ifdef JS_THREADSAFE
@ -1772,7 +1772,7 @@ struct GCMarker : public JSTracer {
void setSizeLimit(size_t size) { stack.setSizeLimit(size); }
size_t sizeLimit() const { return stack.sizeLimit; }
void start(JSRuntime *rt);
void start(JSRuntime *rt, JSContext *cx);
void stop();
void reset();

View file

@ -1474,7 +1474,7 @@ struct JSCountHeapNode {
typedef struct JSCountHeapTracer {
JSTracer base;
JSDHashTable visited;
bool ok;
JSBool ok;
JSCountHeapNode *traceList;
JSCountHeapNode *recycleList;
} JSCountHeapTracer;
@ -1495,7 +1495,8 @@ CountHeapNotify(JSTracer *trc, void **thingp, JSGCTraceKind kind)
entry = (JSDHashEntryStub *)
JS_DHashTableOperate(&countTracer->visited, thing, JS_DHASH_ADD);
if (!entry) {
countTracer->ok = false;
JS_ReportOutOfMemory(trc->context);
countTracer->ok = JS_FALSE;
return;
}
if (entry->key)
@ -1508,7 +1509,7 @@ CountHeapNotify(JSTracer *trc, void **thingp, JSGCTraceKind kind)
} else {
node = (JSCountHeapNode *) js_malloc(sizeof *node);
if (!node) {
countTracer->ok = false;
countTracer->ok = JS_FALSE;
return;
}
}
@ -1579,14 +1580,14 @@ CountHeap(JSContext *cx, unsigned argc, jsval *vp)
}
}
JS_TracerInit(&countTracer.base, JS_GetRuntime(cx), CountHeapNotify);
JS_TracerInit(&countTracer.base, cx, CountHeapNotify);
if (!JS_DHashTableInit(&countTracer.visited, JS_DHashGetStubOps(),
NULL, sizeof(JSDHashEntryStub),
JS_DHASH_DEFAULT_CAPACITY(100))) {
JS_ReportOutOfMemory(cx);
return JS_FALSE;
}
countTracer.ok = true;
countTracer.ok = JS_TRUE;
countTracer.traceList = NULL;
countTracer.recycleList = NULL;
@ -1611,12 +1612,8 @@ CountHeap(JSContext *cx, unsigned argc, jsval *vp)
js_free(node);
}
JS_DHashTableFinish(&countTracer.visited);
if (!countTracer.ok) {
JS_ReportOutOfMemory(cx);
return false;
}
return JS_NewNumberValue(cx, (double) counter, vp);
return countTracer.ok && JS_NewNumberValue(cx, (double) counter, vp);
}
static jsrefcount finalizeCount = 0;
@ -2536,16 +2533,12 @@ DumpHeap(JSContext *cx, unsigned argc, jsval *vp)
}
}
ok = JS_DumpHeap(JS_GetRuntime(cx), dumpFile, startThing, startTraceKind, thingToFind,
ok = JS_DumpHeap(cx, dumpFile, startThing, startTraceKind, thingToFind,
maxDepth, thingToIgnore);
if (dumpFile != stdout)
fclose(dumpFile);
if (!ok) {
JS_ReportOutOfMemory(cx);
return false;
}
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
return ok;
not_traceable_arg:
JS_ReportError(cx, "argument '%s' is not null or a heap-allocated thing",

View file

@ -133,7 +133,7 @@ class HeapReverser : public JSTracer {
struct Edge {
public:
Edge(char *name, void *origin) : name(name), origin(origin) { }
~Edge() { js_free(name); }
~Edge() { free(name); }
/*
* Move constructor and move assignment. These allow us to live in
@ -166,12 +166,12 @@ class HeapReverser : public JSTracer {
* The result of a reversal is a map from Cells' addresses to Node
* structures describing their incoming edges.
*/
typedef HashMap<void *, Node, DefaultHasher<void *>, SystemAllocPolicy> Map;
typedef HashMap<void *, Node> Map;
Map map;
/* Construct a HeapReverser for |context|'s heap. */
HeapReverser(JSContext *cx) : rooter(cx, 0, NULL), parent(NULL) {
JS_TracerInit(this, JS_GetRuntime(cx), traverseEdgeWithThis);
HeapReverser(JSContext *cx) : map(cx), roots(cx), rooter(cx, 0, NULL), work(cx), parent(NULL) {
JS_TracerInit(this, cx, traverseEdgeWithThis);
}
bool init() { return map.init(); }
@ -193,7 +193,7 @@ class HeapReverser : public JSTracer {
* rule. This is kind of dumb, but JSAPI doesn't provide any less restricted
* way to register arrays of roots.
*/
Vector<jsval, 0, SystemAllocPolicy> roots;
Vector<jsval> roots;
AutoArrayRooter rooter;
/*
@ -232,7 +232,7 @@ class HeapReverser : public JSTracer {
* A stack of work items. We represent the stack explicitly to avoid
* overflowing the C++ stack when traversing long chains of objects.
*/
Vector<Child, 0, SystemAllocPolicy> work;
Vector<Child> work;
/* When traverseEdge is called, the Cell and kind at which the edge originated. */
void *parent;
@ -323,7 +323,7 @@ HeapReverser::getEdgeDescription()
{
if (!debugPrinter && debugPrintIndex == (size_t) -1) {
const char *arg = static_cast<const char *>(debugPrintArg);
char *name = static_cast<char *>(js_malloc(strlen(arg) + 1));
char *name = static_cast<char *>(context->malloc_(strlen(arg) + 1));
if (!name)
return NULL;
strcpy(name, arg);
@ -332,7 +332,7 @@ HeapReverser::getEdgeDescription()
/* Lovely; but a fixed size is required by JSTraceNamePrinter. */
static const int nameSize = 200;
char *name = static_cast<char *>(js_malloc(nameSize));
char *name = static_cast<char *>(context->malloc_(nameSize));
if (!name)
return NULL;
if (debugPrinter)
@ -342,7 +342,7 @@ HeapReverser::getEdgeDescription()
static_cast<const char *>(debugPrintArg), debugPrintIndex);
/* Shrink storage to fit. */
return static_cast<char *>(js_realloc(name, strlen(name) + 1));
return static_cast<char *>(context->realloc_(name, strlen(name) + 1));
}

View file

@ -644,12 +644,10 @@ DumpHeap(JSContext *cx, unsigned argc, jsval *vp)
}
}
ok = JS_DumpHeap(JS_GetRuntime(cx), dumpFile, startThing, startTraceKind, thingToFind,
ok = JS_DumpHeap(cx, dumpFile, startThing, startTraceKind, thingToFind,
maxDepth, thingToIgnore);
if (dumpFile != gOutFile)
fclose(dumpFile);
if (!ok)
JS_ReportOutOfMemory(cx);
return ok;
not_traceable_arg:

View file

@ -654,56 +654,30 @@ SweepCompartment(nsCStringHashKey& aKey, JSCompartment *compartment, void *aClos
return PL_DHASH_NEXT;
}
/* static */ void
XPCJSRuntime::GCCallback(JSRuntime *rt, JSGCStatus status)
// static
JSBool XPCJSRuntime::GCCallback(JSContext *cx, JSGCStatus status)
{
XPCJSRuntime* self = nsXPConnect::GetRuntimeInstance();
if (!self)
return;
return true;
switch (status) {
case JSGC_BEGIN:
{
if (!NS_IsMainThread()) {
return false;
}
// We seem to sometime lose the unrooted global flag. Restore it
// here. FIXME: bug 584495.
JSContext *iter = nsnull;
while (JSContext *acx = JS_ContextIterator(rt, &iter)) {
while (JSContext *acx = JS_ContextIterator(JS_GetRuntime(cx), &iter)) {
if (!js::HasUnrootedGlobal(acx))
JS_ToggleOptions(acx, JSOPTION_UNROOTED_GLOBAL);
}
break;
}
case JSGC_END:
{
// Do any deferred releases of native objects.
#ifdef XPC_TRACK_DEFERRED_RELEASES
printf("XPC - Begin deferred Release of %d nsISupports pointers\n",
self->mNativesToReleaseArray.Length());
#endif
DoDeferredRelease(self->mNativesToReleaseArray);
#ifdef XPC_TRACK_DEFERRED_RELEASES
printf("XPC - End deferred Releases\n");
#endif
self->GetXPConnect()->ClearGCBeforeCC();
break;
}
}
nsTArray<JSGCCallback> callbacks(self->extraGCCallbacks);
for (PRUint32 i = 0; i < callbacks.Length(); ++i)
callbacks[i](rt, status);
}
/* static */ void
XPCJSRuntime::FinalizeCallback(JSContext *cx, JSFinalizeStatus status)
{
XPCJSRuntime* self = nsXPConnect::GetRuntimeInstance();
if (!self)
return;
switch (status) {
case JSFINALIZE_START:
case JSGC_MARK_END:
{
NS_ASSERTION(!self->mDoingFinalization, "bad state");
@ -736,7 +710,7 @@ XPCJSRuntime::FinalizeCallback(JSContext *cx, JSFinalizeStatus status)
self->mDoingFinalization = true;
break;
}
case JSFINALIZE_END:
case JSGC_FINALIZE_END:
{
NS_ASSERTION(self->mDoingFinalization, "bad state");
self->mDoingFinalization = false;
@ -923,7 +897,36 @@ XPCJSRuntime::FinalizeCallback(JSContext *cx, JSFinalizeStatus status)
break;
}
case JSGC_END:
{
// NOTE that this event happens outside of the gc lock in
// the js engine. So this could be simultaneous with the
// events above.
// Do any deferred releases of native objects.
#ifdef XPC_TRACK_DEFERRED_RELEASES
printf("XPC - Begin deferred Release of %d nsISupports pointers\n",
self->mNativesToReleaseArray.Length());
#endif
DoDeferredRelease(self->mNativesToReleaseArray);
#ifdef XPC_TRACK_DEFERRED_RELEASES
printf("XPC - End deferred Releases\n");
#endif
self->GetXPConnect()->ClearGCBeforeCC();
break;
}
default:
break;
}
nsTArray<JSGCCallback> callbacks(self->extraGCCallbacks);
for (PRUint32 i = 0; i < callbacks.Length(); ++i) {
if (!callbacks[i](cx, status))
return false;
}
return true;
}
//static
@ -2064,8 +2067,7 @@ XPCJSRuntime::XPCJSRuntime(nsXPConnect* aXPConnect)
JS_SetNativeStackQuota(mJSRuntime, 128 * sizeof(size_t) * 1024);
JS_SetContextCallback(mJSRuntime, ContextCallback);
JS_SetCompartmentCallback(mJSRuntime, CompartmentCallback);
JS_SetGCCallback(mJSRuntime, GCCallback);
JS_SetFinalizeCallback(mJSRuntime, FinalizeCallback);
JS_SetGCCallbackRT(mJSRuntime, GCCallback);
JS_SetExtraGCRootsTracer(mJSRuntime, TraceBlackJS, this);
JS_SetGrayGCRootsTracer(mJSRuntime, TraceGrayJS, this);
JS_SetWrapObjectCallbacks(mJSRuntime,

View file

@ -228,17 +228,21 @@ nsXPConnect::ReleaseXPConnectSingleton()
// force a dump of the JavaScript gc heap if JS is still alive
// if requested through XPC_SHUTDOWN_HEAP_DUMP environment variable
{
const char* dumpName = getenv("XPC_SHUTDOWN_HEAP_DUMP");
if (dumpName) {
FILE* dumpFile = (*dumpName == '\0' ||
strcmp(dumpName, "stdout") == 0)
? stdout
: fopen(dumpName, "w");
if (dumpFile) {
JS_DumpHeap(xpc->GetRuntime()->GetJSRuntime(), dumpFile, nsnull,
JSTRACE_OBJECT, nsnull, static_cast<size_t>(-1), nsnull);
if (dumpFile != stdout)
fclose(dumpFile);
// autoscope
XPCCallContext ccx(NATIVE_CALLER);
if (ccx.IsValid()) {
const char* dumpName = getenv("XPC_SHUTDOWN_HEAP_DUMP");
if (dumpName) {
FILE* dumpFile = (*dumpName == '\0' ||
strcmp(dumpName, "stdout") == 0)
? stdout
: fopen(dumpName, "w");
if (dumpFile) {
JS_DumpHeap(ccx, dumpFile, nsnull, JSTRACE_OBJECT, nsnull,
static_cast<size_t>(-1), nsnull);
if (dumpFile != stdout)
fclose(dumpFile);
}
}
}
}
@ -495,11 +499,11 @@ TraceWeakMappingChild(JSTracer *trc, void **thingp, JSGCTraceKind kind)
struct NoteWeakMapsTracer : public js::WeakMapTracer
{
NoteWeakMapsTracer(JSRuntime *rt, js::WeakMapTraceCallback cb,
NoteWeakMapsTracer(JSContext *cx, js::WeakMapTraceCallback cb,
nsCycleCollectionTraversalCallback &cccb)
: js::WeakMapTracer(rt, cb), mCb(cccb), mChildTracer(cccb)
: js::WeakMapTracer(js::GetRuntime(cx), cb), mCb(cccb), mChildTracer(cccb)
{
JS_TracerInit(&mChildTracer, rt, TraceWeakMappingChild);
JS_TracerInit(&mChildTracer, cx, TraceWeakMappingChild);
}
nsCycleCollectionTraversalCallback &mCb;
NoteWeakMapChildrenTracer mChildTracer;
@ -594,7 +598,8 @@ nsXPConnect::BeginCycleCollection(nsCycleCollectionTraversalCallback &cb,
GetRuntime()->AddXPConnectRoots(cb);
NoteWeakMapsTracer trc(GetRuntime()->GetJSRuntime(), TraceWeakMapping, cb);
NoteWeakMapsTracer trc(mCycleCollectionContext->GetJSContext(),
TraceWeakMapping, cb);
js::TraceWeakMaps(&trc);
return NS_OK;
@ -719,7 +724,7 @@ UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
void *thing = *thingp;
int stackDummy;
if (!JS_CHECK_STACK_SIZE(js::GetNativeStackLimit(trc->runtime), &stackDummy)) {
if (!JS_CHECK_STACK_SIZE(js::GetContextStackLimit(trc->context), &stackDummy)) {
/*
* If we run out of stack, we take a more drastic measure: require that
* we GC again before the next CC.
@ -748,9 +753,17 @@ xpc_UnmarkGrayObjectRecursive(JSObject *obj)
// Unmark.
js::gc::AsCell(obj)->unmark(js::gc::GRAY);
// Tracing requires a JSContext...
JSContext *cx;
nsXPConnect* xpc = nsXPConnect::GetXPConnect();
if (!xpc || NS_FAILED(xpc->GetSafeJSContext(&cx)) || !cx) {
NS_ERROR("Failed to get safe JSContext!");
return;
}
// Trace children.
JSTracer trc;
JS_TracerInit(&trc, JS_GetObjectRuntime(obj), UnmarkGrayChildren);
JS_TracerInit(&trc, cx, UnmarkGrayChildren);
JS_TraceChildren(&trc, obj, JSTRACE_OBJECT);
}
@ -848,6 +861,8 @@ WrapperIsNotMainThreadOnly(XPCWrappedNative *wrapper)
NS_IMETHODIMP
nsXPConnect::Traverse(void *p, nsCycleCollectionTraversalCallback &cb)
{
JSContext *cx = mCycleCollectionContext->GetJSContext();
JSGCTraceKind traceKind = js_GetGCThingTraceKind(p);
JSObject *obj = nsnull;
js::Class *clazz = nsnull;
@ -955,7 +970,7 @@ nsXPConnect::Traverse(void *p, nsCycleCollectionTraversalCallback &cb)
TraversalTracer trc(cb);
JS_TracerInit(&trc, GetRuntime()->GetJSRuntime(), NoteJSChild);
JS_TracerInit(&trc, cx, NoteJSChild);
trc.eagerlyTraceWeakMaps = false;
JS_TraceChildren(&trc, p, traceKind);
@ -1207,7 +1222,7 @@ xpc_CreateGlobalObject(JSContext *cx, JSClass *clasp,
#ifdef DEBUG
if (clasp->flags & JSCLASS_XPCONNECT_GLOBAL) {
VerifyTraceXPCGlobalCalledTracer trc;
JS_TracerInit(&trc.base, JS_GetRuntime(cx), VerifyTraceXPCGlobalCalled);
JS_TracerInit(&trc.base, cx, VerifyTraceXPCGlobalCalled);
trc.ok = false;
JS_TraceChildren(&trc.base, *global, JSTRACE_OBJECT);
NS_ABORT_IF_FALSE(trc.ok, "Trace hook needs to call TraceXPCGlobal if JSCLASS_XPCONNECT_GLOBAL is set.");
@ -2756,12 +2771,13 @@ void
DumpJSHeap(FILE* file)
{
NS_ABORT_IF_FALSE(NS_IsMainThread(), "Must dump GC heap on main thread.");
JSContext *cx;
nsXPConnect* xpc = nsXPConnect::GetXPConnect();
if (!xpc) {
NS_ERROR("Failed to get nsXPConnect instance!");
if (!xpc || NS_FAILED(xpc->GetSafeJSContext(&cx)) || !cx) {
NS_ERROR("Failed to get safe JSContext!");
return;
}
js::DumpHeapComplete(xpc->GetRuntime()->GetJSRuntime(), file);
js::DumpHeapComplete(cx, file);
}
#endif

View file

@ -751,8 +751,7 @@ public:
void AddXPConnectRoots(nsCycleCollectionTraversalCallback& cb);
void UnmarkSkippableJSHolders();
static void GCCallback(JSRuntime *rt, JSGCStatus status);
static void FinalizeCallback(JSContext *cx, JSFinalizeStatus status);
static JSBool GCCallback(JSContext *cx, JSGCStatus status);
inline void AddVariantRoot(XPCTraceableVariant* variant);
inline void AddWrappedJSRoot(nsXPCWrappedJS* wrappedJS);