Remove the reserved doubles and objects lists and instead waive the GC quota to avoid failing in LeaveTree (bug 508140, r=dvander).

Andreas Gal 2009-11-12 16:01:56 -08:00
Parent 5a45fd911f
Commit 0c63e06280
4 changed files: 68 additions and 290 deletions
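For orientation, a minimal standalone sketch of the approach this patch takes. All names here (ThreadData, kSoftLimit, QuotaWaiver, LeaveTraceBookkeeping) are hypothetical stand-ins, not the real SpiderMonkey API: allocation honours a soft GC quota unless a per-thread flag waives it, and an RAII guard sets that flag for the duration of trace-exit bookkeeping so those allocations cannot fail at the soft limit.

#include <cassert>
#include <cstddef>
#include <cstdlib>

// Hypothetical per-thread state; the real patch puts the flag in JSThreadData.
struct ThreadData {
    bool waiveGCQuota = false;
    std::size_t gcBytes = 0;
};

static const std::size_t kSoftLimit = 8u * 1024 * 1024;   // may be waived
static const std::size_t kHardLimit = 32u * 1024 * 1024;  // never waived

// The allocator honours the soft quota only while it is not waived.
static void* AllocateGCArena(ThreadData& td, std::size_t size) {
    if (!td.waiveGCQuota && td.gcBytes + size > kSoftLimit)
        return nullptr;                       // caller runs a GC and retries
    if (td.gcBytes + size > kHardLimit)
        return nullptr;                       // genuinely out of memory
    td.gcBytes += size;
    return std::malloc(size);
}

// Scope guard mirroring the Guardian class the patch adds to jstracer.cpp.
class QuotaWaiver {
    bool* flag;
  public:
    explicit QuotaWaiver(bool* f) : flag(f) { assert(!*flag); *flag = true; }
    ~QuotaWaiver() { assert(*flag); *flag = false; }
};

// Stand-in for LeaveTree: while the guard is alive, allocations skip the
// soft quota, so writing back natives and synthesizing frames cannot fail
// at the soft limit.
static void LeaveTraceBookkeeping(ThreadData& td) {
    QuotaWaiver waiver(&td.waiveGCQuota);
    void* p = AllocateGCArena(td, 4096);
    std::free(p);
}

int main() {
    ThreadData td;
    LeaveTraceBookkeeping(td);
    return 0;
}

In the patch itself the flag is JSThreadData::waiveGCQuota, the check sits in NewGCArena in jsgc.cpp, and the guard is the Guardian class added to jstracer.cpp and instantiated at the top of LeaveTree.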

View file

@@ -131,24 +131,14 @@ JSThreadData::purge(JSContext *cx)
/* FIXME: bug 506341. */
js_PurgePropertyCache(cx, &propertyCache);
# ifdef JS_TRACER
#ifdef JS_TRACER
/*
* If we are about to regenerate shapes, we have to flush the JIT cache,
* which will eventually abort any current recording.
*/
if (cx->runtime->gcRegenShapes)
traceMonitor.needFlush = JS_TRUE;
/*
* We want to keep reserved doubles and objects after the GC. So, unless we
* are shutting down, we don't purge them here and rather mark them during
* the GC, see MarkReservedObjects in jsgc.cpp.
*/
if (cx->runtime->state == JSRTS_LANDING) {
traceMonitor.reservedDoublePoolPtr = traceMonitor.reservedDoublePool;
traceMonitor.reservedObjects = NULL;
}
# endif
#endif
/* Destroy eval'ed scripts. */
js_DestroyScriptsToGC(cx, this);

View file

@@ -164,7 +164,6 @@ struct InterpState
// Used to communicate the location of the return value in case of a deep bail.
double* deepBailSp;
// Used when calling natives from trace to root the vp vector.
uintN nativeVpLen;
jsval* nativeVp;
@@ -247,8 +246,6 @@ struct JSTraceMonitor {
#endif
TraceRecorder* recorder;
jsval *reservedDoublePool;
jsval *reservedDoublePoolPtr;
struct GlobalState globalStates[MONITOR_N_GLOBAL_STATES];
struct TreeFragment* vmfragments[FRAGMENT_TABLE_SIZE];
@@ -377,6 +374,13 @@ const uint32 JSLRS_NULL_MARK = uint32(-1);
struct JSThreadData {
JSGCFreeLists gcFreeLists;
/*
* Flag indicating that we are waiving any soft limits on the GC heap
* because we want allocations to be infallible (except when we hit
* a hard quota).
*/
bool waiveGCQuota;
/*
* The GSN cache is per thread since even multi-cx-per-thread embeddings
* do not interleave js_GetSrcNote calls.

View file

@@ -636,11 +636,11 @@ NewGCArena(JSContext *cx)
JSGCArenaInfo *a;
JSRuntime *rt = cx->runtime;
if (rt->gcBytes >= rt->gcMaxBytes) {
if (!JS_THREAD_DATA(cx)->waiveGCQuota && rt->gcBytes >= rt->gcMaxBytes) {
/*
* FIXME bug 524051 We cannot run a last-ditch GC on trace for now, so
* as a workaround we allow to breach the max bytes limit here and
* schedule the GC later.
* just pretend we are out of memory which will throw us off trace and
* we will re-try this code path from the interpreter.
*/
if (!JS_ON_TRACE(cx))
return NULL;
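As a hedged aside on the hunk above: the names below (waiveGCQuota, gcBytes, gcMaxBytes, OnTrace, LastDitchGC) are stand-ins for the real JSThreadData flag, rt->gcBytes, rt->gcMaxBytes, JS_ON_TRACE(cx) and the last-ditch GC. The point is only the control flow the new comment describes: a last-ditch GC cannot run on trace, so on trace the allocator simply fails, the failure throws execution off trace, and the interpreter retries the same path where the GC is allowed.

#include <cstddef>
#include <cstdlib>

// Hypothetical globals standing in for per-runtime / per-thread state.
static bool waiveGCQuota = false;                         // JSThreadData flag
static std::size_t gcBytes = 0;                           // rt->gcBytes
static const std::size_t gcMaxBytes = 32u * 1024 * 1024;  // rt->gcMaxBytes
static bool OnTrace() { return false; }                   // stub for JS_ON_TRACE
static void LastDitchGC() { /* stub: would collect garbage */ }

static void* NewArenaSketch(std::size_t size) {
    if (!waiveGCQuota && gcBytes >= gcMaxBytes) {
        // A last-ditch GC cannot be run on trace, so just fail: the NULL
        // propagates, the trace side-exits, and the interpreter retries
        // this path where the GC may run.
        if (OnTrace())
            return nullptr;
        LastDitchGC();
        if (gcBytes >= gcMaxBytes)
            return nullptr;                               // real OOM
    }
    gcBytes += size;
    return std::malloc(size);
}

int main() {
    void* a = NewArenaSketch(4096);
    std::free(a);
    return 0;
}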
@@ -1555,9 +1555,6 @@ js_NewFinalizableGCThing(JSContext *cx, unsigned thingKind)
* check for non-null lrs only when we exhaust the free list.
*/
JSLocalRootStack *lrs = JS_THREAD_DATA(cx)->localRootStack;
#ifdef JS_TRACER
bool fromTraceReserve = false;
#endif
for (;;) {
if (lrs) {
freeListp = lrs->gcFreeLists.finalizables + thingKind;
@@ -1569,19 +1566,6 @@ js_NewFinalizableGCThing(JSContext *cx, unsigned thingKind)
}
}
#ifdef JS_TRACER
if (JS_TRACE_MONITOR(cx).useReservedObjects) {
JS_ASSERT(!JS_ON_TRACE(cx));
JS_ASSERT(thingKind == FINALIZE_OBJECT);
JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx);
thing = (JSGCThing *) tm->reservedObjects;
JS_ASSERT(thing);
tm->reservedObjects = JSVAL_TO_OBJECT(tm->reservedObjects->fslots[0]);
fromTraceReserve = true;
break;
}
#endif
thing = RefillFinalizableFreeList(cx, thingKind);
if (thing) {
/*
@@ -1607,19 +1591,8 @@ js_NewFinalizableGCThing(JSContext *cx, unsigned thingKind)
* See JS_EnterLocalRootScope and related APIs.
*/
if (js_PushLocalRoot(cx, lrs, (jsval) thing) < 0) {
/*
* When we fail for a thing allocated from a free list, not from
* the reserved pool, the thing is not initialized. To prevent GC
* running the finalizer on the thing, we add the thing back to
* the free list. See bug 337407.
*/
#ifdef JS_TRACER
if (!fromTraceReserve)
#endif
{
JS_ASSERT(thing->link == *freeListp);
*freeListp = thing;
}
JS_ASSERT(thing->link == *freeListp);
*freeListp = thing;
return NULL;
}
} else {
@@ -1785,10 +1758,6 @@ js_NewDoubleInRootedValue(JSContext *cx, jsdouble d, jsval *vp)
break;
}
}
#ifdef JS_TRACER
if (JS_TRACE_MONITOR(cx).useReservedObjects)
return false;
#endif
thing = RefillDoubleFreeList(cx);
if (thing) {
JS_ASSERT(!*freeListp || *freeListp == thing);
@@ -1824,36 +1793,6 @@ js_NewWeaklyRootedDouble(JSContext *cx, jsdouble d)
return dp;
}
#ifdef JS_TRACER
JSBool
js_ReserveObjects(JSContext *cx, size_t nobjects)
{
/*
* Ensure at least nobjects objects are in the list. fslots[1] of each
* object on the reservedObjects list is the length of the list to this
* object.
*/
JSObject *&head = JS_TRACE_MONITOR(cx).reservedObjects;
size_t i = head ? JSVAL_TO_INT(head->fslots[1]) : 0;
while (i < nobjects) {
JSObject *obj = js_NewGCObject(cx);
if (!obj)
return JS_FALSE;
memset(obj, 0, sizeof(JSObject));
/* The class must be set to something for finalization. */
obj->classword = (jsuword) &js_ObjectClass;
obj->fslots[0] = OBJECT_TO_JSVAL(head);
i++;
obj->fslots[1] = INT_TO_JSVAL(i);
head = obj;
}
return JS_TRUE;
}
#endif
/*
* Shallow GC-things can be locked just by setting the GCF_LOCK bit, because
* they have no descendants to mark during the GC. Currently the optimization
@@ -2598,48 +2537,6 @@ js_TraceContext(JSTracer *trc, JSContext *acx)
#endif
}
#ifdef JS_TRACER
static void
MarkReservedGCThings(JSTraceMonitor *tm)
{
/* Keep reserved doubles. */
for (jsval *ptr = tm->reservedDoublePool; ptr < tm->reservedDoublePoolPtr; ++ptr) {
jsdouble* dp = JSVAL_TO_DOUBLE(*ptr);
JS_ASSERT(js_GetGCThingTraceKind(dp) == JSTRACE_DOUBLE);
JSGCArenaInfo *a = THING_TO_ARENA(dp);
JS_ASSERT(!a->list);
if (!a->hasMarkedDoubles) {
ClearDoubleArenaFlags(a);
a->hasMarkedDoubles = JS_TRUE;
}
jsuint index = DOUBLE_THING_TO_INDEX(dp);
JS_SET_BIT(DOUBLE_ARENA_BITMAP(a), index);
}
/* Keep reserved objects. */
for (JSObject *obj = tm->reservedObjects; obj; obj = JSVAL_TO_OBJECT(obj->fslots[0])) {
JS_ASSERT(js_GetGCThingTraceKind(obj) == JSTRACE_OBJECT);
uint8 *flagp = GetGCThingFlags(obj);
*flagp |= GCF_MARK;
}
}
#ifdef JS_THREADSAFE
static JSDHashOperator
reserved_gcthings_marker(JSDHashTable *table, JSDHashEntryHdr *hdr,
uint32, void *)
{
JSThread *thread = ((JSThreadsHashEntry *) hdr)->thread;
MarkReservedGCThings(&thread->data.traceMonitor);
return JS_DHASH_NEXT;
}
#endif
#endif
JS_REQUIRES_STACK void
js_TraceRuntime(JSTracer *trc, JSBool allAtoms)
{
@@ -2667,16 +2564,6 @@ js_TraceRuntime(JSTracer *trc, JSBool allAtoms)
if (rt->builtinFunctions[i])
JS_CALL_OBJECT_TRACER(trc, rt->builtinFunctions[i], "builtin function");
}
/* Mark reserved gcthings unless we are shutting down. */
if (IS_GC_MARKING_TRACER(trc) && rt->state != JSRTS_LANDING) {
#ifdef JS_THREADSAFE
JS_DHashTableEnumerate(&rt->threads, reserved_gcthings_marker, NULL);
#else
MarkReservedGCThings(&rt->threadData.traceMonitor);
#endif
}
#endif
}

View file

@@ -315,6 +315,11 @@ nanojit::Allocator::postReset() {
vma->mSize = 0;
}
static void OutOfMemoryAbort()
{
JS_NOT_REACHED("out of memory");
abort();
}
#ifdef JS_JIT_SPEW
static void
@@ -2697,64 +2702,6 @@ ValueToNative(JSContext* cx, jsval v, JSTraceType type, double* slot)
JS_NOT_REACHED("unexpected type");
}
/*
* We maintain an emergency pool of doubles so we can recover safely if a trace
* runs out of memory (doubles or objects).
*/
static jsval
AllocateDoubleFromReservedPool(JSContext* cx)
{
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
JS_ASSERT(tm->reservedDoublePoolPtr > tm->reservedDoublePool);
return *--tm->reservedDoublePoolPtr;
}
static bool
ReplenishReservedPool(JSContext* cx, JSTraceMonitor* tm)
{
/* We should not be called with a full pool. */
JS_ASSERT((size_t) (tm->reservedDoublePoolPtr - tm->reservedDoublePool) <
MAX_NATIVE_STACK_SLOTS);
/*
* When the GC runs in js_NewDoubleInRootedValue, it resets
* tm->reservedDoublePoolPtr back to tm->reservedDoublePool.
*/
JSRuntime* rt = cx->runtime;
uintN gcNumber = rt->gcNumber;
uintN lastgcNumber = gcNumber;
jsval* ptr = tm->reservedDoublePoolPtr;
while (ptr < tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) {
if (!js_NewDoubleInRootedValue(cx, 0.0, ptr))
goto oom;
/* Check if the last call to js_NewDoubleInRootedValue GC'd. */
if (rt->gcNumber != lastgcNumber) {
lastgcNumber = rt->gcNumber;
ptr = tm->reservedDoublePool;
/*
* Have we GC'd more than once? We're probably running really
* low on memory, bail now.
*/
if (uintN(rt->gcNumber - gcNumber) > uintN(1))
goto oom;
continue;
}
++ptr;
}
tm->reservedDoublePoolPtr = ptr;
return true;
oom:
/*
* Already massive GC pressure, no need to hold doubles back.
* We won't run any native code anyway.
*/
tm->reservedDoublePoolPtr = tm->reservedDoublePool;
return false;
}
void
JSTraceMonitor::flush()
{
@@ -2853,10 +2800,10 @@ JSTraceMonitor::mark(JSTracer* trc)
* are too large to fit into a jsval are automatically boxed into
* heap-allocated doubles.
*/
template <typename E>
static inline bool
NativeToValueBase(JSContext* cx, jsval& v, JSTraceType type, double* slot)
bool
js_NativeToValue(JSContext* cx, jsval& v, JSTraceType type, double* slot)
{
bool ok;
jsint i;
jsdouble d;
switch (type) {
@@ -2886,7 +2833,12 @@ NativeToValueBase(JSContext* cx, jsval& v, JSTraceType type, double* slot)
if (JSDOUBLE_IS_INT(d, i))
goto store_int;
store_double:
return E::NewDoubleInRootedValue(cx, d, v);
ok = js_NewDoubleInRootedValue(cx, d, &v);
if (!ok) {
js_ReportOutOfMemory(cx);
return false;
}
return true;
case TT_JSVAL:
v = *(jsval*)slot;
@@ -2928,48 +2880,6 @@ NativeToValueBase(JSContext* cx, jsval& v, JSTraceType type, double* slot)
return true;
}
struct ReserveDoubleOOMHandler {
static bool NewDoubleInRootedValue(JSContext *cx, double d, jsval& v) {
JS_ASSERT(!JS_TRACE_MONITOR(cx).useReservedObjects);
JS_TRACE_MONITOR(cx).useReservedObjects = true;
bool ok = js_NewDoubleInRootedValue(cx, d, &v);
JS_TRACE_MONITOR(cx).useReservedObjects = false;
if (ok)
return true;
v = AllocateDoubleFromReservedPool(cx);
JS_ASSERT(JSVAL_IS_DOUBLE(v) && *JSVAL_TO_DOUBLE(v) == 0.0);
*JSVAL_TO_DOUBLE(v) = d;
return true;
}
};
static void
NativeToValue(JSContext* cx, jsval& v, JSTraceType type, double* slot)
{
#ifdef DEBUG
bool ok =
#endif
NativeToValueBase<ReserveDoubleOOMHandler>(cx, v, type, slot);
JS_ASSERT(ok);
}
struct FailDoubleOOMHandler {
static bool NewDoubleInRootedValue(JSContext *cx, double d, jsval& v) {
bool ok = js_NewDoubleInRootedValue(cx, d, &v);
if (!ok) {
js_ReportOutOfMemory(cx);
return false;
}
return true;
}
};
bool
js_NativeToValue(JSContext* cx, jsval& v, JSTraceType type, double* slot)
{
return NativeToValueBase<FailDoubleOOMHandler>(cx, v, type, slot);
}
class BuildNativeFrameVisitor : public SlotVisitorBase
{
JSContext *mCx;
@@ -3030,7 +2940,9 @@ public:
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
debug_only_printf(LC_TMTracer, "global%d=", n);
NativeToValue(mCx, *vp, *mTypeMap++, &mGlobal[slot]);
JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
if (!js_NativeToValue(mCx, *vp, *mTypeMap++, &mGlobal[slot]))
OutOfMemoryAbort();
}
};
@@ -3063,12 +2975,15 @@ public:
JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
JS_ASSERT(JS_THREAD_DATA(mCx)->waiveGCQuota);
for (size_t i = 0; i < count; ++i) {
if (vp == mStop)
return false;
debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
if (unsigned(mTypeMap - mInitTypeMap) >= mIgnoreSlots)
NativeToValue(mCx, *vp, *mTypeMap, mStack);
if (unsigned(mTypeMap - mInitTypeMap) >= mIgnoreSlots) {
if (!js_NativeToValue(mCx, *vp, *mTypeMap, mStack))
OutOfMemoryAbort();
}
vp++;
mTypeMap++;
mStack++;
@@ -3409,14 +3324,12 @@ FlushNativeStackFrame(JSContext* cx, unsigned callDepth, const JSTraceType* mp,
*/
void* hookData = ((JSInlineFrame*)fp)->hookData;
((JSInlineFrame*)fp)->hookData = NULL;
JS_ASSERT(!JS_TRACE_MONITOR(cx).useReservedObjects);
JS_TRACE_MONITOR(cx).useReservedObjects = JS_TRUE;
JS_ASSERT(JS_THREAD_DATA(cx)->waiveGCQuota);
#ifdef DEBUG
JSObject *obj =
#endif
js_GetCallObject(cx, fp);
JS_ASSERT(obj);
JS_TRACE_MONITOR(cx).useReservedObjects = JS_FALSE;
((JSInlineFrame*)fp)->hookData = hookData;
}
}
@@ -5445,10 +5358,8 @@ SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee)
JS_ASSERT(missing == 0);
} else {
JS_ARENA_ALLOCATE_CAST(newsp, jsval *, &cx->stackPool, nbytes);
if (!newsp) {
JS_NOT_REACHED("out of memory");
abort();
}
if (!newsp)
OutOfMemoryAbort();
/*
* Move args if the missing ones overflow arena a, then push
@@ -5563,10 +5474,8 @@ SynthesizeSlowNativeFrame(InterpState& state, JSContext *cx, VMSideExit *exit)
/* This allocation is infallible: ExecuteTree reserved enough stack. */
mark = JS_ARENA_MARK(&cx->stackPool);
JS_ARENA_ALLOCATE_CAST(ifp, JSInlineFrame *, &cx->stackPool, sizeof(JSInlineFrame));
if (!ifp) {
JS_NOT_REACHED("out of memory");
abort();
}
if (!ifp)
OutOfMemoryAbort();
JSStackFrame *fp = &ifp->frame;
fp->regs = NULL;
@@ -5954,13 +5863,6 @@ TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCall
TreeFragment* first = LookupOrAddLoop(tm, cx->fp->regs->pc, root->globalObj,
root->globalShape, cx->fp->argc);
/* Make sure inner tree call will not run into an out-of-memory condition. */
if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
!ReplenishReservedPool(cx, tm)) {
js_AbortRecording(cx, "Couldn't call inner tree (out of memory)");
return false;
}
/*
* Make sure the shape of the global object still matches (this might flush
* the JIT cache).
@@ -6391,13 +6293,6 @@ ExecuteTree(JSContext* cx, TreeFragment* f, uintN& inlineCallCount,
OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) ==
f->globalShape);
/* Make sure our caller replenished the double pool. */
JS_ASSERT(tm->reservedDoublePoolPtr >= tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS);
/* Reserve objects and stack space now, to make leaving the tree infallible. */
if (!js_ReserveObjects(cx, MAX_CALL_STACK_ENTRIES))
return NULL;
/*
* Set up the interpreter state. For the native stacks and global frame,
* reuse the storage in |tm->storage|. This reuse depends on the invariant
@@ -6507,12 +6402,31 @@ ExecuteTree(JSContext* cx, TreeFragment* f, uintN& inlineCallCount,
return state.innermost;
}
class Guardian {
bool *flagp;
public:
Guardian(bool *flagp) {
this->flagp = flagp;
JS_ASSERT(!*flagp);
*flagp = true;
}
~Guardian() {
JS_ASSERT(*flagp);
*flagp = false;
}
};
static JS_FORCES_STACK void
LeaveTree(InterpState& state, VMSideExit* lr)
{
VOUCH_DOES_NOT_REQUIRE_STACK();
JSContext* cx = state.cx;
/* Temporarily waive the soft GC quota to make sure LeaveTree() doesn't fail. */
Guardian waiver(&JS_THREAD_DATA(cx)->waiveGCQuota);
FrameInfo** callstack = state.callstackBase;
double* stack = state.stackBase;
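A quick standalone illustration of the Guardian scope guard introduced above; the busy flag and DoWork below are hypothetical. The flag is raised for exactly the dynamic extent of the guard, is restored even on early return, and the constructor's assertion forbids nesting on the same flag, which in LeaveTree doubles as a check that the quota is not already waived on entry.

#include <cassert>

// Standalone copy of the scope-guard idea (the real class uses JS_ASSERT).
class Guardian {
    bool *flagp;
  public:
    explicit Guardian(bool *flagp) {
        this->flagp = flagp;
        assert(!*flagp);   // must not already be set: the guard does not nest
        *flagp = true;
    }
    ~Guardian() {
        assert(*flagp);
        *flagp = false;
    }
};

static bool busy = false;          // hypothetical flag, like waiveGCQuota

static void DoWork(bool bailEarly) {
    Guardian guard(&busy);         // busy == true from here on
    if (bailEarly)
        return;                    // destructor still clears the flag
    // Guardian nested(&busy);     // would trip the constructor assertion
}

int main() {
    DoWork(true);
    DoWork(false);
    assert(!busy);                 // flag restored after each call
    return 0;
}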
@@ -6649,11 +6563,13 @@ LeaveTree(InterpState& state, VMSideExit* lr)
*/
JSTraceType* typeMap = innermost->stackTypeMap();
for (int i = 1; i <= cs.ndefs; i++) {
NativeToValue(cx,
regs->sp[-i],
typeMap[innermost->numStackSlots - i],
(jsdouble *) state.deepBailSp
+ innermost->sp_adj / sizeof(jsdouble) - i);
if (!js_NativeToValue(cx,
regs->sp[-i],
typeMap[innermost->numStackSlots - i],
(jsdouble *) state.deepBailSp
+ innermost->sp_adj / sizeof(jsdouble) - i)) {
OutOfMemoryAbort();
}
}
}
return;
@@ -6825,6 +6741,7 @@ LeaveTree(InterpState& state, VMSideExit* lr)
/* Write back interned globals. */
JS_ASSERT(state.eos == state.stackBase + MAX_NATIVE_STACK_SLOTS);
FlushNativeGlobalFrame(cx, state.eos, ngslots, gslots, globalTypeMap);
#ifdef DEBUG
/* Verify that our state restoration worked. */
for (JSStackFrame* fp = cx->fp; fp; fp = fp->down) {
@@ -6877,15 +6794,6 @@ js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason)
}
JS_ASSERT(!tm->recorder);
/* Check the pool of reserved doubles (this might trigger a GC). */
if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
!ReplenishReservedPool(cx, tm)) {
#ifdef MOZ_TRACEVIS
tvso.r = R_DOUBLES;
#endif
return false; /* Out of memory, don't try to record now. */
}
/*
* Make sure the shape of the global object still matches (this might flush
* the JIT cache).
@@ -7559,9 +7467,6 @@ js_InitJIT(JSTraceMonitor *tm)
tm->flush();
verbose_only( tm->branches = NULL; )
JS_ASSERT(!tm->reservedDoublePool);
tm->reservedDoublePoolPtr = tm->reservedDoublePool = new jsval[MAX_NATIVE_STACK_SLOTS];
#if !defined XP_WIN
debug_only(memset(&jitstats, 0, sizeof(jitstats)));
#endif
@@ -7624,7 +7529,6 @@ js_FinishJIT(JSTraceMonitor *tm)
debug_only_print0(LC_TMStats, "\n");
}
#endif
JS_ASSERT(tm->reservedDoublePool);
if (tm->recordAttempts.ops)
JS_DHashTableFinish(&tm->recordAttempts);
@@ -7660,9 +7564,6 @@ js_FinishJIT(JSTraceMonitor *tm)
memset(&tm->vmfragments[0], 0, FRAGMENT_TABLE_SIZE * sizeof(TreeFragment*));
delete[] tm->reservedDoublePool;
tm->reservedDoublePool = tm->reservedDoublePoolPtr = NULL;
if (tm->frameCache) {
delete tm->frameCache;
tm->frameCache = NULL;
@@ -9809,10 +9710,6 @@ TraceRecorder::record_EnterFrame(uintN& inlineCallCount)
} else if (f) {
/* Make sure inner tree call will not run into an out-of-memory condition. */
JSTraceMonitor* tm = traceMonitor;
if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
!ReplenishReservedPool(cx, tm)) {
RETURN_STOP_A("Couldn't call inner tree (out of memory)");
}
/*
* Make sure the shape of the global object still matches (this might
* flush the JIT cache).