Bug 462021 - TM: Make JSStackFrame reconstitution infallible (part 3 of 4, call objects, r=brendan)

--HG--
extra : rebase_source : 14a82b2b08c8b7dbae5e72623685b7c3947df603
Jason Orendorff 2008-12-11 17:53:05 -06:00
Parent a4254d01fe
Commit b471c172cd
5 changed files: 106 additions and 6 deletions

js/src/jscntxt.h

@@ -131,6 +131,13 @@ typedef struct JSTraceMonitor {
     jsval                   *recoveryDoublePool;
     jsval                   *recoveryDoublePoolPtr;
 
+    /*
+     * reservedObjects is a linked list (via fslots[0]) of preallocated JSObjects.
+     * The JIT uses this to ensure that leaving a trace tree can't fail.
+     */
+    JSObject                *reservedObjects;
+    JSBool                  useReservedObjects;
+
     /* Fragmento for the regular expression compiler. This is logically
      * a distinct compiler but needs to be managed in exactly the same
      * way as the real tracing Fragmento. */
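
[Annotation, not part of the patch.] A minimal, self-contained sketch of the structure this comment describes, using hypothetical toy types in place of JSObject and JSTraceMonitor: each preallocated object donates its first slot as the "next" link, so maintaining the reserve list itself never allocates and therefore never fails.

    #include <cstddef>

    // Hypothetical toy stand-ins for JSObject and the trace monitor.
    struct ToyObject {
        ToyObject *slot0;   // role of fslots[0]: link to the next reserved object
        size_t     slot1;   // role of fslots[1]: length of the list from here
    };

    struct ToyMonitor {
        ToyObject *reservedObjects;   // head of the intrusive list
        bool       useReservedObjects;
    };

    int main() {
        static ToyObject a, b;        // the "preallocated" objects
        ToyMonitor tm = { nullptr, false };
        a.slot0 = tm.reservedObjects; a.slot1 = 1; tm.reservedObjects = &a;
        b.slot0 = tm.reservedObjects; b.slot1 = 2; tm.reservedObjects = &b;
        return tm.reservedObjects->slot1 == 2 ? 0 : 1;  // head knows the length
    }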

js/src/jsgc.cpp

@@ -1826,11 +1826,13 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes)
         doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke);
 #ifdef JS_GC_ZEAL
         doGC = doGC || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke);
+        if (rt->gcZeal >= 1 && JS_TRACE_MONITOR(cx).useReservedObjects)
+            goto testReservedObjects;
 #endif
 
     arenaList = &rt->gcArenaList[flindex];
     for (;;) {
-        if (doGC && !JS_ON_TRACE(cx)) {
+        if (doGC && !JS_ON_TRACE(cx) && !JS_TRACE_MONITOR(cx).useReservedObjects) {
             /*
              * Keep rt->gcLock across the call into js_GC so we don't starve
              * and lose to racing threads who deplete the heap just after
@@ -1895,6 +1897,21 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes)
             JS_ASSERT(arenaList->lastCount < thingsLimit);
             a = arenaList->last;
         } else {
+#ifdef JS_TRACER
+            if (JS_TRACE_MONITOR(cx).useReservedObjects) {
+#ifdef JS_GC_ZEAL
+          testReservedObjects:
+#endif
+                JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
+                thing = (JSGCThing *) tm->reservedObjects;
+                flagp = GetGCThingFlags(thing);
+                JS_ASSERT(thing);
+                tm->reservedObjects = JSVAL_TO_OBJECT(tm->reservedObjects->fslots[0]);
+                break;
+            }
+#endif
+
             a = NewGCArena(rt);
             if (!a) {
                 if (doGC || JS_ON_TRACE(cx))
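
[Annotation.] The two hunks above change js_NewGCThing so that, while useReservedObjects is set, it neither triggers a GC nor returns NULL; it pops a preallocated object off the reserve instead. A compilable sketch of that control flow, with hypothetical simplified types rather than the real allocator:

    #include <cassert>
    #include <cstdlib>

    struct Thing { Thing *next; };

    struct Allocator {
        Thing *reserved;     // preallocated things, linked through `next`
        bool   useReserved;  // set only while allocation must not fail
    };

    Thing *allocThing(Allocator *a) {
        if (a->useReserved) {
            // Reserve path, mirroring the new branch in js_NewGCThing:
            // no GC, no NULL return, just pop from the list.
            Thing *t = a->reserved;
            assert(t && "caller must reserve enough things up front");
            a->reserved = t->next;
            return t;
        }
        // Normal path: may fail (and, in the real engine, may run a GC).
        return (Thing *) malloc(sizeof(Thing));
    }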
@@ -2200,6 +2217,33 @@ js_NewWeaklyRootedDouble(JSContext *cx, jsdouble d)
     return dp;
 }
 
+#ifdef JS_TRACER
+JSBool
+js_ReserveObjects(JSContext *cx, size_t nobjects)
+{
+    /*
+     * Ensure at least nobjects objects are in the list. fslots[1] of each
+     * object on the reservedObjects list is the length of the list from there.
+     */
+    JSObject *&head = JS_TRACE_MONITOR(cx).reservedObjects;
+    size_t i = head ? JSVAL_TO_INT(head->fslots[1]) : 0;
+    while (i < nobjects) {
+        JSObject *obj = (JSObject *) js_NewGCThing(cx, GCX_OBJECT, sizeof(JSObject));
+        if (!obj)
+            return JS_FALSE;
+        memset(obj, 0, sizeof(JSObject));
+
+        /* The class must be set to something for finalization. */
+        obj->classword = (jsuword) &js_ObjectClass;
+        obj->fslots[0] = OBJECT_TO_JSVAL(head);
+        i++;
+        obj->fslots[1] = INT_TO_JSVAL(i);
+        head = obj;
+    }
+    return JS_TRUE;
+}
+#endif /* JS_TRACER */
+
 JSBool
 js_AddAsGCBytes(JSContext *cx, size_t sz)
 {
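
[Annotation.] Because fslots[1] of each reserved object records the list length from that point, js_ReserveObjects reads the remaining count from the head in O(1) and allocates only the shortfall, so repeated calls are cheap. A standalone sketch of that bookkeeping, under hypothetical types:

    #include <cstddef>
    #include <new>

    struct Node {
        Node  *next;            // like fslots[0]
        size_t lengthFromHere;  // like fslots[1]
    };

    // Ensure at least `nobjects` nodes are on the list, failing (recoverably)
    // only here, before anything depends on the reserve.
    bool reserveNodes(Node *&head, size_t nobjects) {
        size_t i = head ? head->lengthFromHere : 0;
        while (i < nobjects) {
            Node *n = new (std::nothrow) Node;
            if (!n)
                return false;    // safe to fail: nothing is mid-flight yet
            n->next = head;
            n->lengthFromHere = ++i;
            head = n;
        }
        return true;
    }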
@@ -3020,9 +3064,18 @@ js_TraceTraceMonitor(JSTracer *trc, JSTraceMonitor *tm)
 {
     if (IS_GC_MARKING_TRACER(trc)) {
         tm->recoveryDoublePoolPtr = tm->recoveryDoublePool;
 
         /* Make sure the global shape changes and will force a flush
            of the code cache. */
         tm->globalShape = -1;
+
+        /* Keep the reserved objects. */
+        for (JSObject *obj = tm->reservedObjects; obj; obj = JSVAL_TO_OBJECT(obj->fslots[0])) {
+            uint8 *flagp = GetGCThingFlags(obj);
+            JS_ASSERT((*flagp & GCF_TYPEMASK) == GCX_OBJECT);
+            JS_ASSERT(*flagp != GCF_FINAL);
+            *flagp |= GCF_MARK;
+        }
     }
 }
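
[Annotation.] The reserved objects are reachable only through the trace monitor, not through any stack or heap root, so the marking hook must pin them explicitly; otherwise a GC between js_ReserveObjects and their eventual use could sweep them. In toy form (hypothetical type), the equivalent is just a walk of the list during the mark phase:

    struct MarkNode { MarkNode *next; bool marked; };

    // Treat every reserved node as live during marking
    // (the analogue of `*flagp |= GCF_MARK` above).
    void markReserved(MarkNode *head) {
        for (MarkNode *n = head; n; n = n->next)
            n->marked = true;
    }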

js/src/jsgc.h

@ -211,6 +211,11 @@ js_NewDoubleInRootedValue(JSContext *cx, jsdouble d, jsval *vp);
extern jsdouble *
js_NewWeaklyRootedDouble(JSContext *cx, jsdouble d);
#ifdef JS_TRACER
extern JSBool
js_ReserveObjects(JSContext *cx, size_t nobjects);
#endif
extern JSBool
js_LockGCThingRT(JSRuntime *rt, void *thing);

js/src/jstracer.cpp

@@ -2932,10 +2932,19 @@ js_SynthesizeFrame(JSContext* cx, const FrameInfo& fi)
         /*
          * Set hookData to null because the failure case for js_GetCallObject
          * involves it calling the debugger hook.
+         *
+         * Allocating the Call object must not fail, so use an object
+         * previously reserved by js_ExecuteTree if needed.
          */
         newifp->hookData = NULL;
-        if (!js_GetCallObject(cx, &newifp->frame, newifp->frame.scopeChain))
-            return -1;
+        JS_ASSERT(!JS_TRACE_MONITOR(cx).useReservedObjects);
+        JS_TRACE_MONITOR(cx).useReservedObjects = JS_TRUE;
+#ifdef DEBUG
+        JSObject *obj =
+#endif
+            js_GetCallObject(cx, &newifp->frame, newifp->frame.scopeChain);
+        JS_ASSERT(obj);
+        JS_TRACE_MONITOR(cx).useReservedObjects = JS_FALSE;
     }
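
[Annotation.] The #ifdef DEBUG dance above works because JS_ASSERT compiles to a no-op that never evaluates its argument in release builds, so obj can be conditionally declared without causing an unused-variable warning or an undeclared-identifier error. The same idiom in portable C, with assert/NDEBUG playing the role of JS_ASSERT/DEBUG (function names hypothetical):

    #include <assert.h>

    void *must_not_fail_operation(void) { static char token; return &token; }

    void caller(void)
    {
    #ifndef NDEBUG
        void *result =
    #endif
            must_not_fail_operation();
        /* Under NDEBUG, assert() expands to a no-op that never names `result`,
         * so the conditional declaration compiles in both build modes. */
        assert(result);
    }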
@@ -3525,12 +3534,14 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
     /* Make sure our caller replenished the double pool. */
     JS_ASSERT(tm->recoveryDoublePoolPtr >= tm->recoveryDoublePool + MAX_NATIVE_STACK_SLOTS);
 
-    /* Reserve stack space now, to keep LeaveTree infallible. */
+    /* Reserve objects and stack space now, to make leaving the tree infallible. */
     void *reserve;
     void *stackMark = JS_ARENA_MARK(&cx->stackPool);
+    if (!js_ReserveObjects(cx, MAX_CALL_STACK_ENTRIES))
+        return NULL;
     JS_ARENA_ALLOCATE(reserve, &cx->stackPool, MAX_INTERP_STACK_BYTES);
     if (!reserve)
-        return NULL; /* do not report OOM, just bail */
+        return NULL;
 
 #ifdef DEBUG
     memset(stack_buffer, 0xCD, sizeof(stack_buffer));
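
[Annotation.] js_ExecuteTree now performs every fallible acquisition (the reserved objects plus interpreter stack space) before the trace runs, which is what lets leaving the tree, including js_SynthesizeFrame above, proceed with no failure path. The shape of that two-phase protocol in a self-contained miniature, all names hypothetical:

    #include <cstddef>
    #include <new>
    #include <vector>

    struct TraceRun { std::vector<int> frames; };

    // Phase 1: fallible. Acquire the worst case up front and report failure
    // while the caller can still recover (mirrors js_ReserveObjects +
    // JS_ARENA_ALLOCATE before entering the tree).
    bool reserveForRun(TraceRun &run, size_t worstCaseFrames) {
        try {
            run.frames.reserve(worstCaseFrames);
            return true;
        } catch (const std::bad_alloc &) {
            return false;
        }
    }

    // Phase 2: infallible. Consuming capacity reserved in phase 1 cannot
    // fail, provided framesNeeded <= worstCaseFrames.
    void leaveRun(TraceRun &run, size_t framesNeeded) {
        for (size_t i = 0; i < framesNeeded; i++)
            run.frames.push_back(int(i));  // no reallocation: capacity reserved
    }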

js/src/trace-test.js

@@ -911,6 +911,30 @@ function deep2() {
 deep2.expected = "ok";
 test(deep2);
 
+function heavyFn1(i) {
+    if (i == 3) {
+        var x = 3;
+        return [0, i].map(function (i) i + x);
+    }
+    return [];
+}
+
+function testHeavy() {
+    for (var i = 0; i <= 3; i++)
+        heavyFn1(i);
+}
+test(testHeavy);
+
+function heavyFn2(i) {
+    if (i < 1000)
+        return heavyFn1(i);
+    return function () i;
+}
+
+function testHeavy2() {
+    for (var i = 0; i <= 3; i++)
+        heavyFn2(i);
+}
+test(testHeavy2);
+
 var merge_type_maps_x = 0, merge_type_maps_y = 0;
 function merge_type_maps() {
     for (merge_type_maps_x = 0; merge_type_maps_x < 50; ++merge_type_maps_x)