From a2dfa4ca31a3f628bb230ad0143940e14ec50932 Mon Sep 17 00:00:00 2001 From: "igor%mir2.org" Date: Tue, 26 Feb 2008 21:01:42 +0000 Subject: [PATCH] bug=400902 r,a1.9=brendan Specialized GC arena for double values --- js/src/js.msg | 2 +- js/src/jsapi.c | 8 +- js/src/jsatom.c | 2 +- js/src/jscntxt.h | 4 + js/src/jsdate.c | 6 +- js/src/jsgc.c | 1109 +++++++++++++++++++++++++++++++++------------- js/src/jsgc.h | 56 ++- js/src/jsnum.c | 33 +- js/src/jsnum.h | 8 +- 9 files changed, 869 insertions(+), 359 deletions(-) diff --git a/js/src/js.msg b/js/src/js.msg index 42dd23a4d78..33c6b14f95c 100644 --- a/js/src/js.msg +++ b/js/src/js.msg @@ -85,7 +85,7 @@ MSG_DEF(JSMSG_INACTIVE, 2, 0, JSEXN_INTERNALERR, "nothing active MSG_DEF(JSMSG_MORE_ARGS_NEEDED, 3, 3, JSEXN_TYPEERR, "{0} requires more than {1} argument{2}") MSG_DEF(JSMSG_BAD_CHAR, 4, 1, JSEXN_INTERNALERR, "invalid format character {0}") MSG_DEF(JSMSG_BAD_TYPE, 5, 1, JSEXN_TYPEERR, "unknown type {0}") -MSG_DEF(JSMSG_CANT_LOCK, 6, 0, JSEXN_INTERNALERR, "can't lock memory") +MSG_DEF(JSMSG_UNUSED6, 6, 0, JSEXN_NONE, "") MSG_DEF(JSMSG_CANT_UNLOCK, 7, 0, JSEXN_INTERNALERR, "can't unlock memory") MSG_DEF(JSMSG_INCOMPATIBLE_PROTO, 8, 3, JSEXN_TYPEERR, "{0}.prototype.{1} called on incompatible {2}") MSG_DEF(JSMSG_NO_CONSTRUCTOR, 9, 1, JSEXN_TYPEERR, "{0} has no constructor") diff --git a/js/src/jsapi.c b/js/src/jsapi.c index ffd7a7976a4..df6d55181a9 100644 --- a/js/src/jsapi.c +++ b/js/src/jsapi.c @@ -489,7 +489,7 @@ JS_ConvertValue(JSContext *cx, jsval v, JSType type, jsval *vp) case JSTYPE_NUMBER: ok = js_ValueToNumber(cx, v, &d); if (ok) { - dp = js_NewDouble(cx, d, 0); + dp = js_NewDouble(cx, d); ok = (dp != NULL); if (ok) *vp = DOUBLE_TO_JSVAL(dp); @@ -1768,7 +1768,7 @@ JS_PUBLIC_API(jsdouble *) JS_NewDouble(JSContext *cx, jsdouble d) { CHECK_REQUEST(cx); - return js_NewDouble(cx, d, 0); + return js_NewDouble(cx, d); } JS_PUBLIC_API(JSBool) @@ -1877,9 +1877,9 @@ JS_LockGCThing(JSContext *cx, void *thing) JSBool ok; 
CHECK_REQUEST(cx); - ok = js_LockGCThing(cx, thing); + ok = js_LockGCThingRT(cx->runtime, thing); if (!ok) - JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_CANT_LOCK); + JS_ReportOutOfMemory(cx); return ok; } diff --git a/js/src/jsatom.c b/js/src/jsatom.c index 333eabffbda..c4a5c1c45fc 100644 --- a/js/src/jsatom.c +++ b/js/src/jsatom.c @@ -586,7 +586,7 @@ js_AtomizeDouble(JSContext *cx, jsdouble d) gen = ++table->generation; JS_UNLOCK(&state->lock, cx); - key = js_NewDouble(cx, d, 0); + key = js_NewDouble(cx, d); if (!key) return NULL; diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 25bd9811504..667d063cf12 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -187,6 +187,7 @@ struct JSRuntime { /* Garbage collector state, used by jsgc.c. */ JSGCChunkInfo *gcChunkList; JSGCArenaList gcArenaList[GC_NUM_FREELISTS]; + JSGCDoubleArenaList gcDoubleArenaList; JSDHashTable gcRootsHash; JSDHashTable *gcLocksHash; jsrefcount gcKeepAtoms; @@ -787,6 +788,9 @@ struct JSContext { /* Stack of thread-stack-allocated temporary GC roots. */ JSTempValueRooter *tempValueRooters; + /* List of pre-allocated doubles. */ + JSGCDoubleCell *doubleFreeList; + /* Debug hooks associated with the current context. 
*/ JSDebugHooks *debugHooks; }; diff --git a/js/src/jsdate.c b/js/src/jsdate.c index 79a272c8153..60805a89066 100644 --- a/js/src/jsdate.c +++ b/js/src/jsdate.c @@ -963,7 +963,7 @@ SetUTCTimePtr(JSContext *cx, JSObject *obj, jsval *vp, jsdouble *dp) static JSBool SetUTCTime(JSContext *cx, JSObject *obj, jsval *vp, jsdouble t) { - jsdouble *dp = js_NewDouble(cx, t, 0); + jsdouble *dp = js_NewDouble(cx, t); if (!dp) return JS_FALSE; return SetUTCTimePtr(cx, obj, vp, dp); @@ -993,7 +993,7 @@ GetAndCacheLocalTime(JSContext *cx, JSObject *obj, jsval *vp, jsdouble *dp) if (JSDOUBLE_IS_FINITE(result)) result = LocalTime(result); - cached = js_NewDouble(cx, result, 0); + cached = js_NewDouble(cx, result); if (!cached) return JS_FALSE; @@ -2019,7 +2019,7 @@ date_constructor(JSContext *cx, JSObject* obj) { jsdouble *date; - date = js_NewDouble(cx, 0.0, 0); + date = js_NewDouble(cx, 0.0); if (!date) return NULL; diff --git a/js/src/jsgc.c b/js/src/jsgc.c index adc0c5a6622..dbfe9c5fefe 100644 --- a/js/src/jsgc.c +++ b/js/src/jsgc.c @@ -111,8 +111,8 @@ #endif /* - * A GC arena contains one flag byte for each thing in its heap, and supports - * O(1) lookup of a flag given its thing's address. + * A GC arena contains a fixed number of flag bits for each thing in its heap, + * and supports O(1) lookup of a flag given its thing's address. * * To implement this, we allocate things of the same size from a GC arena * containing GC_ARENA_SIZE bytes aligned on GC_ARENA_SIZE boundary. 
The @@ -122,29 +122,50 @@ * | allocation area for GC thing | flags of GC things | JSGCArenaInfo | * +------------------------------+--------------------+---------------+ * - * For a GC thing of size thingSize the number of things that the arena can - * hold is given by: - * (GC_ARENA_SIZE - sizeof(JSGCArenaInfo)) / (thingSize + 1) + * To find the flag bits for the thing we calculate the thing index counting + * from arena's start using: + * + * thingIndex = (thingAddress & GC_ARENA_MASK) / thingSize + * + * The details of flag's lookup depend on thing's kind. For all GC things + * except doubles we use one byte of flags where the 4 bits determine thing's + * type and the rest is used to implement GC marking, finalization and + * locking. We calculate the address of flag's byte using: * - * The address of thing's flag is given by: * flagByteAddress = - * (thingAddress | GC_ARENA_MASK) - sizeof(JSGCArenaInfo) - - * (thingAddress & GC_ARENA_MASK) / thingSize - * where - * (thingAddress | GC_ARENA_MASK) - sizeof(JSGCArenaInfo) - * is the last byte of flags' area and - * (thingAddress & GC_ARENA_MASK) / thingSize - * is thing's index counting from arena's start. + * (thingAddress | GC_ARENA_MASK) - sizeof(JSGCArenaInfo) - thingIndex * - * Things are allocated from the start of their area and flags are allocated - * from the end of their area. This avoids calculating the location of the - * boundary separating things and flags. + * where + * + * (thingAddress | GC_ARENA_MASK) - sizeof(JSGCArenaInfo) + * + * is the last byte of flags' area. + * + * This implies that the things are allocated from the start of their area and + * flags are allocated from the end. This arrangement avoids a relatively + * expensive calculation of the location of the boundary separating things and + * flags. 
The boundary's offset from the start of the arena is given by: + * + * thingsPerArena * thingSize + * + * where thingsPerArena is the number of things that the arena can hold: + * + * (GC_ARENA_SIZE - sizeof(JSGCArenaInfo)) / (thingSize + 1). + * + * To allocate doubles we use a specialized arena. It can contain only numbers + * so we do not need the type bits. Moreover, since the doubles do not require + * a finalizer and very few of them are locked via js_LockGCThing API, we use + * just one bit of flags per double to denote if it was marked during the + * marking phase of the GC. The locking is implemented via a hash table. Thus + * for doubles the flag area becomes a bitmap. * * JS_GC_USE_MMAP macros governs the allocation of aligned arenas. When the * macro is true, a platform specific allocation code like POSIX mmap is used * with no extra overhead. If the macro is false, the code uses malloc to * allocate a chunk of + * * GC_ARENA_SIZE * (js_gcArenasPerChunk + 1) + * * bytes. The chunk contains at least js_gcArenasPerChunk aligned arenas so * the overhead of this schema is approximately 1/js_gcArenasPerChunk. See * NewGCChunk/DestroyGCChunk below for details. @@ -169,7 +190,7 @@ struct JSGCArenaInfo { /* - * Allocation list for the arena. + * Allocation list for the arena or NULL if the arena holds double values. */ JSGCArenaList *list; @@ -198,15 +219,14 @@ struct JSGCArenaInfo { */ jsuword arenaIndex : GC_ARENA_SHIFT - 1; - /* - * Flag indicating if the arena is the first in the chunk. - */ + /* Flag indicating if the arena is the first in the chunk. */ jsuword firstArena : 1; - /* - * Bitset for fast search of marked but not yet traced things. 
- */ - jsuword untracedThings; + union { + jsuword untracedThings; /* bitset for fast search of marked + but not yet traced things */ + JSBool hasMarkedDoubles; /* the arena has marked doubles */ + } u; }; /* @@ -339,6 +359,168 @@ static JSBool js_gcUseMmap = JS_FALSE; (JSGCThing *)(((jsuword) (flagp) & ~GC_ARENA_MASK) + \ (thingSize) * FLAGP_TO_INDEX(flagp))) +/* + * Macros for the specialized arena for doubles. + * + * DOUBLES_PER_ARENA defines the maximum number of doubles that the arena can + * hold. We find it as the following. Let n be the number of doubles in the + * arena. Together with the bitmap of flags and JSGCArenaInfo they should fit + * the arena. Hence DOUBLES_PER_ARENA or n_max is the maximum value of n for + * which the following holds: + * + * n*s + ceil(n/B) <= M (1) + * + * where "/" denotes normal real division, + * ceil(r) gives the least integer not smaller than the number r, + * s is thing's size in bytes, + * B is number of bits per byte or JS_BITS_PER_BYTE, + * M is the size of the available space in the arena in bytes or + * M == GC_ARENA_SIZE - sizeof(JSGCArenaInfo) or ARENA_INFO_OFFSET. + * + * We rewrite the inequality as + * + * n*B*s/B + ceil(n/B) <= M, + * ceil(n*B*s/B + n/B) <= M, + * ceil(n*(B*s + 1)/B) <= M (2) + * + * We define a helper function e(n, s, B), + * + * e(n, s, B) := ceil(n*(B*s + 1)/B) - n*(B*s + 1)/B, 0 <= e(n, s, B) < 1. + * + * It gives: + * + * n*(B*s + 1)/B + e(n, s, B) <= M, + * n + e*B/(B*s + 1) <= M*B/(B*s + 1) + * + * We apply the floor function to both sides of the last equation, where + * floor(r) gives the biggest integer not greater than r. As a consequence we + * have: + * + * floor(n + e*B/(B*s + 1)) <= floor(M*B/(B*s + 1)), + * n + floor(e*B/(B*s + 1)) <= floor(M*B/(B*s + 1)), + * n <= floor(M*B/(B*s + 1)), (3) + * + * where floor(e*B/(B*s + 1)) is zero as e*B/(B*s + 1) < B/(B*s + 1) < 1. + * Thus any n that satisfies the original constraint (1) or its equivalent (2), + * must also satisfy (3). 
That is, we got an upper estimate for the maximum + * value of n. Lets show that this upper estimate, + * + * floor(M*B/(B*s + 1)), (4) + * + * also satisfies (1) and, as such, gives the required maximum value. + * Substituting it into (2) gives: + * + * ceil(floor(M*B/(B*s + 1))*(B*s + 1)/B) == ceil(floor(M/X)*X) + * + * where X == (B*s + 1)/B > 1. But then floor(M/X)*X <= M/X*X == M and + * + * ceil(floor(M/X)*X) <= ceil(M) == M. + * + * Thus the value of (4) gives the maximum n satisfying (1). + */ +JS_STATIC_ASSERT(sizeof(JSGCArenaInfo) % sizeof(jsuword) == 0); + +#define DOUBLES_PER_ARENA \ + ((ARENA_INFO_OFFSET * JS_BITS_PER_BYTE) / (JS_BITS_PER_DOUBLE + 1)) + +#define DOUBLES_ARENA_BITMAP_SIZE \ + JS_HOWMANY(DOUBLES_PER_ARENA, JS_BITS_PER_BYTE) + +/* + * Check that DOUBLES_PER_ARENA has the correct value. + */ +JS_STATIC_ASSERT(DOUBLES_PER_ARENA * sizeof(jsdouble) + + DOUBLES_ARENA_BITMAP_SIZE <= + ARENA_INFO_OFFSET); + +JS_STATIC_ASSERT((DOUBLES_PER_ARENA + 1) * sizeof(jsdouble) + + JS_HOWMANY((DOUBLES_PER_ARENA + 1), JS_BITS_PER_BYTE) > + ARENA_INFO_OFFSET); + +/* + * When DOUBLES_PER_ARENA % JS_BITS_PER_BYTE != 0, some bits in the last byte + * of the occupation bitmap are unused. + */ +#define UNUSED_DOUBLE_BITMAP_BITS \ + (DOUBLES_ARENA_BITMAP_SIZE * JS_BITS_PER_BYTE - DOUBLES_PER_ARENA) + +JS_STATIC_ASSERT(UNUSED_DOUBLE_BITMAP_BITS < JS_BITS_PER_BYTE); + +#define DOUBLES_ARENA_BITMAP_OFFSET \ + (ARENA_INFO_OFFSET - DOUBLES_ARENA_BITMAP_SIZE) + +#define CHECK_DOUBLE_ARENA_INFO(arenaInfo) \ + (JS_ASSERT(IS_ARENA_INFO_ADDRESS(arenaInfo)), \ + JS_ASSERT(!(arenaInfo)->list)) \ + +/* + * Get the start of the bitmap area containing double mark flags in the arena. + * To access the flag the code uses + * + * bitmapStart[index / JS_BITS_PER_BYTE] & JS_BIT(index % JS_BITS_PER_BYTE) + * + * That is, compared with the case of arenas with non-double things, we count + * flags from the start of the bitmap area, not from the end. 
+ */ +#define DOUBLE_ARENA_BITMAP(arenaInfo) \ + (CHECK_DOUBLE_ARENA_INFO(arenaInfo), \ + (uint8 *) arenaInfo - DOUBLES_ARENA_BITMAP_SIZE) + +#define DOUBLE_THING_TO_INDEX(thing) \ + (CHECK_DOUBLE_ARENA_INFO(THING_TO_ARENA(thing)), \ + JS_ASSERT(((jsuword) (thing) & GC_ARENA_MASK) < \ + DOUBLES_ARENA_BITMAP_OFFSET), \ + ((uint32) (((jsuword) (thing) & GC_ARENA_MASK) / sizeof(jsdouble)))) + +static void +ClearDoubleArenaFlags(JSGCArenaInfo *a) +{ + uint8 *bitmap; + uintN mask, nused; + + /* + * When some high bits in the last byte of the double occupation bitmap + * are unused, we must set them. Otherwise RefillDoubleFreeList will + * assume that they corresponds to some free cells and tries to allocate + * them. + * + * Note that the code works correctly with UNUSED_DOUBLE_BITMAP_BITS == 0. + */ + bitmap = DOUBLE_ARENA_BITMAP(a); + memset(bitmap, 0, DOUBLES_ARENA_BITMAP_SIZE - 1); + mask = JS_BITMASK(UNUSED_DOUBLE_BITMAP_BITS); + nused = JS_BITS_PER_BYTE - UNUSED_DOUBLE_BITMAP_BITS; + bitmap[DOUBLES_ARENA_BITMAP_SIZE - 1] = (uint8) (mask << nused); +} + +static JS_INLINE JSBool +IsMarkedDouble(JSGCArenaInfo *a, uint32 index) +{ + uint8 *bitmap; + + JS_ASSERT(a->u.hasMarkedDoubles); + bitmap = DOUBLE_ARENA_BITMAP(a) + (index >> JS_BITS_PER_BYTE_LOG2); + return *bitmap & JS_BIT(index & (JS_BITS_PER_BYTE - 1)); +} + +/* + * JSRuntime.gcDoubleArenaList.nextDoubleFlags points either to: + * + * 1. The next byte in the bitmap area for doubles to check for unmarked + * (or free) doubles. + * 2. Or to the end of the bitmap area when all GC cells of the arena are + * allocated. + * 3. Or to a special sentinel value indicating that there are no arenas + * to check for unmarked doubles. + * + * We set the sentinel to (uint8 *) ARENA_INFO_OFFSET so the single check + * + * ((jsuword) nextDoubleFlags & GC_ARENA_MASK) == ARENA_INFO_OFFSET + * + * will cover both the second and the third cases. 
+ */ +#define DOUBLE_BITMAP_SENTINEL ((uint8 *) ARENA_INFO_OFFSET) + #ifdef JS_THREADSAFE /* * The maximum number of things to put on the local free list by taking @@ -624,107 +806,120 @@ NewGCArena(JSRuntime *rt) uint32 i; JSGCArenaInfo *a, *aprev; - if (js_gcArenasPerChunk == 1) { - chunk = NewGCChunk(); - return (chunk == 0) ? NULL : ARENA_START_TO_INFO(chunk); - } + if (rt->gcBytes >= rt->gcMaxBytes) + return NULL; - ci = rt->gcChunkList; - if (!ci) { + if (js_gcArenasPerChunk == 1) { chunk = NewGCChunk(); if (chunk == 0) return NULL; - JS_ASSERT((chunk & GC_ARENA_MASK) == 0); - a = GET_ARENA_INFO(chunk, 0); - a->firstArena = JS_TRUE; - a->arenaIndex = 0; - aprev = NULL; - i = 0; - do { - a->prev = aprev; - aprev = a; - ++i; - a = GET_ARENA_INFO(chunk, i); - a->firstArena = JS_FALSE; - a->arenaIndex = i; - } while (i != js_gcArenasPerChunk - 1); - ci = GET_CHUNK_INFO(chunk, 0); - ci->lastFreeArena = aprev; - ci->numFreeArenas = js_gcArenasPerChunk - 1; - AddChunkToList(rt, ci); + a = ARENA_START_TO_INFO(chunk); } else { - JS_ASSERT(ci->prevp == &rt->gcChunkList); - a = ci->lastFreeArena; - aprev = a->prev; - if (!aprev) { - JS_ASSERT(ci->numFreeArenas == 1); - JS_ASSERT(ARENA_INFO_TO_START(a) == (jsuword) ci); - RemoveChunkFromList(rt, ci); - chunk = GET_ARENA_CHUNK(a, GET_ARENA_INDEX(a)); - SET_CHUNK_INFO_INDEX(chunk, NO_FREE_ARENAS); - } else { - JS_ASSERT(ci->numFreeArenas >= 2); - JS_ASSERT(ARENA_INFO_TO_START(a) != (jsuword) ci); + ci = rt->gcChunkList; + if (!ci) { + chunk = NewGCChunk(); + if (chunk == 0) + return NULL; + JS_ASSERT((chunk & GC_ARENA_MASK) == 0); + a = GET_ARENA_INFO(chunk, 0); + a->firstArena = JS_TRUE; + a->arenaIndex = 0; + aprev = NULL; + i = 0; + do { + a->prev = aprev; + aprev = a; + ++i; + a = GET_ARENA_INFO(chunk, i); + a->firstArena = JS_FALSE; + a->arenaIndex = i; + } while (i != js_gcArenasPerChunk - 1); + ci = GET_CHUNK_INFO(chunk, 0); ci->lastFreeArena = aprev; - ci->numFreeArenas--; + ci->numFreeArenas = js_gcArenasPerChunk 
- 1; + AddChunkToList(rt, ci); + } else { + JS_ASSERT(ci->prevp == &rt->gcChunkList); + a = ci->lastFreeArena; + aprev = a->prev; + if (!aprev) { + JS_ASSERT(ci->numFreeArenas == 1); + JS_ASSERT(ARENA_INFO_TO_START(a) == (jsuword) ci); + RemoveChunkFromList(rt, ci); + chunk = GET_ARENA_CHUNK(a, GET_ARENA_INDEX(a)); + SET_CHUNK_INFO_INDEX(chunk, NO_FREE_ARENAS); + } else { + JS_ASSERT(ci->numFreeArenas >= 2); + JS_ASSERT(ARENA_INFO_TO_START(a) != (jsuword) ci); + ci->lastFreeArena = aprev; + ci->numFreeArenas--; + } } } + rt->gcBytes += GC_ARENA_SIZE; + a->prevUntracedPage = 0; + memset(&a->u, 0, sizeof(a->u)); + return a; } static void -DestroyGCArena(JSRuntime *rt, JSGCArenaInfo *a) +DestroyGCArenas(JSRuntime *rt, JSGCArenaInfo *last) { + JSGCArenaInfo *a; uint32 arenaIndex; jsuword chunk; uint32 chunkInfoIndex; JSGCChunkInfo *ci; - METER(rt->gcStats.afree++); + while (last) { + a = last; + last = last->prev; - if (js_gcArenasPerChunk == 1) { - DestroyGCChunk(ARENA_INFO_TO_START(a)); - return; - } + METER(rt->gcStats.afree++); + JS_ASSERT(rt->gcBytes >= GC_ARENA_SIZE); + rt->gcBytes -= GC_ARENA_SIZE; -#ifdef DEBUG - { - jsuword firstArena, arenaIndex; - - firstArena = a->firstArena; - arenaIndex = a->arenaIndex; - memset((void *) ARENA_INFO_TO_START(a), JS_FREE_PATTERN, - GC_ARENA_SIZE); - a->firstArena = firstArena; - a->arenaIndex = arenaIndex; - } -#endif - - arenaIndex = GET_ARENA_INDEX(a); - chunk = GET_ARENA_CHUNK(a, arenaIndex); - chunkInfoIndex = GET_CHUNK_INFO_INDEX(chunk); - if (chunkInfoIndex == NO_FREE_ARENAS) { - chunkInfoIndex = arenaIndex; - SET_CHUNK_INFO_INDEX(chunk, arenaIndex); - ci = GET_CHUNK_INFO(chunk, chunkInfoIndex); - a->prev = NULL; - ci->lastFreeArena = a; - ci->numFreeArenas = 1; - AddChunkToList(rt, ci); - } else { - JS_ASSERT(chunkInfoIndex != arenaIndex); - ci = GET_CHUNK_INFO(chunk, chunkInfoIndex); - JS_ASSERT(ci->numFreeArenas != 0); - JS_ASSERT(ci->lastFreeArena); - JS_ASSERT(a != ci->lastFreeArena); - if (ci->numFreeArenas == 
js_gcArenasPerChunk - 1) { - RemoveChunkFromList(rt, ci); - DestroyGCChunk(chunk); + if (js_gcArenasPerChunk == 1) { + DestroyGCChunk(ARENA_INFO_TO_START(a)); } else { - ++ci->numFreeArenas; - a->prev = ci->lastFreeArena; - ci->lastFreeArena = a; +#ifdef DEBUG + jsuword firstArena; + + firstArena = a->firstArena; + arenaIndex = a->arenaIndex; + memset((void *) ARENA_INFO_TO_START(a), JS_FREE_PATTERN, + GC_ARENA_SIZE); + a->firstArena = firstArena; + a->arenaIndex = arenaIndex; +#endif + arenaIndex = GET_ARENA_INDEX(a); + chunk = GET_ARENA_CHUNK(a, arenaIndex); + chunkInfoIndex = GET_CHUNK_INFO_INDEX(chunk); + if (chunkInfoIndex == NO_FREE_ARENAS) { + chunkInfoIndex = arenaIndex; + SET_CHUNK_INFO_INDEX(chunk, arenaIndex); + ci = GET_CHUNK_INFO(chunk, chunkInfoIndex); + a->prev = NULL; + ci->lastFreeArena = a; + ci->numFreeArenas = 1; + AddChunkToList(rt, ci); + } else { + JS_ASSERT(chunkInfoIndex != arenaIndex); + ci = GET_CHUNK_INFO(chunk, chunkInfoIndex); + JS_ASSERT(ci->numFreeArenas != 0); + JS_ASSERT(ci->lastFreeArena); + JS_ASSERT(a != ci->lastFreeArena); + if (ci->numFreeArenas == js_gcArenasPerChunk - 1) { + RemoveChunkFromList(rt, ci); + DestroyGCChunk(chunk); + } else { + ++ci->numFreeArenas; + a->prev = ci->lastFreeArena; + ci->lastFreeArena = a; + } + } } } } @@ -744,6 +939,8 @@ InitGCArenaLists(JSRuntime *rt) arenaList->thingSize = (uint16)thingSize; arenaList->freeList = NULL; } + rt->gcDoubleArenaList.first = NULL; + rt->gcDoubleArenaList.nextDoubleFlags = DOUBLE_BITMAP_SENTINEL; } static void @@ -751,24 +948,25 @@ FinishGCArenaLists(JSRuntime *rt) { uintN i; JSGCArenaList *arenaList; - JSGCArenaInfo *a, *aprev; for (i = 0; i < GC_NUM_FREELISTS; i++) { arenaList = &rt->gcArenaList[i]; - - for (a = arenaList->last; a; a = aprev) { - aprev = a->prev; - DestroyGCArena(rt, a); - } + DestroyGCArenas(rt, arenaList->last); arenaList->last = NULL; arenaList->lastCount = THINGS_PER_ARENA(arenaList->thingSize); arenaList->freeList = NULL; - 
METER(rt->gcStats.arenas[i].narenas = 0); } + DestroyGCArenas(rt, rt->gcDoubleArenaList.first); + rt->gcDoubleArenaList.first = NULL; + rt->gcDoubleArenaList.nextDoubleFlags = DOUBLE_BITMAP_SENTINEL; + rt->gcBytes = 0; JS_ASSERT(rt->gcChunkList == 0); } +/* + * This function must not be called when thing is jsdouble. + */ static uint8 * GetGCThingFlags(void *thing) { @@ -780,6 +978,22 @@ GetGCThingFlags(void *thing) return THING_FLAGP(a, index); } +/* + * This function returns null when thing is jsdouble. + */ +static uint8 * +GetGCThingFlagsOrNull(void *thing) +{ + JSGCArenaInfo *a; + uint32 index; + + a = THING_TO_ARENA(thing); + if (!a->list) + return NULL; + index = THING_TO_INDEX(thing, a->list->thingSize); + return THING_FLAGP(a, index); +} + intN js_GetExternalStringGCType(JSString *str) { @@ -796,6 +1010,7 @@ MapGCFlagsToTraceKind(uintN flags) uint32 type; type = flags & GCF_TYPEMASK; + JS_ASSERT(type != GCX_DOUBLE); JS_ASSERT(type < GCX_NTYPES); return (type < GCX_EXTERNAL_STRING) ? type : JSTRACE_STRING; } @@ -803,7 +1018,15 @@ MapGCFlagsToTraceKind(uintN flags) JS_FRIEND_API(uint32) js_GetGCThingTraceKind(void *thing) { - return MapGCFlagsToTraceKind(*GetGCThingFlags(thing)); + JSGCArenaInfo *a; + uint32 index; + + a = THING_TO_ARENA(thing); + if (!a->list) + return JSTRACE_DOUBLE; + + index = THING_TO_INDEX(thing, a->list->thingSize); + return MapGCFlagsToTraceKind(*THING_FLAGP(a, index)); } JSRuntime* @@ -822,8 +1045,23 @@ js_GetGCStringRuntime(JSString *str) JSBool js_IsAboutToBeFinalized(JSContext *cx, void *thing) { - uint8 flags = *GetGCThingFlags(thing); + JSGCArenaInfo *a; + uint32 index, flags; + a = THING_TO_ARENA(thing); + if (!a->list) { + /* + * Check if arena has no marked doubles. In that case the bitmap with + * the mark flags contains all garbage as it is initialized only when + * marking the first double in the arena. 
+ */ + if (!a->u.hasMarkedDoubles) + return JS_TRUE; + index = DOUBLE_THING_TO_INDEX(thing); + return !IsMarkedDouble(a, index); + } + index = THING_TO_INDEX(thing, a->list->thingSize); + flags = *THING_FLAGP(a, index); return !(flags & (GCF_MARK | GCF_LOCK | GCF_FINAL)); } @@ -909,85 +1147,122 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes) } #ifdef JS_GCMETER + +static void +UpdateArenaStats(JSGCArenaStats *st, uint32 nlivearenas, uint32 nkilledArenas, + uint32 nthings) +{ + size_t narenas; + + narenas = nlivearenas + nkilledArenas; + JS_ASSERT(narenas >= st->livearenas); + + st->newarenas = narenas - st->livearenas; + st->narenas = narenas; + st->livearenas = nlivearenas; + if (st->maxarenas < narenas) + st->maxarenas = narenas; + st->totalarenas += narenas; + + st->nthings = nthings; + if (st->maxthings < nthings) + st->maxthings = nthings; + st->totalthings += nthings; +} + JS_FRIEND_API(void) js_DumpGCStats(JSRuntime *rt, FILE *fp) { - uintN i; - size_t thingsPerArena; - size_t totalThings, totalMaxThings, totalBytes; + int i; size_t sumArenas, sumTotalArenas; - size_t sumFreeSize, sumTotalFreeSize; - JSGCArenaList *list; - JSGCArenaStats *stats; + size_t sumThings, sumMaxThings; + size_t sumThingSize, sumTotalThingSize; + size_t sumArenaCapacity, sumTotalArenaCapacity; + JSGCArenaStats *st; + size_t thingSize, thingsPerArena; + size_t sumAlloc, sumLocalAlloc, sumFail, sumRetry; fprintf(fp, "\nGC allocation statistics:\n"); #define UL(x) ((unsigned long)(x)) #define ULSTAT(x) UL(rt->gcStats.x) - totalThings = 0; +#define PERCENT(x,y) (100.0 * (double) (x) / (double) (y)) - totalMaxThings = 0; - totalBytes = 0; sumArenas = 0; sumTotalArenas = 0; - sumFreeSize = 0; - sumTotalFreeSize = 0; - for (i = 0; i < GC_NUM_FREELISTS; i++) { - list = &rt->gcArenaList[i]; - stats = &rt->gcStats.arenas[i]; - if (stats->maxarenas == 0) { - fprintf(fp, "ARENA LIST %u (thing size %lu): NEVER USED\n", - i, UL(GC_FREELIST_NBYTES(i))); + sumThings = 0; + sumMaxThings = 0; + 
sumThingSize = 0; + sumTotalThingSize = 0; + sumArenaCapacity = 0; + sumTotalArenaCapacity = 0; + sumAlloc = 0; + sumLocalAlloc = 0; + sumFail = 0; + sumRetry = 0; + for (i = -1; i < (int) GC_NUM_FREELISTS; i++) { + if (i == -1) { + thingSize = sizeof(jsdouble); + thingsPerArena = DOUBLES_PER_ARENA; + st = &rt->gcStats.doubleArenaStats; + fprintf(fp, + "Arena list for double values (%lu doubles per arena):", + UL(thingsPerArena)); + } else { + thingSize = rt->gcArenaList[i].thingSize; + thingsPerArena = THINGS_PER_ARENA(thingSize); + st = &rt->gcStats.arenaStats[i]; + fprintf(fp, + "Arena list %d (thing size %lu, %lu things per arena):", + i, UL(GC_FREELIST_NBYTES(i)), UL(thingsPerArena)); + } + if (st->maxarenas == 0) { + fputs(" NEVER USED\n", fp); continue; } - thingsPerArena = THINGS_PER_ARENA(list->thingSize); - fprintf(fp, "ARENA LIST %u (thing size %lu):\n", - i, UL(GC_FREELIST_NBYTES(i))); - fprintf(fp, " arenas: %lu\n", UL(stats->narenas)); - fprintf(fp, " max arenas: %lu\n", UL(stats->maxarenas)); - fprintf(fp, " things: %lu\n", UL(stats->nthings)); - fprintf(fp, " max things: %lu\n", UL(stats->maxthings)); - fprintf(fp, " free list: %lu\n", UL(stats->freelen)); - fprintf(fp, " free list density: %.1f%%\n", - stats->narenas == 0 - ? 0.0 - : 100.0 * stats->freelen / (thingsPerArena * stats->narenas)); - fprintf(fp, " average free list density: %.1f%%\n", - stats->totalarenas == 0 - ? 
0.0 - : 100.0 * stats->totalfreelen / - (thingsPerArena * stats->totalarenas)); - fprintf(fp, " recycles: %lu\n", UL(stats->recycle)); - fprintf(fp, " recycle/alloc ratio: %.2f\n", - (double) stats->recycle / (stats->totalnew - stats->recycle)); - totalThings += stats->nthings; - totalMaxThings += stats->maxthings; - totalBytes += list->thingSize * stats->nthings; - sumArenas += stats->narenas; - sumTotalArenas += stats->totalarenas; - sumFreeSize += list->thingSize * stats->freelen; - sumTotalFreeSize += list->thingSize * stats->totalfreelen; + putc('\n', fp); + fprintf(fp, " arenas before GC: %lu\n", UL(st->narenas)); + fprintf(fp, " new arenas before GC: %lu (%.1f%%)\n", + UL(st->newarenas), PERCENT(st->newarenas, st->narenas)); + fprintf(fp, " arenas after GC: %lu (%.1f%%)\n", + UL(st->livearenas), PERCENT(st->livearenas, st->narenas)); + fprintf(fp, " max arenas: %lu\n", UL(st->maxarenas)); + fprintf(fp, " things: %lu\n", UL(st->nthings)); + fprintf(fp, " GC cell utilization: %.1f%%\n", + PERCENT(st->nthings, thingsPerArena * st->narenas)); + fprintf(fp, " average cell utilization: %.1f%%\n", + PERCENT(st->totalthings, thingsPerArena * st->totalarenas)); + fprintf(fp, " max things: %lu\n", UL(st->maxthings)); + fprintf(fp, " alloc attempts: %lu\n", UL(st->alloc)); + fprintf(fp, " alloc without locks: %1u (%.1f%%)\n", + UL(st->localalloc), PERCENT(st->localalloc, st->alloc)); + sumArenas += st->narenas; + sumTotalArenas += st->totalarenas; + sumThings += st->nthings; + sumMaxThings += st->maxthings; + sumThingSize += thingSize * st->nthings; + sumTotalThingSize += thingSize * st->totalthings; + sumArenaCapacity += thingSize * thingsPerArena * st->narenas; + sumTotalArenaCapacity += thingSize * thingsPerArena * st->totalarenas; + sumAlloc += st->alloc; + sumLocalAlloc += st->localalloc; + sumFail += st->fail; + sumRetry += st->retry; } fprintf(fp, "TOTAL STATS:\n"); fprintf(fp, " bytes allocated: %lu\n", UL(rt->gcBytes)); - fprintf(fp, " alloc attempts: %lu\n", 
ULSTAT(alloc)); -#ifdef JS_THREADSAFE - fprintf(fp, " alloc without locks: %1u\n", ULSTAT(localalloc)); -#endif - fprintf(fp, " total GC things: %lu\n", UL(totalThings)); - fprintf(fp, " max total GC things: %lu\n", UL(totalMaxThings)); - fprintf(fp, " GC things size: %lu\n", UL(totalBytes)); fprintf(fp, " total GC arenas: %lu\n", UL(sumArenas)); - fprintf(fp, " total free list density: %.1f%%\n", - sumArenas == 0 - ? 0.0 - : 100.0 * sumFreeSize / (sumArenas << GC_ARENA_SHIFT)); - fprintf(fp, " average free list density: %.1f%%\n", - sumTotalFreeSize == 0 - ? 0.0 - : 100.0 * sumTotalFreeSize / (sumTotalArenas << GC_ARENA_SHIFT)); - fprintf(fp, "allocation retries after GC: %lu\n", ULSTAT(retry)); - fprintf(fp, " allocation failures: %lu\n", ULSTAT(fail)); + fprintf(fp, " total GC things: %lu\n", UL(sumThings)); + fprintf(fp, " max total GC things: %lu\n", UL(sumMaxThings)); + fprintf(fp, " GC cell utilization: %.1f%%\n", + PERCENT(sumThingSize, sumArenaCapacity)); + fprintf(fp, " average cell utilization: %.1f%%\n", + PERCENT(sumTotalThingSize, sumTotalArenaCapacity)); + fprintf(fp, "allocation retries after GC: %lu\n", UL(sumRetry)); + fprintf(fp, " alloc attempts: %lu\n", UL(sumAlloc)); + fprintf(fp, " alloc without locks: %1u (%.1f%%)\n", + UL(sumLocalAlloc), PERCENT(sumLocalAlloc, sumAlloc)); + fprintf(fp, " allocation failures: %lu\n", UL(sumFail)); fprintf(fp, " things born locked: %lu\n", ULSTAT(lockborn)); fprintf(fp, " valid lock calls: %lu\n", ULSTAT(lock)); fprintf(fp, " valid unlock calls: %lu\n", ULSTAT(unlock)); @@ -1008,8 +1283,10 @@ js_DumpGCStats(JSRuntime *rt, FILE *fp) fprintf(fp, " max reachable closeable: %lu\n", ULSTAT(maxnclose)); fprintf(fp, " scheduled close hooks: %lu\n", ULSTAT(closelater)); fprintf(fp, " max scheduled close hooks: %lu\n", ULSTAT(maxcloselater)); + #undef UL -#undef US +#undef ULSTAT +#undef PERCENT #ifdef JS_ARENAMETER JS_DumpArenaStats(fp); @@ -1149,13 +1426,14 @@ CheckLeakedRoots(JSRuntime *rt) fprintf(stderr, "JS 
engine warning: 1 GC root remains after destroying the JSRuntime at %p.\n" " This root may point to freed memory. Objects reachable\n" -" through it have not been finalized.\n", rt); +" through it have not been finalized.\n", + (void *) rt); } else { fprintf(stderr, "JS engine warning: %lu GC roots remain after destroying the JSRuntime at %p.\n" " These roots may point to freed memory. Objects reachable\n" " through them have not been finalized.\n", - (unsigned long) leakedroots, rt); + (unsigned long) leakedroots, (void *) rt); } } } @@ -1298,12 +1576,12 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) JSGCThing *thing; uint8 *flagp; JSGCArenaList *arenaList; -#ifdef JS_GCMETER - JSGCArenaStats *listStats; -#endif JSGCArenaInfo *a; uintN thingsLimit; JSLocalRootStack *lrs; +#ifdef JS_GCMETER + JSGCArenaStats *astats; +#endif #ifdef JS_THREADSAFE JSBool gcLocked; uintN localMallocBytes; @@ -1313,11 +1591,14 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) uintN maxFreeThings; /* max to take from the global free list */ #endif + JS_ASSERT((flags & GCF_TYPEMASK) != GCX_DOUBLE); rt = cx->runtime; - METER(rt->gcStats.alloc++); /* this is not thread-safe */ nbytes = JS_ROUNDUP(nbytes, sizeof(JSGCThing)); flindex = GC_FREELIST_INDEX(nbytes); - METER(listStats = &rt->gcStats.arenas[flindex]); + + /* Updates of metering counters here may not be thread-safe. */ + METER(astats = &cx->runtime->gcStats.arenaStats[flindex]); + METER(astats->alloc++); #ifdef JS_THREADSAFE gcLocked = JS_FALSE; @@ -1329,7 +1610,7 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) if (thing && rt->gcMaxMallocBytes - rt->gcMallocBytes > localMallocBytes) { flagp = thing->flagp; flbase[flindex] = thing->next; - METER(rt->gcStats.localalloc++); /* this is not thread-safe */ + METER(astats->localalloc++); goto success; } @@ -1369,7 +1650,7 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) * see bug 162779 at https://bugzilla.mozilla.org/. 
*/ js_GC(cx, GC_LAST_DITCH); - METER(rt->gcStats.retry++); + METER(astats->retry++); } /* Try to get thing from the free list. */ @@ -1378,8 +1659,6 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) arenaList->freeList = thing->next; flagp = thing->flagp; JS_ASSERT(*flagp & GCF_FINAL); - METER(listStats->freelen--); - METER(listStats->recycle++); #ifdef JS_THREADSAFE /* @@ -1419,21 +1698,17 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) JS_ASSERT(arenaList->lastCount < thingsLimit); a = arenaList->last; } else { - if (rt->gcBytes >= rt->gcMaxBytes || !(a = NewGCArena(rt))) { + a = NewGCArena(rt); + if (!a) { if (doGC) goto fail; doGC = JS_TRUE; continue; } - - rt->gcBytes += GC_ARENA_SIZE; - METER(listStats->narenas++); - METER_UPDATE_MAX(listStats->maxarenas, listStats->narenas); - a->list = arenaList; a->prev = arenaList->last; a->prevUntracedPage = 0; - a->untracedThings = 0; + a->u.untracedThings = 0; arenaList->last = a; arenaList->lastCount = 0; } @@ -1454,7 +1729,6 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) maxFreeThings = thingsLimit - arenaList->lastCount; if (maxFreeThings > MAX_THREAD_LOCAL_THINGS) maxFreeThings = MAX_THREAD_LOCAL_THINGS; - METER(listStats->freelen += maxFreeThings); while (maxFreeThings != 0) { --maxFreeThings; @@ -1515,9 +1789,6 @@ js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes) /* This is not thread-safe for thread-local allocations. 
*/ METER_IF(flags & GCF_LOCK, rt->gcStats.lockborn++); - METER(listStats->totalnew++); - METER(listStats->nthings++); - METER_UPDATE_MAX(listStats->maxthings, listStats->nthings); #ifdef JS_THREADSAFE if (gcLocked) @@ -1531,107 +1802,266 @@ fail: if (gcLocked) JS_UNLOCK_GC(rt); #endif - METER(rt->gcStats.fail++); + METER(astats->fail++); JS_ReportOutOfMemory(cx); return NULL; } -JSBool -js_LockGCThing(JSContext *cx, void *thing) +static JSGCDoubleCell * +RefillDoubleFreeList(JSContext *cx) { - JSBool ok = js_LockGCThingRT(cx->runtime, thing); - if (!ok) - JS_ReportOutOfMemory(cx); - return ok; + JSRuntime *rt; + uint8 *doubleFlags; + JSBool doGC; + JSGCArenaInfo *a; + uintN usedBits, bit, index; + JSGCDoubleCell *cell, *list, *lastcell; + + JS_ASSERT(!cx->doubleFreeList); + + rt = cx->runtime; + JS_LOCK_GC(rt); + + JS_ASSERT(!rt->gcRunning); + if (rt->gcRunning) { + METER(rt->gcStats.finalfail++); + JS_UNLOCK_GC(rt); + return NULL; + } + + doGC = rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke; +#ifdef JS_GC_ZEAL + doGC = doGC || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke); +#endif + if (doGC) + goto do_gc; + + /* + * Loop until we find a flag bitmap byte with unset bits indicating free + * double cells, then set all bits as used and put the cells to the free + * list for the current context. 
+ */ + doubleFlags = rt->gcDoubleArenaList.nextDoubleFlags; + for (;;) { + if (((jsuword) doubleFlags & GC_ARENA_MASK) == + ARENA_INFO_OFFSET) { + if (doubleFlags == DOUBLE_BITMAP_SENTINEL || + !((JSGCArenaInfo *) doubleFlags)->prev) { + a = NewGCArena(rt); + if (!a) { + if (doGC) { + METER(rt->gcStats.doubleArenaStats.fail++); + JS_UNLOCK_GC(rt); + JS_ReportOutOfMemory(cx); + return NULL; + } + doGC = JS_TRUE; + do_gc: + js_GC(cx, GC_LAST_DITCH); + METER(rt->gcStats.doubleArenaStats.retry++); + doubleFlags = rt->gcDoubleArenaList.nextDoubleFlags; + continue; + } + a->list = NULL; + a->prev = NULL; + if (doubleFlags == DOUBLE_BITMAP_SENTINEL) { + JS_ASSERT(!rt->gcDoubleArenaList.first); + rt->gcDoubleArenaList.first = a; + } else { + JS_ASSERT(rt->gcDoubleArenaList.first); + ((JSGCArenaInfo *) doubleFlags)->prev = a; + } + ClearDoubleArenaFlags(a); + doubleFlags = DOUBLE_ARENA_BITMAP(a); + break; + } + doubleFlags = + DOUBLE_ARENA_BITMAP(((JSGCArenaInfo *) doubleFlags)->prev); + } + + /* + * When doubleFlags points the last bitmap's byte in the arena, its + * high bits corresponds to non-existing cells. ClearDoubleArenaFlags + * sets such bits to 1. Thus even for this last byte *doubleFlags its + * bits is unset only when the corresponding cell exists and free. + */ + if (*doubleFlags != JS_BITMASK(JS_BITS_PER_BYTE)) + break; + ++doubleFlags; + } + + rt->gcDoubleArenaList.nextDoubleFlags = doubleFlags + 1; + usedBits = *doubleFlags; + JS_ASSERT(usedBits != JS_BITMASK(JS_BITS_PER_BYTE)); + *doubleFlags = (uint8) JS_BITMASK(JS_BITS_PER_BYTE); + JS_UNLOCK_GC(rt); + + /* + * Find the index corresponding to the first bit in *doubleFlags. The last + * bit will have "index + JS_BITS_PER_BYTE - 1". 
+ */ + index = ((uintN) ((jsuword) doubleFlags & GC_ARENA_MASK) - + DOUBLES_ARENA_BITMAP_OFFSET) * JS_BITS_PER_BYTE; + cell = (JSGCDoubleCell *) ((jsuword) doubleFlags & ~GC_ARENA_MASK) + index; + + if (usedBits == 0) { + /* The common case when all doubles from *doubleFlags are free. */ + JS_ASSERT(index + JS_BITS_PER_BYTE <= DOUBLES_PER_ARENA); + list = cell; + for (lastcell = cell + JS_BITS_PER_BYTE - 1; cell != lastcell; ++cell) + cell->link = cell + 1; + lastcell->link = NULL; + } else { + /* + * Assemble the free list from free cells from *doubleFlags starting + * from the tail. In the loop + * + * index + bit >= DOUBLES_PER_ARENA + * + * when bit is one of the unused bits. We do not check for such bits + * explicitly as they must be set and the "if" check filters them out. + */ + JS_ASSERT(index + JS_BITS_PER_BYTE <= + DOUBLES_PER_ARENA + UNUSED_DOUBLE_BITMAP_BITS); + bit = JS_BITS_PER_BYTE; + cell += bit; + list = NULL; + do { + --bit; + --cell; + if (!(JS_BIT(bit) & usedBits)) { + JS_ASSERT(index + bit < DOUBLES_PER_ARENA); + JS_ASSERT_IF(index + bit == DOUBLES_PER_ARENA - 1, !list); + cell->link = list; + list = cell; + } + } while (bit != 0); + } + JS_ASSERT(list); + JS_COUNT_OPERATION(cx, JSOW_ALLOCATION * JS_BITS_PER_BYTE); + + /* + * We delegate assigning cx->doubleFreeList to js_NewDoubleGCThing as + * it immediately consumes the head of the list. + */ + return list; +} + +jsdouble * +js_NewDoubleGCThing(JSContext *cx) +{ + JSGCDoubleCell *cell; + jsdouble *dp; +#ifdef JS_GCMETER + JSGCArenaStats *astats; +#endif + + /* Updates of metering counters here are not thread-safe. 
*/ + METER(astats = &cx->runtime->gcStats.doubleArenaStats); + METER(astats->alloc++); + cell = cx->doubleFreeList; + if (!cell) { + cell = RefillDoubleFreeList(cx); + if (!cell) { + METER(astats->fail++); + return NULL; + } + } else { + METER(astats->localalloc++); + } + cx->doubleFreeList = cell->link; + dp = &cell->number; + + if (cx->localRootStack) { + /* + * If we're in a local root scope, don't set newborn[type] at all, to + * avoid entraining garbage from it for an unbounded amount of time + * on this context. A caller will leave the local root scope and pop + * this reference, allowing thing to be GC'd if it has no other refs. + * See JS_EnterLocalRootScope and related APIs. + */ + if (js_PushLocalRoot(cx, cx->localRootStack, DOUBLE_TO_JSVAL(dp)) < 0) + return NULL; + } else { + /* + * No local root scope, so we're stuck with the old, fragile model of + * depending on a pigeon-hole newborn per type per context. + */ + cx->weakRoots.newborn[GCX_DOUBLE] = dp; + } + return dp; } /* - * Deep GC-things can't be locked just by setting the GCF_LOCK bit, because - * their descendants must be marked by the GC. To find them during the mark - * phase, they are added to rt->gcLocksHash, which is created lazily. - * - * NB: we depend on the order of GC-thing type indexes here! + * Shallow GC-things can be locked just by setting the GCF_LOCK bit, because + * they have no descendants to mark during the GC. Currently the optimization + * is only used for non-dependant strings. 
*/ -#define GC_TYPE_IS_STRING(t) ((t) == GCX_STRING || \ - (t) >= GCX_EXTERNAL_STRING) -#define GC_TYPE_IS_XML(t) ((unsigned)((t) - GCX_NAMESPACE) <= \ - (unsigned)(GCX_XML - GCX_NAMESPACE)) -#define GC_TYPE_IS_DEEP(t) ((t) == GCX_OBJECT || GC_TYPE_IS_XML(t)) - -#define IS_DEEP_STRING(t,o) (GC_TYPE_IS_STRING(t) && \ - JSSTRING_IS_DEPENDENT((JSString *)(o))) - -#define GC_THING_IS_DEEP(t,o) (GC_TYPE_IS_DEEP(t) || IS_DEEP_STRING(t, o)) +#define GC_THING_IS_SHALLOW(flagp, thing) \ + ((flagp) && \ + ((*(flagp) & GCF_TYPEMASK) >= GCX_EXTERNAL_STRING || \ + ((*(flagp) & GCF_TYPEMASK) == GCX_STRING && \ + !JSSTRING_IS_DEPENDENT((JSString *) (thing))))) /* This is compatible with JSDHashEntryStub. */ typedef struct JSGCLockHashEntry { JSDHashEntryHdr hdr; - const JSGCThing *thing; + const void *thing; uint32 count; } JSGCLockHashEntry; JSBool js_LockGCThingRT(JSRuntime *rt, void *thing) { - JSBool ok, deep; + JSBool shallow, ok; uint8 *flagp; - uintN flags, lock, type; JSGCLockHashEntry *lhe; - ok = JS_TRUE; if (!thing) - return ok; - - flagp = GetGCThingFlags(thing); + return JS_TRUE; + flagp = GetGCThingFlagsOrNull(thing); JS_LOCK_GC(rt); - flags = *flagp; - lock = (flags & GCF_LOCK); - type = (flags & GCF_TYPEMASK); - deep = GC_THING_IS_DEEP(type, thing); + shallow = GC_THING_IS_SHALLOW(flagp, thing); /* * Avoid adding a rt->gcLocksHash entry for shallow things until someone - * nests a lock -- then start such an entry with a count of 2, not 1. + * nests a lock. 
*/ - if (lock || deep) { - if (!rt->gcLocksHash) { - rt->gcLocksHash = - JS_NewDHashTable(JS_DHashGetStubOps(), NULL, - sizeof(JSGCLockHashEntry), - GC_ROOTS_SIZE); - if (!rt->gcLocksHash) { - ok = JS_FALSE; - goto done; - } - } else if (lock == 0) { -#ifdef DEBUG - JSDHashEntryHdr *hdr = - JS_DHashTableOperate(rt->gcLocksHash, thing, - JS_DHASH_LOOKUP); - JS_ASSERT(JS_DHASH_ENTRY_IS_FREE(hdr)); -#endif - } + if (shallow && !(*flagp & GCF_LOCK)) { + *flagp |= GCF_LOCK; + METER(rt->gcStats.lock++); + ok = JS_TRUE; + goto out; + } - lhe = (JSGCLockHashEntry *) - JS_DHashTableOperate(rt->gcLocksHash, thing, JS_DHASH_ADD); - if (!lhe) { + if (!rt->gcLocksHash) { + rt->gcLocksHash = JS_NewDHashTable(JS_DHashGetStubOps(), NULL, + sizeof(JSGCLockHashEntry), + GC_ROOTS_SIZE); + if (!rt->gcLocksHash) { ok = JS_FALSE; - goto done; - } - if (!lhe->thing) { - lhe->thing = (JSGCThing *) thing; - lhe->count = deep ? 1 : 2; - } else { - JS_ASSERT(lhe->count >= 1); - lhe->count++; + goto out; } } - *flagp = (uint8)(flags | GCF_LOCK); + lhe = (JSGCLockHashEntry *) + JS_DHashTableOperate(rt->gcLocksHash, thing, JS_DHASH_ADD); + if (!lhe) { + ok = JS_FALSE; + goto out; + } + if (!lhe->thing) { + lhe->thing = thing; + lhe->count = 1; + } else { + JS_ASSERT(lhe->count >= 1); + lhe->count++; + } + METER(rt->gcStats.lock++); ok = JS_TRUE; -done: + out: JS_UNLOCK_GC(rt); return ok; } @@ -1639,36 +2069,38 @@ done: JSBool js_UnlockGCThingRT(JSRuntime *rt, void *thing) { - uint8 *flagp, flags; + uint8 *flagp; + JSBool shallow; JSGCLockHashEntry *lhe; if (!thing) return JS_TRUE; - flagp = GetGCThingFlags(thing); + flagp = GetGCThingFlagsOrNull(thing); JS_LOCK_GC(rt); - flags = *flagp; + shallow = GC_THING_IS_SHALLOW(flagp, thing); - if (flags & GCF_LOCK) { - if (!rt->gcLocksHash || - (lhe = (JSGCLockHashEntry *) - JS_DHashTableOperate(rt->gcLocksHash, thing, - JS_DHASH_LOOKUP), + if (shallow && !(*flagp & GCF_LOCK)) + goto out; + if (!rt->gcLocksHash || + (lhe = (JSGCLockHashEntry *) + 
JS_DHashTableOperate(rt->gcLocksHash, thing, + JS_DHASH_LOOKUP), JS_DHASH_ENTRY_IS_FREE(&lhe->hdr))) { - /* Shallow GC-thing with an implicit lock count of 1. */ - JS_ASSERT(!GC_THING_IS_DEEP(flags & GCF_TYPEMASK, thing)); - } else { - /* Basis or nested unlock of a deep thing, or nested of shallow. */ - if (--lhe->count != 0) - goto out; - JS_DHashTableOperate(rt->gcLocksHash, thing, JS_DHASH_REMOVE); - } - *flagp = (uint8)(flags & ~GCF_LOCK); + /* Shallow entry is not in the hash -> clear its lock bit. */ + if (shallow) + *flagp &= ~GCF_LOCK; + else + goto out; + } else { + if (--lhe->count != 0) + goto out; + JS_DHashTableOperate(rt->gcLocksHash, thing, JS_DHASH_REMOVE); } rt->gcPoke = JS_TRUE; -out: METER(rt->gcStats.unlock++); + out: JS_UNLOCK_GC(rt); return JS_TRUE; } @@ -1729,7 +2161,7 @@ JS_TraceChildren(JSTracer *trc, void *thing, uint32 kind) } /* - * Number of things covered by a single bit of JSGCArenaInfo.untracedThings. + * Number of things covered by a single bit of JSGCArenaInfo.u.untracedThings. */ #define THINGS_PER_UNTRACED_BIT(thingSize) \ JS_HOWMANY(THINGS_PER_ARENA(thingSize), JS_BITS_PER_WORD) @@ -1759,13 +2191,13 @@ DelayTracingChildren(JSRuntime *rt, uint8 *flagp) THINGS_PER_UNTRACED_BIT(a->list->thingSize); JS_ASSERT(untracedBitIndex < JS_BITS_PER_WORD); bit = (jsuword)1 << untracedBitIndex; - if (a->untracedThings != 0) { + if (a->u.untracedThings != 0) { JS_ASSERT(rt->gcUntracedArenaStackTop); - if (a->untracedThings & bit) { + if (a->u.untracedThings & bit) { /* bit already covers things with children to trace later. */ return; } - a->untracedThings |= bit; + a->u.untracedThings |= bit; } else { /* * The thing is the first thing with not yet traced children in the @@ -1779,7 +2211,7 @@ DelayTracingChildren(JSRuntime *rt, uint8 *flagp) * * See comments in TraceDelayedChildren. 
*/ - a->untracedThings = bit; + a->u.untracedThings = bit; if (a->prevUntracedPage == 0) { if (!rt->gcUntracedArenaStackTop) { /* Stack was empty, mark the arena as the bottom element. */ @@ -1828,13 +2260,13 @@ TraceDelayedChildren(JSTracer *trc) thingsPerUntracedBit = THINGS_PER_UNTRACED_BIT(thingSize); /* - * We can not use do-while loop here as a->untracedThings can be zero + * We cannot use do-while loop here as a->u.untracedThings can be zero * before the loop as a leftover from the previous iterations. See * comments after the loop. */ - while (a->untracedThings != 0) { - untracedBitIndex = JS_FLOOR_LOG2W(a->untracedThings); - a->untracedThings &= ~((jsuword)1 << untracedBitIndex); + while (a->u.untracedThings != 0) { + untracedBitIndex = JS_FLOOR_LOG2W(a->u.untracedThings); + a->u.untracedThings &= ~((jsuword)1 << untracedBitIndex); thingIndex = untracedBitIndex * thingsPerUntracedBit; endIndex = thingIndex + thingsPerUntracedBit; @@ -1899,6 +2331,8 @@ JS_CallTracer(JSTracer *trc, void *thing, uint32 kind) { JSContext *cx; JSRuntime *rt; + JSGCArenaInfo *a; + uintN index; uint8 *flagp; JS_ASSERT(thing); @@ -1921,10 +2355,15 @@ JS_CallTracer(JSTracer *trc, void *thing, uint32 kind) */ switch (kind) { case JSTRACE_DOUBLE: - flagp = THING_TO_FLAGP(thing, sizeof(JSGCThing)); - JS_ASSERT((*flagp & GCF_FINAL) == 0); - JS_ASSERT(kind == MapGCFlagsToTraceKind(*flagp)); - *flagp |= GCF_MARK; + a = THING_TO_ARENA(thing); + JS_ASSERT(!a->list); + if (!a->u.hasMarkedDoubles) { + ClearDoubleArenaFlags(a); + a->u.hasMarkedDoubles = JS_TRUE; + } + index = DOUBLE_THING_TO_INDEX(thing); + DOUBLE_ARENA_BITMAP(a)[index >> JS_BITS_PER_BYTE_LOG2] |= + (uint8) JS_BIT(index & (JS_BITS_PER_BYTE - 1)); goto out; case JSTRACE_STRING: @@ -2035,14 +2474,16 @@ gc_root_traversal(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 num, #ifdef DEBUG JSBool root_points_to_gcArenaList = JS_FALSE; jsuword thing = (jsuword) JSVAL_TO_GCTHING(v); + JSRuntime *rt; uintN i; JSGCArenaList 
*arenaList; uint32 thingSize; JSGCArenaInfo *a; size_t limit; + rt = trc->context->runtime; for (i = 0; i < GC_NUM_FREELISTS; i++) { - arenaList = &trc->context->runtime->gcArenaList[i]; + arenaList = &rt->gcArenaList[i]; thingSize = arenaList->thingSize; limit = (size_t) arenaList->lastCount * thingSize; for (a = arenaList->last; a; a = a->prev) { @@ -2053,6 +2494,15 @@ gc_root_traversal(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 num, limit = (size_t) THINGS_PER_ARENA(thingSize) * thingSize; } } + if (!root_points_to_gcArenaList) { + for (a = rt->gcDoubleArenaList.first; a; a = a->prev) { + if (thing - ARENA_INFO_TO_START(a) < + DOUBLES_PER_ARENA * sizeof(jsdouble)) { + root_points_to_gcArenaList = JS_TRUE; + break; + } + } + } if (!root_points_to_gcArenaList && rhe->name) { fprintf(stderr, "JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n" @@ -2215,6 +2665,11 @@ js_TraceContext(JSTracer *trc, JSContext *acx) if (age > (int64) acx->runtime->gcStackPoolLifespan * 1000) JS_FinishArenaPool(&acx->stackPool); } + + /* + * Clear the double free list to release all the pre-allocated doubles. 
+ */ + acx->doubleFreeList = NULL; } /* @@ -2307,6 +2762,7 @@ js_TraceRuntime(JSTracer *trc, JSBool allAtoms) JS_DHashTableEnumerate(rt->gcLocksHash, gc_lock_traversal, trc); js_TraceAtomState(trc, allAtoms); js_TraceNativeIteratorStates(trc); + js_TraceRuntimeNumberState(trc); iter = NULL; while ((acx = js_ContextIterator(rt, JS_TRUE, &iter)) != NULL) @@ -2423,8 +2879,7 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) JSContext *acx, *iter; #endif #ifdef JS_GCMETER - JSGCArenaStats *listStats; - size_t nfree; + uint32 nlivearenas, nkilledarenas, nthings; #endif rt = cx->runtime; @@ -2669,6 +3124,10 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) JS_TRACER_INIT(&trc, cx, NULL); rt->gcMarkingTracer = &trc; JS_ASSERT(IS_GC_MARKING_TRACER(&trc)); + + for (a = rt->gcDoubleArenaList.first; a; a = a->prev) + a->u.hasMarkedDoubles = JS_FALSE; + js_TraceRuntime(&trc, keepAtoms); js_MarkScriptFilenames(rt, keepAtoms); @@ -2738,25 +3197,20 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) JS_ASSERT(arenaList->lastCount > 0); arenaList->freeList = NULL; freeList = NULL; - - /* Here i is not the list index due to the above swap. 
*/ - METER(listStats = &rt->gcStats.arenas[arenaList - &rt->gcArenaList[0]]); - METER(listStats->nthings = 0); - METER(listStats->freelen = 0); thingSize = arenaList->thingSize; indexLimit = THINGS_PER_ARENA(thingSize); flagp = THING_FLAGP(a, arenaList->lastCount - 1); + METER((nlivearenas = 0, nkilledarenas = 0, nthings = 0)); for (;;) { JS_ASSERT(a->prevUntracedPage == 0); - JS_ASSERT(a->untracedThings == 0); + JS_ASSERT(a->u.untracedThings == 0); allClear = JS_TRUE; - METER(nfree = 0); do { flags = *flagp; if (flags & (GCF_MARK | GCF_LOCK)) { *flagp &= ~GCF_MARK; allClear = JS_FALSE; - METER(listStats->nthings++); + METER(nthings++); } else { thing = FLAGP_TO_THING(flagp, thingSize); if (!(flags & GCF_FINAL)) { @@ -2801,7 +3255,6 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) thing->flagp = flagp; thing->next = freeList; freeList = thing; - METER(++nfree); } } while (++flagp != THING_FLAGS_END(a)); @@ -2814,23 +3267,55 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) if (a == arenaList->last) arenaList->lastCount = indexLimit; *ap = a->prev; - JS_ASSERT(rt->gcBytes >= GC_ARENA_SIZE); - rt->gcBytes -= GC_ARENA_SIZE; a->prev = emptyArenas; emptyArenas = a; + METER(nkilledarenas++); } else { arenaList->freeList = freeList; ap = &a->prev; - METER(listStats->freelen += nfree); - METER(listStats->totalfreelen += nfree); - METER(listStats->totalarenas++); + METER(nlivearenas++); } if (!(a = *ap)) break; flagp = THING_FLAGP(a, indexLimit - 1); } + + /* + * We use arenaList - &rt->gcArenaList[0], not i, as the stat index + * due to the enumeration reorder at the beginning of the loop. + */ + METER(UpdateArenaStats(&rt->gcStats.arenaStats[arenaList - + &rt->gcArenaList[0]], + nlivearenas, nkilledarenas, nthings)); } + ap = &rt->gcDoubleArenaList.first; + METER((nlivearenas = 0, nkilledarenas = 0, nthings = 0)); + while ((a = *ap) != NULL) { + if (!a->u.hasMarkedDoubles) { + /* No marked double values in the arena. 
+ *ap = a->prev; + a->prev = emptyArenas; + emptyArenas = a; + METER(nkilledarenas++); + } else { + ap = &a->prev; +#ifdef JS_GCMETER + for (i = 0; i != DOUBLES_PER_ARENA; ++i) { + if (IsMarkedDouble(a, i)) + METER(nthings++); + } + METER(nlivearenas++); +#endif + } + } + METER(UpdateArenaStats(&rt->gcStats.doubleArenaStats, + nlivearenas, nkilledarenas, nthings)); + rt->gcDoubleArenaList.nextDoubleFlags = + rt->gcDoubleArenaList.first + ? DOUBLE_ARENA_BITMAP(rt->gcDoubleArenaList.first) + : DOUBLE_BITMAP_SENTINEL; + /* * Sweep the runtime's property tree after finalizing objects, in case any * had watchpoints referencing tree nodes. @@ -2849,11 +3334,7 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind) * Destroy arenas after we finished the sweeping sofinalizers can safely * use js_IsAboutToBeFinalized(). */ - while (emptyArenas) { - a = emptyArenas; - emptyArenas = emptyArenas->prev; - DestroyGCArena(rt, a); - } + DestroyGCArenas(rt, emptyArenas); if (rt->gcCallback) (void) rt->gcCallback(cx, JSGC_FINALIZE_END); diff --git a/js/src/jsgc.h b/js/src/jsgc.h index a067ce66fb2..7fcf4e18054 100644 --- a/js/src/jsgc.h +++ b/js/src/jsgc.h @@ -45,6 +45,7 @@ #include "jsprvtd.h" #include "jspubtd.h" #include "jsdhash.h" +#include "jsbit.h" #include "jsutil.h" JS_BEGIN_EXTERN_C @@ -202,8 +203,13 @@ struct JSGCThing { extern void * js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes); -extern JSBool -js_LockGCThing(JSContext *cx, void *thing); +/* + * Return a pointer to a new GC-allocated and weakly rooted jsdouble number + * or null when the allocation fails. The caller must initialize the returned + * double. 
+ */ +extern jsdouble * +js_NewDoubleGCThing(JSContext *cx); extern JSBool js_LockGCThingRT(JSRuntime *rt, void *thing); @@ -303,6 +309,21 @@ struct JSGCArenaList { JSGCThing *freeList; /* list of free GC things */ }; +typedef union JSGCDoubleCell JSGCDoubleCell; + +union JSGCDoubleCell { + double number; + JSGCDoubleCell *link; +}; + +JS_STATIC_ASSERT(sizeof(JSGCDoubleCell) == sizeof(double)); + +typedef struct JSGCDoubleArenaList { + JSGCArenaInfo *first; /* first allocated GC arena */ + uint8 *nextDoubleFlags; /* bitmask with flags to check for free + things */ +} JSGCDoubleArenaList; + struct JSWeakRoots { /* Most recently created things by type, members of the GC's root set. */ void *newborn[GCX_NTYPES]; @@ -324,28 +345,22 @@ JS_STATIC_ASSERT(JSVAL_NULL == 0); #ifdef JS_GCMETER typedef struct JSGCArenaStats { - uint32 narenas; /* number of arena in list */ - uint32 maxarenas; /* maximun number of allocated arenas */ - uint32 nthings; /* number of allocates JSGCThing */ - uint32 maxthings; /* maximum number number of allocates JSGCThing */ - uint32 totalnew; /* number of succeeded calls to js_NewGCThing */ - uint32 freelen; /* freeList lengths */ - uint32 recycle; /* number of things recycled through freeList */ + uint32 alloc; /* allocation attempts */ + uint32 localalloc; /* allocations from local lists */ + uint32 retry; /* allocation retries after running the GC */ + uint32 fail; /* allocation failures */ + uint32 nthings; /* live GC things */ + uint32 maxthings; /* maximum of live GC cells */ + double totalthings; /* live GC things the GC scanned so far */ + uint32 narenas; /* number of arena in list before the GC */ + uint32 newarenas; /* new arenas allocated before the last GC */ + uint32 livearenas; /* number of live arenas after the last GC */ + uint32 maxarenas; /* maximum of allocated arenas */ uint32 totalarenas; /* total number of arenas with live things that GC scanned so far */ - uint32 totalfreelen; /* total number of things that GC put to free 
- list so far */ } JSGCArenaStats; typedef struct JSGCStats { -#ifdef JS_THREADSAFE - uint32 localalloc; /* number of succeeded allocations from local lists */ -#endif - uint32 alloc; /* number of allocation attempts */ - uint32 retry; /* allocation attempt retries after running the GC */ - uint32 retryhalt; /* allocation retries halted by the operation - callback */ - uint32 fail; /* allocation failures */ uint32 finalfail; /* finalizer calls allocator failures */ uint32 lockborn; /* things born locked */ uint32 lock; /* valid lock calls */ @@ -370,7 +385,8 @@ typedef struct JSGCStats { uint32 closelater; /* number of close hooks scheduled to run */ uint32 maxcloselater; /* max number of close hooks scheduled to run */ - JSGCArenaStats arenas[GC_NUM_FREELISTS]; + JSGCArenaStats arenaStats[GC_NUM_FREELISTS]; + JSGCArenaStats doubleArenaStats; } JSGCStats; extern JS_FRIEND_API(void) diff --git a/js/src/jsnum.c b/js/src/jsnum.c index 9f20167cce6..efe13d5e6f1 100644 --- a/js/src/jsnum.c +++ b/js/src/jsnum.c @@ -548,21 +548,21 @@ js_InitRuntimeNumberState(JSContext *cx) u.s.hi = JSDOUBLE_HI32_EXPMASK | JSDOUBLE_HI32_MANTMASK; u.s.lo = 0xffffffff; number_constants[NC_NaN].dval = NaN = u.d; - rt->jsNaN = js_NewDouble(cx, NaN, GCF_LOCK); + rt->jsNaN = js_NewDouble(cx, NaN); if (!rt->jsNaN) return JS_FALSE; u.s.hi = JSDOUBLE_HI32_EXPMASK; u.s.lo = 0x00000000; number_constants[NC_POSITIVE_INFINITY].dval = u.d; - rt->jsPositiveInfinity = js_NewDouble(cx, u.d, GCF_LOCK); + rt->jsPositiveInfinity = js_NewDouble(cx, u.d); if (!rt->jsPositiveInfinity) return JS_FALSE; u.s.hi = JSDOUBLE_HI32_SIGNBIT | JSDOUBLE_HI32_EXPMASK; u.s.lo = 0x00000000; number_constants[NC_NEGATIVE_INFINITY].dval = u.d; - rt->jsNegativeInfinity = js_NewDouble(cx, u.d, GCF_LOCK); + rt->jsNegativeInfinity = js_NewDouble(cx, u.d); if (!rt->jsNegativeInfinity) return JS_FALSE; @@ -581,6 +581,20 @@ js_InitRuntimeNumberState(JSContext *cx) return rt->thousandsSeparator && rt->decimalSeparator && 
rt->numGrouping; } +void +js_TraceRuntimeNumberState(JSTracer *trc) +{ + JSRuntime *rt; + + rt = trc->context->runtime; + if (rt->jsNaN) + JS_CALL_DOUBLE_TRACER(trc, rt->jsNaN, "NaN"); + if (rt->jsPositiveInfinity) + JS_CALL_DOUBLE_TRACER(trc, rt->jsPositiveInfinity, "+Infinity"); + if (rt->jsNegativeInfinity) + JS_CALL_DOUBLE_TRACER(trc, rt->jsNegativeInfinity, "-Infinity"); +} + void js_FinishRuntimeNumberState(JSContext *cx) { @@ -637,31 +651,26 @@ js_InitNumberClass(JSContext *cx, JSObject *obj) } jsdouble * -js_NewDouble(JSContext *cx, jsdouble d, uintN gcflag) +js_NewDouble(JSContext *cx, jsdouble d) { jsdouble *dp; - dp = (jsdouble *) js_NewGCThing(cx, gcflag | GCX_DOUBLE, sizeof(jsdouble)); + dp = js_NewDoubleGCThing(cx); if (!dp) return NULL; *dp = d; return dp; } -void -js_FinalizeDouble(JSContext *cx, jsdouble *dp) -{ - *dp = NaN; -} - JSBool js_NewDoubleValue(JSContext *cx, jsdouble d, jsval *rval) { jsdouble *dp; - dp = js_NewDouble(cx, d, 0); + dp = js_NewDoubleGCThing(cx); if (!dp) return JS_FALSE; + *dp = d; *rval = DOUBLE_TO_JSVAL(dp); return JS_TRUE; } diff --git a/js/src/jsnum.h b/js/src/jsnum.h index 7b31129e254..d26af33b7f5 100644 --- a/js/src/jsnum.h +++ b/js/src/jsnum.h @@ -146,6 +146,9 @@ typedef union jsdpun { extern JSBool js_InitRuntimeNumberState(JSContext *cx); +extern void +js_TraceRuntimeNumberState(JSTracer *trc); + extern void js_FinishRuntimeNumberState(JSContext *cx); @@ -167,10 +170,7 @@ extern const char js_parseInt_str[]; /* GC-allocate a new JS number. */ extern jsdouble * -js_NewDouble(JSContext *cx, jsdouble d, uintN gcflag); - -extern void -js_FinalizeDouble(JSContext *cx, jsdouble *dp); +js_NewDouble(JSContext *cx, jsdouble d); extern JSBool js_NewDoubleValue(JSContext *cx, jsdouble d, jsval *rval);