diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp index 874d4cad516d..21ba25ad6e7e 100644 --- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -115,8 +115,9 @@ using namespace js; #endif #ifdef JS_USE_JSVAL_JSID_STRUCT_TYPES -JS_PUBLIC_DATA(jsid) JS_DEFAULT_XML_NAMESPACE_ID = { (size_t)JSID_TYPE_DEFAULT_XML_NAMESPACE }; -JS_PUBLIC_DATA(jsid) JSID_VOID = { (size_t)JSID_TYPE_VOID }; +JS_PUBLIC_DATA(jsid) JS_DEFAULT_XML_NAMESPACE_ID = { size_t(JSID_TYPE_DEFAULT_XML_NAMESPACE) }; +JS_PUBLIC_DATA(jsid) JSID_VOID = { size_t(JSID_TYPE_VOID) }; +JS_PUBLIC_DATA(jsid) JSID_EMPTY = { size_t(JSID_TYPE_OBJECT) }; #endif #ifdef JS_USE_JSVAL_JSID_STRUCT_TYPES @@ -576,6 +577,12 @@ JSRuntime::init(uint32 maxbytes) if (!unjoinedFunctionCountMap.init()) return false; } + propTreeStatFilename = getenv("JS_PROPTREE_STATFILE"); + propTreeDumpFilename = getenv("JS_PROPTREE_DUMPFILE"); + if (meterEmptyShapes()) { + if (!emptyShapes.init()) + return false; + } #endif if (!(defaultCompartment = new JSCompartment(this)) || @@ -1572,9 +1579,8 @@ static JSBool AlreadyHasOwnProperty(JSContext *cx, JSObject *obj, JSAtom *atom) { JS_LOCK_OBJ(cx, obj); - JSScope *scope = obj->scope(); - bool found = scope->hasProperty(ATOM_TO_JSID(atom)); - JS_UNLOCK_SCOPE(cx, scope); + bool found = obj->nativeContains(ATOM_TO_JSID(atom)); + JS_UNLOCK_OBJ(cx, obj); return found; } @@ -2098,8 +2104,7 @@ JS_PrintTraceThingInfo(char *buf, size_t bufsize, JSTracer *trc, void *thing, ui JS_snprintf(buf, bufsize, "%p", fun); } else { if (fun->atom) - js_PutEscapedString(buf, bufsize, - ATOM_TO_STRING(fun->atom), 0); + js_PutEscapedString(buf, bufsize, ATOM_TO_STRING(fun->atom), 0); } } else if (clasp->flags & JSCLASS_HAS_PRIVATE) { JS_snprintf(buf, bufsize, "%p", obj->getPrivate()); @@ -2930,67 +2935,47 @@ JS_SealObject(JSContext *cx, JSObject *obj, JSBool deep) CHECK_REQUEST(cx); assertSameCompartment(cx, obj); - JSScope *scope; - JSIdArray *ida; - uint32 nslots, i; + /* Nothing to do if obj is already sealed. */ + if (obj->sealed()) + return true; if (obj->isDenseArray() && !obj->makeDenseArraySlow(cx)) - return JS_FALSE; + return false; if (!obj->isNative()) { JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_CANT_SEAL_OBJECT, obj->getClass()->name); - return JS_FALSE; + return false; } - scope = obj->scope(); - -#if defined JS_THREADSAFE && defined DEBUG +#ifdef JS_THREADSAFE /* Insist on scope being used exclusively by cx's thread. */ - if (scope->title.ownercx != cx) { - JS_LOCK_OBJ(cx, obj); - JS_ASSERT(obj->scope() == scope); - JS_ASSERT(scope->title.ownercx == cx); - JS_UNLOCK_SCOPE(cx, scope); - } + JS_ASSERT(obj->title.ownercx == cx); #endif - /* Nothing to do if obj's scope is already sealed. */ - if (scope->sealed()) - return JS_TRUE; - /* XXX Enumerate lazy properties now, as they can't be added later. */ - ida = JS_Enumerate(cx, obj); + JSIdArray *ida = JS_Enumerate(cx, obj); if (!ida) - return JS_FALSE; + return false; JS_DestroyIdArray(cx, ida); - /* Ensure that obj has its own, mutable scope, and seal that scope. */ - JS_LOCK_OBJ(cx, obj); - scope = js_GetMutableScope(cx, obj); - if (scope) - scope->seal(cx); - JS_UNLOCK_OBJ(cx, obj); - if (!scope) - return JS_FALSE; - - /* If we are not sealing an entire object graph, we're done. */ + /* If not sealing an entire object graph, we're done after sealing obj. */ + obj->seal(cx); if (!deep) - return JS_TRUE; + return true; /* Walk slots in obj and if any value is a non-null object, seal it. 
*/ - nslots = scope->freeslot; - for (i = 0; i != nslots; ++i) { + for (uint32 i = 0, n = obj->freeslot; i != n; ++i) { const Value &v = obj->getSlot(i); if (i == JSSLOT_PRIVATE && (obj->getClass()->flags & JSCLASS_HAS_PRIVATE)) continue; if (v.isPrimitive()) continue; if (!JS_SealObject(cx, &v.toObject(), deep)) - return JS_FALSE; + return false; } - return JS_TRUE; + return true; } JS_PUBLIC_API(JSObject *) @@ -3041,18 +3026,18 @@ LookupResult(JSContext *cx, JSObject *obj, JSObject *obj2, jsid id, } if (obj2->isNative()) { - JSScopeProperty *sprop = (JSScopeProperty *) prop; + Shape *shape = (Shape *) prop; - if (sprop->isMethod()) { - AutoScopePropertyRooter root(cx, sprop); + if (shape->isMethod()) { + AutoShapeRooter root(cx, shape); JS_UNLOCK_OBJ(cx, obj2); - vp->setObject(sprop->methodObject()); - return obj2->scope()->methodReadBarrier(cx, sprop, vp); + vp->setObject(shape->methodObject()); + return obj2->methodReadBarrier(cx, *shape, vp); } /* Peek at the native property's slot value, without doing a Get. */ - if (SPROP_HAS_VALID_SLOT(sprop, obj2->scope())) - *vp = obj2->lockedGetSlot(sprop->slot); + if (obj2->containsSlot(shape->slot)) + *vp = obj2->lockedGetSlot(shape->slot); else vp->setBoolean(true); JS_UNLOCK_OBJ(cx, obj2); @@ -3177,9 +3162,8 @@ JS_AlreadyHasOwnPropertyById(JSContext *cx, JSObject *obj, jsid id, JSBool *foun } JS_LOCK_OBJ(cx, obj); - JSScope *scope = obj->scope(); - *foundp = scope->hasProperty(id); - JS_UNLOCK_SCOPE(cx, scope); + *foundp = obj->nativeContains(id); + JS_UNLOCK_OBJ(cx, obj); return JS_TRUE; } @@ -3276,7 +3260,7 @@ JS_DefinePropertyWithTinyId(JSContext *cx, JSObject *obj, const char *name, int8 jsval value, JSPropertyOp getter, JSPropertyOp setter, uintN attrs) { return DefineProperty(cx, obj, name, Valueify(value), Valueify(getter), - Valueify(setter), attrs, JSScopeProperty::HAS_SHORTID, tinyid); + Valueify(setter), attrs, Shape::HAS_SHORTID, tinyid); } static JSBool @@ -3303,7 +3287,7 @@ JS_DefineUCPropertyWithTinyId(JSContext *cx, JSObject *obj, const jschar *name, uintN attrs) { return DefineUCProperty(cx, obj, name, namelen, Valueify(value), Valueify(getter), - Valueify(setter), attrs, JSScopeProperty::HAS_SHORTID, tinyid); + Valueify(setter), attrs, Shape::HAS_SHORTID, tinyid); } JS_PUBLIC_API(JSBool) @@ -3362,7 +3346,7 @@ JS_DefineProperties(JSContext *cx, JSObject *obj, JSPropertySpec *ps) for (ok = true; ps->name; ps++) { ok = DefineProperty(cx, obj, ps->name, UndefinedValue(), Valueify(ps->getter), Valueify(ps->setter), - ps->flags, JSScopeProperty::HAS_SHORTID, ps->tinyid); + ps->flags, Shape::HAS_SHORTID, ps->tinyid); if (!ok) break; } @@ -3375,7 +3359,7 @@ JS_AliasProperty(JSContext *cx, JSObject *obj, const char *name, const char *ali JSObject *obj2; JSProperty *prop; JSBool ok; - JSScopeProperty *sprop; + Shape *shape; CHECK_REQUEST(cx); assertSameCompartment(cx, obj); @@ -3399,11 +3383,11 @@ JS_AliasProperty(JSContext *cx, JSObject *obj, const char *name, const char *ali if (!atom) { ok = JS_FALSE; } else { - sprop = (JSScopeProperty *)prop; + shape = (Shape *)prop; ok = (js_AddNativeProperty(cx, obj, ATOM_TO_JSID(atom), - sprop->getter(), sprop->setter(), sprop->slot, - sprop->attributes(), sprop->getFlags() | JSScopeProperty::ALIAS, - sprop->shortid) + shape->getter(), shape->setter(), shape->slot, + shape->attributes(), shape->getFlags() | Shape::ALIAS, + shape->shortid) != NULL); } JS_UNLOCK_OBJ(cx, obj); @@ -3415,7 +3399,7 @@ JS_AliasElement(JSContext *cx, JSObject *obj, const char *name, jsint alias) { JSObject *obj2; 
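    /*
     * Editorial note, not part of the original patch: the Shape* casts in the
     * surrounding hunks follow this patch's recurring pattern of retiring
     * JSScopeProperty/JSScope in favor of js::Shape plus per-object state. A
     * minimal sketch of the before/after lookup idiom, taken from the
     * AlreadyHasOwnProperty and LookupResult hunks earlier in this file:
     *
     *   // Before: indirect through the object's JSScope.
     *   JSScope *scope = obj->scope();
     *   bool found = scope->hasProperty(id);
     *   JS_UNLOCK_SCOPE(cx, scope);
     *
     *   // After: ask the object directly; shapes replace sprops.
     *   bool found = obj->nativeContains(id);
     *   JS_UNLOCK_OBJ(cx, obj);
     *
     *   // Slot validity likewise moves from SPROP_HAS_VALID_SLOT(sprop, scope)
     *   // to obj->containsSlot(shape->slot).
     */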
JSProperty *prop; - JSScopeProperty *sprop; + Shape *shape; JSBool ok; CHECK_REQUEST(cx); @@ -3438,11 +3422,11 @@ JS_AliasElement(JSContext *cx, JSObject *obj, const char *name, jsint alias) numBuf, name, obj2->getClass()->name); return JS_FALSE; } - sprop = (JSScopeProperty *)prop; + shape = (Shape *)prop; ok = (js_AddNativeProperty(cx, obj, INT_TO_JSID(alias), - sprop->getter(), sprop->setter(), sprop->slot, - sprop->attributes(), sprop->getFlags() | JSScopeProperty::ALIAS, - sprop->shortid) + shape->getter(), shape->setter(), shape->slot, + shape->attributes(), shape->getFlags() | Shape::ALIAS, + shape->shortid) != NULL); JS_UNLOCK_OBJ(cx, obj); return ok; @@ -3471,27 +3455,28 @@ GetPropertyDescriptorById(JSContext *cx, JSObject *obj, jsid id, uintN flags, desc->obj = obj2; if (obj2->isNative()) { - JSScopeProperty *sprop = (JSScopeProperty *) prop; - desc->attrs = sprop->attributes(); + Shape *shape = (Shape *) prop; + desc->attrs = shape->attributes(); - if (sprop->isMethod()) { + if (shape->isMethod()) { desc->getter = desc->setter = PropertyStub; - desc->value.setObject(sprop->methodObject()); + desc->value.setObject(shape->methodObject()); } else { - desc->getter = sprop->getter(); - desc->setter = sprop->setter(); - if (SPROP_HAS_VALID_SLOT(sprop, obj2->scope())) - desc->value = obj2->lockedGetSlot(sprop->slot); + desc->getter = shape->getter(); + desc->setter = shape->setter(); + if (obj2->containsSlot(shape->slot)) + desc->value = obj2->lockedGetSlot(shape->slot); else desc->value.setUndefined(); } JS_UNLOCK_OBJ(cx, obj2); - } else if (obj2->isProxy()) { - JSAutoResolveFlags rf(cx, flags); - return own - ? JSProxy::getOwnPropertyDescriptor(cx, obj2, id, desc) - : JSProxy::getPropertyDescriptor(cx, obj2, id, desc); } else { + if (obj2->isProxy()) { + JSAutoResolveFlags rf(cx, flags); + return own + ? JSProxy::getOwnPropertyDescriptor(cx, obj2, id, desc) + : JSProxy::getPropertyDescriptor(cx, obj2, id, desc); + } if (!obj2->getAttributes(cx, id, &desc->attrs)) return false; desc->getter = NULL; @@ -3587,7 +3572,7 @@ SetPropertyAttributesById(JSContext *cx, JSObject *obj, jsid id, uintN attrs, JS return true; } JSBool ok = obj->isNative() - ? js_SetNativeAttributes(cx, obj, (JSScopeProperty *) prop, attrs) + ? js_SetNativeAttributes(cx, obj, (Shape *) prop, attrs) : obj->setAttributes(cx, id, &attrs); if (ok) *foundp = true; @@ -3780,7 +3765,7 @@ JS_Enumerate(JSContext *cx, JSObject *obj) * XXX reverse iterator for properties, unreverse and meld with jsinterp.c's * prop_iterator_class somehow... * + preserve the obj->enumerate API while optimizing the native object case - * + native case here uses a JSScopeProperty *, but that iterates in reverse! + * + native case here uses a Shape *, but that iterates in reverse! * + so we make non-native match, by reverse-iterating after JS_Enumerating */ const uint32 JSSLOT_ITER_INDEX = JSSLOT_PRIVATE + 1; @@ -3809,7 +3794,7 @@ prop_iter_trace(JSTracer *trc, JSObject *obj) if (obj->fslots[JSSLOT_ITER_INDEX].toInt32() < 0) { /* Native case: just mark the next property to visit. */ - ((JSScopeProperty *) pdata)->trace(trc); + ((Shape *) pdata)->trace(trc); } else { /* Non-native case: mark each id in the JSIdArray private. 
*/ JSIdArray *ida = (JSIdArray *) pdata; @@ -3842,8 +3827,7 @@ JS_PUBLIC_API(JSObject *) JS_NewPropertyIterator(JSContext *cx, JSObject *obj) { JSObject *iterobj; - JSScope *scope; - void *pdata; + const void *pdata; jsint index; JSIdArray *ida; @@ -3854,9 +3838,8 @@ JS_NewPropertyIterator(JSContext *cx, JSObject *obj) return NULL; if (obj->isNative()) { - /* Native case: start with the last property in obj's own scope. */ - scope = obj->scope(); - pdata = scope->lastProperty(); + /* Native case: start with the last property in obj. */ + pdata = obj->lastProperty(); index = -1; } else { /* @@ -3874,7 +3857,7 @@ JS_NewPropertyIterator(JSContext *cx, JSObject *obj) } /* iterobj cannot escape to other threads here. */ - iterobj->setPrivate(pdata); + iterobj->setPrivate(const_cast(pdata)); iterobj->fslots[JSSLOT_ITER_INDEX].setInt32(index); return iterobj; } @@ -3884,7 +3867,7 @@ JS_NextProperty(JSContext *cx, JSObject *iterobj, jsid *idp) { jsint i; JSObject *obj; - JSScopeProperty *sprop; + const Shape *shape; JSIdArray *ida; CHECK_REQUEST(cx); @@ -3894,21 +3877,22 @@ JS_NextProperty(JSContext *cx, JSObject *iterobj, jsid *idp) /* Native case: private data is a property tree node pointer. */ obj = iterobj->getParent(); JS_ASSERT(obj->isNative()); - sprop = (JSScopeProperty *) iterobj->getPrivate(); + shape = (Shape *) iterobj->getPrivate(); /* - * If the next property in the property tree ancestor line is - * not enumerable, or it's an alias, skip it and keep on trying - * to find an enumerable property that is still in scope. + * If the next property mapped by obj in the property tree ancestor + * line is not enumerable, or it's an alias, skip it and keep on trying + * to find an enumerable property that is still in obj. */ - while (sprop && (!sprop->enumerable() || sprop->isAlias())) - sprop = sprop->parent; + while (shape->previous() && (!shape->enumerable() || shape->isAlias())) + shape = shape->previous(); - if (!sprop) { + if (!shape->previous()) { + JS_ASSERT(JSID_IS_EMPTY(shape->id)); *idp = JSID_VOID; } else { - iterobj->setPrivate(sprop->parent); - *idp = sprop->id; + iterobj->setPrivate(const_cast(shape->previous())); + *idp = shape->id; } } else { /* Non-native case: use the ida enumerated when iterobj was created. */ @@ -4083,9 +4067,8 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent) } JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj); - JSObject *clone = CloneFunctionObject(cx, fun, parent); - if (!clone) - return NULL; + if (!FUN_FLAT_CLOSURE(fun)) + return CloneFunctionObject(cx, fun, parent); /* * A flat closure carries its own environment, so why clone it? In case @@ -4099,42 +4082,27 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent) * they were activations, respecting the skip field in each upvar's cookie * but looking up the property by name instead of frame slot. 
*/ - if (FUN_FLAT_CLOSURE(fun)) { - JS_ASSERT(funobj->dslots); - if (!js_EnsureReservedSlots(cx, clone, - fun->countInterpretedReservedSlots())) { - return NULL; - } + JSObject *clone = js_AllocFlatClosure(cx, fun, parent); + if (!clone) + return NULL; - JSUpvarArray *uva = fun->u.i.script->upvars(); - JS_ASSERT(uva->length <= clone->dslots[-1].toPrivateUint32()); + JSUpvarArray *uva = fun->u.i.script->upvars(); + uint32 i = uva->length; + JS_ASSERT(i != 0); - void *mark = JS_ARENA_MARK(&cx->tempPool); - jsuword *names = js_GetLocalNameArray(cx, fun, &cx->tempPool); - if (!names) - return NULL; - - uint32 i = 0, n = uva->length; - for (; i < n; i++) { - JSObject *obj = parent; - int skip = uva->vector[i].level(); - while (--skip > 0) { - if (!obj) { - JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, - JSMSG_BAD_CLONE_FUNOBJ_SCOPE); - goto break2; - } - obj = obj->getParent(); + for (Shape::Range r(fun->lastUpvar()); i-- != 0; r.popFront()) { + JSObject *obj = parent; + int skip = uva->vector[i].level(); + while (--skip > 0) { + if (!obj) { + JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, + JSMSG_BAD_CLONE_FUNOBJ_SCOPE); + return NULL; } - - JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(names[i]); - if (!obj->getProperty(cx, ATOM_TO_JSID(atom), &clone->dslots[i])) - break; + obj = obj->getParent(); } - break2: - JS_ARENA_RELEASE(&cx->tempPool, mark); - if (i < n) + if (!obj->getProperty(cx, r.front().id, clone->getFlatClosureUpvars() + i)) return NULL; } @@ -4609,7 +4577,7 @@ JS_CompileUCFunctionForPrincipals(JSContext *cx, JSObject *obj, fun = NULL; goto out2; } - if (!js_AddLocal(cx, fun, argAtom, JSLOCAL_ARG)) { + if (!fun->addLocal(cx, argAtom, JSLOCAL_ARG)) { fun = NULL; goto out2; } diff --git a/js/src/jsapi.h b/js/src/jsapi.h index 16040457ce04..4fa181087bf6 100644 --- a/js/src/jsapi.h +++ b/js/src/jsapi.h @@ -308,21 +308,22 @@ JSVAL_TO_PRIVATE(jsval v) #define JSID_TYPE_MASK 0x7 /* - * Do not use canonical 'id' for jsid parameters since this is a magic word in + * Avoid using canonical 'id' for jsid parameters since this is a magic word in * Objective-C++ which, apparently, wants to be able to #include jsapi.h. 
*/ +#define id iden static JS_ALWAYS_INLINE JSBool -JSID_IS_STRING(jsid iden) +JSID_IS_STRING(jsid id) { - return (JSID_BITS(iden) & JSID_TYPE_MASK) == 0; + return (JSID_BITS(id) & JSID_TYPE_MASK) == 0; } static JS_ALWAYS_INLINE JSString * -JSID_TO_STRING(jsid iden) +JSID_TO_STRING(jsid id) { - JS_ASSERT(JSID_IS_STRING(iden)); - return (JSString *)(JSID_BITS(iden)); + JS_ASSERT(JSID_IS_STRING(id)); + return (JSString *)(JSID_BITS(id)); } JS_PUBLIC_API(JSBool) @@ -332,24 +333,24 @@ JS_StringHasBeenInterned(JSString *str); static JS_ALWAYS_INLINE jsid INTERNED_STRING_TO_JSID(JSString *str) { - jsid iden; + jsid id; JS_ASSERT(JS_StringHasBeenInterned(str)); JS_ASSERT(((size_t)str & JSID_TYPE_MASK) == 0); - JSID_BITS(iden) = (size_t)str; - return iden; + JSID_BITS(id) = (size_t)str; + return id; } static JS_ALWAYS_INLINE JSBool -JSID_IS_INT(jsid iden) +JSID_IS_INT(jsid id) { - return !!(JSID_BITS(iden) & JSID_TYPE_INT); + return !!(JSID_BITS(id) & JSID_TYPE_INT); } static JS_ALWAYS_INLINE int32 -JSID_TO_INT(jsid iden) +JSID_TO_INT(jsid id) { - JS_ASSERT(JSID_IS_INT(iden)); - return ((int32)JSID_BITS(iden)) >> 1; + JS_ASSERT(JSID_IS_INT(id)); + return ((int32)JSID_BITS(id)) >> 1; } #define JSID_INT_MIN (-(1 << 30)) @@ -365,45 +366,46 @@ INT_FITS_IN_JSID(int32 i) static JS_ALWAYS_INLINE jsid INT_TO_JSID(int32 i) { - jsid iden; + jsid id; JS_ASSERT(INT_FITS_IN_JSID(i)); - JSID_BITS(iden) = ((i << 1) | JSID_TYPE_INT); - return iden; + JSID_BITS(id) = ((i << 1) | JSID_TYPE_INT); + return id; } static JS_ALWAYS_INLINE JSBool -JSID_IS_OBJECT(jsid iden) +JSID_IS_OBJECT(jsid id) { - return (JSID_BITS(iden) & JSID_TYPE_MASK) == JSID_TYPE_OBJECT; + return (JSID_BITS(id) & JSID_TYPE_MASK) == JSID_TYPE_OBJECT && + (size_t)JSID_BITS(id) != JSID_TYPE_OBJECT; } static JS_ALWAYS_INLINE JSObject * -JSID_TO_OBJECT(jsid iden) +JSID_TO_OBJECT(jsid id) { - JS_ASSERT(JSID_IS_OBJECT(iden)); - return (JSObject *)(JSID_BITS(iden) & ~(size_t)JSID_TYPE_MASK); + JS_ASSERT(JSID_IS_OBJECT(id)); + return (JSObject *)(JSID_BITS(id) & ~(size_t)JSID_TYPE_MASK); } static JS_ALWAYS_INLINE jsid OBJECT_TO_JSID(JSObject *obj) { - jsid iden; + jsid id; JS_ASSERT(obj != NULL); JS_ASSERT(((size_t)obj & JSID_TYPE_MASK) == 0); - JSID_BITS(iden) = ((size_t)obj | JSID_TYPE_OBJECT); - return iden; + JSID_BITS(id) = ((size_t)obj | JSID_TYPE_OBJECT); + return id; } static JS_ALWAYS_INLINE JSBool -JSID_IS_GCTHING(jsid iden) +JSID_IS_GCTHING(jsid id) { - return JSID_IS_STRING(iden) || JSID_IS_OBJECT(iden); + return JSID_IS_STRING(id) || JSID_IS_OBJECT(id); } static JS_ALWAYS_INLINE void * -JSID_TO_GCTHING(jsid iden) +JSID_TO_GCTHING(jsid id) { - return (void *)(JSID_BITS(iden) & ~(size_t)JSID_TYPE_MASK); + return (void *)(JSID_BITS(id) & ~(size_t)JSID_TYPE_MASK); } /* @@ -412,11 +414,11 @@ JSID_TO_GCTHING(jsid iden) */ static JS_ALWAYS_INLINE JSBool -JSID_IS_DEFAULT_XML_NAMESPACE(jsid iden) +JSID_IS_DEFAULT_XML_NAMESPACE(jsid id) { - JS_ASSERT_IF(((size_t)JSID_BITS(iden) & JSID_TYPE_MASK) == JSID_TYPE_DEFAULT_XML_NAMESPACE, - JSID_BITS(iden) == JSID_TYPE_DEFAULT_XML_NAMESPACE); - return ((size_t)JSID_BITS(iden) == JSID_TYPE_DEFAULT_XML_NAMESPACE); + JS_ASSERT_IF(((size_t)JSID_BITS(id) & JSID_TYPE_MASK) == JSID_TYPE_DEFAULT_XML_NAMESPACE, + JSID_BITS(id) == JSID_TYPE_DEFAULT_XML_NAMESPACE); + return ((size_t)JSID_BITS(id) == JSID_TYPE_DEFAULT_XML_NAMESPACE); } #ifdef JS_USE_JSVAL_JSID_STRUCT_TYPES @@ -433,17 +435,27 @@ extern JS_PUBLIC_DATA(jsid) JS_DEFAULT_XML_NAMESPACE_ID; */ static JS_ALWAYS_INLINE JSBool -JSID_IS_VOID(jsid iden) 
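/*
 * Editorial note, not part of the original patch: the "#define id iden" added
 * above lets these public inline functions keep the natural parameter name
 * "id" in the header while the preprocessor quietly renames it to "iden", so
 * Objective-C++ translation units (where "id" is a magic word) can still
 * #include jsapi.h. The macro is removed again by the "#undef id" this patch
 * adds further down. A hedged sketch of what a consumer effectively compiles:
 *
 *   static JS_ALWAYS_INLINE JSBool
 *   JSID_IS_INT(jsid iden)            // written as "jsid id" in the source
 *   {
 *       return !!(JSID_BITS(iden) & JSID_TYPE_INT);
 *   }
 */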
+JSID_IS_VOID(jsid id) { - JS_ASSERT_IF(((size_t)JSID_BITS(iden) & JSID_TYPE_MASK) == JSID_TYPE_VOID, - JSID_BITS(iden) == JSID_TYPE_VOID); - return ((size_t)JSID_BITS(iden) == JSID_TYPE_VOID); + JS_ASSERT_IF(((size_t)JSID_BITS(id) & JSID_TYPE_MASK) == JSID_TYPE_VOID, + JSID_BITS(id) == JSID_TYPE_VOID); + return ((size_t)JSID_BITS(id) == JSID_TYPE_VOID); } +static JS_ALWAYS_INLINE JSBool +JSID_IS_EMPTY(jsid id) +{ + return ((size_t)JSID_BITS(id) == JSID_TYPE_OBJECT); +} + +#undef id + #ifdef JS_USE_JSVAL_JSID_STRUCT_TYPES extern JS_PUBLIC_DATA(jsid) JSID_VOID; +extern JS_PUBLIC_DATA(jsid) JSID_EMPTY; #else -#define JSID_VOID ((jsid)JSID_TYPE_VOID) +# define JSID_VOID ((jsid)JSID_TYPE_VOID) +# define JSID_EMPTY ((jsid)JSID_TYPE_OBJECT) #endif /************************************************************************/ diff --git a/js/src/jsarray.cpp b/js/src/jsarray.cpp index f6ce5483a92a..b9149b3834fb 100644 --- a/js/src/jsarray.cpp +++ b/js/src/jsarray.cpp @@ -755,7 +755,7 @@ array_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp) obj->getDenseArrayElement(i).isMagic(JS_ARRAY_HOLE)) { JSObject *obj2; JSProperty *prop; - JSScopeProperty *sprop; + const Shape *shape; JSObject *proto = obj->getProto(); if (!proto) { @@ -769,8 +769,8 @@ array_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp) return JS_FALSE; if (prop && obj2->isNative()) { - sprop = (JSScopeProperty *) prop; - if (!js_NativeGet(cx, obj, obj2, sprop, JSGET_METHOD_BARRIER, vp)) + shape = (const Shape *) prop; + if (!js_NativeGet(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, vp)) return JS_FALSE; JS_UNLOCK_OBJ(cx, obj2); } @@ -842,7 +842,7 @@ js_PrototypeHasIndexedProperties(JSContext *cx, JSObject *obj) */ if (!obj->isNative()) return JS_TRUE; - if (obj->scope()->hadIndexedProperties()) + if (obj->isIndexed()) return JS_TRUE; } return JS_FALSE; @@ -1006,7 +1006,7 @@ array_trace(JSTracer *trc, JSObject *obj) Class js_ArrayClass = { "Array", Class::NON_NATIVE | - JSCLASS_HAS_RESERVED_SLOTS(JSObject::DENSE_ARRAY_FIXED_RESERVED_SLOTS) | + JSCLASS_HAS_RESERVED_SLOTS(JSObject::DENSE_ARRAY_CLASS_RESERVED_SLOTS) | JSCLASS_HAS_CACHED_PROTO(JSProto_Array) | JSCLASS_FAST_CONSTRUCTOR, PropertyStub, /* addProperty */ @@ -1063,30 +1063,25 @@ JSObject::makeDenseArraySlow(JSContext *cx) { JS_ASSERT(isDenseArray()); + /* + * Save old map now, before calling InitScopeForObject. We'll have to undo + * on error. This is gross, but a better way is not obvious. + */ + JSObjectMap *oldMap = map; + /* * Create a native scope. All slow arrays other than Array.prototype get * the same initial shape. */ - uint32 emptyShape; - JSObject *obj = this; - JSObject *arrayProto = obj->getProto(); - if (arrayProto->getClass() == &js_ObjectClass) { - /* obj is Array.prototype. */ - emptyShape = js_GenerateShape(cx, false); - } else { - /* arrayProto is Array.prototype. 
*/ - JS_ASSERT(arrayProto->getClass() == &js_SlowArrayClass); - emptyShape = arrayProto->scope()->emptyScope->shape; - } - JSScope *scope = JSScope::create(cx, &js_SlowArrayClass, obj, emptyShape); - if (!scope) - return JS_FALSE; + JSObject *arrayProto = getProto(); + if (!InitScopeForObject(cx, this, &js_SlowArrayClass, arrayProto)) + return false; uint32 capacity; - if (obj->dslots) { - capacity = obj->getDenseArrayCapacity(); - obj->dslots[-1].setPrivateUint32(JS_INITIAL_NSLOTS + capacity); + if (dslots) { + capacity = getDenseArrayCapacity(); + dslots[-1].setPrivateUint32(JS_INITIAL_NSLOTS + capacity); } else { /* * Array.prototype is constructed as a dense array, but is immediately slowified before @@ -1095,28 +1090,40 @@ JSObject::makeDenseArraySlow(JSContext *cx) capacity = 0; } - scope->freeslot = obj->numSlots(); + uint32 nslots = numSlots(); + if (nslots >= JS_NSLOTS_LIMIT) { + setMap(oldMap); + JS_ReportOutOfMemory(cx); + return false; + } + + freeslot = nslots; /* Begin with the length property to share more of the property tree. */ - if (!scope->addProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom), - array_length_getter, array_length_setter, - JSSLOT_ARRAY_LENGTH, JSPROP_PERMANENT | JSPROP_SHARED, 0, 0)) { - goto out_bad; + if (!addProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom), + array_length_getter, array_length_setter, + JSSLOT_ARRAY_LENGTH, JSPROP_PERMANENT | JSPROP_SHARED, 0, 0)) { + setMap(oldMap); + return false; } /* Create new properties pointing to existing elements. */ for (uint32 i = 0; i < capacity; i++) { jsid id; - if (!ValueToId(cx, Int32Value(i), &id)) - goto out_bad; + if (!ValueToId(cx, Int32Value(i), &id)) { + setMap(oldMap); + return false; + } - if (obj->getDenseArrayElement(i).isMagic(JS_ARRAY_HOLE)) { - obj->setDenseArrayElement(i, UndefinedValue()); + if (getDenseArrayElement(i).isMagic(JS_ARRAY_HOLE)) { + setDenseArrayElement(i, UndefinedValue()); continue; } - if (!scope->addDataProperty(cx, id, JS_INITIAL_NSLOTS + i, JSPROP_ENUMERATE)) - goto out_bad; + if (!addDataProperty(cx, id, JS_INITIAL_NSLOTS + i, JSPROP_ENUMERATE)) { + setMap(oldMap); + return false; + } } /* @@ -1126,14 +1133,11 @@ JSObject::makeDenseArraySlow(JSContext *cx) * can store an arbitrary value. */ JS_ASSERT(js_SlowArrayClass.flags & JSCLASS_HAS_PRIVATE); - obj->voidDenseOnlyArraySlots(); - obj->clasp = &js_SlowArrayClass; - obj->map = scope; - return JS_TRUE; + voidDenseOnlyArraySlots(); - out_bad: - scope->destroy(cx); - return JS_FALSE; + /* Finally, update class. */ + clasp = &js_SlowArrayClass; + return true; } /* Transfer ownership of buffer to returned string. */ @@ -2998,9 +3002,9 @@ js_NewEmptyArray(JSContext* cx, JSObject* proto, int32 len) if (!obj) return NULL; - /* Initialize all fields of JSObject. */ - obj->map = const_cast(&JSObjectMap::sharedNonNative); - obj->init(&js_ArrayClass, proto, proto->getParent(), NullValue()); + /* Initialize all fields, calling init before setting obj->map. */ + obj->init(&js_ArrayClass, proto, proto->getParent(), NullValue(), cx); + obj->setSharedNonNativeMap(); obj->setArrayLength(len); obj->setDenseArrayCapacity(0); return obj; diff --git a/js/src/jsarray.h b/js/src/jsarray.h index 689f67a2ff2a..81993446a5cd 100644 --- a/js/src/jsarray.h +++ b/js/src/jsarray.h @@ -110,7 +110,7 @@ JSObject::isArray() const /* * Dense arrays are not native -- aobj->isNative() for a dense array aobj - * results in false, meaning aobj->map does not point to a JSScope. 
+ * results in false, meaning aobj->map does not point to a js::Shape. * * But Array methods are called via aobj.sort(), e.g., and the interpreter and * the trace recorder must consult the property cache in order to perform well. @@ -249,8 +249,8 @@ js_Array(JSContext *cx, uintN argc, js::Value *vp); * parameter. The caller promises to fill in the first |capacity| values * starting from that pointer immediately after this function returns and * without triggering GC (so this method is allowed to leave those - * uninitialized) and to set them to non-JSVAL_HOLE values, so that the - * resulting array has length and count both equal to |capacity|. + * uninitialized) and to set them to non-JS_ARRAY_HOLE-magic-why values, so + * that the resulting array has length and count both equal to |capacity|. * * FIXME: for some strange reason, when this file is included from * dom/ipc/TabParent.cpp in MSVC, jsuint resolves to a slightly different diff --git a/js/src/jsbuiltins.cpp b/js/src/jsbuiltins.cpp index 3685af033984..1ada0547ece0 100644 --- a/js/src/jsbuiltins.cpp +++ b/js/src/jsbuiltins.cpp @@ -183,73 +183,55 @@ JS_DEFINE_CALLINFO_2(extern, INT32, js_StringToInt32, CONTEXT, STRING, 1, ACCSET /* Nb: it's always safe to set isDefinitelyAtom to false if you're unsure or don't know. */ static inline JSBool -AddPropertyHelper(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, bool isDefinitelyAtom) +AddPropertyHelper(JSContext* cx, JSObject* obj, Shape* shape, bool isDefinitelyAtom) { JS_LOCK_OBJ(cx, obj); + JS_ASSERT(shape->previous() == obj->lastProperty()); - uint32 slot = sprop->slot; - JSScope* scope = obj->scope(); - if (slot != scope->freeslot) - return false; - JS_ASSERT(sprop->parent == scope->lastProperty()); - - if (scope->isSharedEmpty()) { - scope = js_GetMutableScope(cx, obj); - if (!scope) - return false; - } else { - JS_ASSERT(!scope->hasProperty(sprop)); - } - - if (!scope->table) { - if (slot < obj->numSlots()) { - JS_ASSERT(obj->getSlot(scope->freeslot).isUndefined()); - ++scope->freeslot; - } else { - if (!js_AllocSlot(cx, obj, &slot)) - goto exit_trace; - - if (slot != sprop->slot) { - js_FreeSlot(cx, obj, slot); - goto exit_trace; - } - } - - scope->extend(cx, sprop, isDefinitelyAtom); - } else { - JSScopeProperty *sprop2 = - scope->addProperty(cx, sprop->id, sprop->getter(), sprop->setter(), - SPROP_INVALID_SLOT, sprop->attributes(), sprop->getFlags(), - sprop->shortid); - if (sprop2 != sprop) + if (obj->nativeEmpty()) { + if (!obj->ensureClassReservedSlotsForEmptyObject(cx)) goto exit_trace; } + uint32 slot; + slot = shape->slot; + JS_ASSERT(slot == obj->freeslot); + + if (slot < obj->numSlots()) { + JS_ASSERT(obj->getSlot(slot).isUndefined()); + ++obj->freeslot; + JS_ASSERT(obj->freeslot != 0); + } else { + if (!obj->allocSlot(cx, &slot)) + goto exit_trace; + JS_ASSERT(slot == shape->slot); + } + + obj->extend(cx, shape, isDefinitelyAtom); if (js_IsPropertyCacheDisabled(cx)) goto exit_trace; - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, obj); return true; exit_trace: - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, obj); return false; } JSBool FASTCALL -js_AddProperty(JSContext* cx, JSObject* obj, JSScopeProperty* sprop) +js_AddProperty(JSContext* cx, JSObject* obj, Shape* shape) { - return AddPropertyHelper(cx, obj, sprop, /* isDefinitelyAtom = */false); + return AddPropertyHelper(cx, obj, shape, /* isDefinitelyAtom = */false); } -JS_DEFINE_CALLINFO_3(extern, BOOL, js_AddProperty, CONTEXT, OBJECT, SCOPEPROP, 0, ACCSET_STORE_ANY) +JS_DEFINE_CALLINFO_3(extern, BOOL, 
js_AddProperty, CONTEXT, OBJECT, SHAPE, 0, ACCSET_STORE_ANY) JSBool FASTCALL -js_AddAtomProperty(JSContext* cx, JSObject* obj, JSScopeProperty* sprop) +js_AddAtomProperty(JSContext* cx, JSObject* obj, Shape* shape) { - return AddPropertyHelper(cx, obj, sprop, /* isDefinitelyAtom = */true); + return AddPropertyHelper(cx, obj, shape, /* isDefinitelyAtom = */true); } -JS_DEFINE_CALLINFO_3(extern, BOOL, js_AddAtomProperty, CONTEXT, OBJECT, SCOPEPROP, - 0, ACCSET_STORE_ANY) +JS_DEFINE_CALLINFO_3(extern, BOOL, js_AddAtomProperty, CONTEXT, OBJECT, SHAPE, 0, ACCSET_STORE_ANY) static JSBool HasProperty(JSContext* cx, JSObject* obj, jsid id) @@ -326,7 +308,7 @@ js_NewNullClosure(JSContext* cx, JSObject* funobj, JSObject* proto, JSObject* pa if (!closure) return NULL; - closure->initSharingEmptyScope(&js_FunctionClass, proto, parent, PrivateValue(fun)); + closure->initSharingEmptyShape(&js_FunctionClass, proto, parent, PrivateValue(fun), cx); return closure; } JS_DEFINE_CALLINFO_4(extern, OBJECT, js_NewNullClosure, CONTEXT, OBJECT, OBJECT, OBJECT, diff --git a/js/src/jsbuiltins.h b/js/src/jsbuiltins.h index 381328721193..fd4508f4127d 100644 --- a/js/src/jsbuiltins.h +++ b/js/src/jsbuiltins.h @@ -222,7 +222,7 @@ struct ClosureVarInfo; #define _JS_CTYPE_CONSTRUCTOR_RETRY _JS_CTYPE(JSObject *, _JS_PTR, --, --, FAIL_NULL | \ JSTN_CONSTRUCTOR) #define _JS_CTYPE_REGEXP _JS_CTYPE(JSObject *, _JS_PTR, "","r", INFALLIBLE) -#define _JS_CTYPE_SCOPEPROP _JS_CTYPE(JSScopeProperty *, _JS_PTR, --, --, INFALLIBLE) +#define _JS_CTYPE_SHAPE _JS_CTYPE(js::Shape *, _JS_PTR, --, --, INFALLIBLE) #define _JS_CTYPE_TRACERSTATE _JS_CTYPE(TracerState *, _JS_PTR, --, --, INFALLIBLE) #define _JS_CTYPE_FRAGMENT _JS_CTYPE(nanojit::Fragment *, _JS_PTR, --, --, INFALLIBLE) #define _JS_CTYPE_CLASS _JS_CTYPE(js::Class *, _JS_PTR, --, --, INFALLIBLE) diff --git a/js/src/jscntxt.cpp b/js/src/jscntxt.cpp index f4c1e7edef45..7f3167c38bf6 100644 --- a/js/src/jscntxt.cpp +++ b/js/src/jscntxt.cpp @@ -826,13 +826,13 @@ js_NewContext(JSRuntime *rt, size_t stackChunkSize) if (ok) { /* * Ensure that the empty scopes initialized by - * JSScope::initRuntimeState get the desired special shapes. + * Shape::initRuntimeState get the desired special shapes. * (The rt->state dance above guarantees that this abuse of * rt->shapeGen is thread-safe.) */ uint32 shapeGen = rt->shapeGen; rt->shapeGen = 0; - ok = JSScope::initRuntimeState(cx); + ok = Shape::initRuntimeState(cx); if (rt->shapeGen < shapeGen) rt->shapeGen = shapeGen; } @@ -1063,7 +1063,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode) JS_BeginRequest(cx); #endif - JSScope::finishRuntimeState(cx); + Shape::finishRuntimeState(cx); js_FinishRuntimeNumberState(cx); /* Unpin all common atoms before final GC. */ diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 79d2f93a664f..e1ecee2ed730 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -59,6 +59,7 @@ #include "jsatom.h" #include "jsdhash.h" #include "jsdtoa.h" +#include "jsfun.h" #include "jsgc.h" #include "jsgcchunk.h" #include "jshashtable.h" @@ -1183,7 +1184,7 @@ typedef enum JSRuntimeState { typedef struct JSPropertyTreeEntry { JSDHashEntryHdr hdr; - JSScopeProperty *child; + js::Shape *child; } JSPropertyTreeEntry; @@ -1346,17 +1347,6 @@ struct JSRuntime { bool gcRunning; bool gcRegenShapes; - /* - * During gc, if rt->gcRegenShapes && - * (scope->flags & JSScope::SHAPE_REGEN) == rt->gcRegenShapesScopeFlag, - * then the scope's shape has already been regenerated during this GC. 
- * To avoid having to sweep JSScopes, the bit's meaning toggles with each - * shape-regenerating GC. - * - * FIXME Once scopes are GC'd (bug 505004), this will be obsolete. - */ - uint8 gcRegenShapesScopeFlag; - #ifdef JS_GC_ZEAL jsrefcount gcZeal; #endif @@ -1485,9 +1475,9 @@ struct JSRuntime { #define JS_PROPERTY_TREE(cx) ((cx)->runtime->propertyTree) /* - * The propertyRemovals counter is incremented for every JSScope::clear, - * and for each JSScope::remove method call that frees a slot in an object. - * See js_NativeGet and js_NativeSet in jsobj.cpp. + * The propertyRemovals counter is incremented for every JSObject::clear, + * and for each JSObject::remove method call that frees a slot in the given + * object. See js_NativeGet and js_NativeSet in jsobj.cpp. */ int32 propertyRemovals; @@ -1539,14 +1529,14 @@ struct JSRuntime { /* * Runtime-shared empty scopes for well-known built-in objects that lack - * class prototypes (the usual locus of an emptyScope). Mnemonic: ABCDEW + * class prototypes (the usual locus of an emptyShape). Mnemonic: ABCDEW */ - JSEmptyScope *emptyArgumentsScope; - JSEmptyScope *emptyBlockScope; - JSEmptyScope *emptyCallScope; - JSEmptyScope *emptyDeclEnvScope; - JSEmptyScope *emptyEnumeratorScope; - JSEmptyScope *emptyWithScope; + js::EmptyShape *emptyArgumentsShape; + js::EmptyShape *emptyBlockShape; + js::EmptyShape *emptyCallShape; + js::EmptyShape *emptyDeclEnvShape; + js::EmptyShape *emptyEnumeratorShape; + js::EmptyShape *emptyWithShape; /* * Various metering fields are defined at the end of JSRuntime. In this @@ -1578,16 +1568,34 @@ struct JSRuntime { jsrefcount claimedTitles; jsrefcount deadContexts; jsrefcount deadlocksAvoided; - jsrefcount liveScopes; + jsrefcount liveShapes; jsrefcount sharedTitles; - jsrefcount totalScopes; - jsrefcount liveScopeProps; - jsrefcount liveScopePropsPreSweep; - jsrefcount totalScopeProps; + jsrefcount totalShapes; + jsrefcount liveObjectProps; + jsrefcount liveObjectPropsPreSweep; + jsrefcount totalObjectProps; jsrefcount livePropTreeNodes; jsrefcount duplicatePropTreeNodes; jsrefcount totalPropTreeNodes; jsrefcount propTreeKidsChunks; + jsrefcount liveDictModeNodes; + + /* + * NB: emptyShapes is init'ed iff at least one of these envars is set: + * + * JS_PROPTREE_STATFILE statistics on the property tree forest + * JS_PROPTREE_DUMPFILE all paths in the property tree forest + */ + const char *propTreeStatFilename; + const char *propTreeDumpFilename; + + bool meterEmptyShapes() const { return propTreeStatFilename || propTreeDumpFilename; } + + typedef js::HashSet, + js::SystemAllocPolicy> EmptyShapeSet; + + EmptyShapeSet emptyShapes; /* String instrumentation. 
*/ jsrefcount liveStrings; @@ -1741,12 +1749,8 @@ typedef struct JSResolvingEntry { extern const JSDebugHooks js_NullDebugHooks; /* defined in jsdbgapi.cpp */ namespace js { + class AutoGCRooter; -} - -namespace js { - -class RegExp; class RegExpStatics { @@ -1853,7 +1857,7 @@ class RegExpStatics void getRightContext(JSSubString *out) const; }; -} +} /* namespace js */ struct JSContext { @@ -2416,7 +2420,7 @@ class AutoGCRooter { enum { JSVAL = -1, /* js::AutoValueRooter */ - SPROP = -2, /* js::AutoScopePropertyRooter */ + SHAPE = -2, /* js::AutoShapeRooter */ PARSER = -3, /* js::Parser */ SCRIPT = -4, /* js::AutoScriptRooter */ ENUMERATOR = -5, /* js::AutoEnumStateRooter */ @@ -2600,11 +2604,11 @@ class AutoArrayRooter : private AutoGCRooter { JS_DECL_USE_GUARD_OBJECT_NOTIFIER }; -class AutoScopePropertyRooter : private AutoGCRooter { +class AutoShapeRooter : private AutoGCRooter { public: - AutoScopePropertyRooter(JSContext *cx, JSScopeProperty *sprop - JS_GUARD_OBJECT_NOTIFIER_PARAM) - : AutoGCRooter(cx, SPROP), sprop(sprop) + AutoShapeRooter(JSContext *cx, const js::Shape *shape + JS_GUARD_OBJECT_NOTIFIER_PARAM) + : AutoGCRooter(cx, SHAPE), shape(shape) { JS_GUARD_OBJECT_NOTIFIER_INIT; } @@ -2613,7 +2617,7 @@ class AutoScopePropertyRooter : private AutoGCRooter { friend void MarkRuntime(JSTracer *trc); private: - JSScopeProperty * const sprop; + const js::Shape * const shape; JS_DECL_USE_GUARD_OBJECT_NOTIFIER }; @@ -2809,6 +2813,37 @@ class AutoReleasePtr { ~AutoReleasePtr() { cx->free(ptr); } }; +class AutoLocalNameArray { + public: + explicit AutoLocalNameArray(JSContext *cx, JSFunction *fun + JS_GUARD_OBJECT_NOTIFIER_PARAM) + : context(cx), + mark(JS_ARENA_MARK(&cx->tempPool)), + names(fun->getLocalNameArray(cx, &cx->tempPool)), + count(fun->countLocalNames()) + { + JS_GUARD_OBJECT_NOTIFIER_INIT; + } + + ~AutoLocalNameArray() { + JS_ARENA_RELEASE(&context->tempPool, mark); + } + + operator bool() const { return !!names; } + + uint32 length() const { return count; } + + const jsuword &operator [](unsigned i) const { return names[i]; } + + private: + JSContext *context; + void *mark; + jsuword *names; + uint32 count; + + JS_DECL_USE_GUARD_OBJECT_NOTIFIER +}; + } /* namespace js */ class JSAutoResolveFlags diff --git a/js/src/jscntxtinlines.h b/js/src/jscntxtinlines.h index cd89333ce49c..24dbe5b8cbf5 100644 --- a/js/src/jscntxtinlines.h +++ b/js/src/jscntxtinlines.h @@ -548,7 +548,7 @@ assertSameCompartment(JSContext *cx, T1 t1, T2 t2, T3 t3, T4 t4, T5 t5) #undef START_ASSERT_SAME_COMPARTMENT inline JSBool -callJSNative(JSContext *cx, js::Native native, JSObject *thisobj, uintN argc, js::Value *argv, js::Value *rval) +CallJSNative(JSContext *cx, js::Native native, JSObject *thisobj, uintN argc, js::Value *argv, js::Value *rval) { assertSameCompartment(cx, thisobj, ValueArray(argv, argc)); JSBool ok = native(cx, thisobj, argc, argv, rval); @@ -558,7 +558,7 @@ callJSNative(JSContext *cx, js::Native native, JSObject *thisobj, uintN argc, js } inline JSBool -callJSFastNative(JSContext *cx, js::FastNative native, uintN argc, js::Value *vp) +CallJSFastNative(JSContext *cx, js::FastNative native, uintN argc, js::Value *vp) { assertSameCompartment(cx, ValueArray(vp, argc + 2)); JSBool ok = native(cx, argc, vp); @@ -568,7 +568,7 @@ callJSFastNative(JSContext *cx, js::FastNative native, uintN argc, js::Value *vp } inline JSBool -callJSPropertyOp(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::Value *vp) +CallJSPropertyOp(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, 
js::Value *vp) { assertSameCompartment(cx, obj, id, *vp); JSBool ok = op(cx, obj, id, vp); @@ -578,7 +578,7 @@ callJSPropertyOp(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::V } inline JSBool -callJSPropertyOpSetter(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::Value *vp) +CallJSPropertyOpSetter(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::Value *vp) { assertSameCompartment(cx, obj, id, *vp); return op(cx, obj, id, vp); diff --git a/js/src/jsdbgapi.cpp b/js/src/jsdbgapi.cpp index b3528e666bd2..c42a908b4fa6 100644 --- a/js/src/jsdbgapi.cpp +++ b/js/src/jsdbgapi.cpp @@ -413,7 +413,7 @@ JS_ClearInterrupt(JSRuntime *rt, JSInterruptHook *hoop, void **closurep) typedef struct JSWatchPoint { JSCList links; JSObject *object; /* weak link, see js_FinalizeObject */ - JSScopeProperty *sprop; + const Shape *shape; PropertyOp setter; JSWatchPointHandler handler; JSObject *closure; @@ -424,7 +424,7 @@ typedef struct JSWatchPoint { #define JSWP_HELD 0x2 /* held while running handler/setter */ static bool -IsWatchedProperty(JSContext *cx, JSScopeProperty *sprop); +IsWatchedProperty(JSContext *cx, const Shape &shape); /* * NB: DropWatchPointAndUnlock releases cx->runtime->debuggerLock in all cases. @@ -432,53 +432,53 @@ IsWatchedProperty(JSContext *cx, JSScopeProperty *sprop); static JSBool DropWatchPointAndUnlock(JSContext *cx, JSWatchPoint *wp, uintN flag) { - JSBool ok; - JSScopeProperty *sprop; - JSScope *scope; - PropertyOp setter; + bool ok = true; + JSRuntime *rt = cx->runtime; - ok = JS_TRUE; wp->flags &= ~flag; if (wp->flags != 0) { - DBG_UNLOCK(cx->runtime); + DBG_UNLOCK(rt); return ok; } /* * Remove wp from the list, then if there are no other watchpoints for - * wp->sprop in any scope, restore wp->sprop->setter from wp. + * wp->shape in any scope, restore wp->shape->setter from wp. */ - ++cx->runtime->debuggerMutations; + ++rt->debuggerMutations; JS_REMOVE_LINK(&wp->links); - sprop = wp->sprop; - /* - * Passing null for the scope parameter tells js_GetWatchedSetter to find - * any watch point for sprop, and not to lock or unlock rt->debuggerLock. - * If js_ChangeNativePropertyAttrs fails, propagate failure after removing - * wp->closure's root and freeing wp. - */ - setter = js_GetWatchedSetter(cx->runtime, NULL, sprop); - DBG_UNLOCK(cx->runtime); + const Shape *shape = wp->shape; + PropertyOp setter = NULL; + + for (JSWatchPoint *wp2 = (JSWatchPoint *)rt->watchPointList.next; + &wp2->links != &rt->watchPointList; + wp2 = (JSWatchPoint *)wp2->links.next) { + if (wp2->shape == shape) { + setter = wp->setter; + break; + } + } + DBG_UNLOCK(rt); + if (!setter) { JS_LOCK_OBJ(cx, wp->object); - scope = wp->object->scope(); /* * If the property wasn't found on wp->object, or it isn't still being * watched, then someone else must have deleted or unwatched it, and we * don't need to change the property attributes. 
*/ - JSScopeProperty *wprop = scope->lookup(sprop->id); + const Shape *wprop = wp->object->nativeLookup(shape->id); if (wprop && - wprop->hasSetterValue() == sprop->hasSetterValue() && - IsWatchedProperty(cx, wprop)) { - sprop = scope->changeProperty(cx, wprop, 0, wprop->attributes(), - wprop->getter(), wp->setter); - if (!sprop) - ok = JS_FALSE; + wprop->hasSetterValue() == shape->hasSetterValue() && + IsWatchedProperty(cx, *wprop)) { + shape = wp->object->changeProperty(cx, wprop, 0, wprop->attributes(), + wprop->getter(), wp->setter); + if (!shape) + ok = false; } - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, wp->object); } cx->free(wp); @@ -502,8 +502,8 @@ js_TraceWatchPoints(JSTracer *trc, JSObject *obj) &wp->links != &rt->watchPointList; wp = (JSWatchPoint *)wp->links.next) { if (wp->object == obj) { - wp->sprop->trace(trc); - if (wp->sprop->hasSetterValue() && wp->setter) + wp->shape->trace(trc); + if (wp->shape->hasSetterValue() && wp->setter) JS_CALL_OBJECT_TRACER(trc, CastAsObject(wp->setter), "wp->setter"); JS_CALL_OBJECT_TRACER(trc, wp->closure, "wp->closure"); } @@ -542,57 +542,30 @@ js_SweepWatchPoints(JSContext *cx) * NB: FindWatchPoint must be called with rt->debuggerLock acquired. */ static JSWatchPoint * -FindWatchPoint(JSRuntime *rt, JSScope *scope, jsid id) +FindWatchPoint(JSRuntime *rt, JSObject *obj, jsid id) { JSWatchPoint *wp; for (wp = (JSWatchPoint *)rt->watchPointList.next; &wp->links != &rt->watchPointList; wp = (JSWatchPoint *)wp->links.next) { - if (wp->object->scope() == scope && wp->sprop->id == id) + if (wp->object == obj && wp->shape->id == id) return wp; } return NULL; } -JSScopeProperty * -js_FindWatchPoint(JSRuntime *rt, JSScope *scope, jsid id) +const Shape * +js_FindWatchPoint(JSRuntime *rt, JSObject *obj, jsid id) { JSWatchPoint *wp; - JSScopeProperty *sprop; + const Shape *shape; DBG_LOCK(rt); - wp = FindWatchPoint(rt, scope, id); - sprop = wp ? wp->sprop : NULL; + wp = FindWatchPoint(rt, obj, id); + shape = wp ? wp->shape : NULL; DBG_UNLOCK(rt); - return sprop; -} - -/* - * Secret handshake with DropWatchPointAndUnlock: if (!scope), we know our - * caller has acquired rt->debuggerLock, so we don't have to. - */ -PropertyOp -js_GetWatchedSetter(JSRuntime *rt, JSScope *scope, - const JSScopeProperty *sprop) -{ - PropertyOp setter; - JSWatchPoint *wp; - - setter = NULL; - if (scope) - DBG_LOCK(rt); - for (wp = (JSWatchPoint *)rt->watchPointList.next; - &wp->links != &rt->watchPointList; - wp = (JSWatchPoint *)wp->links.next) { - if ((!scope || wp->object->scope() == scope) && wp->sprop == sprop) { - setter = wp->setter; - break; - } - } - if (scope) - DBG_UNLOCK(rt); - return setter; + return shape; } JSBool @@ -603,22 +576,21 @@ js_watch_set(JSContext *cx, JSObject *obj, jsid id, Value *vp) for (JSWatchPoint *wp = (JSWatchPoint *)rt->watchPointList.next; &wp->links != &rt->watchPointList; wp = (JSWatchPoint *)wp->links.next) { - JSScopeProperty *sprop = wp->sprop; - if (wp->object == obj && SPROP_USERID(sprop) == id && + const Shape *shape = wp->shape; + if (wp->object == obj && SHAPE_USERID(shape) == id && !(wp->flags & JSWP_HELD)) { wp->flags |= JSWP_HELD; DBG_UNLOCK(rt); JS_LOCK_OBJ(cx, obj); - jsid propid = sprop->id; - jsid userid = SPROP_USERID(sprop); - JSScope *scope = obj->scope(); + jsid propid = shape->id; + jsid userid = SHAPE_USERID(shape); JS_UNLOCK_OBJ(cx, obj); /* NB: wp is held, so we can safely dereference it still. */ if (!wp->handler(cx, obj, propid, - SPROP_HAS_VALID_SLOT(sprop, scope) - ? 
Jsvalify(obj->getSlotMT(cx, sprop->slot)) + obj->containsSlot(shape->slot) + ? Jsvalify(obj->getSlotMT(cx, shape->slot)) : JSVAL_VOID, Jsvalify(vp), wp->closure)) { DBG_LOCK(rt); @@ -631,11 +603,11 @@ js_watch_set(JSContext *cx, JSObject *obj, jsid id, Value *vp) * prevent any funny business between watchpoints and setters. */ JSBool ok = !wp->setter || - (sprop->hasSetterValue() + (shape->hasSetterValue() ? InternalCall(cx, obj, ObjectValue(*CastAsObject(wp->setter)), 1, vp, vp) - : callJSPropertyOpSetter(cx, wp->setter, obj, userid, vp)); + : CallJSPropertyOpSetter(cx, wp->setter, obj, userid, vp)); DBG_LOCK(rt); return DropWatchPointAndUnlock(cx, wp, JSWP_HELD) && ok; @@ -661,17 +633,17 @@ js_watch_set_wrapper(JSContext *cx, JSObject *obj, uintN argc, Value *argv, } static bool -IsWatchedProperty(JSContext *cx, JSScopeProperty *sprop) +IsWatchedProperty(JSContext *cx, const Shape &shape) { - if (sprop->hasSetterValue()) { - JSObject *funobj = sprop->setterObject(); + if (shape.hasSetterValue()) { + JSObject *funobj = shape.setterObject(); if (!funobj || !funobj->isFunction()) return false; JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj); return FUN_NATIVE(fun) == js_watch_set_wrapper; } - return sprop->setterOp() == js_watch_set; + return shape.setterOp() == js_watch_set; } PropertyOp @@ -710,7 +682,7 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id, jsid propid; JSObject *pobj; JSProperty *prop; - JSScopeProperty *sprop; + const Shape *shape; JSRuntime *rt; JSBool ok; JSWatchPoint *wp; @@ -747,18 +719,18 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id, if (!js_LookupProperty(cx, obj, propid, &pobj, &prop)) return JS_FALSE; - sprop = (JSScopeProperty *) prop; + shape = (Shape *) prop; rt = cx->runtime; - if (!sprop) { + if (!shape) { /* Check for a deleted symbol watchpoint, which holds its property. */ - sprop = js_FindWatchPoint(rt, obj->scope(), propid); - if (!sprop) { + shape = js_FindWatchPoint(rt, obj, propid); + if (!shape) { /* Make a new property in obj so we can watch for the first set. */ if (!js_DefineNativeProperty(cx, obj, propid, UndefinedValue(), NULL, NULL, JSPROP_ENUMERATE, 0, 0, &prop)) { return JS_FALSE; } - sprop = (JSScopeProperty *) prop; + shape = (Shape *) prop; } } else if (pobj != obj) { /* Clone the prototype property so we can watch the right object. */ @@ -768,14 +740,14 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id, intN shortid; if (pobj->isNative()) { - valroot.set(SPROP_HAS_VALID_SLOT(sprop, pobj->scope()) - ? pobj->lockedGetSlot(sprop->slot) + valroot.set(pobj->containsSlot(shape->slot) + ? pobj->lockedGetSlot(shape->slot) : UndefinedValue()); - getter = sprop->getter(); - setter = sprop->setter(); - attrs = sprop->attributes(); - flags = sprop->getFlags(); - shortid = sprop->shortid; + getter = shape->getter(); + setter = shape->setter(); + attrs = shape->attributes(); + flags = shape->getFlags(); + shortid = shape->shortid; JS_UNLOCK_OBJ(cx, pobj); } else { if (!pobj->getProperty(cx, propid, valroot.addr()) || @@ -793,19 +765,19 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id, shortid, &prop)) { return JS_FALSE; } - sprop = (JSScopeProperty *) prop; + shape = (Shape *) prop; } /* - * At this point, prop/sprop exists in obj, obj is locked, and we must + * At this point, prop/shape exists in obj, obj is locked, and we must * unlock the object before returning. 
*/ ok = JS_TRUE; DBG_LOCK(rt); - wp = FindWatchPoint(rt, obj->scope(), propid); + wp = FindWatchPoint(rt, obj, propid); if (!wp) { DBG_UNLOCK(rt); - watcher = js_WrapWatchedSetter(cx, propid, sprop->attributes(), sprop->setter()); + watcher = js_WrapWatchedSetter(cx, propid, shape->attributes(), shape->setter()); if (!watcher) { ok = JS_FALSE; goto out; @@ -819,13 +791,13 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id, wp->handler = NULL; wp->closure = NULL; wp->object = obj; - wp->setter = sprop->setter(); + wp->setter = shape->setter(); wp->flags = JSWP_LIVE; /* XXXbe nest in obj lock here */ - sprop = js_ChangeNativePropertyAttrs(cx, obj, sprop, 0, sprop->attributes(), - sprop->getter(), watcher); - if (!sprop) { + shape = js_ChangeNativePropertyAttrs(cx, obj, shape, 0, shape->attributes(), + shape->getter(), watcher); + if (!shape) { /* Self-link so DropWatchPointAndUnlock can JS_REMOVE_LINK it. */ JS_INIT_CLIST(&wp->links); DBG_LOCK(rt); @@ -833,7 +805,7 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id, ok = JS_FALSE; goto out; } - wp->sprop = sprop; + wp->shape = shape; /* * Now that wp is fully initialized, append it to rt's wp list. @@ -841,7 +813,7 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id, * a watchpoint for (obj, propid). */ DBG_LOCK(rt); - JS_ASSERT(!FindWatchPoint(rt, obj->scope(), propid)); + JS_ASSERT(!FindWatchPoint(rt, obj, propid)); JS_APPEND_LINK(&wp->links, &rt->watchPointList); ++rt->debuggerMutations; } @@ -866,7 +838,7 @@ JS_ClearWatchPoint(JSContext *cx, JSObject *obj, jsid id, for (wp = (JSWatchPoint *)rt->watchPointList.next; &wp->links != &rt->watchPointList; wp = (JSWatchPoint *)wp->links.next) { - if (wp->object == obj && SPROP_USERID(wp->sprop) == id) { + if (wp->object == obj && SHAPE_USERID(wp->shape) == id) { if (handlerp) *handlerp = wp->handler; if (closurep) @@ -962,7 +934,7 @@ extern JS_PUBLIC_API(jsuword *) JS_GetFunctionLocalNameArray(JSContext *cx, JSFunction *fun, void **markp) { *markp = JS_ARENA_MARK(&cx->tempPool); - return js_GetLocalNameArray(cx, fun, &cx->tempPool); + return fun->getLocalNameArray(cx, &cx->tempPool); } extern JS_PUBLIC_API(JSAtom *) @@ -1324,34 +1296,40 @@ JS_EvaluateInStackFrame(JSContext *cx, JSStackFrame *fp, /************************************************************************/ -/* XXXbe this all needs to be reworked to avoid requiring JSScope types. */ +/* This all should be reworked to avoid requiring JSScopeProperty types. */ JS_PUBLIC_API(JSScopeProperty *) JS_PropertyIterator(JSObject *obj, JSScopeProperty **iteratorp) { - JSScopeProperty *sprop; - JSScope *scope; + const Shape *shape; - sprop = *iteratorp; - scope = obj->scope(); + /* The caller passes null in *iteratorp to get things started. */ + shape = (Shape *) *iteratorp; + if (!shape) { + shape = obj->lastProperty(); + } else { + shape = shape->previous(); + if (!shape->previous()) { + JS_ASSERT(JSID_IS_EMPTY(shape->id)); + shape = NULL; + } + } - /* XXXbe minor(?) incompatibility: iterate in reverse definition order */ - sprop = sprop ? 
sprop->parent : scope->lastProperty(); - *iteratorp = sprop; - return sprop; + return *iteratorp = reinterpret_cast(const_cast(shape)); } JS_PUBLIC_API(JSBool) JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop, JSPropertyDesc *pd) { - pd->id = IdToJsval(sprop->id); + Shape *shape = (Shape *) sprop; + pd->id = IdToJsval(shape->id); JSBool wasThrowing = cx->throwing; AutoValueRooter lastException(cx, cx->exception); cx->throwing = JS_FALSE; - if (!js_GetProperty(cx, obj, sprop->id, Valueify(&pd->value))) { + if (!js_GetProperty(cx, obj, shape->id, Valueify(&pd->value))) { if (!cx->throwing) { pd->flags = JSPD_ERROR; pd->value = JSVAL_VOID; @@ -1367,27 +1345,26 @@ JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop, if (wasThrowing) cx->exception = lastException.value(); - pd->flags |= (sprop->enumerable() ? JSPD_ENUMERATE : 0) - | (!sprop->writable() ? JSPD_READONLY : 0) - | (!sprop->configurable() ? JSPD_PERMANENT : 0); + pd->flags |= (shape->enumerable() ? JSPD_ENUMERATE : 0) + | (!shape->writable() ? JSPD_READONLY : 0) + | (!shape->configurable() ? JSPD_PERMANENT : 0); pd->spare = 0; - if (sprop->getter() == js_GetCallArg) { - pd->slot = sprop->shortid; + if (shape->getter() == js_GetCallArg) { + pd->slot = shape->shortid; pd->flags |= JSPD_ARGUMENT; - } else if (sprop->getter() == js_GetCallVar) { - pd->slot = sprop->shortid; + } else if (shape->getter() == js_GetCallVar) { + pd->slot = shape->shortid; pd->flags |= JSPD_VARIABLE; } else { pd->slot = 0; } pd->alias = JSVAL_VOID; - JSScope *scope = obj->scope(); - if (SPROP_HAS_VALID_SLOT(sprop, scope)) { - JSScopeProperty *aprop; - for (aprop = scope->lastProperty(); aprop; aprop = aprop->parent) { - if (aprop != sprop && aprop->slot == sprop->slot) { - pd->alias = IdToJsval(aprop->id); + if (obj->containsSlot(shape->slot)) { + for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) { + const Shape &aprop = r.front(); + if (&aprop != shape && aprop.slot == shape->slot) { + pd->alias = IdToJsval(aprop.id); break; } } @@ -1398,11 +1375,6 @@ JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop, JS_PUBLIC_API(JSBool) JS_GetPropertyDescArray(JSContext *cx, JSObject *obj, JSPropertyDescArray *pda) { - JSScope *scope; - uint32 i, n; - JSPropertyDesc *pd; - JSScopeProperty *sprop; - Class *clasp = obj->getClass(); if (!obj->isNative() || (clasp->flags & JSCLASS_NEW_ENUMERATE)) { JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, @@ -1412,25 +1384,25 @@ JS_GetPropertyDescArray(JSContext *cx, JSObject *obj, JSPropertyDescArray *pda) if (!clasp->enumerate(cx, obj)) return JS_FALSE; - /* have no props, or object's scope has not mutated from that of proto */ - scope = obj->scope(); - if (scope->entryCount == 0) { + /* Return an empty pda early if obj has no own properties. 
*/ + if (obj->nativeEmpty()) { pda->length = 0; pda->array = NULL; return JS_TRUE; } - n = scope->entryCount; - pd = (JSPropertyDesc *) cx->malloc((size_t)n * sizeof(JSPropertyDesc)); + uint32 n = obj->propertyCount(); + JSPropertyDesc *pd = (JSPropertyDesc *) cx->malloc(size_t(n) * sizeof(JSPropertyDesc)); if (!pd) return JS_FALSE; - i = 0; - for (sprop = scope->lastProperty(); sprop; sprop = sprop->parent) { + uint32 i = 0; + for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) { if (!js_AddRoot(cx, Valueify(&pd[i].id), NULL)) goto bad; if (!js_AddRoot(cx, Valueify(&pd[i].value), NULL)) goto bad; - if (!JS_GetPropertyDesc(cx, obj, sprop, &pd[i])) + Shape *shape = const_cast(&r.front()); + if (!JS_GetPropertyDesc(cx, obj, reinterpret_cast(shape), &pd[i])) goto bad; if ((pd[i].flags & JSPD_ALIAS) && !js_AddRoot(cx, Valueify(&pd[i].alias), NULL)) goto bad; @@ -1580,21 +1552,14 @@ JS_SetDebugErrorHook(JSRuntime *rt, JSDebugErrorHook hook, void *closure) JS_PUBLIC_API(size_t) JS_GetObjectTotalSize(JSContext *cx, JSObject *obj) { - size_t nbytes; - JSScope *scope; + size_t nbytes = (obj->isFunction() && obj->getPrivate() == obj) + ? sizeof(JSFunction) + : sizeof *obj; - nbytes = sizeof *obj; if (obj->dslots) { nbytes += (obj->dslots[-1].toPrivateUint32() - JS_INITIAL_NSLOTS + 1) * sizeof obj->dslots[0]; } - if (obj->isNative()) { - scope = obj->scope(); - if (!scope->isSharedEmpty()) { - nbytes += sizeof *scope; - nbytes += SCOPE_CAPACITY(scope) * sizeof(JSScopeProperty *); - } - } return nbytes; } diff --git a/js/src/jsdbgapi.h b/js/src/jsdbgapi.h index 0204c472e665..61b7af6f3652 100644 --- a/js/src/jsdbgapi.h +++ b/js/src/jsdbgapi.h @@ -111,17 +111,10 @@ js_TraceWatchPoints(JSTracer *trc, JSObject *obj); extern void js_SweepWatchPoints(JSContext *cx); -extern JSScopeProperty * -js_FindWatchPoint(JSRuntime *rt, JSScope *scope, jsid id); - #ifdef __cplusplus -/* - * NB: callers outside of jsdbgapi.c must pass non-null scope. 
- */ -extern js::PropertyOp -js_GetWatchedSetter(JSRuntime *rt, JSScope *scope, - const JSScopeProperty *sprop); +extern const js::Shape * +js_FindWatchPoint(JSRuntime *rt, JSObject *obj, jsid id); extern JSBool js_watch_set(JSContext *cx, JSObject *obj, jsid id, js::Value *vp); @@ -380,11 +373,13 @@ typedef struct JSPropertyDescArray { JSPropertyDesc *array; /* alloc'd by Get, freed by Put */ } JSPropertyDescArray; +typedef struct JSScopeProperty JSScopeProperty; + extern JS_PUBLIC_API(JSScopeProperty *) JS_PropertyIterator(JSObject *obj, JSScopeProperty **iteratorp); extern JS_PUBLIC_API(JSBool) -JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop, +JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *shape, JSPropertyDesc *pd); extern JS_PUBLIC_API(JSBool) diff --git a/js/src/jsemit.cpp b/js/src/jsemit.cpp index 6938f8c7b856..9cf15aeee41f 100644 --- a/js/src/jsemit.cpp +++ b/js/src/jsemit.cpp @@ -1275,9 +1275,9 @@ JSTreeContext::ensureSharpSlots() return false; sharpSlotBase = fun->u.i.nvars; - if (!js_AddLocal(cx, fun, sharpArrayAtom, JSLOCAL_VAR)) + if (!fun->addLocal(cx, sharpArrayAtom, JSLOCAL_VAR)) return false; - if (!js_AddLocal(cx, fun, sharpDepthAtom, JSLOCAL_VAR)) + if (!fun->addLocal(cx, sharpDepthAtom, JSLOCAL_VAR)) return false; } else { /* @@ -1561,10 +1561,6 @@ js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom, JSStmtInfo * js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt) { - JSObject *obj; - JSScope *scope; - JSScopeProperty *sprop; - if (!stmt) stmt = tc->topScopeStmt; for (; stmt; stmt = stmt->downScope) { @@ -1575,17 +1571,17 @@ js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt if (!(stmt->flags & SIF_SCOPE)) continue; - obj = stmt->blockObj; + JSObject *obj = stmt->blockObj; JS_ASSERT(obj->getClass() == &js_BlockClass); - scope = obj->scope(); - sprop = scope->lookup(ATOM_TO_JSID(atom)); - if (sprop) { - JS_ASSERT(sprop->hasShortID()); + + const Shape *shape = obj->nativeLookup(ATOM_TO_JSID(atom)); + if (shape) { + JS_ASSERT(shape->hasShortID()); if (slotp) { JS_ASSERT(obj->fslots[JSSLOT_BLOCK_DEPTH].isInt32()); *slotp = obj->fslots[JSSLOT_BLOCK_DEPTH].toInt32() + - sprop->shortid; + shape->shortid; } return stmt; } @@ -1634,30 +1630,29 @@ LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom, * nor can prop be deleted. */ if (cg->inFunction()) { - if (js_LookupLocal(cx, cg->fun, atom, NULL) != JSLOCAL_NONE) + if (cg->fun->lookupLocal(cx, atom, NULL) != JSLOCAL_NONE) break; } else { JS_ASSERT(cg->compileAndGo()); obj = cg->scopeChain; JS_LOCK_OBJ(cx, obj); - JSScope *scope = obj->scope(); - JSScopeProperty *sprop = scope->lookup(ATOM_TO_JSID(atom)); - if (sprop) { + const Shape *shape = obj->nativeLookup(ATOM_TO_JSID(atom)); + if (shape) { /* * We're compiling code that will be executed immediately, * not re-executed against a different scope chain and/or * variable object. Therefore we can get constant values * from our variable object here. 
*/ - if (!sprop->writable() && !sprop->configurable() && - sprop->hasDefaultGetter() && SPROP_HAS_VALID_SLOT(sprop, scope)) { - *constp = obj->lockedGetSlot(sprop->slot); + if (!shape->writable() && !shape->configurable() && + shape->hasDefaultGetter() && obj->containsSlot(shape->slot)) { + *constp = obj->lockedGetSlot(shape->slot); } } - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, obj); - if (sprop) + if (shape) break; } } @@ -1852,8 +1847,10 @@ EmitEnterBlock(JSContext *cx, JSParseNode *pn, JSCodeGenerator *cg) #endif } - blockObj->scope()->freeslot = base; - return blockObj->growSlots(cx, base); + if (!blockObj->growSlots(cx, base)) + return false; + blockObj->freeslot = base; + return true; } /* @@ -1904,7 +1901,7 @@ MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg) JSAtom *atom = pn->pn_atom; uintN index; - JSLocalKind localKind = js_LookupLocal(cx, fun, atom, &index); + JSLocalKind localKind = fun->lookupLocal(cx, atom, &index); if (localKind == JSLOCAL_NONE) return true; @@ -1914,10 +1911,8 @@ MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg) JSAtomListElement *ale = cg->upvarList.lookup(atom); if (!ale) { - if (cg->inFunction() && - !js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR)) { + if (cg->inFunction() && !cg->fun->addLocal(cx, atom, JSLOCAL_UPVAR)) return false; - } ale = cg->upvarList.add(cg->parser, atom); if (!ale) @@ -2206,7 +2201,7 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn) return JS_TRUE; if (FUN_FLAT_CLOSURE(cg->fun)) { - op = JSOP_GETDSLOT; + op = JSOP_GETFCSLOT; } else { /* * The function we're compiling may not be heavyweight, but if it @@ -2233,7 +2228,7 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn) if (ale) { index = ALE_INDEX(ale); } else { - if (!js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR)) + if (!cg->fun->addLocal(cx, atom, JSLOCAL_UPVAR)) return JS_FALSE; ale = cg->upvarList.add(cg->parser, atom); @@ -2601,8 +2596,8 @@ EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn, case JSOP_GETUPVAR: op = JSOP_CALLUPVAR; break; - case JSOP_GETDSLOT: - op = JSOP_CALLDSLOT; + case JSOP_GETFCSLOT: + op = JSOP_CALLFCSLOT; break; default: JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE); @@ -4446,7 +4441,7 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn) #ifdef DEBUG JSLocalKind localKind = #endif - js_LookupLocal(cx, cg->fun, fun->atom, &slot); + cg->fun->lookupLocal(cx, fun->atom, &slot); JS_ASSERT(localKind == JSLOCAL_VAR || localKind == JSLOCAL_CONST); JS_ASSERT(index < JS_BIT(20)); pn->pn_index = index; @@ -7307,8 +7302,8 @@ js_FinishTakingTryNotes(JSCodeGenerator *cg, JSTryNoteArray *array) * cloned function objects and with the compiler-created clone-parent. There * are nregexps = script->regexps()->length such reserved slots in each * function object cloned from fun->object. NB: during compilation, a funobj - * slots element must never be allocated, because js_AllocSlot could hand out - * one of the slots that should be given to a regexp clone. + * slots element must never be allocated, because JSObject::allocSlot could + * hand out one of the slots that should be given to a regexp clone. 
* * If the code being compiled is global code, the cloned regexp are stored in * fp->vars slot after cg->ngvars and to protect regexp slots from GC we set diff --git a/js/src/jsfun.cpp b/js/src/jsfun.cpp index 633f6cabc2e0..07c25cf25def 100644 --- a/js/src/jsfun.cpp +++ b/js/src/jsfun.cpp @@ -63,6 +63,7 @@ #include "jsobj.h" #include "jsopcode.h" #include "jsparse.h" +#include "jspropertytree.h" #include "jsproxy.h" #include "jsscan.h" #include "jsscope.h" @@ -84,7 +85,6 @@ #include "jscntxtinlines.h" #include "jsfuninlines.h" #include "jsobjinlines.h" -#include "jscntxtinlines.h" using namespace js; @@ -179,17 +179,23 @@ NewArguments(JSContext *cx, JSObject *parent, uint32 argc, JSObject *callee) if (!argsobj) return NULL; - /* Init immediately to avoid GC seeing a half-init'ed object. */ - bool strict = callee->getFunctionPrivate()->inStrictMode(); - argsobj->init(strict ? &StrictArgumentsClass : &js_ArgumentsClass, proto, parent, - PrivateValue(NULL)); - argsobj->setArgsLength(argc); - argsobj->setArgsCallee(ObjectValue(*callee)); - argsobj->map = cx->runtime->emptyArgumentsScope->hold(); - - /* This must come after argsobj->map has been set. */ - if (!js_EnsureReservedSlots(cx, argsobj, argc)) + ArgumentsData *data = (ArgumentsData *) + cx->malloc(offsetof(ArgumentsData, slots) + argc * sizeof(Value)); + if (!data) return NULL; + SetValueRangeToUndefined(data->slots, argc); + + /* Can't fail from here on, so initialize everything in argsobj. */ + argsobj->init(callee->getFunctionPrivate()->inStrictMode() + ? &StrictArgumentsClass + : &js_ArgumentsClass, + proto, parent, PrivateValue(NULL), cx); + + argsobj->setMap(cx->runtime->emptyArgumentsShape); + + argsobj->setArgsLength(argc); + argsobj->setArgsData(data); + data->callee = ObjectValue(*callee); return argsobj; } @@ -199,9 +205,10 @@ PutArguments(JSContext *cx, JSObject *argsobj, Value *args) { JS_ASSERT(argsobj->isNormalArguments()); uint32 argc = argsobj->getArgsInitialLength(); + ArgumentsData *data = argsobj->getArgsData(); for (uint32 i = 0; i != argc; ++i) { - if (!argsobj->getArgsElement(i).isMagic(JS_ARGS_HOLE)) - argsobj->setArgsElement(i, args[i]); + if (!data->slots[i].isMagic(JS_ARGS_HOLE)) + data->slots[i] = args[i]; } } @@ -231,21 +238,18 @@ js_GetArgsObject(JSContext *cx, JSStackFrame *fp) return argsobj; /* - * Strict mode functions have arguments which copy the initial parameter - * values. It is the caller's responsibility to get the arguments object - * before any parameters are modified! (The emitter ensures this by - * synthesizing an arguments access at the start of any strict mode - * function which contains an assignment to a parameter or which calls - * eval.) Non-strict mode arguments use the frame pointer to retrieve - * up-to-date parameter values. + * Strict mode functions have arguments objects that copy the initial + * actual parameter values. It is the caller's responsibility to get the + * arguments object before any parameters are modified! (The emitter + * ensures this by synthesizing an arguments access at the start of any + * strict mode function that contains an assignment to a parameter, or + * that calls eval.) Non-strict mode arguments use the frame pointer to + * retrieve up-to-date parameter values. 
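
NewArguments above switches the callee and element Values from reserved object slots to a single malloc'ed ArgumentsData block, sized with the usual struct-with-trailing-array idiom. A self-contained sketch of just that sizing idiom, with a stand-in element type instead of js::Value:

    // Standalone illustration (toy types, not engine code) of the
    // offsetof(ArgumentsData, slots) + argc * sizeof(Value) sizing used above.
    #include <cstddef>
    #include <cstdlib>

    struct ToyArgsData {
        double callee;     // stands in for the Value callee member
        double slots[1];   // trailing array that really holds argc elements
    };

    ToyArgsData *
    AllocToyArgsData(size_t argc)
    {
        // offsetof(..., slots) rather than sizeof(ToyArgsData) sizes the block
        // for exactly argc trailing elements instead of over-counting by one.
        size_t nbytes = offsetof(ToyArgsData, slots) + argc * sizeof(double);
        return static_cast<ToyArgsData *>(malloc(nbytes));
    }

The matching release is a plain free of the block, which is what the new args_finalize hook later in this file does with cx->free.
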
*/ - if (argsobj->isStrictArguments()) { - JS_ASSERT_IF(fp->numActualArgs() > 0, - argsobj->dslots[-1].toPrivateUint32() >= fp->numActualArgs()); - memcpy(argsobj->dslots, fp->argv, fp->numActualArgs() * sizeof(Value)); - } else { + if (argsobj->isStrictArguments()) + memcpy(argsobj->getArgsData()->slots, fp->argv, fp->numActualArgs() * sizeof(Value)); + else argsobj->setPrivate(fp); - } fp->setArgsObj(argsobj); return argsobj; @@ -265,11 +269,11 @@ js_PutArgsObject(JSContext *cx, JSStackFrame *fp) fp->setArgsObj(NULL); } +#ifdef JS_TRACER + /* * Traced versions of js_GetArgsObject and js_PutArgsObject. */ - -#ifdef JS_TRACER JSObject * JS_FASTCALL js_Arguments(JSContext *cx, JSObject *parent, uint32 argc, JSObject *callee) { @@ -277,20 +281,18 @@ js_Arguments(JSContext *cx, JSObject *parent, uint32 argc, JSObject *callee) if (!argsobj) return NULL; - if (callee->getFunctionPrivate()->inStrictMode()) { + if (argsobj->isStrictArguments()) { /* * Strict mode callers must copy arguments into the created arguments - * object. + * object. The trace-JITting code is in TraceRecorder::newArguments. */ JS_ASSERT(!argsobj->getPrivate()); } else { - argsobj->setPrivate(JS_ARGUMENT_OBJECT_ON_TRACE); + argsobj->setPrivate(JS_ARGUMENTS_OBJECT_ON_TRACE); } return argsobj; } -#endif - JS_DEFINE_CALLINFO_4(extern, OBJECT, js_Arguments, CONTEXT, OBJECT, UINT32, OBJECT, 0, nanojit::ACCSET_STORE_ANY) @@ -299,15 +301,16 @@ JSBool JS_FASTCALL js_PutArguments(JSContext *cx, JSObject *argsobj, Value *args) { JS_ASSERT(argsobj->isNormalArguments()); - JS_ASSERT(argsobj->getPrivate() == JS_ARGUMENT_OBJECT_ON_TRACE); + JS_ASSERT(argsobj->getPrivate() == JS_ARGUMENTS_OBJECT_ON_TRACE); PutArguments(cx, argsobj, args); argsobj->setPrivate(NULL); return true; } - JS_DEFINE_CALLINFO_3(extern, BOOL, js_PutArguments, CONTEXT, OBJECT, VALUEPTR, 0, nanojit::ACCSET_STORE_ANY) +#endif /* JS_TRACER */ + static JSBool args_delProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp) { @@ -349,48 +352,16 @@ WrapEscapingClosure(JSContext *cx, JSStackFrame *fp, JSFunction *fun) JSFunction *wfun = (JSFunction *) wfunobj; wfunobj->setPrivate(wfun); - wfun->nargs = 0; + wfun->nargs = fun->nargs; wfun->flags = fun->flags | JSFUN_HEAVYWEIGHT; - wfun->u.i.nvars = 0; - wfun->u.i.nupvars = 0; + wfun->u.i.nvars = fun->u.i.nvars; + wfun->u.i.nupvars = fun->u.i.nupvars; wfun->u.i.skipmin = fun->u.i.skipmin; wfun->u.i.wrapper = true; wfun->u.i.script = NULL; - wfun->u.i.names.taggedAtom = NULL; + wfun->u.i.names = fun->u.i.names; wfun->atom = fun->atom; - if (fun->hasLocalNames()) { - void *mark = JS_ARENA_MARK(&cx->tempPool); - jsuword *names = js_GetLocalNameArray(cx, fun, &cx->tempPool); - if (!names) - return NULL; - - JSBool ok = true; - for (uintN i = 0, n = fun->countLocalNames(); i != n; i++) { - jsuword name = names[i]; - JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(name); - JSLocalKind localKind = (i < fun->nargs) - ? JSLOCAL_ARG - : (i < fun->countArgsAndVars()) - ? (JS_LOCAL_NAME_IS_CONST(name) - ? 
JSLOCAL_CONST - : JSLOCAL_VAR) - : JSLOCAL_UPVAR; - - ok = js_AddLocal(cx, wfun, atom, localKind); - if (!ok) - break; - } - - JS_ARENA_RELEASE(&cx->tempPool, mark); - if (!ok) - return NULL; - JS_ASSERT(wfun->nargs == fun->nargs); - JS_ASSERT(wfun->u.i.nvars == fun->u.i.nvars); - JS_ASSERT(wfun->u.i.nupvars == fun->u.i.nupvars); - js_FreezeLocalNames(cx, wfun); - } - JSScript *script = fun->u.i.script; jssrcnote *snbase = script->notes(); jssrcnote *sn = snbase; @@ -444,7 +415,7 @@ WrapEscapingClosure(JSContext *cx, JSStackFrame *fp, JSFunction *fun) jsbytecode *pc = wscript->code; while (*pc != JSOP_STOP) { - /* XYZZYbe should copy JSOP_TRAP? */ + /* FIXME should copy JSOP_TRAP? */ JSOp op = js_GetOpcode(cx, wscript, pc); const JSCodeSpec *cs = &js_CodeSpec[op]; ptrdiff_t oplen = cs->length; @@ -460,8 +431,8 @@ WrapEscapingClosure(JSContext *cx, JSStackFrame *fp, JSFunction *fun) switch (op) { case JSOP_GETUPVAR: *pc = JSOP_GETUPVAR_DBG; break; case JSOP_CALLUPVAR: *pc = JSOP_CALLUPVAR_DBG; break; - case JSOP_GETDSLOT: *pc = JSOP_GETUPVAR_DBG; break; - case JSOP_CALLDSLOT: *pc = JSOP_CALLUPVAR_DBG; break; + case JSOP_GETFCSLOT: *pc = JSOP_GETUPVAR_DBG; break; + case JSOP_CALLFCSLOT: *pc = JSOP_CALLUPVAR_DBG; break; case JSOP_DEFFUN_FC: *pc = JSOP_DEFFUN_DBGFC; break; case JSOP_DEFLOCALFUN_FC: *pc = JSOP_DEFLOCALFUN_DBGFC; break; case JSOP_LAMBDA_FC: *pc = JSOP_LAMBDA_DBGFC; break; @@ -797,7 +768,12 @@ strictargs_enumerate(JSContext *cx, JSObject *obj) } // namespace -#if JS_HAS_GENERATORS +static void +args_finalize(JSContext *cx, JSObject *obj) +{ + cx->free((void *) obj->getArgsData()); +} + /* * If a generator's arguments or call object escapes, and the generator frame * is not executing, the generator object needs to be marked because it is not @@ -806,35 +782,41 @@ strictargs_enumerate(JSContext *cx, JSObject *obj) * generator object), we use the JSFRAME_FLOATING_GENERATOR flag, which is only * set on the JSStackFrame kept in the generator object's JSGenerator. */ -static void -args_or_call_trace(JSTracer *trc, JSObject *obj) +static inline void +MaybeMarkGenerator(JSTracer *trc, JSObject *obj) { - if (obj->isArguments()) { - if (obj->getPrivate() == JS_ARGUMENT_OBJECT_ON_TRACE) - return; - } else { - JS_ASSERT(obj->getClass() == &js_CallClass); - } - +#if JS_HAS_GENERATORS JSStackFrame *fp = (JSStackFrame *) obj->getPrivate(); if (fp && fp->isFloatingGenerator()) { - JSObject *obj = js_FloatingFrameToGenerator(fp)->obj; - JS_CALL_OBJECT_TRACER(trc, obj, "generator object"); + JSObject *genobj = js_FloatingFrameToGenerator(fp)->obj; + MarkObject(trc, genobj, "generator object"); } -} -#else -# define args_or_call_trace NULL #endif +} + +static void +args_trace(JSTracer *trc, JSObject *obj) +{ + JS_ASSERT(obj->isArguments()); + if (obj->getPrivate() == JS_ARGUMENTS_OBJECT_ON_TRACE) { + JS_ASSERT(!obj->isStrictArguments()); + return; + } + + ArgumentsData *data = obj->getArgsData(); + if (data->callee.isObject()) + MarkObject(trc, &data->callee.toObject(), js_callee_str); + MarkValueRange(trc, obj->getArgsInitialLength(), data->slots, js_arguments_str); + + MaybeMarkGenerator(trc, obj); +} /* - * The Arguments classes aren't initialized via JS_InitClass, because arguments + * The Arguments classes aren't initialized via js_InitClass, because arguments * objects have the initial value of Object.prototype as their [[Prototype]]. 
* However, Object.prototype.toString.call(arguments) === "[object Arguments]" * per ES5 (although not ES3), so the class name is "Arguments" rather than * "Object". - */ - -/* * * The JSClass functions below collaborate to lazily reflect and synchronize * actual argument values, argument count, and callee function object stored @@ -844,7 +826,7 @@ args_or_call_trace(JSTracer *trc, JSObject *obj) Class js_ArgumentsClass = { "Arguments", JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE | - JSCLASS_HAS_RESERVED_SLOTS(JSObject::ARGS_FIXED_RESERVED_SLOTS) | + JSCLASS_HAS_RESERVED_SLOTS(JSObject::ARGS_CLASS_RESERVED_SLOTS) | JSCLASS_MARK_IS_TRACE | JSCLASS_HAS_CACHED_PROTO(JSProto_Object), PropertyStub, /* addProperty */ args_delProperty, @@ -853,14 +835,14 @@ Class js_ArgumentsClass = { args_enumerate, (JSResolveOp) args_resolve, ConvertStub, - NULL, /* finalize */ + args_finalize, /* finalize */ NULL, /* reserved0 */ NULL, /* checkAccess */ NULL, /* call */ NULL, /* construct */ NULL, /* xdrObject */ NULL, /* hasInstance */ - JS_CLASS_TRACE(args_or_call_trace) + JS_CLASS_TRACE(args_trace) }; namespace js { @@ -873,7 +855,7 @@ namespace js { Class StrictArgumentsClass = { "Arguments", JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE | - JSCLASS_HAS_RESERVED_SLOTS(JSObject::ARGS_FIXED_RESERVED_SLOTS) | + JSCLASS_HAS_RESERVED_SLOTS(JSObject::ARGS_CLASS_RESERVED_SLOTS) | JSCLASS_MARK_IS_TRACE | JSCLASS_HAS_CACHED_PROTO(JSProto_Object), PropertyStub, /* addProperty */ args_delProperty, @@ -882,21 +864,21 @@ Class StrictArgumentsClass = { strictargs_enumerate, reinterpret_cast(strictargs_resolve), ConvertStub, - NULL, /* finalize */ + args_finalize, /* finalize */ NULL, /* reserved0 */ NULL, /* checkAccess */ NULL, /* call */ NULL, /* construct */ NULL, /* xdrObject */ NULL, /* hasInstance */ - JS_CLASS_TRACE(args_or_call_trace) + JS_CLASS_TRACE(args_trace) }; } -const uint32 JSSLOT_CALLEE = JSSLOT_PRIVATE + 1; -const uint32 JSSLOT_CALL_ARGUMENTS = JSSLOT_PRIVATE + 2; -const uint32 CALL_CLASS_FIXED_RESERVED_SLOTS = 2; +const uint32 JSSLOT_CALLEE = JSSLOT_PRIVATE + 1; +const uint32 JSSLOT_CALL_ARGUMENTS = JSSLOT_PRIVATE + 2; +const uint32 CALL_CLASS_RESERVED_SLOTS = 2; /* * A Declarative Environment object stores its active JSStackFrame pointer in @@ -917,8 +899,7 @@ Class js_DeclEnvClass = { static JSBool CheckForEscapingClosure(JSContext *cx, JSObject *obj, Value *vp) { - JS_ASSERT(obj->getClass() == &js_CallClass || - obj->getClass() == &js_DeclEnvClass); + JS_ASSERT(obj->isCall() || obj->getClass() == &js_DeclEnvClass); const Value &v = *vp; @@ -965,13 +946,22 @@ NewCallObject(JSContext *cx, JSFunction *fun, JSObject *scopeChain) if (!callobj) return NULL; - /* Init immediately to avoid GC seeing a half-init'ed object. */ - callobj->init(&js_CallClass, NULL, scopeChain, PrivateValue(NULL)); - callobj->map = cx->runtime->emptyCallScope->hold(); + callobj->init(&js_CallClass, NULL, scopeChain, PrivateValue(NULL), cx); + callobj->setMap(fun->u.i.names); - /* This must come after callobj->map has been set. */ - if (!js_EnsureReservedSlots(cx, callobj, fun->countArgsAndVars())) + /* This must come after callobj->lastProp has been set. 
*/ + if (!callobj->ensureInstanceReservedSlots(cx, fun->countArgsAndVars())) return NULL; + +#ifdef DEBUG + for (Shape::Range r = callobj->lastProp; !r.empty(); r.popFront()) { + const Shape &s = r.front(); + if (s.slot != SHAPE_INVALID_SLOT) { + JS_ASSERT(s.slot + 1 == callobj->freeslot); + break; + } + } +#endif return callobj; } @@ -982,9 +972,8 @@ NewDeclEnvObject(JSContext *cx, JSStackFrame *fp) if (!envobj) return NULL; - /* Init immediately to avoid GC seeing a half-init'ed object. */ - envobj->init(&js_DeclEnvClass, NULL, fp->maybeScopeChain(), PrivateValue(fp)); - envobj->map = cx->runtime->emptyDeclEnvScope->hold(); + envobj->init(&js_DeclEnvClass, NULL, fp->maybeScopeChain(), PrivateValue(fp), cx); + envobj->setMap(cx->runtime->emptyDeclEnvShape); return envobj; } @@ -998,10 +987,10 @@ js_GetCallObject(JSContext *cx, JSStackFrame *fp) #ifdef DEBUG /* A call object should be a frame's outermost scope chain element. */ - Class *classp = fp->getScopeChain()->getClass(); - if (classp == &js_WithClass || classp == &js_BlockClass) + Class *clasp = fp->getScopeChain()->getClass(); + if (clasp == &js_WithClass || clasp == &js_BlockClass) JS_ASSERT(fp->getScopeChain()->getPrivate() != js_FloatingFrameIfGenerator(cx, fp)); - else if (classp == &js_CallClass) + else if (clasp == &js_CallClass) JS_ASSERT(fp->getScopeChain()->getPrivate() != fp); #endif @@ -1065,7 +1054,7 @@ JS_DEFINE_CALLINFO_4(extern, OBJECT, js_CreateCallObjectOnTrace, CONTEXT, FUNCTI JSFunction * js_GetCallObjectFunction(JSObject *obj) { - JS_ASSERT(obj->getClass() == &js_CallClass); + JS_ASSERT(obj->isCall()); const Value &v = obj->getSlot(JSSLOT_CALLEE); if (v.isUndefined()) { /* Newborn or prototype object. */ @@ -1076,10 +1065,36 @@ js_GetCallObjectFunction(JSObject *obj) } inline static void -CopyValuesToCallObject(JSObject *callobj, int nargs, Value *argv, int nvars, Value *slots) +CopyValuesToCallObject(JSObject *callobj, uintN nargs, Value *argv, uintN nvars, Value *slots) { - memcpy(callobj->dslots, argv, nargs * sizeof(Value)); - memcpy(callobj->dslots + nargs, slots, nvars * sizeof(Value)); + /* Copy however many args fit into fslots. */ + uintN first = JSSLOT_PRIVATE + CALL_CLASS_RESERVED_SLOTS + 1; + JS_ASSERT(first <= JS_INITIAL_NSLOTS); + + Value *vp = &callobj->fslots[first]; + uintN len = JS_MIN(nargs, JS_INITIAL_NSLOTS - first); + + memcpy(vp, argv, len * sizeof(Value)); + vp += len; + + nargs -= len; + if (nargs != 0) { + /* Copy any remaining args into dslots. */ + vp = callobj->dslots; + memcpy(vp, argv + len, nargs * sizeof(Value)); + vp += nargs; + } else { + /* Copy however many vars fit into any remaining fslots. */ + first += len; + len = JS_MIN(nvars, JS_INITIAL_NSLOTS - first); + memcpy(vp, slots, len * sizeof(Value)); + slots += len; + nvars -= len; + vp = callobj->dslots; + } + + /* Copy any remaining vars into dslots. */ + memcpy(vp, slots, nvars * sizeof(Value)); } void @@ -1103,10 +1118,9 @@ js_PutCallObject(JSContext *cx, JSStackFrame *fp) * arguments and variables straight into JSObject.dslots. 
*/ JS_STATIC_ASSERT(JS_INITIAL_NSLOTS - JSSLOT_PRIVATE == - 1 + CALL_CLASS_FIXED_RESERVED_SLOTS); + 1 + CALL_CLASS_RESERVED_SLOTS); if (n != 0) { - JS_ASSERT(callobj->numSlots() >= JS_INITIAL_NSLOTS + n); - n += JS_INITIAL_NSLOTS; + JS_ASSERT(JSFunction::FIRST_FREE_SLOT + n <= callobj->numSlots()); CopyValuesToCallObject(callobj, fun->nargs, fp->argv, fun->u.i.nvars, fp->slots()); } @@ -1140,61 +1154,6 @@ js_PutCallObjectOnTrace(JSContext *cx, JSObject *scopeChain, uint32 nargs, Value JS_DEFINE_CALLINFO_6(extern, BOOL, js_PutCallObjectOnTrace, CONTEXT, OBJECT, UINT32, VALUEPTR, UINT32, VALUEPTR, 0, nanojit::ACCSET_STORE_ANY) -static JSBool -call_enumerate(JSContext *cx, JSObject *obj) -{ - JSFunction *fun; - uintN n, i; - void *mark; - jsuword *names; - JSBool ok; - JSAtom *name; - JSObject *pobj; - JSProperty *prop; - - fun = js_GetCallObjectFunction(obj); - n = fun ? fun->countArgsAndVars() : 0; - if (n == 0) - return JS_TRUE; - - mark = JS_ARENA_MARK(&cx->tempPool); - - MUST_FLOW_THROUGH("out"); - names = js_GetLocalNameArray(cx, fun, &cx->tempPool); - if (!names) { - ok = JS_FALSE; - goto out; - } - - for (i = 0; i != n; ++i) { - name = JS_LOCAL_NAME_TO_ATOM(names[i]); - if (!name) - continue; - - /* - * Trigger reflection by looking up the name of the argument or - * variable. - */ - ok = js_LookupProperty(cx, obj, ATOM_TO_JSID(name), &pobj, &prop); - if (!ok) - goto out; - - /* - * The call object will always have a property corresponding to the - * argument or variable name because call_resolve creates the property - * using JSPROP_PERMANENT. - */ - JS_ASSERT(prop); - JS_ASSERT(pobj == obj); - pobj->dropProperty(cx, prop); - } - ok = JS_TRUE; - - out: - JS_ARENA_RELEASE(&cx->tempPool, mark); - return ok; -} - enum JSCallPropertyKind { JSCPK_ARGUMENTS, JSCPK_ARG, @@ -1206,7 +1165,7 @@ static JSBool CallPropertyOp(JSContext *cx, JSObject *obj, jsid id, Value *vp, JSCallPropertyKind kind, JSBool setter = false) { - JS_ASSERT(obj->getClass() == &js_CallClass); + JS_ASSERT(obj->isCall()); uintN i = 0; if (kind != JSCPK_ARGUMENTS) { @@ -1224,7 +1183,7 @@ CallPropertyOp(JSContext *cx, JSObject *obj, jsid id, Value *vp, JS_ASSERT(i < callee_fun->u.i.nupvars); #endif - array = callee->dslots; + array = callee->getFlatClosureUpvars(); } else { JSFunction *fun = js_GetCallObjectFunction(obj); JS_ASSERT_IF(kind == JSCPK_ARG, i < fun->nargs); @@ -1253,17 +1212,17 @@ CallPropertyOp(JSContext *cx, JSObject *obj, jsid id, Value *vp, } if (!fp) { - i += CALL_CLASS_FIXED_RESERVED_SLOTS; if (kind == JSCPK_VAR) i += fun->nargs; else JS_ASSERT(kind == JSCPK_ARG); - return setter - ? JS_SetReservedSlot(cx, obj, i, Jsvalify(*vp)) - : JS_GetReservedSlot(cx, obj, i, Jsvalify(vp)); - } - if (kind == JSCPK_ARG) { + const uintN first = JSSLOT_PRIVATE + CALL_CLASS_RESERVED_SLOTS + 1; + JS_ASSERT(first == JSSLOT_FREE(&js_CallClass)); + JS_ASSERT(first <= JS_INITIAL_NSLOTS); + + array = (i < JS_INITIAL_NSLOTS - first) ? 
obj->fslots : obj->dslots; + } else if (kind == JSCPK_ARG) { array = fp->argv; } else { JS_ASSERT(kind == JSCPK_VAR); @@ -1361,12 +1320,7 @@ static JSBool call_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags, JSObject **objp) { - JSFunction *fun; - JSLocalKind localKind; - PropertyOp getter, setter; - uintN slot, attrs; - - JS_ASSERT(obj->getClass() == &js_CallClass); + JS_ASSERT(obj->isCall()); JS_ASSERT(!obj->getProto()); if (!JSID_IS_ATOM(id)) @@ -1375,72 +1329,12 @@ call_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags, const Value &callee = obj->getSlot(JSSLOT_CALLEE); if (callee.isUndefined()) return JS_TRUE; + +#ifdef DEBUG + JSFunction *fun; fun = GET_FUNCTION_PRIVATE(cx, &callee.toObject()); - - /* - * Check whether the id refers to a formal parameter, local variable or - * the arguments special name. - * - * We define all such names using JSDNP_DONT_PURGE to avoid an expensive - * shape invalidation in js_DefineNativeProperty. If such an id happens to - * shadow a global or upvar of the same name, any inner functions can - * never access the outer binding. Thus it cannot invalidate any property - * cache entries or derived trace guards for the outer binding. See also - * comments in js_PurgeScopeChainHelper from jsobj.cpp. - */ - localKind = js_LookupLocal(cx, fun, JSID_TO_ATOM(id), &slot); - if (localKind != JSLOCAL_NONE) { - JS_ASSERT((uint16) slot == slot); - - /* - * We follow 10.2.3 of ECMA 262 v3 and make argument and variable - * properties of the Call objects enumerable. - */ - attrs = JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_SHARED; - if (localKind == JSLOCAL_ARG) { - JS_ASSERT(slot < fun->nargs); - getter = js_GetCallArg; - setter = SetCallArg; - } else { - JSCallPropertyKind cpkind; - if (localKind == JSLOCAL_UPVAR) { - if (!FUN_FLAT_CLOSURE(fun)) - return JS_TRUE; - getter = GetFlatUpvar; - setter = SetFlatUpvar; - cpkind = JSCPK_UPVAR; - } else { - JS_ASSERT(localKind == JSLOCAL_VAR || localKind == JSLOCAL_CONST); - JS_ASSERT(slot < fun->u.i.nvars); - getter = js_GetCallVar; - setter = SetCallVar; - cpkind = JSCPK_VAR; - if (localKind == JSLOCAL_CONST) - attrs |= JSPROP_READONLY; - } - - /* - * Use js_GetCallVarChecked if the local's value is a null closure. - * This way we penalize performance only slightly on first use of a - * null closure, not on every use. - */ - Value v; - if (!CallPropertyOp(cx, obj, INT_TO_JSID((int16)slot), &v, cpkind)) - return JS_FALSE; - JSObject *funobj; - if (IsFunctionObject(v, &funobj) && - GET_FUNCTION_PRIVATE(cx, funobj)->needsWrapper()) { - getter = js_GetCallVarChecked; - } - } - if (!js_DefineNativeProperty(cx, obj, id, UndefinedValue(), getter, setter, - attrs, JSScopeProperty::HAS_SHORTID, (int16) slot, - NULL, JSDNP_DONT_PURGE)) { - return JS_FALSE; - } - *objp = obj; - return JS_TRUE; - } + JS_ASSERT(fun->lookupLocal(cx, JSID_TO_ATOM(id), NULL) == JSLOCAL_NONE); +#endif /* * Resolve arguments so that we never store a particular Call object's @@ -1461,16 +1355,42 @@ call_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags, return JS_TRUE; } +static void +call_trace(JSTracer *trc, JSObject *obj) +{ + JS_ASSERT(obj->isCall()); + JSStackFrame *fp = (JSStackFrame *) obj->getPrivate(); + if (fp) { + /* + * FIXME: Hide copies of stack values rooted by fp from the Cycle + * Collector, which currently lacks a non-stub Unlink implementation + * for JS objects (including Call objects), so is unable to collect + * cycles involving Call objects whose frames are active without this + * hiding hack. 
+ */ + uintN first = JSSLOT_PRIVATE + CALL_CLASS_RESERVED_SLOTS + 1; + JS_ASSERT(first <= JS_INITIAL_NSLOTS); + + uintN count = fp->getFunction()->countArgsAndVars(); + uintN fixed = JS_MIN(count, JS_INITIAL_NSLOTS - first); + + SetValueRangeToUndefined(&obj->fslots[first], fixed); + SetValueRangeToUndefined(obj->dslots, count - fixed); + } + + MaybeMarkGenerator(trc, obj); +} + JS_PUBLIC_DATA(Class) js_CallClass = { "Call", JSCLASS_HAS_PRIVATE | - JSCLASS_HAS_RESERVED_SLOTS(CALL_CLASS_FIXED_RESERVED_SLOTS) | + JSCLASS_HAS_RESERVED_SLOTS(CALL_CLASS_RESERVED_SLOTS) | JSCLASS_NEW_RESOLVE | JSCLASS_IS_ANONYMOUS | JSCLASS_MARK_IS_TRACE, PropertyStub, /* addProperty */ PropertyStub, /* delProperty */ PropertyStub, /* getProperty */ PropertyStub, /* setProperty */ - call_enumerate, + JS_EnumerateStub, (JSResolveOp)call_resolve, NULL, /* convert */ NULL, /* finalize */ @@ -1480,7 +1400,7 @@ JS_PUBLIC_DATA(Class) js_CallClass = { NULL, /* construct */ NULL, /* xdrObject */ NULL, /* hasInstance */ - JS_CLASS_TRACE(args_or_call_trace) + JS_CLASS_TRACE(call_trace) }; bool @@ -1494,7 +1414,7 @@ JSStackFrame::getValidCalleeObject(JSContext *cx, Value *vp) JSFunction *fun = getFunction(); /* - * See the equivalent condition in args_getProperty for ARGS_CALLEE, but + * See the equivalent condition in ArgGetter for the 'callee' id case, but * note that here we do not want to throw, since this escape can happen via * a foo.caller reference alone, without any debugger or indirect eval. And * alas, it seems foo.caller is still used on the Web. @@ -1522,13 +1442,12 @@ JSStackFrame::getValidCalleeObject(JSContext *cx, Value *vp) JSObject *thisp = &getThisValue().toObject(); JS_ASSERT(thisp->canHaveMethodBarrier()); - JSScope *scope = thisp->scope(); - if (scope->hasMethodBarrier()) { - JSScopeProperty *sprop = scope->lookup(ATOM_TO_JSID(fun->methodAtom())); + if (thisp->hasMethodBarrier()) { + const Shape *shape = thisp->nativeLookup(ATOM_TO_JSID(fun->methodAtom())); /* * The method property might have been deleted while the method - * barrier scope flag stuck, so we must lookup and test here. + * barrier flag stuck, so we must lookup and test here. * * Two cases follow: the method barrier was not crossed yet, so * we cross it here; the method barrier *was* crossed, in which @@ -1538,15 +1457,15 @@ JSStackFrame::getValidCalleeObject(JSContext *cx, Value *vp) * In either case we must allow for the method property to have * been replaced, or its value to have been overwritten. */ - if (sprop) { - if (sprop->isMethod() && &sprop->methodObject() == funobj) { - if (!scope->methodReadBarrier(cx, sprop, vp)) + if (shape) { + if (shape->isMethod() && &shape->methodObject() == funobj) { + if (!thisp->methodReadBarrier(cx, *shape, vp)) return false; setCalleeObject(vp->toObject()); return true; } - if (sprop->hasSlot()) { - Value v = thisp->getSlot(sprop->slot); + if (shape->hasSlot()) { + Value v = thisp->getSlot(shape->slot); JSObject *clone; if (IsFunctionObject(v, &clone) && @@ -1686,7 +1605,7 @@ fun_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp) default: /* XXX fun[0] and fun.arguments[0] are equivalent. 
*/ - if (fp && fp->hasFunction() && (uintN)slot < fp->getFunction()->nargs) + if (fp && fp->hasFunction() && uint16(slot) < fp->getFunction()->nargs) *vp = fp->argv[slot]; break; } @@ -1727,30 +1646,30 @@ fun_enumerate(JSContext *cx, JSObject *obj) { JS_ASSERT(obj->isFunction()); - jsval v; jsid id; + bool found; if (!obj->getFunctionPrivate()->isBound()) { id = ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom); - if (!JS_LookupPropertyById(cx, obj, id, &v)) + if (!obj->hasProperty(cx, id, &found, JSRESOLVE_QUALIFIED)) return false; } id = ATOM_TO_JSID(cx->runtime->atomState.lengthAtom); - if (!JS_LookupPropertyById(cx, obj, id, &v)) + if (!obj->hasProperty(cx, id, &found, JSRESOLVE_QUALIFIED)) return false; for (uintN i = 0; i < JS_ARRAY_LENGTH(lazyFunctionDataProps); i++) { const LazyFunctionDataProp &lfp = lazyFunctionDataProps[i]; id = ATOM_TO_JSID(OFFSET_TO_ATOM(cx->runtime, lfp.atomOffset)); - if (!JS_LookupPropertyById(cx, obj, id, &v)) + if (!obj->hasProperty(cx, id, &found, JSRESOLVE_QUALIFIED)) return false; } for (uintN i = 0; i < JS_ARRAY_LENGTH(poisonPillProps); i++) { const PoisonPillProp &p = poisonPillProps[i]; id = ATOM_TO_JSID(OFFSET_TO_ATOM(cx->runtime, p.atomOffset)); - if (!JS_LookupPropertyById(cx, obj, id, &v)) + if (!obj->hasProperty(cx, id, &found, JSRESOLVE_QUALIFIED)) return false; } @@ -1812,7 +1731,7 @@ fun_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags, * user-defined functions, but DontEnum | ReadOnly | DontDelete for * native "system" constructors such as Object or Function. So lazily * set the former here in fun_resolve, but eagerly define the latter - * in JS_InitClass, with the right attributes. + * in js_InitClass, with the right attributes. */ if (!js_SetClassPrototype(cx, obj, proto, JSPROP_PERMANENT)) return JS_FALSE; @@ -1843,7 +1762,7 @@ fun_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags, if (!js_DefineNativeProperty(cx, obj, ATOM_TO_JSID(atom), UndefinedValue(), fun_getProperty, PropertyStub, - lfp->attrs, JSScopeProperty::HAS_SHORTID, + lfp->attrs, Shape::HAS_SHORTID, lfp->tinyid, NULL)) { return JS_FALSE; } @@ -1874,7 +1793,7 @@ fun_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags, if (!js_DefineNativeProperty(cx, obj, ATOM_TO_JSID(atom), UndefinedValue(), getter, setter, - attrs, JSScopeProperty::HAS_SHORTID, + attrs, Shape::HAS_SHORTID, p.tinyid, NULL)) { return JS_FALSE; } @@ -1989,7 +1908,7 @@ js_XDRFunctionObject(JSXDRState *xdr, JSObject **objp) goto release_mark; } if (xdr->mode == JSXDR_ENCODE) { - names = js_GetLocalNameArray(xdr->cx, fun, &xdr->cx->tempPool); + names = fun->getLocalNameArray(xdr->cx, &xdr->cx->tempPool); if (!names) { ok = false; goto release_mark; @@ -2019,7 +1938,7 @@ js_XDRFunctionObject(JSXDRState *xdr, JSObject **objp) !(bitmap[i >> JS_BITS_PER_UINT32_LOG2] & JS_BIT(i & (JS_BITS_PER_UINT32 - 1)))) { if (xdr->mode == JSXDR_DECODE) { - ok = !!js_AddLocal(xdr->cx, fun, NULL, JSLOCAL_ARG); + ok = !!fun->addLocal(xdr->cx, NULL, JSLOCAL_ARG); if (!ok) goto release_mark; } else { @@ -2041,7 +1960,7 @@ js_XDRFunctionObject(JSXDRState *xdr, JSObject **objp) ? 
JSLOCAL_CONST : JSLOCAL_VAR) : JSLOCAL_UPVAR; - ok = !!js_AddLocal(xdr->cx, fun, name, localKind); + ok = !!fun->addLocal(xdr->cx, name, localKind); if (!ok) goto release_mark; } @@ -2053,7 +1972,7 @@ js_XDRFunctionObject(JSXDRState *xdr, JSObject **objp) return false; if (xdr->mode == JSXDR_DECODE) - js_FreezeLocalNames(cx, fun); + fun->freezeLocalNames(cx); } if (!js_XDRScript(xdr, &fun->u.i.script, false, NULL)) @@ -2110,12 +2029,6 @@ fun_hasInstance(JSContext *cx, JSObject *obj, const Value *v, JSBool *bp) return JS_TRUE; } -static void -TraceLocalNames(JSTracer *trc, JSFunction *fun); - -static void -DestroyLocalNames(JSContext *cx, JSFunction *fun); - static void fun_trace(JSTracer *trc, JSObject *obj) { @@ -2124,37 +2037,48 @@ fun_trace(JSTracer *trc, JSObject *obj) if (!fun) return; - if (FUN_OBJECT(fun) != obj) { - /* obj is cloned function object, trace the original. */ - JS_CALL_TRACER(trc, FUN_OBJECT(fun), JSTRACE_OBJECT, "private"); + if (fun != obj) { + /* obj is a cloned function object, trace the clone-parent, fun. */ + MarkObject(trc, fun, "private"); + + /* The function could be a flat closure with upvar copies in the clone. */ + if (FUN_FLAT_CLOSURE(fun) && fun->u.i.nupvars) + MarkValueRange(trc, fun->u.i.nupvars, obj->getFlatClosureUpvars(), "upvars"); return; } + if (fun->atom) - JS_CALL_STRING_TRACER(trc, ATOM_TO_STRING(fun->atom), "atom"); + MarkString(trc, ATOM_TO_STRING(fun->atom), "atom"); + if (FUN_INTERPRETED(fun)) { if (fun->u.i.script) js_TraceScript(trc, fun->u.i.script); - TraceLocalNames(trc, fun); + for (const Shape *shape = fun->u.i.names; shape; shape = shape->previous()) + shape->trace(trc); } } static void fun_finalize(JSContext *cx, JSObject *obj) { - /* Ignore newborn and cloned function objects. */ + /* Ignore newborn function objects. */ JSFunction *fun = (JSFunction *) obj->getPrivate(); - if (!fun || FUN_OBJECT(fun) != obj) + if (!fun) return; + /* Cloned function objects may be flat closures with upvars to free. */ + if (fun != obj) { + if (FUN_FLAT_CLOSURE(fun) && fun->u.i.nupvars != 0) + cx->free((void *) obj->getFlatClosureUpvars()); + return; + } + /* * Null-check of u.i.script is required since the parser sets interpreted * very early. 
*/ - if (FUN_INTERPRETED(fun)) { - if (fun->u.i.script) - js_DestroyScript(cx, fun->u.i.script); - DestroyLocalNames(cx, fun); - } + if (FUN_INTERPRETED(fun) && fun->u.i.script) + js_DestroyScript(cx, fun->u.i.script); } int @@ -2167,7 +2091,7 @@ JSFunction::sharpSlotBase(JSContext *cx) #ifdef DEBUG JSLocalKind kind = #endif - js_LookupLocal(cx, this, name, &index); + lookupLocal(cx, name, &index); JS_ASSERT(kind == JSLOCAL_VAR); return int(index); } @@ -2176,7 +2100,7 @@ JSFunction::sharpSlotBase(JSContext *cx) } uint32 -JSFunction::countInterpretedReservedSlots() const +JSFunction::countUpvarSlots() const { JS_ASSERT(FUN_INTERPRETED(this)); @@ -2191,7 +2115,7 @@ JSFunction::countInterpretedReservedSlots() const JS_PUBLIC_DATA(Class) js_FunctionClass = { js_Function_str, JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE | - JSCLASS_HAS_RESERVED_SLOTS(JSObject::FUN_FIXED_RESERVED_SLOTS) | + JSCLASS_HAS_RESERVED_SLOTS(JSFunction::CLASS_RESERVED_SLOTS) | JSCLASS_MARK_IS_TRACE | JSCLASS_HAS_CACHED_PROTO(JSProto_Function), PropertyStub, /* addProperty */ PropertyStub, /* delProperty */ @@ -2467,7 +2391,15 @@ JSObject::initBoundFunction(JSContext *cx, const Value &thisArg, fslots[JSSLOT_BOUND_FUNCTION_THIS] = thisArg; fslots[JSSLOT_BOUND_FUNCTION_ARGS_COUNT].setPrivateUint32(argslen); if (argslen != 0) { - if (!js_EnsureReservedSlots(cx, this, argslen)) + /* FIXME? Burn memory on an empty scope whose shape covers the args slots. */ + EmptyShape *empty = EmptyShape::create(cx, clasp); + if (!empty) + return false; + + empty->slot += argslen; + map = empty; + + if (!ensureInstanceReservedSlots(cx, argslen)) return false; JS_ASSERT(dslots); @@ -2827,7 +2759,7 @@ Function(JSContext *cx, JSObject *obj, uintN argc, Value *argv, Value *rval) atom = ts.currentToken().t_atom; /* Check for a duplicate parameter name. 
*/ - if (js_LookupLocal(cx, fun, atom, NULL) != JSLOCAL_NONE) { + if (fun->lookupLocal(cx, atom, NULL) != JSLOCAL_NONE) { const char *name; name = js_AtomToPrintableString(cx, atom); @@ -2837,7 +2769,7 @@ Function(JSContext *cx, JSObject *obj, uintN argc, Value *argv, Value *rval) if (!ok) goto after_args; } - if (!js_AddLocal(cx, fun, atom, JSLOCAL_ARG)) + if (!fun->addLocal(cx, atom, JSLOCAL_ARG)) goto after_args; /* @@ -2952,9 +2884,7 @@ js_NewFunction(JSContext *cx, JSObject *funobj, Native native, uintN nargs, fun->u.i.skipmin = 0; fun->u.i.wrapper = false; fun->u.i.script = NULL; -#ifdef DEBUG - fun->u.i.names.taggedAtom = 0; -#endif + fun->u.i.names = cx->runtime->emptyCallShape; } else { fun->u.n.extra = 0; fun->u.n.spare = 0; @@ -3021,12 +2951,15 @@ js_AllocFlatClosure(JSContext *cx, JSFunction *fun, JSObject *scopeChain) if (!closure) return closure; - uint32 nslots = fun->countInterpretedReservedSlots(); + uint32 nslots = fun->countUpvarSlots(); if (nslots == 0) return closure; - if (!js_EnsureReservedSlots(cx, closure, nslots)) + + Value *upvars = (Value *) cx->malloc(nslots * sizeof(Value)); + if (!upvars) return NULL; + closure->setFlatClosureUpvars(upvars); return closure; } @@ -3048,12 +2981,12 @@ js_NewFlatClosure(JSContext *cx, JSFunction *fun) if (!closure || fun->u.i.nupvars == 0) return closure; - JSUpvarArray *uva = fun->u.i.script->upvars(); - JS_ASSERT(uva->length <= closure->dslots[-1].toPrivateUint32()); - + Value *upvars = closure->getFlatClosureUpvars(); uintN level = fun->u.i.script->staticLevel; + JSUpvarArray *uva = fun->u.i.script->upvars(); + for (uint32 i = 0, n = uva->length; i < n; i++) - closure->dslots[i] = GetUpvar(cx, level, uva->vector[i]); + upvars[i] = GetUpvar(cx, level, uva->vector[i]); return closure; } @@ -3176,438 +3109,223 @@ js_ReportIsNotFunction(JSContext *cx, const Value *vp, uintN flags) js_ReportValueError3(cx, error, spindex, *vp, NULL, name, source); } -/* - * When a function has between 2 and MAX_ARRAY_LOCALS arguments and variables, - * their name are stored as the JSLocalNames.array. - */ -#define MAX_ARRAY_LOCALS 8 - -JS_STATIC_ASSERT(2 <= MAX_ARRAY_LOCALS); -JS_STATIC_ASSERT(MAX_ARRAY_LOCALS < JS_BITMASK(16)); - -/* - * When we use a hash table to store the local names, we use a singly linked - * list to record the indexes of duplicated parameter names to preserve the - * duplicates for the decompiler. 
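
With js_AllocFlatClosure and js_NewFlatClosure above, a flat closure's captured upvalues live in a separately malloc'ed Value array reached through getFlatClosureUpvars(), not in reserved dslots. A condensed lifecycle sketch (not compilable in isolation), built only from calls that appear in this diff:

    // Allocate and attach the upvar array (js_AllocFlatClosure):
    Value *upvars = (Value *) cx->malloc(fun->countUpvarSlots() * sizeof(Value));
    if (!upvars)
        return NULL;
    closure->setFlatClosureUpvars(upvars);

    // Fill it from the enclosing frames (js_NewFlatClosure):
    JSUpvarArray *uva = fun->u.i.script->upvars();
    for (uint32 i = 0, n = uva->length; i < n; i++)
        upvars[i] = GetUpvar(cx, fun->u.i.script->staticLevel, uva->vector[i]);

    // Keep the captured Values alive while the clone lives (fun_trace):
    MarkValueRange(trc, fun->u.i.nupvars, closure->getFlatClosureUpvars(), "upvars");

    // Release the raw block when the clone dies (fun_finalize):
    cx->free((void *) closure->getFlatClosureUpvars());
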
- */ -typedef struct JSNameIndexPair JSNameIndexPair; - -struct JSNameIndexPair { - JSAtom *name; - uint16 index; - JSNameIndexPair *link; -}; - -struct JSLocalNameMap { - JSDHashTable names; - JSNameIndexPair *lastdup; -}; - -typedef struct JSLocalNameHashEntry { - JSDHashEntryHdr hdr; - JSAtom *name; - uint16 index; - uint8 localKind; -} JSLocalNameHashEntry; - -static void -FreeLocalNameHash(JSContext *cx, JSLocalNameMap *map) +const Shape * +JSFunction::lastArg() const { - JSNameIndexPair *dup, *next; - - for (dup = map->lastdup; dup; dup = next) { - next = dup->link; - cx->free(dup); + const Shape *shape = lastVar(); + if (u.i.nvars != 0) { + while (shape->previous() && shape->getter() != js_GetCallArg) + shape = shape->previous(); } - JS_DHashTableFinish(&map->names); - cx->free(map); + return shape; } -static JSBool -HashLocalName(JSContext *cx, JSLocalNameMap *map, JSAtom *name, - JSLocalKind localKind, uintN index) +const Shape * +JSFunction::lastVar() const { - JSLocalNameHashEntry *entry; - JSNameIndexPair *dup; - - JS_ASSERT(index <= JS_BITMASK(16)); -#if JS_HAS_DESTRUCTURING - if (!name) { - /* A destructuring pattern does not need a hash entry. */ - JS_ASSERT(localKind == JSLOCAL_ARG); - return JS_TRUE; + const Shape *shape = u.i.names; + if (u.i.nupvars != 0) { + while (shape->getter() == GetFlatUpvar) + shape = shape->previous(); } -#endif - entry = (JSLocalNameHashEntry *) - JS_DHashTableOperate(&map->names, name, JS_DHASH_ADD); - if (!entry) { - JS_ReportOutOfMemory(cx); - return JS_FALSE; - } - if (entry->name) { - JS_ASSERT(entry->name == name); - JS_ASSERT(entry->localKind == JSLOCAL_ARG && localKind == JSLOCAL_ARG); - dup = (JSNameIndexPair *) cx->malloc(sizeof *dup); - if (!dup) - return JS_FALSE; - dup->name = entry->name; - dup->index = entry->index; - dup->link = map->lastdup; - map->lastdup = dup; - } - entry->name = name; - entry->index = (uint16) index; - entry->localKind = (uint8) localKind; - return JS_TRUE; + return shape; } -JSBool -js_AddLocal(JSContext *cx, JSFunction *fun, JSAtom *atom, JSLocalKind kind) +bool +JSFunction::addLocal(JSContext *cx, JSAtom *atom, JSLocalKind kind) { - jsuword taggedAtom; + JS_ASSERT(FUN_INTERPRETED(this)); + JS_ASSERT(!u.i.script); + + /* + * We still follow 10.2.3 of ES3 and make argument and variable properties + * of the Call objects enumerable. ES5 reformulated all of its Clause 10 to + * avoid objects as activations, something we should do too. 
+ */ + uintN attrs = JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_SHARED; uint16 *indexp; - uintN n, i; - jsuword *array; - JSLocalNameMap *map; + PropertyOp getter, setter; + uint32 slot = JSSLOT_START(&js_CallClass) + CALL_CLASS_RESERVED_SLOTS; - JS_ASSERT(FUN_INTERPRETED(fun)); - JS_ASSERT(!fun->u.i.script); - JS_ASSERT(((jsuword) atom & 1) == 0); - taggedAtom = (jsuword) atom; if (kind == JSLOCAL_ARG) { - indexp = &fun->nargs; + JS_ASSERT(u.i.nupvars == 0); + + indexp = &nargs; + getter = js_GetCallArg; + setter = SetCallArg; + slot += nargs; } else if (kind == JSLOCAL_UPVAR) { - indexp = &fun->u.i.nupvars; + indexp = &u.i.nupvars; + getter = GetFlatUpvar; + setter = SetFlatUpvar; + slot = SHAPE_INVALID_SLOT; } else { - indexp = &fun->u.i.nvars; + JS_ASSERT(u.i.nupvars == 0); + + indexp = &u.i.nvars; + getter = js_GetCallVar; + setter = SetCallVar; if (kind == JSLOCAL_CONST) - taggedAtom |= 1; + attrs |= JSPROP_READONLY; else JS_ASSERT(kind == JSLOCAL_VAR); + slot += nargs + u.i.nvars; } - n = fun->countLocalNames(); - if (n == 0) { - JS_ASSERT(fun->u.i.names.taggedAtom == 0); - fun->u.i.names.taggedAtom = taggedAtom; - } else if (n < MAX_ARRAY_LOCALS) { - if (n > 1) { - array = fun->u.i.names.array; - } else { - array = (jsuword *) cx->malloc(MAX_ARRAY_LOCALS * sizeof *array); - if (!array) - return JS_FALSE; - array[0] = fun->u.i.names.taggedAtom; - fun->u.i.names.array = array; - } - if (kind == JSLOCAL_ARG) { + + if (*indexp == JS_BITMASK(16)) { + JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, + (kind == JSLOCAL_ARG) + ? JSMSG_TOO_MANY_FUN_ARGS + : JSMSG_TOO_MANY_LOCALS); + return false; + } + + Shape **listp = &u.i.names; + Shape *parent = *listp; + jsid id; + + /* + * The destructuring formal parameter parser adds a null atom, which we + * encode as an INT id. The parser adds such locals after adding vars for + * the destructured-to parameter bindings -- those must be vars to avoid + * aliasing arguments[i] for any i -- so we must switch u.i.names to a + * dictionary list to cope with insertion in the middle of an index-named + * shape for the object or array argument. + */ + if (!atom) { + JS_ASSERT(kind == JSLOCAL_ARG); + if (u.i.nvars != 0) { /* - * A destructuring argument pattern adds variables, not arguments, - * so for the following arguments nvars != 0. + * A dictionary list needed only if the destructing pattern wasn't + * empty, i.e., there were vars for its destructured-to bindings. */ -#if JS_HAS_DESTRUCTURING - if (fun->u.i.nvars != 0) { - memmove(array + fun->nargs + 1, array + fun->nargs, - fun->u.i.nvars * sizeof *array); - } -#else - JS_ASSERT(fun->u.i.nvars == 0); -#endif - array[fun->nargs] = taggedAtom; - } else { - array[n] = taggedAtom; - } - } else if (n == MAX_ARRAY_LOCALS) { - array = fun->u.i.names.array; - map = (JSLocalNameMap *) cx->malloc(sizeof *map); - if (!map) - return JS_FALSE; - if (!JS_DHashTableInit(&map->names, JS_DHashGetStubOps(), - NULL, sizeof(JSLocalNameHashEntry), - JS_DHASH_DEFAULT_CAPACITY(MAX_ARRAY_LOCALS - * 2))) { - JS_ReportOutOfMemory(cx); - cx->free(map); - return JS_FALSE; - } - - map->lastdup = NULL; - for (i = 0; i != MAX_ARRAY_LOCALS; ++i) { - taggedAtom = array[i]; - uintN j = i; - JSLocalKind k = JSLOCAL_ARG; - if (j >= fun->nargs) { - j -= fun->nargs; - if (j < fun->u.i.nvars) { - k = (taggedAtom & 1) ? 
JSLOCAL_CONST : JSLOCAL_VAR; - } else { - j -= fun->u.i.nvars; - k = JSLOCAL_UPVAR; - } - } - if (!HashLocalName(cx, map, (JSAtom *) (taggedAtom & ~1), k, j)) { - FreeLocalNameHash(cx, map); - return JS_FALSE; + if (!parent->inDictionary() && !(parent = Shape::newDictionaryList(cx, listp))) + return false; + while (parent->parent && parent->getter() != js_GetCallArg) { + ++parent->slot; + listp = &parent->parent; + parent = *listp; } } - if (!HashLocalName(cx, map, atom, kind, *indexp)) { - FreeLocalNameHash(cx, map); - return JS_FALSE; - } - - /* - * At this point the entry is added and we cannot fail. It is time - * to replace fun->u.i.names with the built map. - */ - fun->u.i.names.map = map; - cx->free(array); + id = INT_TO_JSID(nargs); } else { - if (*indexp == JS_BITMASK(16)) { - JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, - (kind == JSLOCAL_ARG) - ? JSMSG_TOO_MANY_FUN_ARGS - : JSMSG_TOO_MANY_LOCALS); - return JS_FALSE; - } - if (!HashLocalName(cx, fun->u.i.names.map, atom, kind, *indexp)) - return JS_FALSE; + id = ATOM_TO_JSID(atom); } - /* Update the argument or variable counter. */ + Shape child(id, getter, setter, slot, attrs, Shape::HAS_SHORTID, *indexp); + + Shape *shape = parent->getChild(cx, child, listp); + if (!shape) + return false; + + JS_ASSERT_IF(!shape->inDictionary(), u.i.names == shape); ++*indexp; - return JS_TRUE; + return true; } JSLocalKind -js_LookupLocal(JSContext *cx, JSFunction *fun, JSAtom *atom, uintN *indexp) +JSFunction::lookupLocal(JSContext *cx, JSAtom *atom, uintN *indexp) { - uintN n, i, upvar_base; - jsuword *array; - JSLocalNameHashEntry *entry; + JS_ASSERT(FUN_INTERPRETED(this)); - JS_ASSERT(FUN_INTERPRETED(fun)); - n = fun->countLocalNames(); - if (n == 0) - return JSLOCAL_NONE; - if (n <= MAX_ARRAY_LOCALS) { - array = (n == 1) ? &fun->u.i.names.taggedAtom : fun->u.i.names.array; + Shape *shape = SHAPE_FETCH(Shape::search(&u.i.names, ATOM_TO_JSID(atom))); + if (shape) { + JSLocalKind localKind; - /* Search from the tail to pick up the last duplicated name. */ - i = n; - upvar_base = fun->countArgsAndVars(); - do { - --i; - if (atom == JS_LOCAL_NAME_TO_ATOM(array[i])) { - if (i < fun->nargs) { - if (indexp) - *indexp = i; - return JSLOCAL_ARG; - } - if (i >= upvar_base) { - if (indexp) - *indexp = i - upvar_base; - return JSLOCAL_UPVAR; - } - if (indexp) - *indexp = i - fun->nargs; - return JS_LOCAL_NAME_IS_CONST(array[i]) - ? 
JSLOCAL_CONST - : JSLOCAL_VAR; - } - } while (i != 0); - } else { - entry = (JSLocalNameHashEntry *) - JS_DHashTableOperate(&fun->u.i.names.map->names, atom, - JS_DHASH_LOOKUP); - if (JS_DHASH_ENTRY_IS_BUSY(&entry->hdr)) { - JS_ASSERT(entry->localKind != JSLOCAL_NONE); - if (indexp) - *indexp = entry->index; - return (JSLocalKind) entry->localKind; - } + if (shape->getter() == js_GetCallArg) + localKind = JSLOCAL_ARG; + else if (shape->getter() == GetFlatUpvar) + localKind = JSLOCAL_UPVAR; + else if (!shape->writable()) + localKind = JSLOCAL_CONST; + else + localKind = JSLOCAL_VAR; + + if (indexp) + *indexp = shape->shortid; + return localKind; } return JSLOCAL_NONE; } -typedef struct JSLocalNameEnumeratorArgs { - JSFunction *fun; - jsuword *names; -#ifdef DEBUG - uintN nCopiedArgs; - uintN nCopiedVars; -#endif -} JSLocalNameEnumeratorArgs; - -static JSDHashOperator -get_local_names_enumerator(JSDHashTable *table, JSDHashEntryHdr *hdr, - uint32 number, void *arg) -{ - JSLocalNameHashEntry *entry; - JSLocalNameEnumeratorArgs *args; - uint i; - jsuword constFlag; - - entry = (JSLocalNameHashEntry *) hdr; - args = (JSLocalNameEnumeratorArgs *) arg; - JS_ASSERT(entry->name); - if (entry->localKind == JSLOCAL_ARG) { - JS_ASSERT(entry->index < args->fun->nargs); - JS_ASSERT(args->nCopiedArgs++ < args->fun->nargs); - i = entry->index; - constFlag = 0; - } else { - JS_ASSERT(entry->localKind == JSLOCAL_VAR || - entry->localKind == JSLOCAL_CONST || - entry->localKind == JSLOCAL_UPVAR); - JS_ASSERT(entry->index < args->fun->u.i.nvars + args->fun->u.i.nupvars); - JS_ASSERT(args->nCopiedVars++ < unsigned(args->fun->u.i.nvars + args->fun->u.i.nupvars)); - i = args->fun->nargs; - if (entry->localKind == JSLOCAL_UPVAR) - i += args->fun->u.i.nvars; - i += entry->index; - constFlag = (entry->localKind == JSLOCAL_CONST); - } - args->names[i] = (jsuword) entry->name | constFlag; - return JS_DHASH_NEXT; -} - jsuword * -js_GetLocalNameArray(JSContext *cx, JSFunction *fun, JSArenaPool *pool) +JSFunction::getLocalNameArray(JSContext *cx, JSArenaPool *pool) { - uintN n; + JS_ASSERT(hasLocalNames()); + + uintN n = countLocalNames(); jsuword *names; - JSLocalNameMap *map; - JSLocalNameEnumeratorArgs args; - JSNameIndexPair *dup; - - JS_ASSERT(fun->hasLocalNames()); - n = fun->countLocalNames(); - - if (n <= MAX_ARRAY_LOCALS) - return (n == 1) ? &fun->u.i.names.taggedAtom : fun->u.i.names.array; /* * No need to check for overflow of the allocation size as we are making a * copy of already allocated data. As such it must fit size_t. */ - JS_ARENA_ALLOCATE_CAST(names, jsuword *, pool, (size_t) n * sizeof *names); + JS_ARENA_ALLOCATE_CAST(names, jsuword *, pool, size_t(n) * sizeof *names); if (!names) { js_ReportOutOfScriptQuota(cx); return NULL; } -#if JS_HAS_DESTRUCTURING - /* Some parameter names can be NULL due to destructuring patterns. 
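
The dictionary-list special case in addLocal above exists because of how destructuring formals are declared. A hypothetical sequence of addLocal calls for `function f([a, b], c)`, inferred from addLocal's own comment (the exact parser ordering beyond what that comment states is an assumption, and aAtom/bAtom/cAtom are placeholder names):

    fun->addLocal(cx, aAtom, JSLOCAL_VAR);   // destructured-to bindings are vars...
    fun->addLocal(cx, bAtom, JSLOCAL_VAR);   // ...so they never alias arguments[i]
    fun->addLocal(cx, NULL,  JSLOCAL_ARG);   // null atom: INT-id placeholder for the [a, b] slot;
                                             // with vars already present this flips u.i.names
                                             // into dictionary mode
    fun->addLocal(cx, cAtom, JSLOCAL_ARG);   // ordinary trailing formal
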
*/ - PodZero(names, fun->nargs); -#endif - map = fun->u.i.names.map; - args.fun = fun; - args.names = names; #ifdef DEBUG - args.nCopiedArgs = 0; - args.nCopiedVars = 0; + for (uintN i = 0; i != n; i++) + names[i] = 0xdeadbeef; #endif - JS_DHashTableEnumerate(&map->names, get_local_names_enumerator, &args); - for (dup = map->lastdup; dup; dup = dup->link) { - JS_ASSERT(dup->index < fun->nargs); - JS_ASSERT(args.nCopiedArgs++ < fun->nargs); - names[dup->index] = (jsuword) dup->name; - } -#if !JS_HAS_DESTRUCTURING - JS_ASSERT(args.nCopiedArgs == fun->nargs); -#endif - JS_ASSERT(args.nCopiedVars == fun->u.i.nvars + fun->u.i.nupvars); + for (Shape::Range r = u.i.names; !r.empty(); r.popFront()) { + const Shape &shape = r.front(); + uintN index = uint16(shape.shortid); + jsuword constFlag = 0; + + if (shape.getter() == js_GetCallArg) { + JS_ASSERT(index < nargs); + } else if (shape.getter() == GetFlatUpvar) { + JS_ASSERT(index < u.i.nupvars); + index += nargs + u.i.nvars; + } else { + JS_ASSERT(index < u.i.nvars); + index += nargs; + if (!shape.writable()) + constFlag = 1; + } + + JSAtom *atom; + if (JSID_IS_ATOM(shape.id)) { + atom = JSID_TO_ATOM(shape.id); + } else { + JS_ASSERT(JSID_IS_INT(shape.id)); + JS_ASSERT(shape.getter() == js_GetCallArg); + atom = NULL; + } + + names[index] = jsuword(atom); + } + +#ifdef DEBUG + for (uintN i = 0; i != n; i++) + JS_ASSERT(names[i] != 0xdeadbeef); +#endif return names; } -static JSDHashOperator -trace_local_names_enumerator(JSDHashTable *table, JSDHashEntryHdr *hdr, - uint32 number, void *arg) +void +JSFunction::freezeLocalNames(JSContext *cx) { - JSLocalNameHashEntry *entry; - JSTracer *trc; + JS_ASSERT(FUN_INTERPRETED(this)); - entry = (JSLocalNameHashEntry *) hdr; - JS_ASSERT(entry->name); - trc = (JSTracer *) arg; - JS_SET_TRACING_INDEX(trc, - entry->localKind == JSLOCAL_ARG ? "arg" : "var", - entry->index); - Mark(trc, ATOM_TO_STRING(entry->name), JSTRACE_STRING); - return JS_DHASH_NEXT; -} - -static void -TraceLocalNames(JSTracer *trc, JSFunction *fun) -{ - uintN n, i; - JSAtom *atom; - jsuword *array; - - JS_ASSERT(FUN_INTERPRETED(fun)); - n = fun->countLocalNames(); - if (n == 0) - return; - if (n <= MAX_ARRAY_LOCALS) { - array = (n == 1) ? &fun->u.i.names.taggedAtom : fun->u.i.names.array; - i = n; + Shape *shape = u.i.names; + if (shape->inDictionary()) { do { - --i; - atom = (JSAtom *) (array[i] & ~1); - if (atom) { - JS_SET_TRACING_INDEX(trc, - i < fun->nargs ? "arg" : "var", - i < fun->nargs ? i : i - fun->nargs); - Mark(trc, ATOM_TO_STRING(atom), JSTRACE_STRING); - } - } while (i != 0); - } else { - JS_DHashTableEnumerate(&fun->u.i.names.map->names, - trace_local_names_enumerator, trc); - - /* - * No need to trace the list of duplicates in map->lastdup as the - * names there are traced when enumerating the hash table. - */ + JS_ASSERT(!shape->frozen()); + shape->setFrozen(); + } while ((shape = shape->parent) != NULL); } } -void -DestroyLocalNames(JSContext *cx, JSFunction *fun) -{ - uintN n; - - n = fun->countLocalNames(); - if (n <= 1) - return; - if (n <= MAX_ARRAY_LOCALS) - cx->free(fun->u.i.names.array); - else - FreeLocalNameHash(cx, fun->u.i.names.map); -} - -void -js_FreezeLocalNames(JSContext *cx, JSFunction *fun) -{ - uintN n; - jsuword *array; - - JS_ASSERT(FUN_INTERPRETED(fun)); - JS_ASSERT(!fun->u.i.script); - n = fun->nargs + fun->u.i.nvars + fun->u.i.nupvars; - if (2 <= n && n < MAX_ARRAY_LOCALS) { - /* Shrink over-allocated array ignoring realloc failures. 
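
getLocalNameArray above hands back the locals as an index-ordered array of tagged words allocated from the caller's arena pool, so callers bracket it with an arena mark and release, just as the removed js_GetLocalNameArray callers did. A condensed usage sketch (not compilable in isolation), using only calls visible in this diff:

    void *mark = JS_ARENA_MARK(&cx->tempPool);
    jsuword *names = fun->getLocalNameArray(cx, &cx->tempPool);
    if (names) {
        for (uintN i = 0, n = fun->countLocalNames(); i != n; i++) {
            JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(names[i]);   // null for a destructuring formal
            bool isConst = JS_LOCAL_NAME_IS_CONST(names[i]);  // meaningful only past the formals
            /* ... indexes below fun->nargs are formals; vars and upvars follow ... */
        }
    }
    JS_ARENA_RELEASE(&cx->tempPool, mark);
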
*/ - array = (jsuword *) cx->realloc(fun->u.i.names.array, - n * sizeof *array); - if (array) - fun->u.i.names.array = array; - } -#ifdef DEBUG - if (n > MAX_ARRAY_LOCALS) - JS_DHashMarkTableImmutable(&fun->u.i.names.map->names); -#endif -} - +/* + * This method is called only if we parsed a duplicate formal. Let's use the + * simplest possible algorithm, risking O(n^2) pain -- anyone dup'ing formals + * is asking for it! + */ JSAtom * JSFunction::findDuplicateFormal() const { @@ -3616,26 +3334,12 @@ JSFunction::findDuplicateFormal() const if (nargs <= 1) return NULL; - /* Function with two to MAX_ARRAY_LOCALS parameters use an aray. */ - unsigned n = nargs + u.i.nvars + u.i.nupvars; - if (n <= MAX_ARRAY_LOCALS) { - jsuword *array = u.i.names.array; - - /* Quadratic, but MAX_ARRAY_LOCALS is 8, so at most 28 comparisons. */ - for (unsigned i = 0; i < nargs; i++) { - for (unsigned j = i + 1; j < nargs; j++) { - if (array[i] == array[j]) - return JS_LOCAL_NAME_TO_ATOM(array[i]); - } + for (Shape::Range r = lastArg(); !r.empty(); r.popFront()) { + const Shape &shape = r.front(); + for (Shape::Range r2 = shape.previous(); !r2.empty(); r2.popFront()) { + if (r2.front().id == shape.id) + return JSID_TO_ATOM(shape.id); } - return NULL; } - - /* - * Functions with more than MAX_ARRAY_LOCALS parameters use a hash - * table. Hashed local name maps have already made a list of any - * duplicate argument names for us. - */ - JSNameIndexPair *dup = u.i.names.map->lastdup; - return dup ? dup->name : NULL; + return NULL; } diff --git a/js/src/jsfun.h b/js/src/jsfun.h index 95d932d4adf8..e93d08cd4aef 100644 --- a/js/src/jsfun.h +++ b/js/src/jsfun.h @@ -48,21 +48,6 @@ #include "jsatom.h" #include "jsstr.h" -typedef struct JSLocalNameMap JSLocalNameMap; - -/* - * Depending on the number of arguments and variables in the function their - * names and attributes are stored either as a single atom or as an array of - * tagged atoms (when there are few locals) or as a hash-based map (when there - * are many locals). In the first 2 cases the lowest bit of the atom is used - * as a tag to distinguish const from var. See jsfun.c for details. - */ -typedef union JSLocalNames { - jsuword taggedAtom; - jsuword *array; - JSLocalNameMap *map; -} JSLocalNames; - /* * The high two bits of JSFunction.flags encode whether the function is native * or interpreted, and if interpreted, what kind of optimized closure form (if @@ -143,6 +128,29 @@ typedef union JSLocalNames { JS_ASSERT((fun)->flags & JSFUN_TRCINFO), \ fun->u.n.trcinfo) +/* + * Formal parameters, local variables, and upvars are stored in a shape tree + * path with its latest node at fun->u.i.names. The addLocal, lookupLocal, and + * getLocalNameArray methods abstract away this detail. + * + * The lastArg, lastVar, and lastUpvar JSFunction methods provide more direct + * access to the shape path. These methods may be used to make a Shape::Range + * for iterating over the relevant shapes from youngest to oldest (i.e., last + * or right-most to first or left-most in source order). + * + * Sometimes iteration order must be from oldest to youngest, however. For such + * cases, use getLocalNameArray. The RAII helper class js::AutoLocalNameArray, + * defined in jscntxt.h, should be used where possible instead of direct calls + * to getLocalNameArray. 
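
As the header comment above says, these shape ranges run youngest-to-oldest, i.e. reverse source order; findDuplicateFormal earlier in this diff is the canonical user. A small illustrative walk over just the named formals (not part of the patch):

    // Illustrative only: visit every named formal parameter, newest first,
    // the same way findDuplicateFormal walks fun->lastArg() above.
    for (js::Shape::Range r = fun->lastArg(); !r.empty(); r.popFront()) {
        const js::Shape &shape = r.front();
        if (!JSID_IS_ATOM(shape.id))
            continue;                                 // INT ids are destructuring placeholders
        JSAtom *name = JSID_TO_ATOM(shape.id);
        uint16 argIndex = uint16(shape.shortid);      // index recorded by addLocal's HAS_SHORTID
        /* ... */
    }
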
+ */ +enum JSLocalKind { + JSLOCAL_NONE, + JSLOCAL_ARG, + JSLOCAL_VAR, + JSLOCAL_CONST, + JSLOCAL_UPVAR +}; + struct JSFunction : public JSObject { uint16 nargs; /* maximum number of specified arguments, @@ -171,7 +179,7 @@ struct JSFunction : public JSObject indirect eval; if true, then this function object's proto is the wrapped object */ JSScript *script; /* interpreted bytecode descriptor or null */ - JSLocalNames names; /* argument and variable names */ + js::Shape *names; /* argument and variable names */ } i; } u; JSAtom *atom; /* name for diagnostics and decompiling */ @@ -186,7 +194,7 @@ struct JSFunction : public JSObject inline bool inStrictMode() const; - inline bool isBound() const; + bool isBound() const; uintN countVars() const { JS_ASSERT(FUN_INTERPRETED(this)); @@ -213,13 +221,51 @@ struct JSFunction : public JSObject int sharpSlotBase(JSContext *cx); + uint32 countUpvarSlots() const; + + const js::Shape *lastArg() const; + const js::Shape *lastVar() const; + const js::Shape *lastUpvar() const { return u.i.names; } + + bool addLocal(JSContext *cx, JSAtom *atom, JSLocalKind kind); + + /* + * Look up an argument or variable name returning its kind when found or + * JSLOCAL_NONE when no such name exists. When indexp is not null and the + * name exists, *indexp will receive the index of the corresponding + * argument or variable. + */ + JSLocalKind lookupLocal(JSContext *cx, JSAtom *atom, uintN *indexp); + + /* + * Function and macros to work with local names as an array of words. + * getLocalNameArray returns the array, or null if we are out of memory. + * This function must be called only when fun->hasLocalNames(). + * + * The supplied pool is used to allocate the returned array, so the caller + * is obligated to mark and release to free it. + * + * The elements of the array with index less than fun->nargs correspond to + * the names of function formal parameters. An index >= fun->nargs + * addresses a var binding. Use JS_LOCAL_NAME_TO_ATOM to convert array's + * element to an atom pointer. This pointer can be null when the element is + * for a formal parameter corresponding to a destructuring pattern. + * + * If nameWord does not name a formal parameter, use JS_LOCAL_NAME_IS_CONST + * to check if nameWord corresponds to the const declaration. + */ + jsuword *getLocalNameArray(JSContext *cx, struct JSArenaPool *pool); + + void freezeLocalNames(JSContext *cx); + /* * If fun's formal parameters include any duplicate names, return one * of them (chosen arbitrarily). If they are all unique, return NULL. */ JSAtom *findDuplicateFormal() const; - uint32 countInterpretedReservedSlots() const; +#define JS_LOCAL_NAME_TO_ATOM(nameWord) ((JSAtom *) ((nameWord) & ~(jsuword) 1)) +#define JS_LOCAL_NAME_IS_CONST(nameWord) ((((nameWord) & (jsuword) 1)) != 0) bool mightEscape() const { return FUN_INTERPRETED(this) && (FUN_FLAT_CLOSURE(this) || u.i.nupvars == 0); @@ -262,6 +308,10 @@ struct JSFunction : public JSObject JS_ASSERT(joinable()); fslots[METHOD_ATOM_SLOT].setString(ATOM_TO_STRING(atom)); } + + /* Number of extra fixed function object slots besides JSSLOT_PRIVATE. */ + static const uint32 CLASS_RESERVED_SLOTS = JSObject::FUN_CLASS_RESERVED_SLOTS; + static const uint32 FIRST_FREE_SLOT = JSSLOT_PRIVATE + CLASS_RESERVED_SLOTS + 1; }; JS_STATIC_ASSERT(sizeof(JSFunction) % JS_GCTHING_ALIGN == 0); @@ -296,8 +346,16 @@ JS_STATIC_ASSERT(sizeof(JSFunction) % JS_GCTHING_ALIGN == 0); * single-threaded objects and GC heaps. 
*/ extern js::Class js_ArgumentsClass; + namespace js { + extern Class StrictArgumentsClass; + +struct ArgumentsData { + js::Value callee; + js::Value slots[1]; +}; + } inline bool @@ -318,12 +376,18 @@ JSObject::isArguments() const return isNormalArguments() || isStrictArguments(); } -#define JS_ARGUMENT_OBJECT_ON_TRACE ((void *)0xa126) +#define JS_ARGUMENTS_OBJECT_ON_TRACE ((void *)0xa126) extern JS_PUBLIC_DATA(js::Class) js_CallClass; extern JS_PUBLIC_DATA(js::Class) js_FunctionClass; extern js::Class js_DeclEnvClass; -extern const uint32 CALL_CLASS_FIXED_RESERVED_SLOTS; +extern const uint32 CALL_CLASS_RESERVED_SLOTS; + +inline bool +JSObject::isCall() const +{ + return getClass() == &js_CallClass; +} inline bool JSObject::isFunction() const @@ -331,18 +395,6 @@ JSObject::isFunction() const return getClass() == &js_FunctionClass; } -inline bool -JSObject::isCallable() -{ - return isFunction() || getClass()->call; -} - -static inline bool -js_IsCallable(const js::Value &v) -{ - return v.isObject() && v.toObject().isCallable(); -} - /* * NB: jsapi.h and jsobj.h must be included before any call to this macro. */ @@ -419,6 +471,9 @@ CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent) return js_CloneFunctionObject(cx, fun, parent, proto); } +extern JSObject * JS_FASTCALL +js_AllocFlatClosure(JSContext *cx, JSFunction *fun, JSObject *scopeChain); + extern JS_REQUIRES_STACK JSObject * js_NewFlatClosure(JSContext *cx, JSFunction *fun); @@ -531,56 +586,6 @@ JS_STATIC_ASSERT(((JS_ARGS_LENGTH_MAX << 1) | 1) <= JSVAL_INT_MAX); extern JSBool js_XDRFunctionObject(JSXDRState *xdr, JSObject **objp); -typedef enum JSLocalKind { - JSLOCAL_NONE, - JSLOCAL_ARG, - JSLOCAL_VAR, - JSLOCAL_CONST, - JSLOCAL_UPVAR -} JSLocalKind; - -extern JSBool -js_AddLocal(JSContext *cx, JSFunction *fun, JSAtom *atom, JSLocalKind kind); - -/* - * Look up an argument or variable name returning its kind when found or - * JSLOCAL_NONE when no such name exists. When indexp is not null and the name - * exists, *indexp will receive the index of the corresponding argument or - * variable. - */ -extern JSLocalKind -js_LookupLocal(JSContext *cx, JSFunction *fun, JSAtom *atom, uintN *indexp); - -/* - * Functions to work with local names as an array of words. - * - * js_GetLocalNameArray returns the array, or null if we are out of memory. - * This function must be called only when fun->hasLocalNames(). - * - * The supplied pool is used to allocate the returned array, so the caller is - * obligated to mark and release to free it. - * - * The elements of the array with index less than fun->nargs correspond to the - * names of function formal parameters. An index >= fun->nargs addresses a var - * binding. Use JS_LOCAL_NAME_TO_ATOM to convert array's element to an atom - * pointer. This pointer can be null when the element is for a formal parameter - * corresponding to a destructuring pattern. - * - * If nameWord does not name a formal parameter, use JS_LOCAL_NAME_IS_CONST to - * check if nameWord corresponds to the const declaration. 
- */ -extern jsuword * -js_GetLocalNameArray(JSContext *cx, JSFunction *fun, struct JSArenaPool *pool); - -#define JS_LOCAL_NAME_TO_ATOM(nameWord) \ - ((JSAtom *) ((nameWord) & ~(jsuword) 1)) - -#define JS_LOCAL_NAME_IS_CONST(nameWord) \ - ((((nameWord) & (jsuword) 1)) != 0) - -extern void -js_FreezeLocalNames(JSContext *cx, JSFunction *fun); - extern JSBool js_fun_apply(JSContext *cx, uintN argc, js::Value *vp); diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp index 34f10190ed8d..0e775c01975b 100644 --- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -2024,8 +2024,8 @@ AutoGCRooter::trace(JSTracer *trc) MarkValue(trc, static_cast(this)->val, "js::AutoValueRooter.val"); return; - case SPROP: - static_cast(this)->sprop->trace(trc); + case SHAPE: + static_cast(this)->shape->trace(trc); return; case PARSER: @@ -2244,6 +2244,19 @@ MarkRuntime(JSTracer *trc) for (ThreadDataIter i(rt); !i.empty(); i.popFront()) i.threadData()->mark(trc); + if (rt->emptyArgumentsShape) + rt->emptyArgumentsShape->trace(trc); + if (rt->emptyBlockShape) + rt->emptyBlockShape->trace(trc); + if (rt->emptyCallShape) + rt->emptyCallShape->trace(trc); + if (rt->emptyDeclEnvShape) + rt->emptyDeclEnvShape->trace(trc); + if (rt->emptyEnumeratorShape) + rt->emptyEnumeratorShape->trace(trc); + if (rt->emptyWithShape) + rt->emptyWithShape->trace(trc); + /* * We mark extra roots at the last thing so it can use use additional * colors to implement cycle collection. @@ -2319,15 +2332,7 @@ FinalizeObject(JSContext *cx, JSObject *obj, unsigned thingKind) DTrace::finalizeObject(obj); - if (JS_LIKELY(obj->isNative())) { - JSScope *scope = obj->scope(); - if (scope->isSharedEmpty()) - static_cast(scope)->dropFromGC(cx); - else - scope->destroy(cx); - } - if (obj->hasSlotsArray()) - obj->freeSlotsArray(cx); + obj->finish(cx); } inline void @@ -2635,7 +2640,7 @@ SweepCompartments(JSContext *cx) /* * Common cache invalidation and so forth that must be done before GC. Even if - * GCUntilDone calls GC several times, this work only needs to be done once. + * GCUntilDone calls GC several times, this work needs to be done only once. */ static void PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind) @@ -2666,8 +2671,7 @@ PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind) #endif ) { rt->gcRegenShapes = true; - rt->gcRegenShapesScopeFlag ^= JSScope::SHAPE_REGEN; - rt->shapeGen = JSScope::LAST_RESERVED_SHAPE; + rt->shapeGen = Shape::LAST_RESERVED_SHAPE; rt->protoHazardShape = 0; } @@ -2745,7 +2749,7 @@ MarkAndSweep(JSContext *cx GCTIMER_PARAM) #ifdef DEBUG /* Save the pre-sweep count of scope-mapped properties. */ - rt->liveScopePropsPreSweep = rt->liveScopeProps; + rt->liveObjectPropsPreSweep = rt->liveObjectProps; #endif /* @@ -2780,10 +2784,10 @@ MarkAndSweep(JSContext *cx GCTIMER_PARAM) SweepCompartments(cx); /* - * Sweep the runtime's property tree after finalizing objects, in case any + * Sweep the runtime's property trees after finalizing objects, in case any * had watchpoints referencing tree nodes. 
*/ - js::SweepScopeProperties(cx); + js::PropertyTree::sweepShapes(cx); /* * Sweep script filenames after sweeping functions in the generic loop diff --git a/js/src/jsgc.h b/js/src/jsgc.h index 08aa8d1a06de..118ab2c21f63 100644 --- a/js/src/jsgc.h +++ b/js/src/jsgc.h @@ -294,7 +294,7 @@ js_NewGCExternalString(JSContext *cx, uintN type) return (JSString *) js_NewFinalizableGCThing(cx, type); } -static inline JSFunction* +static inline JSFunction * js_NewGCFunction(JSContext *cx) { JSFunction* obj = (JSFunction *)js_NewFinalizableGCThing(cx, FINALIZE_FUNCTION); @@ -383,7 +383,7 @@ class BackgroundSweepTask : public JSBackgroundTask { BackgroundSweepTask() : freeCursor(NULL), freeCursorEnd(NULL) { } - void freeLater(void* ptr) { + void freeLater(void *ptr) { if (freeCursor != freeCursorEnd) *freeCursor++ = ptr; else diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index ae434a48dfb0..fae4a7ef6b1b 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -306,7 +306,7 @@ ComputeThisFromArgv(JSContext *cx, Value *argv) } thisp = &argv[-1].toObject(); - if (thisp->getClass() == &js_CallClass || thisp->getClass() == &js_BlockClass) + if (thisp->isCall() || thisp->isBlock()) return ComputeGlobalThis(cx, argv); return CallThisObjectHook(cx, thisp, argv); @@ -379,7 +379,7 @@ js_OnUnknownMethod(JSContext *cx, Value *vp) * NoSuchMethod helper objects own no manually allocated resources. */ obj->map = NULL; - obj->init(&js_NoSuchMethodClass, NULL, NULL, tvr.value()); + obj->init(&js_NoSuchMethodClass, NULL, NULL, tvr.value(), cx); obj->fslots[JSSLOT_SAVED_ID] = vp[0]; vp[0].setObject(*obj); } @@ -432,10 +432,10 @@ class AutoPreserveEnumerators { }; static JS_REQUIRES_STACK bool -callJSNative(JSContext *cx, CallOp callOp, JSObject *thisp, uintN argc, Value *argv, Value *rval) +CallJSNative(JSContext *cx, CallOp callOp, JSObject *thisp, uintN argc, Value *argv, Value *rval) { Value *vp = argv - 2; - if (callJSFastNative(cx, callOp, argc, vp)) { + if (CallJSFastNative(cx, callOp, argc, vp)) { *rval = JS_RVAL(cx, vp); return true; } @@ -453,7 +453,7 @@ InvokeCommon(JSContext *cx, JSFunction *fun, JSScript *script, T native, #ifdef DEBUG_NOT_THROWING JSBool alreadyThrowing = cx->throwing; #endif - JSBool ok = callJSFastNative(cx, (FastNative) native, args.argc(), args.base()); + JSBool ok = CallJSFastNative(cx, (FastNative) native, args.argc(), args.base()); JS_RUNTIME_METER(cx->runtime, nativeCalls); #ifdef DEBUG_NOT_THROWING if (ok && !alreadyThrowing) @@ -562,7 +562,7 @@ InvokeCommon(JSContext *cx, JSFunction *fun, JSScript *script, T native, #endif JSObject *thisp = fp->getThisValue().toObjectOrNull(); - ok = callJSNative(cx, native, thisp, fp->numActualArgs(), fp->argv, + ok = CallJSNative(cx, native, thisp, fp->numActualArgs(), fp->argv, fp->addressReturnValue()); JS_ASSERT(cx->fp() == fp); @@ -920,7 +920,7 @@ CheckRedeclaration(JSContext *cx, JSObject *obj, jsid id, uintN attrs, if (!prop) return true; if (obj2->isNative()) { - oldAttrs = ((JSScopeProperty *) prop)->attributes(); + oldAttrs = ((Shape *) prop)->attributes(); /* If our caller doesn't want prop, unlock obj2. 
*/ if (!propp) @@ -1923,27 +1923,27 @@ AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, JSFrameRegs& regs, JS_ASSERT(prop); JS_ASSERT(pobj == found); - JSScopeProperty *sprop = (JSScopeProperty *) prop; + const Shape *shape = (Shape *) prop; if (entry->vword.isSlot()) { - JS_ASSERT(entry->vword.toSlot() == sprop->slot); - JS_ASSERT(!sprop->isMethod()); - } else if (entry->vword.isSprop()) { - JS_ASSERT(entry->vword.toSprop() == sprop); - JS_ASSERT_IF(sprop->isMethod(), - &sprop->methodObject() == &pobj->lockedGetSlot(sprop->slot).toObject()); + JS_ASSERT(entry->vword.toSlot() == shape->slot); + JS_ASSERT(!shape->isMethod()); + } else if (entry->vword.isShape()) { + JS_ASSERT(entry->vword.toShape() == shape); + JS_ASSERT_IF(shape->isMethod(), + &shape->methodObject() == &pobj->lockedGetSlot(shape->slot).toObject()); } else { Value v; JS_ASSERT(entry->vword.isFunObj()); JS_ASSERT(!entry->vword.isNull()); - JS_ASSERT(pobj->scope()->brandedOrHasMethodBarrier()); - JS_ASSERT(sprop->hasDefaultGetterOrIsMethod()); - JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, pobj->scope())); - v = pobj->lockedGetSlot(sprop->slot); + JS_ASSERT(pobj->brandedOrHasMethodBarrier()); + JS_ASSERT(shape->hasDefaultGetterOrIsMethod()); + JS_ASSERT(pobj->containsSlot(shape->slot)); + v = pobj->lockedGetSlot(shape->slot); JS_ASSERT(&entry->vword.toFunObj() == &v.toObject()); - if (sprop->isMethod()) { + if (shape->isMethod()) { JS_ASSERT(js_CodeSpec[*regs.pc].format & JOF_CALLOP); - JS_ASSERT(&sprop->methodObject() == &v.toObject()); + JS_ASSERT(&shape->methodObject() == &v.toObject()); } } @@ -1964,7 +1964,7 @@ JS_STATIC_ASSERT(JSOP_GETGVAR_LENGTH == JSOP_CALLGVAR_LENGTH); JS_STATIC_ASSERT(JSOP_GETUPVAR_LENGTH == JSOP_CALLUPVAR_LENGTH); JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_CALLUPVAR_DBG_LENGTH); JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_GETUPVAR_LENGTH); -JS_STATIC_ASSERT(JSOP_GETDSLOT_LENGTH == JSOP_CALLDSLOT_LENGTH); +JS_STATIC_ASSERT(JSOP_GETFCSLOT_LENGTH == JSOP_CALLFCSLOT_LENGTH); JS_STATIC_ASSERT(JSOP_GETARG_LENGTH == JSOP_CALLARG_LENGTH); JS_STATIC_ASSERT(JSOP_GETLOCAL_LENGTH == JSOP_CALLLOCAL_LENGTH); JS_STATIC_ASSERT(JSOP_XMLNAME_LENGTH == JSOP_CALLXMLNAME_LENGTH); @@ -2309,7 +2309,7 @@ Interpret(JSContext *cx) /* * Optimized Get and SetVersion for proper script language versioning. * - * If any native method or Class/JSObjectOps hook calls js_SetVersion + * If any native method or a Class or ObjectOps hook calls js_SetVersion * and changes cx->version, the effect will "stick" and we will stop * maintaining currentVersion. This is relied upon by testsuites, for * the most part -- web browsers select version before compiling and not @@ -3019,32 +3019,32 @@ BEGIN_CASE(JSOP_PICK) } END_CASE(JSOP_PICK) -#define NATIVE_GET(cx,obj,pobj,sprop,getHow,vp) \ +#define NATIVE_GET(cx,obj,pobj,shape,getHow,vp) \ JS_BEGIN_MACRO \ - if (sprop->hasDefaultGetter()) { \ + if (shape->hasDefaultGetter()) { \ /* Fast path for Object instance properties. 
*/ \ - JS_ASSERT((sprop)->slot != SPROP_INVALID_SLOT || \ - !sprop->hasDefaultSetter()); \ - if (((sprop)->slot != SPROP_INVALID_SLOT)) \ - *(vp) = (pobj)->lockedGetSlot((sprop)->slot); \ + JS_ASSERT((shape)->slot != SHAPE_INVALID_SLOT || \ + !shape->hasDefaultSetter()); \ + if (((shape)->slot != SHAPE_INVALID_SLOT)) \ + *(vp) = (pobj)->lockedGetSlot((shape)->slot); \ else \ (vp)->setUndefined(); \ } else { \ - if (!js_NativeGet(cx, obj, pobj, sprop, getHow, vp)) \ + if (!js_NativeGet(cx, obj, pobj, shape, getHow, vp)) \ goto error; \ } \ JS_END_MACRO -#define NATIVE_SET(cx,obj,sprop,entry,vp) \ +#define NATIVE_SET(cx,obj,shape,entry,vp) \ JS_BEGIN_MACRO \ - TRACE_2(SetPropHit, entry, sprop); \ - if (sprop->hasDefaultSetter() && \ - (sprop)->slot != SPROP_INVALID_SLOT && \ - !(obj)->scope()->brandedOrHasMethodBarrier()) { \ + TRACE_2(SetPropHit, entry, shape); \ + if (shape->hasDefaultSetter() && \ + (shape)->slot != SHAPE_INVALID_SLOT && \ + !(obj)->brandedOrHasMethodBarrier()) { \ /* Fast path for, e.g., plain Object instance properties. */ \ - (obj)->lockedSetSlot((sprop)->slot, *vp); \ + (obj)->lockedSetSlot((shape)->slot, *vp); \ } else { \ - if (!js_NativeSet(cx, obj, sprop, false, vp)) \ + if (!js_NativeSet(cx, obj, shape, false, vp)) \ goto error; \ } \ JS_END_MACRO @@ -3711,7 +3711,7 @@ BEGIN_CASE(JSOP_NAMEDEC) ASSERT_VALID_PROPERTY_CACHE_HIT(0, obj, obj2, entry); if (obj == obj2 && entry->vword.isSlot()) { uint32 slot = entry->vword.toSlot(); - JS_ASSERT(slot < obj->scope()->freeslot); + JS_ASSERT(slot < obj->freeslot); Value &rref = obj->getSlotRef(slot); int32_t tmp; if (JS_LIKELY(rref.isInt32() && CanIncDecWithoutOverflow(tmp = rref.toInt32()))) { @@ -3922,7 +3922,7 @@ BEGIN_CASE(JSOP_UNBRANDTHIS) JSObject *obj = fp->getThisObject(cx); if (!obj) goto error; - if (!obj->unbrand(cx)) + if (obj->isNative() && !obj->unbrand(cx)) goto error; } END_CASE(JSOP_UNBRANDTHIS) @@ -3989,12 +3989,12 @@ BEGIN_CASE(JSOP_GETXPROP) rval.setObject(entry->vword.toFunObj()); } else if (entry->vword.isSlot()) { uint32 slot = entry->vword.toSlot(); - JS_ASSERT(slot < obj2->scope()->freeslot); + JS_ASSERT(slot < obj2->freeslot); rval = obj2->lockedGetSlot(slot); } else { - JS_ASSERT(entry->vword.isSprop()); - JSScopeProperty *sprop = entry->vword.toSprop(); - NATIVE_GET(cx, obj, obj2, sprop, + JS_ASSERT(entry->vword.isShape()); + const Shape *shape = entry->vword.toShape(); + NATIVE_GET(cx, obj, obj2, shape, fp->hasIMacroPC() ? JSGET_NO_METHOD_BARRIER : JSGET_METHOD_BARRIER, &rval); } @@ -4084,12 +4084,12 @@ BEGIN_CASE(JSOP_CALLPROP) rval.setObject(entry->vword.toFunObj()); } else if (entry->vword.isSlot()) { uint32 slot = entry->vword.toSlot(); - JS_ASSERT(slot < obj2->scope()->freeslot); + JS_ASSERT(slot < obj2->freeslot); rval = obj2->lockedGetSlot(slot); } else { - JS_ASSERT(entry->vword.isSprop()); - JSScopeProperty *sprop = entry->vword.toSprop(); - NATIVE_GET(cx, &objv.toObject(), obj2, sprop, JSGET_NO_METHOD_BARRIER, &rval); + JS_ASSERT(entry->vword.isShape()); + const Shape *shape = entry->vword.toShape(); + NATIVE_GET(cx, &objv.toObject(), obj2, shape, JSGET_NO_METHOD_BARRIER, &rval); } regs.sp[-1] = rval; PUSH_COPY(lval); @@ -4190,6 +4190,8 @@ BEGIN_CASE(JSOP_SETMETHOD) JSObject *obj2; JSAtom *atom; if (cache->testForSet(cx, regs.pc, obj, &entry, &obj2, &atom)) { + JS_ASSERT(!obj->sealed()); + /* * Fast property cache hit, only partially confirmed by * testForSet. 
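Aside: the NATIVE_GET and NATIVE_SET macros above short-circuit to a raw slot read or write when the shape has the default getter or setter and a live slot, and only fall back to js_NativeGet/js_NativeSet otherwise. Below is a simplified standalone sketch of that dispatch; ToyObject, ToyShape, and NativeGet are invented for illustration, not the engine's types.

    #include <cstdint>
    #include <cstdio>
    #include <functional>
    #include <vector>

    constexpr uint32_t INVALID_SLOT = ~0u;       // analogue of SHAPE_INVALID_SLOT

    struct ToyObject {
        std::vector<int> slots;
    };

    struct ToyShape {
        uint32_t slot = INVALID_SLOT;
        std::function<int(const ToyObject &)> getter;  // empty => "default getter"
    };

    // Mirrors the NATIVE_GET control flow: a default getter with a valid
    // slot reads storage directly; anything else calls the getter hook.
    static int NativeGet(const ToyObject &obj, const ToyShape &shape)
    {
        if (!shape.getter)
            return shape.slot != INVALID_SLOT ? obj.slots[shape.slot] : 0;  // 0 ~ undefined
        return shape.getter(obj);                                           // slow path
    }

    int main()
    {
        ToyObject obj;
        obj.slots = { 7, 42 };

        ToyShape plain;                                   // fast path: plain data property
        plain.slot = 1;

        ToyShape computed;                                // slow path: getter hook
        computed.getter = [](const ToyObject &) { return 99; };

        std::printf("%d %d\n", NativeGet(obj, plain), NativeGet(obj, computed));  // 42 99
        return 0;
    }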
We know that the entry applies to regs.pc and @@ -4199,140 +4201,110 @@ BEGIN_CASE(JSOP_SETMETHOD) * directly to obj by this set, or on an existing "own" * property, or on a prototype property that has a setter. */ - JS_ASSERT(entry->vword.isSprop()); - JSScopeProperty *sprop = entry->vword.toSprop(); - JS_ASSERT_IF(sprop->isDataDescriptor(), sprop->writable()); - JS_ASSERT_IF(sprop->hasSlot(), entry->vcapTag() == 0); - - JSScope *scope = obj->scope(); - JS_ASSERT(!scope->sealed()); + const Shape *shape = entry->vword.toShape(); + JS_ASSERT_IF(shape->isDataDescriptor(), shape->writable()); + JS_ASSERT_IF(shape->hasSlot(), entry->vcapTag() == 0); /* - * Fastest path: check whether the cached sprop is already - * in scope and call NATIVE_SET and break to get out of the - * do-while(0). But we can call NATIVE_SET only if obj owns - * scope or sprop is shared. + * Fastest path: check whether obj already has the cached shape and + * call NATIVE_SET and break to get out of the do-while(0). But we + * can call NATIVE_SET only for a direct or proto-setter hit. */ - bool checkForAdd; - if (!sprop->hasSlot()) { + if (!entry->adding()) { if (entry->vcapTag() == 0 || - ((obj2 = obj->getProto()) && - obj2->isNative() && - obj2->shape() == entry->vshape())) { - goto fast_set_propcache_hit; - } + (obj2 = obj->getProto()) && obj2->shape() == entry->vshape()) + { +#ifdef DEBUG + if (entry->directHit()) { + JS_ASSERT(obj->nativeContains(*shape)); + } else { + JS_ASSERT(obj2->nativeContains(*shape)); + JS_ASSERT(entry->vcapTag() == 1); + JS_ASSERT(entry->kshape != entry->vshape()); + JS_ASSERT(!shape->hasSlot()); + } +#endif - /* The cache entry doesn't apply. vshape mismatch. */ - checkForAdd = false; - } else if (!scope->isSharedEmpty()) { - if (sprop == scope->lastProperty() || scope->hasProperty(sprop)) { - fast_set_propcache_hit: PCMETER(cache->pchits++); PCMETER(cache->setpchits++); - NATIVE_SET(cx, obj, sprop, entry, &rval); + NATIVE_SET(cx, obj, shape, entry, &rval); break; } - checkForAdd = sprop->hasSlot() && sprop->parent == scope->lastProperty(); } else { - /* - * We check that cx own obj here and will continue to - * own it after js_GetMutableScope returns so we can - * continue to skip JS_UNLOCK_OBJ calls. - */ - JS_ASSERT(CX_OWNS_OBJECT_TITLE(cx, obj)); - scope = js_GetMutableScope(cx, obj); - JS_ASSERT(CX_OWNS_OBJECT_TITLE(cx, obj)); - if (!scope) - goto error; - checkForAdd = !sprop->parent; - } - - uint32 slot; - if (checkForAdd && - entry->vshape() == rt->protoHazardShape && - sprop->hasDefaultSetter() && - (slot = sprop->slot) == scope->freeslot) { - /* - * Fast path: adding a plain old property that was once - * at the frontier of the property tree, whose slot is - * next to claim among the allocated slots in obj, - * where scope->table has not been created yet. - * - * We may want to remove hazard conditions above and - * inline compensation code here, depending on - * real-world workloads. - */ - PCMETER(cache->pchits++); - PCMETER(cache->addpchits++); - - if (slot < obj->numSlots()) { - ++scope->freeslot; - } else { - if (!js_AllocSlot(cx, obj, &slot)) + if (obj->nativeEmpty()) { + /* + * We check that cx owns obj here and will continue to own + * it after ensureClassReservedSlotsForEmptyObject returns + * so we can continue to skip JS_UNLOCK_OBJ calls. 
+ */ + JS_ASSERT(CX_OWNS_OBJECT_TITLE(cx, obj)); + bool ok = obj->ensureClassReservedSlotsForEmptyObject(cx); + JS_ASSERT(CX_OWNS_OBJECT_TITLE(cx, obj)); + if (!ok) goto error; - JS_ASSERT(slot + 1 == scope->freeslot); } - /* - * If this obj's number of reserved slots differed, or - * if something created a hash table for scope, we must - * pay the price of JSScope::putProperty. - * - * (A built-in object with a pre-allocated but not fixed - * population of reserved slots hook can cause scopes of the - * same shape to have different freeslot values. Arguments, - * Block, Call, and certain Function objects pre-allocate - * reserveds lots this way. This is what causes the slot != - * sprop->slot case. See js_GetMutableScope. FIXME 558451) - */ - if (slot == sprop->slot && !scope->table) { - scope->extend(cx, sprop); - } else { - JSScopeProperty *sprop2 = - scope->putProperty(cx, sprop->id, - sprop->getter(), sprop->setter(), - slot, sprop->attributes(), - sprop->getFlags(), sprop->shortid); - if (!sprop2) { - js_FreeSlot(cx, obj, slot); - goto error; + uint32 slot; + if (shape->previous() == obj->lastProperty() && + entry->vshape() == rt->protoHazardShape && + shape->hasDefaultSetter()) { + slot = shape->slot; + JS_ASSERT(slot == obj->freeslot); + + /* + * Fast path: adding a plain old property that was once at + * the frontier of the property tree, whose slot is next to + * claim among the already-allocated slots in obj, where + * shape->table has not been created yet. + */ + PCMETER(cache->pchits++); + PCMETER(cache->addpchits++); + + if (slot < obj->numSlots()) { + JS_ASSERT(obj->getSlot(slot).isUndefined()); + ++obj->freeslot; + JS_ASSERT(obj->freeslot != 0); + } else { + if (!obj->allocSlot(cx, &slot)) + goto error; + JS_ASSERT(slot == shape->slot); } - sprop = sprop2; + + /* Simply extend obj's property tree path with shape! */ + obj->extend(cx, shape); + + /* + * No method change check here because here we are adding a + * new property, not updating an existing slot's value that + * might contain a method of a branded shape. + */ + TRACE_2(SetPropHit, entry, shape); + obj->lockedSetSlot(slot, rval); + + /* + * Purge the property cache of the id we may have just + * shadowed in obj's scope and proto chains. + */ + js_PurgeScopeChain(cx, obj, shape->id); + break; } - - /* - * No method change check here because here we are - * adding a new property, not updating an existing - * slot's value that might contain a method of a - * branded scope. - */ - TRACE_2(SetPropHit, entry, sprop); - obj->lockedSetSlot(slot, rval); - - /* - * Purge the property cache of the id we may have just - * shadowed in obj's scope and proto chains. We do this - * after unlocking obj's scope to avoid lock nesting. - */ - js_PurgeScopeChain(cx, obj, sprop->id); - break; } PCMETER(cache->setpcmisses++); atom = NULL; } else if (!atom) { /* - * Slower property cache hit, fully confirmed by testForSet (in - * the slow path, via fullTest). + * Slower property cache hit, fully confirmed by testForSet (in the + * slow path, via fullTest). 
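Aside: the adding branch above takes its fast path only when the cached shape directly extends the object's last property and claims exactly the next free slot; it then bumps freeslot (or allocates), appends the shape to the lineage, and writes the value. A toy version of that sequence follows; every type and helper here is invented for the sketch.

    #include <cassert>
    #include <cstdint>
    #include <vector>

    struct ShapeNode {
        uint32_t         slot;
        const ShapeNode *previous;     // older shape in the lineage
    };

    struct ToyObject {
        const ShapeNode *lastProp = nullptr;   // youngest shape
        uint32_t         freeslot = 0;         // next unclaimed slot
        std::vector<int> slots;
    };

    // Fast add: legal only when 'shape' extends obj.lastProp and claims the
    // very next slot, mirroring the interpreter's guards above.
    static bool FastAddProperty(ToyObject &obj, const ShapeNode &shape, int value)
    {
        if (shape.previous != obj.lastProp || shape.slot != obj.freeslot)
            return false;                      // caller must take the slow path

        if (shape.slot >= obj.slots.size())
            obj.slots.resize(shape.slot + 1);  // ~ allocSlot
        ++obj.freeslot;

        obj.lastProp = &shape;                 // ~ obj->extend(cx, shape)
        obj.slots[shape.slot] = value;         // ~ lockedSetSlot
        return true;
    }

    int main()
    {
        ToyObject obj;
        ShapeNode first  = { 0, nullptr };
        ShapeNode second = { 1, &first };

        assert(FastAddProperty(obj, first, 10));
        assert(FastAddProperty(obj, second, 20));
        assert(!FastAddProperty(obj, second, 30));   // no longer at the frontier
        return 0;
    }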
*/ ASSERT_VALID_PROPERTY_CACHE_HIT(0, obj, obj2, entry); - JSScopeProperty *sprop = NULL; + const Shape *shape = NULL; if (obj == obj2) { - sprop = entry->vword.toSprop(); - JS_ASSERT(sprop->writable()); - JS_ASSERT(!obj2->scope()->sealed()); - NATIVE_SET(cx, obj, sprop, entry, &rval); + shape = entry->vword.toShape(); + JS_ASSERT(shape->writable()); + JS_ASSERT(!obj2->sealed()); + NATIVE_SET(cx, obj, shape, entry, &rval); } - if (sprop) + if (shape) break; } @@ -4733,7 +4705,7 @@ BEGIN_CASE(JSOP_CALLNAME) { JSObject *obj = fp->getScopeChain(); - JSScopeProperty *sprop; + const Shape *shape; Value rval; PropertyCacheEntry *entry; @@ -4749,13 +4721,13 @@ BEGIN_CASE(JSOP_CALLNAME) if (entry->vword.isSlot()) { uintN slot = entry->vword.toSlot(); - JS_ASSERT(slot < obj2->scope()->freeslot); + JS_ASSERT(slot < obj2->freeslot); PUSH_COPY(obj2->lockedGetSlot(slot)); goto do_push_obj_if_call; } - JS_ASSERT(entry->vword.isSprop()); - sprop = entry->vword.toSprop(); + JS_ASSERT(entry->vword.isShape()); + shape = entry->vword.toShape(); goto do_native_get; } @@ -4782,9 +4754,9 @@ BEGIN_CASE(JSOP_CALLNAME) if (!obj->getProperty(cx, id, &rval)) goto error; } else { - sprop = (JSScopeProperty *)prop; + shape = (Shape *)prop; do_native_get: - NATIVE_GET(cx, obj, obj2, sprop, JSGET_METHOD_BARRIER, &rval); + NATIVE_GET(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, &rval); JS_UNLOCK_OBJ(cx, obj2); } @@ -5181,8 +5153,7 @@ BEGIN_CASE(JSOP_CALLUPVAR_DBG) jsid id; JSAtom *atom; { - void *mark = JS_ARENA_MARK(&cx->tempPool); - jsuword *names = js_GetLocalNameArray(cx, fun, &cx->tempPool); + AutoLocalNameArray names(cx, fun); if (!names) goto error; @@ -5190,9 +5161,7 @@ BEGIN_CASE(JSOP_CALLUPVAR_DBG) atom = JS_LOCAL_NAME_TO_ATOM(names[index]); id = ATOM_TO_JSID(atom); - JSBool ok = js_FindProperty(cx, id, &obj, &obj2, &prop); - JS_ARENA_RELEASE(&cx->tempPool, mark); - if (!ok) + if (!js_FindProperty(cx, id, &obj, &obj2, &prop)) goto error; } @@ -5213,24 +5182,19 @@ BEGIN_CASE(JSOP_CALLUPVAR_DBG) } END_CASE(JSOP_GETUPVAR_DBG) -BEGIN_CASE(JSOP_GETDSLOT) -BEGIN_CASE(JSOP_CALLDSLOT) +BEGIN_CASE(JSOP_GETFCSLOT) +BEGIN_CASE(JSOP_CALLFCSLOT) { JS_ASSERT(fp->argv); - JSObject *obj = &fp->argv[-2].toObject(); - JS_ASSERT(obj); - JS_ASSERT(obj->dslots); - uintN index = GET_UINT16(regs.pc); - JS_ASSERT(JS_INITIAL_NSLOTS + index < obj->dslots[-1].toPrivateUint32()); - JS_ASSERT_IF(obj->scope()->object == obj, - JS_INITIAL_NSLOTS + index < obj->scope()->freeslot); + JSObject *obj = &fp->argv[-2].toObject(); - PUSH_COPY(obj->dslots[index]); - if (op == JSOP_CALLDSLOT) + JS_ASSERT(index < obj->getFunctionPrivate()->u.i.nupvars); + PUSH_COPY(obj->getFlatClosureUpvar(index)); + if (op == JSOP_CALLFCSLOT) PUSH_NULL(); } -END_CASE(JSOP_GETDSLOT) +END_CASE(JSOP_GETFCSLOT) BEGIN_CASE(JSOP_GETGVAR) BEGIN_CASE(JSOP_CALLGVAR) @@ -5286,13 +5250,12 @@ BEGIN_CASE(JSOP_SETGVAR) } else { uint32 slot = (uint32)lref.toInt32(); JS_LOCK_OBJ(cx, obj); - JSScope *scope = obj->scope(); - if (!scope->methodWriteBarrier(cx, slot, rref)) { - JS_UNLOCK_SCOPE(cx, scope); + if (!obj->methodWriteBarrier(cx, slot, rref)) { + JS_UNLOCK_OBJ(cx, obj); goto error; } obj->lockedSetSlot(slot, rref); - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, obj); } } END_SET_CASE(JSOP_SETGVAR) @@ -5352,18 +5315,18 @@ BEGIN_CASE(JSOP_DEFVAR) index < GlobalVarCount(fp) && obj2 == obj && obj->isNative()) { - JSScopeProperty *sprop = (JSScopeProperty *) prop; - if (!sprop->configurable() && - SPROP_HAS_VALID_SLOT(sprop, obj->scope()) && - sprop->hasDefaultGetterOrIsMethod() 
&& - sprop->hasDefaultSetter()) { + const Shape *shape = (Shape *) prop; + if (!shape->configurable() && + obj->containsSlot(shape->slot) && + shape->hasDefaultGetterOrIsMethod() && + shape->hasDefaultSetter()) { /* * Fast globals use frame variables to map the global name's atom * index to the permanent varobj slot number, tagged as a jsval. * The atom index for the global's name literal is identical to its * variable index. */ - fp->slots()[index].setInt32(sprop->slot); + fp->slots()[index].setInt32(shape->slot); } } @@ -5478,8 +5441,8 @@ BEGIN_CASE(JSOP_DEFFUN) JS_ASSERT_IF(doSet, fp->flags & JSFRAME_EVAL); if (prop) { if (parent == pobj && - parent->getClass() == &js_CallClass && - (old = ((JSScopeProperty *) prop)->attributes(), + parent->isCall() && + (old = ((Shape *) prop)->attributes(), !(old & (JSPROP_GETTER|JSPROP_SETTER)) && (old & (JSPROP_ENUMERATE|JSPROP_PERMANENT)) == attrs)) { /* @@ -5641,7 +5604,7 @@ BEGIN_CASE(JSOP_LAMBDA) JS_ASSERT(lref.isObject()); JSObject *obj2 = &lref.toObject(); JS_ASSERT(obj2->getClass() == &js_ObjectClass); - JS_ASSERT(obj2->scope()->object == obj2); + JS_ASSERT(obj2->freeslot >= JSSLOT_FREE(&js_ObjectClass)); #endif fun->setMethodAtom(script->getAtom(GET_FULL_INDEX(JSOP_LAMBDA_LENGTH))); @@ -5911,23 +5874,6 @@ BEGIN_CASE(JSOP_NEWINIT) obj = NewBuiltinClassInstance(cx, &js_ObjectClass); if (!obj) goto error; - - if (regs.pc[JSOP_NEWINIT_LENGTH] != JSOP_ENDINIT) { - JS_LOCK_OBJ(cx, obj); - JSScope *scope = js_GetMutableScope(cx, obj); - if (!scope) { - JS_UNLOCK_OBJ(cx, obj); - goto error; - } - - /* - * We cannot assume that js_GetMutableScope above creates a scope - * owned by cx and skip JS_UNLOCK_SCOPE. A new object debugger - * hook may add properties to the newly created object, suspend - * the current request and share the object with other threads. - */ - JS_UNLOCK_SCOPE(cx, scope); - } } PUSH_OBJECT(*obj); @@ -5954,8 +5900,6 @@ BEGIN_CASE(JSOP_INITMETHOD) JSObject *obj = ®s.sp[-2].toObject(); JS_ASSERT(obj->isNative()); - JSScope *scope = obj->scope(); - /* * Probe the property cache. * @@ -5963,50 +5907,43 @@ BEGIN_CASE(JSOP_INITMETHOD) * single-threaded as the debugger can access it from other threads. * So check first. * - * On a hit, if the cached sprop has a non-default setter, it must be - * __proto__. If sprop->parent != scope->lastProperty(), there is a + * On a hit, if the cached shape has a non-default setter, it must be + * __proto__. If shape->previous() != obj->lastProperty(), there must be a * repeated property name. The fast path does not handle these two cases. */ PropertyCacheEntry *entry; - JSScopeProperty *sprop; + const Shape *shape; if (CX_OWNS_OBJECT_TITLE(cx, obj) && - JS_PROPERTY_CACHE(cx).testForInit(rt, regs.pc, obj, scope, &sprop, &entry) && - sprop->hasDefaultSetter() && - sprop->parent == scope->lastProperty()) + JS_PROPERTY_CACHE(cx).testForInit(rt, regs.pc, obj, &shape, &entry) && + shape->hasDefaultSetter() && + shape->previous() == obj->lastProperty()) { /* Fast path. Property cache hit. 
*/ - uint32 slot = sprop->slot; - JS_ASSERT(slot == scope->freeslot); + uint32 slot = shape->slot; + + JS_ASSERT(slot == obj->freeslot); + JS_ASSERT(slot >= JSSLOT_FREE(obj->getClass())); if (slot < obj->numSlots()) { - ++scope->freeslot; + JS_ASSERT(obj->getSlot(slot).isUndefined()); + ++obj->freeslot; + JS_ASSERT(obj->freeslot != 0); } else { - if (!js_AllocSlot(cx, obj, &slot)) + if (!obj->allocSlot(cx, &slot)) goto error; - JS_ASSERT(slot == sprop->slot); + JS_ASSERT(slot == shape->slot); } - JS_ASSERT(!scope->lastProperty() || - scope->shape == scope->lastProperty()->shape); - if (scope->table) { - JSScopeProperty *sprop2 = - scope->addProperty(cx, sprop->id, sprop->getter(), sprop->setter(), slot, - sprop->attributes(), sprop->getFlags(), sprop->shortid); - if (!sprop2) { - js_FreeSlot(cx, obj, slot); - goto error; - } - JS_ASSERT(sprop2 == sprop); - } else { - JS_ASSERT(!scope->isSharedEmpty()); - scope->extend(cx, sprop); - } + /* A new object, or one we just extended in a recent initprop op. */ + JS_ASSERT(!obj->lastProperty() || + obj->shape() == obj->lastProperty()->shape); + obj->extend(cx, shape); /* * No method change check here because here we are adding a new * property, not updating an existing slot's value that might - * contain a method of a branded scope. + * contain a method of a branded shape. */ - TRACE_2(SetPropHit, entry, sprop); + TRACE_2(SetPropHit, entry, shape); obj->lockedSetSlot(slot, rval); } else { PCMETER(JS_PROPERTY_CACHE(cx).inipcmisses++); diff --git a/js/src/jsinterp.h b/js/src/jsinterp.h index 89682df2a39a..25329fa9e9c9 100644 --- a/js/src/jsinterp.h +++ b/js/src/jsinterp.h @@ -469,7 +469,7 @@ struct JSStackFrame /* * Fallible getter to compute the correct callee function object, which may - * require deferred cloning due to JSScope::methodReadBarrier. For a frame + * require deferred cloning due to JSObject::methodReadBarrier. For a frame * with null fun member, return true with *vp set from this->calleeValue(), * which may not be an object (it could be undefined). */ diff --git a/js/src/jsiter.cpp b/js/src/jsiter.cpp index 607253a9805a..127eef767b14 100644 --- a/js/src/jsiter.cpp +++ b/js/src/jsiter.cpp @@ -256,12 +256,13 @@ EnumerateNativeProperties(JSContext *cx, JSObject *obj, JSObject *pobj, uintN fl size_t initialLength = props.length(); /* Collect all unique properties from this object's scope. 
*/ - JSScope *scope = pobj->scope(); - for (JSScopeProperty *sprop = scope->lastProperty(); sprop; sprop = sprop->parent) { - if (!JSID_IS_DEFAULT_XML_NAMESPACE(sprop->id) && - !sprop->isAlias() && - !Enumerate(cx, obj, pobj, sprop->id, sprop->enumerable(), sprop->isSharedPermanent(), - flags, ht, props)) + for (Shape::Range r = pobj->lastProperty()->all(); !r.empty(); r.popFront()) { + const Shape &shape = r.front(); + + if (!JSID_IS_DEFAULT_XML_NAMESPACE(shape.id) && + !shape.isAlias() && + !Enumerate(cx, obj, pobj, shape.id, shape.enumerable(), + shape.isSharedPermanent(), flags, ht, props)) { return false; } @@ -269,7 +270,7 @@ EnumerateNativeProperties(JSContext *cx, JSObject *obj, JSObject *pobj, uintN fl Reverse(props.begin() + initialLength, props.end()); - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, pobj); return true; } @@ -455,8 +456,8 @@ NewIteratorObject(JSContext *cx, uintN flags) JSObject *obj = js_NewGCObject(cx); if (!obj) return false; - obj->map = cx->runtime->emptyEnumeratorScope->hold(); - obj->init(&js_IteratorClass, NULL, NULL, NullValue()); + obj->init(&js_IteratorClass, NULL, NULL, NullValue(), cx); + obj->setMap(cx->runtime->emptyEnumeratorShape); return obj; } @@ -853,7 +854,7 @@ js_SuppressDeletedProperty(JSContext *cx, JSObject *obj, jsid id) if (prop) { uintN attrs; if (obj2.object()->isNative()) { - attrs = ((JSScopeProperty *) prop)->attributes(); + attrs = ((Shape *) prop)->attributes(); JS_UNLOCK_OBJ(cx, obj2.object()); } else if (!obj2.object()->getAttributes(cx, id, &attrs)) { return false; diff --git a/js/src/jslock.cpp b/js/src/jslock.cpp index 9457a9291810..1bef20bf8b4d 100644 --- a/js/src/jslock.cpp +++ b/js/src/jslock.cpp @@ -494,10 +494,9 @@ FinishSharingTitle(JSContext *cx, JSTitle *title) js_InitLock(&title->lock); title->u.count = 0; /* NULL may not pun as 0 */ - JSScope *scope = TITLE_TO_SCOPE(title); - JSObject *obj = scope->object; + JSObject *obj = TITLE_TO_OBJECT(title); if (obj) { - uint32 nslots = scope->freeslot; + uint32 nslots = obj->freeslot; JS_ASSERT(nslots >= JSSLOT_START(obj->getClass())); for (uint32 i = JSSLOT_START(obj->getClass()); i != nslots; ++i) { Value v = obj->getSlot(i); @@ -542,9 +541,8 @@ ClaimTitle(JSTitle *title, JSContext *cx) * has the same thread as cx, or cx->thread runs the GC (in which case * all other requests must be suspended), or ownercx->thread runs a GC * and the GC waits for all requests to finish. Set title->ownercx to - * cx so that the matching JS_UNLOCK_SCOPE or JS_UNLOCK_OBJ macro call - * will take the fast path around the corresponding js_UnlockTitle or - * js_UnlockObj function call. + * cx so that the matching JS_UNLOCK_OBJ macro call will take the fast + * path around the corresponding js_UnlockObj function call. * * If title->u.link is non-null, title has already been inserted on * the rt->titleSharingTodo list, because another thread's context @@ -653,8 +651,6 @@ JS_FRIEND_API(jsval) js_GetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot) { jsval v; - JSScope *scope; - JSTitle *title; #ifndef NSPR_LOCK JSThinLock *tl; jsword me; @@ -666,51 +662,44 @@ js_GetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot) * Native object locking is inlined here to optimize the single-threaded * and contention-free multi-threaded cases. */ - scope = obj->scope(); - title = &scope->title; - JS_ASSERT(title->ownercx != cx); - JS_ASSERT(slot < scope->freeslot); + JS_ASSERT(obj->title.ownercx != cx); + JS_ASSERT(slot < obj->freeslot); /* - * Avoid locking if called from the GC. 
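Aside: EnumerateNativeProperties above visits shapes from the youngest (most recently added) property back to the oldest, then reverses the collected ids so callers observe definition order. A small standalone sketch of that walk-then-reverse pattern, with toy types rather than the engine's:

    #include <algorithm>
    #include <cstdio>
    #include <string>
    #include <vector>

    struct ShapeNode {
        std::string      id;
        const ShapeNode *parent;     // older property
    };

    // Collect ids youngest-to-oldest, then reverse to recover insertion order,
    // mirroring the Shape::Range loop plus Reverse() above.
    static std::vector<std::string> OwnPropertyIds(const ShapeNode *last)
    {
        std::vector<std::string> ids;
        for (const ShapeNode *s = last; s; s = s->parent)
            ids.push_back(s->id);
        std::reverse(ids.begin(), ids.end());
        return ids;
    }

    int main()
    {
        ShapeNode x = { "x", nullptr };
        ShapeNode y = { "y", &x };
        ShapeNode z = { "z", &y };

        for (const std::string &id : OwnPropertyIds(&z))
            std::printf("%s ", id.c_str());   // prints: x y z
        std::printf("\n");
        return 0;
    }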
Also avoid locking an object - * owning a sealed scope. If neither of those special cases applies, try - * to claim scope's flyweight lock from whatever context may have had it in - * an earlier request. + * Avoid locking if called from the GC. Also avoid locking a sealed + * object. If neither of those special cases applies, try to claim obj's + * flyweight lock from whatever context may have had it in an earlier + * request. */ if (CX_THREAD_IS_RUNNING_GC(cx) || - scope->sealed() || - (title->ownercx && ClaimTitle(title, cx))) { + obj->sealed() || + (obj->title.ownercx && ClaimTitle(&obj->title, cx))) { return Jsvalify(obj->getSlot(slot)); } #ifndef NSPR_LOCK - tl = &title->lock; + tl = &obj->title.lock; me = CX_THINLOCK_ID(cx); JS_ASSERT(CURRENT_THREAD_IS_ME(me)); if (NativeCompareAndSwap(&tl->owner, 0, me)) { /* - * Got the lock with one compare-and-swap. Even so, someone else may - * have mutated obj so it now has its own scope and lock, which would + * Got the lock with one compare-and-swap. Even so, someone else may + * have mutated obj so it now has its own title lock, which would * require either a restart from the top of this routine, or a thin * lock release followed by fat lock acquisition. */ - if (scope == obj->scope()) { - v = Jsvalify(obj->getSlot(slot)); - if (!NativeCompareAndSwap(&tl->owner, me, 0)) { - /* Assert that scope locks never revert to flyweight. */ - JS_ASSERT(title->ownercx != cx); - LOGIT(title, '1'); - title->u.count = 1; - js_UnlockObj(cx, obj); - } - return v; + v = Jsvalify(obj->getSlot(slot)); + if (!NativeCompareAndSwap(&tl->owner, me, 0)) { + /* Assert that title locks never revert to flyweight. */ + JS_ASSERT(obj->title.ownercx != cx); + LOGIT(obj->title, '1'); + obj->title.u.count = 1; + js_UnlockObj(cx, obj); } - if (!NativeCompareAndSwap(&tl->owner, me, 0)) - js_Dequeue(tl); + return v; } - else if (Thin_RemoveWait(ReadWord(tl->owner)) == me) { + if (Thin_RemoveWait(ReadWord(tl->owner)) == me) return Jsvalify(obj->getSlot(slot)); - } #endif js_LockObj(cx, obj); @@ -725,17 +714,14 @@ js_GetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot) * object's scope (whose lock was not flyweight, else we wouldn't be here * in the first place!). */ - title = &obj->scope()->title; - if (title->ownercx != cx) - js_UnlockTitle(cx, title); + if (obj->title.ownercx != cx) + js_UnlockTitle(cx, &obj->title); return v; } void js_SetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot, jsval v) { - JSTitle *title; - JSScope *scope; #ifndef NSPR_LOCK JSThinLock *tl; jsword me; @@ -754,43 +740,38 @@ js_SetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot, jsval v) * Native object locking is inlined here to optimize the single-threaded * and contention-free multi-threaded cases. */ - scope = obj->scope(); - title = &scope->title; - JS_ASSERT(title->ownercx != cx); - JS_ASSERT(slot < scope->freeslot); + JS_ASSERT(obj->title.ownercx != cx); + JS_ASSERT(slot < obj->freeslot); /* - * Avoid locking if called from the GC. Also avoid locking an object - * owning a sealed scope. If neither of those special cases applies, try - * to claim scope's flyweight lock from whatever context may have had it in - * an earlier request. + * Avoid locking if called from the GC. Also avoid locking a sealed + * object. If neither of those special cases applies, try to claim obj's + * flyweight lock from whatever context may have had it in an earlier + * request. 
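Aside: js_GetSlotThreadSafe and js_SetSlotThreadSafe above first try to claim the per-object thin lock with a single compare-and-swap on its owner word and only fall back to the heavier title-locking path on contention. The following is a simplified sketch of that acquire/release using std::atomic; the contended paths here just spin or print, whereas the real code records waiters, dequeues them, and can inflate to an OS lock.

    #include <atomic>
    #include <cstdint>
    #include <cstdio>
    #include <thread>

    struct ThinLock {
        std::atomic<uintptr_t> owner{0};   // 0 == unlocked, else owning thread id
    };

    // Fast path: one CAS from 0 to 'me'. The engine's slow path parks the
    // waiter instead of spinning.
    static void ThinLockAcquire(ThinLock &tl, uintptr_t me)
    {
        uintptr_t expected = 0;
        while (!tl.owner.compare_exchange_weak(expected, me,
                                               std::memory_order_acquire,
                                               std::memory_order_relaxed)) {
            expected = 0;
            std::this_thread::yield();
        }
    }

    static void ThinLockRelease(ThinLock &tl, uintptr_t me)
    {
        uintptr_t expected = me;
        // In the engine, a failed release CAS means a waiter arrived and the
        // slow unlock path (js_Dequeue) must run; here it cannot happen.
        if (!tl.owner.compare_exchange_strong(expected, 0, std::memory_order_release))
            std::printf("contended release: slow path needed\n");
    }

    int main()
    {
        ThinLock lock;
        int counter = 0;
        auto work = [&](uintptr_t id) {
            for (int i = 0; i < 100000; i++) {
                ThinLockAcquire(lock, id);
                ++counter;
                ThinLockRelease(lock, id);
            }
        };
        std::thread t1(work, 1), t2(work, 2);
        t1.join();
        t2.join();
        std::printf("%d\n", counter);   // 200000
        return 0;
    }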
*/ if (CX_THREAD_IS_RUNNING_GC(cx) || - scope->sealed() || - (title->ownercx && ClaimTitle(title, cx))) { + obj->sealed() || + (obj->title.ownercx && ClaimTitle(&obj->title, cx))) { obj->lockedSetSlot(slot, Valueify(v)); return; } #ifndef NSPR_LOCK - tl = &title->lock; + tl = &obj->title.lock; me = CX_THINLOCK_ID(cx); JS_ASSERT(CURRENT_THREAD_IS_ME(me)); if (NativeCompareAndSwap(&tl->owner, 0, me)) { - if (scope == obj->scope()) { - obj->lockedSetSlot(slot, Valueify(v)); - if (!NativeCompareAndSwap(&tl->owner, me, 0)) { - /* Assert that scope locks never revert to flyweight. */ - JS_ASSERT(title->ownercx != cx); - LOGIT(title, '1'); - title->u.count = 1; - js_UnlockObj(cx, obj); - } - return; + obj->lockedSetSlot(slot, Valueify(v)); + if (!NativeCompareAndSwap(&tl->owner, me, 0)) { + /* Assert that scope locks never revert to flyweight. */ + JS_ASSERT(obj->title.ownercx != cx); + LOGIT(obj->title, '1'); + obj->title.u.count = 1; + js_UnlockObj(cx, obj); } - if (!NativeCompareAndSwap(&tl->owner, me, 0)) - js_Dequeue(tl); - } else if (Thin_RemoveWait(ReadWord(tl->owner)) == me) { + return; + } + if (Thin_RemoveWait(ReadWord(tl->owner)) == me) { obj->lockedSetSlot(slot, Valueify(v)); return; } @@ -802,9 +783,8 @@ js_SetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot, jsval v) /* * Same drill as above, in js_GetSlotThreadSafe. */ - title = &obj->scope()->title; - if (title->ownercx != cx) - js_UnlockTitle(cx, title); + if (obj->title.ownercx != cx) + js_UnlockTitle(cx, &obj->title); } #ifndef NSPR_LOCK @@ -1188,12 +1168,12 @@ js_LockTitle(JSContext *cx, JSTitle *title) if (Thin_RemoveWait(ReadWord(title->lock.owner)) == me) { JS_ASSERT(title->u.count > 0); - LOGIT(scope, '+'); + LOGIT(title, '+'); title->u.count++; } else { ThinLock(&title->lock, me); JS_ASSERT(title->u.count == 0); - LOGIT(scope, '1'); + LOGIT(title, '1'); title->u.count = 1; } } @@ -1242,84 +1222,32 @@ js_UnlockTitle(JSContext *cx, JSTitle *title) ThinUnlock(&title->lock, me); } -/* - * NB: oldtitle may be null if our caller is js_GetMutableScope and it just - * dropped the last reference to oldtitle. - */ -void -js_DropAllEmptyScopeLocks(JSContext *cx, JSScope *scope) -{ - JS_ASSERT(!CX_OWNS_SCOPE_TITLE(cx,scope)); - JS_ASSERT(scope->isSharedEmpty()); - JS_ASSERT(JS_IS_TITLE_LOCKED(cx, &scope->title)); - - /* - * Shared empty scope cannot be sealed so we do not need to deal with - * cx->lockedSealedTitle. - */ - JS_ASSERT(!scope->sealed()); - JS_ASSERT(cx->thread->lockedSealedTitle != &scope->title); - - /* - * Special case in js_LockTitle and js_UnlockTitle for the GC calling - * code that locks, unlocks, or mutates. Nothing to do in these cases, - * because title and newtitle were "locked" by the GC thread, so neither - * was actually locked. - */ - if (CX_THREAD_IS_RUNNING_GC(cx)) - return; - - /* - * The title cannot be owned at this point by another cx on this or - * another thread as that would imply a missing JS_LOCK_OBJ call. - */ - JS_ASSERT(!scope->title.ownercx); - - LOGIT(&scope->title, '0'); - scope->title.u.count = 0; - ThinUnlock(&scope->title.lock, CX_THINLOCK_ID(cx)); -} - void js_LockObj(JSContext *cx, JSObject *obj) { - JSScope *scope; - JSTitle *title; - JS_ASSERT(obj->isNative()); /* * We must test whether the GC is calling and return without mutating any - * state, especially cx->lockedSealedScope. Note asymmetry with respect to - * js_UnlockObj, which is a thin-layer on top of js_UnlockTitle. + * state, especially cx->thread->lockedSealedScope. 
Note asymmetry with + * respect to js_UnlockObj, which is a thin-layer on top of js_UnlockTitle. */ if (CX_THREAD_IS_RUNNING_GC(cx)) return; - for (;;) { - scope = obj->scope(); - title = &scope->title; - if (scope->sealed() && !cx->thread->lockedSealedTitle) { - cx->thread->lockedSealedTitle = title; - return; - } - - js_LockTitle(cx, title); - - /* If obj still has this scope, we're done. */ - if (scope == obj->scope()) - return; - - /* Lost a race with a mutator; retry with obj's new scope. */ - js_UnlockTitle(cx, title); + if (obj->sealed() && !cx->thread->lockedSealedTitle) { + cx->thread->lockedSealedTitle = &obj->title; + return; } + + js_LockTitle(cx, &obj->title); } void js_UnlockObj(JSContext *cx, JSObject *obj) { JS_ASSERT(obj->isNative()); - js_UnlockTitle(cx, &obj->scope()->title); + js_UnlockTitle(cx, &obj->title); } void @@ -1334,11 +1262,6 @@ js_InitTitle(JSContext *cx, JSTitle *title) * null pointer has a non-zero integer representation. */ title->u.link = NULL; - -#ifdef JS_DEBUG_TITLE_LOCKS - title->file[0] = title->file[1] = title->file[2] = title->file[3] = NULL; - title->line[0] = title->line[1] = title->line[2] = title->line[3] = 0; -#endif #endif } @@ -1368,7 +1291,7 @@ js_IsRuntimeLocked(JSRuntime *rt) JSBool js_IsObjLocked(JSContext *cx, JSObject *obj) { - return js_IsTitleLocked(cx, &obj->scope()->title); + return js_IsTitleLocked(cx, &obj->title); } JSBool @@ -1378,7 +1301,7 @@ js_IsTitleLocked(JSContext *cx, JSTitle *title) if (CX_THREAD_IS_RUNNING_GC(cx)) return JS_TRUE; - /* Special case: locked object owning a sealed scope, see js_LockObj. */ + /* Special case: locked object is sealed (ES5 frozen) -- see js_LockObj. */ if (cx->thread->lockedSealedTitle == title) return JS_TRUE; @@ -1395,19 +1318,5 @@ js_IsTitleLocked(JSContext *cx, JSTitle *title) ((JSThread *)Thin_RemoveWait(ReadWord(title->lock.owner)))->id; } -#ifdef JS_DEBUG_TITLE_LOCKS -void -js_SetScopeInfo(JSScope *scope, const char *file, int line) -{ - JSTitle *title = &scope->title; - if (!title->ownercx) { - jsrefcount count = title->u.count; - JS_ASSERT_IF(!scope->sealed(), count > 0); - JS_ASSERT(count <= 4); - title->file[count - 1] = file; - title->line[count - 1] = line; - } -} -#endif /* JS_DEBUG_TITLE_LOCKS */ #endif /* DEBUG */ #endif /* JS_THREADSAFE */ diff --git a/js/src/jslock.h b/js/src/jslock.h index da0e0657de37..124015341f0f 100644 --- a/js/src/jslock.h +++ b/js/src/jslock.h @@ -97,17 +97,13 @@ struct JSTitle { jsrefcount count; /* lock entry count for reentrancy */ JSTitle *link; /* next link in rt->titleSharingTodo */ } u; -#ifdef JS_DEBUG_TITLE_LOCKS - const char *file[4]; /* file where lock was (re-)taken */ - unsigned int line[4]; /* line where lock was (re-)taken */ -#endif }; /* - * Title structure is always allocated as a field of JSScope. + * Title structure is always allocated as a field of JSObject. */ -#define TITLE_TO_SCOPE(title) \ - ((JSScope *)((uint8 *) (title) - offsetof(JSScope, title))) +#define TITLE_TO_OBJECT(title) \ + ((JSObject *)((uint8 *) (title) - offsetof(JSObject, title))) /* * Atomic increment and decrement for a reference counter, given jsrefcount *p. 
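Aside: the TITLE_TO_OBJECT macro above recovers the owning JSObject from an embedded JSTitle by subtracting the member's offset, the classic container_of idiom. A standalone illustration with invented Title and Object types:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    struct Title { int count; };

    struct Object {
        int   dummy;
        Title title;      // embedded member, like JSObject::title
    };

    // Same arithmetic as TITLE_TO_OBJECT: member address minus member offset.
    static Object *TitleToObject(Title *t)
    {
        return reinterpret_cast<Object *>(
            reinterpret_cast<uint8_t *>(t) - offsetof(Object, title));
    }

    int main()
    {
        Object obj = { 1, { 0 } };
        assert(TitleToObject(&obj.title) == &obj);
        return 0;
    }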
@@ -131,16 +127,6 @@ struct JSTitle { #define JS_NOTIFY_CONDVAR(cv) PR_NotifyCondVar(cv) #define JS_NOTIFY_ALL_CONDVAR(cv) PR_NotifyAllCondVar(cv) -#ifdef JS_DEBUG_TITLE_LOCKS - -#define JS_SET_OBJ_INFO(obj_, file_, line_) \ - JS_SET_SCOPE_INFO((obj_)->scope(), file_, line_) - -#define JS_SET_SCOPE_INFO(scope_, file_, line_) \ - js_SetScopeInfo(scope_, file_, line_) - -#endif - #define JS_LOCK(cx, tl) js_Lock(cx, tl) #define JS_UNLOCK(cx, tl) js_Unlock(cx, tl) @@ -152,12 +138,12 @@ struct JSTitle { * (objects for which obj->isNative() returns true). All uses of these macros in * the engine are predicated on obj->isNative or equivalent checks. */ -#define CX_OWNS_SCOPE_TITLE(cx,scope) ((scope)->title.ownercx == (cx)) +#define CX_OWNS_OBJECT_TITLE(cx,obj) ((obj)->title.ownercx == (cx)) #define JS_LOCK_OBJ(cx,obj) \ JS_BEGIN_MACRO \ JSObject *obj_ = (obj); \ - if (!CX_OWNS_SCOPE_TITLE(cx, obj_->scope())) { \ + if (!CX_OWNS_OBJECT_TITLE(cx, obj_)) { \ js_LockObj(cx, obj_); \ JS_SET_OBJ_INFO(obj_, __FILE__, __LINE__); \ } \ @@ -166,7 +152,7 @@ struct JSTitle { #define JS_UNLOCK_OBJ(cx,obj) \ JS_BEGIN_MACRO \ JSObject *obj_ = (obj); \ - if (!CX_OWNS_SCOPE_TITLE(cx, obj_->scope())) \ + if (!CX_OWNS_OBJECT_TITLE(cx, obj_)) \ js_UnlockObj(cx, obj_); \ JS_END_MACRO @@ -178,16 +164,6 @@ struct JSTitle { #define JS_UNLOCK_TITLE(cx,title) ((title)->ownercx == (cx) ? (void)0 \ : js_UnlockTitle(cx, title)) -#define JS_LOCK_SCOPE(cx,scope) JS_LOCK_TITLE(cx,&(scope)->title) -#define JS_UNLOCK_SCOPE(cx,scope) JS_UNLOCK_TITLE(cx,&(scope)->title) - -#define JS_DROP_ALL_EMPTY_SCOPE_LOCKS(cx,scope) \ - JS_BEGIN_MACRO \ - JS_ASSERT((scope)->isSharedEmpty()); \ - if (!CX_OWNS_SCOPE_TITLE(cx, scope)) \ - js_DropAllEmptyScopeLocks(cx, scope); \ - JS_END_MACRO - extern void js_Lock(JSContext *cx, JSThinLock *tl); extern void js_Unlock(JSContext *cx, JSThinLock *tl); extern void js_LockRuntime(JSRuntime *rt); @@ -200,7 +176,6 @@ extern void js_LockTitle(JSContext *cx, JSTitle *title); extern void js_UnlockTitle(JSContext *cx, JSTitle *title); extern int js_SetupLocks(int,int); extern void js_CleanupLocks(); -extern void js_DropAllEmptyScopeLocks(JSContext *cx, JSScope *scope); extern JS_FRIEND_API(jsval) js_GetSlotThreadSafe(JSContext *, JSObject *, uint32); extern void js_SetSlotThreadSafe(JSContext *, JSObject *, uint32, jsval); @@ -222,9 +197,6 @@ js_ShareWaitingTitles(JSContext *cx); extern JSBool js_IsRuntimeLocked(JSRuntime *rt); extern JSBool js_IsObjLocked(JSContext *cx, JSObject *obj); extern JSBool js_IsTitleLocked(JSContext *cx, JSTitle *title); -#ifdef JS_DEBUG_TITLE_LOCKS -extern void js_SetScopeInfo(JSScope *scope, const char *file, int line); -#endif #else @@ -257,14 +229,10 @@ extern void js_SetScopeInfo(JSScope *scope, const char *file, int line); #define JS_LOCK_RUNTIME(rt) ((void)0) #define JS_UNLOCK_RUNTIME(rt) ((void)0) -#define CX_OWNS_SCOPE_TITLE(cx,obj) true #define JS_LOCK_OBJ(cx,obj) ((void)0) #define JS_UNLOCK_OBJ(cx,obj) ((void)0) -#define JS_LOCK_SCOPE(cx,scope) ((void)0) -#define JS_UNLOCK_SCOPE(cx,scope) ((void)0) -#define JS_DROP_ALL_EMPTY_SCOPE_LOCKS(cx,scope) ((void)0) - +#define CX_OWNS_OBJECT_TITLE(cx,obj) 1 #define JS_IS_RUNTIME_LOCKED(rt) 1 #define JS_IS_OBJ_LOCKED(cx,obj) 1 #define JS_IS_TITLE_LOCKED(cx,title) 1 @@ -286,8 +254,6 @@ extern void js_SetScopeInfo(JSScope *scope, const char *file, int line); JS_NO_TIMEOUT) #define JS_NOTIFY_REQUEST_DONE(rt) JS_NOTIFY_CONDVAR((rt)->requestDone) -#define CX_OWNS_OBJECT_TITLE(cx,obj) CX_OWNS_SCOPE_TITLE(cx, (obj)->scope()) - #ifndef 
JS_SET_OBJ_INFO #define JS_SET_OBJ_INFO(obj,f,l) ((void)0) #endif diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 19e79a59c386..03ef95b50f0f 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -232,13 +232,13 @@ MarkSharpObjects(JSContext *cx, JSObject *obj, JSIdArray **idap) AutoValueRooter v(cx); AutoValueRooter setter(cx); if (obj2->isNative()) { - JSScopeProperty *sprop = (JSScopeProperty *) prop; - hasGetter = sprop->hasGetterValue(); - hasSetter = sprop->hasSetterValue(); + const Shape *shape = (Shape *) prop; + hasGetter = shape->hasGetterValue(); + hasSetter = shape->hasSetterValue(); if (hasGetter) - v.set(sprop->getterValue()); + v.set(shape->getterValue()); if (hasSetter) - setter.set(sprop->setterValue()); + setter.set(shape->setterValue()); JS_UNLOCK_OBJ(cx, obj2); } else { hasGetter = hasSetter = false; @@ -576,17 +576,17 @@ obj_toSource(JSContext *cx, uintN argc, Value *vp) if (prop) { bool doGet = true; if (obj2->isNative()) { - JSScopeProperty *sprop = (JSScopeProperty *) prop; - unsigned attrs = sprop->attributes(); + const Shape *shape = (Shape *) prop; + unsigned attrs = shape->attributes(); if (attrs & JSPROP_GETTER) { doGet = false; - val[valcnt] = sprop->getterValue(); + val[valcnt] = shape->getterValue(); gsop[valcnt] = ATOM_TO_STRING(cx->runtime->atomState.getAtom); valcnt++; } if (attrs & JSPROP_SETTER) { doGet = false; - val[valcnt] = sprop->setterValue(); + val[valcnt] = shape->setterValue(); gsop[valcnt] = ATOM_TO_STRING(cx->runtime->atomState.setAtom); valcnt++; } @@ -1450,8 +1450,8 @@ js_HasOwnProperty(JSContext *cx, JSLookupPropOp lookup, JSObject *obj, jsid id, * the property, there is no way to tell whether it is directly * owned, or indirectly delegated. */ - JSScopeProperty *sprop = reinterpret_cast(*propp); - if (sprop->isSharedPermanent()) + Shape *shape = reinterpret_cast(*propp); + if (shape->isSharedPermanent()) return true; } @@ -1512,9 +1512,9 @@ js_PropertyIsEnumerable(JSContext *cx, JSObject *obj, jsid id, Value *vp) bool shared; uintN attrs; if (pobj->isNative()) { - JSScopeProperty *sprop = (JSScopeProperty *) prop; - shared = sprop->isSharedPermanent(); - attrs = sprop->attributes(); + Shape *shape = (Shape *) prop; + shared = shape->isSharedPermanent(); + attrs = shape->attributes(); JS_UNLOCK_OBJ(cx, pobj); } else { shared = false; @@ -1610,9 +1610,9 @@ obj_lookupGetter(JSContext *cx, uintN argc, Value *vp) vp->setUndefined(); if (prop) { if (pobj->isNative()) { - JSScopeProperty *sprop = (JSScopeProperty *) prop; - if (sprop->hasGetterValue()) - *vp = sprop->getterValue(); + Shape *shape = (Shape *) prop; + if (shape->hasGetterValue()) + *vp = shape->getterValue(); JS_UNLOCK_OBJ(cx, pobj); } } @@ -1633,9 +1633,9 @@ obj_lookupSetter(JSContext *cx, uintN argc, Value *vp) vp->setUndefined(); if (prop) { if (pobj->isNative()) { - JSScopeProperty *sprop = (JSScopeProperty *) prop; - if (sprop->hasSetterValue()) - *vp = sprop->setterValue(); + Shape *shape = (Shape *) prop; + if (shape->hasSetterValue()) + *vp = shape->setterValue(); JS_UNLOCK_OBJ(cx, pobj); } } @@ -1726,14 +1726,14 @@ js_GetOwnPropertyDescriptor(JSContext *cx, JSObject *obj, jsid id, Value *vp) unsigned attrs; bool doGet = true; if (pobj->isNative()) { - JSScopeProperty *sprop = (JSScopeProperty *) prop; - attrs = sprop->attributes(); + Shape *shape = (Shape *) prop; + attrs = shape->attributes(); if (attrs & (JSPROP_GETTER | JSPROP_SETTER)) { doGet = false; if (attrs & JSPROP_GETTER) - roots[0] = sprop->getterValue(); + roots[0] = shape->getterValue(); if (attrs & 
JSPROP_SETTER) - roots[1] = sprop->setterValue(); + roots[1] = shape->setterValue(); } JS_UNLOCK_OBJ(cx, pobj); } else if (!pobj->getAttributes(cx, id, &attrs)) { @@ -1997,9 +1997,8 @@ DefinePropertyOnObject(JSContext *cx, JSObject *obj, const PropDesc &desc, JS_ASSERT(!obj->getOps()->defineProperty); /* 8.12.9 steps 2-4. */ - JSScope *scope = obj->scope(); if (!current) { - if (scope->sealed()) + if (obj->sealed()) return Reject(cx, JSMSG_OBJECT_NOT_EXTENSIBLE, throwError, rval); *rval = true; @@ -2034,24 +2033,24 @@ DefinePropertyOnObject(JSContext *cx, JSObject *obj, const PropDesc &desc, * can be found on a different object. In that case the returned property * might not be native, except: the shared permanent property optimization * is not applied if the objects have different classes (bug 320854), as - * must be enforced by js_HasOwnProperty for the JSScopeProperty cast below - * to be safe. + * must be enforced by js_HasOwnProperty for the Shape cast below to be + * safe. */ JS_ASSERT(obj->getClass() == obj2->getClass()); - JSScopeProperty *sprop = reinterpret_cast(current); + const Shape *shape = reinterpret_cast(current); do { if (desc.isAccessorDescriptor()) { - if (!sprop->isAccessorDescriptor()) + if (!shape->isAccessorDescriptor()) break; if (desc.hasGet && - !SameValue(desc.getterValue(), sprop->getterOrUndefined(), cx)) { + !SameValue(desc.getterValue(), shape->getterOrUndefined(), cx)) { break; } if (desc.hasSet && - !SameValue(desc.setterValue(), sprop->setterOrUndefined(), cx)) { + !SameValue(desc.setterValue(), shape->setterOrUndefined(), cx)) { break; } } else { @@ -2062,7 +2061,7 @@ DefinePropertyOnObject(JSContext *cx, JSObject *obj, const PropDesc &desc, * avoid calling a getter; we won't need the value if it's not a * data descriptor. */ - if (sprop->isDataDescriptor()) { + if (shape->isDataDescriptor()) { /* * Non-standard: if the property is non-configurable and is * represented by a native getter or setter, don't permit @@ -2086,25 +2085,25 @@ DefinePropertyOnObject(JSContext *cx, JSObject *obj, const PropDesc &desc, * descriptor would be fine) and take a small memory hit, but * for now we'll simply forbid their redefinition. */ - if (!sprop->configurable() && - (!sprop->hasDefaultGetter() || !sprop->hasDefaultSetter())) { + if (!shape->configurable() && + (!shape->hasDefaultGetter() || !shape->hasDefaultSetter())) { return Reject(cx, obj2, current, JSMSG_CANT_REDEFINE_UNCONFIGURABLE_PROP, throwError, desc.id, rval); } - if (!js_NativeGet(cx, obj, obj2, sprop, JSGET_NO_METHOD_BARRIER, &v)) { + if (!js_NativeGet(cx, obj, obj2, shape, JSGET_NO_METHOD_BARRIER, &v)) { /* current was dropped when the failure occurred. */ return JS_FALSE; } } if (desc.isDataDescriptor()) { - if (!sprop->isDataDescriptor()) + if (!shape->isDataDescriptor()) break; if (desc.hasValue && !SameValue(desc.value, v, cx)) break; - if (desc.hasWritable && desc.writable() != sprop->writable()) + if (desc.hasWritable && desc.writable() != shape->writable()) break; } else { /* The only fields in desc will be handled below. */ @@ -2112,9 +2111,9 @@ DefinePropertyOnObject(JSContext *cx, JSObject *obj, const PropDesc &desc, } } - if (desc.hasConfigurable && desc.configurable() != sprop->configurable()) + if (desc.hasConfigurable && desc.configurable() != shape->configurable()) break; - if (desc.hasEnumerable && desc.enumerable() != sprop->enumerable()) + if (desc.hasEnumerable && desc.enumerable() != shape->enumerable()) break; /* The conditions imposed by step 5 or step 6 apply. 
*/ @@ -2124,7 +2123,7 @@ DefinePropertyOnObject(JSContext *cx, JSObject *obj, const PropDesc &desc, } while (0); /* 8.12.9 step 7. */ - if (!sprop->configurable()) { + if (!shape->configurable()) { /* * Since [[Configurable]] defaults to false, we don't need to check * whether it was specified. We can't do likewise for [[Enumerable]] @@ -2134,7 +2133,7 @@ DefinePropertyOnObject(JSContext *cx, JSObject *obj, const PropDesc &desc, */ JS_ASSERT_IF(!desc.hasConfigurable, !desc.configurable()); if (desc.configurable() || - (desc.hasEnumerable && desc.enumerable() != sprop->enumerable())) { + (desc.hasEnumerable && desc.enumerable() != shape->enumerable())) { return Reject(cx, obj2, current, JSMSG_CANT_REDEFINE_UNCONFIGURABLE_PROP, throwError, desc.id, rval); } @@ -2142,16 +2141,16 @@ DefinePropertyOnObject(JSContext *cx, JSObject *obj, const PropDesc &desc, if (desc.isGenericDescriptor()) { /* 8.12.9 step 8, no validation required */ - } else if (desc.isDataDescriptor() != sprop->isDataDescriptor()) { + } else if (desc.isDataDescriptor() != shape->isDataDescriptor()) { /* 8.12.9 step 9. */ - if (!sprop->configurable()) { + if (!shape->configurable()) { return Reject(cx, obj2, current, JSMSG_CANT_REDEFINE_UNCONFIGURABLE_PROP, throwError, desc.id, rval); } } else if (desc.isDataDescriptor()) { /* 8.12.9 step 10. */ - JS_ASSERT(sprop->isDataDescriptor()); - if (!sprop->configurable() && !sprop->writable()) { + JS_ASSERT(shape->isDataDescriptor()); + if (!shape->configurable() && !shape->writable()) { if ((desc.hasWritable && desc.writable()) || (desc.hasValue && !SameValue(desc.value, v, cx))) { return Reject(cx, obj2, current, JSMSG_CANT_REDEFINE_UNCONFIGURABLE_PROP, @@ -2160,12 +2159,12 @@ DefinePropertyOnObject(JSContext *cx, JSObject *obj, const PropDesc &desc, } } else { /* 8.12.9 step 11. 
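Aside: DefinePropertyOnObject merges the requested attributes into the existing ones through a mask of the fields the descriptor actually mentions: bits in 'changed' come from the descriptor, the rest are kept from the current shape. A tiny worked example of that merge; the attribute constants are invented for the sketch, not the real JSPROP_* values.

    #include <cstdint>
    #include <cstdio>

    enum : uint32_t {
        ATTR_ENUMERATE = 0x01,
        ATTR_READONLY  = 0x02,
        ATTR_PERMANENT = 0x04,
    };

    // attrs = (current & ~changed) | (requested & changed): only fields the
    // descriptor names (the 'changed' mask) are taken from it.
    static uint32_t MergeAttrs(uint32_t current, uint32_t requested, uint32_t changed)
    {
        return (current & ~changed) | (requested & changed);
    }

    int main()
    {
        uint32_t current   = ATTR_ENUMERATE | ATTR_PERMANENT;  // existing property
        uint32_t requested = ATTR_READONLY;                    // descriptor sets writable:false only
        uint32_t changed   = ATTR_READONLY;                    // ...so only that bit is "present"

        std::printf("0x%02x\n", MergeAttrs(current, requested, changed));
        // 0x07: enumerable and permanent kept, readonly added
        return 0;
    }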
*/ - JS_ASSERT(desc.isAccessorDescriptor() && sprop->isAccessorDescriptor()); - if (!sprop->configurable()) { + JS_ASSERT(desc.isAccessorDescriptor() && shape->isAccessorDescriptor()); + if (!shape->configurable()) { if ((desc.hasSet && - !SameValue(desc.setterValue(), sprop->setterOrUndefined(), cx)) || + !SameValue(desc.setterValue(), shape->setterOrUndefined(), cx)) || (desc.hasGet && - !SameValue(desc.getterValue(), sprop->getterOrUndefined(), cx))) { + !SameValue(desc.getterValue(), shape->getterOrUndefined(), cx))) { return Reject(cx, obj2, current, JSMSG_CANT_REDEFINE_UNCONFIGURABLE_PROP, throwError, desc.id, rval); } @@ -2182,13 +2181,13 @@ DefinePropertyOnObject(JSContext *cx, JSObject *obj, const PropDesc &desc, if (desc.hasEnumerable) changed |= JSPROP_ENUMERATE; - attrs = (sprop->attributes() & ~changed) | (desc.attrs & changed); - if (sprop->isMethod()) { + attrs = (shape->attributes() & ~changed) | (desc.attrs & changed); + if (shape->isMethod()) { JS_ASSERT(!(attrs & (JSPROP_GETTER | JSPROP_SETTER))); getter = setter = PropertyStub; } else { - getter = sprop->getter(); - setter = sprop->setter(); + getter = shape->getter(); + setter = shape->setter(); } } else if (desc.isDataDescriptor()) { uintN unchanged = 0; @@ -2201,7 +2200,7 @@ DefinePropertyOnObject(JSContext *cx, JSObject *obj, const PropDesc &desc, if (desc.hasValue) v = desc.value; - attrs = (desc.attrs & ~unchanged) | (sprop->attributes() & unchanged); + attrs = (desc.attrs & ~unchanged) | (shape->attributes() & unchanged); getter = setter = PropertyStub; } else { JS_ASSERT(desc.isAccessorDescriptor()); @@ -2216,7 +2215,7 @@ DefinePropertyOnObject(JSContext *cx, JSObject *obj, const PropDesc &desc, return JS_FALSE; } - JS_ASSERT_IF(sprop->isMethod(), !(attrs & (JSPROP_GETTER | JSPROP_SETTER))); + JS_ASSERT_IF(shape->isMethod(), !(attrs & (JSPROP_GETTER | JSPROP_SETTER))); /* 8.12.9 step 12. */ uintN changed = 0; @@ -2229,20 +2228,20 @@ DefinePropertyOnObject(JSContext *cx, JSObject *obj, const PropDesc &desc, if (desc.hasSet) changed |= JSPROP_SETTER | JSPROP_SHARED; - attrs = (desc.attrs & changed) | (sprop->attributes() & ~changed); + attrs = (desc.attrs & changed) | (shape->attributes() & ~changed); if (desc.hasGet) { getter = desc.getter(); } else { - getter = (sprop->isMethod() || (sprop->hasDefaultGetter() && !sprop->hasGetterValue())) + getter = (shape->isMethod() || (shape->hasDefaultGetter() && !shape->hasGetterValue())) ? PropertyStub - : sprop->getter(); + : shape->getter(); } if (desc.hasSet) { setter = desc.setter(); } else { - setter = (sprop->hasDefaultSetter() && !sprop->hasSetterValue()) + setter = (shape->hasDefaultSetter() && !shape->hasSetterValue()) ? 
PropertyStub - : sprop->setter(); + : shape->setter(); } } @@ -2597,7 +2596,7 @@ js_NewObjectWithClassProto(JSContext *cx, Class *clasp, JSObject *proto, if (!obj) return NULL; - obj->initSharingEmptyScope(clasp, proto, proto->getParent(), privateSlotValue); + obj->initSharingEmptyShape(clasp, proto, proto->getParent(), privateSlotValue, cx); return obj; } @@ -2605,6 +2604,7 @@ JSObject* FASTCALL js_Object_tn(JSContext* cx, JSObject* proto) { JS_ASSERT(!(js_ObjectClass.flags & JSCLASS_HAS_PRIVATE)); + return js_NewObjectWithClassProto(cx, &js_ObjectClass, proto, UndefinedValue()); } @@ -2616,22 +2616,9 @@ JSObject* FASTCALL js_NonEmptyObject(JSContext* cx, JSObject* proto) { JS_ASSERT(!(js_ObjectClass.flags & JSCLASS_HAS_PRIVATE)); - JSObject *obj = js_NewObjectWithClassProto(cx, &js_ObjectClass, proto, UndefinedValue()); - if (!obj) - return NULL; - JS_LOCK_OBJ(cx, obj); - JSScope *scope = js_GetMutableScope(cx, obj); - if (!scope) { - JS_UNLOCK_OBJ(cx, obj); - return NULL; - } - /* - * See comments in the JSOP_NEWINIT case of jsinterp.cpp why we cannot - * assume that cx owns the scope and skip the unlock call. - */ - JS_UNLOCK_SCOPE(cx, scope); - return obj; + JSObject *obj = js_NewObjectWithClassProto(cx, &js_ObjectClass, proto, UndefinedValue()); + return (obj && obj->ensureClassReservedSlotsForEmptyObject(cx)) ? obj : NULL; } JS_DEFINE_CALLINFO_2(extern, CONSTRUCTOR_RETRY, js_NonEmptyObject, CONTEXT, CALLEE_PROTOTYPE, 0, @@ -2642,22 +2629,17 @@ js_NewInstance(JSContext *cx, Class *clasp, JSObject *ctor) { JS_ASSERT(JS_ON_TRACE(cx)); JS_ASSERT(ctor->isFunction()); - - JSAtom *atom = cx->runtime->atomState.classPrototypeAtom; - - JSScope *scope = ctor->scope(); #ifdef JS_THREADSAFE - if (scope->title.ownercx != cx) + if (ctor->title.ownercx != cx) return NULL; #endif - if (scope->isSharedEmpty()) { - scope = js_GetMutableScope(cx, ctor); - if (!scope) - return NULL; - } - JSScopeProperty *sprop = scope->lookup(ATOM_TO_JSID(atom)); - Value pval = sprop ? ctor->getSlot(sprop->slot) : MagicValue(JS_GENERIC_MAGIC); + if (!ctor->ensureClassReservedSlots(cx)) + return NULL; + + jsid classPrototypeId = ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom); + const Shape *shape = ctor->nativeLookup(classPrototypeId); + Value pval = shape ? 
ctor->getSlot(shape->slot) : MagicValue(JS_GENERIC_MAGIC); JSObject *parent = ctor->getParent(); JSObject *proto; @@ -2915,17 +2897,19 @@ js_NewWithObject(JSContext *cx, JSObject *proto, JSObject *parent, jsint depth) obj = js_NewGCObject(cx); if (!obj) return NULL; + obj->init(&js_WithClass, proto, parent, - PrivateValue(js_FloatingFrameIfGenerator(cx, cx->fp()))); + PrivateValue(js_FloatingFrameIfGenerator(cx, cx->fp())), + cx); + obj->setMap(cx->runtime->emptyWithShape); OBJ_SET_BLOCK_DEPTH(cx, obj, depth); - obj->map = cx->runtime->emptyWithScope->hold(); AutoObjectRooter tvr(cx, obj); JSObject *thisp = proto->thisObject(cx); if (!thisp) return NULL; - obj->setWithThis(thisp); + obj->setWithThis(thisp); return obj; } @@ -2939,8 +2923,9 @@ js_NewBlockObject(JSContext *cx) JSObject *blockObj = js_NewGCObject(cx); if (!blockObj) return NULL; - blockObj->init(&js_BlockClass, NULL, NULL, NullValue()); - blockObj->map = cx->runtime->emptyBlockScope->hold(); + + blockObj->init(&js_BlockClass, NULL, NULL, NullValue(), cx); + blockObj->setMap(cx->runtime->emptyBlockShape); return blockObj; } @@ -2957,14 +2942,13 @@ js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp) Value privateValue = PrivateValue(js_FloatingFrameIfGenerator(cx, fp)); /* The caller sets parent on its own. */ - clone->init(&js_BlockClass, proto, NULL, privateValue); + clone->init(&js_BlockClass, proto, NULL, privateValue, cx); clone->fslots[JSSLOT_BLOCK_DEPTH] = proto->fslots[JSSLOT_BLOCK_DEPTH]; - JS_ASSERT(cx->runtime->emptyBlockScope->freeslot == JSSLOT_BLOCK_DEPTH + 1); - clone->map = cx->runtime->emptyBlockScope->hold(); + clone->setMap(cx->runtime->emptyBlockShape); JS_ASSERT(OBJ_IS_CLONED_BLOCK(clone)); - if (!js_EnsureReservedSlots(cx, clone, OBJ_BLOCK_COUNT(cx, proto))) + if (!clone->ensureInstanceReservedSlots(cx, OBJ_BLOCK_COUNT(cx, proto))) return NULL; return clone; } @@ -2982,12 +2966,12 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind) JS_ASSERT(OBJ_IS_CLONED_BLOCK(obj)); /* - * Block objects should never be exposed to scripts. Thus the clone should - * not own the property map and rather always share it with the prototype - * object. This allows us to skip updating obj->scope()->freeslot after - * we copy the stack slots into reserved slots. + * Block objects should never be exposed to scripts. Therefore the clone + * must not have "own" properties, rather it always delegates property + * accesses to its compiler-created prototype Block object, which is the + * object that has shapes mapping all the let bindings. */ - JS_ASSERT(obj->scope()->object != obj); + JS_ASSERT(obj->nativeEmpty()); /* Block objects should have all reserved slots allocated early. */ uintN count = OBJ_BLOCK_COUNT(cx, obj); @@ -2995,17 +2979,23 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind) /* The block and its locals must be on the current stack for GC safety. */ uintN depth = OBJ_BLOCK_DEPTH(cx, obj); - JS_ASSERT(depth <= (size_t) (cx->regs->sp - fp->base())); - JS_ASSERT(count <= (size_t) (cx->regs->sp - fp->base() - depth)); + JS_ASSERT(depth <= size_t(cx->regs->sp - fp->base())); + JS_ASSERT(count <= size_t(cx->regs->sp - fp->base() - depth)); /* See comments in CheckDestructuring from jsparse.cpp. 
*/ JS_ASSERT(count >= 1); - depth += fp->getFixedCount(); - obj->fslots[JSSLOT_BLOCK_DEPTH + 1] = fp->slots()[depth]; - if (normalUnwind && count > 1) { - --count; - memcpy(obj->dslots, fp->slots() + depth + 1, count * sizeof(Value)); + if (normalUnwind) { + uintN slot = JSSLOT_BLOCK_DEPTH + 1; + uintN flen = JS_MIN(count, JS_INITIAL_NSLOTS - slot); + uintN stop = slot + flen; + + depth += fp->getFixedCount(); + while (slot < stop) + obj->fslots[slot++] = fp->slots()[depth++]; + count -= flen; + if (count != 0) + memcpy(obj->dslots, fp->slots() + depth, count * sizeof(Value)); } /* We must clear the private slot even with errors. */ @@ -3082,7 +3072,7 @@ js_DefineBlockVariable(JSContext *cx, JSObject *obj, jsid id, intN index) block_getProperty, block_setProperty, JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_SHARED, - JSScopeProperty::HAS_SHORTID, index, NULL); + Shape::HAS_SHORTID, index, NULL); } static size_t @@ -3102,10 +3092,6 @@ GetObjectSize(JSObject *obj) void JSObject::swap(JSObject *other) { - /* For both objects determine whether they own their respective scopes. */ - bool thisOwns = this->isNative() && scope()->object == this; - bool otherOwns = other->isNative() && other->scope()->object == other; - size_t size = GetObjectSize(this); JS_ASSERT(size == GetObjectSize(other)); @@ -3114,12 +3100,6 @@ JSObject::swap(JSObject *other) memcpy(tmp, this, size); memcpy(this, other, size); memcpy(other, tmp, size); - - /* Fixup scope ownerships. */ - if (otherOwns) - scope()->object = this; - if (thisOwns) - other->scope()->object = other; } #if JS_HAS_XDR @@ -3151,7 +3131,7 @@ js_XDRBlockObject(JSXDRState *xdr, JSObject **objp) JSObject *obj, *parent; uint16 depth, count, i; uint32 tmp; - JSScopeProperty *sprop; + const Shape *shape; jsid propid; JSAtom *atom; int16 shortid; @@ -3214,21 +3194,21 @@ js_XDRBlockObject(JSXDRState *xdr, JSObject **objp) * properties to XDR, stored as id/shortid pairs. We do not XDR any * non-native properties, only those that the compiler created. */ - sprop = NULL; + shape = NULL; ok = JS_TRUE; for (i = 0; i < count; i++) { if (xdr->mode == JSXDR_ENCODE) { /* Find a property to XDR. */ do { - /* If sprop is NULL, this is the first property. */ - sprop = sprop ? sprop->parent : obj->scope()->lastProperty(); - } while (!sprop->hasShortID()); + /* If shape is NULL, this is the first property. */ + shape = shape ? 
shape->previous() : obj->lastProperty(); + } while (!shape->hasShortID()); - JS_ASSERT(sprop->getter() == block_getProperty); - propid = sprop->id; + JS_ASSERT(shape->getter() == block_getProperty); + propid = shape->id; JS_ASSERT(JSID_IS_ATOM(propid)); atom = JSID_TO_ATOM(propid); - shortid = sprop->shortid; + shortid = shape->shortid; JS_ASSERT(shortid >= 0); } @@ -3294,33 +3274,30 @@ DefineStandardSlot(JSContext *cx, JSObject *obj, JSProtoKey key, JSAtom *atom, JS_ASSERT(obj->isNative()); JS_LOCK_OBJ(cx, obj); - - JSScope *scope = js_GetMutableScope(cx, obj); - if (!scope) { + if (!obj->ensureClassReservedSlots(cx)) { JS_UNLOCK_OBJ(cx, obj); return false; } - JSScopeProperty *sprop = scope->lookup(id); - if (!sprop) { + const Shape *shape = obj->nativeLookup(id); + if (!shape) { uint32 index = 2 * JSProto_LIMIT + key; if (!js_SetReservedSlot(cx, obj, index, v)) { - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, obj); return false; } uint32 slot = JSSLOT_START(obj->getClass()) + index; - sprop = scope->addProperty(cx, id, PropertyStub, PropertyStub, - slot, attrs, 0, 0); + shape = obj->addProperty(cx, id, PropertyStub, PropertyStub, slot, attrs, 0, 0); - JS_UNLOCK_SCOPE(cx, scope); - if (!sprop) + JS_UNLOCK_OBJ(cx, obj); + if (!shape) return false; named = true; return true; } - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, obj); } named = obj->defineProperty(cx, id, v, PropertyStub, PropertyStub, attrs); @@ -3466,18 +3443,16 @@ js_InitClass(JSContext *cx, JSObject *obj, JSObject *parent_proto, } /* - * Make sure proto's scope's emptyScope is available to be shared by - * objects of this class. JSScope::emptyScope is a one-slot cache. If we - * omit this, some other class could snap it up. (The risk is particularly - * great for Object.prototype.) + * Make sure proto's emptyShape is available to be shared by objects of + * this class. JSObject::emptyShape is a one-slot cache. If we omit this, + * some other class could snap it up. (The risk is particularly great for + * Object.prototype.) * - * All callers of JSObject::initSharingEmptyScope depend on this. + * All callers of JSObject::initSharingEmptyShape depend on this. */ - JSScope *scope; bool ok; JS_LOCK_OBJ(cx, proto); - scope = js_GetMutableScope(cx, proto); - ok = scope && scope->ensureEmptyScope(cx, clasp); + ok = proto->getEmptyShape(cx, clasp); JS_UNLOCK_OBJ(cx, proto); if (!ok) goto bad; @@ -3529,7 +3504,13 @@ JSObject::growSlots(JSContext *cx, size_t nslots) /* If we are allocating fslots, there is nothing to do. */ if (nslots <= JS_INITIAL_NSLOTS) - return JS_TRUE; + return true; + + /* Don't let nslots (or JSObject::freeslot) get close to overflowing. */ + if (nslots >= JS_NSLOTS_LIMIT) { + JS_ReportOutOfMemory(cx); + return false; + } size_t nwords = slotsToDynamicWords(nslots); @@ -3559,9 +3540,9 @@ JSObject::growSlots(JSContext *cx, size_t nslots) Value *tmpdslots = (Value*) cx->realloc(dslots - 1, nwords * sizeof(Value)); if (!tmpdslots) - return false; /* Leave dslots at its old size. */ - dslots = tmpdslots; + return false; /* leave dslots at its old size */ + dslots = tmpdslots; dslots++; dslots[-1].setPrivateUint32(nslots); @@ -3591,32 +3572,29 @@ JSObject::shrinkSlots(JSContext *cx, size_t nslots) size_t nwords = slotsToDynamicWords(nslots); Value *tmpdslots = (Value*) cx->realloc(dslots - 1, nwords * sizeof(Value)); if (!tmpdslots) - return; /* Leave dslots at its old size. 
*/ - dslots = tmpdslots; + return; /* leave dslots at its old size */ + dslots = tmpdslots; dslots++; dslots[-1].setPrivateUint32(nslots); } } bool -js_EnsureReservedSlots(JSContext *cx, JSObject *obj, size_t nreserved) +JSObject::ensureInstanceReservedSlots(JSContext *cx, size_t nreserved) { - uintN nslots = JSSLOT_FREE(obj->getClass()) + nreserved; - if (nslots > obj->numSlots() && !obj->allocSlots(cx, nslots)) + JS_ASSERT_IF(isNative(), + isBlock() || isCall() || isFunction() && getFunctionPrivate()->isBound()); + JS_ASSERT_IF(isBlock(), nativeEmpty()); + + uintN nslots = JSSLOT_FREE(clasp) + nreserved; + if (nslots > numSlots() && !allocSlots(cx, nslots)) return false; - if (obj->isNative()) { - JSScope *scope = obj->scope(); - if (!scope->isSharedEmpty()) { -#ifdef JS_THREADSAFE - JS_ASSERT(scope->title.ownercx->thread == cx->thread); -#endif - JS_ASSERT(scope->freeslot == JSSLOT_FREE(obj->getClass())); - if (scope->freeslot < nslots) - scope->freeslot = nslots; - } - } + JS_ASSERT(freeslot >= JSSLOT_START(clasp)); + JS_ASSERT(freeslot <= JSSLOT_FREE(clasp)); + if (freeslot < nslots) + freeslot = nslots; return true; } @@ -3646,7 +3624,7 @@ SetProto(JSContext *cx, JSObject *obj, JSObject *proto, bool checkForCycles) if (obj->isNative()) { JS_LOCK_OBJ(cx, obj); - bool ok = !!js_GetMutableScope(cx, obj); + bool ok = obj->ensureClassReservedSlots(cx); JS_UNLOCK_OBJ(cx, obj); if (!ok) return false; @@ -3660,8 +3638,7 @@ SetProto(JSContext *cx, JSObject *obj, JSObject *proto, bool checkForCycles) JSObject *oldproto = obj; while (oldproto && oldproto->isNative()) { JS_LOCK_OBJ(cx, oldproto); - JSScope *scope = oldproto->scope(); - scope->protoShapeChange(cx); + oldproto->protoShapeChange(cx); JSObject *tmp = oldproto->getProto(); JS_UNLOCK_OBJ(cx, oldproto); oldproto = tmp; @@ -3750,7 +3727,7 @@ js_FindClassObject(JSContext *cx, JSObject *start, JSProtoKey protoKey, JSObject *obj, *cobj, *pobj; jsid id; JSProperty *prop; - JSScopeProperty *sprop; + const Shape *shape; /* * Find the global object. Use cx->fp() directly to avoid falling off @@ -3803,9 +3780,9 @@ js_FindClassObject(JSContext *cx, JSObject *start, JSProtoKey protoKey, } Value v = UndefinedValue(); if (prop && pobj->isNative()) { - sprop = (JSScopeProperty *) prop; - if (SPROP_HAS_VALID_SLOT(sprop, pobj->scope())) { - v = pobj->lockedGetSlot(sprop->slot); + shape = (Shape *) prop; + if (pobj->containsSlot(shape->slot)) { + v = pobj->lockedGetSlot(shape->slot); if (v.isPrimitive()) v.setUndefined(); } @@ -3885,33 +3862,30 @@ js_ConstructObject(JSContext *cx, Class *clasp, JSObject *proto, JSObject *paren * FIXME bug 535629: If one adds props, deletes earlier props, adds more, the * last added won't recycle the deleted props' slots. */ -JSBool -js_AllocSlot(JSContext *cx, JSObject *obj, uint32 *slotp) +bool +JSObject::allocSlot(JSContext *cx, uint32 *slotp) { - JSScope *scope = obj->scope(); - JS_ASSERT(scope->object == obj); + JS_ASSERT(freeslot >= JSSLOT_FREE(clasp)); - if (scope->freeslot >= obj->numSlots() && - !obj->growSlots(cx, scope->freeslot + 1)) { - return JS_FALSE; - } + if (freeslot >= numSlots() && !growSlots(cx, freeslot + 1)) + return false; - /* js_ReallocSlots or js_FreeSlot should set the free slots to void. */ - JS_ASSERT(obj->getSlot(scope->freeslot).isUndefined()); - *slotp = scope->freeslot++; - return JS_TRUE; + /* JSObject::growSlots or JSObject::freeSlot should set the free slots to void. 
*/ + JS_ASSERT(getSlot(freeslot).isUndefined()); + *slotp = freeslot++; + JS_ASSERT(freeslot != 0); + return true; } void -js_FreeSlot(JSContext *cx, JSObject *obj, uint32 slot) +JSObject::freeSlot(JSContext *cx, uint32 slot) { - JSScope *scope = obj->scope(); - JS_ASSERT(scope->object == obj); - obj->lockedSetSlot(slot, UndefinedValue()); - if (scope->freeslot == slot + 1) - scope->freeslot = slot; -} + JS_ASSERT(freeslot > JSSLOT_FREE(clasp)); + lockedSetSlot(slot, UndefinedValue()); + if (freeslot == slot + 1) + freeslot = slot; +} /* JSBOXEDWORD_INT_MAX as a string */ #define JSBOXEDWORD_INT_MAX_STRING "1073741823" @@ -3980,8 +3954,7 @@ js_CheckForStringIndex(jsid id) static JSBool PurgeProtoChain(JSContext *cx, JSObject *obj, jsid id) { - JSScope *scope; - JSScopeProperty *sprop; + const Shape *shape; while (obj) { if (!obj->isNative()) { @@ -3989,12 +3962,11 @@ PurgeProtoChain(JSContext *cx, JSObject *obj, jsid id) continue; } JS_LOCK_OBJ(cx, obj); - scope = obj->scope(); - sprop = scope->lookup(id); - if (sprop) { + shape = obj->nativeLookup(id); + if (shape) { PCMETER(JS_PROPERTY_CACHE(cx).pcpurges++); - scope->shadowingShapeChange(cx, sprop); - JS_UNLOCK_SCOPE(cx, scope); + obj->shadowingShapeChange(cx, *shape); + JS_UNLOCK_OBJ(cx, obj); if (!obj->getParent()) { /* @@ -4006,8 +3978,11 @@ PurgeProtoChain(JSContext *cx, JSObject *obj, jsid id) } return JS_TRUE; } +#ifdef JS_THREADSAFE + JSObject *pobj = obj; +#endif obj = obj->getProto(); - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, pobj); } return JS_FALSE; } @@ -4024,7 +3999,7 @@ js_PurgeScopeChainHelper(JSContext *cx, JSObject *obj, jsid id) * properties with the same names have been cached or traced. Call objects * may gain such properties via eval introducing new vars; see bug 490364. */ - if (obj->getClass() == &js_CallClass) { + if (obj->isCall()) { while ((obj = obj->getParent()) != NULL) { if (PurgeProtoChain(cx, obj, id)) break; @@ -4032,15 +4007,14 @@ js_PurgeScopeChainHelper(JSContext *cx, JSObject *obj, jsid id) } } -JSScopeProperty * +const Shape * js_AddNativeProperty(JSContext *cx, JSObject *obj, jsid id, PropertyOp getter, PropertyOp setter, uint32 slot, uintN attrs, uintN flags, intN shortid) { - JSScope *scope; - JSScopeProperty *sprop; + const Shape *shape; - JS_ASSERT(!(flags & JSScopeProperty::METHOD)); + JS_ASSERT(!(flags & Shape::METHOD)); /* * Purge the property cache of now-shadowed id in obj's scope chain. Do @@ -4050,34 +4024,28 @@ js_AddNativeProperty(JSContext *cx, JSObject *obj, jsid id, js_PurgeScopeChain(cx, obj, id); JS_LOCK_OBJ(cx, obj); - scope = js_GetMutableScope(cx, obj); - if (!scope) { - sprop = NULL; + if (!obj->ensureClassReservedSlots(cx)) { + shape = NULL; } else { /* Convert string indices to integers if appropriate. */ id = js_CheckForStringIndex(id); - sprop = scope->putProperty(cx, id, getter, setter, slot, attrs, flags, shortid); + shape = obj->putProperty(cx, id, getter, setter, slot, attrs, flags, shortid); } JS_UNLOCK_OBJ(cx, obj); - return sprop; + return shape; } -JSScopeProperty * +const Shape * js_ChangeNativePropertyAttrs(JSContext *cx, JSObject *obj, - JSScopeProperty *sprop, uintN attrs, uintN mask, + const Shape *shape, uintN attrs, uintN mask, PropertyOp getter, PropertyOp setter) { - JSScope *scope; - JS_LOCK_OBJ(cx, obj); - scope = js_GetMutableScope(cx, obj); - if (!scope) { - sprop = NULL; - } else { - sprop = scope->changeProperty(cx, sprop, attrs, mask, getter, setter); - } + shape = obj->ensureClassReservedSlots(cx) + ? 
obj->changeProperty(cx, shape, attrs, mask, getter, setter) + : NULL; JS_UNLOCK_OBJ(cx, obj); - return sprop; + return shape; } JSBool @@ -4095,17 +4063,16 @@ js_DefineProperty(JSContext *cx, JSObject *obj, jsid id, const Value *value, * both while saving cycles for classes that stub their addProperty hook. */ static inline bool -AddPropertyHelper(JSContext *cx, Class *clasp, JSObject *obj, JSScope *scope, - JSScopeProperty *sprop, Value *vp) +CallAddPropertyHook(JSContext *cx, Class *clasp, JSObject *obj, const Shape *shape, Value *vp) { if (clasp->addProperty != PropertyStub) { Value nominal = *vp; - if (!callJSPropertyOp(cx, clasp->addProperty, obj, SPROP_USERID(sprop), vp)) + if (!CallJSPropertyOp(cx, clasp->addProperty, obj, SHAPE_USERID(shape), vp)) return false; if (*vp != nominal) { - if (SPROP_HAS_VALID_SLOT(sprop, scope)) - obj->lockedSetSlot(sprop->slot, *vp); + if (obj->containsSlot(shape->slot)) + obj->lockedSetSlot(shape->slot, *vp); } } return true; @@ -4118,8 +4085,7 @@ js_DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, const Value &valu uintN defineHow /* = 0 */) { Class *clasp; - JSScope *scope; - JSScopeProperty *sprop; + const Shape *shape; JSBool added; Value valueCopy; @@ -4134,39 +4100,39 @@ js_DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, const Value &valu * update the attributes and property ops. A getter or setter is really * only half of a property. */ - sprop = NULL; + shape = NULL; if (attrs & (JSPROP_GETTER | JSPROP_SETTER)) { JSObject *pobj; JSProperty *prop; /* * If JS_THREADSAFE and id is found, js_LookupProperty returns with - * sprop non-null and pobj locked. If pobj == obj, the property is + * shape non-null and pobj locked. If pobj == obj, the property is * already in obj and obj has its own (mutable) scope. So if we are * defining a getter whose setter was already defined, or vice versa, - * finish the job via js_ChangeScopePropertyAttributes, and refresh - * the property cache line for (obj, id) to map sprop. + * finish the job via obj->changeProperty, and refresh the property + * cache line for (obj, id) to map shape. */ if (!js_LookupProperty(cx, obj, id, &pobj, &prop)) return JS_FALSE; - sprop = (JSScopeProperty *) prop; - if (sprop && pobj == obj && sprop->isAccessorDescriptor()) { - sprop = obj->scope()->changeProperty(cx, sprop, attrs, - JSPROP_GETTER | JSPROP_SETTER, - (attrs & JSPROP_GETTER) - ? getter - : sprop->getter(), - (attrs & JSPROP_SETTER) - ? setter - : sprop->setter()); + shape = (Shape *) prop; + if (shape && pobj == obj && shape->isAccessorDescriptor()) { + shape = obj->changeProperty(cx, shape, attrs, + JSPROP_GETTER | JSPROP_SETTER, + (attrs & JSPROP_GETTER) + ? getter + : shape->getter(), + (attrs & JSPROP_SETTER) + ? setter + : shape->setter()); /* NB: obj == pobj, so we can share unlock code at the bottom. */ - if (!sprop) + if (!shape) goto error; } else if (prop) { pobj->dropProperty(cx, prop); prop = NULL; - sprop = NULL; + shape = NULL; } } @@ -4199,12 +4165,11 @@ js_DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, const Value &valu } /* Get obj's own scope if it has one, or create a new one for obj. */ - scope = js_GetMutableScope(cx, obj); - if (!scope) + if (!obj->ensureClassReservedSlots(cx)) goto error; added = false; - if (!sprop) { + if (!shape) { /* Add a new property, or replace an existing one of the same id. 
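* putProperty below handles both cases; |added| records whether id was previously absent from obj.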
*/ if (defineHow & JSDNP_SET_METHOD) { JS_ASSERT(clasp == &js_ObjectClass); @@ -4214,36 +4179,36 @@ js_DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, const Value &valu JSObject *funobj = &value.toObject(); if (FUN_OBJECT(GET_FUNCTION_PRIVATE(cx, funobj)) == funobj) { - flags |= JSScopeProperty::METHOD; + flags |= Shape::METHOD; getter = CastAsPropertyOp(funobj); } } - added = !scope->hasProperty(id); - uint32 oldShape = scope->shape; - sprop = scope->putProperty(cx, id, getter, setter, SPROP_INVALID_SLOT, - attrs, flags, shortid); - if (!sprop) + added = !obj->nativeContains(id); + uint32 oldShape = obj->shape(); + shape = obj->putProperty(cx, id, getter, setter, SHAPE_INVALID_SLOT, + attrs, flags, shortid); + if (!shape) goto error; /* - * If sprop is a method, the above call to putProperty suffices to + * If shape is a method, the above call to putProperty suffices to * update the shape if necessary. But if scope->branded(), the shape * may not have changed and we may be overwriting a function-valued * property. See bug 560998. */ - if (scope->shape == oldShape && scope->branded() && sprop->slot != SPROP_INVALID_SLOT) - scope->methodWriteBarrier(cx, sprop->slot, value); + if (obj->shape() == oldShape && obj->branded() && shape->slot != SHAPE_INVALID_SLOT) + obj->methodWriteBarrier(cx, shape->slot, value); } /* Store value before calling addProperty, in case the latter GC's. */ - if (SPROP_HAS_VALID_SLOT(sprop, scope)) - obj->lockedSetSlot(sprop->slot, value); + if (obj->containsSlot(shape->slot)) + obj->lockedSetSlot(shape->slot, value); /* XXXbe called with lock held */ valueCopy = value; - if (!AddPropertyHelper(cx, clasp, obj, scope, sprop, &valueCopy)) { - scope->removeProperty(cx, id); + if (!CallAddPropertyHook(cx, clasp, obj, shape, &valueCopy)) { + obj->removeProperty(cx, id); goto error; } @@ -4252,11 +4217,11 @@ js_DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, const Value &valu JS_ASSERT_NOT_ON_TRACE(cx); PropertyCacheEntry *entry = #endif - JS_PROPERTY_CACHE(cx).fill(cx, obj, 0, 0, obj, sprop, added); - TRACE_2(SetPropHit, entry, sprop); + JS_PROPERTY_CACHE(cx).fill(cx, obj, 0, 0, obj, shape, added); + TRACE_2(SetPropHit, entry, shape); } if (propp) - *propp = (JSProperty *) sprop; + *propp = (JSProperty *) shape; else JS_UNLOCK_OBJ(cx, obj); return JS_TRUE; @@ -4305,7 +4270,6 @@ CallResolveOp(JSContext *cx, JSObject *start, JSObject *obj, jsid id, uintN flag { Class *clasp = obj->getClass(); JSResolveOp resolve = clasp->resolve; - JSScope *scope = obj->scope(); /* * Avoid recursion on (obj, id) already being resolved on cx. @@ -4333,11 +4297,11 @@ CallResolveOp(JSContext *cx, JSObject *start, JSObject *obj, jsid id, uintN flag *propp = NULL; JSBool ok; - JSScopeProperty *sprop = NULL; + const Shape *shape = NULL; if (clasp->flags & JSCLASS_NEW_RESOLVE) { JSNewResolveOp newresolve = (JSNewResolveOp)resolve; if (flags == JSRESOLVE_INFER) - flags = js_InferFlags(cx, flags); + flags = js_InferFlags(cx, 0); JSObject *obj2 = (clasp->flags & JSCLASS_NEW_RESOLVE_GETS_START) ? start : NULL; JS_UNLOCK_OBJ(cx, obj); @@ -4366,20 +4330,17 @@ CallResolveOp(JSContext *cx, JSObject *start, JSObject *obj, jsid id, uintN flag JS_LOCK_OBJ(cx, obj2); } else { /* - * Require that obj2 have its own scope now, as we - * do for old-style resolve. If it doesn't, then - * id was not truly resolved, and we'll find it in - * the proto chain, or miss it if obj2's proto is - * not on obj's proto chain. That last case is a - * "too bad!" case. 
+ * Require that obj2 not be empty now, as we do for old-style + * resolve. If it doesn't, then id was not truly resolved, and + * we'll find it in the proto chain, or miss it if obj2's proto + * is not on obj's proto chain. That last case is a "too bad!" + * case. */ - scope = obj2->scope(); - if (!scope->isSharedEmpty()) - sprop = scope->lookup(id); + if (!obj2->nativeEmpty()) + shape = obj2->nativeLookup(id); } - if (sprop) { - JS_ASSERT(scope == obj2->scope()); - JS_ASSERT(!scope->isSharedEmpty()); + if (shape) { + JS_ASSERT(!obj2->nativeEmpty()); obj = obj2; } else if (obj2 != obj) { if (obj2->isNative()) @@ -4389,8 +4350,8 @@ CallResolveOp(JSContext *cx, JSObject *start, JSObject *obj, jsid id, uintN flag } } else { /* - * Old resolve always requires id re-lookup if obj owns - * its scope after resolve returns. + * Old resolve always requires id re-lookup if obj is not empty after + * resolve returns. */ JS_UNLOCK_OBJ(cx, obj); ok = resolve(cx, obj, id); @@ -4398,16 +4359,14 @@ CallResolveOp(JSContext *cx, JSObject *start, JSObject *obj, jsid id, uintN flag goto cleanup; JS_LOCK_OBJ(cx, obj); JS_ASSERT(obj->isNative()); - scope = obj->scope(); - if (!scope->isSharedEmpty()) - sprop = scope->lookup(id); + if (!obj->nativeEmpty()) + shape = obj->nativeLookup(id); } cleanup: - if (ok && sprop) { - JS_ASSERT(obj->scope() == scope); + if (ok && shape) { *objp = obj; - *propp = (JSProperty *) sprop; + *propp = (JSProperty *) shape; } js_StopResolving(cx, &key, JSRESFLAG_LOOKUP, entry, generation); return ok; @@ -4425,16 +4384,16 @@ js_LookupPropertyWithFlags(JSContext *cx, JSObject *obj, jsid id, uintN flags, int protoIndex; for (protoIndex = 0; ; protoIndex++) { JS_LOCK_OBJ(cx, obj); - JSScopeProperty *sprop = obj->scope()->lookup(id); - if (sprop) { + const Shape *shape = obj->nativeLookup(id); + if (shape) { SCOPE_DEPTH_ACCUM(&cx->runtime->protoLookupDepthStats, protoIndex); *objp = obj; - *propp = (JSProperty *) sprop; + *propp = (JSProperty *) shape; return protoIndex; } /* Try obj's class resolve hook if id was not found in obj's scope. 
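* The hook may lazily define id on obj (or, for JSCLASS_NEW_RESOLVE_GETS_START classes, on the start object); CallResolveOp avoids recursing on an (obj, id) pair already being resolved on this context.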
*/ - if (!sprop && obj->getClass()->resolve != JS_ResolveStub) { + if (!shape && obj->getClass()->resolve != JS_ResolveStub) { bool recursed; if (!CallResolveOp(cx, start, obj, id, flags, objp, propp, &recursed)) return -1; @@ -4524,7 +4483,7 @@ js_FindPropertyHelper(JSContext *cx, jsid id, JSBool cacheResult, if (cacheResult && pobj->isNative()) { entry = JS_PROPERTY_CACHE(cx).fill(cx, scopeChain, scopeIndex, protoIndex, pobj, - (JSScopeProperty *) prop); + (Shape *) prop); } SCOPE_DEPTH_ACCUM(&cx->runtime->scopeSearchDepthStats, scopeIndex); goto out; @@ -4614,7 +4573,7 @@ js_FindIdentifierBase(JSContext *cx, JSObject *scopeChain, jsid id) PropertyCacheEntry *entry = #endif JS_PROPERTY_CACHE(cx).fill(cx, scopeChain, scopeIndex, protoIndex, pobj, - (JSScopeProperty *) prop); + (Shape *) prop); JS_ASSERT(entry); JS_UNLOCK_OBJ(cx, pobj); return obj; @@ -4651,47 +4610,45 @@ js_FindIdentifierBase(JSContext *cx, JSObject *scopeChain, jsid id) } JSBool -js_NativeGet(JSContext *cx, JSObject *obj, JSObject *pobj, - JSScopeProperty *sprop, uintN getHow, Value *vp) +js_NativeGet(JSContext *cx, JSObject *obj, JSObject *pobj, const Shape *shape, uintN getHow, + Value *vp) { LeaveTraceIfGlobalObject(cx, pobj); - JSScope *scope; uint32 slot; int32 sample; JS_ASSERT(pobj->isNative()); JS_ASSERT(JS_IS_OBJ_LOCKED(cx, pobj)); - scope = pobj->scope(); - slot = sprop->slot; - if (slot != SPROP_INVALID_SLOT) + slot = shape->slot; + if (slot != SHAPE_INVALID_SLOT) *vp = pobj->lockedGetSlot(slot); else vp->setUndefined(); - if (sprop->hasDefaultGetter()) + if (shape->hasDefaultGetter()) return true; - if (JS_UNLIKELY(sprop->isMethod()) && (getHow & JSGET_NO_METHOD_BARRIER)) { - JS_ASSERT(&sprop->methodObject() == &vp->toObject()); + if (JS_UNLIKELY(shape->isMethod()) && (getHow & JSGET_NO_METHOD_BARRIER)) { + JS_ASSERT(&shape->methodObject() == &vp->toObject()); return true; } sample = cx->runtime->propertyRemovals; - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, pobj); { - AutoScopePropertyRooter tvr(cx, sprop); + AutoShapeRooter tvr(cx, shape); AutoObjectRooter tvr2(cx, pobj); - if (!sprop->get(cx, obj, pobj, vp)) + if (!shape->get(cx, obj, pobj, vp)) return false; } - JS_LOCK_SCOPE(cx, scope); + JS_LOCK_OBJ(cx, pobj); - if (SLOT_IN_SCOPE(slot, scope) && + if (pobj->containsSlot(slot) && (JS_LIKELY(cx->runtime->propertyRemovals == sample) || - scope->hasProperty(sprop))) { - if (!scope->methodWriteBarrier(cx, sprop, *vp)) { - JS_UNLOCK_SCOPE(cx, scope); + pobj->nativeContains(*shape))) { + if (!pobj->methodWriteBarrier(cx, *shape, *vp)) { + JS_UNLOCK_OBJ(cx, pobj); return false; } pobj->lockedSetSlot(slot, *vp); @@ -4701,27 +4658,24 @@ js_NativeGet(JSContext *cx, JSObject *obj, JSObject *pobj, } JSBool -js_NativeSet(JSContext *cx, JSObject *obj, JSScopeProperty *sprop, bool added, - Value *vp) +js_NativeSet(JSContext *cx, JSObject *obj, const Shape *shape, bool added, Value *vp) { LeaveTraceIfGlobalObject(cx, obj); - JSScope *scope; uint32 slot; int32 sample; JS_ASSERT(obj->isNative()); JS_ASSERT(JS_IS_OBJ_LOCKED(cx, obj)); - scope = obj->scope(); - slot = sprop->slot; - if (slot != SPROP_INVALID_SLOT) { + slot = shape->slot; + if (slot != SHAPE_INVALID_SLOT) { OBJ_CHECK_SLOT(obj, slot); - /* If sprop has a stub setter, keep scope locked and just store *vp. */ - if (sprop->hasDefaultSetter()) { - if (!added && !scope->methodWriteBarrier(cx, sprop, *vp)) { - JS_UNLOCK_SCOPE(cx, scope); + /* If shape has a stub setter, keep obj locked and just store *vp. 
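* Even on this fast path the method write barrier runs for pre-existing properties, since a branded object's function-valued slot may be overwritten by this store.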
*/ + if (shape->hasDefaultSetter()) { + if (!added && !obj->methodWriteBarrier(cx, *shape, *vp)) { + JS_UNLOCK_OBJ(cx, obj); return false; } obj->lockedSetSlot(slot, *vp); @@ -4734,24 +4688,24 @@ js_NativeSet(JSContext *cx, JSObject *obj, JSScopeProperty *sprop, bool added, * not writable, so attempting to set such a property should do nothing * or throw if we're in strict mode. */ - if (!sprop->hasGetterValue() && sprop->hasDefaultSetter()) + if (!shape->hasGetterValue() && shape->hasDefaultSetter()) return js_ReportGetterOnlyAssignment(cx); } sample = cx->runtime->propertyRemovals; - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, obj); { - AutoScopePropertyRooter tvr(cx, sprop); - if (!sprop->set(cx, obj, vp)) + AutoShapeRooter tvr(cx, shape); + if (!shape->set(cx, obj, vp)) return false; } - JS_LOCK_SCOPE(cx, scope); - if (SLOT_IN_SCOPE(slot, scope) && + JS_LOCK_OBJ(cx, obj); + if (obj->containsSlot(slot) && (JS_LIKELY(cx->runtime->propertyRemovals == sample) || - scope->hasProperty(sprop))) { - if (!added && !scope->methodWriteBarrier(cx, sprop, *vp)) { - JS_UNLOCK_SCOPE(cx, scope); + obj->nativeContains(*shape))) { + if (!added && !obj->methodWriteBarrier(cx, *shape, *vp)) { + JS_UNLOCK_OBJ(cx, obj); return false; } obj->lockedSetSlot(slot, *vp); @@ -4767,7 +4721,7 @@ js_GetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN getHow, JSObject *aobj, *obj2; int protoIndex; JSProperty *prop; - JSScopeProperty *sprop; + const Shape *shape; JS_ASSERT_IF(getHow & JSGET_CACHE_RESULT, !JS_ON_TRACE(cx)); @@ -4782,7 +4736,7 @@ js_GetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN getHow, if (!prop) { vp->setUndefined(); - if (!callJSPropertyOp(cx, obj->getClass()->getProperty, obj, id, vp)) + if (!CallJSPropertyOp(cx, obj->getClass()->getProperty, obj, id, vp)) return JS_FALSE; PCMETER(getHow & JSGET_CACHE_RESULT && JS_PROPERTY_CACHE(cx).nofills++); @@ -4843,14 +4797,14 @@ js_GetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN getHow, if (!obj2->isNative()) return obj2->getProperty(cx, id, vp); - sprop = (JSScopeProperty *) prop; + shape = (Shape *) prop; if (getHow & JSGET_CACHE_RESULT) { JS_ASSERT_NOT_ON_TRACE(cx); - JS_PROPERTY_CACHE(cx).fill(cx, aobj, 0, protoIndex, obj2, sprop); + JS_PROPERTY_CACHE(cx).fill(cx, aobj, 0, protoIndex, obj2, shape); } - if (!js_NativeGet(cx, obj, obj2, sprop, getHow, vp)) + if (!js_NativeGet(cx, obj, obj2, shape, getHow, vp)) return JS_FALSE; JS_UNLOCK_OBJ(cx, obj2); @@ -4929,8 +4883,7 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, int protoIndex; JSObject *pobj; JSProperty *prop; - JSScopeProperty *sprop; - JSScope *scope; + const Shape *shape; uintN attrs, flags; intN shortid; Class *clasp; @@ -4945,11 +4898,8 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, /* Convert string indices to integers if appropriate. */ id = js_CheckForStringIndex(id); - /* - * We peek at obj->scope() without locking obj. Any race means a failure - * to seal before sharing, which is inherently ambiguous. - */ - if (obj->scope()->sealed() && obj->scope()->object == obj) + /* Check for a sealed object first (now that id has been normalized). 
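* Assignments to a sealed native object always fail here with a read-only error.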
*/ + if (obj->sealed()) return ReportReadOnly(cx, id, JSREPORT_ERROR); protoIndex = js_LookupPropertyWithFlags(cx, obj, id, cx->resolveFlags, @@ -4969,15 +4919,15 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, return JS_FALSE; } } - sprop = (JSScopeProperty *) prop; + shape = (Shape *) prop; /* - * Now either sprop is null, meaning id was not found in obj or one of its - * prototypes; or sprop is non-null, meaning id was found in pobj's scope. - * If JS_THREADSAFE and sprop is non-null, then scope is locked, and sprop - * is held: we must JSObject::dropProperty or JS_UNLOCK_SCOPE before we - * return (the two are equivalent for native objects, but we use - * JS_UNLOCK_SCOPE because it is cheaper). + * Now either shape is null, meaning id was not found in obj or one of its + * prototypes; or shape is non-null, meaning id was found directly in pobj. + * If JS_THREADSAFE and shape is non-null, then pobj is locked, and shape + * is held: we must JSObject::dropProperty or else JS_UNLOCK_OBJ before we + * return (the two are equivalent for native objects; we use JS_UNLOCK_OBJ + * because it is cheaper). */ attrs = JSPROP_ENUMERATE; flags = 0; @@ -4986,32 +4936,25 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, getter = clasp->getProperty; setter = clasp->setProperty; - if (sprop) { - /* - * Set scope for use below. It was locked by js_LookupProperty, and - * we know pobj owns it (i.e., scope->object == pobj). Therefore we - * optimize JS_UNLOCK_OBJ(cx, pobj) into JS_UNLOCK_SCOPE(cx, scope). - */ - scope = pobj->scope(); - + if (shape) { /* ES5 8.12.4 [[Put]] step 2. */ - if (sprop->isAccessorDescriptor()) { - if (sprop->hasDefaultSetter()) { - JS_UNLOCK_SCOPE(cx, scope); + if (shape->isAccessorDescriptor()) { + if (shape->hasDefaultSetter()) { + JS_UNLOCK_OBJ(cx, pobj); if (defineHow & JSDNP_CACHE_RESULT) - TRACE_2(SetPropHit, JS_NO_PROP_CACHE_FILL, sprop); + TRACE_2(SetPropHit, JS_NO_PROP_CACHE_FILL, shape); return js_ReportGetterOnlyAssignment(cx); } } else { - JS_ASSERT(sprop->isDataDescriptor()); + JS_ASSERT(shape->isDataDescriptor()); - if (!sprop->writable()) { - JS_UNLOCK_SCOPE(cx, scope); + if (!shape->writable()) { + JS_UNLOCK_OBJ(cx, pobj); PCMETER((defineHow & JSDNP_CACHE_RESULT) && JS_PROPERTY_CACHE(cx).rofills++); if (defineHow & JSDNP_CACHE_RESULT) { JS_ASSERT_NOT_ON_TRACE(cx); - TRACE_2(SetPropHit, JS_NO_PROP_CACHE_FILL, sprop); + TRACE_2(SetPropHit, JS_NO_PROP_CACHE_FILL, shape); } /* Warn in strict mode, otherwise do nothing. */ @@ -5025,37 +4968,37 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, #endif } } - if (scope->sealed() && !sprop->hasSlot()) { - JS_UNLOCK_SCOPE(cx, scope); + if (pobj->sealed() && !shape->hasSlot()) { + JS_UNLOCK_OBJ(cx, pobj); return ReportReadOnly(cx, id, JSREPORT_ERROR); } - attrs = sprop->attributes(); + attrs = shape->attributes(); if (pobj != obj) { /* * We found id in a prototype object: prepare to share or shadow. * * NB: Thanks to the immutable, garbage-collected property tree * maintained by jsscope.c in cx->runtime, we needn't worry about - * sprop going away behind our back after we've unlocked scope. + * shape going away behind our back after we've unlocked pobj. */ - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, pobj); /* Don't clone a prototype property that doesn't have a slot. 
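* Shared (slotless) prototype properties are written through their setter, if any, without creating a shadowing property on obj.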
*/ - if (!sprop->hasSlot()) { + if (!shape->hasSlot()) { if (defineHow & JSDNP_CACHE_RESULT) { #ifdef JS_TRACER JS_ASSERT_NOT_ON_TRACE(cx); PropertyCacheEntry *entry = #endif - JS_PROPERTY_CACHE(cx).fill(cx, obj, 0, protoIndex, pobj, sprop); - TRACE_2(SetPropHit, entry, sprop); + JS_PROPERTY_CACHE(cx).fill(cx, obj, 0, protoIndex, pobj, shape); + TRACE_2(SetPropHit, entry, shape); } - if (sprop->hasDefaultSetter() && !sprop->hasGetterValue()) + if (shape->hasDefaultSetter() && !shape->hasGetterValue()) return JS_TRUE; - return sprop->set(cx, obj, vp); + return shape->set(cx, obj, vp); } /* Restore attrs to the ECMA default for new properties. */ @@ -5066,29 +5009,25 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, * property that has a shortid. An old API convention requires * that the property's getter and setter functions receive the * shortid, not id, when they are called on the shadow we are - * about to create in obj's scope. + * about to create in obj. */ - if (sprop->hasShortID()) { - flags = JSScopeProperty::HAS_SHORTID; - shortid = sprop->shortid; - getter = sprop->getter(); - setter = sprop->setter(); + if (shape->hasShortID()) { + flags = Shape::HAS_SHORTID; + shortid = shape->shortid; + getter = shape->getter(); + setter = shape->setter(); } /* * Forget we found the proto-property now that we've copied any * needed member values. */ - sprop = NULL; + shape = NULL; } -#ifdef __GNUC__ /* suppress bogus gcc warnings */ - } else { - scope = NULL; -#endif } added = false; - if (!sprop) { + if (!shape) { /* * Purge the property cache of now-shadowed id in obj's scope chain. * Do this early, before locking obj to avoid nesting locks. @@ -5097,8 +5036,7 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, /* Find or make a property descriptor with the right heritage. */ JS_LOCK_OBJ(cx, obj); - scope = js_GetMutableScope(cx, obj); - if (!scope) { + if (!obj->ensureClassReservedSlots(cx)) { JS_UNLOCK_OBJ(cx, obj); return JS_FALSE; } @@ -5114,15 +5052,15 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, JSObject *funobj = &vp->toObject(); JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj); if (fun == funobj) { - flags |= JSScopeProperty::METHOD; + flags |= Shape::METHOD; getter = CastAsPropertyOp(funobj); } } - sprop = scope->putProperty(cx, id, getter, setter, SPROP_INVALID_SLOT, - attrs, flags, shortid); - if (!sprop) { - JS_UNLOCK_SCOPE(cx, scope); + shape = obj->putProperty(cx, id, getter, setter, SHAPE_INVALID_SLOT, + attrs, flags, shortid); + if (!shape) { + JS_UNLOCK_OBJ(cx, obj); return JS_FALSE; } @@ -5131,13 +5069,13 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, * Note that we store before calling addProperty, to match the order * in js_DefineNativeProperty. 
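* Storing undefined first keeps the slot GC-safe if the addProperty hook below triggers a collection.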
*/ - if (SPROP_HAS_VALID_SLOT(sprop, scope)) - obj->lockedSetSlot(sprop->slot, UndefinedValue()); + if (obj->containsSlot(shape->slot)) + obj->lockedSetSlot(shape->slot, UndefinedValue()); /* XXXbe called with obj locked */ - if (!AddPropertyHelper(cx, clasp, obj, scope, sprop, vp)) { - scope->removeProperty(cx, id); - JS_UNLOCK_SCOPE(cx, scope); + if (!CallAddPropertyHook(cx, clasp, obj, shape, vp)) { + obj->removeProperty(cx, id); + JS_UNLOCK_OBJ(cx, obj); return JS_FALSE; } added = true; @@ -5148,14 +5086,14 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, JS_ASSERT_NOT_ON_TRACE(cx); PropertyCacheEntry *entry = #endif - JS_PROPERTY_CACHE(cx).fill(cx, obj, 0, 0, obj, sprop, added); - TRACE_2(SetPropHit, entry, sprop); + JS_PROPERTY_CACHE(cx).fill(cx, obj, 0, 0, obj, shape, added); + TRACE_2(SetPropHit, entry, shape); } - if (!js_NativeSet(cx, obj, sprop, added, vp)) + if (!js_NativeSet(cx, obj, shape, added, vp)) return NULL; - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, obj); return JS_TRUE; } @@ -5178,21 +5116,20 @@ js_GetAttributes(JSContext *cx, JSObject *obj, jsid id, uintN *attrsp) if (!obj->isNative()) return obj->getAttributes(cx, id, attrsp); - JSScopeProperty *sprop = (JSScopeProperty *)prop; - *attrsp = sprop->attributes(); + const Shape *shape = (Shape *)prop; + *attrsp = shape->attributes(); JS_UNLOCK_OBJ(cx, obj); return true; } JSBool -js_SetNativeAttributes(JSContext *cx, JSObject *obj, JSScopeProperty *sprop, - uintN attrs) +js_SetNativeAttributes(JSContext *cx, JSObject *obj, Shape *shape, uintN attrs) { JS_ASSERT(obj->isNative()); - sprop = js_ChangeNativePropertyAttrs(cx, obj, sprop, attrs, 0, - sprop->getter(), sprop->setter()); + bool ok = !!js_ChangeNativePropertyAttrs(cx, obj, shape, attrs, 0, + shape->getter(), shape->setter()); JS_UNLOCK_OBJ(cx, obj); - return (sprop != NULL); + return ok; } JSBool @@ -5204,7 +5141,7 @@ js_SetAttributes(JSContext *cx, JSObject *obj, jsid id, uintN *attrsp) if (!prop) return true; return obj->isNative() - ? js_SetNativeAttributes(cx, obj, (JSScopeProperty *) prop, *attrsp) + ? js_SetNativeAttributes(cx, obj, (Shape *) prop, *attrsp) : obj->setAttributes(cx, id, attrsp); } @@ -5213,8 +5150,7 @@ js_DeleteProperty(JSContext *cx, JSObject *obj, jsid id, Value *rval) { JSObject *proto; JSProperty *prop; - JSScopeProperty *sprop; - JSScope *scope; + const Shape *shape; JSBool ok; rval->setBoolean(true); @@ -5233,8 +5169,8 @@ js_DeleteProperty(JSContext *cx, JSObject *obj, jsid id, Value *rval) */ if (prop) { if (proto->isNative()) { - sprop = (JSScopeProperty *)prop; - if (sprop->isSharedPermanent()) + shape = (Shape *)prop; + if (shape->isSharedPermanent()) rval->setBoolean(false); JS_UNLOCK_OBJ(cx, proto); } @@ -5247,25 +5183,24 @@ js_DeleteProperty(JSContext *cx, JSObject *obj, jsid id, Value *rval) * a prototype, call the class's delProperty hook, passing rval as the * result parameter. 
*/ - return callJSPropertyOp(cx, obj->getClass()->delProperty, obj, id, rval); + return CallJSPropertyOp(cx, obj->getClass()->delProperty, obj, id, rval); } - sprop = (JSScopeProperty *)prop; - if (!sprop->configurable()) { + shape = (Shape *)prop; + if (!shape->configurable()) { JS_UNLOCK_OBJ(cx, obj); rval->setBoolean(false); return JS_TRUE; } /* XXXbe called with obj locked */ - if (!callJSPropertyOp(cx, obj->getClass()->delProperty, obj, SPROP_USERID(sprop), rval)) { + if (!CallJSPropertyOp(cx, obj->getClass()->delProperty, obj, SHAPE_USERID(shape), rval)) { JS_UNLOCK_OBJ(cx, obj); return JS_FALSE; } - scope = obj->scope(); - if (SPROP_HAS_VALID_SLOT(sprop, scope)) { - const Value &v = obj->lockedGetSlot(sprop->slot); + if (obj->containsSlot(shape->slot)) { + const Value &v = obj->lockedGetSlot(shape->slot); GC_POKE(cx, v); /* @@ -5279,7 +5214,7 @@ js_DeleteProperty(JSContext *cx, JSObject *obj, jsid id, Value *rval) * so the only way they could have the method's joined function object * as callee is through an API abusage. We break any such edge case. */ - if (scope->hasMethodBarrier()) { + if (obj->hasMethodBarrier()) { JSObject *funobj; if (IsFunctionObject(v, &funobj)) { @@ -5298,7 +5233,7 @@ js_DeleteProperty(JSContext *cx, JSObject *obj, jsid id, Value *rval) } } - ok = scope->removeProperty(cx, id); + ok = obj->removeProperty(cx, id); JS_UNLOCK_OBJ(cx, obj); return ok && js_SuppressDeletedProperty(cx, obj, id); @@ -5322,36 +5257,36 @@ DefaultValue(JSContext *cx, JSObject *obj, JSType hint, Value *vp) jsid toStringId = ATOM_TO_JSID(cx->runtime->atomState.toStringAtom); JS_LOCK_OBJ(cx, obj); - JSScope *scope = obj->scope(); - JSScopeProperty *sprop = scope->lookup(toStringId); + JSObject *lockedobj = obj; + const Shape *shape = obj->nativeLookup(toStringId); JSObject *pobj = obj; - if (!sprop) { + if (!shape) { pobj = obj->getProto(); if (pobj && pobj->getClass() == &js_StringClass) { - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, obj); JS_LOCK_OBJ(cx, pobj); - scope = pobj->scope(); - sprop = scope->lookup(toStringId); + lockedobj = pobj; + shape = pobj->nativeLookup(toStringId); } } - if (sprop && sprop->hasDefaultGetter() && SPROP_HAS_VALID_SLOT(sprop, scope)) { - const Value &fval = pobj->lockedGetSlot(sprop->slot); + if (shape && shape->hasDefaultGetter() && pobj->containsSlot(shape->slot)) { + const Value &fval = pobj->lockedGetSlot(shape->slot); JSObject *funobj; if (IsFunctionObject(fval, &funobj)) { JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj); if (FUN_FAST_NATIVE(fun) == js_str_toString) { - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, lockedobj); *vp = obj->getPrimitiveThis(); return JS_TRUE; } } } - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, lockedobj); } /* @@ -5430,7 +5365,7 @@ CheckAccess(JSContext *cx, JSObject *obj, jsid id, JSAccessMode mode, JSObject *pobj; JSProperty *prop; Class *clasp; - JSScopeProperty *sprop; + const Shape *shape; JSSecurityCallbacks *callbacks; CheckAccessOp check; @@ -5472,11 +5407,11 @@ CheckAccess(JSContext *cx, JSObject *obj, jsid id, JSAccessMode mode, break; } - sprop = (JSScopeProperty *)prop; - *attrsp = sprop->attributes(); + shape = (Shape *)prop; + *attrsp = shape->attributes(); if (!writing) { - if (SPROP_HAS_VALID_SLOT(sprop, pobj->scope())) - *vp = pobj->lockedGetSlot(sprop->slot); + if (pobj->containsSlot(shape->slot)) + *vp = pobj->lockedGetSlot(shape->slot); else vp->setUndefined(); } @@ -5552,11 +5487,11 @@ js_IsDelegate(JSContext *cx, JSObject *obj, const Value &v) } bool 
-js::FindClassPrototype(JSContext *cx, JSObject *scope, JSProtoKey protoKey, JSObject **protop, - Class *clasp) +js::FindClassPrototype(JSContext *cx, JSObject *scopeobj, JSProtoKey protoKey, + JSObject **protop, Class *clasp) { Value v; - if (!js_FindClassObject(cx, scope, protoKey, &v, clasp)) + if (!js_FindClassObject(cx, scopeobj, protoKey, &v, clasp)) return false; if (IsFunctionObject(v)) { @@ -5574,7 +5509,7 @@ js::FindClassPrototype(JSContext *cx, JSObject *scope, JSProtoKey protoKey, JSOb * NewBuiltinClassInstance in jsobjinlines.h. */ JSBool -js_GetClassPrototype(JSContext *cx, JSObject *scope, JSProtoKey protoKey, +js_GetClassPrototype(JSContext *cx, JSObject *scopeobj, JSProtoKey protoKey, JSObject **protop, Class *clasp) { VOUCH_DOES_NOT_REQUIRE_STACK(); @@ -5582,20 +5517,20 @@ js_GetClassPrototype(JSContext *cx, JSObject *scope, JSProtoKey protoKey, JS_ASSERT(protoKey < JSProto_LIMIT); if (protoKey != JSProto_Null) { - if (!scope) { + if (!scopeobj) { if (cx->hasfp()) - scope = cx->fp()->maybeScopeChain(); - if (!scope) { - scope = cx->globalObject; - if (!scope) { + scopeobj = cx->fp()->maybeScopeChain(); + if (!scopeobj) { + scopeobj = cx->globalObject; + if (!scopeobj) { *protop = NULL; return true; } } } - scope = scope->getGlobal(); - if (scope->getClass()->flags & JSCLASS_IS_GLOBAL) { - const Value &v = scope->getReservedSlot(JSProto_LIMIT + protoKey); + scopeobj = scopeobj->getGlobal(); + if (scopeobj->getClass()->flags & JSCLASS_IS_GLOBAL) { + const Value &v = scopeobj->getReservedSlot(JSProto_LIMIT + protoKey); if (v.isObject()) { *protop = &v.toObject(); return true; @@ -5603,7 +5538,7 @@ js_GetClassPrototype(JSContext *cx, JSObject *scope, JSProtoKey protoKey, } } - return FindClassPrototype(cx, scope, protoKey, protop, clasp); + return FindClassPrototype(cx, scopeobj, protoKey, protop, clasp); } /* @@ -5876,17 +5811,16 @@ js_PrintObjectSlotName(JSTracer *trc, char *buf, size_t bufsize) uint32 slot = (uint32)trc->debugPrintIndex; JS_ASSERT(slot >= JSSLOT_START(obj->getClass())); - JSScopeProperty *sprop; + const Shape *shape; if (obj->isNative()) { - JSScope *scope = obj->scope(); - sprop = scope->lastProperty(); - while (sprop && sprop->slot != slot) - sprop = sprop->parent; + shape = obj->lastProperty(); + while (shape->previous() && shape->slot != slot) + shape = shape->previous(); } else { - sprop = NULL; + shape = NULL; } - if (!sprop) { + if (!shape) { const char *slotname = NULL; Class *clasp = obj->getClass(); if (clasp->flags & JSCLASS_IS_GLOBAL) { @@ -5902,7 +5836,7 @@ js_PrintObjectSlotName(JSTracer *trc, char *buf, size_t bufsize) else JS_snprintf(buf, bufsize, "**UNKNOWN SLOT %ld**", (long)slot); } else { - jsid id = sprop->id; + jsid id = shape->id; if (JSID_IS_INT(id)) { JS_snprintf(buf, bufsize, "%ld", (long)JSID_TO_INT(id)); } else if (JSID_IS_ATOM(id)) { @@ -5920,23 +5854,23 @@ js_TraceObject(JSTracer *trc, JSObject *obj) JS_ASSERT(obj->isNative()); JSContext *cx = trc->context; - JSScope *scope = obj->scope(); - if (!scope->isSharedEmpty() && IS_GC_MARKING_TRACER(trc)) { + if (!obj->nativeEmpty() && IS_GC_MARKING_TRACER(trc)) { /* - * Check whether we should shrink the object's slots. Skip this check - * if the scope is shared, since for Block objects and flat closures - * that share their scope, scope->freeslot can be an underestimate. + * Trim overlong dslots allocations from the GC, to avoid thrashing in + * case of delete-happy code that settles down at a given population. 
+ * The !obj->nativeEmpty() guard above is due to the bug described by + * the FIXME comment below. */ - size_t slots = scope->freeslot; + size_t slots = obj->freeslot; if (obj->numSlots() != slots) obj->shrinkSlots(cx, slots); } #ifdef JS_DUMP_SCOPE_METERS - MeterEntryCount(scope->entryCount); + MeterEntryCount(obj->propertyCount); #endif - scope->trace(trc); + obj->trace(trc); if (!JS_CLIST_IS_EMPTY(&cx->runtime->watchPointList)) js_TraceWatchPoints(trc, obj); @@ -5955,17 +5889,18 @@ js_TraceObject(JSTracer *trc, JSObject *obj) } /* - * An unmutated object that shares a prototype object's scope. We can't - * tell how many slots are in use in obj by looking at its scope, so we - * use obj->numSlots(). + * NB: In case clasp->mark mutates something (which would be a bug, but we + * want to be defensive), leave this code here -- don't move it up and + * unify it with the |if (!traceScope)| section above. * - * NB: In case clasp->mark mutates something, leave this code here -- - * don't move it up and unify it with the |if (!traceScope)| section - * above. + * FIXME: We minimize nslots against obj->freeslot because native objects + * such as Date instances may have failed to advance freeslot to cover all + * reserved slots (this Date issue may be a bug in JSObject::growSlots, but + * the general problem occurs in other built-in class implementations). */ uint32 nslots = obj->numSlots(); - if (!scope->isSharedEmpty() && scope->freeslot < nslots) - nslots = scope->freeslot; + if (!obj->nativeEmpty() && obj->freeslot < nslots) + nslots = obj->freeslot; JS_ASSERT(nslots >= JSSLOT_START(clasp)); for (uint32 i = JSSLOT_START(clasp); i != nslots; ++i) { @@ -5979,22 +5914,20 @@ void js_ClearNative(JSContext *cx, JSObject *obj) { /* - * Clear our scope and the property cache of all obj's properties only if - * obj owns the scope (i.e., not if obj is sharing another object's scope). - * NB: we do not clear any reserved slots lying below JSSLOT_FREE(clasp). + * Clear obj of all obj's properties. FIXME: we do not clear reserved slots + * lying below JSSLOT_FREE(clasp). JS_ClearScope does that. */ JS_LOCK_OBJ(cx, obj); - JSScope *scope = obj->scope(); - if (!scope->isSharedEmpty()) { - /* Now that we're done using scope->lastProp/table, clear scope. */ - scope->clear(cx); + if (!obj->nativeEmpty()) { + /* Now that we're done using real properties, clear obj. */ + obj->clear(cx); /* Clear slot values and reset freeslot so we're consistent. */ uint32 freeslot = JSSLOT_FREE(obj->getClass()); uint32 n = obj->numSlots(); for (uint32 i = freeslot; i < n; ++i) obj->setSlot(i, UndefinedValue()); - scope->freeslot = freeslot; + obj->freeslot = freeslot; } JS_UNLOCK_OBJ(cx, obj); } @@ -6028,11 +5961,6 @@ js_SetReservedSlot(JSContext *cx, JSObject *obj, uint32 index, const Value &v) JS_LOCK_OBJ(cx, obj); if (slot >= obj->numSlots()) { - /* - * At this point, obj may or may not own scope, and we may or may not - * need to allocate slots. If scope is shared, scope->freeslot may not - * be accurate for obj (see comment below). - */ uint32 nslots = JSSLOT_FREE(clasp); JS_ASSERT(slot < nslots); if (!obj->allocSlots(cx, nslots)) { @@ -6041,21 +5969,12 @@ js_SetReservedSlot(JSContext *cx, JSObject *obj, uint32 index, const Value &v) } } - /* - * Whether or not we grew nslots, we may need to advance freeslot. - * - * If scope is shared, do not modify scope->freeslot. It is OK for freeslot - * to be an underestimate in objects with shared scopes, as they will get - * their own scopes before mutating, and elsewhere (e.g. 
js_TraceObject) we - * use obj->numSlots() rather than rely on freeslot. - */ - JSScope *scope = obj->scope(); - if (!scope->isSharedEmpty() && slot >= scope->freeslot) - scope->freeslot = slot + 1; + if (slot >= obj->freeslot) + obj->freeslot = slot + 1; obj->setSlot(slot, v); GC_POKE(cx, JS_NULL); - JS_UNLOCK_SCOPE(cx, scope); + JS_UNLOCK_OBJ(cx, obj); return true; } @@ -6256,10 +6175,10 @@ js_DumpId(jsid id) } static void -dumpScopeProp(JSScopeProperty *sprop) +DumpShape(const Shape &shape) { - jsid id = sprop->id; - uint8 attrs = sprop->attributes(); + jsid id = shape.id; + uint8 attrs = shape.attributes(); fprintf(stderr, " "); if (attrs & JSPROP_ENUMERATE) fprintf(stderr, "enumerate "); @@ -6268,14 +6187,14 @@ dumpScopeProp(JSScopeProperty *sprop) if (attrs & JSPROP_GETTER) fprintf(stderr, "getter "); if (attrs & JSPROP_SETTER) fprintf(stderr, "setter "); if (attrs & JSPROP_SHARED) fprintf(stderr, "shared "); - if (sprop->isAlias()) fprintf(stderr, "alias "); + if (shape.isAlias()) fprintf(stderr, "alias "); if (JSID_IS_ATOM(id)) dumpString(JSID_TO_STRING(id)); else if (JSID_IS_INT(id)) fprintf(stderr, "%d", (int) JSID_TO_INT(id)); else fprintf(stderr, "unknown jsid %p", (void *) JSID_BITS(id)); - fprintf(stderr, ": slot %d", sprop->slot); + fprintf(stderr, ": slot %d", shape.slot); fprintf(stderr, "\n"); } @@ -6290,6 +6209,29 @@ js_DumpObject(JSObject *obj) clasp = obj->getClass(); fprintf(stderr, "class %p %s\n", (void *)clasp, clasp->name); + fprintf(stderr, "flags:"); + uint32 flags = obj->flags; + if (flags & JSObject::DELEGATE) fprintf(stderr, " delegate"); + if (flags & JSObject::SYSTEM) fprintf(stderr, " system"); + if (flags & JSObject::SEALED) fprintf(stderr, " sealed"); + if (flags & JSObject::BRANDED) fprintf(stderr, " branded"); + if (flags & JSObject::GENERIC) fprintf(stderr, " generic"); + if (flags & JSObject::METHOD_BARRIER) fprintf(stderr, " method_barrier"); + if (flags & JSObject::INDEXED) fprintf(stderr, " indexed"); + if (flags & JSObject::OWN_SHAPE) fprintf(stderr, " own_shape"); + bool anyFlags = flags != 0; + if (obj->inDictionaryMode()) { + fprintf(stderr, " inDictionaryMode"); + anyFlags = true; + } + if (obj->hasPropertyTable()) { + fprintf(stderr, " hasPropertyTable"); + anyFlags = true; + } + if (!anyFlags) + fprintf(stderr, " none"); + fprintf(stderr, "\n"); + if (obj->isDenseArray()) { slots = JS_MIN(obj->getArrayLength(), obj->getDenseArrayCapacity()); fprintf(stderr, "elements\n"); @@ -6303,15 +6245,12 @@ js_DumpObject(JSObject *obj) } if (obj->isNative()) { - JSScope *scope = obj->scope(); - if (scope->sealed()) + if (obj->sealed()) fprintf(stderr, "sealed\n"); fprintf(stderr, "properties:\n"); - for (JSScopeProperty *sprop = scope->lastProperty(); sprop; - sprop = sprop->parent) { - dumpScopeProp(sprop); - } + for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) + DumpShape(r.front()); } else { if (!obj->isNative()) fprintf(stderr, "not native\n"); @@ -6333,9 +6272,7 @@ js_DumpObject(JSObject *obj) fprintf(stderr, "slots:\n"); reservedEnd = i + JSCLASS_RESERVED_SLOTS(clasp); - slots = (obj->isNative() && !obj->scope()->isSharedEmpty()) - ? 
obj->scope()->freeslot - : obj->numSlots(); + slots = obj->freeslot; for (; i < slots; i++) { fprintf(stderr, " %3d ", i); if (i < reservedEnd) diff --git a/js/src/jsobj.h b/js/src/jsobj.h index af2b7547e2fe..192f11e5c60e 100644 --- a/js/src/jsobj.h +++ b/js/src/jsobj.h @@ -52,6 +52,7 @@ #include "jshash.h" /* Added by JSIFY */ #include "jspubtd.h" #include "jsprvtd.h" +#include "jslock.h" #include "jsvalue.h" #include "jsvector.h" @@ -175,11 +176,11 @@ typedef Vector PropDescArray; struct JSObjectMap { static JS_FRIEND_DATA(const JSObjectMap) sharedNonNative; - uint32 shape; /* shape identifier */ + uint32 shape; /* shape identifier */ explicit JSObjectMap(uint32 shape) : shape(shape) {} - enum { SHAPELESS = 0xffffffff }; + enum { INVALID_SHAPE = 0x8fffffff, SHAPELESS = 0xffffffff }; bool isNative() const { return this != &sharedNonNative; } @@ -246,6 +247,9 @@ struct JSFunction; * 64 bytes on 64-bit systems. The JSFunction struct is an extension of this * struct allocated from a larger GC size-class. * + * The clasp member stores the js::Class pointer for this object. We do *not* + * synchronize updates of clasp or flags -- API clients must take care. + * * An object is a delegate if it is on another object's prototype (the proto * field) or scope chain (the parent field), and therefore the delegate might * be asked implicitly to get or set a property on behalf of another object. @@ -261,8 +265,8 @@ struct JSFunction; * to be complementary to this bit, but it is up to the API client to implement * any such association. * - * Both these flags are initially zero; they may be set or queried using the - * (is|set)(Delegate|System) inline methods. + * Both these flag bits are initially zero; they may be set or queried using + * the (is|set)(Delegate|System) inline methods. * * The dslots member is null or a pointer into a dynamically allocated vector * of Values for reserved and dynamic slots. If dslots is not null, dslots[-1] @@ -275,25 +279,83 @@ struct JSObject { */ friend class js::TraceRecorder; - JSObjectMap *map; /* property map, see jsscope.h */ - js::Class *clasp; /* class pointer */ - jsuword flags; /* see above */ + /* + * Private pointer to the last added property and methods to manipulate the + * list it links among properties in this scope. The {remove,insert} pair + * for DictionaryProperties assert that the scope is in dictionary mode and + * any reachable properties are flagged as dictionary properties. + * + * NB: these private methods do *not* update this scope's shape to track + * lastProp->shape after they finish updating the linked list in the case + * where lastProp is updated. It is up to calling code in jsscope.cpp to + * call updateShape(cx) after updating lastProp. 
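+ *
+ * A minimal sketch of the required discipline (hypothetical caller; the
+ * real callers live in jsscope.cpp):
+ *
+ *   obj->removeLastProperty();   // private: relinks the property list only
+ *   obj->updateShape(cx);        // resync objShape with lastProp->shape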
+ */ + union { + js::Shape *lastProp; + JSObjectMap *map; + }; + + js::Class *clasp; + + private: + inline void setLastProperty(const js::Shape *shape); + inline void removeLastProperty(); + + public: + inline const js::Shape *lastProperty() const; + + inline js::Shape **nativeSearch(jsid id, bool adding = false); + inline const js::Shape *nativeLookup(jsid id); + + inline bool nativeContains(jsid id); + inline bool nativeContains(const js::Shape &shape); + + enum { + DELEGATE = 0x01, + SYSTEM = 0x02, + SEALED = 0x04, + BRANDED = 0x08, + GENERIC = 0x10, + METHOD_BARRIER = 0x20, + INDEXED = 0x40, + OWN_SHAPE = 0x80 + }; + + enum { + JS_NSLOTS_BITS = 24, + JS_NSLOTS_LIMIT = JS_BIT(JS_NSLOTS_BITS) + }; + + uint32 flags: 32-JS_NSLOTS_BITS, /* flags */ + freeslot: JS_NSLOTS_BITS; /* next free slot in abstract slot space */ + uint32 objShape; /* copy of lastProp->shape, or override if different */ + JSObject *proto; /* object's prototype */ JSObject *parent; /* object's parent */ js::Value *dslots; /* dynamically allocated slots */ + + /* Empty shape of kids if prototype, located here to align fslots on 32 bit targets. */ + js::EmptyShape *emptyShape; + js::Value fslots[JS_INITIAL_NSLOTS]; /* small number of fixed slots */ +#ifdef JS_THREADSAFE + JSTitle title; +#endif - bool isNative() const { - return map->isNative(); - } + /* + * Return an immutable, shareable, empty scope with the same ops as this + * and the same freeslot as this had when empty. + * + * If |this| is the scope of an object |proto|, the resulting scope can be + * used as the scope of a new object whose prototype is |proto|. + */ + inline bool canProvideEmptyShape(js::Class *clasp); + inline js::EmptyShape *getEmptyShape(JSContext *cx, js::Class *aclasp); - js::Class *getClass() const { - return clasp; - } + bool isNative() const { return map->isNative(); } - JSClass *getJSClass() const { - return Jsvalify(clasp); - } + js::Class *getClass() const { return clasp; } + JSClass *getJSClass() const { return Jsvalify(clasp); } bool hasClass(const js::Class *c) const { return c == clasp; @@ -303,32 +365,157 @@ struct JSObject { return &getClass()->ops; } - inline JSScope *scope() const; - inline uint32 shape() const; + inline void trace(JSTracer *trc); - bool isDelegate() const { - return (flags & jsuword(1)) != jsuword(0); + uint32 shape() const { + JS_ASSERT(objShape != JSObjectMap::INVALID_SHAPE); + return objShape; } - void setDelegate() { - flags |= jsuword(1); - } + bool isDelegate() const { return !!(flags & DELEGATE); } + void setDelegate() { flags |= DELEGATE; } static void setDelegateNullSafe(JSObject *obj) { if (obj) obj->setDelegate(); } - bool isSystem() const { - return (flags & jsuword(2)) != jsuword(0); + bool isSystem() const { return !!(flags & SYSTEM); } + void setSystem() { flags |= SYSTEM; } + + /* + * Don't define clearSealed, as it can't be done safely because JS_LOCK_OBJ + * will avoid taking the lock if the object owns its scope and the scope is + * sealed. + */ + bool sealed() { return !!(flags & SEALED); } + void seal(JSContext *cx); + + /* + * A branded object contains plain old methods (function-valued properties + * without magic getters and setters), and its shape evolves whenever a + * function value changes. 
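+ *
+ * Write-barrier sketch enabled by this flag (prev, v, and shape stand for
+ * the old value, the new value, and the property's shape; compare
+ * methodWriteBarrier in jsobjinlines.h):
+ *
+ *   if (obj->branded() && ChangesMethodValue(prev, v)) {
+ *       if (!obj->methodShapeChange(cx, shape))   // regenerate obj's shape
+ *           return false;
+ *   }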
+ */ + bool branded() { return !!(flags & BRANDED); } + + bool brand(JSContext *cx, uint32 slot, js::Value v); + bool unbrand(JSContext *cx); + + bool generic() { return !!(flags & GENERIC); } + void setGeneric() { flags |= GENERIC; } + + private: + void generateOwnShape(JSContext *cx); + + void setOwnShape(uint32 s) { flags |= OWN_SHAPE; objShape = s; } + void clearOwnShape() { flags &= ~OWN_SHAPE; objShape = map->shape; } + + public: + inline bool nativeEmpty() const; + + bool hasOwnShape() const { return !!(flags & OWN_SHAPE); } + + void setMap(JSObjectMap *amap) { + JS_ASSERT(!hasOwnShape()); + map = amap; + objShape = map->shape; } - void setSystem() { - flags |= jsuword(2); + void setSharedNonNativeMap() { + setMap(const_cast(&JSObjectMap::sharedNonNative)); } + void deletingShapeChange(JSContext *cx, const js::Shape &shape); + bool methodShapeChange(JSContext *cx, const js::Shape &shape); + bool methodShapeChange(JSContext *cx, uint32 slot); + void protoShapeChange(JSContext *cx); + void shadowingShapeChange(JSContext *cx, const js::Shape &shape); + bool globalObjectOwnShapeChange(JSContext *cx); + + /* + * A scope has a method barrier when some compiler-created "null closure" + * function objects (functions that do not use lexical bindings above their + * scope, only free variable names) that have a correct JSSLOT_PARENT value + * thanks to the COMPILE_N_GO optimization are stored as newly added direct + * property values of the scope's object. + * + * The de-facto standard JS language requires each evaluation of such a + * closure to result in a unique (according to === and observable effects) + * function object. ES3 tried to allow implementations to "join" such + * objects to a single compiler-created object, but this makes an overt + * mutation hazard, also an "identity hazard" against interoperation among + * implementations that join and do not join. + * + * To stay compatible with the de-facto standard, we store the compiler- + * created function object as the method value and set the METHOD_BARRIER + * flag. + * + * The method value is part of the method property tree node's identity, so + * it effectively brands the scope with a predictable shape corresponding + * to the method value, but without the overhead of setting the BRANDED + * flag, which requires assigning a new shape peculiar to each branded + * scope. Instead the shape is shared via the property tree among all the + * scopes referencing the method property tree node. + * + * Then when reading from a scope for which scope->hasMethodBarrier() is + * true, we count on the scope's qualified/guarded shape being unique and + * add a read barrier that clones the compiler-created function object on + * demand, reshaping the scope. + * + * This read barrier is bypassed when evaluating the callee sub-expression + * of a call expression (see the JOF_CALLOP opcodes in jsopcode.tbl), since + * such ops do not present an identity or mutation hazard. The compiler + * performs this optimization only for null closures that do not use their + * own name or equivalent built-in references (arguments.callee). + * + * The BRANDED write barrier, JSObject::methodWriteBarrer, must check for + * METHOD_BARRIER too, and regenerate this scope's shape if the method's + * value is in fact changing. 
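+ *
+ * Read-path sketch (hypothetical caller): for |o = {m: function(){}}|, a
+ * value read of |o.m| goes through the barrier and clones the joined
+ * function object, whereas the callee read in |o.m()| (a JOF_CALLOP use)
+ * does not:
+ *
+ *   if (obj->hasMethodBarrier() && shape->isMethod()) {
+ *       vp->setObject(shape->methodObject());
+ *       if (!obj->methodReadBarrier(cx, *shape, vp))   // clone and reshape
+ *           return false;
+ *   }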
+ */ + bool hasMethodBarrier() { return !!(flags & METHOD_BARRIER); } + void setMethodBarrier() { flags |= METHOD_BARRIER; } + + /* + * Test whether this object may be branded due to method calls, which means + * any assignment to a function-valued property must regenerate shape; else + * test whether this object has method properties, which require a method + * write barrier. + */ + bool brandedOrHasMethodBarrier() { return !!(flags & (BRANDED | METHOD_BARRIER)); } + + /* + * Read barrier to clone a joined function object stored as a method. + * Defined in jsobjinlines.h, but not declared inline per standard style in + * order to avoid gcc warnings. + */ + bool methodReadBarrier(JSContext *cx, const js::Shape &shape, js::Value *vp); + + /* + * Write barrier to check for a change of method value. Defined inline in + * jsobjinlines.h after methodReadBarrier. The slot flavor is required by + * JSOP_*GVAR, which deals in slots not shapes, while not deoptimizing to + * map slot to shape unless JSObject::flags show that this is necessary. + * The methodShapeChange overload (directly below) parallels this. + */ + bool methodWriteBarrier(JSContext *cx, const js::Shape &shape, const js::Value &v); + bool methodWriteBarrier(JSContext *cx, uint32 slot, const js::Value &v); + + bool isIndexed() const { return !!(flags & INDEXED); } + void setIndexed() { flags |= INDEXED; } + + /* + * Return true if this object is a native one that has been converted from + * shared-immutable prototype-rooted shape storage to dictionary-shapes in + * a doubly-linked list. + */ + inline bool inDictionaryMode() const; + + inline uint32 propertyCount() const; + + inline bool hasPropertyTable() const; + uint32 numSlots(void) const { - return dslots ? dslots[-1].toPrivateUint32() : (uint32)JS_INITIAL_NSLOTS; + return dslots ? dslots[-1].toPrivateUint32() : uint32(JS_INITIAL_NSLOTS); } private: @@ -347,6 +534,35 @@ struct JSObject { bool growSlots(JSContext *cx, size_t nslots); void shrinkSlots(JSContext *cx, size_t nslots); + /* + * Ensure that the object has at least JSCLASS_RESERVED_SLOTS(clasp) + + * nreserved slots. + * + * This method may be called only for native objects freshly created using + * NewObject or one of its variant where the new object will both (a) never + * escape to script and (b) never be extended with ad-hoc properties that + * would try to allocate higher slots without the fresh object first having + * its map set to a shape path that maps those slots. + * + * Block objects satisfy (a) and (b), as there is no evil eval-based way to + * add ad-hoc properties to a Block instance. Call objects satisfy (a) and + * (b) as well, because the compiler-created Shape path that covers args, + * vars, and upvars, stored in their callee function in u.i.names, becomes + * their initial map. + */ + bool ensureInstanceReservedSlots(JSContext *cx, size_t nreserved); + + /* + * NB: ensureClassReservedSlotsForEmptyObject asserts that nativeEmpty() + * Use ensureClassReservedSlots for any object, either empty or already + * extended with properties. + */ + bool ensureClassReservedSlotsForEmptyObject(JSContext *cx); + + inline bool ensureClassReservedSlots(JSContext *cx); + + bool containsSlot(uint32 slot) const { return slot < freeslot; } + js::Value& getSlotRef(uintN slot) { return (slot < JS_INITIAL_NSLOTS) ? 
fslots[slot] @@ -384,13 +600,15 @@ struct JSObject { inline js::Value getReservedSlot(uintN index) const; - JSObject *getProto() const { - return proto; - } + /* Defined in jsscopeinlines.h to avoid including implementation dependencies here. */ + inline void updateShape(JSContext *cx); + inline void updateFlags(const js::Shape *shape, bool isDefinitelyAtom = false); - void clearProto() { - proto = NULL; - } + /* Extend this object to have shape as its last-added property. */ + inline void extend(JSContext *cx, const js::Shape *shape, bool isDefinitelyAtom = false); + + JSObject *getProto() const { return proto; } + void clearProto() { proto = NULL; } void setProto(JSObject *newProto) { #ifdef DEBUG @@ -469,7 +687,7 @@ struct JSObject { inline void staticAssertArrayLengthIsInPrivateSlot(); public: - static const uint32 DENSE_ARRAY_FIXED_RESERVED_SLOTS = 3; + static const uint32 DENSE_ARRAY_CLASS_RESERVED_SLOTS = 3; inline uint32 getArrayLength() const; inline void setArrayLength(uint32 length); @@ -499,25 +717,31 @@ struct JSObject { /* * Reserved slot structure for Arguments objects: * - * JSSLOT_PRIVATE - the corresponding frame until the frame exits. + * JSSLOT_PRIVATE - the function's stack frame until the function + * returns; also, JS_ARGUMENTS_OBJECT_ON_TRACE if + * arguments was created on trace * JSSLOT_ARGS_LENGTH - the number of actual arguments and a flag * indicating whether arguments.length was - * overwritten. This slot is not used to represent + * overwritten. This slot is not used to represent * arguments.length after that property has been * assigned, even if the new value is integral: it's * always the original length. - * JSSLOT_ARGS_CALLEE - the arguments.callee value or JSVAL_HOLE if that - * was overwritten. + * JSSLOT_ARGS_DATA - pointer to an ArgumentsData structure containing + * the arguments.callee value or JSVAL_HOLE if that + * was overwritten, and the values of all arguments + * once the function has returned (or as soon as a + * strict arguments object has been created). * - * Argument index i is stored in dslots[i], accessible via + * Argument index i is stored in ArgumentsData.slots[i], accessible via * {get,set}ArgsElement(). */ static const uint32 JSSLOT_ARGS_LENGTH = JSSLOT_PRIVATE + 1; - static const uint32 JSSLOT_ARGS_CALLEE = JSSLOT_PRIVATE + 2; + static const uint32 JSSLOT_ARGS_DATA = JSSLOT_PRIVATE + 2; public: - /* Number of extra fixed slots besides JSSLOT_PRIVATE. */ - static const uint32 ARGS_FIXED_RESERVED_SLOTS = 2; + /* Number of extra fixed arguments object slots besides JSSLOT_PRIVATE. */ + static const uint32 ARGS_CLASS_RESERVED_SLOTS = 2; + static const uint32 ARGS_FIRST_FREE_SLOT = JSSLOT_PRIVATE + ARGS_CLASS_RESERVED_SLOTS + 1; /* Lower-order bit stolen from the length slot. */ static const uint32 ARGS_LENGTH_OVERRIDDEN_BIT = 0x1; @@ -537,6 +761,9 @@ struct JSObject { inline void setArgsLengthOverridden(); inline bool isArgsLengthOverridden() const; + inline js::ArgumentsData *getArgsData() const; + inline void setArgsData(js::ArgumentsData *data); + inline const js::Value &getArgsCallee() const; inline void setArgsCallee(const js::Value &callee); @@ -548,7 +775,6 @@ struct JSObject { * Date-specific getters and setters. */ - public: static const uint32 JSSLOT_DATE_UTC_TIME = JSSLOT_PRIVATE; /* @@ -579,6 +805,16 @@ struct JSObject { private: friend struct JSFunction; + /* + * Flat closures with one or more upvars snapshot the upvars' values into a + * vector of js::Values referenced from this slot. 
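+ *
+ * Access sketch (the uint16 immediate of JSOP_GETFCSLOT indexes this
+ * vector; |pc| stands for the interpreter's program counter):
+ *
+ *   js::Value *upvars = funobj->getFlatClosureUpvars();
+ *   js::Value upvar = funobj->getFlatClosureUpvar(GET_UINT16(pc));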
+ */ + static const uint32 JSSLOT_FLAT_CLOSURE_UPVARS = JSSLOT_PRIVATE + 1; + + /* + * Null closures set or initialized as methods have these slots. See the + * "method barrier" comments and methods. + */ static const uint32 JSSLOT_FUN_METHOD_ATOM = JSSLOT_PRIVATE + 1; static const uint32 JSSLOT_FUN_METHOD_OBJ = JSSLOT_PRIVATE + 2; @@ -586,16 +822,19 @@ struct JSObject { static const uint32 JSSLOT_BOUND_FUNCTION_ARGS_COUNT = JSSLOT_PRIVATE + 2; public: - static const uint32 FUN_FIXED_RESERVED_SLOTS = 2; + static const uint32 FUN_CLASS_RESERVED_SLOTS = 2; + + inline JSFunction *getFunctionPrivate() const; + + inline js::Value *getFlatClosureUpvars() const; + inline js::Value getFlatClosureUpvar(uint32 i) const; + inline void setFlatClosureUpvars(js::Value *upvars); inline bool hasMethodObj(const JSObject& obj) const; inline void setMethodObj(JSObject& obj); - inline JSFunction *getFunctionPrivate() const; - - inline bool - initBoundFunction(JSContext *cx, const js::Value &thisArg, - const js::Value *args, uintN argslen); + inline bool initBoundFunction(JSContext *cx, const js::Value &thisArg, + const js::Value *args, uintN argslen); inline JSObject *getBoundFunctionTarget() const; inline const js::Value &getBoundFunctionThis() const; @@ -609,7 +848,7 @@ struct JSObject { static const uint32 JSSLOT_REGEXP_LAST_INDEX = JSSLOT_PRIVATE + 1; public: - static const uint32 REGEXP_FIXED_RESERVED_SLOTS = 1; + static const uint32 REGEXP_CLASS_RESERVED_SLOTS = 1; inline const js::Value &getRegExpLastIndex() const; inline void setRegExpLastIndex(const js::Value &v); @@ -643,8 +882,8 @@ struct JSObject { static const uint32 JSSLOT_QNAME_LOCAL_NAME = JSSLOT_PRIVATE + 2; public: - static const uint32 NAMESPACE_FIXED_RESERVED_SLOTS = 3; - static const uint32 QNAME_FIXED_RESERVED_SLOTS = 3; + static const uint32 NAMESPACE_CLASS_RESERVED_SLOTS = 3; + static const uint32 QNAME_CLASS_RESERVED_SLOTS = 3; inline jsval getNamePrefix() const; inline void setNamePrefix(jsval prefix); @@ -675,41 +914,76 @@ struct JSObject { /* * Back to generic stuff. */ - inline bool isCallable(); /* The map field is not initialized here and should be set separately. */ - void init(js::Class *aclasp, JSObject *proto, JSObject *parent, - const js::Value &privateSlotValue) { - JS_STATIC_ASSERT(JSSLOT_PRIVATE + 3 == JS_INITIAL_NSLOTS); + inline void init(js::Class *aclasp, JSObject *proto, JSObject *parent, + const js::Value &privateSlotValue, JSContext *cx); - clasp = aclasp; - flags = 0; - JS_ASSERT(!isDelegate()); - JS_ASSERT(!isSystem()); - - setProto(proto); - setParent(parent); - fslots[JSSLOT_PRIVATE] = privateSlotValue; - fslots[JSSLOT_PRIVATE + 1].setUndefined(); - fslots[JSSLOT_PRIVATE + 2].setUndefined(); - dslots = NULL; - } + inline void finish(JSContext *cx); /* * Like init, but also initializes map. The catch: proto must be the result * of a call to js_InitClass(...clasp, ...). */ - inline void initSharingEmptyScope(js::Class *clasp, + inline void initSharingEmptyShape(js::Class *clasp, JSObject *proto, JSObject *parent, - const js::Value &privateSlotValue); + const js::Value &privateSlotValue, + JSContext *cx); inline bool hasSlotsArray() const { return !!dslots; } /* This method can only be called when hasSlotsArray() returns true. 
*/ inline void freeSlotsArray(JSContext *cx); + inline bool hasProperty(JSContext *cx, jsid id, bool *foundp, uintN flags = 0); + + bool allocSlot(JSContext *cx, uint32 *slotp); + void freeSlot(JSContext *cx, uint32 slot); + + private: + void reportReadOnlyScope(JSContext *cx); + + js::Shape *getChildProperty(JSContext *cx, js::Shape *parent, js::Shape &child); + + const js::Shape *addPropertyCommon(JSContext *cx, jsid id, + js::PropertyOp getter, js::PropertyOp setter, + uint32 slot, uintN attrs, + uintN flags, intN shortid, + js::Shape **spp); + + bool toDictionaryMode(JSContext *cx); + + public: + /* Add a property whose id is not yet in this scope. */ + const js::Shape *addProperty(JSContext *cx, jsid id, + js::PropertyOp getter, js::PropertyOp setter, + uint32 slot, uintN attrs, + uintN flags, intN shortid); + + /* Add a data property whose id is not yet in this scope. */ + const js::Shape *addDataProperty(JSContext *cx, jsid id, uint32 slot, uintN attrs) { + JS_ASSERT(!(attrs & (JSPROP_GETTER | JSPROP_SETTER))); + return addProperty(cx, id, NULL, NULL, slot, attrs, 0, 0); + } + + /* Add or overwrite a property for id in this scope. */ + const js::Shape *putProperty(JSContext *cx, jsid id, + js::PropertyOp getter, js::PropertyOp setter, + uint32 slot, uintN attrs, + uintN flags, intN shortid); + + /* Change the given property into a sibling with the same id in this scope. */ + const js::Shape *changeProperty(JSContext *cx, const js::Shape *shape, uintN attrs, uintN mask, + js::PropertyOp getter, js::PropertyOp setter); + + /* Remove id from this object. */ + bool removeProperty(JSContext *cx, jsid id); + + /* Clear the scope, making it empty. */ + void clear(JSContext *cx); + JSBool lookupProperty(JSContext *cx, jsid id, JSObject **objp, JSProperty **propp) { JSLookupPropOp op = getOps()->lookupProperty; return (op ? op : js_LookupProperty)(cx, this, id, objp, propp); @@ -793,6 +1067,7 @@ struct JSObject { inline bool isObject() const; inline bool isWith() const; inline bool isBlock() const; + inline bool isCall() const; inline bool isRegExp() const; inline bool isXML() const; inline bool isXMLId() const; @@ -806,8 +1081,6 @@ struct JSObject { JS_FRIEND_API(bool) isWrapper() const; JS_FRIEND_API(JSObject *) unwrap(uintN *flagsp = NULL); - inline bool unbrand(JSContext *cx); - inline void initArrayClass(); }; @@ -829,8 +1102,7 @@ JS_STATIC_ASSERT(sizeof(JSObject) % JS_GCTHING_ALIGN == 0); #define MAX_DSLOTS_LENGTH (~size_t(0) / sizeof(js::Value) - 1) #define MAX_DSLOTS_LENGTH32 (~uint32(0) / sizeof(js::Value) - 1) -#define OBJ_CHECK_SLOT(obj,slot) \ - (JS_ASSERT((obj)->isNative()), JS_ASSERT(slot < (obj)->scope()->freeslot)) +#define OBJ_CHECK_SLOT(obj,slot) (JS_ASSERT(slot < (obj)->freeslot)) #ifdef JS_THREADSAFE @@ -914,7 +1186,7 @@ extern JSBool js_DefineBlockVariable(JSContext *cx, JSObject *obj, jsid id, intN index); #define OBJ_BLOCK_COUNT(cx,obj) \ - ((OBJ_IS_CLONED_BLOCK(obj) ? obj->getProto() : obj)->scope()->entryCount) + ((OBJ_IS_CLONED_BLOCK(obj) ? 
obj->getProto() : obj)->propertyCount()) #define OBJ_BLOCK_DEPTH(cx,obj) \ obj->getSlot(JSSLOT_BLOCK_DEPTH).toInt32() #define OBJ_SET_BLOCK_DEPTH(cx,obj,depth) \ @@ -1073,21 +1345,6 @@ extern JSObject * js_ConstructObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent, uintN argc, js::Value *argv); -extern JSBool -js_AllocSlot(JSContext *cx, JSObject *obj, uint32 *slotp); - -extern void -js_FreeSlot(JSContext *cx, JSObject *obj, uint32 slot); - -/* - * Ensure that the object has at least JSCLASS_RESERVED_SLOTS(clasp)+nreserved - * slots. The function can be called only for native objects just created with - * js_NewObject or its forms. In particular, the object should not be shared - * between threads and its dslots array must be null. - */ -bool -js_EnsureReservedSlots(JSContext *cx, JSObject *obj, size_t nreserved); - extern jsid js_CheckForStringIndex(jsid id); @@ -1111,19 +1368,19 @@ js_PurgeScopeChain(JSContext *cx, JSObject *obj, jsid id) * Find or create a property named by id in obj's scope, with the given getter * and setter, slot, attributes, and other members. */ -extern JSScopeProperty * +extern const js::Shape * js_AddNativeProperty(JSContext *cx, JSObject *obj, jsid id, js::PropertyOp getter, js::PropertyOp setter, uint32 slot, uintN attrs, uintN flags, intN shortid); /* - * Change sprop to have the given attrs, getter, and setter in scope, morphing - * it into a potentially new JSScopeProperty. Return a pointer to the changed + * Change shape to have the given attrs, getter, and setter in scope, morphing + * it into a potentially new js::Shape. Return a pointer to the changed * or identical property. */ -extern JSScopeProperty * +extern const js::Shape * js_ChangeNativePropertyAttrs(JSContext *cx, JSObject *obj, - JSScopeProperty *sprop, uintN attrs, uintN mask, + const js::Shape *shape, uintN attrs, uintN mask, js::PropertyOp getter, js::PropertyOp setter); extern JSBool @@ -1136,8 +1393,8 @@ js_DefineOwnProperty(JSContext *cx, JSObject *obj, jsid id, const uintN JSDNP_CACHE_RESULT = 1; /* an interpreter call from JSOP_INITPROP */ const uintN JSDNP_DONT_PURGE = 2; /* suppress js_PurgeScopeChain */ const uintN JSDNP_SET_METHOD = 4; /* js_{DefineNativeProperty,SetPropertyHelper} - must pass the JSScopeProperty::METHOD - flag on to js_AddScopeProperty */ + must pass the js::Shape::METHOD + flag on to JSObject::{add,put}Property */ const uintN JSDNP_UNQUALIFIED = 8; /* Unqualified property set. Only used in the defineHow argument of js_SetPropertyHelper. */ @@ -1225,17 +1482,17 @@ const uintN JSGET_METHOD_BARRIER = 0; // get can leak joined function object const uintN JSGET_NO_METHOD_BARRIER = 2; // call to joined function can't leak /* - * NB: js_NativeGet and js_NativeSet are called with the scope containing sprop + * NB: js_NativeGet and js_NativeSet are called with the scope containing shape * (pobj's scope for Get, obj's for Set) locked, and on successful return, that * scope is again locked. But on failure, both functions return false with the - * scope containing sprop unlocked. + * scope containing shape unlocked. 
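+ *
+ * Caller sketch under this contract (pobj's scope holds shape and is locked
+ * on entry; only the success path unlocks, since failure already did):
+ *
+ *   if (!js_NativeGet(cx, obj, pobj, shape, getHow, vp))
+ *       return false;              // lock already dropped by js_NativeGet
+ *   JS_UNLOCK_OBJ(cx, pobj);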
*/ extern JSBool -js_NativeGet(JSContext *cx, JSObject *obj, JSObject *pobj, - JSScopeProperty *sprop, uintN getHow, js::Value *vp); +js_NativeGet(JSContext *cx, JSObject *obj, JSObject *pobj, const js::Shape *shape, uintN getHow, + js::Value *vp); extern JSBool -js_NativeSet(JSContext *cx, JSObject *obj, JSScopeProperty *sprop, bool added, +js_NativeSet(JSContext *cx, JSObject *obj, const js::Shape *shape, bool added, js::Value *vp); extern JSBool @@ -1266,7 +1523,7 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow, * that obj is locked and this function always unlocks obj on return. */ extern JSBool -js_SetNativeAttributes(JSContext *cx, JSObject *obj, JSScopeProperty *sprop, +js_SetNativeAttributes(JSContext *cx, JSObject *obj, js::Shape *shape, uintN attrs); namespace js { diff --git a/js/src/jsobjinlines.h b/js/src/jsobjinlines.h index c35f25e85c21..ebb394d68cd6 100644 --- a/js/src/jsobjinlines.h +++ b/js/src/jsobjinlines.h @@ -41,18 +41,25 @@ #ifndef jsobjinlines_h___ #define jsobjinlines_h___ -#include "jsbool.h" +#include #include "jsdate.h" +#include "jsfun.h" #include "jsiter.h" +#include "jslock.h" #include "jsobj.h" +#include "jspropertytree.h" #include "jsscope.h" #include "jsstaticcheck.h" #include "jsxml.h" #include "jsdtracef.h" +/* Headers included for inline implementations used by this header. */ +#include "jsbool.h" #include "jscntxt.h" +#include "jsnum.h" #include "jsscopeinlines.h" +#include "jsstr.h" inline void JSObject::dropProperty(JSContext *cx, JSProperty *prop) @@ -62,23 +69,139 @@ JSObject::dropProperty(JSContext *cx, JSProperty *prop) JS_UNLOCK_OBJ(cx, this); } +inline void +JSObject::seal(JSContext *cx) +{ + JS_ASSERT(!sealed()); + if (isNative()) + generateOwnShape(cx); + flags |= SEALED; +} + +inline bool +JSObject::brand(JSContext *cx, uint32 slot, js::Value v) +{ + JS_ASSERT(!generic()); + JS_ASSERT(!branded()); + JS_ASSERT(isNative()); + generateOwnShape(cx); + if (js_IsPropertyCacheDisabled(cx)) // check for rt->shapeGen overflow + return false; + flags |= BRANDED; + return true; +} + +inline bool +JSObject::unbrand(JSContext *cx) +{ + JS_ASSERT(isNative()); + if (!branded()) + setGeneric(); + return true; +} + +/* + * Property read barrier for deferred cloning of compiler-created function + * objects optimized as typically non-escaping, ad-hoc methods in obj. 
+ */ +inline bool +JSObject::methodReadBarrier(JSContext *cx, const js::Shape &shape, js::Value *vp) +{ + JS_ASSERT(canHaveMethodBarrier()); + JS_ASSERT(hasMethodBarrier()); + JS_ASSERT(nativeContains(shape)); + JS_ASSERT(shape.isMethod()); + JS_ASSERT(&shape.methodObject() == &vp->toObject()); + + JSObject *funobj = &vp->toObject(); + JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj); + JS_ASSERT(fun == funobj && FUN_NULL_CLOSURE(fun)); + + funobj = CloneFunctionObject(cx, fun, funobj->getParent()); + if (!funobj) + return false; + funobj->setMethodObj(*this); + + vp->setObject(*funobj); + if (!js_SetPropertyHelper(cx, this, shape.id, 0, vp)) + return false; + +#ifdef DEBUG + if (cx->runtime->functionMeterFilename) { + JS_FUNCTION_METER(cx, mreadbarrier); + + typedef JSRuntime::FunctionCountMap HM; + HM &h = cx->runtime->methodReadBarrierCountMap; + HM::AddPtr p = h.lookupForAdd(fun); + if (!p) { + h.add(p, fun, 1); + } else { + JS_ASSERT(p->key == fun); + ++p->value; + } + } +#endif + return true; +} + +static JS_ALWAYS_INLINE bool +ChangesMethodValue(const js::Value &prev, const js::Value &v) +{ + JSObject *prevObj; + return prev.isObject() && (prevObj = &prev.toObject())->isFunction() && + (!v.isObject() || &v.toObject() != prevObj); +} + +inline bool +JSObject::methodWriteBarrier(JSContext *cx, const js::Shape &shape, const js::Value &v) +{ + if (flags & (BRANDED | METHOD_BARRIER)) { + const js::Value &prev = lockedGetSlot(shape.slot); + + if (ChangesMethodValue(prev, v)) { + JS_FUNCTION_METER(cx, mwritebarrier); + return methodShapeChange(cx, shape); + } + } + return true; +} + +inline bool +JSObject::methodWriteBarrier(JSContext *cx, uint32 slot, const js::Value &v) +{ + if (flags & (BRANDED | METHOD_BARRIER)) { + const js::Value &prev = lockedGetSlot(slot); + + if (ChangesMethodValue(prev, v)) { + JS_FUNCTION_METER(cx, mwslotbarrier); + return methodShapeChange(cx, slot); + } + } + return true; +} + +inline bool +JSObject::ensureClassReservedSlots(JSContext *cx) +{ + return !nativeEmpty() || ensureClassReservedSlotsForEmptyObject(cx); +} + inline js::Value JSObject::getSlotMT(JSContext *cx, uintN slot) { #ifdef JS_THREADSAFE /* - * If thread-safe, define a getSlotMT() that bypasses, for a native - * object, the lock-free "fast path" test of - * (obj->scope()->ownercx == cx), to avoid needlessly switching from - * lock-free to lock-full scope when doing GC on a different context - * from the last one to own the scope. The caller in this case is - * probably a Class.mark function, e.g., fun_mark, or maybe a - * finalizer. + * If thread-safe, define a getSlotMT() that bypasses, for a native object, + * the lock-free "fast path" test of (obj->title.ownercx == cx), to avoid + * needlessly switching from lock-free to lock-full scope when doing GC on + * a different context from the last one to own the scope. The caller in + * this case is probably a JSClass.mark function, e.g., fun_mark, or maybe + * a finalizer. */ OBJ_CHECK_SLOT(this, slot); - return (scope()->title.ownercx == cx) - ? this->lockedGetSlot(slot) - : js::Valueify(js_GetSlotThreadSafe(cx, this, slot)); + return (title.ownercx == cx) + ? this->lockedGetSlot(slot) + : js::Valueify(js_GetSlotThreadSafe(cx, this, slot)); #else return this->lockedGetSlot(slot); #endif @@ -90,7 +213,7 @@ JSObject::setSlotMT(JSContext *cx, uintN slot, const js::Value &value) #ifdef JS_THREADSAFE /* Thread-safe way to set a slot. 
*/ OBJ_CHECK_SLOT(this, slot); - if (scope()->title.ownercx == cx) + if (title.ownercx == cx) this->lockedSetSlot(slot, value); else js_SetSlotThreadSafe(cx, this, slot, js::Jsvalify(value)); @@ -125,7 +248,7 @@ JSObject::getPrimitiveThis() const return fslots[JSSLOT_PRIMITIVE_THIS]; } -inline void +inline void JSObject::setPrimitiveThis(const js::Value &pthis) { JS_ASSERT(isPrimitive()); @@ -145,7 +268,7 @@ JSObject::getArrayLength() const return fslots[JSSLOT_ARRAY_LENGTH].toPrivateUint32(); } -inline void +inline void JSObject::setArrayLength(uint32 length) { JS_ASSERT(isArray()); @@ -208,7 +331,7 @@ JSObject::freeDenseArrayElements(JSContext *cx) } } -inline void +inline void JSObject::voidDenseOnlyArraySlots() { JS_ASSERT(isDenseArray()); @@ -249,42 +372,54 @@ JSObject::isArgsLengthOverridden() const return v.toInt32() & ARGS_LENGTH_OVERRIDDEN_BIT; } -inline const js::Value & -JSObject::getArgsCallee() const +inline js::ArgumentsData * +JSObject::getArgsData() const { JS_ASSERT(isArguments()); - return fslots[JSSLOT_ARGS_CALLEE]; + return (js::ArgumentsData *) fslots[JSSLOT_ARGS_DATA].toPrivate(); } -inline void -JSObject::setArgsCallee(const js::Value &callee) +inline void +JSObject::setArgsData(js::ArgumentsData *data) { JS_ASSERT(isArguments()); - fslots[JSSLOT_ARGS_CALLEE] = callee; + fslots[JSSLOT_ARGS_DATA].setPrivate(data); +} + +inline const js::Value & +JSObject::getArgsCallee() const +{ + return getArgsData()->callee; +} + +inline void +JSObject::setArgsCallee(const js::Value &callee) +{ + getArgsData()->callee = callee; } inline const js::Value & JSObject::getArgsElement(uint32 i) const { JS_ASSERT(isArguments()); - JS_ASSERT(i < numSlots() - JS_INITIAL_NSLOTS); - return dslots[i]; + JS_ASSERT(i < getArgsInitialLength()); + return getArgsData()->slots[i]; } inline js::Value * JSObject::addressOfArgsElement(uint32 i) const { JS_ASSERT(isArguments()); - JS_ASSERT(i < numSlots() - JS_INITIAL_NSLOTS); - return &dslots[i]; + JS_ASSERT(i < getArgsInitialLength()); + return &getArgsData()->slots[i]; } inline void JSObject::setArgsElement(uint32 i, const js::Value &v) { JS_ASSERT(isArguments()); - JS_ASSERT(i < numSlots() - JS_INITIAL_NSLOTS); - dslots[i] = v; + JS_ASSERT(i < getArgsInitialLength()); + getArgsData()->slots[i] = v; } inline const js::Value & @@ -301,6 +436,35 @@ JSObject::setDateUTCTime(const js::Value &time) fslots[JSSLOT_DATE_UTC_TIME] = time; } +inline JSFunction * +JSObject::getFunctionPrivate() const +{ + JS_ASSERT(isFunction()); + return reinterpret_cast(getPrivate()); +} + +inline js::Value * +JSObject::getFlatClosureUpvars() const +{ + JS_ASSERT(isFunction()); + JS_ASSERT(FUN_FLAT_CLOSURE(getFunctionPrivate())); + return (js::Value *) fslots[JSSLOT_FLAT_CLOSURE_UPVARS].toPrivate(); +} + +inline js::Value +JSObject::getFlatClosureUpvar(uint32 i) const +{ + return getFlatClosureUpvars()[i]; +} + +inline void +JSObject::setFlatClosureUpvars(js::Value *upvars) +{ + JS_ASSERT(isFunction()); + JS_ASSERT(FUN_FLAT_CLOSURE(getFunctionPrivate())); + fslots[JSSLOT_FLAT_CLOSURE_UPVARS].setPrivate(upvars); +} + inline bool JSObject::hasMethodObj(const JSObject& obj) const { @@ -314,13 +478,6 @@ JSObject::setMethodObj(JSObject& obj) fslots[JSSLOT_FUN_METHOD_OBJ].setObject(obj); } -inline JSFunction * -JSObject::getFunctionPrivate() const -{ - JS_ASSERT(isFunction()); - return reinterpret_cast(getPrivate()); -} - inline NativeIterator * JSObject::getNativeIterator() const { @@ -402,14 +559,67 @@ JSObject::setWithThis(JSObject *thisp) } inline void 
-JSObject::initSharingEmptyScope(js::Class *clasp, JSObject *proto, JSObject *parent, - const js::Value &privateSlotValue) +JSObject::init(js::Class *aclasp, JSObject *proto, JSObject *parent, + const js::Value &privateSlotValue, JSContext *cx) { - init(clasp, proto, parent, privateSlotValue); + JS_STATIC_ASSERT(JSSLOT_PRIVATE + 3 == JS_INITIAL_NSLOTS); - JSEmptyScope *emptyScope = proto->scope()->emptyScope; - JS_ASSERT(emptyScope->clasp == clasp); - map = emptyScope->hold(); + clasp = aclasp; + flags = 0; + freeslot = JSSLOT_START(aclasp); + +#ifdef DEBUG + /* + * NB: objShape must not be set here; rather, the caller must call setMap + * or setSharedNonNativeMap after calling init. To defend this requirement + * we set map to null in DEBUG builds, and set objShape to a value we then + * assert obj->shape() never returns. + */ + map = NULL; + objShape = JSObjectMap::INVALID_SHAPE; +#endif + + setProto(proto); + setParent(parent); + fslots[JSSLOT_PRIVATE] = privateSlotValue; + fslots[JSSLOT_PRIVATE + 1].setUndefined(); + fslots[JSSLOT_PRIVATE + 2].setUndefined(); + + dslots = NULL; + +#ifdef JS_THREADSAFE + js_InitTitle(cx, &title); +#endif + + emptyShape = NULL; +} + +inline void +JSObject::finish(JSContext *cx) +{ +#ifdef DEBUG + if (isNative()) + JS_LOCK_RUNTIME_VOID(cx->runtime, cx->runtime->liveObjectProps -= propertyCount()); +#endif + if (hasSlotsArray()) + freeSlotsArray(cx); +#ifdef JS_THREADSAFE + js_FinishTitle(cx, &title); +#endif +} + +inline void +JSObject::initSharingEmptyShape(js::Class *aclasp, + JSObject *proto, + JSObject *parent, + const js::Value &privateSlotValue, + JSContext *cx) +{ + init(aclasp, proto, parent, privateSlotValue, cx); + + js::EmptyShape *empty = proto->emptyShape; + JS_ASSERT(empty->getClass() == aclasp); + setMap(empty); } inline void @@ -421,24 +631,31 @@ JSObject::freeSlotsArray(JSContext *cx) } inline bool -JSObject::unbrand(JSContext *cx) +JSObject::hasProperty(JSContext *cx, jsid id, bool *foundp, uintN flags) { - if (this->isNative()) { - JS_LOCK_OBJ(cx, this); - JSScope *scope = this->scope(); - if (scope->isSharedEmpty()) { - scope = js_GetMutableScope(cx, this); - if (!scope) { - JS_UNLOCK_OBJ(cx, this); - return false; - } - } - scope->unbrand(cx); - JS_UNLOCK_SCOPE(cx, scope); - } + JSObject *pobj; + JSProperty *prop; + JSAutoResolveFlags rf(cx, flags); + if (!lookupProperty(cx, id, &pobj, &prop)) + return false; + *foundp = !!prop; + if (prop) + pobj->dropProperty(cx, prop); return true; } +inline bool +JSObject::isCallable() +{ + return isFunction() || getClass()->call; +} + +static inline bool +js_IsCallable(const js::Value &v) +{ + return v.isObject() && v.toObject().isCallable(); +} + namespace js { class AutoPropDescArrayRooter : private AutoGCRooter @@ -475,7 +692,9 @@ class AutoPropertyDescriptorRooter : private AutoGCRooter, public PropertyDescri value.setUndefined(); } - AutoPropertyDescriptorRooter(JSContext *cx, PropertyDescriptor *desc) : AutoGCRooter(cx, DESCRIPTOR) { + AutoPropertyDescriptorRooter(JSContext *cx, PropertyDescriptor *desc) + : AutoGCRooter(cx, DESCRIPTOR) + { obj = desc->obj; attrs = desc->attrs; getter = desc->getter; @@ -492,40 +711,34 @@ InitScopeForObject(JSContext* cx, JSObject* obj, js::Class *clasp, JSObject* pro JS_ASSERT(clasp->isNative()); JS_ASSERT(proto == obj->getProto()); - /* Share proto's emptyScope only if obj is similar to proto. */ - JSScope *scope = NULL; + /* Share proto's emptyShape only if obj is similar to proto. 
*/ + js::EmptyShape *empty = NULL; - if (proto && proto->isNative()) { + if (proto) { JS_LOCK_OBJ(cx, proto); - scope = proto->scope(); - if (scope->canProvideEmptyScope(clasp)) { - JSScope *emptyScope = scope->getEmptyScope(cx, clasp); - JS_UNLOCK_SCOPE(cx, scope); - if (!emptyScope) + if (proto->canProvideEmptyShape(clasp)) { + empty = proto->getEmptyShape(cx, clasp); + JS_UNLOCK_OBJ(cx, proto); + if (!empty) goto bad; - scope = emptyScope; } else { - JS_UNLOCK_SCOPE(cx, scope); - scope = NULL; + JS_UNLOCK_OBJ(cx, proto); } } - if (!scope) { - scope = JSScope::create(cx, clasp, obj, js_GenerateShape(cx, false)); - if (!scope) - goto bad; + if (!empty) { uint32 freeslot = JSSLOT_FREE(clasp); - JS_ASSERT(freeslot >= scope->freeslot); + JS_ASSERT(freeslot >= JSSLOT_PRIVATE); + + empty = js::EmptyShape::create(cx, clasp); + if (!empty) + goto bad; if (freeslot > JS_INITIAL_NSLOTS && !obj->allocSlots(cx, freeslot)) goto bad; - scope->freeslot = freeslot; -#ifdef DEBUG - if (freeslot < obj->numSlots()) - obj->setSlot(freeslot, UndefinedValue()); -#endif + obj->freeslot = freeslot; } - obj->map = scope; + obj->setMap(empty); return true; bad: @@ -560,19 +773,17 @@ NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto, JSObject *p * Default parent to the parent of the prototype, which was set from * the parent of the prototype's constructor. */ - obj->init(clasp, proto, parent, JSObject::defaultPrivate(clasp)); + obj->init(clasp, proto, parent, JSObject::defaultPrivate(clasp), cx); JS_LOCK_OBJ(cx, proto); - JSScope *scope = proto->scope(); - JS_ASSERT(scope->canProvideEmptyScope(clasp)); - scope = scope->getEmptyScope(cx, clasp); + JS_ASSERT(proto->canProvideEmptyShape(clasp)); + js::EmptyShape *empty = proto->getEmptyShape(cx, clasp); JS_UNLOCK_OBJ(cx, proto); - if (!scope) { + if (empty) + obj->setMap(empty); + else obj = NULL; - } else { - obj->map = scope; - } } objectCreationScope.handleCreation(obj); @@ -691,10 +902,7 @@ NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent) * * The should be specialized by the template. */ - JSObject* obj = isFunction - ? (JSObject *)js_NewGCFunction(cx) - : js_NewGCObject(cx); - + JSObject* obj = isFunction ? js_NewGCFunction(cx) : js_NewGCObject(cx); if (!obj) goto out; @@ -702,10 +910,10 @@ NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent) * Default parent to the parent of the prototype, which was set from * the parent of the prototype's constructor. */ - obj->init(clasp, - proto, + obj->init(clasp, proto, (!parent && proto) ? 
proto->getParent() : parent, - JSObject::defaultPrivate(clasp)); + JSObject::defaultPrivate(clasp), + cx); if (clasp->isNative()) { if (!InitScopeForObject(cx, obj, clasp, proto)) { @@ -713,7 +921,7 @@ NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent) goto out; } } else { - obj->map = const_cast(&JSObjectMap::sharedNonNative); + obj->setSharedNonNativeMap(); } out: diff --git a/js/src/jsopcode.cpp b/js/src/jsopcode.cpp index 721b6ca19b75..3bed594d4e4f 100644 --- a/js/src/jsopcode.cpp +++ b/js/src/jsopcode.cpp @@ -297,15 +297,18 @@ ToDisassemblySource(JSContext *cx, jsval v) if (clasp == &js_BlockClass) { char *source = JS_sprintf_append(NULL, "depth %d {", OBJ_BLOCK_DEPTH(cx, obj)); - for (JSScopeProperty *sprop = obj->scope()->lastProperty(); - sprop; - sprop = sprop->parent) { - const char *bytes = js_AtomToPrintableString(cx, JSID_TO_ATOM(sprop->id)); + + Shape::Range r = obj->lastProperty()->all(); + while (!r.empty()) { + const Shape &shape = r.front(); + const char *bytes = js_AtomToPrintableString(cx, JSID_TO_ATOM(shape.id)); if (!bytes) return NULL; + + r.popFront(); source = JS_sprintf_append(source, "%s: %d%s", - bytes, sprop->shortid, - sprop->parent ? ", " : ""); + bytes, shape.shortid, + !r.empty() ? ", " : ""); } source = JS_sprintf_append(source, "}"); @@ -784,7 +787,7 @@ js_NewPrinter(JSContext *cx, const char *name, JSFunction *fun, jp->fun = fun; jp->localNames = NULL; if (fun && FUN_INTERPRETED(fun) && fun->hasLocalNames()) { - jp->localNames = js_GetLocalNameArray(cx, fun, &jp->pool); + jp->localNames = fun->getLocalNameArray(cx, &jp->pool); if (!jp->localNames) { js_DestroyPrinter(jp); return NULL; @@ -1303,19 +1306,9 @@ GetArgOrVarAtom(JSPrinter *jp, uintN slot) const char * GetLocal(SprintStack *ss, jsint i) { - ptrdiff_t off; - JSContext *cx; - JSScript *script; - jsatomid j, n; - JSAtom *atom; - JSObject *obj; - jsint depth, count; - JSScopeProperty *sprop; - const char *rval; - #define LOCAL_ASSERT(expr) LOCAL_ASSERT_RV(expr, "") - off = ss->offsets[i]; + ptrdiff_t off = ss->offsets[i]; if (off >= 0) return OFF2STR(&ss->sprinter, off); @@ -1329,35 +1322,41 @@ GetLocal(SprintStack *ss, jsint i) * none of the script's object literals are blocks), or the stack slot i is * not in a block. In either case, return GetStr(ss, i). 
*/ - cx = ss->sprinter.context; - script = ss->printer->script; + JSScript *script = ss->printer->script; if (script->objectsOffset == 0) return GetStr(ss, i); - for (j = 0, n = script->objects()->length; ; j++) { - if (j == n) - return GetStr(ss, i); - obj = script->getObject(j); - if (obj->getClass() == &js_BlockClass) { - depth = OBJ_BLOCK_DEPTH(cx, obj); - count = OBJ_BLOCK_COUNT(cx, obj); - if ((jsuint)(i - depth) < (jsuint)count) + + for (jsatomid j = 0, n = script->objects()->length; j != n; j++) { + JSObject *obj = script->getObject(j); + if (obj->isBlock()) { + jsint depth = OBJ_BLOCK_DEPTH(cx, obj); + jsint count = OBJ_BLOCK_COUNT(cx, obj); + + if (jsuint(i - depth) < jsuint(count)) { + jsint slot = i - depth; + + for (Shape::Range r(obj->lastProperty()); !r.empty(); r.popFront()) { + const Shape &shape = r.front(); + + if (shape.shortid == slot) { + LOCAL_ASSERT(JSID_IS_ATOM(shape.id)); + + JSAtom *atom = JSID_TO_ATOM(shape.id); + const char *rval = QuoteString(&ss->sprinter, ATOM_TO_STRING(atom), 0); + if (!rval) + return NULL; + + RETRACT(&ss->sprinter, rval); + return rval; + } + } + break; + } } } - i -= depth; - for (sprop = obj->scope()->lastProperty(); sprop; sprop = sprop->parent) { - if (sprop->shortid == i) - break; - } - - LOCAL_ASSERT(sprop && JSID_IS_ATOM(sprop->id)); - atom = JSID_TO_ATOM(sprop->id); - rval = QuoteString(&ss->sprinter, ATOM_TO_STRING(atom), 0); - if (!rval) - return NULL; - RETRACT(&ss->sprinter, rval); - return rval; + return GetStr(ss, i); #undef LOCAL_ASSERT } @@ -2650,7 +2649,6 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop) case JSOP_ENTERBLOCK: { JSAtom **atomv, *smallv[5]; - JSScopeProperty *sprop; LOAD_OBJECT(0); argc = OBJ_BLOCK_COUNT(cx, obj); @@ -2665,12 +2663,13 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop) MUST_FLOW_THROUGH("enterblock_out"); #define LOCAL_ASSERT_OUT(expr) LOCAL_ASSERT_CUSTOM(expr, ok = JS_FALSE; \ goto enterblock_out) - for (sprop = obj->scope()->lastProperty(); sprop; - sprop = sprop->parent) { - if (!sprop->hasShortID()) + for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) { + const Shape &shape = r.front(); + + if (!shape.hasShortID()) continue; - LOCAL_ASSERT_OUT(sprop->shortid < argc); - atomv[sprop->shortid] = JSID_TO_ATOM(sprop->id); + LOCAL_ASSERT_OUT(shape.shortid < argc); + atomv[shape.shortid] = JSID_TO_ATOM(shape.id); } ok = JS_TRUE; for (i = 0; i < argc; i++) { @@ -2843,8 +2842,8 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop) case JSOP_CALLUPVAR: case JSOP_GETUPVAR_DBG: case JSOP_CALLUPVAR_DBG: - case JSOP_GETDSLOT: - case JSOP_CALLDSLOT: + case JSOP_GETFCSLOT: + case JSOP_CALLFCSLOT: { if (!jp->fun) { JS_ASSERT(jp->script->savedCallerFun); @@ -2852,7 +2851,7 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop) } if (!jp->localNames) - jp->localNames = js_GetLocalNameArray(cx, jp->fun, &jp->pool); + jp->localNames = jp->fun->getLocalNameArray(cx, &jp->pool); uintN index = GET_UINT16(pc); if (index < jp->fun->u.i.nupvars) { @@ -4031,7 +4030,7 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop) if (!fun->hasLocalNames()) { innerLocalNames = NULL; } else { - innerLocalNames = js_GetLocalNameArray(cx, fun, &cx->tempPool); + innerLocalNames = fun->getLocalNameArray(cx, &cx->tempPool); if (!innerLocalNames) return NULL; } diff --git a/js/src/jsopcode.tbl b/js/src/jsopcode.tbl index 55ada7aa9c1b..0565328f447e 100644 --- a/js/src/jsopcode.tbl +++ b/js/src/jsopcode.tbl @@ -351,12 +351,13 @@ 
OPDEF(JSOP_TRY, 132,"try", NULL, 1, 0, 0, 0, JOF_BYTE) OPDEF(JSOP_FINALLY, 133,"finally", NULL, 1, 0, 2, 0, JOF_BYTE) /* - * Get a dynamic slot from an object known to have at least one greater than - * the slot index number of values at obj->dslots. The CALL variant computes - * the callee and this-object in preparation for a JSOP_CALL. + * Get a slot from a flat closure function object that contains a snapshot of + * the closure-invariant upvar values. The immediate operand indexes the upvar + * in the function's u.i.script->upvars() array. The CALL variant computes the + * callee and this-object in preparation for a JSOP_CALL. */ -OPDEF(JSOP_GETDSLOT, 134,"getdslot", NULL, 3, 0, 1, 19, JOF_UINT16|JOF_NAME) -OPDEF(JSOP_CALLDSLOT, 135,"calldslot", NULL, 3, 0, 2, 19, JOF_UINT16|JOF_NAME|JOF_CALLOP) +OPDEF(JSOP_GETFCSLOT, 134,"getfcslot", NULL, 3, 0, 1, 19, JOF_UINT16|JOF_NAME) +OPDEF(JSOP_CALLFCSLOT, 135,"callfcslot", NULL, 3, 0, 2, 19, JOF_UINT16|JOF_NAME|JOF_CALLOP) /* * Bytecodes that avoid making an arguments object in most cases: diff --git a/js/src/jsparse.cpp b/js/src/jsparse.cpp index 2ff28146b2b7..eb320db74fb8 100644 --- a/js/src/jsparse.cpp +++ b/js/src/jsparse.cpp @@ -1209,8 +1209,8 @@ CheckStrictFormals(JSContext *cx, JSTreeContext *tc, JSFunction *fun, if (tc->flags & (TCF_FUN_PARAM_ARGUMENTS | TCF_FUN_PARAM_EVAL)) { JSAtomState *atoms = &cx->runtime->atomState; - atom = (tc->flags & TCF_FUN_PARAM_ARGUMENTS - ? atoms->argumentsAtom : atoms->evalAtom); + atom = (tc->flags & TCF_FUN_PARAM_ARGUMENTS) ? atoms->argumentsAtom : atoms->evalAtom; + /* The definition's source position will be more precise. */ JSDefinition *dn = ALE_DEFN(tc->decls.lookup(atom)); JS_ASSERT(dn->pn_atom == atom); @@ -1546,7 +1546,11 @@ Compiler::compileFunctionBody(JSContext *cx, JSFunction *fun, JSPrincipals *prin uintN nargs = fun->nargs; if (nargs) { - jsuword *names = js_GetLocalNameArray(cx, fun, &cx->tempPool); + /* + * NB: do not use AutoLocalNameArray because it will release space + * allocated from cx->tempPool by DefineArg. + */ + jsuword *names = fun->getLocalNameArray(cx, &cx->tempPool); if (!names) { fn = NULL; } else { @@ -1643,7 +1647,7 @@ BindLocalVariable(JSContext *cx, JSFunction *fun, JSAtom *atom, if (atom == cx->runtime->atomState.argumentsAtom && !isArg) return JS_TRUE; - return js_AddLocal(cx, fun, atom, localKind); + return fun->addLocal(cx, atom, localKind); } #if JS_HAS_DESTRUCTURING @@ -1661,7 +1665,7 @@ BindDestructuringArg(JSContext *cx, BindData *data, JSAtom *atom, JS_ASSERT(tc->inFunction()); - JSLocalKind localKind = js_LookupLocal(cx, tc->fun, atom, NULL); + JSLocalKind localKind = tc->fun->lookupLocal(cx, atom, NULL); if (localKind != JSLOCAL_NONE) { ReportCompileErrorNumber(cx, TS(tc->parser), NULL, JSREPORT_ERROR, JSMSG_DESTRUCT_DUP_ARG); @@ -2149,7 +2153,7 @@ Parser::setFunctionKinds(JSFunctionBox *funbox, uint32& tcflags) * also classifies enclosing functions holding upvars referenced in * those descendants' bodies. So now we can check our "methods". * - * Despecialize from branded method-identity-based shape to sprop- + * Despecialize from branded method-identity-based shape to shape- * or slot-based shape if this function smells like a constructor * and too many of its methods are *not* joinable null closures * (i.e., they have one or more upvars fetched via the display). 
@@ -2325,9 +2329,9 @@ Parser::setFunctionKinds(JSFunctionBox *funbox, uint32& tcflags) if (FUN_KIND(fun) == JSFUN_INTERPRETED && pn->pn_type == TOK_UPVARS) { /* * One or more upvars cannot be safely snapshot into a flat - * closure's dslot (see JSOP_GETDSLOT), so we loop again over - * all upvars, and for each non-free upvar, ensure that its - * containing function has been flagged as heavyweight. + * closure's non-reserved slot (see JSOP_GETFCSLOT), so we loop + * again over all upvars, and for each non-free upvar, ensure that + * its containing function has been flagged as heavyweight. * * The emitter must see TCF_FUN_HEAVYWEIGHT accurately before * generating any code for a tree of nested functions. @@ -2569,6 +2573,7 @@ Parser::functionArguments(JSTreeContext &funtc, JSFunctionBox *funbox, JSFunctio bool destructuringArg = false; JSParseNode *list = NULL; #endif + do { switch (TokenKind tt = tokenStream.getToken()) { #if JS_HAS_DESTRUCTURING @@ -2599,7 +2604,7 @@ Parser::functionArguments(JSTreeContext &funtc, JSFunctionBox *funbox, JSFunctio * parameter that is to be destructured. */ jsint slot = fun->nargs; - if (!js_AddLocal(context, fun, NULL, JSLOCAL_ARG)) + if (!fun->addLocal(context, NULL, JSLOCAL_ARG)) return false; /* @@ -2615,7 +2620,8 @@ Parser::functionArguments(JSTreeContext &funtc, JSFunctionBox *funbox, JSFunctio rhs->pn_cookie.set(funtc.staticLevel, uint16(slot)); rhs->pn_dflags |= PND_BOUND; - JSParseNode *item = JSParseNode::newBinaryOrAppend(TOK_ASSIGN, JSOP_NOP, lhs, rhs, &funtc); + JSParseNode *item = + JSParseNode::newBinaryOrAppend(TOK_ASSIGN, JSOP_NOP, lhs, rhs, &funtc); if (!item) return false; if (!list) { @@ -2645,15 +2651,15 @@ Parser::functionArguments(JSTreeContext &funtc, JSFunctionBox *funbox, JSFunctio * * Duplicates are warned about (strict option) or cause errors (strict * mode code), but we do those tests in one place below, after having - * parsed the body. + * parsed the body in case it begins with a "use strict"; directive. */ - if (js_LookupLocal(context, fun, atom, NULL) != JSLOCAL_NONE) { + if (fun->lookupLocal(context, atom, NULL) != JSLOCAL_NONE) { duplicatedArg = atom; if (destructuringArg) goto report_dup_and_destructuring; } #endif - if (!js_AddLocal(context, fun, atom, JSLOCAL_ARG)) + if (!fun->addLocal(context, atom, JSLOCAL_ARG)) return false; break; } @@ -2788,12 +2794,12 @@ Parser::functionDef(JSAtom *funAtom, FunctionType type, uintN lambda) * we add a variable even if a parameter with the given name * already exists. 
*/ - localKind = js_LookupLocal(context, tc->fun, funAtom, &index); + localKind = tc->fun->lookupLocal(context, funAtom, &index); switch (localKind) { case JSLOCAL_NONE: case JSLOCAL_ARG: index = tc->fun->u.i.nvars; - if (!js_AddLocal(context, tc->fun, funAtom, JSLOCAL_VAR)) + if (!tc->fun->addLocal(context, funAtom, JSLOCAL_VAR)) return NULL; /* FALL THROUGH */ @@ -3243,7 +3249,7 @@ BindLet(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc) uintN slot = JSSLOT_FREE(&js_BlockClass) + n; if (slot >= blockObj->numSlots() && !blockObj->growSlots(cx, slot + 1)) return false; - blockObj->scope()->freeslot = slot + 1; + blockObj->freeslot = slot + 1; blockObj->setSlot(slot, PrivateValue(pn)); return true; } @@ -3255,11 +3261,10 @@ PopStatement(JSTreeContext *tc) if (stmt->flags & SIF_SCOPE) { JSObject *obj = stmt->blockObj; - JSScope *scope = obj->scope(); JS_ASSERT(!OBJ_IS_CLONED_BLOCK(obj)); - for (JSScopeProperty *sprop = scope->lastProperty(); sprop; sprop = sprop->parent) { - JSAtom *atom = JSID_TO_ATOM(sprop->id); + for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) { + JSAtom *atom = JSID_TO_ATOM(r.front().id); /* Beware the empty destructuring dummy. */ if (atom == tc->parser->context->runtime->atomState.emptyAtom) @@ -3489,7 +3494,7 @@ BindVarOrConst(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc) return JS_TRUE; } - JSLocalKind localKind = js_LookupLocal(cx, tc->fun, atom, NULL); + JSLocalKind localKind = tc->fun->lookupLocal(cx, atom, NULL); if (localKind == JSLOCAL_NONE) { /* * Property not found in current variable scope: we have not seen this @@ -3965,7 +3970,7 @@ CheckDestructuring(JSContext *cx, BindData *data, JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_SHARED, - JSScopeProperty::HAS_SHORTID, 0, NULL); + Shape::HAS_SHORTID, 0, NULL); if (!ok) goto out; } @@ -6039,9 +6044,8 @@ JSParseNode * Parser::bitXorExpr() { JSParseNode *pn = bitAndExpr(); - while (pn && tokenStream.matchToken(TOK_BITXOR)) { + while (pn && tokenStream.matchToken(TOK_BITXOR)) pn = JSParseNode::newBinaryOrAppend(TOK_BITXOR, JSOP_BITXOR, pn, bitAndExpr(), tc); - } return pn; } diff --git a/js/src/jsparse.h b/js/src/jsparse.h index f366468a591b..cde7bb2cc3f9 100644 --- a/js/src/jsparse.h +++ b/js/src/jsparse.h @@ -901,9 +901,9 @@ struct JSFunctionBox : public JSObjectBox * be joined to one compiler-created null closure shared among N different * closure environments. * - * We despecialize from caching function objects, caching slots or sprops + * We despecialize from caching function objects, caching slots or shapes * instead, because an unbranded object may still have joined methods (for - * which sprop->isMethod), since PropertyCache::fill gives precedence to + * which shape->isMethod), since PropertyCache::fill gives precedence to * joined methods over branded methods. 
*/ bool shouldUnbrand(uintN methods, uintN slowMethods) const; diff --git a/js/src/jspropertycache.cpp b/js/src/jspropertycache.cpp index a989eab1d878..617f4a502c32 100644 --- a/js/src/jspropertycache.cpp +++ b/js/src/jspropertycache.cpp @@ -39,8 +39,10 @@ * ***** END LICENSE BLOCK ***** */ #include "jspropertycache.h" -#include "jspropertycacheinlines.h" #include "jscntxt.h" +#include "jsnum.h" +#include "jsobjinlines.h" +#include "jspropertycacheinlines.h" using namespace js; @@ -48,10 +50,9 @@ JS_STATIC_ASSERT(sizeof(PCVal) == sizeof(jsuword)); JS_REQUIRES_STACK PropertyCacheEntry * PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoIndex, - JSObject *pobj, JSScopeProperty *sprop, JSBool adding) + JSObject *pobj, const Shape *shape, JSBool adding) { jsbytecode *pc; - JSScope *scope; jsuword kshape, vshape; JSOp op; const JSCodeSpec *cs; @@ -67,11 +68,10 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI } /* - * Check for fill from js_SetPropertyHelper where the setter removed sprop - * from pobj's scope (via unwatch or delete, e.g.). + * Check for fill from js_SetPropertyHelper where the setter removed shape + * from pobj (via unwatch or delete, e.g.). */ - scope = pobj->scope(); - if (!scope->hasProperty(sprop)) { + if (!pobj->nativeContains(*shape)) { PCMETER(oddfills++); return JS_NO_PROP_CACHE_FILL; } @@ -81,7 +81,7 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI * and setter hooks can change the prototype chain using JS_SetPrototype * after js_LookupPropertyWithFlags has returned the nominal protoIndex, * we have to validate protoIndex if it is non-zero. If it is zero, then - * we know thanks to the scope->hasProperty test above, combined with the + * we know thanks to the pobj->nativeContains test above, combined with the * fact that obj == pobj, that protoIndex is invariant. * * The scopeIndex can't be wrong. We require JS_SetParent calls to happen @@ -138,37 +138,38 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI * getter, so get of a function is idempotent. */ if (cs->format & JOF_CALLOP) { - if (sprop->isMethod()) { + if (shape->isMethod()) { /* * A compiler-created function object, AKA a method, already * memoized in the property tree. */ - JS_ASSERT(scope->hasMethodBarrier()); - JSObject &funobj = sprop->methodObject(); - JS_ASSERT(&funobj == &pobj->lockedGetSlot(sprop->slot).toObject()); + JS_ASSERT(pobj->hasMethodBarrier()); + JSObject &funobj = shape->methodObject(); + JS_ASSERT(&funobj == &pobj->lockedGetSlot(shape->slot).toObject()); vword.setFunObj(funobj); break; } - if (!scope->generic() && - sprop->hasDefaultGetter() && - SPROP_HAS_VALID_SLOT(sprop, scope)) { - const Value &v = pobj->lockedGetSlot(sprop->slot); + if (!pobj->generic() && + shape->hasDefaultGetter() && + pobj->containsSlot(shape->slot)) { + const Value &v = pobj->lockedGetSlot(shape->slot); JSObject *funobj; + if (IsFunctionObject(v, &funobj)) { /* * Great, we have a function-valued prototype property * where the getter is JS_PropertyStub. The type id in - * pobj's scope does not evolve with changes to property - * values, however. + * pobj does not evolve with changes to property values, + * however. * * So here, on first cache fill for this method, we brand - * the scope with a new shape and set the JSScope::BRANDED - * flag. 
Once this flag is set, any property assignment - * that changes the value from or to a different function - * object will result in shape being regenerated. + * obj with a new shape and set the JSObject::BRANDED flag. + * Once this flag is set, any property assignment that + * changes the value from or to a different function object + * will result in shape being regenerated. */ - if (!scope->branded()) { + if (!pobj->branded()) { PCMETER(brandfills++); #ifdef DEBUG_notme fprintf(stderr, @@ -178,7 +179,7 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI JS_GetFunctionName(GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(v))), obj->shape()); #endif - if (!scope->brand(cx, sprop->slot, v)) + if (!pobj->brand(cx, shape->slot, v)) return JS_NO_PROP_CACHE_FILL; } vword.setFunObj(*funobj); @@ -192,24 +193,23 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI * with stub getters and setters, we can cache the slot. */ if (!(cs->format & (JOF_SET | JOF_FOR)) && - (!(cs->format & JOF_INCDEC) || sprop->hasDefaultSetter()) && - sprop->hasDefaultGetter() && - SPROP_HAS_VALID_SLOT(sprop, scope)) { - /* Great, let's cache sprop's slot and use it on cache hit. */ - vword.setSlot(sprop->slot); + (!(cs->format & JOF_INCDEC) || shape->hasDefaultSetter()) && + shape->hasDefaultGetter() && + pobj->containsSlot(shape->slot)) { + /* Great, let's cache shape's slot and use it on cache hit. */ + vword.setSlot(shape->slot); } else { - /* Best we can do is to cache sprop (still a nice speedup). */ - vword.setSprop(sprop); + /* Best we can do is to cache shape (still a nice speedup). */ + vword.setShape(shape); if (adding && - sprop == scope->lastProperty() && - scope->shape == sprop->shape) { + pobj->shape() == shape->shape) { /* * Our caller added a new property. We also know that a setter - * that js_NativeSet could have run has not mutated the scope, - * so the added property is still the last one added, and the - * scope is not branded. + * that js_NativeSet might have run has not mutated pobj, so + * the added property is still the last one added, and pobj is + * not branded. * - * We want to cache under scope's shape before the property + * We want to cache under pobj's shape before the property * addition to bias for the case when the mutator opcode * always adds the same property. This allows us to optimize * periodic execution of object initializers or other explicit @@ -230,26 +230,10 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI * that on the third and subsequent iterations the cache will * be hit because the shape is no longer updated. */ - JS_ASSERT(!scope->isSharedEmpty()); - if (sprop->parent) { - kshape = sprop->parent->shape; - } else { - /* - * If obj had its own empty scope before, with a unique - * shape, that is lost. Here we only attempt to find a - * matching empty scope. In unusual cases involving - * __proto__ assignment we may not find one. 
- */ - JSObject *proto = obj->getProto(); - if (!proto || !proto->isNative()) - return JS_NO_PROP_CACHE_FILL; - JSScope *protoscope = proto->scope(); - if (!protoscope->emptyScope || - protoscope->emptyScope->clasp != obj->getClass()) { - return JS_NO_PROP_CACHE_FILL; - } - kshape = protoscope->emptyScope->shape; - } + JS_ASSERT(shape == pobj->lastProperty()); + JS_ASSERT(!pobj->nativeEmpty()); + + kshape = shape->previous()->shape; /* * When adding we predict no prototype object will later gain a @@ -262,7 +246,7 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI if (kshape == 0) { kshape = obj->shape(); - vshape = scope->shape; + vshape = pobj->shape(); } JS_ASSERT(kshape < SHAPE_OVERFLOW_BIT); @@ -281,11 +265,10 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI * Make sure that a later shadowing assignment will enter * PurgeProtoChain and invalidate this entry, bug 479198. * - * This is thread-safe even though obj is not locked. Only the - * DELEGATE bit of obj->classword can change at runtime, given that - * obj is native; and the bit is only set, never cleared. And on - * platforms where another CPU can fail to see this write, it's OK - * because the property cache and JIT cache are thread-local. + * This is not thread-safe but we are about to make all objects + * except multi-threaded wrappers (bug 566951) single-threaded. + * And multi-threaded wrappers are non-native Proxy instances, so + * they won't use the property cache. */ obj->setDelegate(); } @@ -363,7 +346,7 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject return atom; } - if (entry->kshape != obj->map->shape) { + if (entry->kshape != obj->shape()) { PCMETER(kshapemisses++); return GetAtomFromBytecode(cx, pc, op, cs); } @@ -401,8 +384,7 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject jsid id = ATOM_TO_JSID(atom); id = js_CheckForStringIndex(id); - JS_ASSERT(pobj->scope()->lookup(id)); - JS_ASSERT_IF(pobj->scope()->object, pobj->scope()->object == pobj); + JS_ASSERT(pobj->nativeContains(id)); #endif *pobjp = pobj; return NULL; diff --git a/js/src/jspropertycache.h b/js/src/jspropertycache.h index 960c1260ab35..30137e5817e9 100644 --- a/js/src/jspropertycache.h +++ b/js/src/jspropertycache.h @@ -71,7 +71,7 @@ const uint32 SHAPE_OVERFLOW_BIT = JS_BIT(32 - PCVCAP_TAGBITS); /* * Property cache value. This is simply a tagged union: - * PCVal = (JSObject * | uint32 | JSScopeProperty *). + * PCVal = (JSObject * | uint32 | js::Shape *). * It is the type of PropertyCacheEntry::vword and combines with the tag bits * of PropertyCacheEntry::vcap to tell how to get or set the property, once a * property cache hit is validated. 
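A standalone sketch of the word-sized tagged-union trick described above, with made-up names rather than the actual SpiderMonkey definitions. It relies on the stored pointers being at least 4-byte aligned so the two low bits are free to hold the tag (KidsPointer in jspropertytree.h below asserts exactly this); slot values claim only bit 0, keeping 31 bits of payload:

    #include <cassert>
    #include <cstdint>

    // Sketch of a word-sized tagged union in the PCVal style:
    // tag 00 = first pointer kind, bit 0 set = 31-bit slot index,
    // tag 10 = second pointer kind. Illustrative names only.
    class TaggedWord {
        uintptr_t v;
        enum { PTR_A = 0, SLOT = 1, PTR_B = 2, TAG = 3 };

      public:
        TaggedWord() : v(0) {}

        void setPtrA(void *p) {
            assert((reinterpret_cast<uintptr_t>(p) & TAG) == 0); // needs 4-byte alignment
            v = reinterpret_cast<uintptr_t>(p);
        }
        void setSlot(uint32_t slot) {
            v = (uintptr_t(slot) << 1) | SLOT;                   // slot uses bit 0 only
        }
        void setPtrB(void *p) {
            assert((reinterpret_cast<uintptr_t>(p) & TAG) == 0);
            v = reinterpret_cast<uintptr_t>(p) | PTR_B;
        }

        bool isPtrA() const { return (v & TAG) == PTR_A; }
        bool isSlot() const { return (v & SLOT) != 0; }
        bool isPtrB() const { return (v & TAG) == PTR_B; }

        void *toPtrA() const { assert(isPtrA()); return reinterpret_cast<void *>(v); }
        uint32_t toSlot() const { assert(isSlot()); return uint32_t(v >> 1); }
        void *toPtrB() const { assert(isPtrB()); return reinterpret_cast<void *>(v & ~uintptr_t(TAG)); }
    };

The same encoding, with a three-way pointer tag (shape, chunk, hash), reappears as KidsPointer in jspropertytree.h further down.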
@@ -84,7 +84,7 @@ class PCVal enum { OBJECT = 0, SLOT = 1, - SPROP = 2, + SHAPE = 2, TAG = 3 }; @@ -95,16 +95,27 @@ class PCVal void setNull() { v = 0; } bool isFunObj() const { return (v & TAG) == OBJECT; } - JSObject &toFunObj() const { JS_ASSERT(isFunObj()); return *reinterpret_cast(v); } - void setFunObj(JSObject &obj) { v = reinterpret_cast(&obj); } + JSObject &toFunObj() const { + JS_ASSERT(isFunObj()); + return *reinterpret_cast(v); + } + void setFunObj(JSObject &obj) { + v = reinterpret_cast(&obj); + } bool isSlot() const { return v & SLOT; } uint32 toSlot() const { JS_ASSERT(isSlot()); return uint32(v) >> 1; } void setSlot(uint32 slot) { v = (jsuword(slot) << 1) | SLOT; } - bool isSprop() const { return (v & TAG) == SPROP; } - JSScopeProperty *toSprop() const { JS_ASSERT(isSprop()); return reinterpret_cast(v & ~TAG); } - void setSprop(JSScopeProperty *sprop) { JS_ASSERT(sprop); v = reinterpret_cast(sprop) | SPROP; } + bool isShape() const { return (v & TAG) == SHAPE; } + const js::Shape *toShape() const { + JS_ASSERT(isShape()); + return reinterpret_cast(v & ~TAG); + } + void setShape(const js::Shape *shape) { + JS_ASSERT(shape); + v = reinterpret_cast(shape) | SHAPE; + } }; struct PropertyCacheEntry @@ -197,7 +208,7 @@ class PropertyCache /* * Add kshape rather than xor it to avoid collisions between nearby bytecode * that are evolving an object by setting successive properties, incrementing - * the object's scope->shape on each set. + * the object's shape on each set. */ static inline jsuword hash(jsbytecode *pc, jsuword kshape) @@ -239,18 +250,18 @@ class PropertyCache /* * Test for cached information about creating a new own data property on obj at pc. * - * On a hit, set *spropp to an sprop from the property tree describing the + * On a hit, set *shapep to an shape from the property tree describing the * new property as well as all existing properties on obj and return * true. Otherwise return false. * * Hit or miss, *entryp receives a pointer to the property cache entry. */ - JS_ALWAYS_INLINE bool testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj, JSScope *scope, - JSScopeProperty **spropp, PropertyCacheEntry **entryp); + JS_ALWAYS_INLINE bool testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj, + const js::Shape **shapep, PropertyCacheEntry **entryp); /* * Fill property cache entry for key cx->fp->pc, optimized value word - * computed from obj and sprop, and entry capability forged from 24-bit + * computed from obj and shape, and entry capability forged from 24-bit * obj->shape(), 4-bit scopeIndex, and 4-bit protoIndex. 
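 * (For illustration only: given the 24/4/4 split above, the capability is
 * packed along the lines of
 *
 *     vcap = (obj->shape() << PCVCAP_TAGBITS) | (scopeIndex << 4) | protoIndex
 *
 * with PCVCAP_TAGBITS == 8, which is why a shape value at or above
 * SHAPE_OVERFLOW_BIT can never be cached.)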
* * Return the filled cache entry or JS_NO_PROP_CACHE_FILL if caching was @@ -258,7 +269,7 @@ class PropertyCache */ JS_REQUIRES_STACK PropertyCacheEntry *fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoIndex, JSObject *pobj, - JSScopeProperty *sprop, JSBool adding = false); + const js::Shape *shape, JSBool adding = false); void purge(JSContext *cx); void purgeForScript(JSScript *script); diff --git a/js/src/jspropertycacheinlines.h b/js/src/jspropertycacheinlines.h index 31fdb1a31b56..2e1a64d51698 100644 --- a/js/src/jspropertycacheinlines.h +++ b/js/src/jspropertycacheinlines.h @@ -42,6 +42,7 @@ #ifndef jspropertycacheinlines_h___ #define jspropertycacheinlines_h___ +#include "jslock.h" #include "jspropertycache.h" #include "jsscope.h" @@ -66,7 +67,7 @@ PropertyCache::matchShape(JSContext *cx, JSObject *obj, uint32 shape) * * We must lock pobj on a hit in order to close races with threads that might * be deleting a property from its scope, or otherwise invalidating property - * caches (on all threads) by re-generating scope->shape. + * caches (on all threads) by re-generating JSObject::shape(). */ JS_ALWAYS_INLINE void PropertyCache::test(JSContext *cx, jsbytecode *pc, JSObject *&obj, @@ -74,7 +75,7 @@ PropertyCache::test(JSContext *cx, jsbytecode *pc, JSObject *&obj, { JS_ASSERT(this == &JS_PROPERTY_CACHE(cx)); - uint32 kshape = obj->map->shape; + uint32 kshape = obj->shape(); entry = &table[hash(pc, kshape)]; PCMETER(pctestentry = entry); PCMETER(tests++); @@ -104,14 +105,14 @@ JS_ALWAYS_INLINE bool PropertyCache::testForSet(JSContext *cx, jsbytecode *pc, JSObject *obj, PropertyCacheEntry **entryp, JSObject **obj2p, JSAtom **atomp) { - uint32 shape = obj->map->shape; + uint32 shape = obj->shape(); PropertyCacheEntry *entry = &table[hash(pc, shape)]; *entryp = entry; PCMETER(pctestentry = entry); PCMETER(tests++); PCMETER(settests++); JS_ASSERT(entry->kshape < SHAPE_OVERFLOW_BIT); - if (entry->kpc == pc && entry->kshape == shape && matchShape(cx, obj, shape)) + if (entry->kpc == pc && entry->kshape == shape && CX_OWNS_OBJECT_TITLE(cx, obj)) return true; #ifdef DEBUG @@ -129,12 +130,12 @@ PropertyCache::testForSet(JSContext *cx, jsbytecode *pc, JSObject *obj, } JS_ALWAYS_INLINE bool -PropertyCache::testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj, JSScope *scope, - JSScopeProperty **spropp, PropertyCacheEntry **entryp) +PropertyCache::testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj, + const js::Shape **shapep, PropertyCacheEntry **entryp) { - JS_ASSERT(scope->object == obj); - JS_ASSERT(!scope->sealed()); - uint32 kshape = scope->shape; + JS_ASSERT(obj->freeslot >= JSSLOT_FREE(obj->getClass())); + JS_ASSERT(!obj->sealed()); + uint32 kshape = obj->shape(); PropertyCacheEntry *entry = &table[hash(pc, kshape)]; *entryp = entry; PCMETER(pctestentry = entry); @@ -148,8 +149,8 @@ PropertyCache::testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj, JSScope PCMETER(pchits++); PCMETER(inipchits++); JS_ASSERT(entry->vcapTag() == 0); - *spropp = entry->vword.toSprop(); - JS_ASSERT((*spropp)->writable()); + *shapep = entry->vword.toShape(); + JS_ASSERT((*shapep)->writable()); return true; } return false; diff --git a/js/src/jspropertytree.cpp b/js/src/jspropertytree.cpp index adea4b874408..16476da9aa94 100644 --- a/js/src/jspropertytree.cpp +++ b/js/src/jspropertytree.cpp @@ -37,9 +37,10 @@ * * ***** END LICENSE BLOCK ***** */ +#include + #include "jstypes.h" #include "jsarena.h" -#include "jsdhash.h" #include "jsprf.h" #include "jsapi.h" #include "jscntxt.h" @@ 
-47,104 +48,34 @@ #include "jspropertytree.h" #include "jsscope.h" -#include "jsnum.h" #include "jsobjinlines.h" #include "jsscopeinlines.h" using namespace js; -struct PropertyRootKey +inline HashNumber +ShapeHasher::hash(const Lookup l) { - const JSScopeProperty *firstProp; - uint32 emptyShape; - - PropertyRootKey(const JSScopeProperty *child, uint32 shape) - : firstProp(child), emptyShape(shape) {} - - static JSDHashNumber hash(JSDHashTable *table, const void *key) { - const PropertyRootKey *rkey = (const PropertyRootKey *)key; - - return rkey->firstProp->hash() ^ rkey->emptyShape; - } -}; - -struct PropertyRootEntry : public JSDHashEntryHdr -{ - JSScopeProperty *firstProp; - uint32 emptyShape; - uint32 newEmptyShape; - - static JSBool match(JSDHashTable *table, const JSDHashEntryHdr *hdr, const void *key) { - const PropertyRootEntry *rent = (const PropertyRootEntry *)hdr; - const PropertyRootKey *rkey = (const PropertyRootKey *)key; - - return rent->firstProp->matches(rkey->firstProp) && - rent->emptyShape == rkey->emptyShape; - } -}; - -static const JSDHashTableOps PropertyRootHashOps = { - JS_DHashAllocTable, - JS_DHashFreeTable, - PropertyRootKey::hash, - PropertyRootEntry::match, - JS_DHashMoveEntryStub, - JS_DHashClearEntryStub, - JS_DHashFinalizeStub, - NULL -}; - -static JSDHashNumber -HashScopeProperty(JSDHashTable *table, const void *key) -{ - const JSScopeProperty *sprop = (const JSScopeProperty *)key; - return sprop->hash(); + return l->hash(); } -static JSBool -MatchScopeProperty(JSDHashTable *table, - const JSDHashEntryHdr *hdr, - const void *key) +inline bool +ShapeHasher::match(const Key k, const Lookup l) { - const JSPropertyTreeEntry *entry = (const JSPropertyTreeEntry *)hdr; - const JSScopeProperty *sprop = entry->child; - const JSScopeProperty *kprop = (const JSScopeProperty *)key; - - return sprop->matches(kprop); + return l->matches(k); } -static const JSDHashTableOps PropertyTreeHashOps = { - JS_DHashAllocTable, - JS_DHashFreeTable, - HashScopeProperty, - MatchScopeProperty, - JS_DHashMoveEntryStub, - JS_DHashClearEntryStub, - JS_DHashFinalizeStub, - NULL -}; - bool PropertyTree::init() { - if (!JS_DHashTableInit(&hash, &PropertyRootHashOps, NULL, - sizeof(PropertyRootEntry), JS_DHASH_MIN_SIZE)) { - hash.ops = NULL; - return false; - } JS_InitArenaPool(&arenaPool, "properties", - 256 * sizeof(JSScopeProperty), sizeof(void *), NULL); - emptyShapeChanges = 0; + 256 * sizeof(Shape), sizeof(void *), NULL); return true; } void PropertyTree::finish() { - if (hash.ops) { - JS_DHashTableFinish(&hash); - hash.ops = NULL; - } JS_FinishArenaPool(&arenaPool); } @@ -152,20 +83,19 @@ PropertyTree::finish() * NB: Called with cx->runtime->gcLock held if gcLocked is true. * On failure, return null after unlocking the GC and reporting out of memory. 
*/ -JSScopeProperty * -PropertyTree::newScopeProperty(JSContext *cx, bool gcLocked) +Shape * +PropertyTree::newShape(JSContext *cx, bool gcLocked) { - JSScopeProperty *sprop; + Shape *shape; if (!gcLocked) JS_LOCK_GC(cx->runtime); - sprop = freeList; - if (sprop) { - sprop->removeFree(); + shape = freeList; + if (shape) { + shape->removeFree(); } else { - JS_ARENA_ALLOCATE_CAST(sprop, JSScopeProperty *, &arenaPool, - sizeof(JSScopeProperty)); - if (!sprop) { + JS_ARENA_ALLOCATE_CAST(shape, Shape *, &arenaPool, sizeof(Shape)); + if (!shape) { JS_UNLOCK_GC(cx->runtime); JS_ReportOutOfMemory(cx); return NULL; @@ -176,52 +106,34 @@ PropertyTree::newScopeProperty(JSContext *cx, bool gcLocked) JS_RUNTIME_METER(cx->runtime, livePropTreeNodes); JS_RUNTIME_METER(cx->runtime, totalPropTreeNodes); - return sprop; + return shape; } -#define CHUNKY_KIDS_TAG ((jsuword)1) -#define KIDS_IS_CHUNKY(kids) ((jsuword)(kids) & CHUNKY_KIDS_TAG) -#define KIDS_TO_CHUNK(kids) ((PropTreeKidsChunk *) \ - ((jsuword)(kids) & ~CHUNKY_KIDS_TAG)) -#define CHUNK_TO_KIDS(chunk) ((JSScopeProperty *) \ - ((jsuword)(chunk) | CHUNKY_KIDS_TAG)) -#define MAX_KIDS_PER_CHUNK 10 -#define CHUNK_HASH_THRESHOLD 30 - -struct PropTreeKidsChunk { - JSScopeProperty *kids[MAX_KIDS_PER_CHUNK]; - JSDHashTable *table; - PropTreeKidsChunk *next; -}; - /* * NB: Called with cx->runtime->gcLock held, always. * On failure, return null after unlocking the GC and reporting out of memory. */ -static PropTreeKidsChunk * -NewPropTreeKidsChunk(JSContext *cx) +KidsChunk * +KidsChunk::create(JSContext *cx) { - PropTreeKidsChunk *chunk; + KidsChunk *chunk; - chunk = (PropTreeKidsChunk *) js_calloc(sizeof *chunk); + chunk = (KidsChunk *) js_calloc(sizeof *chunk); if (!chunk) { JS_UNLOCK_GC(cx->runtime); JS_ReportOutOfMemory(cx); return NULL; } - JS_ASSERT(((jsuword)chunk & CHUNKY_KIDS_TAG) == 0); JS_RUNTIME_METER(cx->runtime, propTreeKidsChunks); return chunk; } -static PropTreeKidsChunk * -DestroyPropTreeKidsChunk(JSContext *cx, PropTreeKidsChunk *chunk) +KidsChunk * +KidsChunk::destroy(JSContext *cx, KidsChunk *chunk) { JS_RUNTIME_UNMETER(cx->runtime, propTreeKidsChunks); - if (chunk->table) - JS_DHashTableDestroy(chunk->table); - PropTreeKidsChunk *nextChunk = chunk->next; + KidsChunk *nextChunk = chunk->next; js_free(chunk); return nextChunk; } @@ -231,213 +143,189 @@ DestroyPropTreeKidsChunk(JSContext *cx, PropTreeKidsChunk *chunk) * On failure, return null after unlocking the GC and reporting out of memory. */ bool -PropertyTree::insertChild(JSContext *cx, JSScopeProperty *parent, - JSScopeProperty *child) +PropertyTree::insertChild(JSContext *cx, Shape *parent, Shape *child) { - JS_ASSERT(parent); + JS_ASSERT(!parent->inDictionary()); JS_ASSERT(!child->parent); + JS_ASSERT(!child->inDictionary()); JS_ASSERT(!JSID_IS_VOID(parent->id)); JS_ASSERT(!JSID_IS_VOID(child->id)); - JSScopeProperty **childp = &parent->kids; - if (JSScopeProperty *kids = *childp) { - JSScopeProperty *sprop; - PropTreeKidsChunk *chunk; + child->parent = parent; - if (!KIDS_IS_CHUNKY(kids)) { - sprop = kids; - JS_ASSERT(sprop != child); - if (sprop->matches(child)) { - /* - * Duplicate child created while racing to getChild on the same - * node label. See PropertyTree::getChild, further below. 
- */ - JS_RUNTIME_METER(cx->runtime, duplicatePropTreeNodes); - } - chunk = NewPropTreeKidsChunk(cx); - if (!chunk) - return false; - parent->kids = CHUNK_TO_KIDS(chunk); - chunk->kids[0] = sprop; - childp = &chunk->kids[1]; - } else { - PropTreeKidsChunk **chunkp; - - chunk = KIDS_TO_CHUNK(kids); - if (JSDHashTable *table = chunk->table) { - JSPropertyTreeEntry *entry = (JSPropertyTreeEntry *) - JS_DHashTableOperate(table, child, JS_DHASH_ADD); - if (!entry) { - JS_UNLOCK_GC(cx->runtime); - JS_ReportOutOfMemory(cx); - return false; - } - if (!entry->child) { - entry->child = child; - while (chunk->next) - chunk = chunk->next; - for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) { - childp = &chunk->kids[i]; - sprop = *childp; - if (!sprop) - goto insert; - } - chunkp = &chunk->next; - goto new_chunk; - } - } - - do { - for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) { - childp = &chunk->kids[i]; - sprop = *childp; - if (!sprop) - goto insert; - - JS_ASSERT(sprop != child); - if (sprop->matches(child)) { - /* - * Duplicate child, see comment above. In this - * case, we must let the duplicate be inserted at - * this level in the tree, so we keep iterating, - * looking for an empty slot in which to insert. - */ - JS_ASSERT(sprop != child); - JS_RUNTIME_METER(cx->runtime, duplicatePropTreeNodes); - } - } - chunkp = &chunk->next; - } while ((chunk = *chunkp) != NULL); - - new_chunk: - chunk = NewPropTreeKidsChunk(cx); - if (!chunk) - return false; - *chunkp = chunk; - childp = &chunk->kids[0]; - } + KidsPointer *kidp = &parent->kids; + if (kidp->isNull()) { + kidp->setShape(child); + return true; } - insert: - *childp = child; - child->parent = parent; + Shape *shape; + + if (kidp->isShape()) { + shape = kidp->toShape(); + JS_ASSERT(shape != child); + if (shape->matches(child)) { + /* + * Duplicate child created while racing to getChild on the same + * node label. See PropertyTree::getChild, further below. + */ + JS_RUNTIME_METER(cx->runtime, duplicatePropTreeNodes); + } + + KidsChunk *chunk = KidsChunk::create(cx); + if (!chunk) + return false; + parent->kids.setChunk(chunk); + chunk->kids[0] = shape; + chunk->kids[1] = child; + return true; + } + + if (kidp->isChunk()) { + KidsChunk **chunkp; + KidsChunk *chunk = kidp->toChunk(); + + do { + for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) { + shape = chunk->kids[i]; + if (!shape) { + chunk->kids[i] = child; + return true; + } + + JS_ASSERT(shape != child); + if (shape->matches(child)) { + /* + * Duplicate child, see comment above. In this case, we + * must let the duplicate be inserted at this level in the + * tree, so we keep iterating, looking for an empty slot in + * which to insert. + */ + JS_ASSERT(shape != child); + JS_RUNTIME_METER(cx->runtime, duplicatePropTreeNodes); + } + } + chunkp = &chunk->next; + } while ((chunk = *chunkp) != NULL); + + chunk = KidsChunk::create(cx); + if (!chunk) + return false; + *chunkp = chunk; + chunk->kids[0] = child; + return true; + } + + KidsHash *hash = kidp->toHash(); + KidsHash::AddPtr addPtr = hash->lookupForAdd(child); + if (!addPtr) { + if (!hash->add(addPtr, child)) + return false; + } else { + // FIXME ignore duplicate child case here, going thread-local soon! + } return true; } /* NB: Called with cx->runtime->gcLock held. 
*/ void -PropertyTree::removeChild(JSContext *cx, JSScopeProperty *child) +PropertyTree::removeChild(JSContext *cx, Shape *child) { - uintN i, j; - JSPropertyTreeEntry *entry; + JS_ASSERT(!child->inDictionary()); - JSScopeProperty *parent = child->parent; + Shape *parent = child->parent; JS_ASSERT(parent); JS_ASSERT(!JSID_IS_VOID(parent->id)); - JSScopeProperty *kids = parent->kids; - if (!KIDS_IS_CHUNKY(kids)) { - JSScopeProperty *kid = kids; + KidsPointer *kidp = &parent->kids; + if (kidp->isShape()) { + Shape *kid = kidp->toShape(); if (kid == child) - parent->kids = NULL; + parent->kids.setNull(); return; } - PropTreeKidsChunk *list = KIDS_TO_CHUNK(kids); - PropTreeKidsChunk *chunk = list; - PropTreeKidsChunk **chunkp = &list; + if (kidp->isChunk()) { + KidsChunk *list = kidp->toChunk(); + KidsChunk *chunk = list; + KidsChunk **chunkp = &list; - JSDHashTable *table = chunk->table; - PropTreeKidsChunk *freeChunk = NULL; + do { + for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) { + if (chunk->kids[i] == child) { + KidsChunk *lastChunk = chunk; + + uintN j; + if (!lastChunk->next) { + j = i + 1; + } else { + j = 0; + do { + chunkp = &lastChunk->next; + lastChunk = *chunkp; + } while (lastChunk->next); + } + for (; j < MAX_KIDS_PER_CHUNK; j++) { + if (!lastChunk->kids[j]) + break; + } + --j; + + if (chunk != lastChunk || j > i) + chunk->kids[i] = lastChunk->kids[j]; + lastChunk->kids[j] = NULL; + if (j == 0) { + *chunkp = NULL; + if (!list) + parent->kids.setNull(); + KidsChunk::destroy(cx, lastChunk); + } + return; + } + } + + chunkp = &chunk->next; + } while ((chunk = *chunkp) != NULL); + return; + } + + kidp->toHash()->remove(child); +} + +static KidsHash * +HashChunks(KidsChunk *chunk, uintN n) +{ + void *mem = js_malloc(sizeof(KidsHash)); + if (!mem) + return NULL; + + KidsHash *hash = new (mem) KidsHash(); + if (!hash->init(n)) { + js_free(hash); + return NULL; + } do { - for (i = 0; i < MAX_KIDS_PER_CHUNK; i++) { - if (chunk->kids[i] == child) { - PropTreeKidsChunk *lastChunk = chunk; - if (!lastChunk->next) { - j = i + 1; - } else { - j = 0; - do { - chunkp = &lastChunk->next; - lastChunk = *chunkp; - } while (lastChunk->next); - } - for (; j < MAX_KIDS_PER_CHUNK; j++) { - if (!lastChunk->kids[j]) - break; - } - --j; - if (chunk != lastChunk || j > i) - chunk->kids[i] = lastChunk->kids[j]; - lastChunk->kids[j] = NULL; - if (j == 0) { - *chunkp = NULL; - if (!list) - parent->kids = NULL; - freeChunk = lastChunk; - } - goto out; + for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) { + Shape *shape = chunk->kids[i]; + if (!shape) + break; + KidsHash::AddPtr addPtr = hash->lookupForAdd(shape); + if (!addPtr) { + /* + * Infallible, we right-sized via hash->init(n) just above. + * Assert just in case jshashtable.h ever regresses. + */ + JS_ALWAYS_TRUE(hash->add(addPtr, shape)); + } else { + /* + * Duplicate child case, we don't handle this race, + * multi-threaded shapes are going away... 
+ */ } } - - chunkp = &chunk->next; - } while ((chunk = *chunkp) != NULL); - - out: - if (table) { - entry = (JSPropertyTreeEntry *) - JS_DHashTableOperate(table, child, JS_DHASH_LOOKUP); - - if (entry->child == child) - JS_DHashTableRawRemove(table, &entry->hdr); - } - if (freeChunk) - DestroyPropTreeKidsChunk(cx, freeChunk); -} - -void -PropertyTree::emptyShapeChange(uint32 oldEmptyShape, uint32 newEmptyShape) -{ - if (oldEmptyShape == newEmptyShape) - return; - - PropertyRootEntry *rent = (PropertyRootEntry *) hash.entryStore; - PropertyRootEntry *rend = rent + JS_DHASH_TABLE_SIZE(&hash); - - while (rent < rend) { - if (rent->emptyShape == oldEmptyShape) - rent->newEmptyShape = newEmptyShape; - rent++; - } - - ++emptyShapeChanges; -} - -static JSDHashTable * -HashChunks(PropTreeKidsChunk *chunk, uintN n) -{ - JSDHashTable *table; - uintN i; - JSScopeProperty *sprop; - JSPropertyTreeEntry *entry; - - table = JS_NewDHashTable(&PropertyTreeHashOps, NULL, - sizeof(JSPropertyTreeEntry), - JS_DHASH_DEFAULT_CAPACITY(n + 1)); - if (!table) - return NULL; - do { - for (i = 0; i < MAX_KIDS_PER_CHUNK; i++) { - sprop = chunk->kids[i]; - if (!sprop) - break; - entry = (JSPropertyTreeEntry *) - JS_DHashTableOperate(table, sprop, JS_DHASH_ADD); - entry->child = sprop; - } } while ((chunk = chunk->next) != NULL); - return table; + return hash; } /* @@ -448,175 +336,111 @@ HashChunks(PropTreeKidsChunk *chunk, uintN n) * We use cx->runtime->gcLock, not ...->rtLock, to avoid nesting the former * inside the latter in js_GenerateShape below. */ -JSScopeProperty * -PropertyTree::getChild(JSContext *cx, JSScopeProperty *parent, uint32 shape, - const JSScopeProperty &child) +Shape * +PropertyTree::getChild(JSContext *cx, Shape *parent, const Shape &child) { - PropertyRootEntry *rent; - JSScopeProperty *sprop; + Shape *shape; - if (!parent) { - PropertyRootKey rkey(&child, shape); + JS_ASSERT(parent); + JS_ASSERT(!JSID_IS_VOID(parent->id)); - JS_LOCK_GC(cx->runtime); - rent = (PropertyRootEntry *) JS_DHashTableOperate(&hash, &rkey, JS_DHASH_ADD); - if (!rent) { - JS_UNLOCK_GC(cx->runtime); - JS_ReportOutOfMemory(cx); - return NULL; - } + /* + * Because chunks are appended at the end and never deleted except by + * the GC, we can search without taking the runtime's GC lock. We may + * miss a matching shape added by another thread, and make a duplicate + * one, but that is an unlikely, therefore small, cost. The property + * tree has extremely low fan-out below its root in popular embeddings + * with real-world workloads. + * + * Patterns such as defining closures that capture a constructor's + * environment as getters or setters on the new object that is passed + * in as |this| can significantly increase fan-out below the property + * tree root -- see bug 335700 for details. + */ + KidsPointer *kidp = &parent->kids; + if (!kidp->isNull()) { + if (kidp->isShape()) { + shape = kidp->toShape(); + if (shape->matches(&child)) + return shape; + } else if (kidp->isChunk()) { + KidsChunk *chunk = kidp->toChunk(); - sprop = rent->firstProp; - if (sprop) - goto out; - } else { - JS_ASSERT(!JSID_IS_VOID(parent->id)); + uintN n = 0; + do { + for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) { + shape = chunk->kids[i]; + if (!shape) { + n += i; + if (n >= CHUNK_HASH_THRESHOLD) { + /* + * kidp->isChunk() was true, but if we're racing it + * may not be by this point. FIXME: thread "safety" + * is for the birds! 
+ */ + if (!kidp->isHash()) { + chunk = kidp->toChunk(); - /* - * Because chunks are appended at the end and never deleted except by - * the GC, we can search without taking the runtime's GC lock. We may - * miss a matching sprop added by another thread, and make a duplicate - * one, but that is an unlikely, therefore small, cost. The property - * tree has extremely low fan-out below its root in popular embeddings - * with real-world workloads. - * - * Patterns such as defining closures that capture a constructor's - * environment as getters or setters on the new object that is passed - * in as |this| can significantly increase fan-out below the property - * tree root -- see bug 335700 for details. - */ - rent = NULL; - sprop = parent->kids; - if (sprop) { - if (!KIDS_IS_CHUNKY(sprop)) { - if (sprop->matches(&child)) - return sprop; - } else { - PropTreeKidsChunk *chunk = KIDS_TO_CHUNK(sprop); - - if (JSDHashTable *table = chunk->table) { - JS_LOCK_GC(cx->runtime); - JSPropertyTreeEntry *entry = (JSPropertyTreeEntry *) - JS_DHashTableOperate(table, &child, JS_DHASH_LOOKUP); - sprop = entry->child; - if (sprop) - goto out; - goto locked_not_found; - } - - uintN n = 0; - do { - for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) { - sprop = chunk->kids[i]; - if (!sprop) { - n += i; - if (n >= CHUNK_HASH_THRESHOLD) { - chunk = KIDS_TO_CHUNK(parent->kids); - if (!chunk->table) { - JSDHashTable *table = HashChunks(chunk, n); - if (!table) { - JS_ReportOutOfMemory(cx); - return NULL; - } - - JS_LOCK_GC(cx->runtime); - if (chunk->table) - JS_DHashTableDestroy(table); - else - chunk->table = table; - goto locked_not_found; + KidsHash *hash = HashChunks(chunk, n); + if (!hash) { + JS_ReportOutOfMemory(cx); + return NULL; } + + JS_LOCK_GC(cx->runtime); + if (kidp->isHash()) { + hash->KidsHash::~KidsHash(); + js_free(hash); + } else { + // FIXME unsafe race with kidp->is/toChunk() above. + // But this is all going single-threaded soon... 
+ while (chunk) + chunk = KidsChunk::destroy(cx, chunk); + kidp->setHash(hash); + } + goto locked_not_found; } - goto not_found; } - - if (sprop->matches(&child)) - return sprop; + goto not_found; } - n += MAX_KIDS_PER_CHUNK; - } while ((chunk = chunk->next) != NULL); - } - } - not_found: - JS_LOCK_GC(cx->runtime); + if (shape->matches(&child)) + return shape; + } + n += MAX_KIDS_PER_CHUNK; + } while ((chunk = chunk->next) != NULL); + } else { + JS_LOCK_GC(cx->runtime); + shape = *kidp->toHash()->lookup(&child); + if (shape) + goto out; + goto locked_not_found; + } } + not_found: + JS_LOCK_GC(cx->runtime); + locked_not_found: - sprop = newScopeProperty(cx, true); - if (!sprop) + shape = newShape(cx, true); + if (!shape) return NULL; - new (sprop) JSScopeProperty(child.id, child.rawGetter, child.rawSetter, child.slot, - child.attrs, child.flags, child.shortid); - sprop->parent = sprop->kids = NULL; - sprop->shape = js_GenerateShape(cx, true); + new (shape) Shape(child.id, child.rawGetter, child.rawSetter, child.slot, child.attrs, + child.flags, child.shortid); + shape->shape = js_GenerateShape(cx, true); - if (!parent) { - rent->firstProp = sprop; - rent->emptyShape = shape; - rent->newEmptyShape = 0; - } else { - if (!PropertyTree::insertChild(cx, parent, sprop)) - return NULL; - } + if (!insertChild(cx, parent, shape)) + return NULL; out: JS_UNLOCK_GC(cx->runtime); - return sprop; + return shape; } #ifdef DEBUG - -static void -MeterKidCount(JSBasicStats *bs, uintN nkids) -{ - JS_BASIC_STATS_ACCUM(bs, nkids); - bs->hist[JS_MIN(nkids, 10)]++; -} - -static void -MeterPropertyTree(JSBasicStats *bs, JSScopeProperty *node) -{ - uintN i, nkids; - JSScopeProperty *kids, *kid; - PropTreeKidsChunk *chunk; - - nkids = 0; - kids = node->kids; - if (kids) { - if (KIDS_IS_CHUNKY(kids)) { - for (chunk = KIDS_TO_CHUNK(kids); chunk; chunk = chunk->next) { - for (i = 0; i < MAX_KIDS_PER_CHUNK; i++) { - kid = chunk->kids[i]; - if (!kid) - break; - MeterPropertyTree(bs, kid); - nkids++; - } - } - } else { - MeterPropertyTree(bs, kids); - nkids = 1; - } - } - - MeterKidCount(bs, nkids); -} - -static JSDHashOperator -js_MeterPropertyTree(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number, - void *arg) -{ - PropertyRootEntry *rent = (PropertyRootEntry *)hdr; - JSBasicStats *bs = (JSBasicStats *)arg; - - MeterPropertyTree(bs, rent->firstProp); - return JS_DHASH_NEXT; -} - void -JSScopeProperty::dump(JSContext *cx, FILE *fp) +Shape::dump(JSContext *cx, FILE *fp) const { JS_ASSERT(!JSID_IS_VOID(id)); @@ -672,20 +496,72 @@ JSScopeProperty::dump(JSContext *cx, FILE *fp) fprintf(fp, "shortid %d\n", shortid); } +#endif + +#ifdef DEBUG + +static void +MeterKidCount(JSBasicStats *bs, uintN nkids) +{ + JS_BASIC_STATS_ACCUM(bs, nkids); +} void -JSScopeProperty::dumpSubtree(JSContext *cx, int level, FILE *fp) +js::PropertyTree::meter(JSBasicStats *bs, Shape *node) { - fprintf(fp, "%*sid ", level, ""); - dump(cx, fp); + uintN nkids = 0; + const KidsPointer &kids = node->kids; + if (!kids.isNull()) { + if (kids.isShape()) { + meter(bs, kids.toShape()); + nkids = 1; + } else if (kids.isChunk()) { + for (KidsChunk *chunk = kids.toChunk(); chunk; chunk = chunk->next) { + for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) { + Shape *kid = chunk->kids[i]; + if (!kid) + break; + meter(bs, kid); + nkids++; + } + } + } else { + const KidsHash &hash = *kids.toHash(); + for (KidsHash::Range range = hash.all(); !range.empty(); range.popFront()) { + Shape *kid = range.front(); - if (kids) { + meter(bs, kid); + nkids++; + } + } + } + + 
MeterKidCount(bs, nkids); +} + +void +Shape::dumpSubtree(JSContext *cx, int level, FILE *fp) const +{ + if (!parent) { + JS_ASSERT(level == 0); + JS_ASSERT(JSID_IS_EMPTY(id)); + fprintf(fp, "class %s emptyShape %u\n", clasp->name, shape); + } else { + fprintf(fp, "%*sid ", level, ""); + dump(cx, fp); + } + + if (!kids.isNull()) { ++level; - if (KIDS_IS_CHUNKY(kids)) { - PropTreeKidsChunk *chunk = KIDS_TO_CHUNK(kids); + if (kids.isShape()) { + Shape *kid = kids.toShape(); + JS_ASSERT(kid->parent == this); + kid->dumpSubtree(cx, level, fp); + } else if (kids.isChunk()) { + KidsChunk *chunk = kids.toChunk(); do { for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) { - JSScopeProperty *kid = chunk->kids[i]; + Shape *kid = chunk->kids[i]; if (!kid) break; JS_ASSERT(kid->parent == this); @@ -693,230 +569,200 @@ JSScopeProperty::dumpSubtree(JSContext *cx, int level, FILE *fp) } } while ((chunk = chunk->next) != NULL); } else { - JSScopeProperty *kid = kids; - JS_ASSERT(kid->parent == this); - kid->dumpSubtree(cx, level, fp); + const KidsHash &hash = *kids.toHash(); + for (KidsHash::Range range = hash.all(); !range.empty(); range.popFront()) { + Shape *kid = range.front(); + + JS_ASSERT(kid->parent == this); + kid->dumpSubtree(cx, level, fp); + } } } } #endif /* DEBUG */ -static void -OrphanNodeKids(JSContext *cx, JSScopeProperty *sprop) +JS_ALWAYS_INLINE void +js::PropertyTree::orphanKids(JSContext *cx, Shape *shape) { - JSScopeProperty *kids = sprop->kids; + KidsPointer *kidp = &shape->kids; - JS_ASSERT(kids); - sprop->kids = NULL; + JS_ASSERT(!kidp->isNull()); /* - * The grandparent must have either no kids or (still, after the - * removeChild call above) chunky kids. + * Note that JS_PROPERTY_TREE(cx).removeChild(cx, shape) precedes the call + * to orphanKids in sweepShapes, below. Therefore the grandparent must have + * either no kids left, or else space in chunks or a hash for more than one + * kid. 
*/ - JS_ASSERT(!sprop->parent || !sprop->parent->kids || - KIDS_IS_CHUNKY(sprop->parent->kids)); + JS_ASSERT_IF(shape->parent, !shape->parent->kids.isShape()); - if (KIDS_IS_CHUNKY(kids)) { - PropTreeKidsChunk *chunk = KIDS_TO_CHUNK(kids); + if (kidp->isShape()) { + Shape *kid = kidp->toShape(); + + if (!JSID_IS_VOID(kid->id)) { + JS_ASSERT(kid->parent == shape); + kid->parent = NULL; + } + } else if (kidp->isChunk()) { + KidsChunk *chunk = kidp->toChunk(); do { for (uintN i = 0; i < MAX_KIDS_PER_CHUNK; i++) { - JSScopeProperty *kid = chunk->kids[i]; + Shape *kid = chunk->kids[i]; if (!kid) break; if (!JSID_IS_VOID(kid->id)) { - JS_ASSERT(kid->parent == sprop); + JS_ASSERT(kid->parent == shape); kid->parent = NULL; } } - } while ((chunk = DestroyPropTreeKidsChunk(cx, chunk)) != NULL); + } while ((chunk = KidsChunk::destroy(cx, chunk)) != NULL); } else { - JSScopeProperty *kid = kids; + KidsHash *hash = kidp->toHash(); - if (!JSID_IS_VOID(kid->id)) { - JS_ASSERT(kid->parent == sprop); - kid->parent = NULL; + for (KidsHash::Range range = hash->all(); !range.empty(); range.popFront()) { + Shape *kid = range.front(); + if (!JSID_IS_VOID(kid->id)) { + JS_ASSERT(kid->parent == shape); + kid->parent = NULL; + } } - } -} -JSDHashOperator -js::RemoveNodeIfDead(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number, void *arg) -{ - PropertyRootEntry *rent = (PropertyRootEntry *)hdr; - JSScopeProperty *sprop = rent->firstProp; - - JS_ASSERT(!sprop->parent); - if (!sprop->marked()) { - if (sprop->kids) - OrphanNodeKids((JSContext *)arg, sprop); - return JS_DHASH_REMOVE; + hash->KidsHash::~KidsHash(); + js_free(hash); } - return JS_DHASH_NEXT; + + kidp->setNull(); } void -js::SweepScopeProperties(JSContext *cx) +js::PropertyTree::sweepShapes(JSContext *cx) { #ifdef DEBUG JSBasicStats bs; uint32 livePropCapacity = 0, totalLiveCount = 0; static FILE *logfp; if (!logfp) { - if (const char *filename = getenv("JS_PROPTREE_STATFILE")) + if (const char *filename = cx->runtime->propTreeStatFilename) logfp = fopen(filename, "w"); } if (logfp) { JS_BASIC_STATS_INIT(&bs); - MeterKidCount(&bs, JS_PROPERTY_TREE(cx).hash.entryCount); - JS_DHashTableEnumerate(&JS_PROPERTY_TREE(cx).hash, js_MeterPropertyTree, &bs); - double props, nodes, mean, sigma; + uint32 empties; + { + typedef JSRuntime::EmptyShapeSet HS; - props = cx->runtime->liveScopePropsPreSweep; - nodes = cx->runtime->livePropTreeNodes; - JS_ASSERT(nodes == bs.sum); - mean = JS_MeanAndStdDevBS(&bs, &sigma); + HS &h = cx->runtime->emptyShapes; + empties = h.count(); + MeterKidCount(&bs, empties); + for (HS::Range r = h.all(); !r.empty(); r.popFront()) + meter(&bs, r.front()); + } + + double props = cx->runtime->liveObjectPropsPreSweep; + double nodes = cx->runtime->livePropTreeNodes; + double dicts = cx->runtime->liveDictModeNodes; + + /* Empty scope nodes are never hashed, so subtract them from nodes. */ + JS_ASSERT(nodes - dicts == bs.sum); + nodes -= empties; + + double sigma; + double mean = JS_MeanAndStdDevBS(&bs, &sigma); fprintf(logfp, - "props %g nodes %g beta %g meankids %g sigma %g max %u\n", - props, nodes, nodes / props, mean, sigma, bs.max); + "props %g nodes %g (dicts %g) beta %g meankids %g sigma %g max %u\n", + props, nodes, dicts, nodes / props, mean, sigma, bs.max); JS_DumpHistogram(&bs, logfp); } #endif /* - * First, remove unmarked nodes from JS_PROPERTY_TREE(cx).hash. 
This table - * requires special handling up front, rather than removal during regular - * heap sweeping, because we cannot find an entry in it from the firstProp - * node pointer alone -- we would need the emptyShape too. - * - * Rather than encode emptyShape in firstProp somehow (a tagged overlay on - * parent, perhaps, but that would slow down JSScope::search and other hot - * paths), we simply orphan kids of garbage nodes in the property tree's - * root-ply before sweeping the node heap. - */ - JS_DHashTableEnumerate(&JS_PROPERTY_TREE(cx).hash, RemoveNodeIfDead, cx); - - /* - * Second, if any empty scopes have been reshaped, rehash the root ply of - * this tree using the new empty shape numbers as key-halves. If we run out - * of memory trying to allocate the new hash, disable the property cache by - * setting SHAPE_OVERFLOW_BIT in rt->shapeGen. The next GC will therefore - * renumber shapes as well as (we hope, eventually) free sufficient memory - * for a successful re-run through this code. - */ - if (JS_PROPERTY_TREE(cx).emptyShapeChanges) { - JSDHashTable &oldHash = JS_PROPERTY_TREE(cx).hash; - uint32 tableSize = JS_DHASH_TABLE_SIZE(&oldHash); - JSDHashTable newHash; - - if (!JS_DHashTableInit(&newHash, &PropertyRootHashOps, NULL, - sizeof(PropertyRootEntry), tableSize)) { - cx->runtime->shapeGen |= SHAPE_OVERFLOW_BIT; - } else { - PropertyRootEntry *rent = (PropertyRootEntry *) oldHash.entryStore; - PropertyRootEntry *rend = rent + tableSize; - - while (rent < rend) { - if (rent->firstProp) { - uint32 emptyShape = rent->newEmptyShape; - if (emptyShape == 0) - emptyShape = rent->emptyShape; - - PropertyRootKey rkey(rent->firstProp, emptyShape); - PropertyRootEntry *newRent = - (PropertyRootEntry *) JS_DHashTableOperate(&newHash, &rkey, JS_DHASH_ADD); - - newRent->firstProp = rent->firstProp; - newRent->emptyShape = emptyShape; - newRent->newEmptyShape = 0; - } - rent++; - } - - JS_ASSERT(newHash.generation == 0); - JS_DHashTableFinish(&oldHash); - JS_PROPERTY_TREE(cx).hash = newHash; - JS_PROPERTY_TREE(cx).emptyShapeChanges = 0; - } - } - - /* - * Third, sweep the heap clean of all unmarked nodes. Here we will find - * nodes already GC'ed from the root ply, but we will avoid re-orphaning - * their kids, because the kids member will already be null. + * Sweep the heap clean of all unmarked nodes. Here we will find nodes + * already GC'ed from the root ply, but we will avoid re-orphaning their + * kids, because the kids member will already be null. */ JSArena **ap = &JS_PROPERTY_TREE(cx).arenaPool.first.next; while (JSArena *a = *ap) { - JSScopeProperty *limit = (JSScopeProperty *) a->avail; + Shape *limit = (Shape *) a->avail; uintN liveCount = 0; - for (JSScopeProperty *sprop = (JSScopeProperty *) a->base; sprop < limit; sprop++) { - /* If the id is null, sprop is already on the freelist. */ - if (JSID_IS_VOID(sprop->id)) + for (Shape *shape = (Shape *) a->base; shape < limit; shape++) { + /* If the id is null, shape is already on the freelist. */ + if (JSID_IS_VOID(shape->id)) continue; /* - * If the mark bit is set, sprop is alive, so clear the mark bit + * If the mark bit is set, shape is alive, so clear the mark bit * and continue the while loop. * - * Regenerate sprop->shape if it hasn't already been refreshed + * Regenerate shape->shape if it hasn't already been refreshed * during the mark phase, when live scopes' lastProp members are * followed to update both scope->shape and lastProp->shape. 
*/ - if (sprop->marked()) { - sprop->clearMark(); + if (shape->marked()) { + shape->clearMark(); if (cx->runtime->gcRegenShapes) { - if (sprop->hasRegenFlag()) - sprop->clearRegenFlag(); + if (shape->hasRegenFlag()) + shape->clearRegenFlag(); else - sprop->shape = js_RegenerateShapeForGC(cx); + shape->shape = js_RegenerateShapeForGC(cx); } liveCount++; continue; } - if (!sprop->inDictionary()) { +#ifdef DEBUG + if ((shape->flags & Shape::SHARED_EMPTY) && + cx->runtime->meterEmptyShapes()) { + cx->runtime->emptyShapes.remove((EmptyShape *) shape); + } +#endif + + if (shape->inDictionary()) { + JS_RUNTIME_UNMETER(cx->runtime, liveDictModeNodes); + } else { /* - * Here, sprop is garbage to collect, but its parent might not - * be, so we may have to remove it from its parent's kids chunk - * list or kid singleton pointer set. + * Here, shape is garbage to collect, but its parent might not + * be, so we may have to remove it from its parent's kids hash, + * chunk list, or kid singleton pointer set. * * Without a separate mark-clearing pass, we can't tell whether - * sprop->parent is live at this point, so we must remove sprop - * if its parent member is non-null. A saving grace: if sprop's - * parent is dead and swept by this point, sprop->parent will + * shape->parent is live at this point, so we must remove shape + * if its parent member is non-null. A saving grace: if shape's + * parent is dead and swept by this point, shape->parent will * be null -- in the next paragraph, we null all of a property * tree node's kids' parent links when sweeping that node. */ - if (sprop->parent) - JS_PROPERTY_TREE(cx).removeChild(cx, sprop); + if (shape->parent) + JS_PROPERTY_TREE(cx).removeChild(cx, shape); - if (sprop->kids) - OrphanNodeKids(cx, sprop); + if (!shape->kids.isNull()) + orphanKids(cx, shape); } /* - * Note that JSScopeProperty::insertFree nulls sprop->id so we know - * that sprop is on the freelist. + * Note that Shape::insertFree nulls shape->id so we know that + * shape is on the freelist. */ - sprop->insertFree(JS_PROPERTY_TREE(cx).freeList); + shape->freeTable(cx); + shape->insertFree(&JS_PROPERTY_TREE(cx).freeList); JS_RUNTIME_UNMETER(cx->runtime, livePropTreeNodes); } /* If a contains no live properties, return it to the malloc heap. 
*/ if (liveCount == 0) { - for (JSScopeProperty *sprop = (JSScopeProperty *) a->base; sprop < limit; sprop++) - sprop->removeFree(); + for (Shape *shape = (Shape *) a->base; shape < limit; shape++) + shape->removeFree(); JS_ARENA_DESTROY(&JS_PROPERTY_TREE(cx).arenaPool, a, ap); } else { #ifdef DEBUG - livePropCapacity += limit - (JSScopeProperty *) a->base; + livePropCapacity += limit - (Shape *) a->base; totalLiveCount += liveCount; #endif ap = &a->next; @@ -938,38 +784,44 @@ js::SweepScopeProperties(JSContext *cx) fprintf(logfp, "Scope search stats:\n" - " searches: %6u\n" - " hits: %6u %5.2f%% of searches\n" - " misses: %6u %5.2f%%\n" - " hashes: %6u %5.2f%%\n" - " steps: %6u %5.2f%% %5.2f%% of hashes\n" - " stepHits: %6u %5.2f%% %5.2f%%\n" - " stepMisses: %6u %5.2f%% %5.2f%%\n" - " tableAllocFails %6u\n" - " toDictFails %6u\n" - " wrapWatchFails %6u\n" - " adds: %6u\n" - " addFails: %6u\n" - " puts: %6u\n" - " redundantPuts: %6u\n" - " putFails: %6u\n" - " changes: %6u\n" - " changeFails: %6u\n" - " compresses: %6u\n" - " grows: %6u\n" - " removes: %6u\n" - " removeFrees: %6u\n" - " uselessRemoves: %6u\n" - " shrinks: %6u\n", + " searches: %6u\n" + " hits: %6u %5.2f%% of searches\n" + " misses: %6u %5.2f%%\n" + " hashes: %6u %5.2f%%\n" + " hashHits: %6u %5.2f%% (%5.2f%% of hashes)\n" + " hashMisses: %6u %5.2f%% (%5.2f%%)\n" + " steps: %6u %5.2f%% (%5.2f%%)\n" + " stepHits: %6u %5.2f%% (%5.2f%%)\n" + " stepMisses: %6u %5.2f%% (%5.2f%%)\n" + " initSearches: %6u\n" + " changeSearches: %6u\n" + " tableAllocFails: %6u\n" + " toDictFails: %6u\n" + " wrapWatchFails: %6u\n" + " adds: %6u\n" + " addFails: %6u\n" + " puts: %6u\n" + " redundantPuts: %6u\n" + " putFails: %6u\n" + " changes: %6u\n" + " changeFails: %6u\n" + " compresses: %6u\n" + " grows: %6u\n" + " removes: %6u\n" + " removeFrees: %6u\n" + " uselessRemoves: %6u\n" + " shrinks: %6u\n", js_scope_stats.searches, js_scope_stats.hits, RATE(hits, searches), js_scope_stats.misses, RATE(misses, searches), js_scope_stats.hashes, RATE(hashes, searches), + js_scope_stats.hashHits, RATE(hashHits, searches), RATE(hashHits, hashes), + js_scope_stats.hashMisses, RATE(hashMisses, searches), RATE(hashMisses, hashes), js_scope_stats.steps, RATE(steps, searches), RATE(steps, hashes), - js_scope_stats.stepHits, - RATE(stepHits, searches), RATE(stepHits, hashes), - js_scope_stats.stepMisses, - RATE(stepMisses, searches), RATE(stepMisses, hashes), + js_scope_stats.stepHits, RATE(stepHits, searches), RATE(stepHits, hashes), + js_scope_stats.stepMisses, RATE(stepMisses, searches), RATE(stepMisses, hashes), + js_scope_stats.initSearches, + js_scope_stats.changeSearches, js_scope_stats.tableAllocFails, js_scope_stats.toDictFails, js_scope_stats.wrapWatchFails, @@ -992,22 +844,21 @@ js::SweepScopeProperties(JSContext *cx) fflush(logfp); } - if (const char *filename = getenv("JS_PROPTREE_DUMPFILE")) { + if (const char *filename = cx->runtime->propTreeDumpFilename) { char pathname[1024]; JS_snprintf(pathname, sizeof pathname, "%s.%lu", filename, (unsigned long)cx->runtime->gcNumber); FILE *dumpfp = fopen(pathname, "w"); if (dumpfp) { - PropertyRootEntry *rent = (PropertyRootEntry *) JS_PROPERTY_TREE(cx).hash.entryStore; - PropertyRootEntry *rend = rent + JS_DHASH_TABLE_SIZE(&JS_PROPERTY_TREE(cx).hash); + typedef JSRuntime::EmptyShapeSet HS; - while (rent < rend) { - if (rent->firstProp) { - fprintf(dumpfp, "emptyShape %u ", rent->emptyShape); - rent->firstProp->dumpSubtree(cx, 0, dumpfp); - } - rent++; + HS &h = cx->runtime->emptyShapes; + for (HS::Range r = 
h.all(); !r.empty(); r.popFront()) { + Shape *empty = r.front(); + empty->dumpSubtree(cx, 0, dumpfp); + putc('\n', dumpfp); } + fclose(dumpfp); } } diff --git a/js/src/jspropertytree.h b/js/src/jspropertytree.h index 640325e239d3..8703f9ee0227 100644 --- a/js/src/jspropertytree.h +++ b/js/src/jspropertytree.h @@ -41,40 +41,106 @@ #define jspropertytree_h___ #include "jsarena.h" -#include "jsdhash.h" +#include "jshashtable.h" #include "jsprvtd.h" -struct JSScope; - namespace js { -JSDHashOperator RemoveNodeIfDead(JSDHashTable *table, JSDHashEntryHdr *hdr, - uint32 number, void *arg); +enum { + MAX_KIDS_PER_CHUNK = 10U, + CHUNK_HASH_THRESHOLD = 30U +}; -void SweepScopeProperties(JSContext *cx); +struct KidsChunk { + js::Shape *kids[MAX_KIDS_PER_CHUNK]; + KidsChunk *next; + + static KidsChunk *create(JSContext *cx); + static KidsChunk *destroy(JSContext *cx, KidsChunk *chunk); +}; + +struct ShapeHasher { + typedef js::Shape *Key; + typedef const js::Shape *Lookup; + + static HashNumber hash(const Lookup l); + static bool match(Key k, Lookup l); +}; + +typedef HashSet KidsHash; + +class KidsPointer { + private: + enum { + SHAPE = 0, + CHUNK = 1, + HASH = 2, + TAG = 3 + }; + + jsuword w; + + public: + bool isNull() const { return !w; } + void setNull() { w = 0; } + + bool isShapeOrNull() const { return (w & TAG) == SHAPE; } + bool isShape() const { return (w & TAG) == SHAPE && !isNull(); } + js::Shape *toShape() const { + JS_ASSERT(isShape()); + return reinterpret_cast(w & ~jsuword(TAG)); + } + void setShape(js::Shape *shape) { + JS_ASSERT(shape); + JS_ASSERT((reinterpret_cast(shape) & TAG) == 0); + w = reinterpret_cast(shape) | SHAPE; + } + + bool isChunk() const { return (w & TAG) == CHUNK; } + KidsChunk *toChunk() const { + JS_ASSERT(isChunk()); + return reinterpret_cast(w & ~jsuword(TAG)); + } + void setChunk(KidsChunk *chunk) { + JS_ASSERT(chunk); + JS_ASSERT((reinterpret_cast(chunk) & TAG) == 0); + w = reinterpret_cast(chunk) | CHUNK; + } + + bool isHash() const { return (w & TAG) == HASH; } + KidsHash *toHash() const { + JS_ASSERT(isHash()); + return reinterpret_cast(w & ~jsuword(TAG)); + } + void setHash(KidsHash *hash) { + JS_ASSERT(hash); + JS_ASSERT((reinterpret_cast(hash) & TAG) == 0); + w = reinterpret_cast(hash) | HASH; + } +}; class PropertyTree { - friend struct ::JSScope; - friend void js::SweepScopeProperties(JSContext *cx); + friend struct ::JSFunction; - JSDHashTable hash; - JSScopeProperty *freeList; - JSArenaPool arenaPool; - uint32 emptyShapeChanges; + JSArenaPool arenaPool; + js::Shape *freeList; - bool insertChild(JSContext *cx, JSScopeProperty *parent, JSScopeProperty *child); - void removeChild(JSContext *cx, JSScopeProperty *child); - void emptyShapeChange(uint32 oldEmptyShape, uint32 newEmptyShape); + bool insertChild(JSContext *cx, js::Shape *parent, js::Shape *child); + void removeChild(JSContext *cx, js::Shape *child); public: bool init(); void finish(); - JSScopeProperty *newScopeProperty(JSContext *cx, bool gcLocked = false); + js::Shape *newShape(JSContext *cx, bool gcLocked = false); + js::Shape *getChild(JSContext *cx, js::Shape *parent, const js::Shape &child); - JSScopeProperty *getChild(JSContext *cx, JSScopeProperty *parent, uint32 shape, - const JSScopeProperty &child); + static void orphanKids(JSContext *cx, js::Shape *shape); + static void sweepShapes(JSContext *cx); +#ifdef DEBUG + static void meter(JSBasicStats *bs, js::Shape *node); +#endif }; } /* namespace js */ diff --git a/js/src/jsproxy.cpp b/js/src/jsproxy.cpp index 86643a8ff10a..e3c016c11ca7 
100644 --- a/js/src/jsproxy.cpp +++ b/js/src/jsproxy.cpp @@ -130,7 +130,7 @@ JSProxyHandler::get(JSContext *cx, JSObject *proxy, JSObject *receiver, jsid id, } if (desc.attrs & JSPROP_SHORTID) id = INT_TO_JSID(desc.shortid); - return callJSPropertyOp(cx, desc.getter, proxy, id, vp); + return CallJSPropertyOp(cx, desc.getter, proxy, id, vp); } bool @@ -149,7 +149,7 @@ JSProxyHandler::set(JSContext *cx, JSObject *proxy, JSObject *receiver, jsid id, } if (desc.attrs & JSPROP_SHORTID) id = INT_TO_JSID(desc.shortid); - return callJSPropertyOpSetter(cx, desc.setter, proxy, id, vp); + return CallJSPropertyOpSetter(cx, desc.setter, proxy, id, vp); } if (desc.attrs & JSPROP_READONLY) return true; @@ -166,7 +166,7 @@ JSProxyHandler::set(JSContext *cx, JSObject *proxy, JSObject *receiver, jsid id, } if (desc.attrs & JSPROP_SHORTID) id = INT_TO_JSID(desc.shortid); - return callJSPropertyOpSetter(cx, desc.setter, proxy, id, vp); + return CallJSPropertyOpSetter(cx, desc.setter, proxy, id, vp); } if (desc.attrs & JSPROP_READONLY) return true; @@ -1005,14 +1005,16 @@ NewProxyObject(JSContext *cx, JSProxyHandler *handler, const Value &priv, JSObje bool fun = call || construct; Class *clasp = fun ? &FunctionProxyClass : &ObjectProxyClass; JSObject *obj = NewNonFunction(cx, clasp, proto, parent); - if (!obj || (construct && !js_EnsureReservedSlots(cx, obj, 0))) + if (!obj || (construct && !obj->ensureInstanceReservedSlots(cx, 0))) return NULL; obj->setSlot(JSSLOT_PROXY_HANDLER, PrivateValue(handler)); obj->setSlot(JSSLOT_PROXY_PRIVATE, priv); if (fun) { obj->setSlot(JSSLOT_PROXY_CALL, call ? ObjectValue(*call) : UndefinedValue()); - if (construct) - obj->setSlot(JSSLOT_PROXY_CONSTRUCT, construct ? ObjectValue(*construct) : UndefinedValue()); + if (construct) { + obj->setSlot(JSSLOT_PROXY_CONSTRUCT, + construct ? ObjectValue(*construct) : UndefinedValue()); + } } return obj; } diff --git a/js/src/jsprvtd.h b/js/src/jsprvtd.h index cbfabbdaf075..eedb9908c73f 100644 --- a/js/src/jsprvtd.h +++ b/js/src/jsprvtd.h @@ -74,19 +74,6 @@ typedef uint8 jsbytecode; typedef uint8 jssrcnote; typedef uint32 jsatomid; -#ifdef __cplusplus - -/* Class and struct forward declarations in namespace js. */ -extern "C++" { -namespace js { -struct Parser; -struct Compiler; -class RegExp; -} -} - -#endif - /* Struct typedefs. 
*/ typedef struct JSArgumentFormatMap JSArgumentFormatMap; typedef struct JSCodeGenerator JSCodeGenerator; @@ -98,7 +85,6 @@ typedef struct JSObjectBox JSObjectBox; typedef struct JSParseNode JSParseNode; typedef struct JSProperty JSProperty; typedef struct JSSharpObjectMap JSSharpObjectMap; -typedef struct JSEmptyScope JSEmptyScope; typedef struct JSThread JSThread; typedef struct JSThreadData JSThreadData; typedef struct JSTreeContext JSTreeContext; @@ -113,9 +99,6 @@ typedef struct JSAtomState JSAtomState; typedef struct JSCodeSpec JSCodeSpec; typedef struct JSPrinter JSPrinter; typedef struct JSRegExpStatics JSRegExpStatics; -typedef struct JSScope JSScope; -typedef struct JSScopeOps JSScopeOps; -typedef struct JSScopeProperty JSScopeProperty; typedef struct JSStackHeader JSStackHeader; typedef struct JSSubString JSSubString; typedef struct JSNativeTraceInfo JSNativeTraceInfo; @@ -136,6 +119,8 @@ extern "C++" { namespace js { +struct ArgumentsData; + class RegExp; class RegExpStatics; class AutoStringRooter; @@ -147,6 +132,8 @@ struct TraceMonitor; class StackSpace; class StackSegment; +struct Compiler; +struct Parser; class TokenStream; struct Token; struct TokenPos; @@ -179,6 +166,8 @@ class DeflatedStringCache; class PropertyCache; struct PropertyCacheEntry; +struct Shape; +struct EmptyShape; } /* namespace js */ @@ -290,7 +279,7 @@ typedef struct JSDebugHooks { void *debugErrorHookData; } JSDebugHooks; -/* JSObjectOps function pointer typedefs. */ +/* js::ObjectOps function pointer typedefs. */ /* * Look for id in obj and its prototype chain, returning false on error or diff --git a/js/src/jspubtd.h b/js/src/jspubtd.h index 7c0bda6c6f5e..74ecfd25b39d 100644 --- a/js/src/jspubtd.h +++ b/js/src/jspubtd.h @@ -170,7 +170,7 @@ typedef class JSWrapper JSWrapper; typedef class JSCrossCompartmentWrapper JSCrossCompartmentWrapper; #endif -/* JSClass (and JSObjectOps where appropriate) function pointer typedefs. */ +/* JSClass (and js::ObjectOps where appropriate) function pointer typedefs. */ /* * Add, delete, get or set a property named by id in obj. Note the jsid id diff --git a/js/src/jsregexp.cpp b/js/src/jsregexp.cpp index 09b18d6e3755..d49a74d7ceb5 100644 --- a/js/src/jsregexp.cpp +++ b/js/src/jsregexp.cpp @@ -655,7 +655,7 @@ regexp_enumerate(JSContext *cx, JSObject *obj) js::Class js_RegExpClass = { js_RegExp_str, JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE | - JSCLASS_HAS_RESERVED_SLOTS(JSObject::REGEXP_FIXED_RESERVED_SLOTS) | + JSCLASS_HAS_RESERVED_SLOTS(JSObject::REGEXP_CLASS_RESERVED_SLOTS) | JSCLASS_MARK_IS_TRACE | JSCLASS_HAS_CACHED_PROTO(JSProto_RegExp), PropertyStub, /* addProperty */ PropertyStub, /* delProperty */ diff --git a/js/src/jsscope.cpp b/js/src/jsscope.cpp index 76b442e16008..6c45a9e4509b 100644 --- a/js/src/jsscope.cpp +++ b/js/src/jsscope.cpp @@ -91,75 +91,40 @@ js_GenerateShape(JSContext *cx, bool gcLocked) return shape; } -JSScope * -js_GetMutableScope(JSContext *cx, JSObject *obj) +bool +JSObject::ensureClassReservedSlotsForEmptyObject(JSContext *cx) { - JSScope *scope = obj->scope(); - JS_ASSERT(JS_IS_SCOPE_LOCKED(cx, scope)); - if (!scope->isSharedEmpty()) - return scope; - - JSScope *newscope = JSScope::create(cx, obj->getClass(), obj, scope->shape); - if (!newscope) - return NULL; - - /* The newly allocated scope is single-threaded and, as such, is locked. 
*/ - JS_ASSERT(CX_OWNS_SCOPE_TITLE(cx, newscope)); - JS_ASSERT(JS_IS_SCOPE_LOCKED(cx, newscope)); - obj->map = newscope; + JS_ASSERT(nativeEmpty()); /* - * Subtle dependency on objects that call js_EnsureReservedSlots either: - * (a) never escaping anywhere an ad-hoc property could be set on them; - * (b) having at least JSSLOT_FREE(obj->getClass()) >= JS_INITIAL_NSLOTS. - * Note that (b) depends on fine-tuning of JS_INITIAL_NSLOTS (5). + * Subtle rule: objects that call JSObject::ensureInstanceReservedSlots + * either must: * - * Block objects fall into (a); Argument, Call, and Function objects (flat - * closures only) fall into (b). All of this goes away soon (FIXME 558451). + * (a) never escape anywhere an ad-hoc property could be set on them; + * + * (b) have at least JSSLOT_FREE(this->clasp) >= JS_INITIAL_NSLOTS. + * + * Note that (b) depends on fine-tuning of JS_INITIAL_NSLOTS (3). + * + * Block objects are the only objects that fall into category (a). While + * Call objects cannot escape, they can grow ad-hoc properties via eval + * of a var declaration, but they have slots mapped by compiler-created + * shapes, and thus no problem predicting first ad-hoc property slot. + * + * (Note that Block and Call objects are the only native classes that are + * allowed to call ensureInstanceReservedSlots.) */ - JS_ASSERT(newscope->freeslot >= JSSLOT_START(obj->getClass()) && - newscope->freeslot <= JSSLOT_FREE(obj->getClass())); - newscope->freeslot = JSSLOT_FREE(obj->getClass()); - - uint32 nslots = obj->numSlots(); - if (newscope->freeslot > nslots && !obj->allocSlots(cx, newscope->freeslot)) { - newscope->destroy(cx); - obj->map = scope; - return NULL; + uint32 nfixed = JSSLOT_FREE(getClass()); + if (nfixed > freeslot) { + if (nfixed > numSlots() && !allocSlots(cx, nfixed)) + return false; + freeslot = nfixed; } - if (nslots > JS_INITIAL_NSLOTS && nslots > newscope->freeslot) - newscope->freeslot = nslots; -#ifdef DEBUG - if (newscope->freeslot < nslots) - obj->setSlot(newscope->freeslot, UndefinedValue()); -#endif - - JS_DROP_ALL_EMPTY_SCOPE_LOCKS(cx, scope); - static_cast(scope)->drop(cx); - return newscope; + return true; } -/* - * JSScope uses multiplicative hashing, _a la_ jsdhash.[ch], but specialized - * to minimize footprint. But if a scope has fewer than SCOPE_HASH_THRESHOLD - * entries, we use linear search and avoid allocating scope->table. - */ -#define SCOPE_HASH_THRESHOLD 6 -#define MIN_SCOPE_SIZE_LOG2 4 -#define MIN_SCOPE_SIZE JS_BIT(MIN_SCOPE_SIZE_LOG2) -#define SCOPE_TABLE_NBYTES(n) ((n) * sizeof(JSScopeProperty *)) - -void -JSScope::initMinimal(JSContext *cx, uint32 newShape) -{ - shape = newShape; - emptyScope = NULL; - hashShift = JS_DHASH_BITS - MIN_SCOPE_SIZE_LOG2; - entryCount = removedCount = 0; - table = NULL; - lastProp = NULL; -} +#define PROPERTY_TABLE_NBYTES(n) ((n) * sizeof(Shape *)) #ifdef DEBUG JS_FRIEND_DATA(JSScopeStats) js_scope_stats = {0}; @@ -170,15 +135,11 @@ JS_FRIEND_DATA(JSScopeStats) js_scope_stats = {0}; #endif bool -JSScope::createTable(JSContext *cx, bool report) +PropertyTable::init(JSContext *cx, Shape *lastProp) { int sizeLog2; - JSScopeProperty *sprop, **spp; - JS_ASSERT(!table); - JS_ASSERT(lastProp); - - if (entryCount > SCOPE_HASH_THRESHOLD) { + if (entryCount > HASH_THRESHOLD) { /* * Either we're creating a table for a large scope that was populated * via property cache hit logic under JSOP_INITPROP, JSOP_SETNAME, or @@ -187,67 +148,39 @@ JSScope::createTable(JSContext *cx, bool report) * current population. 
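
// A minimal, hypothetical sketch of the reserved-slot step performed by
// JSObject::ensureClassReservedSlotsForEmptyObject above: if the class reserves
// more fixed slots than the object's current freeslot, grow the slot array when
// needed and bump freeslot so ad-hoc properties land after the reserved range.
// ToySlots and its members stand in for the real object's slot storage.
#include <cstdint>
#include <vector>

struct ToySlots {
    std::vector<uint64_t> slots;   // boxed Values in the real engine
    uint32_t freeslot = 0;         // index of the first unreserved slot

    bool ensureClassReservedSlots(uint32_t nfixed) {
        if (nfixed > freeslot) {
            if (nfixed > slots.size())
                slots.resize(nfixed);   // allocSlots analogue
            freeslot = nfixed;
        }
        return true;
    }
};
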
*/ sizeLog2 = JS_CeilingLog2(2 * entryCount); - hashShift = JS_DHASH_BITS - sizeLog2; } else { - JS_ASSERT(hashShift == JS_DHASH_BITS - MIN_SCOPE_SIZE_LOG2); - sizeLog2 = MIN_SCOPE_SIZE_LOG2; + JS_ASSERT(hashShift == JS_DHASH_BITS - MIN_SIZE_LOG2); + sizeLog2 = MIN_SIZE_LOG2; } - table = (JSScopeProperty **) js_calloc(JS_BIT(sizeLog2) * sizeof(JSScopeProperty *)); - if (!table) { - if (report) - JS_ReportOutOfMemory(cx); + entries = (Shape **) js_calloc(JS_BIT(sizeLog2) * sizeof(Shape *)); + if (!entries) { METER(tableAllocFails); return false; } - cx->updateMallocCounter(JS_BIT(sizeLog2) * sizeof(JSScopeProperty *)); + cx->updateMallocCounter(JS_BIT(sizeLog2) * sizeof(Shape *)); hashShift = JS_DHASH_BITS - sizeLog2; - for (sprop = lastProp; sprop; sprop = sprop->parent) { - spp = search(sprop->id, true); - SPROP_STORE_PRESERVING_COLLISION(spp, sprop); + for (Shape::Range r = lastProp->all(); !r.empty(); r.popFront()) { + const Shape &shape = r.front(); + METER(searches); + METER(initSearches); + Shape **spp = search(shape.id, true); + SHAPE_STORE_PRESERVING_COLLISION(spp, &shape); } return true; } -JSScope * -JSScope::create(JSContext *cx, Class *clasp, JSObject *obj, uint32 shape) +bool +Shape::maybeHash(JSContext *cx) { - JS_ASSERT(obj); - - JSScope *scope = cx->create(obj); - if (!scope) - return NULL; - - scope->freeslot = JSSLOT_START(clasp); - scope->flags = cx->runtime->gcRegenShapesScopeFlag; - scope->initMinimal(cx, shape); - -#ifdef JS_THREADSAFE - js_InitTitle(cx, &scope->title); -#endif - JS_RUNTIME_METER(cx->runtime, liveScopes); - JS_RUNTIME_METER(cx->runtime, totalScopes); - return scope; -} - -JSEmptyScope::JSEmptyScope(JSContext *cx, Class *clasp) - : JSScope(NULL), clasp(clasp) -{ - /* - * This scope holds a reference to the new empty scope. Our only caller, - * getEmptyScope, also promises to incref on behalf of its caller. - */ - nrefs = 2; - freeslot = JSSLOT_START(clasp); - flags = OWN_SHAPE | cx->runtime->gcRegenShapesScopeFlag; - initMinimal(cx, js_GenerateShape(cx, false)); - -#ifdef JS_THREADSAFE - js_InitTitle(cx, &title); -#endif - JS_RUNTIME_METER(cx->runtime, liveScopes); - JS_RUNTIME_METER(cx->runtime, totalScopes); + JS_ASSERT(!table); + uint32 nentries = entryCount(); + if (nentries >= PropertyTable::HASH_THRESHOLD) { + table = cx->create(nentries); + return table && table->init(cx, this); + } + return true; } #ifdef DEBUG @@ -257,62 +190,29 @@ JSEmptyScope::JSEmptyScope(JSContext *cx, Class *clasp) # define LIVE_SCOPE_METER(cx,expr) /* nothing */ #endif -void -JSScope::destroy(JSContext *cx) -{ -#ifdef JS_THREADSAFE - js_FinishTitle(cx, &title); -#endif - if (table) - cx->free(table); - - /* - * The scopes containing empty scopes are only destroyed from the GC - * thread. 
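
// A self-contained sketch of the lazy-table idea behind Shape::maybeHash and
// PropertyTable::init above: a lineage of shapes is searched linearly until its
// entry count reaches a threshold, at which point a hash index is built over the
// whole lineage in one pass and kept current afterwards. ToyShape/ToyLineage are
// hypothetical, and std::unordered_map stands in for the custom double-hashed
// PropertyTable.
#include <cstdint>
#include <memory>
#include <unordered_map>

struct ToyShape {
    uint32_t id;          // property id
    ToyShape *parent;     // next-older property in the lineage
};

struct ToyLineage {
    static const size_t HASH_THRESHOLD = 6;    // mirrors PropertyTable::HASH_THRESHOLD

    ToyShape *lastProp = nullptr;
    size_t entryCount = 0;
    std::unique_ptr<std::unordered_map<uint32_t, ToyShape *>> table;

    void add(ToyShape *shape) {
        shape->parent = lastProp;
        lastProp = shape;
        ++entryCount;
        if (table)
            (*table)[shape->id] = shape;         // keep the existing index current
        else if (entryCount >= HASH_THRESHOLD)
            maybeHash();                         // build the index lazily
    }

    // Analogue of PropertyTable::init: walk the lineage and index every shape.
    void maybeHash() {
        table.reset(new std::unordered_map<uint32_t, ToyShape *>());
        for (ToyShape *s = lastProp; s; s = s->parent)
            table->emplace(s->id, s);
    }

    ToyShape *search(uint32_t id) const {
        if (table) {
            auto it = table->find(id);
            return it == table->end() ? nullptr : it->second;
        }
        for (ToyShape *s = lastProp; s; s = s->parent)   // linear search below threshold
            if (s->id == id)
                return s;
        return nullptr;
    }
};
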
- */ - if (emptyScope) - emptyScope->dropFromGC(cx); - - LIVE_SCOPE_METER(cx, cx->runtime->liveScopeProps -= entryCount); - JS_RUNTIME_UNMETER(cx->runtime, liveScopes); - cx->free(this); -} - /* static */ bool -JSScope::initRuntimeState(JSContext *cx) +Shape::initRuntimeState(JSContext *cx) { JSRuntime *rt = cx->runtime; -#define SCOPE(Name) rt->empty##Name##Scope +#define SHAPE(Name) rt->empty##Name##Shape #define CLASP(Name) &js_##Name##Class -#define INIT_EMPTY_SCOPE(Name,NAME) \ - INIT_EMPTY_SCOPE_WITH_CLASS(Name, NAME, CLASP(Name)) +#define INIT_EMPTY_SHAPE(Name,NAME) \ + INIT_EMPTY_SHAPE_WITH_CLASS(Name, NAME, CLASP(Name)) -#define INIT_EMPTY_SCOPE_WITH_CLASS(Name,NAME,clasp) \ - INIT_EMPTY_SCOPE_WITH_FREESLOT(Name, NAME, clasp, JSSLOT_FREE(clasp)) - -#define INIT_EMPTY_SCOPE_WITH_FREESLOT(Name,NAME,clasp,slot) \ - SCOPE(Name) = cx->create(cx, clasp); \ - if (!SCOPE(Name)) \ - return false; \ - JS_ASSERT(SCOPE(Name)->shape == JSScope::EMPTY_##NAME##_SHAPE); \ - JS_ASSERT(SCOPE(Name)->nrefs == 2); \ - SCOPE(Name)->nrefs = 1; \ - SCOPE(Name)->freeslot = slot +#define INIT_EMPTY_SHAPE_WITH_CLASS(Name,NAME,clasp) \ + JS_BEGIN_MACRO \ + SHAPE(Name) = EmptyShape::create(cx, clasp); \ + if (!SHAPE(Name)) \ + return false; \ + JS_ASSERT(SHAPE(Name)->shape == Shape::EMPTY_##NAME##_SHAPE); \ + JS_END_MACRO /* * NewArguments allocates dslots to have enough room for the argc of the * particular arguments object being created. - * - * Thus we fake freeslot in the shared empty scope for the many unmutated - * arguments objects so that, until and unless a scope property is defined - * on a particular arguments object, it can share the runtime-wide empty - * scope with other arguments objects, whatever their initial argc values. - * - * This allows assertions that the arg slot being got or set by a fast path - * is less than freeslot to succeed. As the shared emptyArgumentsScope is * never mutated, it's safe to pretend to have all the slots possible. * * Note how the fast paths in jsinterp.cpp for JSOP_LENGTH and JSOP_GETELEM @@ -320,60 +220,46 @@ JSScope::initRuntimeState(JSContext *cx) * arguments objects. This helps ensure that any arguments object needing * its own mutable scope (with unique shape) is a rare event. */ - INIT_EMPTY_SCOPE_WITH_FREESLOT(Arguments, ARGUMENTS, CLASP(Arguments), - JS_INITIAL_NSLOTS + JS_ARGS_LENGTH_MAX); + INIT_EMPTY_SHAPE(Arguments, ARGUMENTS); - INIT_EMPTY_SCOPE(Block, BLOCK); + INIT_EMPTY_SHAPE(Block, BLOCK); /* * Initialize the shared scope for all empty Call objects so gets for args * and vars do not force the creation of a mutable scope for the particular * call object being accessed. - * - * See comment above for rt->emptyArgumentsScope->freeslot initialization. */ - INIT_EMPTY_SCOPE_WITH_FREESLOT(Call, CALL, CLASP(Call), - JS_INITIAL_NSLOTS + JSFunction::MAX_ARGS_AND_VARS); + INIT_EMPTY_SHAPE(Call, CALL); /* A DeclEnv object holds the name binding for a named function expression. */ - INIT_EMPTY_SCOPE(DeclEnv, DECL_ENV); + INIT_EMPTY_SHAPE(DeclEnv, DECL_ENV); /* Non-escaping native enumerator objects share this empty scope. */ - INIT_EMPTY_SCOPE_WITH_CLASS(Enumerator, ENUMERATOR, &js_IteratorClass); + INIT_EMPTY_SHAPE_WITH_CLASS(Enumerator, ENUMERATOR, &js_IteratorClass); /* Same drill for With objects. 
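
// A small sketch of the sharing enabled by the runtime-wide empty shapes
// initialized above. The patch stores a fixed handful of them in JSRuntime fields
// (emptyArgumentsShape, emptyBlockShape, ...); this hypothetical sketch
// generalizes that to a per-class cache purely for illustration: every unmutated
// object of a class points at one canonical empty shape and only grows its own
// lineage once a property is added.
#include <map>
#include <memory>
#include <vector>

struct ToyClass { const char *name; };

struct SharedShape {
    const ToyClass *clasp;
    int id;                        // property id; -1 marks the empty shape
    const SharedShape *parent;
};

class EmptyShapeCache {
    std::map<const ToyClass *, std::unique_ptr<SharedShape>> cache;
  public:
    // One immutable empty shape per class, created on first request and shared.
    const SharedShape *get(const ToyClass *clasp) {
        std::unique_ptr<SharedShape> &e = cache[clasp];
        if (!e)
            e.reset(new SharedShape{clasp, -1, nullptr});
        return e.get();
    }
};

struct ToyObject {
    const SharedShape *lastProp;                       // the shared empty shape at birth
    std::vector<std::unique_ptr<SharedShape>> owned;   // this object's own lineage

    explicit ToyObject(const SharedShape *empty) : lastProp(empty) {}

    void addProperty(int id) {
        owned.emplace_back(new SharedShape{lastProp->clasp, id, lastProp});
        lastProp = owned.back().get();                 // no longer the shared empty shape
    }
};
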
*/ - INIT_EMPTY_SCOPE_WITH_CLASS(With, WITH, &js_WithClass); + INIT_EMPTY_SHAPE(With, WITH); -#undef SCOPE +#undef SHAPE #undef CLASP -#undef INIT_EMPTY_SCOPE -#undef INIT_EMPTY_SCOPE_WITH_CLASS -#undef INIT_EMPTY_SCOPE_WITH_FREESLOT +#undef INIT_EMPTY_SHAPE +#undef INIT_EMPTY_SHAPE_WITH_CLASS return true; } /* static */ void -JSScope::finishRuntimeState(JSContext *cx) +Shape::finishRuntimeState(JSContext *cx) { JSRuntime *rt = cx->runtime; -#define FINISH_EMPTY_SCOPE(Name) \ - if (rt->empty##Name##Scope) { \ - rt->empty##Name##Scope->drop(cx); \ - rt->empty##Name##Scope = NULL; \ - } - - /* Mnemonic: ABCDEW */ - FINISH_EMPTY_SCOPE(Arguments); - FINISH_EMPTY_SCOPE(Block); - FINISH_EMPTY_SCOPE(Call); - FINISH_EMPTY_SCOPE(DeclEnv); - FINISH_EMPTY_SCOPE(Enumerator); - FINISH_EMPTY_SCOPE(With); - -#undef FINISH_EMPTY_SCOPE + rt->emptyArgumentsShape = NULL; + rt->emptyBlockShape = NULL; + rt->emptyCallShape = NULL; + rt->emptyDeclEnvShape = NULL; + rt->emptyEnumeratorShape = NULL; + rt->emptyWithShape = NULL; } JS_STATIC_ASSERT(sizeof(JSHashNumber) == 4); @@ -393,59 +279,61 @@ JS_STATIC_ASSERT(sizeof(jsid) == JS_BYTES_PER_WORD); * the golden ratio, expressed as a fixed-point 32 bit fraction, and the id * itself. */ -#define SCOPE_HASH0(id) (HASH_ID(id) * JS_GOLDEN_RATIO) -#define SCOPE_HASH1(hash0,shift) ((hash0) >> (shift)) -#define SCOPE_HASH2(hash0,log2,shift) ((((hash0) << (log2)) >> (shift)) | 1) +#define HASH0(id) (HASH_ID(id) * JS_GOLDEN_RATIO) +#define HASH1(hash0,shift) ((hash0) >> (shift)) +#define HASH2(hash0,log2,shift) ((((hash0) << (log2)) >> (shift)) | 1) -JSScopeProperty ** -JSScope::searchTable(jsid id, bool adding) +Shape ** +PropertyTable::search(jsid id, bool adding) { JSHashNumber hash0, hash1, hash2; int sizeLog2; - JSScopeProperty *stored, *sprop, **spp, **firstRemoved; + Shape *stored, *shape, **spp, **firstRemoved; uint32 sizeMask; - JS_ASSERT(table); + JS_ASSERT(entries); JS_ASSERT(!JSID_IS_VOID(id)); /* Compute the primary hash address. */ METER(hashes); - hash0 = SCOPE_HASH0(id); - hash1 = SCOPE_HASH1(hash0, hashShift); - spp = table + hash1; + hash0 = HASH0(id); + hash1 = HASH1(hash0, hashShift); + spp = entries + hash1; /* Miss: return space for a new entry. */ stored = *spp; - if (SPROP_IS_FREE(stored)) { + if (SHAPE_IS_FREE(stored)) { METER(misses); + METER(hashMisses); return spp; } /* Hit: return entry. */ - sprop = SPROP_CLEAR_COLLISION(stored); - if (sprop && sprop->id == id) { + shape = SHAPE_CLEAR_COLLISION(stored); + if (shape && shape->id == id) { METER(hits); + METER(hashHits); return spp; } /* Collision: double hash. */ sizeLog2 = JS_DHASH_BITS - hashShift; - hash2 = SCOPE_HASH2(hash0, sizeLog2, hashShift); + hash2 = HASH2(hash0, sizeLog2, hashShift); sizeMask = JS_BITMASK(sizeLog2); #ifdef DEBUG - jsuword collision_flag = SPROP_COLLISION; + jsuword collision_flag = SHAPE_COLLISION; #endif /* Save the first removed entry pointer so we can recycle it if adding. 
*/ - if (SPROP_IS_REMOVED(stored)) { + if (SHAPE_IS_REMOVED(stored)) { firstRemoved = spp; } else { firstRemoved = NULL; - if (adding && !SPROP_HAD_COLLISION(stored)) - SPROP_FLAG_COLLISION(spp, sprop); + if (adding && !SHAPE_HAD_COLLISION(stored)) + SHAPE_FLAG_COLLISION(spp, shape); #ifdef DEBUG - collision_flag &= jsuword(*spp) & SPROP_COLLISION; + collision_flag &= jsuword(*spp) & SHAPE_COLLISION; #endif } @@ -453,29 +341,31 @@ JSScope::searchTable(jsid id, bool adding) METER(steps); hash1 -= hash2; hash1 &= sizeMask; - spp = table + hash1; + spp = entries + hash1; stored = *spp; - if (SPROP_IS_FREE(stored)) { + if (SHAPE_IS_FREE(stored)) { + METER(misses); METER(stepMisses); return (adding && firstRemoved) ? firstRemoved : spp; } - sprop = SPROP_CLEAR_COLLISION(stored); - if (sprop && sprop->id == id) { + shape = SHAPE_CLEAR_COLLISION(stored); + if (shape && shape->id == id) { + METER(hits); METER(stepHits); JS_ASSERT(collision_flag); return spp; } - if (SPROP_IS_REMOVED(stored)) { + if (SHAPE_IS_REMOVED(stored)) { if (!firstRemoved) firstRemoved = spp; } else { - if (adding && !SPROP_HAD_COLLISION(stored)) - SPROP_FLAG_COLLISION(spp, sprop); + if (adding && !SHAPE_HAD_COLLISION(stored)) + SHAPE_FLAG_COLLISION(spp, shape); #ifdef DEBUG - collision_flag &= jsuword(*spp) & SPROP_COLLISION; + collision_flag &= jsuword(*spp) & SHAPE_COLLISION; #endif } } @@ -485,60 +375,81 @@ JSScope::searchTable(jsid id, bool adding) } bool -JSScope::changeTable(JSContext *cx, int change) +PropertyTable::change(JSContext *cx, int change) { int oldlog2, newlog2; uint32 oldsize, newsize, nbytes; - JSScopeProperty **newtable, **oldtable, **spp, **oldspp, *sprop; + Shape **newTable, **oldTable, **spp, **oldspp, *shape; - if (!table) - return createTable(cx, true); + JS_ASSERT(entries); - /* Grow, shrink, or compress by changing this->table. */ + /* Grow, shrink, or compress by changing this->entries. */ oldlog2 = JS_DHASH_BITS - hashShift; newlog2 = oldlog2 + change; oldsize = JS_BIT(oldlog2); newsize = JS_BIT(newlog2); - nbytes = SCOPE_TABLE_NBYTES(newsize); - newtable = (JSScopeProperty **) cx->calloc(nbytes); - if (!newtable) { + nbytes = PROPERTY_TABLE_NBYTES(newsize); + newTable = (Shape **) cx->calloc(nbytes); + if (!newTable) { METER(tableAllocFails); return false; } - /* Now that we have newtable allocated, update members. */ + /* Now that we have newTable allocated, update members. */ hashShift = JS_DHASH_BITS - newlog2; removedCount = 0; - oldtable = table; - table = newtable; + oldTable = entries; + entries = newTable; /* Treat the above calloc as a JS_malloc, to match CreateScopeTable. */ cx->updateMallocCounter(nbytes); /* Copy only live entries, leaving removed and free ones behind. */ - for (oldspp = oldtable; oldsize != 0; oldspp++) { - sprop = SPROP_FETCH(oldspp); - if (sprop) { - spp = search(sprop->id, true); - JS_ASSERT(SPROP_IS_FREE(*spp)); - *spp = sprop; + for (oldspp = oldTable; oldsize != 0; oldspp++) { + shape = SHAPE_FETCH(oldspp); + if (shape) { + METER(searches); + METER(changeSearches); + spp = search(shape->id, true); + JS_ASSERT(SHAPE_IS_FREE(*spp)); + *spp = shape; } oldsize--; } - /* Finally, free the old table storage. */ - cx->free(oldtable); + /* Finally, free the old entries storage. 
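
// A self-contained sketch of the double hashing performed by PropertyTable::search
// above: multiply the id by the 32-bit golden ratio (HASH0), take the top bits as
// the primary index (HASH1), and derive an odd secondary hash (HASH2) for probing.
// A free slot ends a probe sequence; REMOVED sentinels do not, but the first one
// seen is recycled when adding. ToyTable, Entry, and the sentinel are hypothetical
// simplifications (no collision flag, no metering).
#include <cstdint>
#include <cstdlib>

struct Entry { uint32_t id; };

class ToyTable {
    static const uint32_t GOLDEN_RATIO = 0x9E3779B9u;  // 2^32 / phi

    Entry **entries;
    uint32_t sizeLog2;                                  // capacity == 1 << sizeLog2

  public:
    static Entry *const REMOVED;                        // deleted-entry sentinel

    // log2 is expected to be small but nonzero (the real minimum is MIN_SIZE_LOG2).
    explicit ToyTable(uint32_t log2) : sizeLog2(log2) {
        // The real code checks this allocation; the sketch assumes it succeeds.
        entries = static_cast<Entry **>(std::calloc(size_t(1) << sizeLog2, sizeof(Entry *)));
    }
    ~ToyTable() { std::free(entries); }

    // Return the slot holding id, or the slot where id should go when adding.
    // The real table keeps its load factor below .75, so a free slot always exists.
    Entry **search(uint32_t id, bool adding) {
        uint32_t hashShift = 32 - sizeLog2;
        uint32_t sizeMask = (uint32_t(1) << sizeLog2) - 1;

        uint32_t hash0 = id * GOLDEN_RATIO;
        uint32_t hash1 = hash0 >> hashShift;            // primary index: top bits
        Entry **spp = entries + hash1;
        if (!*spp)                                      // miss: a free slot ends the probe
            return spp;
        if (*spp != REMOVED && (*spp)->id == id)        // hit
            return spp;

        // Collision: the odd step guarantees every slot is eventually visited.
        uint32_t hash2 = ((hash0 << sizeLog2) >> hashShift) | 1;
        Entry **firstRemoved = (*spp == REMOVED) ? spp : nullptr;

        for (;;) {
            hash1 = (hash1 - hash2) & sizeMask;
            spp = entries + hash1;
            if (!*spp)
                return (adding && firstRemoved) ? firstRemoved : spp;
            if (*spp != REMOVED && (*spp)->id == id)
                return spp;
            if (*spp == REMOVED && !firstRemoved)
                firstRemoved = spp;
        }
    }
};

Entry removedSentinelStorage;
Entry *const ToyTable::REMOVED = &removedSentinelStorage;
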
*/ + cx->free(oldTable); return true; } +Shape * +Shape::getChild(JSContext *cx, const js::Shape &child, Shape **listp) +{ + JS_ASSERT(!JSID_IS_VOID(child.id)); + JS_ASSERT(!child.inDictionary()); + + if (inDictionary()) { + if (newDictionaryShape(cx, child, listp)) + return *listp; + return NULL; + } + + Shape *shape = JS_PROPERTY_TREE(cx).getChild(cx, this, child); + if (shape) { + JS_ASSERT(shape->parent == this); + JS_ASSERT(this == *listp); + *listp = shape; + } + return shape; +} + /* * Get or create a property-tree or dictionary child property of parent, which * must be lastProp if inDictionaryMode(), else parent must be one of lastProp * or lastProp->parent. */ -JSScopeProperty * -JSScope::getChildProperty(JSContext *cx, JSScopeProperty *parent, - JSScopeProperty &child) +Shape * +JSObject::getChildProperty(JSContext *cx, Shape *parent, Shape &child) { JS_ASSERT(!JSID_IS_VOID(child.id)); JS_ASSERT(!child.inDictionary()); @@ -551,93 +462,53 @@ JSScope::getChildProperty(JSContext *cx, JSScopeProperty *parent, */ if (!child.isAlias()) { if (child.attrs & JSPROP_SHARED) { - child.slot = SPROP_INVALID_SLOT; + child.slot = SHAPE_INVALID_SLOT; } else { /* - * We may have set slot from a nearly-matching sprop, above. - * If so, we're overwriting that nearly-matching sprop, so we + * We may have set slot from a nearly-matching shape, above. + * If so, we're overwriting that nearly-matching shape, so we * can reuse its slot -- we don't need to allocate a new one. * Similarly, we use a specific slot if provided by the caller. */ - if (child.slot == SPROP_INVALID_SLOT && - !js_AllocSlot(cx, object, &child.slot)) { + if (child.slot == SHAPE_INVALID_SLOT && !allocSlot(cx, &child.slot)) return NULL; - } } } if (inDictionaryMode()) { JS_ASSERT(parent == lastProp); - if (newDictionaryProperty(cx, child, &lastProp)) { + if (parent->frozen()) { + parent = Shape::newDictionaryList(cx, &lastProp); + if (!parent) + return NULL; + JS_ASSERT(!parent->frozen()); + } + if (Shape::newDictionaryShape(cx, child, &lastProp)) { + updateFlags(lastProp); updateShape(cx); return lastProp; } return NULL; } - JSScopeProperty *sprop = JS_PROPERTY_TREE(cx).getChild(cx, parent, shape, child); - if (sprop) { - JS_ASSERT(sprop->parent == parent); - if (parent == lastProp) { - extend(cx, sprop); - } else { - JS_ASSERT(parent == lastProp->parent); - setLastProperty(sprop); - updateShape(cx); - } + Shape *shape = JS_PROPERTY_TREE(cx).getChild(cx, parent, child); + if (shape) { + JS_ASSERT(shape->parent == parent); + JS_ASSERT_IF(parent != lastProp, parent == lastProp->parent); + setLastProperty(shape); + updateFlags(shape); + updateShape(cx); } - return sprop; + return shape; } -#ifdef DEBUG_notbrendan -#define CHECK_ANCESTOR_LINE(scope, sparse) \ - JS_BEGIN_MACRO \ - if ((scope)->table) CheckAncestorLine(scope); \ - JS_END_MACRO - -static void -CheckAncestorLine(JSScope *scope) -{ - uint32 size; - JSScopeProperty **spp, **start, **end, *ancestorLine, *sprop, *aprop; - uint32 entryCount, ancestorCount; - - ancestorLine = scope->lastProperty(); - if (ancestorLine) - JS_ASSERT(scope->hasProperty(ancestorLine)); - - entryCount = 0; - size = SCOPE_CAPACITY(scope); - start = scope->table; - for (spp = start, end = start + size; spp < end; spp++) { - sprop = SPROP_FETCH(spp); - if (sprop) { - ++entryCount; - for (aprop = ancestorLine; aprop; aprop = aprop->parent) { - if (aprop == sprop) - break; - } - JS_ASSERT(aprop); - } - } - JS_ASSERT(entryCount == scope->entryCount); - - ancestorCount = 0; - for (sprop = ancestorLine; sprop; 
sprop = sprop->parent) - ancestorCount++; - JS_ASSERT(ancestorCount == scope->entryCount); -} -#else -#define CHECK_ANCESTOR_LINE(scope, sparse) /* nothing */ -#endif - void -JSScope::reportReadOnlyScope(JSContext *cx) +JSObject::reportReadOnlyScope(JSContext *cx) { JSString *str; const char *bytes; - str = js_ValueToString(cx, ObjectOrNullValue(object)); + str = js_ValueToString(cx, ObjectValue(*this)); if (!str) return; bytes = js_GetStringBytes(cx, str); @@ -646,165 +517,72 @@ JSScope::reportReadOnlyScope(JSContext *cx) JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_READ_ONLY, bytes); } -void -JSScope::generateOwnShape(JSContext *cx) +Shape * +Shape::newDictionaryShape(JSContext *cx, const Shape &child, Shape **listp) { -#ifdef JS_TRACER - if (object) { - LeaveTraceIfGlobalObject(cx, object); - - /* - * The JIT must have arranged to re-guard after any unpredictable shape - * change, so if we are on trace here, we should already be prepared to - * bail off trace. - */ - JS_ASSERT_IF(JS_ON_TRACE(cx), cx->bailExit); - - /* - * If we are recording, here is where we forget already-guarded shapes. - * Any subsequent property operation upon object on the trace currently - * being recorded will re-guard (and re-memoize). - */ - TraceMonitor *tm = &JS_TRACE_MONITOR(cx); - if (TraceRecorder *tr = tm->recorder) - tr->forgetGuardedShapesForObject(object); - } -#endif - - shape = js_GenerateShape(cx, false); - setOwnShape(); -} - -JSScopeProperty * -JSScope::newDictionaryProperty(JSContext *cx, const JSScopeProperty &child, - JSScopeProperty **childp) -{ - JSScopeProperty *dprop = JS_PROPERTY_TREE(cx).newScopeProperty(cx); + Shape *dprop = JS_PROPERTY_TREE(cx).newShape(cx); if (!dprop) return NULL; - new (dprop) JSScopeProperty(child.id, child.rawGetter, child.rawSetter, child.slot, - child.attrs, child.flags | JSScopeProperty::IN_DICTIONARY, - child.shortid); + new (dprop) Shape(child.id, child.rawGetter, child.rawSetter, child.slot, child.attrs, + (child.flags & ~FROZEN) | IN_DICTIONARY, + child.shortid); dprop->shape = js_GenerateShape(cx, false); - dprop->childp = NULL; - insertDictionaryProperty(dprop, childp); - updateFlags(dprop); + dprop->listp = NULL; + dprop->insertIntoDictionary(listp); + + JS_RUNTIME_METER(cx->runtime, liveDictModeNodes); return dprop; } -bool -JSScope::toDictionaryMode(JSContext *cx, JSScopeProperty *&aprop) +Shape * +Shape::newDictionaryList(JSContext *cx, Shape **listp) { - JS_ASSERT(!inDictionaryMode()); + Shape *shape = *listp; + Shape *list = shape; - JSScopeProperty **oldTable = table; - uint32 saveRemovedCount = removedCount; - if (oldTable) { - int sizeLog2 = JS_DHASH_BITS - hashShift; - JSScopeProperty **newTable = (JSScopeProperty **) - js_calloc(JS_BIT(sizeLog2) * sizeof(JSScopeProperty *)); + Shape **childp = listp; + *childp = NULL; - if (!newTable) { - JS_ReportOutOfMemory(cx); - METER(toDictFails); - return false; - } - table = newTable; - removedCount = 0; - } + while (shape) { + JS_ASSERT(!shape->inDictionary()); - /* - * We are committed from here on. If we fail due to OOM in the loop below, - * we'll restore saveEntryCount, oldTable, oldLastProp. - */ - JSScopeProperty *oldLastProp = lastProp; - lastProp = NULL; - - /* - * Clear entryCount because JSScope::insertDictionaryProperty called from - * JSScope::newDictionaryProperty bumps it. 
- */ - uint32 saveEntryCount = entryCount; - entryCount = 0; - - for (JSScopeProperty *sprop = oldLastProp, **childp = &lastProp; sprop; sprop = sprop->parent) { - JSScopeProperty *dprop = newDictionaryProperty(cx, *sprop, childp); + Shape *dprop = Shape::newDictionaryShape(cx, *shape, childp); if (!dprop) { - entryCount = saveEntryCount; - removedCount = saveRemovedCount; - if (table) - js_free(table); - table = oldTable; - lastProp = oldLastProp; METER(toDictFails); - return false; + *listp = list; + return NULL; } - if (table) { - JSScopeProperty **spp = search(dprop->id, true); - JS_ASSERT(!SPROP_FETCH(spp)); - SPROP_STORE_PRESERVING_COLLISION(spp, dprop); - } - - if (aprop == sprop) - aprop = dprop; + JS_ASSERT(!dprop->table); childp = &dprop->parent; + shape = shape->parent; } - if (oldTable) - js_free(oldTable); - setDictionaryMode(); - clearOwnShape(); - - if (lastProp) { - /* - * This scope may get OWN_SHAPE set again, but for now its shape must - * be the shape of its lastProp. If it is empty, its initial shape is - * still valid. See JSScope::updateShape's definition in jsscope.h. - */ - shape = lastProp->shape; - } - return true; + list = *listp; + JS_ASSERT(list->inDictionary()); + list->maybeHash(cx); + return list; } -JSScopeProperty * -JSScope::addProperty(JSContext *cx, jsid id, - PropertyOp getter, PropertyOp setter, - uint32 slot, uintN attrs, - uintN flags, intN shortid) +bool +JSObject::toDictionaryMode(JSContext *cx) { - JS_ASSERT(JS_IS_SCOPE_LOCKED(cx, this)); - CHECK_ANCESTOR_LINE(this, true); + JS_ASSERT(!inDictionaryMode()); + if (!Shape::newDictionaryList(cx, &lastProp)) + return false; - JS_ASSERT(!JSID_IS_VOID(id)); - JS_ASSERT_IF(!cx->runtime->gcRegenShapes, - hasRegenFlag(cx->runtime->gcRegenShapesScopeFlag)); - - /* - * You can't add properties to a sealed scope. But note well that you can - * change property attributes in a sealed scope, even though that replaces - * a JSScopeProperty * in the scope's hash table -- but no id is added, so - * the scope remains sealed. - */ - if (sealed()) { - reportReadOnlyScope(cx); - return NULL; - } - - /* Search for id with adding = true in order to claim its entry. */ - JSScopeProperty **spp = search(id, true); - JS_ASSERT(!SPROP_FETCH(spp)); - return addPropertyHelper(cx, id, getter, setter, slot, attrs, flags, shortid, spp); + clearOwnShape(); + return true; } /* * Normalize stub getter and setter values for faster is-stub testing in the - * SPROP_CALL_[GS]ETTER macros. + * SHAPE_CALL_[GS]ETTER macros. */ static inline bool -NormalizeGetterAndSetter(JSContext *cx, JSScope *scope, +NormalizeGetterAndSetter(JSContext *cx, JSObject *obj, jsid id, uintN attrs, uintN flags, PropertyOp &getter, PropertyOp &setter) @@ -813,7 +591,7 @@ NormalizeGetterAndSetter(JSContext *cx, JSScope *scope, JS_ASSERT(!(attrs & JSPROP_SETTER)); setter = NULL; } - if (flags & JSScopeProperty::METHOD) { + if (flags & Shape::METHOD) { /* Here, getter is the method, a function object reference. */ JS_ASSERT(getter); JS_ASSERT(!setter || setter == js_watch_set); @@ -831,7 +609,7 @@ NormalizeGetterAndSetter(JSContext *cx, JSScope *scope, * XXXbe this could get expensive with lots of watchpoints... 
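
// A standalone sketch of the conversion done by Shape::newDictionaryList and
// JSObject::toDictionaryMode above: the shared, immutable property-tree lineage
// is cloned, newest first, into unshared "dictionary" nodes that the object owns
// and can later unlink in the middle in O(1) via the listp back-pointer.
// TreeShape/DictShape and toDictionaryList are hypothetical stand-ins.
#include <cstdint>
#include <new>

struct TreeShape {            // immutable node shared via the property tree
    uint32_t id;
    const TreeShape *parent;
};

struct DictShape {            // per-object clone, removable without touching the tree
    uint32_t id;
    DictShape *parent;        // next-older property in the list
    DictShape **listp;        // address of the pointer that points at this node
};

// Clone the lineage ending at |last| into a dictionary list headed at *listp.
// Returns the head (the clone of |last|) or nullptr on allocation failure; the
// real code restores its old lineage on failure, which this sketch omits.
inline DictShape *toDictionaryList(const TreeShape *last, DictShape **listp) {
    *listp = nullptr;
    DictShape **childp = listp;               // where the next clone gets linked
    for (const TreeShape *s = last; s; s = s->parent) {
        DictShape *d = new (std::nothrow) DictShape{s->id, nullptr, childp};
        if (!d)
            return nullptr;
        *childp = d;                          // link the clone in...
        childp = &d->parent;                  // ...and hang the next, older clone off it
    }
    return *listp;
}
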
*/ if (!JS_CLIST_IS_EMPTY(&cx->runtime->watchPointList) && - js_FindWatchPoint(cx->runtime, scope, id)) { + js_FindWatchPoint(cx->runtime, obj, id)) { setter = js_WrapWatchedSetter(cx, id, attrs, setter); if (!setter) { METER(wrapWatchFails); @@ -841,392 +619,480 @@ NormalizeGetterAndSetter(JSContext *cx, JSScope *scope, return true; } -JSScopeProperty * -JSScope::addPropertyHelper(JSContext *cx, jsid id, - PropertyOp getter, PropertyOp setter, - uint32 slot, uintN attrs, - uintN flags, intN shortid, - JSScopeProperty **spp) +const Shape * +JSObject::addProperty(JSContext *cx, jsid id, + PropertyOp getter, PropertyOp setter, + uint32 slot, uintN attrs, + uintN flags, intN shortid) { + JS_ASSERT(!JSID_IS_VOID(id)); + + /* + * You can't add properties to a sealed object. But note well that you can + * change property attributes in a sealed object, even though that replaces + * a Shape * in the scope's hash table -- but no id is added, so the object + * remains sealed. + */ + if (sealed()) { + reportReadOnlyScope(cx); + return NULL; + } + NormalizeGetterAndSetter(cx, this, id, attrs, flags, getter, setter); - /* Check whether we need to grow, if the load factor is >= .75. */ - uint32 size = SCOPE_CAPACITY(this); - if (entryCount + removedCount >= size - (size >> 2)) { - int change = removedCount < size >> 2; - if (!change) - METER(compresses); - else - METER(grows); - if (!changeTable(cx, change) && entryCount + removedCount == size - 1) - return NULL; - spp = search(id, true); - JS_ASSERT(!SPROP_FETCH(spp)); + /* Search for id with adding = true in order to claim its entry. */ + Shape **spp = nativeSearch(id, true); + JS_ASSERT(!SHAPE_FETCH(spp)); + return addPropertyCommon(cx, id, getter, setter, slot, attrs, flags, shortid, spp); +} + +const uint32 MAX_PROPERTY_TREE_HEIGHT = 64; + +const Shape * +JSObject::addPropertyCommon(JSContext *cx, jsid id, + PropertyOp getter, PropertyOp setter, + uint32 slot, uintN attrs, + uintN flags, intN shortid, + Shape **spp) +{ + PropertyTable *table = NULL; + if (!inDictionaryMode()) { + if (lastProp->entryCount() > MAX_PROPERTY_TREE_HEIGHT) { + if (!toDictionaryMode(cx)) + return NULL; + spp = nativeSearch(id, true); + table = lastProp->table; + } + } else if ((table = lastProp->table) != NULL) { + /* Check whether we need to grow, if the load factor is >= .75. */ + uint32 size = table->capacity(); + if (table->entryCount + table->removedCount >= size - (size >> 2)) { + int change = table->removedCount < size >> 2; + if (!change) + METER(compresses); + else + METER(grows); + if (!table->change(cx, change) && table->entryCount + table->removedCount == size - 1) + return NULL; + METER(searches); + METER(changeSearches); + spp = table->search(id, true); + JS_ASSERT(!SHAPE_FETCH(spp)); + } } /* Find or create a property tree node labeled by our arguments. */ - JSScopeProperty *sprop; + const Shape *shape; { - JSScopeProperty child(id, getter, setter, slot, attrs, flags, shortid); - sprop = getChildProperty(cx, lastProp, child); + Shape child(id, getter, setter, slot, attrs, flags, shortid); + shape = getChildProperty(cx, lastProp, child); } - if (sprop) { - /* Store the tree node pointer in the table entry for id. */ - if (table) - SPROP_STORE_PRESERVING_COLLISION(spp, sprop); - CHECK_ANCESTOR_LINE(this, false); + if (shape) { + JS_ASSERT(shape == lastProp); + + if (table) { + /* Store the tree node pointer in the table entry for id. 
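
// The load-factor checks in addPropertyCommon above and in PropertyTable::change
// earlier in this patch boil down to a simple policy, sketched here with
// hypothetical helpers: before an add, grow (or merely compress away removed
// sentinels) once occupancy reaches 3/4 of capacity; after a remove, shrink by
// half once live entries fall to 1/4, but never below the minimum table size.
#include <cstdint>

enum ResizeAction { NoResize, Compress, Grow };

// Counting removed sentinels as occupancy: at load >= .75, grow by doubling, or
// compress in place when at least a quarter of the slots are removed sentinels.
inline ResizeAction resizeBeforeAdd(uint32_t entryCount, uint32_t removedCount,
                                    uint32_t capacity) {
    if (entryCount + removedCount >= capacity - (capacity >> 2))
        return removedCount < (capacity >> 2) ? Grow : Compress;
    return NoResize;
}

// Shrink by half once the live count is <= .25 of capacity (removeProperty, later
// in this patch, applies the same test).
inline bool shrinkAfterRemove(uint32_t entryCount, uint32_t capacity, uint32_t minSize) {
    return capacity > minSize && entryCount <= (capacity >> 2);
}
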
*/ + SHAPE_STORE_PRESERVING_COLLISION(spp, shape); + ++table->entryCount; + + /* Pass the table along to the new lastProp, namely shape. */ + JS_ASSERT(shape->parent->table == table); + shape->parent->setTable(NULL); + shape->setTable(table); + } #ifdef DEBUG - LIVE_SCOPE_METER(cx, ++cx->runtime->liveScopeProps); - JS_RUNTIME_METER(cx->runtime, totalScopeProps); + LIVE_SCOPE_METER(cx, ++cx->runtime->liveObjectProps); + JS_RUNTIME_METER(cx->runtime, totalObjectProps); #endif /* - * If we reach the hashing threshold, try to allocate this->table. + * If we reach the hashing threshold, try to allocate lastProp->table. * If we can't (a rare event, preceded by swapping to death on most * modern OSes), stick with linear search rather than whining about - * this little set-back. Therefore we must test !this->table and - * this->entryCount >= SCOPE_HASH_THRESHOLD, not merely whether the + * this little set-back. Therefore we must test !lastProp->table and + * entry count >= PropertyTable::HASH_THRESHOLD, not merely whether the * entry count just reached the threshold. */ - if (!table && entryCount >= SCOPE_HASH_THRESHOLD) - (void) createTable(cx, false); + if (!lastProp->table) + lastProp->maybeHash(cx); METER(adds); - return sprop; + return shape; } METER(addFails); return NULL; } -JSScopeProperty * -JSScope::putProperty(JSContext *cx, jsid id, - PropertyOp getter, PropertyOp setter, - uint32 slot, uintN attrs, - uintN flags, intN shortid) +const Shape * +JSObject::putProperty(JSContext *cx, jsid id, + PropertyOp getter, PropertyOp setter, + uint32 slot, uintN attrs, + uintN flags, intN shortid) { - JSScopeProperty **spp, *sprop, *overwriting; - - JS_ASSERT(JS_IS_SCOPE_LOCKED(cx, this)); - CHECK_ANCESTOR_LINE(this, true); + Shape **spp, *shape, *overwriting; JS_ASSERT(!JSID_IS_VOID(id)); - JS_ASSERT_IF(!cx->runtime->gcRegenShapes, - hasRegenFlag(cx->runtime->gcRegenShapesScopeFlag)); - if (sealed()) { reportReadOnlyScope(cx); return NULL; } - /* Search for id in order to claim its entry if table has been allocated. */ - spp = search(id, true); - sprop = SPROP_FETCH(spp); - if (!sprop) - return addPropertyHelper(cx, id, getter, setter, slot, attrs, flags, shortid, spp); - - /* Property exists: JSScope::search must have returned a valid *spp. */ - JS_ASSERT(!SPROP_IS_REMOVED(*spp)); - overwriting = sprop; - NormalizeGetterAndSetter(cx, this, id, attrs, flags, getter, setter); + /* Search for id in order to claim its entry if table has been allocated. */ + spp = nativeSearch(id, true); + shape = SHAPE_FETCH(spp); + if (!shape) + return addPropertyCommon(cx, id, getter, setter, slot, attrs, flags, shortid, spp); + + /* Property exists: search must have returned a valid *spp. */ + JS_ASSERT(!SHAPE_IS_REMOVED(*spp)); + overwriting = shape; + /* * If all property members match, this is a redundant add and we can * return early. If the caller wants to allocate a slot, but doesn't - * care which slot, copy sprop->slot into slot so we can match sprop, + * care which slot, copy shape->slot into slot so we can match shape, * if all other members match. 
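
// A tiny sketch of the table hand-off visible just above: the PropertyTable always
// hangs off the last (newest) shape of a lineage, so when the lineage grows the
// previous owner's pointer is cleared and the table moves forward to the new last
// shape. Names are hypothetical; the caller is assumed to have already stored the
// new shape in the table and bumped its entryCount.
struct ToyPropertyTable { unsigned entryCount; };

struct LineageShape {
    LineageShape *parent;
    ToyPropertyTable *table;   // non-null only on the last shape of a lineage
};

inline void extendLineage(LineageShape *&lastProp, LineageShape *newLast) {
    newLast->parent = lastProp;
    newLast->table = nullptr;
    if (ToyPropertyTable *table = lastProp->table) {
        lastProp->table = nullptr;   // the old last shape gives the table up...
        newLast->table = table;      // ...and the new last shape takes it over
    }
    lastProp = newLast;
}
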
*/ - if (!(attrs & JSPROP_SHARED) && - slot == SPROP_INVALID_SLOT && - SPROP_HAS_VALID_SLOT(sprop, this)) { - slot = sprop->slot; - } - if (sprop->matchesParamsAfterId(getter, setter, slot, attrs, flags, shortid)) { + if (!(attrs & JSPROP_SHARED) && slot == SHAPE_INVALID_SLOT && containsSlot(shape->slot)) + slot = shape->slot; + if (shape->matchesParamsAfterId(getter, setter, slot, attrs, flags, shortid)) { METER(redundantPuts); - return sprop; + return shape; } + PropertyTable *table = inDictionaryMode() ? lastProp->table : NULL; + /* - * If we are clearing sprop to force the existing property that it - * describes to be overwritten, then we have to unlink sprop from the - * ancestor line at this->lastProp. + * If we are clearing shape to force the existing property that it + * describes to be overwritten, then we have to unlink shape from the + * ancestor line at lastProp->lastProp. * - * If sprop is not lastProp and this scope is not in dictionary mode, + * If shape is not lastProp and this scope is not in dictionary mode, * we must switch to dictionary mode so we can unlink the non-terminal - * sprop without breaking anyone sharing the property lineage via the - * runtime's property tree. + * shape without breaking anyone sharing the property lineage via our + * prototype's property tree. */ - if (sprop == lastProp && !inDictionaryMode()) { + Shape *oldLastProp = lastProp; + if (shape == lastProp && !inDictionaryMode()) { removeLastProperty(); } else { if (!inDictionaryMode()) { - if (!toDictionaryMode(cx, sprop)) + if (!toDictionaryMode(cx)) return NULL; - spp = search(id, false); + spp = nativeSearch(id); + shape = SHAPE_FETCH(spp); } - removeDictionaryProperty(sprop); + shape->removeFromDictionary(this); } /* - * If we fail later on trying to find or create a new sprop, we will + * If we fail later on trying to find or create a new shape, we will * restore *spp from |overwriting|. Note that we don't bother to keep - * this->removedCount in sync, because we will fix up both *spp and - * this->entryCount shortly. + * table->removedCount in sync, because we will fix up both *spp and + * table->entryCount shortly. */ if (table) - SPROP_STORE_PRESERVING_COLLISION(spp, NULL); - CHECK_ANCESTOR_LINE(this, true); + SHAPE_STORE_PRESERVING_COLLISION(spp, NULL); { /* Find or create a property tree node labeled by our arguments. */ - JSScopeProperty child(id, getter, setter, slot, attrs, flags, shortid); - sprop = getChildProperty(cx, lastProp, child); + Shape child(id, getter, setter, slot, attrs, flags, shortid); + shape = getChildProperty(cx, lastProp, child); } - if (sprop) { - CHECK_ANCESTOR_LINE(this, false); + if (shape) { + JS_ASSERT(shape == lastProp); if (table) { /* Store the tree node pointer in the table entry for id. */ - SPROP_STORE_PRESERVING_COLLISION(spp, sprop); - } else if (entryCount >= SCOPE_HASH_THRESHOLD) { - /* See comment in JSScope::addPropertyHelper about ignoring OOM here. */ - (void) createTable(cx, false); + SHAPE_STORE_PRESERVING_COLLISION(spp, shape); + + /* Move table from oldLastProp to the new lastProp, aka shape. */ + JS_ASSERT(oldLastProp->table == table); + oldLastProp->setTable(NULL); + shape->setTable(table); + } + + if (!lastProp->table) { + /* See comment in JSObject::addPropertyCommon about ignoring OOM here. 
*/ + lastProp->maybeHash(cx); } METER(puts); - return sprop; + return shape; } if (table) - SPROP_STORE_PRESERVING_COLLISION(spp, overwriting); - ++entryCount; - CHECK_ANCESTOR_LINE(this, true); + SHAPE_STORE_PRESERVING_COLLISION(spp, overwriting); METER(putFails); return NULL; } -JSScopeProperty * -JSScope::changeProperty(JSContext *cx, JSScopeProperty *sprop, - uintN attrs, uintN mask, - PropertyOp getter, PropertyOp setter) +const Shape * +JSObject::changeProperty(JSContext *cx, const Shape *shape, uintN attrs, uintN mask, + PropertyOp getter, PropertyOp setter) { - JSScopeProperty *newsprop; + const Shape *newShape; - JS_ASSERT(JS_IS_SCOPE_LOCKED(cx, this)); - CHECK_ANCESTOR_LINE(this, true); + JS_ASSERT(!JSID_IS_VOID(shape->id)); + JS_ASSERT(nativeContains(*shape)); - JS_ASSERT(!JSID_IS_VOID(sprop->id)); - JS_ASSERT(hasProperty(sprop)); - - attrs |= sprop->attrs & mask; + attrs |= shape->attrs & mask; /* Allow only shared (slot-less) => unshared (slot-full) transition. */ - JS_ASSERT(!((attrs ^ sprop->attrs) & JSPROP_SHARED) || + JS_ASSERT(!((attrs ^ shape->attrs) & JSPROP_SHARED) || !(attrs & JSPROP_SHARED)); /* Don't allow method properties to be changed to have a getter. */ - JS_ASSERT_IF(getter != sprop->rawGetter, !sprop->isMethod()); + JS_ASSERT_IF(getter != shape->rawGetter, !shape->isMethod()); if (getter == PropertyStub) getter = NULL; if (setter == PropertyStub) setter = NULL; - if (sprop->attrs == attrs && sprop->getter() == getter && sprop->setter() == setter) - return sprop; + if (shape->attrs == attrs && shape->getter() == getter && shape->setter() == setter) + return shape; - JSScopeProperty child(sprop->id, getter, setter, sprop->slot, attrs, sprop->flags, - sprop->shortid); + Shape child(shape->id, getter, setter, shape->slot, attrs, shape->flags, shape->shortid); if (inDictionaryMode()) { - removeDictionaryProperty(sprop); - newsprop = newDictionaryProperty(cx, child, &lastProp); - if (newsprop) { - if (table) { - JSScopeProperty **spp = search(sprop->id, false); - SPROP_STORE_PRESERVING_COLLISION(spp, newsprop); + shape->removeFromDictionary(this); + newShape = Shape::newDictionaryShape(cx, child, &lastProp); + if (newShape) { + JS_ASSERT(newShape == lastProp); + + if (PropertyTable *table = shape->table) { + /* Overwrite shape with newShape in newShape's table. */ + Shape **spp = table->search(shape->id, true); + SHAPE_STORE_PRESERVING_COLLISION(spp, newShape); + + /* Hand the table off from shape to newShape. */ + shape->setTable(NULL); + newShape->setTable(table); } + + updateFlags(newShape); updateShape(cx); } - } else if (sprop == lastProp) { - newsprop = getChildProperty(cx, sprop->parent, child); - if (newsprop) { - if (table) { - JSScopeProperty **spp = search(sprop->id, false); - JS_ASSERT(SPROP_FETCH(spp) == sprop); - SPROP_STORE_PRESERVING_COLLISION(spp, newsprop); + } else if (shape == lastProp) { + newShape = getChildProperty(cx, shape->parent, child); +#ifdef DEBUG + if (newShape) { + JS_ASSERT(newShape == lastProp); + if (newShape->table) { + Shape **spp = nativeSearch(shape->id); + JS_ASSERT(SHAPE_FETCH(spp) == newShape); } - CHECK_ANCESTOR_LINE(this, true); } +#endif } else { /* - * Let JSScope::putProperty handle this |overwriting| case, including - * the conservation of sprop->slot (if it's valid). We must not call - * JSScope::removeProperty because it will free a valid sprop->slot and - * JSScope::putProperty won't re-allocate it. 
+ * Let JSObject::putProperty handle this |overwriting| case, including + * the conservation of shape->slot (if it's valid). We must not call + * JSObject::removeProperty because it will free a valid shape->slot and + * JSObject::putProperty won't re-allocate it. */ - newsprop = putProperty(cx, child.id, child.rawGetter, child.rawSetter, child.slot, + newShape = putProperty(cx, child.id, child.rawGetter, child.rawSetter, child.slot, child.attrs, child.flags, child.shortid); } #ifdef DEBUG - if (newsprop) + if (newShape) METER(changes); else METER(changeFails); #endif - return newsprop; + return newShape; } bool -JSScope::removeProperty(JSContext *cx, jsid id) +JSObject::removeProperty(JSContext *cx, jsid id) { - JSScopeProperty **spp, *sprop; - uint32 size; - - JS_ASSERT(JS_IS_SCOPE_LOCKED(cx, this)); - CHECK_ANCESTOR_LINE(this, true); if (sealed()) { reportReadOnlyScope(cx); return false; } - spp = search(id, false); - sprop = SPROP_CLEAR_COLLISION(*spp); - if (!sprop) { + Shape **spp = nativeSearch(id); + Shape *shape = SHAPE_FETCH(spp); + if (!shape) { METER(uselessRemoves); return true; } - /* If sprop is not the last property added, switch to dictionary mode. */ - if (sprop != lastProp) { + /* If shape is not the last property added, switch to dictionary mode. */ + if (shape != lastProp) { if (!inDictionaryMode()) { - if (!toDictionaryMode(cx, sprop)) + if (!toDictionaryMode(cx)) return false; - spp = search(id, false); + spp = nativeSearch(shape->id); + shape = SHAPE_FETCH(spp); } - JS_ASSERT(SPROP_FETCH(spp) == sprop); + JS_ASSERT(SHAPE_FETCH(spp) == shape); } - /* First, if sprop is unshared and not cleared, free its slot number. */ - if (SPROP_HAS_VALID_SLOT(sprop, this)) { - js_FreeSlot(cx, object, sprop->slot); + /* First, if shape is unshared and not cleared, free its slot number. */ + if (containsSlot(shape->slot)) { + freeSlot(cx, shape->slot); JS_ATOMIC_INCREMENT(&cx->runtime->propertyRemovals); } - /* Next, remove id by setting its entry to a removed or free sentinel. */ - if (SPROP_HAD_COLLISION(*spp)) { - JS_ASSERT(table); - *spp = SPROP_REMOVED; - ++removedCount; - } else { - METER(removeFrees); - if (table) { - *spp = NULL; -#ifdef DEBUG - /* - * Check the consistency of the table but limit the number of - * checks not to alter significantly the complexity of the delete - * in debug builds, see bug 534493. - */ - JSScopeProperty *aprop = lastProp; - for (unsigned n = 50; aprop && n != 0; aprop = aprop->parent, --n) - JS_ASSERT_IF(aprop != sprop, hasProperty(aprop)); -#endif - } - } - LIVE_SCOPE_METER(cx, --cx->runtime->liveScopeProps); - + /* + * Next, consider removing id from lastProp->table if in dictionary mode, + * by setting its entry to a removed or free sentinel. + */ if (inDictionaryMode()) { + PropertyTable *table = lastProp->table; + + if (SHAPE_HAD_COLLISION(*spp)) { + JS_ASSERT(table); + *spp = SHAPE_REMOVED; + ++table->removedCount; + --table->entryCount; + } else { + METER(removeFrees); + if (table) { + *spp = NULL; + --table->entryCount; + +#ifdef DEBUG + /* + * Check the consistency of the table but limit the number of + * checks not to alter significantly the complexity of the + * delete in debug builds, see bug 534493. 
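
// A compact sketch of the removal rule applied just above: a table slot that ever
// took part in a collision chain is replaced by a REMOVED sentinel so later probes
// keep walking past it, while a collision-free slot can simply be nulled out. The
// tagged-pointer encoding and names here are hypothetical simplifications of
// SHAPE_HAD_COLLISION / SHAPE_REMOVED.
#include <cstdint>

struct Slot { void *ptr; };                          // a tagged entry pointer in the real table

const uintptr_t COLLISION_BIT = 1;                   // low bit: "a probe once passed here"
void *const REMOVED_SENTINEL = reinterpret_cast<void *>(uintptr_t(1));

inline void removeEntry(Slot *spp, uint32_t &entryCount, uint32_t &removedCount) {
    uintptr_t word = reinterpret_cast<uintptr_t>(spp->ptr);
    if (word & COLLISION_BIT) {
        spp->ptr = REMOVED_SENTINEL;                 // later probes must continue past this slot
        ++removedCount;
    } else {
        spp->ptr = nullptr;                          // nothing ever probed past this slot
    }
    --entryCount;
}
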
+ */ + const Shape *aprop = lastProp; + for (unsigned n = 50; aprop->parent && n != 0; aprop = aprop->parent, --n) + JS_ASSERT_IF(aprop != shape, nativeContains(*aprop)); +#endif + } + } + /* - * Remove sprop from its scope-owned doubly linked list, setting this - * scope's OWN_SHAPE flag first if sprop is lastProp so updateShape(cx) + * Remove shape from its non-circular doubly linked list, setting this + * object's shape first if shape is not lastProp so the updateShape(cx) * after this if-else will generate a fresh shape for this scope. */ - if (sprop != lastProp) - setOwnShape(); - removeDictionaryProperty(sprop); + if (shape != lastProp) + setOwnShape(lastProp->shape); + shape->setTable(NULL); + shape->removeFromDictionary(this); + lastProp->setTable(table); } else { - JS_ASSERT(sprop == lastProp); + /* + * Non-dictionary-mode property tables are shared immutables, so all we + * need do is retract lastProp and we'll either get or else lazily make + * via a later maybeHash the exact table for the new property lineage. + */ + JS_ASSERT(shape == lastProp); removeLastProperty(); } updateShape(cx); - CHECK_ANCESTOR_LINE(this, true); - /* Last, consider shrinking this->table if its load factor is <= .25. */ - size = SCOPE_CAPACITY(this); - if (size > MIN_SCOPE_SIZE && entryCount <= size >> 2) { - METER(shrinks); - (void) changeTable(cx, -1); + /* Last, consider shrinking table if its load factor is <= .25. */ + if (PropertyTable *table = lastProp->table) { + uint32 size = table->capacity(); + if (size > PropertyTable::MIN_SIZE && table->entryCount <= size >> 2) { + METER(shrinks); + (void) table->change(cx, -1); + } } + LIVE_SCOPE_METER(cx, --cx->runtime->liveObjectProps); METER(removes); return true; } void -JSScope::clear(JSContext *cx) +JSObject::clear(JSContext *cx) { - CHECK_ANCESTOR_LINE(this, true); - LIVE_SCOPE_METER(cx, cx->runtime->liveScopeProps -= entryCount); + LIVE_SCOPE_METER(cx, cx->runtime->liveObjectProps -= propertyCount()); - if (table) - js_free(table); - clearDictionaryMode(); - clearOwnShape(); - LeaveTraceIfGlobalObject(cx, object); + Shape *shape = lastProp; + JS_ASSERT(inDictionaryMode() == shape->inDictionary()); - Class *clasp = object->getClass(); - JSObject *proto = object->getProto(); - JSEmptyScope *emptyScope; - uint32 newShape; - if (proto && - proto->isNative() && - (emptyScope = proto->scope()->emptyScope) && - emptyScope->clasp == clasp) { - newShape = emptyScope->shape; - } else { - newShape = js_GenerateShape(cx, false); + while (shape->parent) { + shape = shape->parent; + JS_ASSERT(inDictionaryMode() == shape->inDictionary()); } - initMinimal(cx, newShape); + JS_ASSERT(shape->isEmptyShape()); + if (inDictionaryMode()) + shape->listp = &lastProp; + + /* + * We have rewound to a uniquely-shaped empty scope, so we don't need an + * override for this object's shape. + */ + clearOwnShape(); + setMap(shape); + + LeaveTraceIfGlobalObject(cx, this); JS_ATOMIC_INCREMENT(&cx->runtime->propertyRemovals); } void -JSScope::deletingShapeChange(JSContext *cx, JSScopeProperty *sprop) +JSObject::generateOwnShape(JSContext *cx) { - JS_ASSERT(!JSID_IS_VOID(sprop->id)); +#ifdef JS_TRACER + LeaveTraceIfGlobalObject(cx, this); + + /* + * The JIT must have arranged to re-guard after any unpredictable shape + * change, so if we are on trace here, we should already be prepared to + * bail off trace. + */ + JS_ASSERT_IF(JS_ON_TRACE(cx), cx->bailExit); + + /* + * If we are recording, here is where we forget already-guarded shapes. 
+ * Any subsequent property operation upon object on the trace currently + * being recorded will re-guard (and re-memoize). + */ + TraceMonitor *tm = &JS_TRACE_MONITOR(cx); + if (TraceRecorder *tr = tm->recorder) + tr->forgetGuardedShapesForObject(this); +#endif + + setOwnShape(js_GenerateShape(cx, false)); +} + +void +JSObject::deletingShapeChange(JSContext *cx, const Shape &shape) +{ + JS_ASSERT(!JSID_IS_VOID(shape.id)); generateOwnShape(cx); } bool -JSScope::methodShapeChange(JSContext *cx, JSScopeProperty *sprop) +JSObject::methodShapeChange(JSContext *cx, const Shape &shape) { - JS_ASSERT(!JSID_IS_VOID(sprop->id)); - if (sprop->isMethod()) { + JS_ASSERT(!JSID_IS_VOID(shape.id)); + if (shape.isMethod()) { #ifdef DEBUG - const Value &prev = object->lockedGetSlot(sprop->slot); - JS_ASSERT(&sprop->methodObject() == &prev.toObject()); + const Value &prev = lockedGetSlot(shape.slot); + JS_ASSERT(&shape.methodObject() == &prev.toObject()); + JS_ASSERT(canHaveMethodBarrier()); JS_ASSERT(hasMethodBarrier()); - JS_ASSERT(object->canHaveMethodBarrier()); - JS_ASSERT(!sprop->rawSetter || sprop->rawSetter == js_watch_set); + JS_ASSERT(!shape.rawSetter || shape.rawSetter == js_watch_set); #endif /* - * Pass null to make a stub getter, but pass along sprop->setter to - * preserve watchpoints. Clear JSScopeProperty::METHOD from flags as we - * are despecializing from a method memoized in the property tree to a + * Pass null to make a stub getter, but pass along shape.rawSetter to + * preserve watchpoints. Clear Shape::METHOD from flags as we are + * despecializing from a method memoized in the property tree to a * plain old function-valued property. */ - sprop = putProperty(cx, sprop->id, NULL, sprop->rawSetter, sprop->slot, - sprop->attrs, - sprop->getFlags() & ~JSScopeProperty::METHOD, - sprop->shortid); - if (!sprop) + if (!putProperty(cx, shape.id, NULL, shape.rawSetter, shape.slot, + shape.attrs, + shape.getFlags() & ~Shape::METHOD, + shape.shortid)) { return false; + } } generateOwnShape(cx); @@ -1234,35 +1100,36 @@ JSScope::methodShapeChange(JSContext *cx, JSScopeProperty *sprop) } bool -JSScope::methodShapeChange(JSContext *cx, uint32 slot) +JSObject::methodShapeChange(JSContext *cx, uint32 slot) { if (!hasMethodBarrier()) { generateOwnShape(cx); } else { - for (JSScopeProperty *sprop = lastProp; sprop; sprop = sprop->parent) { - JS_ASSERT(!JSID_IS_VOID(sprop->id)); - if (sprop->slot == slot) - return methodShapeChange(cx, sprop); + for (Shape::Range r = lastProp->all(); !r.empty(); r.popFront()) { + const Shape &shape = r.front(); + JS_ASSERT(!JSID_IS_VOID(shape.id)); + if (shape.slot == slot) + return methodShapeChange(cx, shape); } } return true; } void -JSScope::protoShapeChange(JSContext *cx) +JSObject::protoShapeChange(JSContext *cx) { generateOwnShape(cx); } void -JSScope::shadowingShapeChange(JSContext *cx, JSScopeProperty *sprop) +JSObject::shadowingShapeChange(JSContext *cx, const Shape &shape) { - JS_ASSERT(!JSID_IS_VOID(sprop->id)); + JS_ASSERT(!JSID_IS_VOID(shape.id)); generateOwnShape(cx); } bool -JSScope::globalObjectOwnShapeChange(JSContext *cx) +JSObject::globalObjectOwnShapeChange(JSContext *cx) { generateOwnShape(cx); return !js_IsPropertyCacheDisabled(cx); @@ -1272,14 +1139,14 @@ JSScope::globalObjectOwnShapeChange(JSContext *cx) static void PrintPropertyGetterOrSetter(JSTracer *trc, char *buf, size_t bufsize) { - JSScopeProperty *sprop; + Shape *shape; jsid id; size_t n; const char *name; JS_ASSERT(trc->debugPrinter == PrintPropertyGetterOrSetter); - sprop = 
(JSScopeProperty *)trc->debugPrintArg; - id = sprop->id; + shape = (Shape *)trc->debugPrintArg; + id = shape->id; JS_ASSERT(!JSID_IS_VOID(id)); name = trc->debugPrintIndex ? js_setter_str : js_getter_str; @@ -1288,7 +1155,7 @@ PrintPropertyGetterOrSetter(JSTracer *trc, char *buf, size_t bufsize) JSID_TO_STRING(id), 0); if (n < bufsize - 1) JS_snprintf(buf + n, bufsize - n, " %s", name); - } else if (JSID_IS_INT(sprop->id)) { + } else if (JSID_IS_INT(shape->id)) { JS_snprintf(buf, bufsize, "%d %s", JSID_TO_INT(id), name); } else { JS_snprintf(buf, bufsize, " %s", name); @@ -1298,13 +1165,13 @@ PrintPropertyGetterOrSetter(JSTracer *trc, char *buf, size_t bufsize) static void PrintPropertyMethod(JSTracer *trc, char *buf, size_t bufsize) { - JSScopeProperty *sprop; + Shape *shape; jsid id; size_t n; JS_ASSERT(trc->debugPrinter == PrintPropertyMethod); - sprop = (JSScopeProperty *)trc->debugPrintArg; - id = sprop->id; + shape = (Shape *)trc->debugPrintArg; + id = shape->id; JS_ASSERT(!JSID_IS_VOID(id)); JS_ASSERT(JSID_IS_ATOM(id)); @@ -1315,10 +1182,11 @@ PrintPropertyMethod(JSTracer *trc, char *buf, size_t bufsize) #endif void -JSScopeProperty::trace(JSTracer *trc) +Shape::trace(JSTracer *trc) const { if (IS_GC_MARKING_TRACER(trc)) mark(); + MarkId(trc, id, "id"); if (attrs & (JSPROP_GETTER | JSPROP_SETTER)) { diff --git a/js/src/jsscope.h b/js/src/jsscope.h index 47ebe52a78af..cf296b261aa5 100644 --- a/js/src/jsscope.h +++ b/js/src/jsscope.h @@ -43,17 +43,17 @@ /* * JS symbol tables. */ +#include #ifdef DEBUG #include #endif #include "jstypes.h" #include "jscntxt.h" -#include "jslock.h" +#include "jshashtable.h" #include "jsobj.h" #include "jsprvtd.h" #include "jspubtd.h" -#include "jspropertycache.h" #include "jspropertytree.h" #ifdef _MSC_VER @@ -73,10 +73,10 @@ * * The tree construction goes as follows. If any empty scope in the runtime * has a property X added to it, find or create a node under the tree root - * labeled X, and set scope->lastProp to point at that node. If any non-empty + * labeled X, and set obj->lastProp to point at that node. If any non-empty * scope whose most recently added property is labeled Y has another property * labeled Z added, find or create a node for Z under the node that was added - * for Y, and set scope->lastProp to point at that node. + * for Y, and set obj->lastProp to point at that node. * * A property is labeled by its members' values: id, getter, setter, slot, * attributes, tiny or short id, and a field telling for..in order. Note that @@ -98,7 +98,7 @@ * would require a sort in js_Enumerate, and an entry order generation number * per scope. An order number beats a list, which should be doubly-linked for * O(1) delete. An even better scheme is to use a parent link in the property - * tree, so that the ancestor line can be iterated from scope->lastProp when + * tree, so that the ancestor line can be iterated from obj->lastProp when * filling in a JSIdArray from back to front. This parent link also helps the * GC to sweep properties iteratively. * @@ -195,388 +195,66 @@ * have extremely low degree (see the MeterPropertyTree code that histograms * child-counts in jsscope.c), so instead of a hash-table we use a linked list * of child node pointer arrays ("kid chunks"). The details are isolated in - * jsscope.c; others must treat JSScopeProperty.kids as opaque. We leave it - * strongly typed for debug-ability of the common (null or one-kid) cases. + * jspropertytree.h/.cpp; others must treat js::Shape.kids as opaque. 
* * One final twist (can you stand it?): the mean number of entries per scope * in Mozilla is < 5, with a large standard deviation (~8). Instead of always * allocating scope->table, we leave it null while initializing all the other * scope members as if it were non-null and minimal-length. Until a property * is added that crosses the threshold of 6 or more entries for hashing, we use - * linear search from scope->lastProp to find a given id, and save on the space + * linear search from obj->lastProp to find a given id, and save on the space * overhead of a hash table. - * - * See jspropertytree.{h,cpp} for the actual PropertyTree implementation. This - * file contains object property map (historical misnomer: "scope" AKA JSScope) - * and property tree node ("sprop", JSScopeProperty) declarations. */ -struct JSEmptyScope; +namespace js { -#define SPROP_INVALID_SLOT 0xffffffff +/* + * Shapes use multiplicative hashing, _a la_ jsdhash.[ch], but specialized to + * minimize footprint. But if a Shape lineage has fewer than HASH_THRESHOLD + * entries, we use linear search and avoid allocating scope->table. + */ +struct PropertyTable { + enum { + HASH_THRESHOLD = 6, + MIN_SIZE_LOG2 = 4, + MIN_SIZE = JS_BIT(MIN_SIZE_LOG2) + }; -struct JSScope : public JSObjectMap -{ -#ifdef JS_THREADSAFE - JSTitle title; /* lock state */ -#endif - JSObject *object; /* object that owns this scope */ - uint32 freeslot; /* index of next free slot in object */ - protected: - uint8 flags; /* flags, see below */ - public: - int8 hashShift; /* multiplicative hash shift */ + int hashShift; /* multiplicative hash shift */ - uint16 spare; /* reserved */ uint32 entryCount; /* number of entries in table */ uint32 removedCount; /* removed entry sentinels in table */ - JSScopeProperty **table; /* table of ptrs to shared tree nodes */ - JSEmptyScope *emptyScope; /* cache for getEmptyScope below */ + js::Shape **entries; /* table of ptrs to shared tree nodes */ - /* - * A little information hiding for scope->lastProp, in case it ever becomes - * a tagged pointer again. - */ - inline JSScopeProperty *lastProperty() const; - - private: - JSScopeProperty *getChildProperty(JSContext *cx, JSScopeProperty *parent, - JSScopeProperty &child); - - JSScopeProperty *newDictionaryProperty(JSContext *cx, const JSScopeProperty &child, - JSScopeProperty **childp); - - bool toDictionaryMode(JSContext *cx, JSScopeProperty *&aprop); - - /* - * Private pointer to the last added property and methods to manipulate the - * list it links among properties in this scope. The {remove,insert} pair - * for DictionaryProperties assert that the scope is in dictionary mode and - * any reachable properties are flagged as dictionary properties. - * - * NB: these private methods do *not* update this scope's shape to track - * lastProp->shape after they finish updating the linked list in the case - * where lastProp is updated. It is up to calling code in jsscope.cpp to - * call updateShape(cx) after updating lastProp. - */ - JSScopeProperty *lastProp; - - /* These four inline methods are defined further below in this .h file. */ - inline void setLastProperty(JSScopeProperty *sprop); - inline void removeLastProperty(); - inline void removeDictionaryProperty(JSScopeProperty *sprop); - inline void insertDictionaryProperty(JSScopeProperty *sprop, JSScopeProperty **childp); - - /* Defined in jsscopeinlines.h to avoid including implementation dependencies here. 
*/ - inline void updateShape(JSContext *cx); - inline void updateFlags(const JSScopeProperty *sprop, bool isDefinitelyAtom = false); - - protected: - void initMinimal(JSContext *cx, uint32 newShape); - - private: - bool createTable(JSContext *cx, bool report); - bool changeTable(JSContext *cx, int change); - void reportReadOnlyScope(JSContext *cx); - - void setOwnShape() { flags |= OWN_SHAPE; } - void clearOwnShape() { flags &= ~OWN_SHAPE; } - void generateOwnShape(JSContext *cx); - - JSScopeProperty **searchTable(jsid id, bool adding); - inline JSScopeProperty **search(jsid id, bool adding); - inline JSEmptyScope *createEmptyScope(JSContext *cx, js::Class *clasp); - - JSScopeProperty *addPropertyHelper(JSContext *cx, jsid id, - js::PropertyOp getter, js::PropertyOp setter, - uint32 slot, uintN attrs, - uintN flags, intN shortid, - JSScopeProperty **spp); - - public: - JSScope(JSObject *obj) - : JSObjectMap(0), object(obj) {} - - /* Create a mutable, owned, empty scope. */ - static JSScope *create(JSContext *cx, js::Class *clasp, JSObject *obj, uint32 shape); - - void destroy(JSContext *cx); - - /* - * Return an immutable, shareable, empty scope with the same ops as this - * and the same freeslot as this had when empty. - * - * If |this| is the scope of an object |proto|, the resulting scope can be - * used as the scope of a new object whose prototype is |proto|. - */ - inline JSEmptyScope *getEmptyScope(JSContext *cx, js::Class *clasp); - - inline bool ensureEmptyScope(JSContext *cx, js::Class *clasp); - - inline bool canProvideEmptyScope(js::Class *clasp); - - JSScopeProperty *lookup(jsid id); - - inline bool hasProperty(jsid id) { return lookup(id) != NULL; } - inline bool hasProperty(JSScopeProperty *sprop); - - /* Add a property whose id is not yet in this scope. */ - JSScopeProperty *addProperty(JSContext *cx, jsid id, - js::PropertyOp getter, js::PropertyOp setter, - uint32 slot, uintN attrs, - uintN flags, intN shortid); - - /* Add a data property whose id is not yet in this scope. */ - JSScopeProperty *addDataProperty(JSContext *cx, jsid id, uint32 slot, uintN attrs) { - JS_ASSERT(!(attrs & (JSPROP_GETTER | JSPROP_SETTER))); - return addProperty(cx, id, NULL, NULL, slot, attrs, 0, 0); + PropertyTable(uint32 nentries) + : hashShift(JS_DHASH_BITS - MIN_SIZE_LOG2), + entryCount(nentries), + removedCount(0) + { + /* NB: entries is set by init, which must be called. */ } - /* Add or overwrite a property for id in this scope. */ - JSScopeProperty *putProperty(JSContext *cx, jsid id, - js::PropertyOp getter, js::PropertyOp setter, - uint32 slot, uintN attrs, - uintN flags, intN shortid); - - /* Change the given property into a sibling with the same id in this scope. */ - JSScopeProperty *changeProperty(JSContext *cx, JSScopeProperty *sprop, - uintN attrs, uintN mask, - js::PropertyOp getter, js::PropertyOp setter); - - /* Remove id from this scope. */ - bool removeProperty(JSContext *cx, jsid id); - - /* Clear the scope, making it empty. */ - void clear(JSContext *cx); - - /* Extend this scope to have sprop as its last-added property. */ - void extend(JSContext *cx, JSScopeProperty *sprop, bool isDefinitelyAtom = false); - - /* - * Read barrier to clone a joined function object stored as a method. - * Defined in jsscopeinlines.h, but not declared inline per standard style - * in order to avoid gcc warnings. - */ - bool methodReadBarrier(JSContext *cx, JSScopeProperty *sprop, js::Value *vp); - - /* - * Write barrier to check for a method value change. 
Defined inline below - * after methodReadBarrier. Two flavors to handle JSOP_*GVAR, which deals - * in slots not sprops, while not deoptimizing to map slot to sprop unless - * flags show this is necessary. The methodShapeChange overload (directly - * below) parallels this. - */ - bool methodWriteBarrier(JSContext *cx, JSScopeProperty *sprop, const js::Value &v); - bool methodWriteBarrier(JSContext *cx, uint32 slot, const js::Value &v); - - void trace(JSTracer *trc); - - void deletingShapeChange(JSContext *cx, JSScopeProperty *sprop); - bool methodShapeChange(JSContext *cx, JSScopeProperty *sprop); - bool methodShapeChange(JSContext *cx, uint32 slot); - void protoShapeChange(JSContext *cx); - void shadowingShapeChange(JSContext *cx, JSScopeProperty *sprop); - bool globalObjectOwnShapeChange(JSContext *cx); - -/* By definition, hashShift = JS_DHASH_BITS - log2(capacity). */ -#define SCOPE_CAPACITY(scope) JS_BIT(JS_DHASH_BITS-(scope)->hashShift) - - enum { - DICTIONARY_MODE = 0x0001, - SEALED = 0x0002, - BRANDED = 0x0004, - INDEXED_PROPERTIES = 0x0008, - OWN_SHAPE = 0x0010, - METHOD_BARRIER = 0x0020, - - /* - * This flag toggles with each shape-regenerating GC cycle. - * See JSRuntime::gcRegenShapesScopeFlag. - */ - SHAPE_REGEN = 0x0040, - - /* The anti-branded flag, to avoid overspecializing. */ - GENERIC = 0x0080 - }; - - bool inDictionaryMode() { return flags & DICTIONARY_MODE; } - void setDictionaryMode() { flags |= DICTIONARY_MODE; } - void clearDictionaryMode() { flags &= ~DICTIONARY_MODE; } - - /* - * Don't define clearSealed, as it can't be done safely because JS_LOCK_OBJ - * will avoid taking the lock if the object owns its scope and the scope is - * sealed. - */ - bool sealed() { return flags & SEALED; } - - void seal(JSContext *cx) { - JS_ASSERT(!isSharedEmpty()); - JS_ASSERT(!sealed()); - generateOwnShape(cx); - flags |= SEALED; + ~PropertyTable() { + js_free(entries); } - /* - * A branded scope's object contains plain old methods (function-valued - * properties without magic getters and setters), and its scope->shape - * evolves whenever a function value changes. - */ - bool branded() { return flags & BRANDED; } - - bool brand(JSContext *cx, uint32 slot, const js::Value &) { - JS_ASSERT(!generic()); - JS_ASSERT(!branded()); - generateOwnShape(cx); - if (js_IsPropertyCacheDisabled(cx)) // check for rt->shapeGen overflow - return false; - flags |= BRANDED; - return true; - } - - bool generic() { return flags & GENERIC; } + /* By definition, hashShift = JS_DHASH_BITS - log2(capacity). */ + uint32 capacity() const { return JS_BIT(JS_DHASH_BITS - hashShift); } /* - * Here and elsewhere "unbrand" means "make generic". We never actually - * clear the BRANDED bit on any object. Once branded, there's no point in - * being generic, since the shape has already evolved unpredictably. So - * obj->unbrand() on a branded object does nothing. + * NB: init and change are fallible but do not report OOM, so callers can + * cope or ignore. They do update the malloc counter on success. 
*/ - void unbrand(JSContext *cx) { - if (!branded()) - flags |= GENERIC; - } - - bool hadIndexedProperties() { return flags & INDEXED_PROPERTIES; } - void setIndexedProperties() { flags |= INDEXED_PROPERTIES; } - - bool hasOwnShape() { return flags & OWN_SHAPE; } - - bool hasRegenFlag(uint8 regenFlag) { return (flags & SHAPE_REGEN) == regenFlag; } - - /* - * A scope has a method barrier when some compiler-created "null closure" - * function objects (functions that do not use lexical bindings above - * their scope, only free variable names) that have a correct parent value - * thanks to the COMPILE_N_GO optimization are stored as newly added direct - * property values of the scope's object. - * - * The de-facto standard JS language requires each evaluation of such a - * closure to result in a unique (according to === and observable effects) - * function object. ES3 tried to allow implementations to "join" such - * objects to a single compiler-created object, but this makes an overt - * mutation hazard, also an "identity hazard" against interoperation among - * implementations that join and do not join. - * - * To stay compatible with the de-facto standard, we store the compiler- - * created function object as the method value and set the METHOD_BARRIER - * flag. - * - * The method value is part of the method property tree node's identity, so - * it effectively brands the scope with a predictable shape corresponding - * to the method value, but without the overhead of setting the BRANDED - * flag, which requires assigning a new shape peculiar to each branded - * scope. Instead the shape is shared via the property tree among all the - * scopes referencing the method property tree node. - * - * Then when reading from a scope for which scope->hasMethodBarrier() is - * true, we count on the scope's qualified/guarded shape being unique and - * add a read barrier that clones the compiler-created function object on - * demand, reshaping the scope. - * - * This read barrier is bypassed when evaluating the callee sub-expression - * of a call expression (see the JOF_CALLOP opcodes in jsopcode.tbl), since - * such ops do not present an identity or mutation hazard. The compiler - * performs this optimization only for null closures that do not use their - * own name or equivalent built-in references (arguments.callee). - * - * The BRANDED write barrier, JSScope::methodWriteBarrer, must check for - * METHOD_BARRIER too, and regenerate this scope's shape if the method's - * value is in fact changing. - */ - bool hasMethodBarrier() { return flags & METHOD_BARRIER; } - void setMethodBarrier() { flags |= METHOD_BARRIER; } - - /* - * Test whether this scope may be branded due to method calls, which means - * any assignment to a function-valued property must regenerate shape; else - * test whether this scope has method properties, which require a method - * write barrier. 
- */ - bool - brandedOrHasMethodBarrier() { return flags & (BRANDED | METHOD_BARRIER); } - - bool isSharedEmpty() const { return !object; } - - static bool initRuntimeState(JSContext *cx); - static void finishRuntimeState(JSContext *cx); - - enum { - EMPTY_ARGUMENTS_SHAPE = 1, - EMPTY_BLOCK_SHAPE = 2, - EMPTY_CALL_SHAPE = 3, - EMPTY_DECL_ENV_SHAPE = 4, - EMPTY_ENUMERATOR_SHAPE = 5, - EMPTY_WITH_SHAPE = 6, - LAST_RESERVED_SHAPE = 6 - }; + bool init(JSContext *cx, js::Shape *lastProp); + bool change(JSContext *cx, int change); + js::Shape **search(jsid id, bool adding); }; -struct JSEmptyScope : public JSScope -{ - js::Class * const clasp; - jsrefcount nrefs; /* count of all referencing objects */ +} /* namespace js */ - JSEmptyScope(JSContext *cx, js::Class *clasp); +struct JSObject; - JSEmptyScope *hold() { - /* The method is only called for already held objects. */ - JS_ASSERT(nrefs >= 1); - JS_ATOMIC_INCREMENT(&nrefs); - return this; - } - - void drop(JSContext *cx) { - JS_ASSERT(nrefs >= 1); - JS_ATOMIC_DECREMENT(&nrefs); - if (nrefs == 0) - destroy(cx); - } - - /* - * Optimized version of the drop method to use from the object finalizer - * to skip expensive JS_ATOMIC_DECREMENT. - */ - void dropFromGC(JSContext *cx) { -#ifdef JS_THREADSAFE - JS_ASSERT(CX_THREAD_IS_RUNNING_GC(cx)); -#endif - JS_ASSERT(nrefs >= 1); - --nrefs; - if (nrefs == 0) - destroy(cx); - } -}; - -inline bool -JS_IS_SCOPE_LOCKED(JSContext *cx, JSScope *scope) -{ - return JS_IS_TITLE_LOCKED(cx, &scope->title); -} - -inline JSScope * -JSObject::scope() const -{ - JS_ASSERT(isNative()); - return (JSScope *) map; -} - -inline uint32 -JSObject::shape() const -{ - JS_ASSERT(map->shape != JSObjectMap::SHAPELESS); - return map->shape; -} +#define SHAPE_INVALID_SLOT 0xffffffff inline const js::Value & JSObject::lockedGetSlot(uintN slot) const @@ -596,73 +274,142 @@ namespace js { class PropertyTree; -} /* namespace js */ +static inline PropertyOp +CastAsPropertyOp(js::Class *clasp) +{ + return JS_DATA_TO_FUNC_PTR(PropertyOp, clasp); +} -struct JSScopeProperty { - friend struct JSScope; +struct Shape : public JSObjectMap +{ + friend struct ::JSObject; + friend struct ::JSFunction; friend class js::PropertyTree; - friend JSDHashOperator js::RemoveNodeIfDead(JSDHashTable *table, JSDHashEntryHdr *hdr, - uint32 number, void *arg); - friend void js::SweepScopeProperties(JSContext *cx); - jsid id; + protected: + mutable js::PropertyTable *table; - private: + public: + inline void freeTable(JSContext *cx); + + static bool initRuntimeState(JSContext *cx); + static void finishRuntimeState(JSContext *cx); + + enum { + EMPTY_ARGUMENTS_SHAPE = 1, + EMPTY_BLOCK_SHAPE = 2, + EMPTY_CALL_SHAPE = 3, + EMPTY_DECL_ENV_SHAPE = 4, + EMPTY_ENUMERATOR_SHAPE = 5, + EMPTY_WITH_SHAPE = 6, + LAST_RESERVED_SHAPE = 6 + }; + + jsid id; + + protected: union { js::PropertyOp rawGetter; /* getter and setter hooks or objects */ JSObject *getterObj; /* user-defined callable "get" object or - null if sprop->hasGetterValue(); or + null if shape->hasGetterValue(); or joined function object if METHOD flag is set. 
*/ - JSScopeProperty *next; /* next node in freelist */ + js::Class *clasp; /* prototype class for empty scope */ }; union { js::PropertyOp rawSetter; /* getter is JSObject* and setter is 0 - if sprop->isMethod() */ + if shape->isMethod() */ JSObject *setterObj; /* user-defined callable "set" object or - null if sprop->hasSetterValue() */ - JSScopeProperty **prevp; /* pointer to previous node's next, or - pointer to head of freelist */ + null if shape->hasSetterValue() */ }; - void insertFree(JSScopeProperty *&list) { + public: + uint32 slot; /* abstract index in object slots */ + private: + uint8 attrs; /* attributes, see jsapi.h JSPROP_* */ + mutable uint8 flags; /* flags, see below for defines */ + public: + int16 shortid; /* tinyid, or local arg/var index */ + + protected: + mutable js::Shape *parent; /* parent node, reverse for..in order */ + union { + mutable js::KidsPointer kids; /* null, single child, or a tagged ptr + to many-kids data structure */ + mutable js::Shape **listp; /* dictionary list starting at lastProp + has a double-indirect back pointer, + either to shape->parent if not last, + else to obj->lastProp */ + }; + + static inline js::Shape **search(js::Shape **startp, jsid id, bool adding = false); + static js::Shape *newDictionaryShape(JSContext *cx, const js::Shape &child, js::Shape **listp); + static js::Shape *newDictionaryList(JSContext *cx, js::Shape **listp); + + inline void removeFromDictionary(JSObject *obj) const; + inline void insertIntoDictionary(js::Shape **dictp); + + js::Shape *getChild(JSContext *cx, const js::Shape &child, js::Shape **listp); + + bool maybeHash(JSContext *cx); + + void setTable(js::PropertyTable *t) const { table = t; } + + void insertFree(js::Shape **freep) { id = JSID_VOID; - next = list; - prevp = &list; - if (list) - list->prevp = &next; - list = this; + parent = *freep; + if (parent) + parent->listp = &parent; + listp = freep; + *freep = this; } void removeFree() { JS_ASSERT(JSID_IS_VOID(id)); - *prevp = next; - if (next) - next->prevp = prevp; + *listp = parent; + if (parent) + parent->listp = listp; } public: - uint32 slot; /* abstract index in object slots */ - private: - uint8 attrs; /* attributes, see jsapi.h JSPROP_* */ - uint8 flags; /* flags, see below for defines */ - public: - int16 shortid; /* tinyid, or local arg/var index */ - JSScopeProperty *parent; /* parent node, reverse for..in order */ - union { - JSScopeProperty *kids; /* null, single child, or a tagged ptr - to many-kids data structure */ - JSScopeProperty **childp; /* dictionary list starting at lastProp - has a double-indirect back pointer, - either to sprop->parent if not last, - else to scope->lastProp */ - }; - uint32 shape; /* property cache shape identifier */ + const js::Shape *previous() const { + return parent; + } - private: + class Range { + protected: + friend struct Shape; + + const Shape *cursor; + const Shape *end; + + public: + Range(const Shape *shape) : cursor(shape) { } + + bool empty() const { + JS_ASSERT_IF(!cursor->parent, JSID_IS_EMPTY(cursor->id)); + return !cursor->parent; + } + + const Shape &front() const { + JS_ASSERT(!empty()); + return *cursor; + } + + void popFront() { + JS_ASSERT(!empty()); + cursor = cursor->parent; + } + }; + + Range all() const { + return Range(this); + } + + protected: /* - * Implementation-private bits stored in sprop->flags. See public: enum {} + * Implementation-private bits stored in shape->flags. 
See public: enum {} * flags further below, which were allocated FCFS over time, so interleave * with these bits. */ @@ -670,36 +417,47 @@ struct JSScopeProperty { /* GC mark flag. */ MARK = 0x01, + SHARED_EMPTY = 0x02, + /* * Set during a shape-regenerating GC if the shape has already been - * regenerated. Unlike JSScope::SHAPE_REGEN, this does not toggle with - * each GC. js::SweepScopeProperties clears it. + * regenerated. */ - SHAPE_REGEN = 0x08, + SHAPE_REGEN = 0x04, /* Property stored in per-object dictionary, not shared property tree. */ - IN_DICTIONARY = 0x20 + IN_DICTIONARY = 0x08, + + /* Prevent unwanted mutation of shared JSFunction::u.i.names nodes. */ + FROZEN = 0x10 }; - JSScopeProperty(jsid id, js::PropertyOp getter, js::PropertyOp setter, uint32 slot, - uintN attrs, uintN flags, intN shortid); + Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter, uint32 slot, + uintN attrs, uintN flags, intN shortid); - bool marked() const { return (flags & MARK) != 0; } - void mark() { flags |= MARK; } - void clearMark() { flags &= ~MARK; } + /* Used by EmptyShape (see jsscopeinlines.h). */ + Shape(JSContext *cx, Class *aclasp); - bool hasRegenFlag() const { return (flags & SHAPE_REGEN) != 0; } - void setRegenFlag() { flags |= SHAPE_REGEN; } - void clearRegenFlag() { flags &= ~SHAPE_REGEN; } + bool marked() const { return (flags & MARK) != 0; } + void mark() const { flags |= MARK; } + void clearMark() { flags &= ~MARK; } - bool inDictionary() const { return (flags & IN_DICTIONARY) != 0; } + bool hasRegenFlag() const { return (flags & SHAPE_REGEN) != 0; } + void setRegenFlag() { flags |= SHAPE_REGEN; } + void clearRegenFlag() { flags &= ~SHAPE_REGEN; } + + bool inDictionary() const { return (flags & IN_DICTIONARY) != 0; } + bool frozen() const { return (flags & FROZEN) != 0; } + void setFrozen() { flags |= FROZEN; } + + bool isEmptyShape() const { JS_ASSERT_IF(!parent, JSID_IS_EMPTY(id)); return !parent; } public: - /* Public bits stored in sprop->flags. */ + /* Public bits stored in shape->flags. 
*/ enum { - ALIAS = 0x02, - HAS_SHORTID = 0x04, - METHOD = 0x10, + ALIAS = 0x20, + HAS_SHORTID = 0x40, + METHOD = 0x80, PUBLIC_FLAGS = ALIAS | HAS_SHORTID | METHOD }; @@ -741,17 +499,17 @@ struct JSScopeProperty { } inline JSDHashNumber hash() const; - inline bool matches(const JSScopeProperty *p) const; + inline bool matches(const js::Shape *p) const; inline bool matchesParamsAfterId(js::PropertyOp agetter, js::PropertyOp asetter, uint32 aslot, uintN aattrs, uintN aflags, intN ashortid) const; - bool get(JSContext* cx, JSObject *obj, JSObject *pobj, js::Value* vp); - bool set(JSContext* cx, JSObject *obj, js::Value* vp); + bool get(JSContext* cx, JSObject *obj, JSObject *pobj, js::Value* vp) const; + bool set(JSContext* cx, JSObject *obj, js::Value* vp) const; inline bool isSharedPermanent() const; - void trace(JSTracer *trc); + void trace(JSTracer *trc) const; bool hasSlot() const { return (attrs & JSPROP_SHARED) == 0; } @@ -776,128 +534,190 @@ struct JSScopeProperty { return (attrs & (JSPROP_SETTER | JSPROP_GETTER)) != 0; } + uint32 entryCount() const { + if (table) + return table->entryCount; + + const js::Shape *shape = this; + uint32 count = 0; + for (js::Shape::Range r = shape->all(); !r.empty(); r.popFront()) + ++count; + return count; + } + #ifdef DEBUG - void dump(JSContext *cx, FILE *fp); - void dumpSubtree(JSContext *cx, int level, FILE *fp); + void dump(JSContext *cx, FILE *fp) const; + void dumpSubtree(JSContext *cx, int level, FILE *fp) const; #endif }; -/* JSScopeProperty pointer tag bit indicating a collision. */ -#define SPROP_COLLISION ((jsuword)1) -#define SPROP_REMOVED ((JSScopeProperty *) SPROP_COLLISION) - -/* Macros to get and set sprop pointer values and collision flags. */ -#define SPROP_IS_FREE(sprop) ((sprop) == NULL) -#define SPROP_IS_REMOVED(sprop) ((sprop) == SPROP_REMOVED) -#define SPROP_IS_LIVE(sprop) ((sprop) > SPROP_REMOVED) -#define SPROP_FLAG_COLLISION(spp,sprop) (*(spp) = (JSScopeProperty *) \ - ((jsuword)(sprop) | SPROP_COLLISION)) -#define SPROP_HAD_COLLISION(sprop) ((jsuword)(sprop) & SPROP_COLLISION) -#define SPROP_FETCH(spp) SPROP_CLEAR_COLLISION(*(spp)) - -#define SPROP_CLEAR_COLLISION(sprop) \ - ((JSScopeProperty *) ((jsuword)(sprop) & ~SPROP_COLLISION)) - -#define SPROP_STORE_PRESERVING_COLLISION(spp, sprop) \ - (*(spp) = (JSScopeProperty *) ((jsuword)(sprop) \ - | SPROP_HAD_COLLISION(*(spp)))) - -inline JSScopeProperty * -JSScope::lookup(jsid id) +struct EmptyShape : public js::Shape { - return SPROP_FETCH(search(id, false)); + EmptyShape(JSContext *cx, js::Class *aclasp); + + js::Class *getClass() const { return clasp; }; + + static EmptyShape *create(JSContext *cx, js::Class *clasp) { + js::Shape *eprop = JS_PROPERTY_TREE(cx).newShape(cx); + if (!eprop) + return NULL; + return new (eprop) EmptyShape(cx, clasp); + } +}; + +} /* namespace js */ + +/* js::Shape pointer tag bit indicating a collision. */ +#define SHAPE_COLLISION (jsuword(1)) +#define SHAPE_REMOVED ((js::Shape *) SHAPE_COLLISION) + +/* Macros to get and set shape pointer values and collision flags. 
*/ +#define SHAPE_IS_FREE(shape) ((shape) == NULL) +#define SHAPE_IS_REMOVED(shape) ((shape) == SHAPE_REMOVED) +#define SHAPE_IS_LIVE(shape) ((shape) > SHAPE_REMOVED) +#define SHAPE_FLAG_COLLISION(spp,shape) (*(spp) = (js::Shape *) \ + (jsuword(shape) | SHAPE_COLLISION)) +#define SHAPE_HAD_COLLISION(shape) (jsuword(shape) & SHAPE_COLLISION) +#define SHAPE_FETCH(spp) SHAPE_CLEAR_COLLISION(*(spp)) + +#define SHAPE_CLEAR_COLLISION(shape) \ + ((js::Shape *) (jsuword(shape) & ~SHAPE_COLLISION)) + +#define SHAPE_STORE_PRESERVING_COLLISION(spp, shape) \ + (*(spp) = (js::Shape *) (jsuword(shape) | SHAPE_HAD_COLLISION(*(spp)))) + +inline js::Shape ** +JSObject::nativeSearch(jsid id, bool adding) +{ + return js::Shape::search(&lastProp, id, adding); +} + +inline const js::Shape * +JSObject::nativeLookup(jsid id) +{ + return SHAPE_FETCH(nativeSearch(id)); } inline bool -JSScope::hasProperty(JSScopeProperty *sprop) +JSObject::nativeContains(jsid id) { - return lookup(sprop->id) == sprop; + return nativeLookup(id) != NULL; } -inline JSScopeProperty * -JSScope::lastProperty() const +inline bool +JSObject::nativeContains(const js::Shape &shape) { - JS_ASSERT_IF(lastProp, !JSID_IS_VOID(lastProp->id)); + return nativeLookup(shape.id) == &shape; +} + +inline const js::Shape * +JSObject::lastProperty() const +{ + JS_ASSERT(isNative()); + JS_ASSERT(!JSID_IS_VOID(lastProp->id)); return lastProp; } +inline bool +JSObject::nativeEmpty() const +{ + return lastProperty()->isEmptyShape(); +} + +inline bool +JSObject::inDictionaryMode() const +{ + return lastProperty()->inDictionary(); +} + +inline uint32 +JSObject::propertyCount() const +{ + return lastProperty()->entryCount(); +} + +inline bool +JSObject::hasPropertyTable() const +{ + return !!lastProperty()->table; +} + /* - * Note that sprop must not be null, as emptying a scope requires extra work - * done only by methods in jsscope.cpp. + * FIXME: shape must not be null, should use a reference here and other places. 
*/ inline void -JSScope::setLastProperty(JSScopeProperty *sprop) -{ - JS_ASSERT(!JSID_IS_VOID(sprop->id)); - JS_ASSERT_IF(lastProp, !JSID_IS_VOID(lastProp->id)); - - lastProp = sprop; -} - -inline void -JSScope::removeLastProperty() +JSObject::setLastProperty(const js::Shape *shape) { JS_ASSERT(!inDictionaryMode()); - JS_ASSERT_IF(lastProp->parent, !JSID_IS_VOID(lastProp->parent->id)); + JS_ASSERT(!JSID_IS_VOID(shape->id)); + JS_ASSERT_IF(lastProp, !JSID_IS_VOID(lastProp->id)); + + lastProp = const_cast(shape); +} + +inline void +JSObject::removeLastProperty() +{ + JS_ASSERT(!inDictionaryMode()); + JS_ASSERT(!JSID_IS_VOID(lastProp->parent->id)); lastProp = lastProp->parent; - --entryCount; } +namespace js { + inline void -JSScope::removeDictionaryProperty(JSScopeProperty *sprop) +Shape::removeFromDictionary(JSObject *obj) const { - JS_ASSERT(inDictionaryMode()); - JS_ASSERT(sprop->inDictionary()); - JS_ASSERT(sprop->childp); - JS_ASSERT(!JSID_IS_VOID(sprop->id)); + JS_ASSERT(!frozen()); + JS_ASSERT(inDictionary()); + JS_ASSERT(obj->inDictionaryMode()); + JS_ASSERT(listp); + JS_ASSERT(!JSID_IS_VOID(id)); - JS_ASSERT(lastProp->inDictionary()); - JS_ASSERT(lastProp->childp == &lastProp); - JS_ASSERT_IF(lastProp != sprop, !JSID_IS_VOID(lastProp->id)); - JS_ASSERT_IF(lastProp->parent, !JSID_IS_VOID(lastProp->parent->id)); + JS_ASSERT(obj->lastProp->inDictionary()); + JS_ASSERT(obj->lastProp->listp == &obj->lastProp); + JS_ASSERT_IF(obj->lastProp != this, !JSID_IS_VOID(obj->lastProp->id)); + JS_ASSERT_IF(obj->lastProp->parent, !JSID_IS_VOID(obj->lastProp->parent->id)); - if (sprop->parent) - sprop->parent->childp = sprop->childp; - *sprop->childp = sprop->parent; - --entryCount; - sprop->childp = NULL; + if (parent) + parent->listp = listp; + *listp = parent; + listp = NULL; } inline void -JSScope::insertDictionaryProperty(JSScopeProperty *sprop, JSScopeProperty **childp) +Shape::insertIntoDictionary(js::Shape **dictp) { /* * Don't assert inDictionaryMode() here because we may be called from - * toDictionaryMode via newDictionaryProperty. + * JSObject::toDictionaryMode via JSObject::newDictionaryShape. */ - JS_ASSERT(sprop->inDictionary()); - JS_ASSERT(!sprop->childp); - JS_ASSERT(!JSID_IS_VOID(sprop->id)); + JS_ASSERT(inDictionary()); + JS_ASSERT(!listp); + JS_ASSERT(!JSID_IS_VOID(id)); - JS_ASSERT_IF(*childp, (*childp)->inDictionary()); - JS_ASSERT_IF(lastProp, lastProp->inDictionary()); - JS_ASSERT_IF(lastProp, lastProp->childp == &lastProp); - JS_ASSERT_IF(lastProp, !JSID_IS_VOID(lastProp->id)); + JS_ASSERT_IF(*dictp, !(*dictp)->frozen()); + JS_ASSERT_IF(*dictp, (*dictp)->inDictionary()); + JS_ASSERT_IF(*dictp, (*dictp)->listp == dictp); + JS_ASSERT_IF(*dictp, !JSID_IS_VOID((*dictp)->id)); - sprop->parent = *childp; - *childp = sprop; - if (sprop->parent) - sprop->parent->childp = &sprop->parent; - sprop->childp = childp; - ++entryCount; + parent = *dictp; + if (parent) + parent->listp = &parent; + listp = dictp; + *dictp = this; } -/* - * If SHORTID is set in sprop->flags, we use sprop->shortid rather - * than id when calling sprop's getter or setter. - */ -#define SPROP_USERID(sprop) \ - ((sprop)->hasShortID() ? INT_TO_JSID((sprop)->shortid) \ - : (sprop)->id) +} /* namespace js */ -#define SLOT_IN_SCOPE(slot,scope) ((slot) < (scope)->freeslot) -#define SPROP_HAS_VALID_SLOT(sprop,scope) SLOT_IN_SCOPE((sprop)->slot, scope) +/* + * If SHORTID is set in shape->flags, we use shape->shortid rather + * than id when calling shape's getter or setter. 
+ */ +#define SHAPE_USERID(shape) \ + ((shape)->hasShortID() ? INT_TO_JSID((shape)->shortid) \ + : (shape)->id) #ifndef JS_THREADSAFE # define js_GenerateShape(cx, gcLocked) js_GenerateShape (cx) @@ -912,9 +732,13 @@ struct JSScopeStats { jsrefcount hits; jsrefcount misses; jsrefcount hashes; + jsrefcount hashHits; + jsrefcount hashMisses; jsrefcount steps; jsrefcount stepHits; jsrefcount stepMisses; + jsrefcount initSearches; + jsrefcount changeSearches; jsrefcount tableAllocFails; jsrefcount toDictFails; jsrefcount wrapWatchFails; @@ -940,16 +764,24 @@ extern JS_FRIEND_DATA(JSScopeStats) js_scope_stats; # define METER(x) /* nothing */ #endif -inline JSScopeProperty ** -JSScope::search(jsid id, bool adding) -{ - JSScopeProperty *sprop, **spp; +namespace js { +JS_ALWAYS_INLINE js::Shape ** +Shape::search(js::Shape **startp, jsid id, bool adding) +{ METER(searches); - if (!table) { - /* Not enough properties to justify hashing: search from lastProp. */ - for (spp = &lastProp; (sprop = *spp); spp = &sprop->parent) { - if (sprop->id == id) { + if (!(*startp)->table) { + /* + * Not enough properties to justify hashing: search from *startp. + * + * We don't use a Range here, or stop at null parent (the empty shape + * at the end), to avoid an extra load per iteration just to save a + * load and id test at the end (when missing). + */ + js::Shape **spp; + + for (spp = startp; js::Shape *shape = *spp; spp = &shape->parent) { + if (shape->id == id) { METER(hits); return spp; } @@ -957,34 +789,17 @@ JSScope::search(jsid id, bool adding) METER(misses); return spp; } - return searchTable(id, adding); + return (*startp)->table->search(id, adding); } #undef METER inline bool -JSScope::canProvideEmptyScope(js::Class *clasp) -{ - /* - * An empty scope cannot provide another empty scope, or wrongful two-level - * prototype shape sharing ensues -- see bug 497789. 
- */ - if (!object) - return false; - return !emptyScope || emptyScope->clasp == clasp; -} - -inline bool -JSScopeProperty::isSharedPermanent() const +Shape::isSharedPermanent() const { return (~attrs & (JSPROP_SHARED | JSPROP_PERMANENT)) == 0; } -extern JSScope * -js_GetMutableScope(JSContext *cx, JSObject *obj); - -namespace js { - class AutoObjectLocker { JSContext * const cx; JSObject * const obj; diff --git a/js/src/jsscopeinlines.h b/js/src/jsscopeinlines.h index 3a914020f1ca..12b8aaf20a4c 100644 --- a/js/src/jsscopeinlines.h +++ b/js/src/jsscopeinlines.h @@ -40,6 +40,8 @@ #ifndef jsscopeinlines_h___ #define jsscopeinlines_h___ +#include +#include "jsbool.h" #include "jscntxt.h" #include "jsdbgapi.h" #include "jsfun.h" @@ -48,212 +50,122 @@ #include "jscntxtinlines.h" -inline JSEmptyScope * -JSScope::createEmptyScope(JSContext *cx, js::Class *clasp) +inline void +js::Shape::freeTable(JSContext *cx) { - JS_ASSERT(!isSharedEmpty()); - JS_ASSERT(!emptyScope); - emptyScope = cx->create(cx, clasp); - return emptyScope; + if (table) { + cx->destroy(table); + table = NULL; + } } -inline JSEmptyScope * -JSScope::getEmptyScope(JSContext *cx, js::Class *clasp) +inline js::EmptyShape * +JSObject::getEmptyShape(JSContext *cx, js::Class *aclasp) { - if (emptyScope) { - JS_ASSERT(clasp == emptyScope->clasp); - return emptyScope->hold(); - } - return createEmptyScope(cx, clasp); + if (emptyShape) + JS_ASSERT(aclasp == emptyShape->getClass()); + else + emptyShape = js::EmptyShape::create(cx, aclasp); + return emptyShape; } inline bool -JSScope::ensureEmptyScope(JSContext *cx, js::Class *clasp) +JSObject::canProvideEmptyShape(js::Class *aclasp) { - if (emptyScope) { - JS_ASSERT(clasp == emptyScope->clasp); - return true; - } - if (!createEmptyScope(cx, clasp)) - return false; - - /* We are going to have only single ref to the scope. */ - JS_ASSERT(emptyScope->nrefs == 2); - emptyScope->nrefs = 1; - return true; + return !emptyShape || emptyShape->getClass() == aclasp; } inline void -JSScope::updateShape(JSContext *cx) +JSObject::updateShape(JSContext *cx) { - JS_ASSERT(object); - js::LeaveTraceIfGlobalObject(cx, object); - shape = (hasOwnShape() || !lastProp) ? js_GenerateShape(cx, false) : lastProp->shape; + JS_ASSERT(isNative()); + js::LeaveTraceIfGlobalObject(cx, this); + if (hasOwnShape()) + setOwnShape(js_GenerateShape(cx, false)); + else + objShape = lastProp->shape; } inline void -JSScope::updateFlags(const JSScopeProperty *sprop, bool isDefinitelyAtom) +JSObject::updateFlags(const js::Shape *shape, bool isDefinitelyAtom) { jsuint index; - if (!isDefinitelyAtom && js_IdIsIndex(sprop->id, &index)) - setIndexedProperties(); + if (!isDefinitelyAtom && js_IdIsIndex(shape->id, &index)) + setIndexed(); - if (sprop->isMethod()) + if (shape->isMethod()) setMethodBarrier(); } inline void -JSScope::extend(JSContext *cx, JSScopeProperty *sprop, bool isDefinitelyAtom) +JSObject::extend(JSContext *cx, const js::Shape *shape, bool isDefinitelyAtom) { - ++entryCount; - setLastProperty(sprop); + setLastProperty(shape); + updateFlags(shape, isDefinitelyAtom); updateShape(cx); - updateFlags(sprop, isDefinitelyAtom); -} - -/* - * Property read barrier for deferred cloning of compiler-created function - * objects optimized as typically non-escaping, ad-hoc methods in obj. 
- */ -inline bool -JSScope::methodReadBarrier(JSContext *cx, JSScopeProperty *sprop, js::Value *vp) -{ - JS_ASSERT(hasMethodBarrier()); - JS_ASSERT(hasProperty(sprop)); - JS_ASSERT(sprop->isMethod()); - JS_ASSERT(&vp->toObject() == &sprop->methodObject()); - JS_ASSERT(object->canHaveMethodBarrier()); - - JSObject *funobj = &vp->toObject(); - JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj); - JS_ASSERT(fun == funobj && FUN_NULL_CLOSURE(fun)); - - funobj = CloneFunctionObject(cx, fun, funobj->getParent()); - if (!funobj) - return false; - funobj->setMethodObj(*object); - - vp->setObject(*funobj); - if (!js_SetPropertyHelper(cx, object, sprop->id, 0, vp)) - return false; - -#ifdef DEBUG - if (cx->runtime->functionMeterFilename) { - JS_FUNCTION_METER(cx, mreadbarrier); - - typedef JSRuntime::FunctionCountMap HM; - HM &h = cx->runtime->methodReadBarrierCountMap; - HM::AddPtr p = h.lookupForAdd(fun); - if (!p) { - h.add(p, fun, 1); - } else { - JS_ASSERT(p->key == fun); - ++p->value; - } - } -#endif - return true; -} - -static JS_ALWAYS_INLINE bool -ChangesMethodValue(const js::Value &prev, const js::Value &v) -{ - JSObject *prevObj; - return prev.isObject() && (prevObj = &prev.toObject())->isFunction() && - (!v.isObject() || &v.toObject() != prevObj); -} - -inline bool -JSScope::methodWriteBarrier(JSContext *cx, JSScopeProperty *sprop, - const js::Value &v) -{ - if (flags & (BRANDED | METHOD_BARRIER)) { - const js::Value &prev = object->lockedGetSlot(sprop->slot); - if (ChangesMethodValue(prev, v)) { - JS_FUNCTION_METER(cx, mwritebarrier); - return methodShapeChange(cx, sprop); - } - } - return true; -} - -inline bool -JSScope::methodWriteBarrier(JSContext *cx, uint32 slot, const js::Value &v) -{ - if (flags & (BRANDED | METHOD_BARRIER)) { - const js::Value &prev = object->lockedGetSlot(slot); - if (ChangesMethodValue(prev, v)) { - JS_FUNCTION_METER(cx, mwslotbarrier); - return methodShapeChange(cx, slot); - } - } - return true; } inline void -JSScope::trace(JSTracer *trc) +JSObject::trace(JSTracer *trc) { - JSContext *cx = trc->context; - JSScopeProperty *sprop = lastProp; - uint8 regenFlag = cx->runtime->gcRegenShapesScopeFlag; + if (emptyShape) + emptyShape->trace(trc); - if (IS_GC_MARKING_TRACER(trc) && cx->runtime->gcRegenShapes && !hasRegenFlag(regenFlag)) { + if (!isNative()) + return; + + JSContext *cx = trc->context; + js::Shape *shape = lastProp; + + if (IS_GC_MARKING_TRACER(trc) && cx->runtime->gcRegenShapes) { /* - * Either this scope has its own shape, which must be regenerated, or + * Either this object has its own shape, which must be regenerated, or * it must have the same shape as lastProp. */ - uint32 newShape; - - if (sprop) { - if (!sprop->hasRegenFlag()) { - sprop->shape = js_RegenerateShapeForGC(cx); - sprop->setRegenFlag(); - } - newShape = sprop->shape; + if (!shape->hasRegenFlag()) { + shape->shape = js_RegenerateShapeForGC(cx); + shape->setRegenFlag(); } - if (!sprop || hasOwnShape()) { + + uint32 newShape = shape->shape; + if (hasOwnShape()) { newShape = js_RegenerateShapeForGC(cx); - JS_ASSERT_IF(sprop, newShape != sprop->shape); - } - shape = newShape; - flags ^= JSScope::SHAPE_REGEN; - - /* Also regenerate the shapes of this scope's empty scope, if there is one. 
*/ - JSScope *empty = emptyScope; - if (empty) { - JS_ASSERT(!empty->emptyScope); - if (!empty->hasRegenFlag(regenFlag)) { - uint32 newEmptyShape = js_RegenerateShapeForGC(cx); - - JS_PROPERTY_TREE(cx).emptyShapeChange(empty->shape, newEmptyShape); - empty->shape = newEmptyShape; - empty->flags ^= JSScope::SHAPE_REGEN; - } + JS_ASSERT(newShape != shape->shape); } + objShape = newShape; } - if (sprop) { - JS_ASSERT(hasProperty(sprop)); - - /* Trace scope's property tree ancestor line. */ - do { - sprop->trace(trc); - } while ((sprop = sprop->parent) != NULL); - } + /* Trace our property tree or dictionary ancestor line. */ + do { + shape->trace(trc); + } while ((shape = shape->parent) != NULL); } +namespace js { + inline -JSScopeProperty::JSScopeProperty(jsid id, js::PropertyOp getter, js::PropertyOp setter, - uint32 slot, uintN attrs, uintN flags, intN shortid) - : id(id), rawGetter(getter), rawSetter(setter), slot(slot), attrs(uint8(attrs)), - flags(uint8(flags)), shortid(int16(shortid)) +Shape::Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter, + uint32 slot, uintN attrs, uintN flags, intN shortid) + : JSObjectMap(0), table(NULL), + id(id), rawGetter(getter), rawSetter(setter), slot(slot), attrs(uint8(attrs)), + flags(uint8(flags)), shortid(int16(shortid)), parent(NULL) { JS_ASSERT_IF(getter && (attrs & JSPROP_GETTER), getterObj->isCallable()); JS_ASSERT_IF(setter && (attrs & JSPROP_SETTER), setterObj->isCallable()); + kids.setNull(); +} + +inline +Shape::Shape(JSContext *cx, Class *aclasp) + : JSObjectMap(js_GenerateShape(cx, false)), table(NULL), + id(JSID_EMPTY), clasp(aclasp), rawSetter(NULL), slot(JSSLOT_FREE(aclasp)), attrs(0), + flags(SHARED_EMPTY), shortid(0), parent(NULL) +{ + kids.setNull(); } inline JSDHashNumber -JSScopeProperty::hash() const +Shape::hash() const { JSDHashNumber hash = 0; @@ -272,18 +184,18 @@ JSScopeProperty::hash() const } inline bool -JSScopeProperty::matches(const JSScopeProperty *p) const +Shape::matches(const js::Shape *other) const { JS_ASSERT(!JSID_IS_VOID(id)); - JS_ASSERT(!JSID_IS_VOID(p->id)); - return id == p->id && - matchesParamsAfterId(p->rawGetter, p->rawSetter, p->slot, p->attrs, p->flags, - p->shortid); + JS_ASSERT(!JSID_IS_VOID(other->id)); + return id == other->id && + matchesParamsAfterId(other->rawGetter, other->rawSetter, other->slot, other->attrs, + other->flags, other->shortid); } inline bool -JSScopeProperty::matchesParamsAfterId(js::PropertyOp agetter, js::PropertyOp asetter, uint32 aslot, - uintN aattrs, uintN aflags, intN ashortid) const +Shape::matchesParamsAfterId(js::PropertyOp agetter, js::PropertyOp asetter, uint32 aslot, + uintN aattrs, uintN aflags, intN ashortid) const { JS_ASSERT(!JSID_IS_VOID(id)); return rawGetter == agetter && @@ -295,7 +207,7 @@ JSScopeProperty::matchesParamsAfterId(js::PropertyOp agetter, js::PropertyOp ase } inline bool -JSScopeProperty::get(JSContext* cx, JSObject* obj, JSObject *pobj, js::Value* vp) +Shape::get(JSContext* cx, JSObject* obj, JSObject *pobj, js::Value* vp) const { JS_ASSERT(!JSID_IS_VOID(this->id)); JS_ASSERT(!hasDefaultGetter()); @@ -308,10 +220,7 @@ JSScopeProperty::get(JSContext* cx, JSObject* obj, JSObject *pobj, js::Value* vp if (isMethod()) { vp->setObject(methodObject()); - - JSScope *scope = pobj->scope(); - JS_ASSERT(scope->object == pobj); - return scope->methodReadBarrier(cx, this, vp); + return pobj->methodReadBarrier(cx, *this, vp); } /* @@ -320,11 +229,11 @@ JSScopeProperty::get(JSContext* cx, JSObject* obj, JSObject *pobj, js::Value* vp */ if 
(obj->getClass() == &js_WithClass) obj = js_UnwrapWithObject(cx, obj); - return js::callJSPropertyOp(cx, getterOp(), obj, SPROP_USERID(this), vp); + return js::CallJSPropertyOp(cx, getterOp(), obj, SHAPE_USERID(this), vp); } inline bool -JSScopeProperty::set(JSContext* cx, JSObject* obj, js::Value* vp) +Shape::set(JSContext* cx, JSObject* obj, js::Value* vp) const { JS_ASSERT_IF(hasDefaultSetter(), hasGetterValue()); @@ -336,10 +245,22 @@ JSScopeProperty::set(JSContext* cx, JSObject* obj, js::Value* vp) if (attrs & JSPROP_GETTER) return js_ReportGetterOnlyAssignment(cx); - /* See the comment in JSScopeProperty::get as to why we check for With. */ + /* See the comment in js::Shape::get as to why we check for With. */ if (obj->getClass() == &js_WithClass) obj = js_UnwrapWithObject(cx, obj); - return js::callJSPropertyOpSetter(cx, setterOp(), obj, SPROP_USERID(this), vp); + return js::CallJSPropertyOpSetter(cx, setterOp(), obj, SHAPE_USERID(this), vp); } +inline +EmptyShape::EmptyShape(JSContext *cx, js::Class *aclasp) + : js::Shape(cx, aclasp) +{ +#ifdef DEBUG + if (cx->runtime->meterEmptyShapes()) + cx->runtime->emptyShapes.put(this); +#endif +} + +} /* namespace js */ + #endif /* jsscopeinlines_h___ */ diff --git a/js/src/jsscript.cpp b/js/src/jsscript.cpp index 4ce4f64bec92..e4df8d57a6d7 100644 --- a/js/src/jsscript.cpp +++ b/js/src/jsscript.cpp @@ -1034,7 +1034,7 @@ js_NewScriptFromCG(JSContext *cx, JSCodeGenerator *cg) */ goto skip_empty; } - js_FreezeLocalNames(cx, fun); + fun->freezeLocalNames(cx); fun->u.i.script = empty; } @@ -1120,7 +1120,7 @@ js_NewScriptFromCG(JSContext *cx, JSCodeGenerator *cg) else fun->u.i.nupvars = 0; - js_FreezeLocalNames(cx, fun); + fun->freezeLocalNames(cx); fun->u.i.script = script; #ifdef CHECK_SCRIPT_OWNER script->owner = NULL; diff --git a/js/src/jsstr.h b/js/src/jsstr.h index 6375e2207201..66354ef64340 100644 --- a/js/src/jsstr.h +++ b/js/src/jsstr.h @@ -1029,9 +1029,9 @@ js_SkipWhiteSpace(const jschar *s, const jschar *end) } /* - * Inflate bytes to JS chars and vice versa. Report out of memory via cx - * and return null on error, otherwise return the jschar or byte vector that - * was JS_malloc'ed. length is updated with the length of the new string in jschars. + * Inflate bytes to JS chars and vice versa. Report out of memory via cx and + * return null on error, otherwise return the jschar or byte vector that was + * JS_malloc'ed. length is updated to the length of the new string in jschars. */ extern jschar * js_InflateString(JSContext *cx, const char *bytes, size_t *length); diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 9c8ac7614258..9753b3a7a010 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -449,7 +449,7 @@ InitJITStatsClass(JSContext *cx, JSObject *glob) /* * INS_CONSTPTR can be used to embed arbitrary pointers into the native code. It should not - * be used directly to embed GC thing pointers. Instead, use the INS_CONSTOBJ/FUN/STR/SPROP + * be used directly to embed GC thing pointers. Instead, use the INS_CONSTOBJ/FUN/STR/SHAPE * variants which ensure that the embedded pointer will be kept alive across GCs. 
*/ @@ -461,7 +461,7 @@ InitJITStatsClass(JSContext *cx, JSObject *glob) #define INS_CONSTOBJ(obj) addName(insImmObj(obj), #obj) #define INS_CONSTFUN(fun) addName(insImmFun(fun), #fun) #define INS_CONSTSTR(str) addName(insImmStr(str), #str) -#define INS_CONSTSPROP(sprop) addName(insImmSprop(sprop), #sprop) +#define INS_CONSTSHAPE(shape) addName(insImmShape(shape), #shape) #define INS_CONSTID(id) addName(insImmId(id), #id) #define INS_ATOM(atom) INS_CONSTSTR(ATOM_TO_STRING(atom)) #define INS_NULL() INS_CONSTPTR(NULL) @@ -1546,7 +1546,7 @@ TreeFragment::initialize(JSContext* cx, SlotList *globalSlots, bool speculate) this->script = cx->fp()->getScript(); this->recursion = Recursion_None; this->gcthings.clear(); - this->sprops.clear(); + this->shapes.clear(); this->unstableExits = NULL; this->sideExits.clear(); @@ -2265,7 +2265,7 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* frag tempTypeMap(cx) { JS_ASSERT(globalObj == cx->fp()->getScopeChain()->getGlobal()); - JS_ASSERT(globalObj->scope()->hasOwnShape()); + JS_ASSERT(globalObj->hasOwnShape()); JS_ASSERT(cx->regs->pc == (jsbytecode*)fragment->ip); fragment->lirbuf = lirbuf; @@ -2565,10 +2565,10 @@ TraceRecorder::insImmStr(JSString* str) } inline LIns* -TraceRecorder::insImmSprop(JSScopeProperty* sprop) +TraceRecorder::insImmShape(const Shape* shape) { - tree->sprops.addUnique(sprop); - return lir->insImmP((void*)sprop); + tree->shapes.addUnique(shape); + return lir->insImmP((void*)shape); } inline LIns* @@ -2828,11 +2828,11 @@ MarkTree(JSTracer* trc, TreeFragment *f) JS_ASSERT(v.isMarkable()); Mark(trc, v.asGCThing(), v.gcKind()); } - JSScopeProperty** spropp = f->sprops.data(); - len = f->sprops.length(); + const Shape** shapep = f->shapes.data(); + len = f->shapes.length(); while (len--) { - JSScopeProperty* sprop = *spropp++; - sprop->trace(trc); + const Shape* shape = *shapep++; + shape->trace(trc); } } @@ -3277,7 +3277,7 @@ template inline uint32 GetFromClosure(JSContext* cx, JSObject* call, const ClosureVarInfo* cv, double* result) { - JS_ASSERT(call->getClass() == &js_CallClass); + JS_ASSERT(call->isCall()); TracerState* state = cx->tracerState; @@ -3321,7 +3321,7 @@ GetFromClosure(JSContext* cx, JSObject* call, const ClosureVarInfo* cv, double* JSStackFrame* fp = (JSStackFrame*) call->getPrivate(); Value v; if (fp) { - v = T::slots(fp)[slot]; + v = T::get_slot(fp, slot); } else { /* * Get the value from the object. We know we have a Call object, and @@ -3331,7 +3331,7 @@ GetFromClosure(JSContext* cx, JSObject* call, const ClosureVarInfo* cv, double* * asserts in jsfun.cpp which make sure Call objects use dslots. */ JS_ASSERT(slot < T::slot_count(call)); - v = T::slots(call)[slot]; + v = T::get_slot(call, slot); } JSValueType type = getCoercedType(v); ValueToNative(v, type, result); @@ -3351,22 +3351,26 @@ struct ArgClosureTraits // Get the right frame slots to use our slot index with. // See also UpvarArgTraits. - static inline Value* slots(JSStackFrame* fp) { return fp->argv; } + static inline Value get_slot(JSStackFrame* fp, unsigned slot) { + JS_ASSERT(slot < fp->numFormalArgs()); + return fp->argv[slot]; + } // Get the right object slots to use our slot index with. - static inline Value* slots(JSObject* obj) { - // We know Call objects use dslots. - return obj->dslots + slot_offset(obj); + static inline Value get_slot(JSObject* obj, unsigned slot) { + return obj->getSlot(slot_offset(obj) + slot); } + // Get the offset of our object slots from the object's dslots pointer. 
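
As a reading aid for the ArgClosureTraits/VarClosureTraits changes in this hunk, here is a minimal sketch (not taken from the patch) of how GetFromClosure<T> resolves a closure slot through the get_slot/slot_count interface these traits expose; names follow the surrounding code, and the helper name is illustrative only.

    // Sketch only: with a live frame the value comes from the stack; otherwise
    // the Call object's reserved slots are read via T::get_slot(obj, slot).
    template <typename T>
    static js::Value
    ReadClosureSlot(JSObject *call, unsigned slot)
    {
        JSStackFrame *fp = (JSStackFrame *) call->getPrivate();
        if (fp)
            return T::get_slot(fp, slot);
        JS_ASSERT(slot < T::slot_count(call));
        return T::get_slot(call, slot);
    }
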
static inline uint32 slot_offset(JSObject* obj) { - return JSSLOT_START(&js_CallClass) + - CALL_CLASS_FIXED_RESERVED_SLOTS - JS_INITIAL_NSLOTS; + return JSSLOT_START(&js_CallClass) + CALL_CLASS_RESERVED_SLOTS; } + // Get the maximum slot index of this type that should be allowed static inline uint16 slot_count(JSObject* obj) { return js_GetCallObjectFunction(obj)->nargs; } + private: ArgClosureTraits(); }; @@ -3392,19 +3396,24 @@ struct VarClosureTraits } // See also UpvarVarTraits. - static inline Value* slots(JSStackFrame* fp) { return fp->slots(); } - static inline Value* slots(JSObject* obj) { - // We know Call objects use dslots. - return obj->dslots + slot_offset(obj); + static inline Value get_slot(JSStackFrame* fp, unsigned slot) { + JS_ASSERT(slot < fp->getFunction()->u.i.nvars); + return fp->slots()[slot]; } + + static inline Value get_slot(JSObject* obj, unsigned slot) { + return obj->getSlot(slot_offset(obj) + slot); + } + static inline uint32 slot_offset(JSObject* obj) { - return JSSLOT_START(&js_CallClass) + - CALL_CLASS_FIXED_RESERVED_SLOTS - JS_INITIAL_NSLOTS + - js_GetCallObjectFunction(obj)->nargs; + return JSSLOT_START(&js_CallClass) + CALL_CLASS_RESERVED_SLOTS + + js_GetCallObjectFunction(obj)->nargs; } + static inline uint16 slot_count(JSObject* obj) { return js_GetCallObjectFunction(obj)->u.i.nvars; } + private: VarClosureTraits(); }; @@ -3460,7 +3469,9 @@ FlushNativeStackFrame(JSContext* cx, unsigned callDepth, const JSValueType* mp, for (; n != 0; fp = fp->down) { --n; if (fp->argv) { - if (fp->hasArgsObj() && fp->getArgsObj()->getPrivate() == JS_ARGUMENT_OBJECT_ON_TRACE) { + if (fp->hasArgsObj() && + fp->getArgsObj()->getPrivate() == JS_ARGUMENTS_OBJECT_ON_TRACE) + { JS_ASSERT(fp->getArgsObj()->isNormalArguments()); fp->getArgsObj()->setPrivate(fp); } @@ -3539,7 +3550,7 @@ TraceRecorder::importImpl(LIns* base, ptrdiff_t offset, const void* p, JSValueTy if (*prefix == 'a' || *prefix == 'v') { mark = JS_ARENA_MARK(&cx->tempPool); if (fp->getFunction()->hasLocalNames()) - localNames = js_GetLocalNameArray(cx, fp->getFunction(), &cx->tempPool); + localNames = fp->getFunction()->getLocalNameArray(cx, &cx->tempPool); funName = fp->getFunction()->atom ? js_AtomToPrintableString(cx, fp->getFunction()->atom) : ""; @@ -3671,24 +3682,24 @@ TraceRecorder::import(TreeFragment* tree, LIns* sp, unsigned stackSlots, unsigne } JS_REQUIRES_STACK bool -TraceRecorder::isValidSlot(JSScope* scope, JSScopeProperty* sprop) +TraceRecorder::isValidSlot(JSObject *obj, const Shape* shape) { uint32 setflags = (js_CodeSpec[*cx->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR)); if (setflags) { - if (!sprop->hasDefaultSetter()) + if (!shape->hasDefaultSetter()) RETURN_VALUE("non-stub setter", false); - if (!sprop->writable()) + if (!shape->writable()) RETURN_VALUE("writing to a read-only property", false); } /* This check applies even when setflags == 0. */ - if (setflags != JOF_SET && !sprop->hasDefaultGetter()) { - JS_ASSERT(!sprop->isMethod()); + if (setflags != JOF_SET && !shape->hasDefaultGetter()) { + JS_ASSERT(!shape->isMethod()); RETURN_VALUE("non-stub getter", false); } - if (!SPROP_HAS_VALID_SLOT(sprop, scope)) + if (!obj->containsSlot(shape->slot)) RETURN_VALUE("invalid-slot obj property", false); return true; @@ -5539,10 +5550,9 @@ CheckGlobalObjectShape(JSContext* cx, TraceMonitor* tm, JSObject* globalObj, * isn't the global at record time, a shape guard suffices to ensure * that it isn't the global at run time. 
*/ - if (!globalObj->scope()->hasOwnShape()) { + if (!globalObj->hasOwnShape()) { JS_LOCK_OBJ(cx, globalObj); - JSScope *scope = js_GetMutableScope(cx, globalObj); - bool ok = scope && scope->globalObjectOwnShapeChange(cx); + bool ok = globalObj->globalObjectOwnShapeChange(cx); JS_UNLOCK_OBJ(cx, globalObj); if (!ok) { debug_only_print0(LC_TMTracer, @@ -6142,7 +6152,7 @@ JS_REQUIRES_STACK MonitorResult TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount) { #ifdef JS_THREADSAFE - if (cx->fp()->getScopeChain()->getGlobal()->scope()->title.ownercx != cx) { + if (cx->fp()->getScopeChain()->getGlobal()->title.ownercx != cx) { AbortRecording(cx, "Global object not owned by this context"); return MONITOR_NOT_RECORDING; /* we stay away from shared global objects */ } @@ -8203,23 +8213,23 @@ TraceRecorder::scopeChainProp(JSObject* chainHead, Value*& vp, LIns*& ins, NameR RETURN_STOP_A("prototype property"); } - JSScopeProperty* sprop = (JSScopeProperty*) prop; - if (!isValidSlot(obj->scope(), sprop)) { + Shape* shape = (Shape*) prop; + if (!isValidSlot(obj, shape)) { JS_UNLOCK_OBJ(cx, obj2); return ARECORD_STOP; } - if (!lazilyImportGlobalSlot(sprop->slot)) { + if (!lazilyImportGlobalSlot(shape->slot)) { JS_UNLOCK_OBJ(cx, obj2); RETURN_STOP_A("lazy import of global slot failed"); } - vp = &obj->getSlotRef(sprop->slot); + vp = &obj->getSlotRef(shape->slot); ins = get(vp); JS_UNLOCK_OBJ(cx, obj2); nr.tracked = true; return ARECORD_CONTINUE; } - if (obj == obj2 && obj->getClass() == &js_CallClass) { + if (obj == obj2 && obj->isCall()) { AbortableRecordingStatus status = InjectStatus(callProp(obj, prop, ATOM_TO_JSID(atom), vp, ins, nr)); JS_UNLOCK_OBJ(cx, obj); @@ -8237,24 +8247,24 @@ JS_REQUIRES_STACK RecordingStatus TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, Value*& vp, LIns*& ins, NameResult& nr) { - JSScopeProperty *sprop = (JSScopeProperty*) prop; + Shape *shape = (Shape*) prop; JSOp op = JSOp(*cx->regs->pc); uint32 setflags = (js_CodeSpec[op].format & (JOF_SET | JOF_INCDEC | JOF_FOR)); - if (setflags && !sprop->writable()) + if (setflags && !shape->writable()) RETURN_STOP("writing to a read-only property"); - uintN slot = uint16(sprop->shortid); + uintN slot = uint16(shape->shortid); vp = NULL; JSStackFrame* cfp = (JSStackFrame*) obj->getPrivate(); if (cfp) { - if (sprop->getterOp() == js_GetCallArg) { + if (shape->getterOp() == js_GetCallArg) { JS_ASSERT(slot < cfp->numFormalArgs()); vp = &cfp->argv[slot]; nr.v = *vp; - } else if (sprop->getterOp() == js_GetCallVar || - sprop->getterOp() == js_GetCallVarChecked) { + } else if (shape->getterOp() == js_GetCallVar || + shape->getterOp() == js_GetCallVarChecked) { JS_ASSERT(slot < cfp->getSlotCount()); vp = &cfp->slots()[slot]; nr.v = *vp; @@ -8262,8 +8272,8 @@ TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, Value*& vp, RETURN_STOP("dynamic property of Call object"); } - // Now assert that our use of sprop->shortid was in fact kosher. - JS_ASSERT(sprop->hasShortID()); + // Now assert that our use of shape->shortid was in fact kosher. + JS_ASSERT(shape->hasShortID()); if (frameIfInRange(obj)) { // At this point we are guaranteed to be looking at an active call oject @@ -8273,13 +8283,13 @@ TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, Value*& vp, return RECORD_CONTINUE; } } else { - // Call objects do not yet have sprop->isMethod() properties, but they + // Call objects do not yet have shape->isMethod() properties, but they // should. 
See bug 514046, for which this code is future-proof. Remove // this comment when that bug is fixed (so, FIXME: 514046). #ifdef DEBUG JSBool rv = #endif - js_GetPropertyHelper(cx, obj, sprop->id, + js_GetPropertyHelper(cx, obj, shape->id, (op == JSOP_CALLNAME) ? JSGET_NO_METHOD_BARRIER : JSGET_METHOD_BARRIER, @@ -8292,31 +8302,27 @@ TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, Value*& vp, LIns* parent_ins = stobj_get_parent(get(&cx->fp()->argv[-2])); CHECK_STATUS(traverseScopeChain(parent, parent_ins, obj, obj_ins)); - LIns* call_ins; if (!cfp) { // Because the parent guard in guardCallee ensures this Call object // will be the same object now and on trace, and because once a Call // object loses its frame it never regains one, on trace we will also // have a null private in the Call object. So all we need to do is // write the value to the Call object's slot. - int32 dslot_index = slot; - if (sprop->getterOp() == js_GetCallArg) { - JS_ASSERT(dslot_index < ArgClosureTraits::slot_count(obj)); - dslot_index += ArgClosureTraits::slot_offset(obj); - } else if (sprop->getterOp() == js_GetCallVar || - sprop->getterOp() == js_GetCallVarChecked) { - JS_ASSERT(dslot_index < VarClosureTraits::slot_count(obj)); - dslot_index += VarClosureTraits::slot_offset(obj); + if (shape->getterOp() == js_GetCallArg) { + JS_ASSERT(slot < ArgClosureTraits::slot_count(obj)); + slot += ArgClosureTraits::slot_offset(obj); + } else if (shape->getterOp() == js_GetCallVar || + shape->getterOp() == js_GetCallVarChecked) { + JS_ASSERT(slot < VarClosureTraits::slot_count(obj)); + slot += VarClosureTraits::slot_offset(obj); } else { RETURN_STOP("dynamic property of Call object"); } - // Now assert that our use of sprop->shortid was in fact kosher. - JS_ASSERT(sprop->hasShortID()); + // Now assert that our use of shape->shortid was in fact kosher. + JS_ASSERT(shape->hasShortID()); - LIns* base = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots), ACCSET_OTHER); - ins = unbox_value(obj->dslots[dslot_index], base, dslot_index * sizeof(Value), - snapshot(BRANCH_EXIT)); + ins = unbox_slot(obj, obj_ins, slot, snapshot(BRANCH_EXIT)); } else { ClosureVarInfo* cv = new (traceAlloc()) ClosureVarInfo(); cv->slot = slot; @@ -8332,19 +8338,19 @@ TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, Value*& vp, cx_ins }; const CallInfo* ci; - if (sprop->getterOp() == js_GetCallArg) { + if (shape->getterOp() == js_GetCallArg) { ci = &GetClosureArg_ci; - } else if (sprop->getterOp() == js_GetCallVar || - sprop->getterOp() == js_GetCallVarChecked) { + } else if (shape->getterOp() == js_GetCallVar || + shape->getterOp() == js_GetCallVarChecked) { ci = &GetClosureVar_ci; } else { RETURN_STOP("dynamic property of Call object"); } - // Now assert that our use of sprop->shortid was in fact kosher. - JS_ASSERT(sprop->hasShortID()); + // Now assert that our use of shape->shortid was in fact kosher. 
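
The assertion that follows is easier to read next to the SHAPE_USERID definition earlier in this patch (jsscope.h): Call-object args and vars are addressed through shape->shortid, so the property ops see an integer id whenever hasShortID() is set. A small sketch, assuming only that macro; the helper name is illustrative.

    // Sketch: the id that getterOp()/setterOp() actually receive for a
    // Call-object property, per SHAPE_USERID(shape).
    static jsid
    CallPropOpId(const js::Shape *shape)
    {
        return shape->hasShortID() ? INT_TO_JSID(shape->shortid) : shape->id;
    }
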
+ JS_ASSERT(shape->hasShortID()); - call_ins = lir->insCall(ci, args); + LIns* call_ins = lir->insCall(ci, args); JSValueType type = getCoercedType(nr.v); guard(true, @@ -8356,7 +8362,7 @@ TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, Value*& vp, nr.tracked = false; nr.obj = obj; nr.obj_ins = obj_ins; - nr.sprop = sprop; + nr.shape = shape; return RECORD_CONTINUE; } @@ -8867,7 +8873,7 @@ TraceRecorder::incProp(jsint incr, bool pre) LIns* v_ins; CHECK_STATUS_A(prop(obj, obj_ins, &slot, &v_ins, NULL)); - if (slot == SPROP_INVALID_SLOT) + if (slot == SHAPE_INVALID_SLOT) RETURN_STOP_A("incProp on invalid slot"); Value& v = obj->getSlotRef(slot); @@ -9352,25 +9358,24 @@ static FILE* shapefp = NULL; static void DumpShape(JSObject* obj, const char* prefix) { - JSScope* scope = obj->scope(); - if (!shapefp) { shapefp = fopen("/tmp/shapes.dump", "w"); if (!shapefp) return; } - fprintf(shapefp, "\n%s: shape %u flags %x\n", prefix, scope->shape, scope->flags); - for (JSScopeProperty* sprop = scope->lastProperty(); sprop; sprop = sprop->parent) { - if (JSID_IS_ATOM(sprop->id)) { - fprintf(shapefp, " %s", JS_GetStringBytes(JSID_TO_STRING(sprop->id))); + fprintf(shapefp, "\n%s: shape %u flags %x\n", prefix, obj->shape(), obj->flags); + for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) { + const Shape &shape = r.front(); + + if (JSID_IS_ATOM(shape.id)) { + fprintf(shapefp, " %s", JS_GetStringBytes(JSID_TO_STRING(shape.id))); } else { - JS_ASSERT(!JSID_IS_OBJECT(sprop->id)); - fprintf(shapefp, " %d", JSID_TO_INT(sprop->id)); + JS_ASSERT(!JSID_IS_OBJECT(shape.id)); + fprintf(shapefp, " %d", JSID_TO_INT(shape.id)); } fprintf(shapefp, " %u %p %p %x %x %d\n", - sprop->slot, sprop->getter, sprop->setter, sprop->attrs, sprop->flags, - sprop->shortid); + shape.slot, shape.getter, shape.setter, shape.attrs, shape.flags, shape.shortid); } fflush(shapefp); } @@ -9410,11 +9415,7 @@ TraceRecorder::guardShape(LIns* obj_ins, JSObject* obj, uint32 shape, const char #endif // Finally, emit the shape guard. 
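
The replacement guard below compares the recorded shape against the object's own 32-bit objShape word (loaded by the new shape_ins() helper, also in this hunk) instead of dereferencing the old JSScope through map. A sketch of the condition the emitted guard enforces at run time, assuming JSObject::shape() reads that field as DumpShape above does:

    // Sketch only: the invariant the emitted LIR_eqi guard checks.
    static inline bool
    ShapeGuardHolds(JSObject *obj, uint32 recordedShape)
    {
        return obj->shape() == recordedShape;
    }
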
- LIns* shape_ins = - addName(lir->insLoad(LIR_ldi, map(obj_ins), offsetof(JSScope, shape), ACCSET_OTHER), "shape"); - guard(true, - addName(lir->ins2ImmI(LIR_eqi, shape_ins, shape), guardName), - exit); + guard(true, addName(lir->ins2ImmI(LIR_eqi, shape_ins(obj_ins), shape), guardName), exit); return RECORD_CONTINUE; } @@ -9441,9 +9442,10 @@ TraceRecorder::forgetGuardedShapes() } inline LIns* -TraceRecorder::map(LIns* obj_ins) +TraceRecorder::shape_ins(LIns* obj_ins) { - return addName(lir->insLoad(LIR_ldp, obj_ins, (int) offsetof(JSObject, map), ACCSET_OTHER), "map"); + return addName(lir->insLoad(LIR_ldi, obj_ins, int(offsetof(JSObject, objShape)), ACCSET_OTHER), + "objShape"); } JS_REQUIRES_STACK AbortableRecordingStatus @@ -9513,7 +9515,7 @@ TraceRecorder::test_property_cache(JSObject* obj, LIns* obj_ins, JSObject*& obj2 if (!obj2->isNative()) RETURN_STOP_A("property found on non-native object"); entry = JS_PROPERTY_CACHE(cx).fill(cx, aobj, 0, protoIndex, obj2, - (JSScopeProperty*) prop); + (Shape*) prop); JS_ASSERT(entry); if (entry == JS_NO_PROP_CACHE_FILL) entry = NULL; @@ -9632,11 +9634,10 @@ void TraceRecorder::stobj_set_slot(LIns* obj_ins, unsigned slot, LIns*& dslots_ins, const Value &v, LIns* v_ins) { - if (slot < JS_INITIAL_NSLOTS) { + if (slot < JS_INITIAL_NSLOTS) stobj_set_fslot(obj_ins, slot, v, v_ins); - } else { + else stobj_set_dslot(obj_ins, slot - JS_INITIAL_NSLOTS, dslots_ins, v, v_ins); - } } #if JS_BITS_PER_WORD == 32 || JS_BITS_PER_WORD == 64 @@ -9682,10 +9683,11 @@ TraceRecorder::unbox_slot(JSObject *obj, LIns *obj_ins, uint32 slot, VMSideExit #if JS_BITS_PER_WORD == 32 LIns* -TraceRecorder::stobj_get_const_private_ptr(LIns *obj_ins) +TraceRecorder::stobj_get_const_private_ptr(LIns *obj_ins, unsigned slot) { + JS_ASSERT(slot < JS_INITIAL_NSLOTS); return lir->insLoad(LIR_ldi, obj_ins, - offsetof(JSObject, fslots) + JSSLOT_PRIVATE * sizeof(Value) + sPayloadOffset, + offsetof(JSObject, fslots) + slot * sizeof(Value) + sPayloadOffset, ACCSET_OTHER, LOAD_CONST); } @@ -9852,11 +9854,12 @@ TraceRecorder::box_value_for_native_call(const Value &v, LIns *v_ins) #elif JS_BITS_PER_WORD == 64 LIns* -TraceRecorder::stobj_get_const_private_ptr(LIns *obj_ins) +TraceRecorder::stobj_get_const_private_ptr(LIns *obj_ins, unsigned slot) { - /* N.B. On 64-bit, privates are encoded differently than other pointers. */ + /* N.B. On 64-bit, privates are encoded differently from other pointers. 
*/ + JS_ASSERT(slot < JS_INITIAL_NSLOTS); LIns *v_ins = lir->insLoad(LIR_ldq, obj_ins, - offsetof(JSObject, fslots) + JSSLOT_PRIVATE * sizeof(Value), + offsetof(JSObject, fslots) + slot * sizeof(Value), ACCSET_OTHER, LOAD_CONST); return lir->ins2ImmI(LIR_lshq, v_ins, 1); } @@ -10676,18 +10679,22 @@ TraceRecorder::newArguments(LIns* callee_ins, bool strict) LIns* argc_ins = INS_CONST(cx->fp()->numActualArgs()); LIns* args[] = { callee_ins, argc_ins, global_ins, cx_ins }; - LIns* call_ins = lir->insCall(&js_Arguments_ci, args); - guard(false, lir->insEqP_0(call_ins), OOM_EXIT); + LIns* argsobj_ins = lir->insCall(&js_Arguments_ci, args); + guard(false, lir->insEqP_0(argsobj_ins), OOM_EXIT); if (strict) { JSStackFrame* fp = cx->fp(); uintN argc = fp->numActualArgs(); - LIns* argsSlots_ins = NULL; - for (uintN i = 0; i < argc; i++) - stobj_set_dslot(call_ins, i, argsSlots_ins, fp->argv[i], get(&fp->argv[i])); + LIns* argsData_ins = stobj_get_const_private_ptr(argsobj_ins, JSObject::JSSLOT_ARGS_DATA); + + for (uintN i = 0; i < argc; i++) { + box_value_into(fp->argv[i], get(&fp->argv[i]), argsData_ins, + offsetof(ArgumentsData, slots) + i * sizeof(Value), + ACCSET_OTHER); + } } - return call_ins; + return argsobj_ins; } JS_REQUIRES_STACK AbortableRecordingStatus @@ -11077,7 +11084,7 @@ TraceRecorder::getClassPrototype(JSObject* ctor, LIns*& proto_ins) // that pval is usable. JS_ASSERT(!pval.isPrimitive()); JSObject *proto = &pval.toObject(); - JS_ASSERT_IF(clasp != &js_ArrayClass, proto->scope()->emptyScope->clasp == clasp); + JS_ASSERT_IF(clasp != &js_ArrayClass, proto->emptyShape->getClass() == clasp); proto_ins = INS_CONSTOBJ(proto); return RECORD_CONTINUE; @@ -11098,12 +11105,12 @@ TraceRecorder::getClassPrototype(JSProtoKey key, LIns*& proto_ins) JS_ASSERT(localtm.recorder); #ifdef DEBUG - /* Double-check that a native proto has a matching emptyScope. */ + /* Double-check that a native proto has a matching emptyShape. */ if (key != JSProto_Array) { JS_ASSERT(proto->isNative()); - JSEmptyScope *emptyScope = proto->scope()->emptyScope; - JS_ASSERT(emptyScope); - JS_ASSERT(JSCLASS_CACHED_PROTO_KEY(emptyScope->clasp) == key); + EmptyShape *empty = proto->emptyShape; + JS_ASSERT(empty); + JS_ASSERT(JSCLASS_CACHED_PROTO_KEY(empty->getClass()) == key); } #endif @@ -11190,12 +11197,12 @@ TraceRecorder::propagateFailureToBuiltinStatus(LIns* ok_ins, LIns*& status_ins) } JS_REQUIRES_STACK void -TraceRecorder::emitNativePropertyOp(JSScope* scope, JSScopeProperty* sprop, LIns* obj_ins, +TraceRecorder::emitNativePropertyOp(const Shape* shape, LIns* obj_ins, bool setflag, LIns* addr_boxed_val_ins) { JS_ASSERT(addr_boxed_val_ins->isop(LIR_allocp)); - JS_ASSERT(setflag ? !sprop->hasSetterValue() : !sprop->hasGetterValue()); - JS_ASSERT(setflag ? !sprop->hasDefaultSetter() : !sprop->hasDefaultGetterOrIsMethod()); + JS_ASSERT(setflag ? !shape->hasSetterValue() : !shape->hasGetterValue()); + JS_ASSERT(setflag ? !shape->hasDefaultSetter() : !shape->hasDefaultGetterOrIsMethod()); enterDeepBailCall(); @@ -11203,7 +11210,7 @@ TraceRecorder::emitNativePropertyOp(JSScope* scope, JSScopeProperty* sprop, LIns lir->insStore(INS_CONST(1), lirbuf->state, offsetof(TracerState, nativeVpLen), ACCSET_OTHER); CallInfo* ci = new (traceAlloc()) CallInfo(); - ci->_address = uintptr_t(setflag ? sprop->setterOp() : sprop->getterOp()); + ci->_address = uintptr_t(setflag ? 
shape->setterOp() : shape->getterOp()); ci->_typesig = CallInfo::typeSig4(ARGTYPE_I, ARGTYPE_P, ARGTYPE_P, ARGTYPE_P, ARGTYPE_P); ci->_isPure = 0; ci->_storeAccSet = ACCSET_STORE_ANY; @@ -11211,7 +11218,7 @@ TraceRecorder::emitNativePropertyOp(JSScope* scope, JSScopeProperty* sprop, LIns #ifdef DEBUG ci->_name = "JSPropertyOp"; #endif - LIns* args[] = { addr_boxed_val_ins, INS_CONSTID(SPROP_USERID(sprop)), obj_ins, cx_ins }; + LIns* args[] = { addr_boxed_val_ins, INS_CONSTID(SHAPE_USERID(shape)), obj_ins, cx_ins }; LIns* ok_ins = lir->insCall(ci, args); // Cleanup. Immediately clear nativeVp before we might deep bail. @@ -11906,10 +11913,10 @@ TraceRecorder::incName(jsint incr, bool pre) return ARECORD_CONTINUE; } - if (nr.obj->getClass() != &js_CallClass) + if (!nr.obj->isCall()) RETURN_STOP_A("incName on unsupported object class"); - CHECK_STATUS_A(setCallProp(nr.obj, nr.obj_ins, nr.sprop, v_after, v)); + CHECK_STATUS_A(setCallProp(nr.obj, nr.obj_ins, nr.shape, v_after, v)); stack(0, v_result); return ARECORD_CONTINUE; } @@ -11966,17 +11973,16 @@ TraceRecorder::record_JSOP_SETPROP() JSObject* obj = &l.toObject(); if (obj->getOps()->setProperty) - RETURN_STOP_A("non-native JSObjectOps::setProperty"); + RETURN_STOP_A("non-native js::ObjectOps::setProperty"); return ARECORD_CONTINUE; } /* Emit a specialized, inlined copy of js_NativeSet. */ JS_REQUIRES_STACK RecordingStatus -TraceRecorder::nativeSet(JSObject* obj, LIns* obj_ins, JSScopeProperty* sprop, +TraceRecorder::nativeSet(JSObject* obj, LIns* obj_ins, const Shape* shape, const Value &v, LIns* v_ins) { - JSScope* scope = obj->scope(); - uint32 slot = sprop->slot; + uint32 slot = shape->slot; /* * We do not trace assignment to properties that have both a nonstub setter @@ -11996,16 +12002,16 @@ TraceRecorder::nativeSet(JSObject* obj, LIns* obj_ins, JSScopeProperty* sprop, * case unboxing would fail and, having called a native setter, we could * not just retry the instruction in the interpreter. */ - JS_ASSERT(sprop->hasDefaultSetter() || slot == SPROP_INVALID_SLOT); + JS_ASSERT(shape->hasDefaultSetter() || slot == SHAPE_INVALID_SLOT); // Call the setter, if any. - if (!sprop->hasDefaultSetter()) - emitNativePropertyOp(scope, sprop, obj_ins, true, box_value_into_alloc(v, v_ins)); + if (!shape->hasDefaultSetter()) + emitNativePropertyOp(shape, obj_ins, true, box_value_into_alloc(v, v_ins)); // Store the value, if this property has a slot. 
- if (slot != SPROP_INVALID_SLOT) { - JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, scope)); - JS_ASSERT(sprop->hasSlot()); + if (slot != SHAPE_INVALID_SLOT) { + JS_ASSERT(obj->containsSlot(shape->slot)); + JS_ASSERT(shape->hasSlot()); if (obj == globalObj) { if (!lazilyImportGlobalSlot(slot)) RETURN_STOP("lazy import of global slot failed"); @@ -12020,43 +12026,45 @@ TraceRecorder::nativeSet(JSObject* obj, LIns* obj_ins, JSScopeProperty* sprop, } static JSBool FASTCALL -MethodWriteBarrier(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, JSObject* funobj) +MethodWriteBarrier(JSContext* cx, JSObject* obj, Shape* shape, JSObject* funobj) { AutoObjectRooter tvr(cx, funobj); - return obj->scope()->methodWriteBarrier(cx, sprop, ObjectValue(*tvr.object())); + return obj->methodWriteBarrier(cx, *shape, ObjectValue(*tvr.object())); } -JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, MethodWriteBarrier, CONTEXT, OBJECT, SCOPEPROP, OBJECT, +JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, MethodWriteBarrier, CONTEXT, OBJECT, SHAPE, OBJECT, 0, ACCSET_STORE_ANY) JS_REQUIRES_STACK RecordingStatus -TraceRecorder::setProp(Value &l, PropertyCacheEntry* entry, JSScopeProperty* sprop, +TraceRecorder::setProp(Value &l, PropertyCacheEntry* entry, const Shape* shape, Value &v, LIns*& v_ins, bool isDefinitelyAtom) { if (entry == JS_NO_PROP_CACHE_FILL) RETURN_STOP("can't trace uncacheable property set"); - JS_ASSERT_IF(entry->vcapTag() >= 1, !sprop->hasSlot()); - if (!sprop->hasDefaultSetter() && sprop->slot != SPROP_INVALID_SLOT) - RETURN_STOP("can't trace set of property with setter and slot"); - if (sprop->hasSetterValue()) - RETURN_STOP("can't trace JavaScript function setter"); - - // These two cases are errors and can't be traced. - if (sprop->hasGetterValue()) - RETURN_STOP("can't assign to property with script getter but no setter"); - if (!sprop->writable()) - RETURN_STOP("can't assign to readonly property"); + JS_ASSERT_IF(entry->vcapTag() >= 1, !shape->hasSlot()); JS_ASSERT(!l.isPrimitive()); JSObject* obj = &l.toObject(); + + if (!shape->hasDefaultSetter() && shape->slot != SHAPE_INVALID_SLOT && !obj->isCall()) + RETURN_STOP("can't trace set of property with setter and slot"); + if (shape->hasSetterValue()) + RETURN_STOP("can't trace JavaScript function setter"); + + // These two cases are errors and can't be traced. + if (shape->hasGetterValue()) + RETURN_STOP("can't assign to property with script getter but no setter"); + if (!shape->writable()) + RETURN_STOP("can't assign to readonly property"); + LIns* obj_ins = get(&l); - JS_ASSERT_IF(entry->directHit(), obj->scope()->hasProperty(sprop)); + JS_ASSERT_IF(entry->directHit(), obj->nativeContains(*shape)); // Fast path for CallClass. This is about 20% faster than the general case. v_ins = get(&v); - if (obj->getClass() == &js_CallClass) - return setCallProp(obj, obj_ins, sprop, v_ins, v); + if (obj->isCall()) + return setCallProp(obj, obj_ins, shape, v_ins, v); // Find obj2. If entry->adding(), the TAG bits are all 0. JSObject* obj2 = obj; @@ -12064,15 +12072,14 @@ TraceRecorder::setProp(Value &l, PropertyCacheEntry* entry, JSScopeProperty* spr obj2 = obj2->getParent(); for (jsuword j = entry->protoIndex(); j; j--) obj2 = obj2->getProto(); - JSScope *scope = obj2->scope(); JS_ASSERT_IF(entry->adding(), obj2 == obj); // Guard before anything else. 
PCVal pcval; CHECK_STATUS(guardPropertyCacheHit(obj_ins, obj, obj2, entry, pcval)); - JS_ASSERT(scope->object == obj2); - JS_ASSERT(scope->hasProperty(sprop)); - JS_ASSERT_IF(obj2 != obj, !sprop->hasSlot()); + JS_ASSERT(!obj2->nativeEmpty()); + JS_ASSERT(obj2->nativeContains(*shape)); + JS_ASSERT_IF(obj2 != obj, !shape->hasSlot()); /* * Setting a function-valued property might need to rebrand the object, so @@ -12080,12 +12087,12 @@ TraceRecorder::setProp(Value &l, PropertyCacheEntry* entry, JSScopeProperty* spr * this, because functions have distinct trace-type from other values and * branded-ness is implied by the shape, which we've already guarded on. */ - if (scope->brandedOrHasMethodBarrier() && IsFunctionObject(v) && entry->directHit()) { + if (obj2->brandedOrHasMethodBarrier() && IsFunctionObject(v) && entry->directHit()) { if (obj == globalObj) RETURN_STOP("can't trace function-valued property set in branded global scope"); enterDeepBailCall(); - LIns* args[] = { v_ins, INS_CONSTSPROP(sprop), obj_ins, cx_ins }; + LIns* args[] = { v_ins, INS_CONSTSHAPE(shape), obj_ins, cx_ins }; LIns* ok_ins = lir->insCall(&MethodWriteBarrier_ci, args); guard(false, lir->insEqI_0(ok_ins), OOM_EXIT); leaveDeepBailCall(); @@ -12093,17 +12100,17 @@ TraceRecorder::setProp(Value &l, PropertyCacheEntry* entry, JSScopeProperty* spr // Add a property to the object if necessary. if (entry->adding()) { - JS_ASSERT(sprop->hasSlot()); + JS_ASSERT(shape->hasSlot()); if (obj == globalObj) RETURN_STOP("adding a property to the global object"); - LIns* args[] = { INS_CONSTSPROP(sprop), obj_ins, cx_ins }; + LIns* args[] = { INS_CONSTSHAPE(shape), obj_ins, cx_ins }; const CallInfo *ci = isDefinitelyAtom ? &js_AddAtomProperty_ci : &js_AddProperty_ci; LIns* ok_ins = lir->insCall(ci, args); guard(false, lir->insEqI_0(ok_ins), OOM_EXIT); } - return nativeSet(obj, obj_ins, sprop, v, v_ins); + return nativeSet(obj, obj_ins, shape, v, v_ins); } JS_REQUIRES_STACK RecordingStatus @@ -12127,22 +12134,22 @@ TraceRecorder::setUpwardTrackedVar(Value* stackVp, const Value &v, LIns* v_ins) } JS_REQUIRES_STACK RecordingStatus -TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, JSScopeProperty *sprop, +TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, const Shape *shape, LIns *v_ins, const Value &v) { // Set variables in on-trace-stack call objects by updating the tracker. JSStackFrame *fp = frameIfInRange(callobj); if (fp) { - if (sprop->setterOp() == SetCallArg) { - JS_ASSERT(sprop->hasShortID()); - uintN slot = uint16(sprop->shortid); + if (shape->setterOp() == SetCallArg) { + JS_ASSERT(shape->hasShortID()); + uintN slot = uint16(shape->shortid); Value *vp2 = &fp->argv[slot]; CHECK_STATUS(setUpwardTrackedVar(vp2, v, v_ins)); return RECORD_CONTINUE; } - if (sprop->setterOp() == SetCallVar) { - JS_ASSERT(sprop->hasShortID()); - uintN slot = uint16(sprop->shortid); + if (shape->setterOp() == SetCallVar) { + JS_ASSERT(shape->hasShortID()); + uintN slot = uint16(shape->shortid); Value *vp2 = &fp->slots()[slot]; CHECK_STATUS(setUpwardTrackedVar(vp2, v, v_ins)); return RECORD_CONTINUE; @@ -12156,13 +12163,13 @@ TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, JSScopeProperty // object loses its frame it never regains one, on trace we will also // have a null private in the Call object. So all we need to do is // write the value to the Call object's slot. 
- int32 dslot_index = uint16(sprop->shortid); - if (sprop->setterOp() == SetCallArg) { - JS_ASSERT(dslot_index < ArgClosureTraits::slot_count(callobj)); - dslot_index += ArgClosureTraits::slot_offset(callobj); - } else if (sprop->setterOp() == SetCallVar) { - JS_ASSERT(dslot_index < VarClosureTraits::slot_count(callobj)); - dslot_index += VarClosureTraits::slot_offset(callobj); + intN slot = uint16(shape->shortid); + if (shape->setterOp() == SetCallArg) { + JS_ASSERT(slot < ArgClosureTraits::slot_count(callobj)); + slot += ArgClosureTraits::slot_offset(callobj); + } else if (shape->setterOp() == SetCallVar) { + JS_ASSERT(slot < VarClosureTraits::slot_count(callobj)); + slot += VarClosureTraits::slot_offset(callobj); } else { RETURN_STOP("can't trace special CallClass setter"); } @@ -12170,10 +12177,10 @@ TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, JSScopeProperty // Now assert that the shortid get we did above was ok. Have to do it // after the RETURN_STOP above, since in that case we may in fact not // have a valid shortid; but we don't use it in that case anyway. - JS_ASSERT(sprop->hasShortID()); + JS_ASSERT(shape->hasShortID()); - LIns* base = lir->insLoad(LIR_ldp, callobj_ins, offsetof(JSObject, dslots), ACCSET_OTHER); - box_value_into(v, v_ins, base, dslot_index * sizeof(Value), ACCSET_OTHER); + LIns* dslots_ins = NULL; + stobj_set_slot(callobj_ins, slot, dslots_ins, v, v_ins); return RECORD_CONTINUE; } @@ -12183,9 +12190,9 @@ TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, JSScopeProperty // Set variables in off-trace-stack call objects by calling standard builtins. const CallInfo* ci = NULL; - if (sprop->setterOp() == SetCallArg) + if (shape->setterOp() == SetCallArg) ci = &js_SetCallArg_ci; - else if (sprop->setterOp() == SetCallVar) + else if (shape->setterOp() == SetCallVar) ci = &js_SetCallVar_ci; else RETURN_STOP("can't trace special CallClass setter"); @@ -12203,9 +12210,9 @@ TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, JSScopeProperty // Case 1: storing to native stack area. // Compute native stack slot and address offset we are storing to. 
- unsigned slot = uint16(sprop->shortid); + unsigned slot = uint16(shape->shortid); LIns *slot_ins; - if (sprop->setterOp() == SetCallArg) + if (shape->setterOp() == SetCallArg) slot_ins = ArgClosureTraits::adj_slot_lir(lir, fp_ins, slot); else slot_ins = VarClosureTraits::adj_slot_lir(lir, fp_ins, slot); @@ -12241,7 +12248,7 @@ TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, JSScopeProperty br1->setTarget(label1); LIns* args[] = { box_value_for_native_call(v, v_ins), - INS_CONSTWORD(JSID_BITS(SPROP_USERID(sprop))), + INS_CONSTWORD(JSID_BITS(SHAPE_USERID(shape))), callobj_ins, cx_ins }; @@ -12255,7 +12262,7 @@ TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, JSScopeProperty } JS_REQUIRES_STACK AbortableRecordingStatus -TraceRecorder::record_SetPropHit(PropertyCacheEntry* entry, JSScopeProperty* sprop) +TraceRecorder::record_SetPropHit(PropertyCacheEntry* entry, const Shape* shape) { Value& r = stackval(-1); Value& l = stackval(-2); @@ -12264,7 +12271,7 @@ TraceRecorder::record_SetPropHit(PropertyCacheEntry* entry, JSScopeProperty* spr jsbytecode* pc = cx->regs->pc; bool isDefinitelyAtom = (*pc == JSOP_SETPROP); - CHECK_STATUS_A(setProp(l, entry, sprop, r, v_ins, isDefinitelyAtom)); + CHECK_STATUS_A(setProp(l, entry, shape, r, v_ins, isDefinitelyAtom)); switch (*pc) { case JSOP_SETPROP: @@ -12482,46 +12489,46 @@ TraceRecorder::getPropertyById(LIns* obj_ins, Value* outp) /* Manually inlined, specialized copy of js_NativeGet. */ static JSBool FASTCALL -GetPropertyWithNativeGetter(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, Value* vp) +GetPropertyWithNativeGetter(JSContext* cx, JSObject* obj, Shape* shape, Value* vp) { LeaveTraceIfGlobalObject(cx, obj); #ifdef DEBUG JSProperty* prop; JSObject* pobj; - JS_ASSERT(obj->lookupProperty(cx, sprop->id, &pobj, &prop)); - JS_ASSERT(prop == (JSProperty*) sprop); + JS_ASSERT(obj->lookupProperty(cx, shape->id, &pobj, &prop)); + JS_ASSERT(prop == (JSProperty*) shape); pobj->dropProperty(cx, prop); #endif - // JSScopeProperty::get contains a special case for With objects. We can - // elide it here because With objects are, we claim, never on the operand - // stack while recording. + // Shape::get contains a special case for With objects. We can elide it + // here because With objects are, we claim, never on the operand stack + // while recording. JS_ASSERT(obj->getClass() != &js_WithClass); vp->setUndefined(); - if (!sprop->getterOp()(cx, obj, SPROP_USERID(sprop), vp)) { + if (!shape->getterOp()(cx, obj, SHAPE_USERID(shape), vp)) { SetBuiltinError(cx); return JS_FALSE; } return cx->tracerState->builtinStatus == 0; } JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyWithNativeGetter, - CONTEXT, OBJECT, SCOPEPROP, VALUEPTR, 0, ACCSET_STORE_ANY) + CONTEXT, OBJECT, SHAPE, VALUEPTR, 0, ACCSET_STORE_ANY) JS_REQUIRES_STACK RecordingStatus -TraceRecorder::getPropertyWithNativeGetter(LIns* obj_ins, JSScopeProperty* sprop, Value* outp) +TraceRecorder::getPropertyWithNativeGetter(LIns* obj_ins, const Shape* shape, Value* outp) { - JS_ASSERT(!sprop->hasGetterValue()); - JS_ASSERT(sprop->slot == SPROP_INVALID_SLOT); - JS_ASSERT(!sprop->hasDefaultGetterOrIsMethod()); + JS_ASSERT(!shape->hasGetterValue()); + JS_ASSERT(shape->slot == SHAPE_INVALID_SLOT); + JS_ASSERT(!shape->hasDefaultGetterOrIsMethod()); // Call GetPropertyWithNativeGetter. See note in getPropertyByName about vp. // FIXME - We should call the getter directly. Using a builtin function for // now because it buys some extra asserts. See bug 508310. 
enterDeepBailCall(); LIns* vp_ins = addName(lir->insAlloc(sizeof(Value)), "vp"); - LIns* args[] = {vp_ins, INS_CONSTPTR(sprop), obj_ins, cx_ins}; + LIns* args[] = {vp_ins, INS_CONSTPTR(shape), obj_ins, cx_ins}; LIns* ok_ins = lir->insCall(&GetPropertyWithNativeGetter_ci, args); finishGetProp(obj_ins, vp_ins, ok_ins, outp); leaveDeepBailCall(); @@ -12529,7 +12536,7 @@ TraceRecorder::getPropertyWithNativeGetter(LIns* obj_ins, JSScopeProperty* sprop } JS_REQUIRES_STACK RecordingStatus -TraceRecorder::getPropertyWithScriptGetter(JSObject *obj, LIns* obj_ins, JSScopeProperty* sprop) +TraceRecorder::getPropertyWithScriptGetter(JSObject *obj, LIns* obj_ins, const Shape* shape) { if (!canCallImacro()) RETURN_STOP("cannot trace script getter, already in imacro"); @@ -12537,7 +12544,7 @@ TraceRecorder::getPropertyWithScriptGetter(JSObject *obj, LIns* obj_ins, JSScope // Rearrange the stack in preparation for the imacro, taking care to adjust // the interpreter state and the tracker in the same way. This adjustment // is noted in imacros.jsasm with .fixup tags. - Value getter = sprop->getterValue(); + Value getter = shape->getterValue(); Value*& sp = cx->regs->sp; switch (*cx->regs->pc) { case JSOP_GETPROP: @@ -12958,7 +12965,7 @@ TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex) LIns* idx_ins = get(&idx); LIns* v_ins = get(&v); - if (InstanceOf(cx, obj, &js_ArgumentsClass, NULL)) + if (obj->isArguments()) RETURN_STOP_A("can't trace setting elements of the |arguments| object"); if (obj == globalObj) @@ -13300,22 +13307,25 @@ TraceRecorder::record_JSOP_CALLUPVAR() } JS_REQUIRES_STACK AbortableRecordingStatus -TraceRecorder::record_JSOP_GETDSLOT() +TraceRecorder::record_JSOP_GETFCSLOT() { JSObject* callee = cx->fp()->callee(); LIns* callee_ins = get(&cx->fp()->argv[-2]); + LIns* upvars_ins = stobj_get_const_private_ptr(callee_ins, + JSObject::JSSLOT_FLAT_CLOSURE_UPVARS); + unsigned index = GET_UINT16(cx->regs->pc); - LIns* dslots_ins = lir->insLoad(LIR_ldp, callee_ins, offsetof(JSObject, dslots), ACCSET_OTHER); - stack(0, unbox_value(callee->dslots[index], dslots_ins, index * sizeof(Value), - snapshot(BRANCH_EXIT))); + LIns *v_ins = unbox_value(callee->getFlatClosureUpvar(index), upvars_ins, index * sizeof(Value), + snapshot(BRANCH_EXIT)); + stack(0, v_ins); return ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus -TraceRecorder::record_JSOP_CALLDSLOT() +TraceRecorder::record_JSOP_CALLFCSLOT() { - CHECK_STATUS_A(record_JSOP_GETDSLOT()); + CHECK_STATUS_A(record_JSOP_GETFCSLOT()); stack(1, INS_NULL()); return ARECORD_CONTINUE; } @@ -13350,7 +13360,7 @@ TraceRecorder::guardCallee(Value& callee) * variable accesses for a Call object having no private data that we can * emit code that avoids checking for an active JSStackFrame for the Call * object (which would hold fresh variable values -- the Call object's - * dslots would be stale until the stack frame is popped). This is because + * slots would be stale until the stack frame is popped). This is because * Call objects can't pick up a new stack frame in their private slot once * they have none. TR::callProp and TR::setCallProp depend on this fact and * document where; if this guard is removed make sure to fix those methods. 
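
A minimal sketch of the slot write that depends on this guarantee, adapted from the setCallProp hunk above (shape, callobj, the ClosureTraits helpers and stobj_set_slot are used exactly as they appear there; this is illustrative only, not part of the patch):

    // Once a Call object's private (its frame) is null, its own slots hold the
    // live values, so setting an arg/var is a plain boxed store into those slots.
    intN slot = uint16(shape->shortid);
    if (shape->setterOp() == SetCallArg)
        slot += ArgClosureTraits::slot_offset(callobj);
    else if (shape->setterOp() == SetCallVar)
        slot += VarClosureTraits::slot_offset(callobj);
    LIns* dslots_ins = NULL;
    stobj_set_slot(callobj_ins, slot, dslots_ins, v, v_ins);
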
@@ -13370,7 +13380,7 @@ TraceRecorder::guardCallee(Value& callee) JSObject* parent = callee_obj->getParent(); if (parent != globalObj) { - if (parent->getClass() != &js_CallClass) + if (!parent->isCall()) RETURN_STOP("closure scoped by neither the global object nor a Call object"); guard(true, @@ -13713,11 +13723,11 @@ TraceRecorder::name(Value*& vp, LIns*& ins, NameResult& nr) RETURN_STOP_A("name() hit prototype chain"); /* Don't trace getter or setter calls, our caller wants a direct slot. */ - if (pcval.isSprop()) { - JSScopeProperty* sprop = pcval.toSprop(); - if (!isValidSlot(obj->scope(), sprop)) + if (pcval.isShape()) { + const Shape* shape = pcval.toShape(); + if (!isValidSlot(obj, shape)) RETURN_STOP_A("name() not accessing a valid slot"); - slot = sprop->slot; + slot = shape->slot; } else { if (!pcval.isSlot()) RETURN_STOP_A("PCE is not a slot"); @@ -13734,16 +13744,16 @@ TraceRecorder::name(Value*& vp, LIns*& ins, NameResult& nr) } static JSObject* FASTCALL -MethodReadBarrier(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, JSObject* funobj) +MethodReadBarrier(JSContext* cx, JSObject* obj, Shape* shape, JSObject* funobj) { Value v = ObjectValue(*funobj); AutoValueRooter tvr(cx, v); - if (!obj->scope()->methodReadBarrier(cx, sprop, tvr.addr())) + if (!obj->methodReadBarrier(cx, *shape, tvr.addr())) return NULL; return &tvr.value().toObject(); } -JS_DEFINE_CALLINFO_4(static, OBJECT_FAIL, MethodReadBarrier, CONTEXT, OBJECT, SCOPEPROP, OBJECT, +JS_DEFINE_CALLINFO_4(static, OBJECT_FAIL, MethodReadBarrier, CONTEXT, OBJECT, SHAPE, OBJECT, 0, ACCSET_STORE_ANY) /* @@ -13765,7 +13775,7 @@ TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32 *slotp, LIns** v_insp, * must differ in its shape (or not be the global object). */ if (!obj->isDenseArray() && obj->getOps()->getProperty) - RETURN_STOP_A("non-dense-array, non-native JSObjectOps::getProperty"); + RETURN_STOP_A("non-dense-array, non-native js::ObjectOps::getProperty"); JS_ASSERT((slotp && v_insp && !outp) || (!slotp && !v_insp && outp)); @@ -13825,37 +13835,37 @@ TraceRecorder::propTail(JSObject* obj, LIns* obj_ins, JSObject* obj2, PCVal pcva uint32 setflags = (cs.format & (JOF_INCDEC | JOF_FOR)); JS_ASSERT(!(cs.format & JOF_SET)); - JSScopeProperty* sprop; + const Shape* shape; uint32 slot; bool isMethod; - if (pcval.isSprop()) { - sprop = pcval.toSprop(); - JS_ASSERT(obj2->scope()->hasProperty(sprop)); + if (pcval.isShape()) { + shape = pcval.toShape(); + JS_ASSERT(obj2->nativeContains(*shape)); - if (setflags && !sprop->hasDefaultSetter()) + if (setflags && !shape->hasDefaultSetter()) RETURN_STOP("non-stub setter"); - if (setflags && !sprop->writable()) + if (setflags && !shape->writable()) RETURN_STOP("writing to a readonly property"); - if (!sprop->hasDefaultGetterOrIsMethod()) { + if (!shape->hasDefaultGetterOrIsMethod()) { if (slotp) RETURN_STOP("can't trace non-stub getter for this opcode"); - if (sprop->hasGetterValue()) - return getPropertyWithScriptGetter(obj, obj_ins, sprop); - if (sprop->slot == SPROP_INVALID_SLOT) - return getPropertyWithNativeGetter(obj_ins, sprop, outp); + if (shape->hasGetterValue()) + return getPropertyWithScriptGetter(obj, obj_ins, shape); + if (shape->slot == SHAPE_INVALID_SLOT) + return getPropertyWithNativeGetter(obj_ins, shape, outp); return getPropertyById(obj_ins, outp); } - if (!SPROP_HAS_VALID_SLOT(sprop, obj2->scope())) + if (!obj2->containsSlot(shape->slot)) RETURN_STOP("no valid slot"); - slot = sprop->slot; - isMethod = sprop->isMethod(); - JS_ASSERT_IF(isMethod, 
obj2->scope()->hasMethodBarrier()); + slot = shape->slot; + isMethod = shape->isMethod(); + JS_ASSERT_IF(isMethod, obj2->hasMethodBarrier()); } else { if (!pcval.isSlot()) RETURN_STOP("PCE is not a slot"); slot = pcval.toSlot(); - sprop = NULL; + shape = NULL; isMethod = false; } @@ -13905,7 +13915,7 @@ TraceRecorder::propTail(JSObject* obj, LIns* obj_ins, JSObject* obj2, PCVal pcva */ if (isMethod && !cx->fp()->hasIMacroPC()) { enterDeepBailCall(); - LIns* args[] = { v_ins, INS_CONSTSPROP(sprop), obj_ins, cx_ins }; + LIns* args[] = { v_ins, INS_CONSTSHAPE(shape), obj_ins, cx_ins }; v_ins = lir->insCall(&MethodReadBarrier_ci, args); leaveDeepBailCall(); } @@ -14755,16 +14765,13 @@ TraceRecorder::traverseScopeChain(JSObject *obj, LIns *obj_ins, JSObject *target // We must guard on the shape of all call objects for heavyweight functions // that we traverse on the scope chain: if the shape changes, a variable with // the same name may have been inserted in the scope chain. - if (obj->getClass() == &js_CallClass && + if (obj->isCall() && JSFUN_HEAVYWEIGHT_TEST(js_GetCallObjectFunction(obj)->flags)) { - LIns* map_ins = map(obj_ins); - LIns* shape_ins = addName(lir->insLoad(LIR_ldi, map_ins, offsetof(JSScope, shape), - ACCSET_OTHER), - "obj_shape"); if (!exit) exit = snapshot(BRANCH_EXIT); guard(true, - addName(lir->ins2ImmI(LIR_eqi, shape_ins, obj->shape()), "guard_shape"), + addName(lir->ins2ImmI(LIR_eqi, shape_ins(obj_ins), obj->shape()), + "guard_shape"), exit); } } @@ -14855,7 +14862,7 @@ TraceRecorder::record_JSOP_BINDNAME() RETURN_ERROR_A("error in js_FindIdentifierBase"); if (!TRACE_RECORDER(localCx)) return ARECORD_ABORTED; - if (obj2 != globalObj && obj2->getClass() != &js_CallClass) + if (obj2 != globalObj && !obj2->isCall()) RETURN_STOP_A("BINDNAME on non-global, non-call object"); // Generate LIR to get to the target object from the start object. 
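
For reference, the single-load shape-guard pattern that the shape_ins() change above converges on, as a consolidated sketch of code appearing in the hunks above (names and offsets as used there; illustrative only):

    // shape_ins(obj_ins) is now a single 32-bit load of JSObject::objShape,
    // replacing the old two-load path through JSObject::map and JSScope::shape.
    LIns* s_ins = addName(lir->insLoad(LIR_ldi, obj_ins,
                                       int(offsetof(JSObject, objShape)),
                                       ACCSET_OTHER),
                          "objShape");
    // Guard that the object's shape still matches the shape observed at record
    // time; any mismatch leaves the trace through the side exit.
    guard(true,
          addName(lir->ins2ImmI(LIR_eqi, s_ins, obj->shape()), "guard_shape"),
          exit);
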
@@ -15205,25 +15212,28 @@ TraceRecorder::record_JSOP_LAMBDA_FC() INS_CONSTFUN(fun), cx_ins }; - LIns* call_ins = lir->insCall(&js_AllocFlatClosure_ci, args); + LIns* closure_ins = lir->insCall(&js_AllocFlatClosure_ci, args); guard(false, - addName(lir->ins2(LIR_eqp, call_ins, INS_NULL()), + addName(lir->ins2(LIR_eqp, closure_ins, INS_NULL()), "guard(js_AllocFlatClosure)"), OOM_EXIT); if (fun->u.i.nupvars) { JSUpvarArray *uva = fun->u.i.script->upvars(); + LIns* upvars_ins = stobj_get_const_private_ptr(closure_ins, + JSObject::JSSLOT_FLAT_CLOSURE_UPVARS); + for (uint32 i = 0, n = uva->length; i < n; i++) { Value v; - LIns* upvar_ins = upvar(fun->u.i.script, uva, i, v); - if (!upvar_ins) + LIns* v_ins = upvar(fun->u.i.script, uva, i, v); + if (!v_ins) return ARECORD_STOP; - LIns* dslots_ins = NULL; - stobj_set_dslot(call_ins, i, dslots_ins, v, upvar_ins); + + box_value_into(v, v_ins, upvars_ins, i * sizeof(Value), ACCSET_OTHER); } } - stack(0, call_ins); + stack(0, closure_ins); return ARECORD_CONTINUE; } @@ -15781,7 +15791,7 @@ TraceRecorder::record_JSOP_CALLPROP() } else { if (l.isPrimitive()) RETURN_STOP_A("callprop of primitive method"); - JS_ASSERT_IF(pcval.isSprop(), !pcval.toSprop()->isMethod()); + JS_ASSERT_IF(pcval.isShape(), !pcval.toShape()->isMethod()); CHECK_STATUS_A(propTail(obj, obj_ins, obj2, pcval, NULL, NULL, &l)); } stack(0, this_ins); diff --git a/js/src/jstracer.h b/js/src/jstracer.h index 544444069285..9e15535b3faa 100644 --- a/js/src/jstracer.h +++ b/js/src/jstracer.h @@ -668,7 +668,7 @@ struct TreeFragment : public LinkableFragment linkedTrees(alloc), sideExits(alloc), gcthings(alloc), - sprops(alloc) + shapes(alloc) { } TreeFragment* first; @@ -694,7 +694,7 @@ struct TreeFragment : public LinkableFragment unsigned maxCallDepth; /* All embedded GC things are registered here so the GC can scan them. 
*/ Queue gcthings; - Queue sprops; + Queue shapes; unsigned maxNativeStackSlots; inline unsigned nGlobalTypes() { @@ -1014,7 +1014,7 @@ class TraceRecorder nanojit::LIns* insImmObj(JSObject* obj); nanojit::LIns* insImmFun(JSFunction* fun); nanojit::LIns* insImmStr(JSString* str); - nanojit::LIns* insImmSprop(JSScopeProperty* sprop); + nanojit::LIns* insImmShape(const js::Shape* shape); nanojit::LIns* insImmId(jsid id); nanojit::LIns* p2i(nanojit::LIns* ins); @@ -1066,7 +1066,7 @@ class TraceRecorder unsigned callDepth, unsigned ngslots, JSValueType* typeMap); void trackNativeStackUse(unsigned slots); - JS_REQUIRES_STACK bool isValidSlot(JSScope* scope, JSScopeProperty* sprop); + JS_REQUIRES_STACK bool isValidSlot(JSObject *obj, const js::Shape* shape); JS_REQUIRES_STACK bool lazilyImportGlobalSlot(unsigned slot); JS_REQUIRES_STACK void importGlobalSlot(unsigned slot); @@ -1131,7 +1131,7 @@ class TraceRecorder Value v; // current property value JSObject *obj; // Call object where name was found nanojit::LIns *obj_ins; // LIR value for obj - JSScopeProperty *sprop; // sprop name was resolved to + js::Shape *shape; // shape name was resolved to }; JS_REQUIRES_STACK nanojit::LIns* scopeChain(); @@ -1140,7 +1140,7 @@ class TraceRecorder JS_REQUIRES_STACK JSStackFrame* frameIfInRange(JSObject* obj, unsigned* depthp = NULL) const; JS_REQUIRES_STACK RecordingStatus traverseScopeChain(JSObject *obj, nanojit::LIns *obj_ins, JSObject *obj2, nanojit::LIns *&obj2_ins); JS_REQUIRES_STACK AbortableRecordingStatus scopeChainProp(JSObject* obj, Value*& vp, nanojit::LIns*& ins, NameResult& nr); - JS_REQUIRES_STACK RecordingStatus callProp(JSObject* obj, JSProperty* sprop, jsid id, Value*& vp, nanojit::LIns*& ins, NameResult& nr); + JS_REQUIRES_STACK RecordingStatus callProp(JSObject* obj, JSProperty* shape, jsid id, Value*& vp, nanojit::LIns*& ins, NameResult& nr); JS_REQUIRES_STACK nanojit::LIns* arg(unsigned n); JS_REQUIRES_STACK void arg(unsigned n, nanojit::LIns* i); @@ -1200,7 +1200,7 @@ class TraceRecorder void forgetGuardedShapes(); - inline nanojit::LIns* map(nanojit::LIns *obj_ins); + inline nanojit::LIns* shape_ins(nanojit::LIns *obj_ins); JS_REQUIRES_STACK AbortableRecordingStatus test_property_cache(JSObject* obj, nanojit::LIns* obj_ins, JSObject*& obj2, PCVal& pcval); JS_REQUIRES_STACK RecordingStatus guardPropertyCacheHit(nanojit::LIns* obj_ins, @@ -1217,7 +1217,8 @@ class TraceRecorder nanojit::LIns*& dslots_ins, const Value &v, nanojit::LIns* v_ins); void set_array_fslot(nanojit::LIns *obj_ins, unsigned slot, uint32 val); - nanojit::LIns* stobj_get_const_private_ptr(nanojit::LIns* obj_ins); + nanojit::LIns* stobj_get_const_private_ptr(nanojit::LIns* obj_ins, + unsigned slot = JSSLOT_PRIVATE); nanojit::LIns* stobj_get_fslot_uint32(nanojit::LIns* obj_ins, unsigned slot); nanojit::LIns* stobj_get_fslot_ptr(nanojit::LIns* obj_ins, unsigned slot); nanojit::LIns* unbox_slot(JSObject *obj, nanojit::LIns *obj_ins, uint32 slot, @@ -1261,11 +1262,11 @@ class TraceRecorder nanojit::LIns* index_ins, Value* outp); JS_REQUIRES_STACK RecordingStatus getPropertyById(nanojit::LIns* obj_ins, Value* outp); JS_REQUIRES_STACK RecordingStatus getPropertyWithNativeGetter(nanojit::LIns* obj_ins, - JSScopeProperty* sprop, + const js::Shape* shape, Value* outp); JS_REQUIRES_STACK RecordingStatus getPropertyWithScriptGetter(JSObject *obj, nanojit::LIns* obj_ins, - JSScopeProperty* sprop); + const js::Shape* shape); JS_REQUIRES_STACK nanojit::LIns* getStringLength(nanojit::LIns* str_ins); JS_REQUIRES_STACK 
nanojit::LIns* getStringChars(nanojit::LIns* str_ins); @@ -1276,21 +1277,21 @@ class TraceRecorder JSOp mode); JS_REQUIRES_STACK RecordingStatus nativeSet(JSObject* obj, nanojit::LIns* obj_ins, - JSScopeProperty* sprop, - const Value &v, nanojit::LIns* v_ins); + const js::Shape* shape, + const Value &v, nanojit::LIns* v_ins); JS_REQUIRES_STACK RecordingStatus setProp(Value &l, PropertyCacheEntry* entry, - JSScopeProperty* sprop, - Value &v, nanojit::LIns*& v_ins, - bool isDefinitelyAtom); + const js::Shape* shape, + Value &v, nanojit::LIns*& v_ins, + bool isDefinitelyAtom); JS_REQUIRES_STACK RecordingStatus setCallProp(JSObject *callobj, nanojit::LIns *callobj_ins, - JSScopeProperty *sprop, nanojit::LIns *v_ins, - const Value &v); + const js::Shape *shape, nanojit::LIns *v_ins, + const Value &v); JS_REQUIRES_STACK RecordingStatus initOrSetPropertyByName(nanojit::LIns* obj_ins, - Value* idvalp, Value* rvalp, - bool init); + Value* idvalp, Value* rvalp, + bool init); JS_REQUIRES_STACK RecordingStatus initOrSetPropertyByIndex(nanojit::LIns* obj_ins, - nanojit::LIns* index_ins, - Value* rvalp, bool init); + nanojit::LIns* index_ins, + Value* rvalp, bool init); JS_REQUIRES_STACK AbortableRecordingStatus setElem(int lval_spindex, int idx_spindex, int v_spindex); @@ -1379,8 +1380,7 @@ class TraceRecorder nanojit::LIns *&status_ins); JS_REQUIRES_STACK RecordingStatus emitNativeCall(JSSpecializedNative* sn, uintN argc, nanojit::LIns* args[], bool rooted); - JS_REQUIRES_STACK void emitNativePropertyOp(JSScope* scope, - JSScopeProperty* sprop, + JS_REQUIRES_STACK void emitNativePropertyOp(const js::Shape* shape, nanojit::LIns* obj_ins, bool setflag, nanojit::LIns* addr_boxed_val_ins); @@ -1482,8 +1482,9 @@ public: JS_REQUIRES_STACK AbortableRecordingStatus record_EnterFrame(uintN& inlineCallCount); JS_REQUIRES_STACK AbortableRecordingStatus record_LeaveFrame(); JS_REQUIRES_STACK AbortableRecordingStatus record_SetPropHit(PropertyCacheEntry* entry, - JSScopeProperty* sprop); - JS_REQUIRES_STACK AbortableRecordingStatus record_DefLocalFunSetSlot(uint32 slot, JSObject* obj); + const js::Shape* shape); + JS_REQUIRES_STACK AbortableRecordingStatus record_DefLocalFunSetSlot(uint32 slot, + JSObject* obj); JS_REQUIRES_STACK AbortableRecordingStatus record_NativeCallComplete(); void forgetGuardedShapesForObject(JSObject* obj); diff --git a/js/src/jstypedarray.cpp b/js/src/jstypedarray.cpp index c675a8b650f1..6c9df6102462 100644 --- a/js/src/jstypedarray.cpp +++ b/js/src/jstypedarray.cpp @@ -517,7 +517,7 @@ class TypedArrayTemplate } else { JSObject *obj2; JSProperty *prop; - JSScopeProperty *sprop; + const Shape *shape; JSObject *proto = obj->getProto(); if (!proto) { @@ -531,8 +531,8 @@ class TypedArrayTemplate if (prop) { if (obj2->isNative()) { - sprop = (JSScopeProperty *) prop; - if (!js_NativeGet(cx, obj, obj2, sprop, JSGET_METHOD_BARRIER, vp)) + shape = (Shape *) prop; + if (!js_NativeGet(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, vp)) return false; JS_UNLOCK_OBJ(cx, obj2); } @@ -878,7 +878,7 @@ class TypedArrayTemplate return false; } - // note the usage of JS_NewObject here -- we don't want the + // note the usage of NewObject here -- we don't want the // constructor to be called! 
JS_ASSERT(slowClass() != &js_FunctionClass); JSObject *nobj = NewNonFunction(cx, slowClass(), NULL, NULL); @@ -985,9 +985,9 @@ class TypedArrayTemplate makeFastWithPrivate(JSContext *cx, JSObject *obj, ThisTypeArray *tarray) { JS_ASSERT(obj->getClass() == slowClass()); - obj->setPrivate(tarray); + obj->setSharedNonNativeMap(); obj->clasp = fastClass(); - obj->map = const_cast(&JSObjectMap::sharedNonNative); + obj->setPrivate(tarray); } public: diff --git a/js/src/jsutil.cpp b/js/src/jsutil.cpp index d1c70d0a0415..4d0f8f223be9 100644 --- a/js/src/jsutil.cpp +++ b/js/src/jsutil.cpp @@ -199,7 +199,7 @@ void JS_DumpHistogram(JSBasicStats *bs, FILE *fp) { uintN bin; - uint32 cnt, max, prev, val, i; + uint32 cnt, max; double sum, mean; for (bin = 0, max = 0, sum = 0; bin <= 10; bin++) { @@ -209,20 +209,23 @@ JS_DumpHistogram(JSBasicStats *bs, FILE *fp) sum += cnt; } mean = sum / cnt; - for (bin = 0, prev = 0; bin <= 10; bin++, prev = val) { - val = BinToVal(bs->logscale, bin); + for (bin = 0; bin <= 10; bin++) { + uintN val = BinToVal(bs->logscale, bin); + uintN end = (bin == 10) ? 0 : BinToVal(bs->logscale, bin + 1); cnt = bs->hist[bin]; - if (prev + 1 >= val) + if (val + 1 == end) fprintf(fp, " [%6u]", val); + else if (end != 0) + fprintf(fp, "[%6u, %6u]", val, end - 1); else - fprintf(fp, "[%6u, %6u]", prev + 1, val); - fprintf(fp, "%s %8u ", (bin == 10) ? "+" : ":", cnt); + fprintf(fp, "[%6u, +inf]", val); + fprintf(fp, ": %8u ", cnt); if (cnt != 0) { if (max > 1e6 && mean > 1e3) cnt = (uint32) ceil(log10((double) cnt)); else if (max > 16 && mean > 8) cnt = JS_CeilingLog2(cnt); - for (i = 0; i < cnt; i++) + for (uintN i = 0; i < cnt; i++) putc('*', fp); } putc('\n', fp); diff --git a/js/src/jsutil.h b/js/src/jsutil.h index 377a460b03e7..19a05f09da45 100644 --- a/js/src/jsutil.h +++ b/js/src/jsutil.h @@ -256,7 +256,7 @@ public: *mStatementDone = true; } - void SetStatementDone(bool *aStatementDone) { + void setStatementDone(bool *aStatementDone) { mStatementDone = aStatementDone; } }; @@ -284,7 +284,7 @@ public: * temporary, but we really intend it as non-const */ const_cast(aNotifier). 
- SetStatementDone(&mStatementDone); + setStatementDone(&mStatementDone); } }; diff --git a/js/src/jswrapper.cpp b/js/src/jswrapper.cpp index bffa7c406851..46c44e3eeb50 100644 --- a/js/src/jswrapper.cpp +++ b/js/src/jswrapper.cpp @@ -410,7 +410,8 @@ JSCompartment::wrap(JSContext *cx, JSObject **objp) } bool -JSCompartment::wrapId(JSContext *cx, jsid *idp) { +JSCompartment::wrapId(JSContext *cx, jsid *idp) +{ if (JSID_IS_INT(*idp)) return true; AutoValueRooter tvr(cx, IdToValue(*idp)); @@ -430,7 +431,8 @@ JSCompartment::wrap(JSContext *cx, PropertyOp *propp) } bool -JSCompartment::wrap(JSContext *cx, PropertyDescriptor *desc) { +JSCompartment::wrap(JSContext *cx, PropertyDescriptor *desc) +{ return wrap(cx, &desc->obj) && (!(desc->attrs & JSPROP_GETTER) || wrap(cx, &desc->getter)) && (!(desc->attrs & JSPROP_SETTER) || wrap(cx, &desc->setter)) && @@ -438,7 +440,8 @@ JSCompartment::wrap(JSContext *cx, PropertyDescriptor *desc) { } bool -JSCompartment::wrap(JSContext *cx, AutoIdVector &props) { +JSCompartment::wrap(JSContext *cx, AutoIdVector &props) +{ jsid *vector = props.begin(); jsint length = props.length(); for (size_t n = 0; n < size_t(length); ++n) { @@ -449,7 +452,8 @@ JSCompartment::wrap(JSContext *cx, AutoIdVector &props) { } bool -JSCompartment::wrapException(JSContext *cx) { +JSCompartment::wrapException(JSContext *cx) +{ JS_ASSERT(cx->compartment == this); if (cx->throwing) { diff --git a/js/src/jsxml.cpp b/js/src/jsxml.cpp index 5020251c3d07..658ed0614d02 100644 --- a/js/src/jsxml.cpp +++ b/js/src/jsxml.cpp @@ -251,7 +251,7 @@ namespace_equality(JSContext *cx, JSObject *obj, const Value *v, JSBool *bp) JS_FRIEND_DATA(Class) js_NamespaceClass = { "Namespace", JSCLASS_CONSTRUCT_PROTOTYPE | - JSCLASS_HAS_RESERVED_SLOTS(JSObject::NAMESPACE_FIXED_RESERVED_SLOTS) | + JSCLASS_HAS_RESERVED_SLOTS(JSObject::NAMESPACE_CLASS_RESERVED_SLOTS) | JSCLASS_MARK_IS_TRACE | JSCLASS_HAS_CACHED_PROTO(JSProto_Namespace), PropertyStub, /* addProperty */ PropertyStub, /* delProperty */ @@ -370,7 +370,7 @@ qname_equality(JSContext *cx, JSObject *qn, const Value *v, JSBool *bp) JS_FRIEND_DATA(Class) js_QNameClass = { "QName", JSCLASS_CONSTRUCT_PROTOTYPE | - JSCLASS_HAS_RESERVED_SLOTS(JSObject::QNAME_FIXED_RESERVED_SLOTS) | + JSCLASS_HAS_RESERVED_SLOTS(JSObject::QNAME_CLASS_RESERVED_SLOTS) | JSCLASS_MARK_IS_TRACE | JSCLASS_HAS_CACHED_PROTO(JSProto_QName), PropertyStub, /* addProperty */ PropertyStub, /* delProperty */ @@ -405,7 +405,7 @@ JS_FRIEND_DATA(Class) js_QNameClass = { JS_FRIEND_DATA(Class) js_AttributeNameClass = { js_AttributeName_str, JSCLASS_CONSTRUCT_PROTOTYPE | - JSCLASS_HAS_RESERVED_SLOTS(JSObject::QNAME_FIXED_RESERVED_SLOTS) | + JSCLASS_HAS_RESERVED_SLOTS(JSObject::QNAME_CLASS_RESERVED_SLOTS) | JSCLASS_MARK_IS_TRACE | JSCLASS_HAS_CACHED_PROTO(JSProto_AttributeName), PropertyStub, /* addProperty */ PropertyStub, /* delProperty */ @@ -419,7 +419,7 @@ JS_FRIEND_DATA(Class) js_AttributeNameClass = { JS_FRIEND_DATA(Class) js_AnyNameClass = { js_AnyName_str, JSCLASS_CONSTRUCT_PROTOTYPE | - JSCLASS_HAS_RESERVED_SLOTS(JSObject::QNAME_FIXED_RESERVED_SLOTS) | + JSCLASS_HAS_RESERVED_SLOTS(JSObject::QNAME_CLASS_RESERVED_SLOTS) | JSCLASS_MARK_IS_TRACE | JSCLASS_HAS_CACHED_PROTO(JSProto_AnyName), PropertyStub, /* addProperty */ PropertyStub, /* delProperty */ @@ -4642,15 +4642,14 @@ xml_trace_vector(JSTracer *trc, JSXML **vec, uint32 len) /* * XML objects are native. Thus xml_lookupProperty must return a valid - * JSScopeProperty pointer parameter via *propp to signify "property found". 
- * Since the only call to xml_lookupProperty is via JSObject::lookupProperty, - * and then only from js_FindProperty (in jsobj.c, called from jsinterp.c) or - * from JSOP_IN case in the interpreter, the only time we add a - * JSScopeProperty here is when an unqualified name is being accessed or when - * "name in xml" is called. + * Shape pointer parameter via *propp to signify "property found". Since the + * only call to xml_lookupProperty is via JSObject::lookupProperty, and then + * only from js_FindProperty (in jsobj.c, called from jsinterp.c) or from + * JSOP_IN case in the interpreter, the only time we add a Shape here is when + * an unqualified name is being accessed or when "name in xml" is called. * * This scope property keeps the JSOP_NAME code in js_Interpret happy by - * giving it an sprop with (getter, setter) == (GetProperty, PutProperty). + * giving it an shape with (getter, setter) == (GetProperty, PutProperty). * * NB: xml_deleteProperty must take care to remove any property added here. * @@ -4673,7 +4672,6 @@ xml_lookupProperty(JSContext *cx, JSObject *obj, jsid id, JSObject **objp, uint32 i; JSObject *qn; jsid funid; - JSScopeProperty *sprop; xml = (JSXML *) obj->getPrivate(); if (js_IdIsIndex(id, &i)) { @@ -4690,16 +4688,17 @@ xml_lookupProperty(JSContext *cx, JSObject *obj, jsid id, JSObject **objp, *objp = NULL; *propp = NULL; } else { - sprop = js_AddNativeProperty(cx, obj, id, - Valueify(GetProperty), Valueify(PutProperty), - SPROP_INVALID_SLOT, JSPROP_ENUMERATE, - 0, 0); - if (!sprop) + const Shape *shape = + js_AddNativeProperty(cx, obj, id, + Valueify(GetProperty), Valueify(PutProperty), + SHAPE_INVALID_SLOT, JSPROP_ENUMERATE, + 0, 0); + if (!shape) return JS_FALSE; JS_LOCK_OBJ(cx, obj); *objp = obj; - *propp = (JSProperty *) sprop; + *propp = (JSProperty *) shape; } return JS_TRUE; } @@ -4799,7 +4798,7 @@ xml_deleteProperty(JSContext *cx, JSObject *obj, jsid id, Value *rval) * property's getter or setter. But now it's time to remove any such * property, to purge the property cache and remove the scope entry. */ - if (obj->scope()->object == obj && !js_DeleteProperty(cx, obj, id, rval)) + if (!obj->nativeEmpty() && !js_DeleteProperty(cx, obj, id, rval)) return JS_FALSE; rval->setBoolean(true); @@ -7072,7 +7071,7 @@ js_InitXMLClass(JSContext *cx, JSObject *obj) JSFunction *fun; JSXML *xml; JSProperty *prop; - JSScopeProperty *sprop; + Shape *shape; jsval cval, vp[3]; /* Define the isXMLName function. 
*/ @@ -7105,9 +7104,9 @@ js_InitXMLClass(JSContext *cx, JSObject *obj) return NULL; } JS_ASSERT(prop); - sprop = (JSScopeProperty *) prop; - JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, pobj->scope())); - cval = Jsvalify(pobj->getSlotMT(cx, sprop->slot)); + shape = (Shape *) prop; + JS_ASSERT(pobj->containsSlot(shape->slot)); + cval = Jsvalify(pobj->getSlotMT(cx, shape->slot)); JS_UNLOCK_OBJ(cx, pobj); JS_ASSERT(VALUE_IS_FUNCTION(cx, cval)); diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index bc65f1b98003..8d5248830206 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -1800,7 +1800,7 @@ DisassembleValue(JSContext *cx, jsval v, bool lines, bool recursive) fputs("\nupvars: {\n", stdout); void *mark = JS_ARENA_MARK(&cx->tempPool); - jsuword *localNames = js_GetLocalNameArray(cx, fun, &cx->tempPool); + jsuword *localNames = fun->getLocalNameArray(cx, &cx->tempPool); if (!localNames) return false; @@ -2047,7 +2047,7 @@ DumpScope(JSContext *cx, JSObject *obj, FILE *fp) uintN i = 0; for (JSScopeProperty *sprop = NULL; JS_PropertyIterator(obj, &sprop);) { fprintf(fp, "%3u %p ", i++, (void *) sprop); - sprop->dump(cx, fp); + ((Shape *) sprop)->dump(cx, fp); } } diff --git a/js/src/tests/js1_5/Regress/regress-203278-1.js b/js/src/tests/js1_5/Regress/regress-203278-1.js index cb72cbb485be..eb4f968910f3 100644 --- a/js/src/tests/js1_5/Regress/regress-203278-1.js +++ b/js/src/tests/js1_5/Regress/regress-203278-1.js @@ -49,7 +49,7 @@ function test1() {} function test() { test1.call(this); } test.prototype = new test1(); -var length = 1024 * 1024 - 1; +var length = 512 * 1024 - 1; var obj = new test(); var first = obj; for(var i = 0 ; i < length ; i++) { diff --git a/js/src/trace-test/tests/basic/firstSlotConflict.js b/js/src/trace-test/tests/basic/firstSlotConflict.js new file mode 100644 index 000000000000..fa394691b7df --- /dev/null +++ b/js/src/trace-test/tests/basic/firstSlotConflict.js @@ -0,0 +1,10 @@ +(function(x) { + function f1() { return 1; } + function f2() { return 2; } + function f3() { return 3; } + function f4() { return 4; } + var g = function () { return x; } + var a = [f1, f2, f3, f4, g]; + for each (var v in a) + v.adhoc = 42; // Don't assertbotch in jsbuiltins.cpp setting g.adhoc +})(33);